
alloc/rc.rs

//! Single-threaded reference-counting pointers. 'Rc' stands for 'Reference
//! Counted'.
//!
//! The type [`Rc<T>`][`Rc`] provides shared ownership of a value of type `T`,
//! allocated in the heap. Invoking [`clone`][clone] on [`Rc`] produces a new
//! pointer to the same allocation in the heap. When the last [`Rc`] pointer to a
//! given allocation is destroyed, the value stored in that allocation (often
//! referred to as "inner value") is also dropped.
//!
//! Shared references in Rust disallow mutation by default, and [`Rc`]
//! is no exception: you cannot generally obtain a mutable reference to
//! something inside an [`Rc`]. If you need mutability, put a [`Cell`]
//! or [`RefCell`] inside the [`Rc`]; see [an example of mutability
//! inside an `Rc`][mutability].
//!
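//! A minimal sketch of this pattern, mutating a shared value through a
//! `RefCell` stored inside an `Rc`:
//!
//! ```
//! use std::cell::RefCell;
//! use std::rc::Rc;
//!
//! let shared = Rc::new(RefCell::new(Vec::new()));
//! let other = Rc::clone(&shared);
//!
//! // Both handles can mutate the vector through the shared reference.
//! shared.borrow_mut().push(1);
//! other.borrow_mut().push(2);
//! assert_eq!(*shared.borrow(), [1, 2]);
//! ```
//!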
//! [`Rc`] uses non-atomic reference counting. This means that overhead is very
//! low, but an [`Rc`] cannot be sent between threads, and consequently [`Rc`]
//! does not implement [`Send`]. As a result, the Rust compiler
//! will check *at compile time* that you are not sending [`Rc`]s between
//! threads. If you need multi-threaded, atomic reference counting, use
//! [`sync::Arc`][arc].
//!
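//! For example, the following does not compile, because `Rc` is not `Send`
//! and `thread::spawn` requires a `Send` closure:
//!
//! ```compile_fail
//! use std::rc::Rc;
//! use std::thread;
//!
//! let rc = Rc::new(5);
//! // Error: `Rc<i32>` cannot be sent between threads safely.
//! thread::spawn(move || println!("{rc}"));
//! ```
//!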
//! The [`downgrade`][downgrade] method can be used to create a non-owning
//! [`Weak`] pointer. A [`Weak`] pointer can be [`upgrade`][upgrade]d
//! to an [`Rc`], but this will return [`None`] if the value stored in the allocation has
//! already been dropped. In other words, `Weak` pointers do not keep the value
//! inside the allocation alive; however, they *do* keep the allocation
//! (the backing store for the inner value) alive.
//!
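//! A minimal illustration of this behavior:
//!
//! ```
//! use std::rc::Rc;
//!
//! let strong = Rc::new(5);
//! let weak = Rc::downgrade(&strong);
//! assert!(weak.upgrade().is_some());
//!
//! // Dropping the last `Rc` drops the value, so the `Weak` can no longer be
//! // upgraded (the backing allocation itself is freed only once `weak` is
//! // gone too).
//! drop(strong);
//! assert!(weak.upgrade().is_none());
//! ```
//!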
//! A cycle between [`Rc`] pointers will never be deallocated. For this reason,
//! [`Weak`] is used to break cycles. For example, a tree could have strong
//! [`Rc`] pointers from parent nodes to children, and [`Weak`] pointers from
//! children back to their parents.
//!
//! `Rc<T>` automatically dereferences to `T` (via the [`Deref`] trait),
//! so you can call `T`'s methods on a value of type [`Rc<T>`][`Rc`]. To avoid name
//! clashes with `T`'s methods, the methods of [`Rc<T>`][`Rc`] itself are associated
//! functions, called using [fully qualified syntax]:
//!
//! ```
//! use std::rc::Rc;
//!
//! let my_rc = Rc::new(());
//! let my_weak = Rc::downgrade(&my_rc);
//! ```
//!
//! `Rc<T>`'s implementations of traits like `Clone` may also be called using
//! fully qualified syntax. Some people prefer to use fully qualified syntax,
//! while others prefer using method-call syntax.
//!
//! ```
//! use std::rc::Rc;
//!
//! let rc = Rc::new(());
//! // Method-call syntax
//! let rc2 = rc.clone();
//! // Fully qualified syntax
//! let rc3 = Rc::clone(&rc);
//! ```
//!
//! [`Weak<T>`][`Weak`] does not auto-dereference to `T`, because the inner value may have
//! already been dropped.
//!
//! # Cloning references
//!
//! Creating a new reference to the same allocation as an existing reference counted pointer
//! is done using the `Clone` trait implemented for [`Rc<T>`][`Rc`] and [`Weak<T>`][`Weak`].
//!
//! ```
//! use std::rc::Rc;
//!
//! let foo = Rc::new(vec![1.0, 2.0, 3.0]);
//! // The two syntaxes below are equivalent.
//! let a = foo.clone();
//! let b = Rc::clone(&foo);
//! // a and b both point to the same memory location as foo.
//! ```
//!
//! The `Rc::clone(&from)` syntax is the most idiomatic because it conveys more explicitly
//! the meaning of the code. In the example above, this syntax makes it easier to see that
//! this code is creating a new reference rather than copying the whole content of `foo`.
//!
//! # Examples
//!
//! Consider a scenario where a set of `Gadget`s are owned by a given `Owner`.
//! We want to have our `Gadget`s point to their `Owner`. We can't do this with
//! unique ownership, because more than one gadget may belong to the same
//! `Owner`. [`Rc`] allows us to share an `Owner` between multiple `Gadget`s,
//! and have the `Owner` remain allocated as long as any `Gadget` points at it.
//!
//! ```
//! use std::rc::Rc;
//!
//! struct Owner {
//!     name: String,
//!     // ...other fields
//! }
//!
//! struct Gadget {
//!     id: i32,
//!     owner: Rc<Owner>,
//!     // ...other fields
//! }
//!
//! fn main() {
//!     // Create a reference-counted `Owner`.
//!     let gadget_owner: Rc<Owner> = Rc::new(
//!         Owner {
//!             name: "Gadget Man".to_string(),
//!         }
//!     );
//!
//!     // Create `Gadget`s belonging to `gadget_owner`. Cloning the `Rc<Owner>`
//!     // gives us a new pointer to the same `Owner` allocation, incrementing
//!     // the reference count in the process.
//!     let gadget1 = Gadget {
//!         id: 1,
//!         owner: Rc::clone(&gadget_owner),
//!     };
//!     let gadget2 = Gadget {
//!         id: 2,
//!         owner: Rc::clone(&gadget_owner),
//!     };
//!
//!     // Dispose of our local variable `gadget_owner`.
//!     drop(gadget_owner);
//!
//!     // Despite dropping `gadget_owner`, we're still able to print out the name
//!     // of the `Owner` of the `Gadget`s. This is because we've only dropped a
//!     // single `Rc<Owner>`, not the `Owner` it points to. As long as there are
//!     // other `Rc<Owner>` pointing at the same `Owner` allocation, it will remain
//!     // live. The field projection `gadget1.owner.name` works because
//!     // `Rc<Owner>` automatically dereferences to `Owner`.
//!     println!("Gadget {} owned by {}", gadget1.id, gadget1.owner.name);
//!     println!("Gadget {} owned by {}", gadget2.id, gadget2.owner.name);
//!
//!     // At the end of the function, `gadget1` and `gadget2` are destroyed, and
//!     // with them the last counted references to our `Owner`. Gadget Man now
//!     // gets destroyed as well.
//! }
//! ```
//!
//! If our requirements change, and we also need to be able to traverse from
//! `Owner` to `Gadget`, we will run into problems. An [`Rc`] pointer from `Owner`
//! to `Gadget` introduces a cycle. This means that their
//! reference counts can never reach 0, and the allocation will never be destroyed:
//! a memory leak. In order to get around this, we can use [`Weak`]
//! pointers.
//!
//! Rust actually makes it somewhat difficult to produce this loop in the first
//! place. In order to end up with two values that point at each other, one of
//! them needs to be mutable. This is difficult because [`Rc`] enforces
//! memory safety by only giving out shared references to the value it wraps,
//! and these don't allow direct mutation. We need to wrap the part of the
//! value we wish to mutate in a [`RefCell`], which provides *interior
//! mutability*: a method to achieve mutability through a shared reference.
//! [`RefCell`] enforces Rust's borrowing rules at runtime.
//!
//! ```
//! use std::rc::Rc;
//! use std::rc::Weak;
//! use std::cell::RefCell;
//!
//! struct Owner {
//!     name: String,
//!     gadgets: RefCell<Vec<Weak<Gadget>>>,
//!     // ...other fields
//! }
//!
//! struct Gadget {
//!     id: i32,
//!     owner: Rc<Owner>,
//!     // ...other fields
//! }
//!
//! fn main() {
//!     // Create a reference-counted `Owner`. Note that we've put the `Owner`'s
//!     // vector of `Gadget`s inside a `RefCell` so that we can mutate it through
//!     // a shared reference.
//!     let gadget_owner: Rc<Owner> = Rc::new(
//!         Owner {
//!             name: "Gadget Man".to_string(),
//!             gadgets: RefCell::new(vec![]),
//!         }
//!     );
//!
//!     // Create `Gadget`s belonging to `gadget_owner`, as before.
//!     let gadget1 = Rc::new(
//!         Gadget {
//!             id: 1,
//!             owner: Rc::clone(&gadget_owner),
//!         }
//!     );
//!     let gadget2 = Rc::new(
//!         Gadget {
//!             id: 2,
//!             owner: Rc::clone(&gadget_owner),
//!         }
//!     );
//!
//!     // Add the `Gadget`s to their `Owner`.
//!     {
//!         let mut gadgets = gadget_owner.gadgets.borrow_mut();
//!         gadgets.push(Rc::downgrade(&gadget1));
//!         gadgets.push(Rc::downgrade(&gadget2));
//!
//!         // `RefCell` dynamic borrow ends here.
//!     }
//!
//!     // Iterate over our `Gadget`s, printing their details out.
//!     for gadget_weak in gadget_owner.gadgets.borrow().iter() {
//!
//!         // `gadget_weak` is a `Weak<Gadget>`. Since `Weak` pointers can't
//!         // guarantee the allocation still exists, we need to call
//!         // `upgrade`, which returns an `Option<Rc<Gadget>>`.
//!         //
//!         // In this case we know the allocation still exists, so we simply
//!         // `unwrap` the `Option`. In a more complicated program, you might
//!         // need graceful error handling for a `None` result.
//!
//!         let gadget = gadget_weak.upgrade().unwrap();
//!         println!("Gadget {} owned by {}", gadget.id, gadget.owner.name);
//!     }
//!
//!     // At the end of the function, `gadget_owner`, `gadget1`, and `gadget2`
//!     // are destroyed. There are now no strong (`Rc`) pointers to the
//!     // gadgets, so they are destroyed. This zeroes the reference count on
//!     // Gadget Man, so he gets destroyed as well.
//! }
//! ```
//!
//! [clone]: Clone::clone
//! [`Cell`]: core::cell::Cell
//! [`RefCell`]: core::cell::RefCell
//! [arc]: crate::sync::Arc
//! [`Deref`]: core::ops::Deref
//! [downgrade]: Rc::downgrade
//! [upgrade]: Weak::upgrade
//! [mutability]: core::cell#introducing-mutability-inside-of-something-immutable
//! [fully qualified syntax]: https://doc.rust-lang.org/book/ch19-03-advanced-traits.html#fully-qualified-syntax-for-disambiguation-calling-methods-with-the-same-name

#![stable(feature = "rust1", since = "1.0.0")]

use core::any::Any;
use core::cell::{Cell, CloneFromCell};
use core::clone::UseCloned;
#[cfg(not(no_global_oom_handling))]
use core::clone::{CloneToUninit, TrivialClone};
use core::cmp::Ordering;
use core::hash::{Hash, Hasher};
use core::intrinsics::abort;
#[cfg(not(no_global_oom_handling))]
use core::iter;
use core::marker::{PhantomData, Unsize};
use core::mem::{self, ManuallyDrop, align_of_val_raw};
use core::num::NonZeroUsize;
use core::ops::{CoerceUnsized, Deref, DerefMut, DerefPure, DispatchFromDyn, LegacyReceiver};
#[cfg(not(no_global_oom_handling))]
use core::ops::{Residual, Try};
use core::panic::{RefUnwindSafe, UnwindSafe};
#[cfg(not(no_global_oom_handling))]
use core::pin::Pin;
use core::pin::PinCoerceUnsized;
use core::ptr::{self, NonNull, drop_in_place};
#[cfg(not(no_global_oom_handling))]
use core::slice::from_raw_parts_mut;
use core::{borrow, fmt, hint};

#[cfg(not(no_global_oom_handling))]
use crate::alloc::handle_alloc_error;
use crate::alloc::{AllocError, Allocator, Global, Layout};
use crate::borrow::{Cow, ToOwned};
use crate::boxed::Box;
#[cfg(not(no_global_oom_handling))]
use crate::string::String;
#[cfg(not(no_global_oom_handling))]
use crate::vec::Vec;

// This is repr(C) to future-proof against possible field-reordering, which
// would interfere with otherwise safe [into|from]_raw() of transmutable
// inner types.
// repr(align(2)) (forcing alignment to at least 2) is required because usize
// has 1-byte alignment on AVR.
#[repr(C, align(2))]
struct RcInner<T: ?Sized> {
    strong: Cell<usize>,
    weak: Cell<usize>,
    value: T,
}

/// Calculate layout for `RcInner<T>` using the inner value's layout
fn rc_inner_layout_for_value_layout(layout: Layout) -> Layout {
    // Calculate layout using the given value layout.
    // Previously, layout was calculated on the expression
    // `&*(ptr as *const RcInner<T>)`, but this created a misaligned
    // reference (see #54908).
    Layout::new::<RcInner<()>>().extend(layout).unwrap().0.pad_to_align()
}

/// A single-threaded reference-counting pointer. 'Rc' stands for 'Reference
/// Counted'.
///
/// See the [module-level documentation](./index.html) for more details.
///
/// The inherent methods of `Rc` are all associated functions, which means
/// that you have to call them as e.g., [`Rc::get_mut(&mut value)`][get_mut] instead of
/// `value.get_mut()`. This avoids conflicts with methods of the inner type `T`.
///
/// [get_mut]: Rc::get_mut
#[doc(search_unbox)]
#[rustc_diagnostic_item = "Rc"]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_insignificant_dtor]
pub struct Rc<
    T: ?Sized,
    #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
> {
    ptr: NonNull<RcInner<T>>,
    phantom: PhantomData<RcInner<T>>,
    alloc: A,
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized, A: Allocator> !Send for Rc<T, A> {}

// Note that this negative impl isn't strictly necessary for correctness,
// as `Rc` transitively contains a `Cell`, which is itself `!Sync`.
// However, given how important `Rc`'s `!Sync`-ness is,
// having an explicit negative impl is nice for documentation purposes
// and results in nicer error messages.
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized, A: Allocator> !Sync for Rc<T, A> {}

#[stable(feature = "catch_unwind", since = "1.9.0")]
impl<T: RefUnwindSafe + ?Sized, A: Allocator + UnwindSafe> UnwindSafe for Rc<T, A> {}
#[stable(feature = "rc_ref_unwind_safe", since = "1.58.0")]
impl<T: RefUnwindSafe + ?Sized, A: Allocator + UnwindSafe> RefUnwindSafe for Rc<T, A> {}

#[unstable(feature = "coerce_unsized", issue = "18598")]
impl<T: ?Sized + Unsize<U>, U: ?Sized, A: Allocator> CoerceUnsized<Rc<U, A>> for Rc<T, A> {}

#[unstable(feature = "dispatch_from_dyn", issue = "none")]
impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Rc<U>> for Rc<T> {}

// SAFETY: `Rc::clone` doesn't access any `Cell`s which could contain the `Rc` being cloned.
#[unstable(feature = "cell_get_cloned", issue = "145329")]
unsafe impl<T: ?Sized> CloneFromCell for Rc<T> {}

impl<T: ?Sized> Rc<T> {
    #[inline]
    unsafe fn from_inner(ptr: NonNull<RcInner<T>>) -> Self {
        unsafe { Self::from_inner_in(ptr, Global) }
    }

    #[inline]
    unsafe fn from_ptr(ptr: *mut RcInner<T>) -> Self {
        unsafe { Self::from_inner(NonNull::new_unchecked(ptr)) }
    }
}

impl<T: ?Sized, A: Allocator> Rc<T, A> {
    #[inline(always)]
    fn inner(&self) -> &RcInner<T> {
        // This unsafety is ok because while this Rc is alive we're guaranteed
        // that the inner pointer is valid.
        unsafe { self.ptr.as_ref() }
    }

    #[inline]
    fn into_inner_with_allocator(this: Self) -> (NonNull<RcInner<T>>, A) {
        let this = mem::ManuallyDrop::new(this);
        (this.ptr, unsafe { ptr::read(&this.alloc) })
    }

    #[inline]
    unsafe fn from_inner_in(ptr: NonNull<RcInner<T>>, alloc: A) -> Self {
        Self { ptr, phantom: PhantomData, alloc }
    }

    #[inline]
    unsafe fn from_ptr_in(ptr: *mut RcInner<T>, alloc: A) -> Self {
        unsafe { Self::from_inner_in(NonNull::new_unchecked(ptr), alloc) }
    }

    // Non-inlined part of `drop`.
    #[inline(never)]
    unsafe fn drop_slow(&mut self) {
        // Reconstruct the "strong weak" pointer and drop it when this
        // variable goes out of scope. This ensures that the memory is
        // deallocated even if the destructor of `T` panics.
        let _weak = Weak { ptr: self.ptr, alloc: &self.alloc };

        // Destroy the contained object.
        // We cannot use `get_mut_unchecked` here, because `self.alloc` is borrowed.
        unsafe {
            ptr::drop_in_place(&mut (*self.ptr.as_ptr()).value);
        }
    }
}

impl<T> Rc<T> {
    /// Constructs a new `Rc<T>`.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let five = Rc::new(5);
    /// ```
    #[cfg(not(no_global_oom_handling))]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn new(value: T) -> Rc<T> {
        // There is an implicit weak pointer owned by all the strong
        // pointers, which ensures that the weak destructor never frees
        // the allocation while the strong destructor is running, even
        // if the weak pointer is stored inside the strong one.
        unsafe {
            Self::from_inner(
                Box::leak(Box::new(RcInner { strong: Cell::new(1), weak: Cell::new(1), value }))
                    .into(),
            )
        }
    }

    /// Constructs a new `Rc<T>` while giving you a `Weak<T>` to the allocation,
    /// to allow you to construct a `T` which holds a weak pointer to itself.
    ///
    /// Generally, a structure circularly referencing itself, either directly or
    /// indirectly, should not hold a strong reference to itself to prevent a memory leak.
    /// Using this function, you get access to the weak pointer during the
    /// initialization of `T`, before the `Rc<T>` is created, such that you can
    /// clone and store it inside the `T`.
    ///
    /// `new_cyclic` first allocates the managed allocation for the `Rc<T>`,
    /// then calls your closure, giving it a `Weak<T>` to this allocation,
    /// and only afterwards completes the construction of the `Rc<T>` by placing
    /// the `T` returned from your closure into the allocation.
    ///
    /// Since the new `Rc<T>` is not fully-constructed until `Rc<T>::new_cyclic`
    /// returns, calling [`upgrade`] on the weak reference inside your closure will
    /// fail and result in a `None` value.
    ///
    /// # Panics
    ///
    /// If `data_fn` panics, the panic is propagated to the caller, and the
    /// temporary [`Weak<T>`] is dropped normally.
    ///
    /// # Examples
    ///
    /// ```
    /// # #![allow(dead_code)]
    /// use std::rc::{Rc, Weak};
    ///
    /// struct Gadget {
    ///     me: Weak<Gadget>,
    /// }
    ///
    /// impl Gadget {
    ///     /// Constructs a reference counted Gadget.
    ///     fn new() -> Rc<Self> {
    ///         // `me` is a `Weak<Gadget>` pointing at the new allocation of the
    ///         // `Rc` we're constructing.
    ///         Rc::new_cyclic(|me| {
    ///             // Create the actual struct here.
    ///             Gadget { me: me.clone() }
    ///         })
    ///     }
    ///
    ///     /// Returns a reference counted pointer to Self.
    ///     fn me(&self) -> Rc<Self> {
    ///         self.me.upgrade().unwrap()
    ///     }
    /// }
    /// ```
    /// [`upgrade`]: Weak::upgrade
    #[cfg(not(no_global_oom_handling))]
    #[stable(feature = "arc_new_cyclic", since = "1.60.0")]
    pub fn new_cyclic<F>(data_fn: F) -> Rc<T>
    where
        F: FnOnce(&Weak<T>) -> T,
    {
        Self::new_cyclic_in(data_fn, Global)
    }

    /// Constructs a new `Rc` with uninitialized contents.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let mut five = Rc::<u32>::new_uninit();
    ///
    /// // Deferred initialization:
    /// Rc::get_mut(&mut five).unwrap().write(5);
    ///
    /// let five = unsafe { five.assume_init() };
    ///
    /// assert_eq!(*five, 5)
    /// ```
    #[cfg(not(no_global_oom_handling))]
    #[stable(feature = "new_uninit", since = "1.82.0")]
    #[must_use]
    pub fn new_uninit() -> Rc<mem::MaybeUninit<T>> {
        unsafe {
            Rc::from_ptr(Rc::allocate_for_layout(
                Layout::new::<T>(),
                |layout| Global.allocate(layout),
                <*mut u8>::cast,
            ))
        }
    }

    /// Constructs a new `Rc` with uninitialized contents, with the memory
    /// being filled with `0` bytes.
    ///
    /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
    /// incorrect usage of this method.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let zero = Rc::<u32>::new_zeroed();
    /// let zero = unsafe { zero.assume_init() };
    ///
    /// assert_eq!(*zero, 0)
    /// ```
    ///
    /// [zeroed]: mem::MaybeUninit::zeroed
    #[cfg(not(no_global_oom_handling))]
    #[stable(feature = "new_zeroed_alloc", since = "1.92.0")]
    #[must_use]
    pub fn new_zeroed() -> Rc<mem::MaybeUninit<T>> {
        unsafe {
            Rc::from_ptr(Rc::allocate_for_layout(
                Layout::new::<T>(),
                |layout| Global.allocate_zeroed(layout),
                <*mut u8>::cast,
            ))
        }
    }

    /// Constructs a new `Rc<T>`, returning an error if the allocation fails.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    /// use std::rc::Rc;
    ///
    /// let five = Rc::try_new(5);
    /// # Ok::<(), std::alloc::AllocError>(())
    /// ```
    #[unstable(feature = "allocator_api", issue = "32838")]
    pub fn try_new(value: T) -> Result<Rc<T>, AllocError> {
        // There is an implicit weak pointer owned by all the strong
        // pointers, which ensures that the weak destructor never frees
        // the allocation while the strong destructor is running, even
        // if the weak pointer is stored inside the strong one.
        unsafe {
            Ok(Self::from_inner(
                Box::leak(Box::try_new(RcInner {
                    strong: Cell::new(1),
                    weak: Cell::new(1),
                    value,
                })?)
                .into(),
            ))
        }
    }

    /// Constructs a new `Rc` with uninitialized contents, returning an error if the allocation fails.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::rc::Rc;
    ///
    /// let mut five = Rc::<u32>::try_new_uninit()?;
    ///
    /// // Deferred initialization:
    /// Rc::get_mut(&mut five).unwrap().write(5);
    ///
    /// let five = unsafe { five.assume_init() };
    ///
    /// assert_eq!(*five, 5);
    /// # Ok::<(), std::alloc::AllocError>(())
    /// ```
    #[unstable(feature = "allocator_api", issue = "32838")]
    pub fn try_new_uninit() -> Result<Rc<mem::MaybeUninit<T>>, AllocError> {
        unsafe {
            Ok(Rc::from_ptr(Rc::try_allocate_for_layout(
                Layout::new::<T>(),
                |layout| Global.allocate(layout),
                <*mut u8>::cast,
            )?))
        }
    }

    /// Constructs a new `Rc` with uninitialized contents, with the memory
    /// being filled with `0` bytes, returning an error if the allocation fails.
    ///
    /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
    /// incorrect usage of this method.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::rc::Rc;
    ///
    /// let zero = Rc::<u32>::try_new_zeroed()?;
    /// let zero = unsafe { zero.assume_init() };
    ///
    /// assert_eq!(*zero, 0);
    /// # Ok::<(), std::alloc::AllocError>(())
    /// ```
    ///
    /// [zeroed]: mem::MaybeUninit::zeroed
    #[unstable(feature = "allocator_api", issue = "32838")]
    pub fn try_new_zeroed() -> Result<Rc<mem::MaybeUninit<T>>, AllocError> {
        unsafe {
            Ok(Rc::from_ptr(Rc::try_allocate_for_layout(
                Layout::new::<T>(),
                |layout| Global.allocate_zeroed(layout),
                <*mut u8>::cast,
            )?))
        }
    }

    /// Constructs a new `Pin<Rc<T>>`. If `T` does not implement `Unpin`, then
    /// `value` will be pinned in memory and unable to be moved.
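    ///
    /// # Examples
    ///
    /// A minimal sketch:
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let pinned = Rc::pin(5);
    /// // `Pin<Rc<T>>` still dereferences to `T`.
    /// assert_eq!(*pinned, 5);
    /// ```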
    #[cfg(not(no_global_oom_handling))]
    #[stable(feature = "pin", since = "1.33.0")]
    #[must_use]
    pub fn pin(value: T) -> Pin<Rc<T>> {
        unsafe { Pin::new_unchecked(Rc::new(value)) }
    }

    /// Maps the value in an `Rc`, reusing the allocation if possible.
    ///
    /// `f` is called on a reference to the value in the `Rc`, and the result is returned, also in
    /// an `Rc`.
    ///
    /// Note: this is an associated function, which means that you have
    /// to call it as `Rc::map(r, f)` instead of `r.map(f)`. This
    /// is so that there is no conflict with a method on the inner type.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(smart_pointer_try_map)]
    ///
    /// use std::rc::Rc;
    ///
    /// let r = Rc::new(7);
    /// let new = Rc::map(r, |i| i + 7);
    /// assert_eq!(*new, 14);
    /// ```
    #[cfg(not(no_global_oom_handling))]
    #[unstable(feature = "smart_pointer_try_map", issue = "144419")]
    pub fn map<U>(this: Self, f: impl FnOnce(&T) -> U) -> Rc<U> {
        if size_of::<T>() == size_of::<U>()
            && align_of::<T>() == align_of::<U>()
            && Rc::is_unique(&this)
        {
            unsafe {
                let ptr = Rc::into_raw(this);
                let value = ptr.read();
                let mut allocation = Rc::from_raw(ptr.cast::<mem::MaybeUninit<U>>());

                Rc::get_mut_unchecked(&mut allocation).write(f(&value));
                allocation.assume_init()
            }
        } else {
            Rc::new(f(&*this))
        }
    }

    /// Attempts to map the value in an `Rc`, reusing the allocation if possible.
    ///
    /// `f` is called on a reference to the value in the `Rc`, and if the operation succeeds, the
    /// result is returned, also in an `Rc`.
    ///
    /// Note: this is an associated function, which means that you have
    /// to call it as `Rc::try_map(r, f)` instead of `r.try_map(f)`. This
    /// is so that there is no conflict with a method on the inner type.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(smart_pointer_try_map)]
    ///
    /// use std::rc::Rc;
    ///
    /// let b = Rc::new(7);
    /// let new = Rc::try_map(b, |&i| u32::try_from(i)).unwrap();
    /// assert_eq!(*new, 7);
    /// ```
    #[cfg(not(no_global_oom_handling))]
    #[unstable(feature = "smart_pointer_try_map", issue = "144419")]
    pub fn try_map<R>(
        this: Self,
        f: impl FnOnce(&T) -> R,
    ) -> <R::Residual as Residual<Rc<R::Output>>>::TryType
    where
        R: Try,
        R::Residual: Residual<Rc<R::Output>>,
    {
        if size_of::<T>() == size_of::<R::Output>()
            && align_of::<T>() == align_of::<R::Output>()
            && Rc::is_unique(&this)
        {
            unsafe {
                let ptr = Rc::into_raw(this);
                let value = ptr.read();
                let mut allocation = Rc::from_raw(ptr.cast::<mem::MaybeUninit<R::Output>>());

                Rc::get_mut_unchecked(&mut allocation).write(f(&value)?);
                try { allocation.assume_init() }
            }
        } else {
            try { Rc::new(f(&*this)?) }
        }
    }
}

impl<T, A: Allocator> Rc<T, A> {
    /// Constructs a new `Rc` in the provided allocator.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let five = Rc::new_in(5, System);
    /// ```
    #[cfg(not(no_global_oom_handling))]
    #[unstable(feature = "allocator_api", issue = "32838")]
    #[inline]
    pub fn new_in(value: T, alloc: A) -> Rc<T, A> {
        // NOTE: Prefer match over unwrap_or_else since closure sometimes not inlineable.
        // That would make code size bigger.
        match Self::try_new_in(value, alloc) {
            Ok(m) => m,
            Err(_) => handle_alloc_error(Layout::new::<RcInner<T>>()),
        }
    }

    /// Constructs a new `Rc` with uninitialized contents in the provided allocator.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(get_mut_unchecked)]
    /// #![feature(allocator_api)]
    ///
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let mut five = Rc::<u32, _>::new_uninit_in(System);
    ///
    /// let five = unsafe {
    ///     // Deferred initialization:
    ///     Rc::get_mut_unchecked(&mut five).as_mut_ptr().write(5);
    ///
    ///     five.assume_init()
    /// };
    ///
    /// assert_eq!(*five, 5)
    /// ```
    #[cfg(not(no_global_oom_handling))]
    #[unstable(feature = "allocator_api", issue = "32838")]
    #[inline]
    pub fn new_uninit_in(alloc: A) -> Rc<mem::MaybeUninit<T>, A> {
        unsafe {
            Rc::from_ptr_in(
                Rc::allocate_for_layout(
                    Layout::new::<T>(),
                    |layout| alloc.allocate(layout),
                    <*mut u8>::cast,
                ),
                alloc,
            )
        }
    }

    /// Constructs a new `Rc` with uninitialized contents, with the memory
    /// being filled with `0` bytes, in the provided allocator.
    ///
    /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
    /// incorrect usage of this method.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let zero = Rc::<u32, _>::new_zeroed_in(System);
    /// let zero = unsafe { zero.assume_init() };
    ///
    /// assert_eq!(*zero, 0)
    /// ```
    ///
    /// [zeroed]: mem::MaybeUninit::zeroed
    #[cfg(not(no_global_oom_handling))]
    #[unstable(feature = "allocator_api", issue = "32838")]
    #[inline]
    pub fn new_zeroed_in(alloc: A) -> Rc<mem::MaybeUninit<T>, A> {
        unsafe {
            Rc::from_ptr_in(
                Rc::allocate_for_layout(
                    Layout::new::<T>(),
                    |layout| alloc.allocate_zeroed(layout),
                    <*mut u8>::cast,
                ),
                alloc,
            )
        }
    }

    /// Constructs a new `Rc<T, A>` in the given allocator while giving you a `Weak<T, A>` to the allocation,
    /// to allow you to construct a `T` which holds a weak pointer to itself.
    ///
    /// Generally, a structure circularly referencing itself, either directly or
    /// indirectly, should not hold a strong reference to itself to prevent a memory leak.
    /// Using this function, you get access to the weak pointer during the
    /// initialization of `T`, before the `Rc<T, A>` is created, such that you can
    /// clone and store it inside the `T`.
    ///
    /// `new_cyclic_in` first allocates the managed allocation for the `Rc<T, A>`,
    /// then calls your closure, giving it a `Weak<T, A>` to this allocation,
    /// and only afterwards completes the construction of the `Rc<T, A>` by placing
    /// the `T` returned from your closure into the allocation.
    ///
    /// Since the new `Rc<T, A>` is not fully-constructed until `Rc<T, A>::new_cyclic_in`
    /// returns, calling [`upgrade`] on the weak reference inside your closure will
    /// fail and result in a `None` value.
    ///
    /// # Panics
    ///
    /// If `data_fn` panics, the panic is propagated to the caller, and the
    /// temporary [`Weak<T, A>`] is dropped normally.
    ///
    /// # Examples
    ///
    /// See [`new_cyclic`].
    ///
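    /// A minimal additional sketch showing the `upgrade` behavior inside the
    /// closure, using the `System` allocator:
    ///
    /// ```
    /// #![feature(allocator_api)]
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let rc = Rc::new_cyclic_in(|weak| {
    ///     // The value is not yet constructed, so `upgrade` returns `None`.
    ///     assert!(weak.upgrade().is_none());
    ///     5
    /// }, System);
    /// assert_eq!(*rc, 5);
    /// ```
    ///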
    /// [`new_cyclic`]: Rc::new_cyclic
    /// [`upgrade`]: Weak::upgrade
    #[cfg(not(no_global_oom_handling))]
    #[unstable(feature = "allocator_api", issue = "32838")]
    pub fn new_cyclic_in<F>(data_fn: F, alloc: A) -> Rc<T, A>
    where
        F: FnOnce(&Weak<T, A>) -> T,
    {
        // Construct the inner in the "uninitialized" state with a single
        // weak reference.
        let (uninit_raw_ptr, alloc) = Box::into_raw_with_allocator(Box::new_in(
            RcInner {
                strong: Cell::new(0),
                weak: Cell::new(1),
                value: mem::MaybeUninit::<T>::uninit(),
            },
            alloc,
        ));
        let uninit_ptr: NonNull<_> = (unsafe { &mut *uninit_raw_ptr }).into();
        let init_ptr: NonNull<RcInner<T>> = uninit_ptr.cast();

        let weak = Weak { ptr: init_ptr, alloc };

        // It's important we don't give up ownership of the weak pointer, or
        // else the memory might be freed by the time `data_fn` returns. If
        // we really wanted to pass ownership, we could create an additional
        // weak pointer for ourselves, but this would result in additional
        // updates to the weak reference count which might not be necessary
        // otherwise.
        let data = data_fn(&weak);

        let strong = unsafe {
            let inner = init_ptr.as_ptr();
            ptr::write(&raw mut (*inner).value, data);

            let prev_value = (*inner).strong.get();
            debug_assert_eq!(prev_value, 0, "No prior strong references should exist");
            (*inner).strong.set(1);

            // Strong references should collectively own a shared weak reference,
            // so don't run the destructor for our old weak reference.
            // Calling into_raw_with_allocator has the double effect of giving us back the allocator,
            // and forgetting the weak reference.
            let alloc = weak.into_raw_with_allocator().1;

            Rc::from_inner_in(init_ptr, alloc)
        };

        strong
    }

    /// Constructs a new `Rc<T>` in the provided allocator, returning an error if the allocation
    /// fails.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let five = Rc::try_new_in(5, System);
    /// # Ok::<(), std::alloc::AllocError>(())
    /// ```
    #[unstable(feature = "allocator_api", issue = "32838")]
    #[inline]
    pub fn try_new_in(value: T, alloc: A) -> Result<Self, AllocError> {
        // There is an implicit weak pointer owned by all the strong
        // pointers, which ensures that the weak destructor never frees
        // the allocation while the strong destructor is running, even
        // if the weak pointer is stored inside the strong one.
        let (ptr, alloc) = Box::into_unique(Box::try_new_in(
            RcInner { strong: Cell::new(1), weak: Cell::new(1), value },
            alloc,
        )?);
        Ok(unsafe { Self::from_inner_in(ptr.into(), alloc) })
    }

    /// Constructs a new `Rc` with uninitialized contents, in the provided allocator, returning an
    /// error if the allocation fails.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    /// #![feature(get_mut_unchecked)]
    ///
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let mut five = Rc::<u32, _>::try_new_uninit_in(System)?;
    ///
    /// let five = unsafe {
    ///     // Deferred initialization:
    ///     Rc::get_mut_unchecked(&mut five).as_mut_ptr().write(5);
    ///
    ///     five.assume_init()
    /// };
    ///
    /// assert_eq!(*five, 5);
    /// # Ok::<(), std::alloc::AllocError>(())
    /// ```
    #[unstable(feature = "allocator_api", issue = "32838")]
    #[inline]
    pub fn try_new_uninit_in(alloc: A) -> Result<Rc<mem::MaybeUninit<T>, A>, AllocError> {
        unsafe {
            Ok(Rc::from_ptr_in(
                Rc::try_allocate_for_layout(
                    Layout::new::<T>(),
                    |layout| alloc.allocate(layout),
                    <*mut u8>::cast,
                )?,
                alloc,
            ))
        }
    }

    /// Constructs a new `Rc` with uninitialized contents, with the memory
    /// being filled with `0` bytes, in the provided allocator, returning an error if the allocation
    /// fails.
    ///
    /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
    /// incorrect usage of this method.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let zero = Rc::<u32, _>::try_new_zeroed_in(System)?;
    /// let zero = unsafe { zero.assume_init() };
    ///
    /// assert_eq!(*zero, 0);
    /// # Ok::<(), std::alloc::AllocError>(())
    /// ```
    ///
    /// [zeroed]: mem::MaybeUninit::zeroed
    #[unstable(feature = "allocator_api", issue = "32838")]
    #[inline]
    pub fn try_new_zeroed_in(alloc: A) -> Result<Rc<mem::MaybeUninit<T>, A>, AllocError> {
        unsafe {
            Ok(Rc::from_ptr_in(
                Rc::try_allocate_for_layout(
                    Layout::new::<T>(),
                    |layout| alloc.allocate_zeroed(layout),
                    <*mut u8>::cast,
                )?,
                alloc,
            ))
        }
    }

    /// Constructs a new `Pin<Rc<T>>` in the provided allocator. If `T` does not implement `Unpin`, then
    /// `value` will be pinned in memory and unable to be moved.
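    ///
    /// # Examples
    ///
    /// A minimal sketch:
    ///
    /// ```
    /// #![feature(allocator_api)]
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let pinned = Rc::pin_in(5, System);
    /// assert_eq!(*pinned, 5);
    /// ```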
    #[cfg(not(no_global_oom_handling))]
    #[unstable(feature = "allocator_api", issue = "32838")]
    #[inline]
    pub fn pin_in(value: T, alloc: A) -> Pin<Self>
    where
        A: 'static,
    {
        unsafe { Pin::new_unchecked(Rc::new_in(value, alloc)) }
    }

    /// Returns the inner value, if the `Rc` has exactly one strong reference.
    ///
    /// Otherwise, an [`Err`] is returned with the same `Rc` that was
    /// passed in.
    ///
    /// This will succeed even if there are outstanding weak references.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let x = Rc::new(3);
    /// assert_eq!(Rc::try_unwrap(x), Ok(3));
    ///
    /// let x = Rc::new(4);
    /// let _y = Rc::clone(&x);
    /// assert_eq!(*Rc::try_unwrap(x).unwrap_err(), 4);
    /// ```
    #[inline]
    #[stable(feature = "rc_unique", since = "1.4.0")]
    pub fn try_unwrap(this: Self) -> Result<T, Self> {
        if Rc::strong_count(&this) == 1 {
            let this = ManuallyDrop::new(this);

            let val: T = unsafe { ptr::read(&**this) }; // copy the contained object
            let alloc: A = unsafe { ptr::read(&this.alloc) }; // copy the allocator

            // Indicate to Weaks that they can't be promoted by decrementing
            // the strong count, and then remove the implicit "strong weak"
            // pointer while also handling drop logic by just crafting a
            // fake Weak.
            this.inner().dec_strong();
            let _weak = Weak { ptr: this.ptr, alloc };
            Ok(val)
        } else {
            Err(this)
        }
    }

    /// Returns the inner value, if the `Rc` has exactly one strong reference.
    ///
    /// Otherwise, [`None`] is returned and the `Rc` is dropped.
    ///
    /// This will succeed even if there are outstanding weak references.
    ///
    /// If `Rc::into_inner` is called on every clone of this `Rc`,
    /// it is guaranteed that exactly one of the calls returns the inner value.
    /// This means in particular that the inner value is not dropped.
    ///
    /// [`Rc::try_unwrap`] is conceptually similar to `Rc::into_inner`.
    /// And while they are meant for different use-cases, `Rc::into_inner(this)`
    /// is in fact equivalent to <code>[Rc::try_unwrap]\(this).[ok][Result::ok]()</code>.
    /// (Note that the same kind of equivalence does **not** hold true for
    /// [`Arc`](crate::sync::Arc), due to race conditions that do not apply to `Rc`!)
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let x = Rc::new(3);
    /// assert_eq!(Rc::into_inner(x), Some(3));
    ///
    /// let x = Rc::new(4);
    /// let y = Rc::clone(&x);
    ///
    /// assert_eq!(Rc::into_inner(y), None);
    /// assert_eq!(Rc::into_inner(x), Some(4));
    /// ```
    #[inline]
    #[stable(feature = "rc_into_inner", since = "1.70.0")]
    pub fn into_inner(this: Self) -> Option<T> {
        Rc::try_unwrap(this).ok()
    }
}

impl<T> Rc<[T]> {
    /// Constructs a new reference-counted slice with uninitialized contents.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let mut values = Rc::<[u32]>::new_uninit_slice(3);
    ///
    /// // Deferred initialization:
    /// let data = Rc::get_mut(&mut values).unwrap();
    /// data[0].write(1);
    /// data[1].write(2);
    /// data[2].write(3);
    ///
    /// let values = unsafe { values.assume_init() };
    ///
    /// assert_eq!(*values, [1, 2, 3])
    /// ```
    #[cfg(not(no_global_oom_handling))]
    #[stable(feature = "new_uninit", since = "1.82.0")]
    #[must_use]
    pub fn new_uninit_slice(len: usize) -> Rc<[mem::MaybeUninit<T>]> {
        unsafe { Rc::from_ptr(Rc::allocate_for_slice(len)) }
    }

    /// Constructs a new reference-counted slice with uninitialized contents, with the memory being
    /// filled with `0` bytes.
    ///
    /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
    /// incorrect usage of this method.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let values = Rc::<[u32]>::new_zeroed_slice(3);
    /// let values = unsafe { values.assume_init() };
    ///
    /// assert_eq!(*values, [0, 0, 0])
    /// ```
    ///
    /// [zeroed]: mem::MaybeUninit::zeroed
    #[cfg(not(no_global_oom_handling))]
    #[stable(feature = "new_zeroed_alloc", since = "1.92.0")]
    #[must_use]
    pub fn new_zeroed_slice(len: usize) -> Rc<[mem::MaybeUninit<T>]> {
        unsafe {
            Rc::from_ptr(Rc::allocate_for_layout(
                Layout::array::<T>(len).unwrap(),
                |layout| Global.allocate_zeroed(layout),
                |mem| {
                    ptr::slice_from_raw_parts_mut(mem.cast::<T>(), len)
                        as *mut RcInner<[mem::MaybeUninit<T>]>
                },
            ))
        }
    }

    /// Converts the reference-counted slice into a reference-counted array.
    ///
    /// This operation does not reallocate; the underlying array of the slice is simply reinterpreted as an array type.
    ///
    /// If `N` is not exactly equal to the length of `self`, then this method returns `None`.
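    ///
    /// # Examples
    ///
    /// A minimal sketch:
    ///
    /// ```
    /// #![feature(alloc_slice_into_array)]
    /// use std::rc::Rc;
    ///
    /// let slice: Rc<[u32]> = Rc::from([1, 2, 3]);
    /// let array: Rc<[u32; 3]> = slice.into_array().unwrap();
    /// assert_eq!(*array, [1, 2, 3]);
    ///
    /// // A mismatched length yields `None`.
    /// let slice: Rc<[u32]> = Rc::from([1, 2, 3]);
    /// assert!(slice.into_array::<4>().is_none());
    /// ```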
    #[unstable(feature = "alloc_slice_into_array", issue = "148082")]
    #[inline]
    #[must_use]
    pub fn into_array<const N: usize>(self) -> Option<Rc<[T; N]>> {
        if self.len() == N {
            let ptr = Self::into_raw(self) as *const [T; N];

            // SAFETY: The underlying array of a slice has the exact same layout as an actual array `[T; N]` if `N` is equal to the slice's length.
            let me = unsafe { Rc::from_raw(ptr) };
            Some(me)
        } else {
            None
        }
    }
}

impl<T, A: Allocator> Rc<[T], A> {
    /// Constructs a new reference-counted slice with uninitialized contents.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(get_mut_unchecked)]
    /// #![feature(allocator_api)]
    ///
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let mut values = Rc::<[u32], _>::new_uninit_slice_in(3, System);
    ///
    /// let values = unsafe {
    ///     // Deferred initialization:
    ///     Rc::get_mut_unchecked(&mut values)[0].as_mut_ptr().write(1);
    ///     Rc::get_mut_unchecked(&mut values)[1].as_mut_ptr().write(2);
    ///     Rc::get_mut_unchecked(&mut values)[2].as_mut_ptr().write(3);
    ///
    ///     values.assume_init()
    /// };
    ///
    /// assert_eq!(*values, [1, 2, 3])
    /// ```
    #[cfg(not(no_global_oom_handling))]
    #[unstable(feature = "allocator_api", issue = "32838")]
    #[inline]
    pub fn new_uninit_slice_in(len: usize, alloc: A) -> Rc<[mem::MaybeUninit<T>], A> {
        unsafe { Rc::from_ptr_in(Rc::allocate_for_slice_in(len, &alloc), alloc) }
    }

    /// Constructs a new reference-counted slice with uninitialized contents, with the memory being
    /// filled with `0` bytes.
    ///
    /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
    /// incorrect usage of this method.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let values = Rc::<[u32], _>::new_zeroed_slice_in(3, System);
    /// let values = unsafe { values.assume_init() };
    ///
    /// assert_eq!(*values, [0, 0, 0])
    /// ```
    ///
    /// [zeroed]: mem::MaybeUninit::zeroed
    #[cfg(not(no_global_oom_handling))]
    #[unstable(feature = "allocator_api", issue = "32838")]
    #[inline]
    pub fn new_zeroed_slice_in(len: usize, alloc: A) -> Rc<[mem::MaybeUninit<T>], A> {
        unsafe {
            Rc::from_ptr_in(
                Rc::allocate_for_layout(
                    Layout::array::<T>(len).unwrap(),
                    |layout| alloc.allocate_zeroed(layout),
                    |mem| {
                        ptr::slice_from_raw_parts_mut(mem.cast::<T>(), len)
                            as *mut RcInner<[mem::MaybeUninit<T>]>
                    },
                ),
                alloc,
            )
        }
    }
}

impl<T, A: Allocator> Rc<mem::MaybeUninit<T>, A> {
    /// Converts to `Rc<T>`.
    ///
    /// # Safety
    ///
    /// As with [`MaybeUninit::assume_init`],
    /// it is up to the caller to guarantee that the inner value
    /// really is in an initialized state.
    /// Calling this when the content is not yet fully initialized
    /// causes immediate undefined behavior.
    ///
    /// [`MaybeUninit::assume_init`]: mem::MaybeUninit::assume_init
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let mut five = Rc::<u32>::new_uninit();
    ///
    /// // Deferred initialization:
    /// Rc::get_mut(&mut five).unwrap().write(5);
    ///
    /// let five = unsafe { five.assume_init() };
    ///
    /// assert_eq!(*five, 5)
    /// ```
    #[stable(feature = "new_uninit", since = "1.82.0")]
    #[inline]
    pub unsafe fn assume_init(self) -> Rc<T, A> {
        let (ptr, alloc) = Rc::into_inner_with_allocator(self);
        unsafe { Rc::from_inner_in(ptr.cast(), alloc) }
    }
}

impl<T, A: Allocator> Rc<[mem::MaybeUninit<T>], A> {
    /// Converts to `Rc<[T]>`.
    ///
    /// # Safety
    ///
    /// As with [`MaybeUninit::assume_init`],
    /// it is up to the caller to guarantee that the inner value
    /// really is in an initialized state.
    /// Calling this when the content is not yet fully initialized
    /// causes immediate undefined behavior.
    ///
    /// [`MaybeUninit::assume_init`]: mem::MaybeUninit::assume_init
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let mut values = Rc::<[u32]>::new_uninit_slice(3);
    ///
    /// // Deferred initialization:
    /// let data = Rc::get_mut(&mut values).unwrap();
    /// data[0].write(1);
    /// data[1].write(2);
    /// data[2].write(3);
    ///
    /// let values = unsafe { values.assume_init() };
    ///
    /// assert_eq!(*values, [1, 2, 3])
    /// ```
    #[stable(feature = "new_uninit", since = "1.82.0")]
    #[inline]
    pub unsafe fn assume_init(self) -> Rc<[T], A> {
        let (ptr, alloc) = Rc::into_inner_with_allocator(self);
        unsafe { Rc::from_ptr_in(ptr.as_ptr() as _, alloc) }
    }
}

impl<T: ?Sized> Rc<T> {
    /// Constructs an `Rc<T>` from a raw pointer.
    ///
    /// The raw pointer must have been previously returned by a call to
    /// [`Rc<U>::into_raw`][into_raw] with the following requirements:
    ///
    /// * If `U` is sized, it must have the same size and alignment as `T`. This
    ///   is trivially true if `U` is `T`.
    /// * If `U` is unsized, its data pointer must have the same size and
    ///   alignment as `T`. This is trivially true if `Rc<U>` was constructed
    ///   through `Rc<T>` and then converted to `Rc<U>` through an [unsized
    ///   coercion].
    ///
    /// Note that if `U` or `U`'s data pointer is not `T` but has the same size
    /// and alignment, this is basically like transmuting references of
    /// different types. See [`mem::transmute`][transmute] for more information
    /// on what restrictions apply in this case.
    ///
    /// The raw pointer must point to a block of memory allocated by the global allocator.
    ///
    /// The user of `from_raw` has to make sure a specific value of `T` is only
    /// dropped once.
    ///
    /// This function is unsafe because improper use may lead to memory unsafety,
    /// even if the returned `Rc<T>` is never accessed.
    ///
    /// [into_raw]: Rc::into_raw
    /// [transmute]: core::mem::transmute
    /// [unsized coercion]: https://doc.rust-lang.org/reference/type-coercions.html#unsized-coercions
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let x = Rc::new("hello".to_owned());
    /// let x_ptr = Rc::into_raw(x);
    ///
    /// unsafe {
    ///     // Convert back to an `Rc` to prevent leak.
    ///     let x = Rc::from_raw(x_ptr);
    ///     assert_eq!(&*x, "hello");
    ///
    ///     // Further calls to `Rc::from_raw(x_ptr)` would be memory-unsafe.
    /// }
    ///
    /// // The memory was freed when `x` went out of scope above, so `x_ptr` is now dangling!
    /// ```
    ///
    /// Convert a slice back into its original array:
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let x: Rc<[u32]> = Rc::new([1, 2, 3]);
    /// let x_ptr: *const [u32] = Rc::into_raw(x);
    ///
    /// unsafe {
    ///     let x: Rc<[u32; 3]> = Rc::from_raw(x_ptr.cast::<[u32; 3]>());
    ///     assert_eq!(&*x, &[1, 2, 3]);
    /// }
    /// ```
    #[inline]
    #[stable(feature = "rc_raw", since = "1.17.0")]
    pub unsafe fn from_raw(ptr: *const T) -> Self {
        unsafe { Self::from_raw_in(ptr, Global) }
    }

    /// Consumes the `Rc`, returning the wrapped pointer.
    ///
    /// To avoid a memory leak the pointer must be converted back to an `Rc` using
    /// [`Rc::from_raw`].
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let x = Rc::new("hello".to_owned());
    /// let x_ptr = Rc::into_raw(x);
    /// assert_eq!(unsafe { &*x_ptr }, "hello");
    /// # // Prevent leaks for Miri.
    /// # drop(unsafe { Rc::from_raw(x_ptr) });
    /// ```
    #[must_use = "losing the pointer will leak memory"]
    #[stable(feature = "rc_raw", since = "1.17.0")]
    #[rustc_never_returns_null_ptr]
    pub fn into_raw(this: Self) -> *const T {
        let this = ManuallyDrop::new(this);
        Self::as_ptr(&*this)
    }

    /// Increments the strong reference count on the `Rc<T>` associated with the
    /// provided pointer by one.
    ///
    /// # Safety
    ///
    /// The pointer must have been obtained through `Rc::into_raw` and must satisfy the
    /// same layout requirements specified in [`Rc::from_raw_in`][from_raw_in].
    /// The associated `Rc` instance must be valid (i.e. the strong count must be at
    /// least 1) for the duration of this method, and `ptr` must point to a block of memory
    /// allocated by the global allocator.
    ///
    /// [from_raw_in]: Rc::from_raw_in
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let five = Rc::new(5);
    ///
    /// unsafe {
    ///     let ptr = Rc::into_raw(five);
    ///     Rc::increment_strong_count(ptr);
    ///
    ///     let five = Rc::from_raw(ptr);
    ///     assert_eq!(2, Rc::strong_count(&five));
    /// #   // Prevent leaks for Miri.
    /// #   Rc::decrement_strong_count(ptr);
    /// }
    /// ```
    #[inline]
    #[stable(feature = "rc_mutate_strong_count", since = "1.53.0")]
    pub unsafe fn increment_strong_count(ptr: *const T) {
        unsafe { Self::increment_strong_count_in(ptr, Global) }
    }

    /// Decrements the strong reference count on the `Rc<T>` associated with the
    /// provided pointer by one.
    ///
    /// # Safety
    ///
    /// The pointer must have been obtained through `Rc::into_raw` and must satisfy the
1465    /// same layout requirements specified in [`Rc::from_raw_in`][from_raw_in].
1466    /// The associated `Rc` instance must be valid (i.e. the strong count must be at
1467    /// least 1) when invoking this method, and `ptr` must point to a block of memory
1468    /// allocated by the global allocator. This method can be used to release the final `Rc` and
1469    /// backing storage, but **should not** be called after the final `Rc` has been released.
1470    ///
1471    /// [from_raw_in]: Rc::from_raw_in
1472    ///
1473    /// # Examples
1474    ///
1475    /// ```
1476    /// use std::rc::Rc;
1477    ///
1478    /// let five = Rc::new(5);
1479    ///
1480    /// unsafe {
1481    ///     let ptr = Rc::into_raw(five);
1482    ///     Rc::increment_strong_count(ptr);
1483    ///
1484    ///     let five = Rc::from_raw(ptr);
1485    ///     assert_eq!(2, Rc::strong_count(&five));
1486    ///     Rc::decrement_strong_count(ptr);
1487    ///     assert_eq!(1, Rc::strong_count(&five));
1488    /// }
1489    /// ```
1490    #[inline]
1491    #[stable(feature = "rc_mutate_strong_count", since = "1.53.0")]
1492    pub unsafe fn decrement_strong_count(ptr: *const T) {
1493        unsafe { Self::decrement_strong_count_in(ptr, Global) }
1494    }
1495}
1496
1497impl<T: ?Sized, A: Allocator> Rc<T, A> {
1498    /// Returns a reference to the underlying allocator.
1499    ///
1500    /// Note: this is an associated function, which means that you have
1501    /// to call it as `Rc::allocator(&r)` instead of `r.allocator()`. This
1502    /// is so that there is no conflict with a method on the inner type.
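    ///
    /// # Examples
    ///
    /// A minimal sketch, assuming the unstable `allocator_api` feature and
    /// using the `System` allocator purely for illustration:
    ///
    /// ```
    /// #![feature(allocator_api)]
    /// use std::alloc::System;
    /// use std::rc::Rc;
    ///
    /// let r = Rc::new_in(1, System);
    /// // Associated-function syntax avoids clashing with methods on the inner type.
    /// let _alloc: &System = Rc::allocator(&r);
    /// ```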
1503    #[inline]
1504    #[unstable(feature = "allocator_api", issue = "32838")]
1505    pub fn allocator(this: &Self) -> &A {
1506        &this.alloc
1507    }
1508
1509    /// Consumes the `Rc`, returning the wrapped pointer and allocator.
1510    ///
1511    /// To avoid a memory leak, the pointer must be converted back to an `Rc` using
1512    /// [`Rc::from_raw_in`].
1513    ///
1514    /// # Examples
1515    ///
1516    /// ```
1517    /// #![feature(allocator_api)]
1518    /// use std::rc::Rc;
1519    /// use std::alloc::System;
1520    ///
1521    /// let x = Rc::new_in("hello".to_owned(), System);
1522    /// let (ptr, alloc) = Rc::into_raw_with_allocator(x);
1523    /// assert_eq!(unsafe { &*ptr }, "hello");
1524    /// let x = unsafe { Rc::from_raw_in(ptr, alloc) };
1525    /// assert_eq!(&*x, "hello");
1526    /// ```
1527    #[must_use = "losing the pointer will leak memory"]
1528    #[unstable(feature = "allocator_api", issue = "32838")]
1529    pub fn into_raw_with_allocator(this: Self) -> (*const T, A) {
1530        let this = mem::ManuallyDrop::new(this);
1531        let ptr = Self::as_ptr(&this);
1532        // Safety: `this` is ManuallyDrop so the allocator will not be double-dropped
1533        let alloc = unsafe { ptr::read(&this.alloc) };
1534        (ptr, alloc)
1535    }
1536
1537    /// Provides a raw pointer to the data.
1538    ///
1539    /// The counts are not affected in any way and the `Rc` is not consumed. The pointer is valid
1540    /// for as long as the strong count of the `Rc` is at least one.
1541    ///
1542    /// # Examples
1543    ///
1544    /// ```
1545    /// use std::rc::Rc;
1546    ///
1547    /// let x = Rc::new(0);
1548    /// let y = Rc::clone(&x);
1549    /// let x_ptr = Rc::as_ptr(&x);
1550    /// assert_eq!(x_ptr, Rc::as_ptr(&y));
1551    /// assert_eq!(unsafe { *x_ptr }, 0);
1552    /// ```
1553    #[stable(feature = "weak_into_raw", since = "1.45.0")]
1554    #[rustc_never_returns_null_ptr]
1555    pub fn as_ptr(this: &Self) -> *const T {
1556        let ptr: *mut RcInner<T> = NonNull::as_ptr(this.ptr);
1557
1558        // SAFETY: This cannot go through Deref::deref or Rc::inner because
1559        // this is required to retain raw/mut provenance such that e.g. `get_mut` can
1560        // write through the pointer after the Rc is recovered through `from_raw`.
1561        unsafe { &raw mut (*ptr).value }
1562    }
1563
1564    /// Constructs an `Rc<T, A>` from a raw pointer in the provided allocator.
1565    ///
1566    /// The raw pointer must have been previously returned by a call to [`Rc<U,
1567    /// A>::into_raw`][into_raw] with the following requirements:
1568    ///
1569    /// * If `U` is sized, it must have the same size and alignment as `T`. This
1570    ///   is trivially true if `U` is `T`.
1571    /// * If `U` is unsized, its data pointer must have the same size and
1572    ///   alignment as `T`. This is trivially true if `Rc<U>` was constructed
1573    ///   through `Rc<T>` and then converted to `Rc<U>` through an [unsized
1574    ///   coercion].
1575    ///
1576    /// Note that if `U` or `U`'s data pointer is not `T` but has the same size
1577    /// and alignment, this is basically like transmuting references of
1578    /// different types. See [`mem::transmute`][transmute] for more information
1579    /// on what restrictions apply in this case.
1580    ///
1581    /// The raw pointer must point to a block of memory allocated by `alloc`.
1582    ///
1583    /// The user of `from_raw` has to make sure a specific value of `T` is only
1584    /// dropped once.
1585    ///
1586    /// This function is unsafe because improper use may lead to memory unsafety,
1587    /// even if the returned `Rc<T>` is never accessed.
1588    ///
1589    /// [into_raw]: Rc::into_raw
1590    /// [transmute]: core::mem::transmute
1591    /// [unsized coercion]: https://doc.rust-lang.org/reference/type-coercions.html#unsized-coercions
1592    ///
1593    /// # Examples
1594    ///
1595    /// ```
1596    /// #![feature(allocator_api)]
1597    ///
1598    /// use std::rc::Rc;
1599    /// use std::alloc::System;
1600    ///
1601    /// let x = Rc::new_in("hello".to_owned(), System);
1602    /// let (x_ptr, _alloc) = Rc::into_raw_with_allocator(x);
1603    ///
1604    /// unsafe {
1605    ///     // Convert back to an `Rc` to prevent leak.
1606    ///     let x = Rc::from_raw_in(x_ptr, System);
1607    ///     assert_eq!(&*x, "hello");
1608    ///
1609    ///     // Further calls to `Rc::from_raw_in(x_ptr, System)` would be memory-unsafe.
1610    /// }
1611    ///
1612    /// // The memory was freed when `x` went out of scope above, so `x_ptr` is now dangling!
1613    /// ```
1614    ///
1615    /// Convert a slice back into its original array:
1616    ///
1617    /// ```
1618    /// #![feature(allocator_api)]
1619    ///
1620    /// use std::rc::Rc;
1621    /// use std::alloc::System;
1622    ///
1623    /// let x: Rc<[u32], _> = Rc::new_in([1, 2, 3], System);
1624    /// let x_ptr: *const [u32] = Rc::into_raw_with_allocator(x).0;
1625    ///
1626    /// unsafe {
1627    ///     let x: Rc<[u32; 3], _> = Rc::from_raw_in(x_ptr.cast::<[u32; 3]>(), System);
1628    ///     assert_eq!(&*x, &[1, 2, 3]);
1629    /// }
1630    /// ```
1631    #[unstable(feature = "allocator_api", issue = "32838")]
1632    pub unsafe fn from_raw_in(ptr: *const T, alloc: A) -> Self {
1633        let offset = unsafe { data_offset(ptr) };
1634
1635        // Reverse the offset to find the original RcInner.
1636        let rc_ptr = unsafe { ptr.byte_sub(offset) as *mut RcInner<T> };
1637
1638        unsafe { Self::from_ptr_in(rc_ptr, alloc) }
1639    }
1640
1641    /// Creates a new [`Weak`] pointer to this allocation.
1642    ///
1643    /// # Examples
1644    ///
1645    /// ```
1646    /// use std::rc::Rc;
1647    ///
1648    /// let five = Rc::new(5);
1649    ///
1650    /// let weak_five = Rc::downgrade(&five);
1651    /// ```
1652    #[must_use = "this returns a new `Weak` pointer, \
1653                  without modifying the original `Rc`"]
1654    #[stable(feature = "rc_weak", since = "1.4.0")]
1655    pub fn downgrade(this: &Self) -> Weak<T, A>
1656    where
1657        A: Clone,
1658    {
1659        this.inner().inc_weak();
1660        // Make sure we do not create a dangling Weak
1661        debug_assert!(!is_dangling(this.ptr.as_ptr()));
1662        Weak { ptr: this.ptr, alloc: this.alloc.clone() }
1663    }
1664
1665    /// Gets the number of [`Weak`] pointers to this allocation.
1666    ///
1667    /// # Examples
1668    ///
1669    /// ```
1670    /// use std::rc::Rc;
1671    ///
1672    /// let five = Rc::new(5);
1673    /// let _weak_five = Rc::downgrade(&five);
1674    ///
1675    /// assert_eq!(1, Rc::weak_count(&five));
1676    /// ```
1677    #[inline]
1678    #[stable(feature = "rc_counts", since = "1.15.0")]
1679    pub fn weak_count(this: &Self) -> usize {
1680        this.inner().weak() - 1
1681    }
1682
1683    /// Gets the number of strong (`Rc`) pointers to this allocation.
1684    ///
1685    /// # Examples
1686    ///
1687    /// ```
1688    /// use std::rc::Rc;
1689    ///
1690    /// let five = Rc::new(5);
1691    /// let _also_five = Rc::clone(&five);
1692    ///
1693    /// assert_eq!(2, Rc::strong_count(&five));
1694    /// ```
1695    #[inline]
1696    #[stable(feature = "rc_counts", since = "1.15.0")]
1697    pub fn strong_count(this: &Self) -> usize {
1698        this.inner().strong()
1699    }
1700
1701    /// Increments the strong reference count on the `Rc<T>` associated with the
1702    /// provided pointer by one.
1703    ///
1704    /// # Safety
1705    ///
1706    /// The pointer must have been obtained through `Rc::into_raw` and must satisfy the
1707    /// same layout requirements specified in [`Rc::from_raw_in`][from_raw_in].
1708    /// The associated `Rc` instance must be valid (i.e. the strong count must be at
1709    /// least 1) for the duration of this method, and `ptr` must point to a block of memory
1710    /// allocated by `alloc`.
1711    ///
1712    /// [from_raw_in]: Rc::from_raw_in
1713    ///
1714    /// # Examples
1715    ///
1716    /// ```
1717    /// #![feature(allocator_api)]
1718    ///
1719    /// use std::rc::Rc;
1720    /// use std::alloc::System;
1721    ///
1722    /// let five = Rc::new_in(5, System);
1723    ///
1724    /// unsafe {
1725    ///     let (ptr, _alloc) = Rc::into_raw_with_allocator(five);
1726    ///     Rc::increment_strong_count_in(ptr, System);
1727    ///
1728    ///     let five = Rc::from_raw_in(ptr, System);
1729    ///     assert_eq!(2, Rc::strong_count(&five));
1730    /// #   // Prevent leaks for Miri.
1731    /// #   Rc::decrement_strong_count_in(ptr, System);
1732    /// }
1733    /// ```
1734    #[inline]
1735    #[unstable(feature = "allocator_api", issue = "32838")]
1736    pub unsafe fn increment_strong_count_in(ptr: *const T, alloc: A)
1737    where
1738        A: Clone,
1739    {
1740        // Retain Rc, but don't touch refcount by wrapping in ManuallyDrop
1741        let rc = unsafe { mem::ManuallyDrop::new(Rc::<T, A>::from_raw_in(ptr, alloc)) };
1742        // Now increase refcount, but don't drop new refcount either
1743        let _rc_clone: mem::ManuallyDrop<_> = rc.clone();
1744    }
1745
1746    /// Decrements the strong reference count on the `Rc<T>` associated with the
1747    /// provided pointer by one.
1748    ///
1749    /// # Safety
1750    ///
1751    /// The pointer must have been obtained through `Rc::into_raw` and must satisfy the
1752    /// same layout requirements specified in [`Rc::from_raw_in`][from_raw_in].
1753    /// The associated `Rc` instance must be valid (i.e. the strong count must be at
1754    /// least 1) when invoking this method, and `ptr` must point to a block of memory
1755    /// allocated by `alloc`. This method can be used to release the final `Rc` and
1756    /// backing storage, but **should not** be called after the final `Rc` has been released.
1757    ///
1758    /// [from_raw_in]: Rc::from_raw_in
1759    ///
1760    /// # Examples
1761    ///
1762    /// ```
1763    /// #![feature(allocator_api)]
1764    ///
1765    /// use std::rc::Rc;
1766    /// use std::alloc::System;
1767    ///
1768    /// let five = Rc::new_in(5, System);
1769    ///
1770    /// unsafe {
1771    ///     let (ptr, _alloc) = Rc::into_raw_with_allocator(five);
1772    ///     Rc::increment_strong_count_in(ptr, System);
1773    ///
1774    ///     let five = Rc::from_raw_in(ptr, System);
1775    ///     assert_eq!(2, Rc::strong_count(&five));
1776    ///     Rc::decrement_strong_count_in(ptr, System);
1777    ///     assert_eq!(1, Rc::strong_count(&five));
1778    /// }
1779    /// ```
1780    #[inline]
1781    #[unstable(feature = "allocator_api", issue = "32838")]
1782    pub unsafe fn decrement_strong_count_in(ptr: *const T, alloc: A) {
1783        unsafe { drop(Rc::from_raw_in(ptr, alloc)) };
1784    }
1785
1786    /// Returns `true` if there are no other `Rc` or [`Weak`] pointers to
1787    /// this allocation.
1788    #[inline]
1789    fn is_unique(this: &Self) -> bool {
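        // Note: `weak_count` already excludes the implicit weak reference that
        // is collectively held by the strong pointers, so "unique" means zero
        // real `Weak`s and exactly one strong `Rc`.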
1790        Rc::weak_count(this) == 0 && Rc::strong_count(this) == 1
1791    }
1792
1793    /// Returns a mutable reference into the given `Rc`, if there are
1794    /// no other `Rc` or [`Weak`] pointers to the same allocation.
1795    ///
1796    /// Returns [`None`] otherwise, because it is not safe to
1797    /// mutate a shared value.
1798    ///
1799    /// See also [`make_mut`][make_mut], which will [`clone`][clone]
1800    /// the inner value when there are other `Rc` pointers.
1801    ///
1802    /// [make_mut]: Rc::make_mut
1803    /// [clone]: Clone::clone
1804    ///
1805    /// # Examples
1806    ///
1807    /// ```
1808    /// use std::rc::Rc;
1809    ///
1810    /// let mut x = Rc::new(3);
1811    /// *Rc::get_mut(&mut x).unwrap() = 4;
1812    /// assert_eq!(*x, 4);
1813    ///
1814    /// let _y = Rc::clone(&x);
1815    /// assert!(Rc::get_mut(&mut x).is_none());
1816    /// ```
1817    #[inline]
1818    #[stable(feature = "rc_unique", since = "1.4.0")]
1819    pub fn get_mut(this: &mut Self) -> Option<&mut T> {
1820        if Rc::is_unique(this) { unsafe { Some(Rc::get_mut_unchecked(this)) } } else { None }
1821    }
1822
1823    /// Returns a mutable reference into the given `Rc`,
1824    /// without any check.
1825    ///
1826    /// See also [`get_mut`], which is safe and does appropriate checks.
1827    ///
1828    /// [`get_mut`]: Rc::get_mut
1829    ///
1830    /// # Safety
1831    ///
1832    /// If any other `Rc` or [`Weak`] pointers to the same allocation exist, then
1833    /// they must not be dereferenced or have active borrows for the duration
1834    /// of the returned borrow, and their inner type must be exactly the same as the
1835    /// inner type of this Rc (including lifetimes). This is trivially the case if no
1836    /// such pointers exist, for example immediately after `Rc::new`.
1837    ///
1838    /// # Examples
1839    ///
1840    /// ```
1841    /// #![feature(get_mut_unchecked)]
1842    ///
1843    /// use std::rc::Rc;
1844    ///
1845    /// let mut x = Rc::new(String::new());
1846    /// unsafe {
1847    ///     Rc::get_mut_unchecked(&mut x).push_str("foo")
1848    /// }
1849    /// assert_eq!(*x, "foo");
1850    /// ```
1851    /// Other `Rc` pointers to the same allocation must be to the same type.
1852    /// ```no_run
1853    /// #![feature(get_mut_unchecked)]
1854    ///
1855    /// use std::rc::Rc;
1856    ///
1857    /// let x: Rc<str> = Rc::from("Hello, world!");
1858    /// let mut y: Rc<[u8]> = x.clone().into();
1859    /// unsafe {
1860    ///     // this is Undefined Behavior, because x's inner type is str, not [u8]
1861    ///     Rc::get_mut_unchecked(&mut y).fill(0xff); // 0xff is invalid in UTF-8
1862    /// }
1863    /// println!("{}", &*x); // Invalid UTF-8 in a str
1864    /// ```
1865    /// Other `Rc` pointers to the same allocation must be to the exact same type, including lifetimes.
1866    /// ```no_run
1867    /// #![feature(get_mut_unchecked)]
1868    ///
1869    /// use std::rc::Rc;
1870    ///
1871    /// let x: Rc<&str> = Rc::new("Hello, world!");
1872    /// {
1873    ///     let s = String::from("Oh, no!");
1874    ///     let mut y: Rc<&str> = x.clone();
1875    ///     unsafe {
1876    ///         // this is Undefined Behavior, because x's inner type
1877    ///         // is &'long str, not &'short str
1878    ///         *Rc::get_mut_unchecked(&mut y) = &s;
1879    ///     }
1880    /// }
1881    /// println!("{}", &*x); // Use-after-free
1882    /// ```
1883    #[inline]
1884    #[unstable(feature = "get_mut_unchecked", issue = "63292")]
1885    pub unsafe fn get_mut_unchecked(this: &mut Self) -> &mut T {
1886        // We are careful to *not* create a reference covering the "count" fields, as
1887        // this would conflict with accesses to the reference counts (e.g. by `Weak`).
1888        unsafe { &mut (*this.ptr.as_ptr()).value }
1889    }
1890
1891    #[inline]
1892    #[stable(feature = "ptr_eq", since = "1.17.0")]
1893    /// Returns `true` if the two `Rc`s point to the same allocation in a vein similar to
1894    /// [`ptr::eq`]. This function ignores the metadata of `dyn Trait` pointers.
1895    ///
1896    /// # Examples
1897    ///
1898    /// ```
1899    /// use std::rc::Rc;
1900    ///
1901    /// let five = Rc::new(5);
1902    /// let same_five = Rc::clone(&five);
1903    /// let other_five = Rc::new(5);
1904    ///
1905    /// assert!(Rc::ptr_eq(&five, &same_five));
1906    /// assert!(!Rc::ptr_eq(&five, &other_five));
1907    /// ```
1908    pub fn ptr_eq(this: &Self, other: &Self) -> bool {
1909        ptr::addr_eq(this.ptr.as_ptr(), other.ptr.as_ptr())
1910    }
1911}
1912
1913#[cfg(not(no_global_oom_handling))]
1914impl<T: ?Sized + CloneToUninit, A: Allocator + Clone> Rc<T, A> {
1915    /// Makes a mutable reference into the given `Rc`.
1916    ///
1917    /// If there are other `Rc` pointers to the same allocation, then `make_mut` will
1918    /// [`clone`] the inner value to a new allocation to ensure unique ownership.  This is also
1919    /// referred to as clone-on-write.
1920    ///
1921    /// However, if there are no other `Rc` pointers to this allocation, but some [`Weak`]
1922    /// pointers, then the [`Weak`] pointers will be disassociated and the inner value will not
1923    /// be cloned.
1924    ///
1925    /// See also [`get_mut`], which will fail rather than cloning the inner value
1926    /// or disassociating [`Weak`] pointers.
1927    ///
1928    /// [`clone`]: Clone::clone
1929    /// [`get_mut`]: Rc::get_mut
1930    ///
1931    /// # Examples
1932    ///
1933    /// ```
1934    /// use std::rc::Rc;
1935    ///
1936    /// let mut data = Rc::new(5);
1937    ///
1938    /// *Rc::make_mut(&mut data) += 1;         // Won't clone anything
1939    /// let mut other_data = Rc::clone(&data); // Won't clone inner data
1940    /// *Rc::make_mut(&mut data) += 1;         // Clones inner data
1941    /// *Rc::make_mut(&mut data) += 1;         // Won't clone anything
1942    /// *Rc::make_mut(&mut other_data) *= 2;   // Won't clone anything
1943    ///
1944    /// // Now `data` and `other_data` point to different allocations.
1945    /// assert_eq!(*data, 8);
1946    /// assert_eq!(*other_data, 12);
1947    /// ```
1948    ///
1949    /// [`Weak`] pointers will be disassociated:
1950    ///
1951    /// ```
1952    /// use std::rc::Rc;
1953    ///
1954    /// let mut data = Rc::new(75);
1955    /// let weak = Rc::downgrade(&data);
1956    ///
1957    /// assert!(75 == *data);
1958    /// assert!(75 == *weak.upgrade().unwrap());
1959    ///
1960    /// *Rc::make_mut(&mut data) += 1;
1961    ///
1962    /// assert!(76 == *data);
1963    /// assert!(weak.upgrade().is_none());
1964    /// ```
1965    #[inline]
1966    #[stable(feature = "rc_unique", since = "1.4.0")]
1967    pub fn make_mut(this: &mut Self) -> &mut T {
1968        let size_of_val = size_of_val::<T>(&**this);
1969
1970        if Rc::strong_count(this) != 1 {
1971            // Gotta clone the data, there are other Rcs.
1972
1973            let this_data_ref: &T = &**this;
1974            // `in_progress` drops the allocation if we panic before finishing initializing it.
1975            let mut in_progress: UniqueRcUninit<T, A> =
1976                UniqueRcUninit::new(this_data_ref, this.alloc.clone());
1977
1978            // Initialize with clone of this.
1979            let initialized_clone = unsafe {
1980                // Clone. If the clone panics, `in_progress` will be dropped and clean up.
1981                this_data_ref.clone_to_uninit(in_progress.data_ptr().cast());
1982                // Cast type of pointer, now that it is initialized.
1983                in_progress.into_rc()
1984            };
1985
1986            // Replace `this` with newly constructed Rc.
1987            *this = initialized_clone;
1988        } else if Rc::weak_count(this) != 0 {
1989            // Can just steal the data, all that's left is Weaks
1990
1991            // We don't need panic-protection like the above branch does, but we might as well
1992            // use the same mechanism.
1993            let mut in_progress: UniqueRcUninit<T, A> =
1994                UniqueRcUninit::new(&**this, this.alloc.clone());
1995            unsafe {
1996                // Initialize `in_progress` with move of **this.
1997                // We have to express this in terms of bytes because `T: ?Sized`; there is no
1998                // operation that just copies a value based on its `size_of_val()`.
1999                ptr::copy_nonoverlapping(
2000                    ptr::from_ref(&**this).cast::<u8>(),
2001                    in_progress.data_ptr().cast::<u8>(),
2002                    size_of_val,
2003                );
2004
2005                this.inner().dec_strong();
2006                // Remove implicit strong-weak ref (no need to craft a fake
2007                // Weak here -- we know other Weaks can clean up for us)
2008                this.inner().dec_weak();
2009                // Replace `this` with newly constructed Rc that has the moved data.
2010                ptr::write(this, in_progress.into_rc());
2011            }
2012        }
2013        // This unsafety is ok because we're guaranteed that the pointer
2014        // returned is the *only* pointer that will ever be returned to T. Our
2015        // reference count is guaranteed to be 1 at this point, and we required
2016        // the `Rc<T>` itself to be `mut`, so we're returning the only possible
2017        // reference to the allocation.
2018        unsafe { &mut this.ptr.as_mut().value }
2019    }
2020}
2021
2022impl<T: Clone, A: Allocator> Rc<T, A> {
2023    /// If we have the only reference to `T` then unwrap it. Otherwise, clone `T` and return the
2024    /// clone.
2025    ///
2026    /// Assuming `rc_t` is of type `Rc<T>`, this function is functionally equivalent to
2027    /// `(*rc_t).clone()`, but will avoid cloning the inner value where possible.
2028    ///
2029    /// # Examples
2030    ///
2031    /// ```
2032    /// # use std::{ptr, rc::Rc};
2033    /// let inner = String::from("test");
2034    /// let ptr = inner.as_ptr();
2035    ///
2036    /// let rc = Rc::new(inner);
2037    /// let inner = Rc::unwrap_or_clone(rc);
2038    /// // The inner value was not cloned
2039    /// assert!(ptr::eq(ptr, inner.as_ptr()));
2040    ///
2041    /// let rc = Rc::new(inner);
2042    /// let rc2 = rc.clone();
2043    /// let inner = Rc::unwrap_or_clone(rc);
2044    /// // Because there were 2 references, we had to clone the inner value.
2045    /// assert!(!ptr::eq(ptr, inner.as_ptr()));
2046    /// // `rc2` is the last reference, so when we unwrap it we get back
2047    /// // the original `String`.
2048    /// let inner = Rc::unwrap_or_clone(rc2);
2049    /// assert!(ptr::eq(ptr, inner.as_ptr()));
2050    /// ```
2051    #[inline]
2052    #[stable(feature = "arc_unwrap_or_clone", since = "1.76.0")]
2053    pub fn unwrap_or_clone(this: Self) -> T {
2054        Rc::try_unwrap(this).unwrap_or_else(|rc| (*rc).clone())
2055    }
2056}
2057
2058impl<A: Allocator> Rc<dyn Any, A> {
2059    /// Attempts to downcast the `Rc<dyn Any>` to a concrete type.
2060    ///
2061    /// # Examples
2062    ///
2063    /// ```
2064    /// use std::any::Any;
2065    /// use std::rc::Rc;
2066    ///
2067    /// fn print_if_string(value: Rc<dyn Any>) {
2068    ///     if let Ok(string) = value.downcast::<String>() {
2069    ///         println!("String ({}): {}", string.len(), string);
2070    ///     }
2071    /// }
2072    ///
2073    /// let my_string = "Hello World".to_string();
2074    /// print_if_string(Rc::new(my_string));
2075    /// print_if_string(Rc::new(0i8));
2076    /// ```
2077    #[inline]
2078    #[stable(feature = "rc_downcast", since = "1.29.0")]
2079    pub fn downcast<T: Any>(self) -> Result<Rc<T, A>, Self> {
2080        if (*self).is::<T>() {
2081            unsafe {
2082                let (ptr, alloc) = Rc::into_inner_with_allocator(self);
2083                Ok(Rc::from_inner_in(ptr.cast(), alloc))
2084            }
2085        } else {
2086            Err(self)
2087        }
2088    }
2089
2090    /// Downcasts the `Rc<dyn Any>` to a concrete type.
2091    ///
2092    /// For a safe alternative see [`downcast`].
2093    ///
2094    /// # Examples
2095    ///
2096    /// ```
2097    /// #![feature(downcast_unchecked)]
2098    ///
2099    /// use std::any::Any;
2100    /// use std::rc::Rc;
2101    ///
2102    /// let x: Rc<dyn Any> = Rc::new(1_usize);
2103    ///
2104    /// unsafe {
2105    ///     assert_eq!(*x.downcast_unchecked::<usize>(), 1);
2106    /// }
2107    /// ```
2108    ///
2109    /// # Safety
2110    ///
2111    /// The contained value must be of type `T`. Calling this method
2112    /// with the incorrect type is *undefined behavior*.
2113    ///
2115    /// [`downcast`]: Self::downcast
2116    #[inline]
2117    #[unstable(feature = "downcast_unchecked", issue = "90850")]
2118    pub unsafe fn downcast_unchecked<T: Any>(self) -> Rc<T, A> {
2119        unsafe {
2120            let (ptr, alloc) = Rc::into_inner_with_allocator(self);
2121            Rc::from_inner_in(ptr.cast(), alloc)
2122        }
2123    }
2124}
2125
2126impl<T: ?Sized> Rc<T> {
2127    /// Allocates an `RcInner<T>` with sufficient space for
2128    /// a possibly-unsized inner value where the value has the layout provided.
2129    ///
2130    /// The function `mem_to_rc_inner` is called with the data pointer
2131    /// and must return a (potentially fat) pointer for the `RcInner<T>`.
2132    #[cfg(not(no_global_oom_handling))]
2133    unsafe fn allocate_for_layout(
2134        value_layout: Layout,
2135        allocate: impl FnOnce(Layout) -> Result<NonNull<[u8]>, AllocError>,
2136        mem_to_rc_inner: impl FnOnce(*mut u8) -> *mut RcInner<T>,
2137    ) -> *mut RcInner<T> {
2138        let layout = rc_inner_layout_for_value_layout(value_layout);
2139        unsafe {
2140            Rc::try_allocate_for_layout(value_layout, allocate, mem_to_rc_inner)
2141                .unwrap_or_else(|_| handle_alloc_error(layout))
2142        }
2143    }
2144
2145    /// Allocates an `RcInner<T>` with sufficient space for
2146    /// a possibly-unsized inner value where the value has the layout provided,
2147    /// returning an error if allocation fails.
2148    ///
2149    /// The function `mem_to_rc_inner` is called with the data pointer
2150    /// and must return a (potentially fat) pointer for the `RcInner<T>`.
2151    #[inline]
2152    unsafe fn try_allocate_for_layout(
2153        value_layout: Layout,
2154        allocate: impl FnOnce(Layout) -> Result<NonNull<[u8]>, AllocError>,
2155        mem_to_rc_inner: impl FnOnce(*mut u8) -> *mut RcInner<T>,
2156    ) -> Result<*mut RcInner<T>, AllocError> {
2157        let layout = rc_inner_layout_for_value_layout(value_layout);
2158
2159        // Allocate for the layout.
2160        let ptr = allocate(layout)?;
2161
2162        // Initialize the RcInner
2163        let inner = mem_to_rc_inner(ptr.as_non_null_ptr().as_ptr());
2164        unsafe {
2165            debug_assert_eq!(Layout::for_value_raw(inner), layout);
2166
2167            (&raw mut (*inner).strong).write(Cell::new(1));
2168            (&raw mut (*inner).weak).write(Cell::new(1));
2169        }
2170
2171        Ok(inner)
2172    }
2173}
2174
2175impl<T: ?Sized, A: Allocator> Rc<T, A> {
2176    /// Allocates an `RcInner<T>` with sufficient space for an unsized inner value
2177    #[cfg(not(no_global_oom_handling))]
2178    unsafe fn allocate_for_ptr_in(ptr: *const T, alloc: &A) -> *mut RcInner<T> {
2179        // Allocate for the `RcInner<T>` using the given value.
2180        unsafe {
2181            Rc::<T>::allocate_for_layout(
2182                Layout::for_value_raw(ptr),
2183                |layout| alloc.allocate(layout),
2184                |mem| mem.with_metadata_of(ptr as *const RcInner<T>),
2185            )
2186        }
2187    }
2188
2189    #[cfg(not(no_global_oom_handling))]
2190    fn from_box_in(src: Box<T, A>) -> Rc<T, A> {
2191        unsafe {
2192            let value_size = size_of_val(&*src);
2193            let ptr = Self::allocate_for_ptr_in(&*src, Box::allocator(&src));
2194
2195            // Copy value as bytes
2196            ptr::copy_nonoverlapping(
2197                (&raw const *src) as *const u8,
2198                (&raw mut (*ptr).value) as *mut u8,
2199                value_size,
2200            );
2201
2202            // Free the allocation without dropping its contents
2203            let (bptr, alloc) = Box::into_raw_with_allocator(src);
2204            let src = Box::from_raw_in(bptr as *mut mem::ManuallyDrop<T>, alloc.by_ref());
2205            drop(src);
2206
2207            Self::from_ptr_in(ptr, alloc)
2208        }
2209    }
2210}
2211
2212impl<T> Rc<[T]> {
2213    /// Allocates an `RcInner<[T]>` with the given length.
2214    #[cfg(not(no_global_oom_handling))]
2215    unsafe fn allocate_for_slice(len: usize) -> *mut RcInner<[T]> {
2216        unsafe {
2217            Self::allocate_for_layout(
2218                Layout::array::<T>(len).unwrap(),
2219                |layout| Global.allocate(layout),
2220                |mem| ptr::slice_from_raw_parts_mut(mem.cast::<T>(), len) as *mut RcInner<[T]>,
2221            )
2222        }
2223    }
2224
2225    /// Copy elements from slice into newly allocated `Rc<[T]>`
2226    ///
2227    /// Unsafe because the caller must either take ownership, bind `T: Copy` or
2228    /// bind `T: TrivialClone`.
2229    #[cfg(not(no_global_oom_handling))]
2230    unsafe fn copy_from_slice(v: &[T]) -> Rc<[T]> {
2231        unsafe {
2232            let ptr = Self::allocate_for_slice(v.len());
2233            ptr::copy_nonoverlapping(v.as_ptr(), (&raw mut (*ptr).value) as *mut T, v.len());
2234            Self::from_ptr(ptr)
2235        }
2236    }
2237
2238    /// Constructs an `Rc<[T]>` from an iterator known to be of a certain size.
2239    ///
2240    /// Behavior is undefined should the size be wrong.
2241    #[cfg(not(no_global_oom_handling))]
2242    unsafe fn from_iter_exact(iter: impl Iterator<Item = T>, len: usize) -> Rc<[T]> {
2243        // Panic guard while cloning T elements.
2244        // In the event of a panic, elements that have been written
2245        // into the new RcInner will be dropped, then the memory freed.
2246        struct Guard<T> {
2247            mem: NonNull<u8>,
2248            elems: *mut T,
2249            layout: Layout,
2250            n_elems: usize,
2251        }
2252
2253        impl<T> Drop for Guard<T> {
2254            fn drop(&mut self) {
2255                unsafe {
2256                    let slice = from_raw_parts_mut(self.elems, self.n_elems);
2257                    ptr::drop_in_place(slice);
2258
2259                    Global.deallocate(self.mem, self.layout);
2260                }
2261            }
2262        }
2263
2264        unsafe {
2265            let ptr = Self::allocate_for_slice(len);
2266
2267            let mem = ptr as *mut _ as *mut u8;
2268            let layout = Layout::for_value_raw(ptr);
2269
2270            // Pointer to first element
2271            let elems = (&raw mut (*ptr).value) as *mut T;
2272
2273            let mut guard = Guard { mem: NonNull::new_unchecked(mem), elems, layout, n_elems: 0 };
2274
2275            for (i, item) in iter.enumerate() {
2276                ptr::write(elems.add(i), item);
2277                guard.n_elems += 1;
2278            }
2279
2280            // All clear. Forget the guard so it doesn't free the new RcInner.
2281            mem::forget(guard);
2282
2283            Self::from_ptr(ptr)
2284        }
2285    }
2286}
2287
2288impl<T, A: Allocator> Rc<[T], A> {
2289    /// Allocates an `RcInner<[T]>` with the given length.
2290    #[inline]
2291    #[cfg(not(no_global_oom_handling))]
2292    unsafe fn allocate_for_slice_in(len: usize, alloc: &A) -> *mut RcInner<[T]> {
2293        unsafe {
2294            Rc::<[T]>::allocate_for_layout(
2295                Layout::array::<T>(len).unwrap(),
2296                |layout| alloc.allocate(layout),
2297                |mem| ptr::slice_from_raw_parts_mut(mem.cast::<T>(), len) as *mut RcInner<[T]>,
2298            )
2299        }
2300    }
2301}
2302
2303#[cfg(not(no_global_oom_handling))]
2304/// Specialization trait used for `From<&[T]>`.
2305trait RcFromSlice<T> {
2306    fn from_slice(slice: &[T]) -> Self;
2307}
2308
2309#[cfg(not(no_global_oom_handling))]
2310impl<T: Clone> RcFromSlice<T> for Rc<[T]> {
2311    #[inline]
2312    default fn from_slice(v: &[T]) -> Self {
2313        unsafe { Self::from_iter_exact(v.iter().cloned(), v.len()) }
2314    }
2315}
2316
2317#[cfg(not(no_global_oom_handling))]
2318impl<T: TrivialClone> RcFromSlice<T> for Rc<[T]> {
2319    #[inline]
2320    fn from_slice(v: &[T]) -> Self {
2321        // SAFETY: `T` implements `TrivialClone`, so this is sound and equivalent
2322        // to the above.
2323        unsafe { Rc::copy_from_slice(v) }
2324    }
2325}
2326
2327#[stable(feature = "rust1", since = "1.0.0")]
2328impl<T: ?Sized, A: Allocator> Deref for Rc<T, A> {
2329    type Target = T;
2330
2331    #[inline(always)]
2332    fn deref(&self) -> &T {
2333        &self.inner().value
2334    }
2335}
2336
2337#[unstable(feature = "pin_coerce_unsized_trait", issue = "123430")]
2338unsafe impl<T: ?Sized, A: Allocator> PinCoerceUnsized for Rc<T, A> {}
2339
2340//#[unstable(feature = "unique_rc_arc", issue = "112566")]
2341#[unstable(feature = "pin_coerce_unsized_trait", issue = "123430")]
2342unsafe impl<T: ?Sized, A: Allocator> PinCoerceUnsized for UniqueRc<T, A> {}
2343
2344#[unstable(feature = "pin_coerce_unsized_trait", issue = "123430")]
2345unsafe impl<T: ?Sized, A: Allocator> PinCoerceUnsized for Weak<T, A> {}
2346
2347#[unstable(feature = "deref_pure_trait", issue = "87121")]
2348unsafe impl<T: ?Sized, A: Allocator> DerefPure for Rc<T, A> {}
2349
2350//#[unstable(feature = "unique_rc_arc", issue = "112566")]
2351#[unstable(feature = "deref_pure_trait", issue = "87121")]
2352unsafe impl<T: ?Sized, A: Allocator> DerefPure for UniqueRc<T, A> {}
2353
2354#[unstable(feature = "legacy_receiver_trait", issue = "none")]
2355impl<T: ?Sized> LegacyReceiver for Rc<T> {}
2356
2357#[stable(feature = "rust1", since = "1.0.0")]
2358unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for Rc<T, A> {
2359    /// Drops the `Rc`.
2360    ///
2361    /// This will decrement the strong reference count. If the strong reference
2362    /// count reaches zero then the only other references (if any) are
2363    /// [`Weak`], so we `drop` the inner value.
2364    ///
2365    /// # Examples
2366    ///
2367    /// ```
2368    /// use std::rc::Rc;
2369    ///
2370    /// struct Foo;
2371    ///
2372    /// impl Drop for Foo {
2373    ///     fn drop(&mut self) {
2374    ///         println!("dropped!");
2375    ///     }
2376    /// }
2377    ///
2378    /// let foo  = Rc::new(Foo);
2379    /// let foo2 = Rc::clone(&foo);
2380    ///
2381    /// drop(foo);    // Doesn't print anything
2382    /// drop(foo2);   // Prints "dropped!"
2383    /// ```
2384    #[inline]
2385    fn drop(&mut self) {
2386        unsafe {
2387            self.inner().dec_strong();
2388            if self.inner().strong() == 0 {
2389                self.drop_slow();
2390            }
2391        }
2392    }
2393}
2394
2395#[stable(feature = "rust1", since = "1.0.0")]
2396impl<T: ?Sized, A: Allocator + Clone> Clone for Rc<T, A> {
2397    /// Makes a clone of the `Rc` pointer.
2398    ///
2399    /// This creates another pointer to the same allocation, increasing the
2400    /// strong reference count.
2401    ///
2402    /// # Examples
2403    ///
2404    /// ```
2405    /// use std::rc::Rc;
2406    ///
2407    /// let five = Rc::new(5);
2408    ///
2409    /// let _ = Rc::clone(&five);
2410    /// ```
2411    #[inline]
2412    fn clone(&self) -> Self {
2413        unsafe {
2414            self.inner().inc_strong();
2415            Self::from_inner_in(self.ptr, self.alloc.clone())
2416        }
2417    }
2418}
2419
2420#[unstable(feature = "ergonomic_clones", issue = "132290")]
2421impl<T: ?Sized, A: Allocator + Clone> UseCloned for Rc<T, A> {}
2422
2423#[cfg(not(no_global_oom_handling))]
2424#[stable(feature = "rust1", since = "1.0.0")]
2425impl<T: Default> Default for Rc<T> {
2426    /// Creates a new `Rc<T>`, with the `Default` value for `T`.
2427    ///
2428    /// # Examples
2429    ///
2430    /// ```
2431    /// use std::rc::Rc;
2432    ///
2433    /// let x: Rc<i32> = Default::default();
2434    /// assert_eq!(*x, 0);
2435    /// ```
2436    #[inline]
2437    fn default() -> Self {
2438        unsafe {
2439            Self::from_inner(
2440                Box::leak(Box::write(
2441                    Box::new_uninit(),
2442                    RcInner { strong: Cell::new(1), weak: Cell::new(1), value: T::default() },
2443                ))
2444                .into(),
2445            )
2446        }
2447    }
2448}
2449
2450#[cfg(not(no_global_oom_handling))]
2451#[stable(feature = "more_rc_default_impls", since = "1.80.0")]
2452impl Default for Rc<str> {
2453    /// Creates an empty `str` inside an `Rc`.
2454    ///
2455    /// This may or may not share an allocation with other `Rc`s on the same thread.
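    ///
    /// # Examples
    ///
    /// A short sketch:
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let s: Rc<str> = Default::default();
    /// assert_eq!(&s[..], "");
    /// ```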
2456    #[inline]
2457    fn default() -> Self {
2458        let rc = Rc::<[u8]>::default();
2459        // `[u8]` has the same layout as `str`.
2460        unsafe { Rc::from_raw(Rc::into_raw(rc) as *const str) }
2461    }
2462}
2463
2464#[cfg(not(no_global_oom_handling))]
2465#[stable(feature = "more_rc_default_impls", since = "1.80.0")]
2466impl<T> Default for Rc<[T]> {
2467    /// Creates an empty `[T]` inside an `Rc`.
2468    ///
2469    /// This may or may not share an allocation with other `Rc`s on the same thread.
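    ///
    /// # Examples
    ///
    /// A short sketch:
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let v: Rc<[i32]> = Default::default();
    /// assert!(v.is_empty());
    /// ```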
2470    #[inline]
2471    fn default() -> Self {
2472        let arr: [T; 0] = [];
2473        Rc::from(arr)
2474    }
2475}
2476
2477#[cfg(not(no_global_oom_handling))]
2478#[stable(feature = "pin_default_impls", since = "1.91.0")]
2479impl<T> Default for Pin<Rc<T>>
2480where
2481    T: ?Sized,
2482    Rc<T>: Default,
2483{
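    /// Creates a pinned `Rc` containing the `Default` value for `T`.
    ///
    /// A short sketch:
    ///
    /// ```
    /// use std::pin::Pin;
    /// use std::rc::Rc;
    ///
    /// let x: Pin<Rc<i32>> = Default::default();
    /// assert_eq!(*x, 0);
    /// ```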
2484    #[inline]
2485    fn default() -> Self {
2486        unsafe { Pin::new_unchecked(Rc::<T>::default()) }
2487    }
2488}
2489
2490#[stable(feature = "rust1", since = "1.0.0")]
2491trait RcEqIdent<T: ?Sized + PartialEq, A: Allocator> {
2492    fn eq(&self, other: &Rc<T, A>) -> bool;
2493    fn ne(&self, other: &Rc<T, A>) -> bool;
2494}
2495
2496#[stable(feature = "rust1", since = "1.0.0")]
2497impl<T: ?Sized + PartialEq, A: Allocator> RcEqIdent<T, A> for Rc<T, A> {
2498    #[inline]
2499    default fn eq(&self, other: &Rc<T, A>) -> bool {
2500        **self == **other
2501    }
2502
2503    #[inline]
2504    default fn ne(&self, other: &Rc<T, A>) -> bool {
2505        **self != **other
2506    }
2507}
2508
2509// Hack to allow specializing on `Eq` even though `Eq` has a method.
2510#[rustc_unsafe_specialization_marker]
2511pub(crate) trait MarkerEq: PartialEq<Self> {}
2512
2513impl<T: Eq> MarkerEq for T {}
2514
2515/// We're doing this specialization here, and not as a more general optimization on `&T`, because it
2516/// would otherwise add a cost to all equality checks on refs. We assume that `Rc`s are used to
2517/// store large values, that are slow to clone, but also heavy to check for equality, causing this
2518/// cost to pay off more easily. It's also more likely to have two `Rc` clones, that point to
2519/// the same value, than two `&T`s.
2520///
2521/// We can only do this when `T: Eq` as a `PartialEq` might be deliberately irreflexive.
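///
/// A sketch of the observable effect, using a hypothetical `Expensive` type
/// that counts how often its `eq` runs (illustration only):
///
/// ```
/// use std::cell::Cell;
/// use std::rc::Rc;
///
/// thread_local! { static CALLS: Cell<u32> = Cell::new(0); }
///
/// #[derive(Eq)]
/// struct Expensive(i32);
///
/// impl PartialEq for Expensive {
///     fn eq(&self, other: &Self) -> bool {
///         CALLS.with(|c| c.set(c.get() + 1));
///         self.0 == other.0
///     }
/// }
///
/// let a = Rc::new(Expensive(1));
/// let b = Rc::clone(&a);
/// assert!(a == b);
/// // Pointer identity short-circuits, so the inner `eq` never ran.
/// assert_eq!(CALLS.with(|c| c.get()), 0);
/// ```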
2522#[stable(feature = "rust1", since = "1.0.0")]
2523impl<T: ?Sized + MarkerEq, A: Allocator> RcEqIdent<T, A> for Rc<T, A> {
2524    #[inline]
2525    fn eq(&self, other: &Rc<T, A>) -> bool {
2526        Rc::ptr_eq(self, other) || **self == **other
2527    }
2528
2529    #[inline]
2530    fn ne(&self, other: &Rc<T, A>) -> bool {
2531        !Rc::ptr_eq(self, other) && **self != **other
2532    }
2533}
2534
2535#[stable(feature = "rust1", since = "1.0.0")]
2536impl<T: ?Sized + PartialEq, A: Allocator> PartialEq for Rc<T, A> {
2537    /// Equality for two `Rc`s.
2538    ///
2539    /// Two `Rc`s are equal if their inner values are equal, even if they are
2540    /// stored in different allocations.
2541    ///
2542    /// If `T` also implements `Eq` (implying reflexivity of equality),
2543    /// two `Rc`s that point to the same allocation are
2544    /// always equal.
2545    ///
2546    /// # Examples
2547    ///
2548    /// ```
2549    /// use std::rc::Rc;
2550    ///
2551    /// let five = Rc::new(5);
2552    ///
2553    /// assert!(five == Rc::new(5));
2554    /// ```
2555    #[inline]
2556    fn eq(&self, other: &Rc<T, A>) -> bool {
2557        RcEqIdent::eq(self, other)
2558    }
2559
2560    /// Inequality for two `Rc`s.
2561    ///
2562    /// Two `Rc`s are not equal if their inner values are not equal.
2563    ///
2564    /// If `T` also implements `Eq` (implying reflexivity of equality),
2565    /// two `Rc`s that point to the same allocation are
2566    /// always equal.
2567    ///
2568    /// # Examples
2569    ///
2570    /// ```
2571    /// use std::rc::Rc;
2572    ///
2573    /// let five = Rc::new(5);
2574    ///
2575    /// assert!(five != Rc::new(6));
2576    /// ```
2577    #[inline]
2578    fn ne(&self, other: &Rc<T, A>) -> bool {
2579        RcEqIdent::ne(self, other)
2580    }
2581}
2582
2583#[stable(feature = "rust1", since = "1.0.0")]
2584impl<T: ?Sized + Eq, A: Allocator> Eq for Rc<T, A> {}
2585
2586#[stable(feature = "rust1", since = "1.0.0")]
2587impl<T: ?Sized + PartialOrd, A: Allocator> PartialOrd for Rc<T, A> {
2588    /// Partial comparison for two `Rc`s.
2589    ///
2590    /// The two are compared by calling `partial_cmp()` on their inner values.
2591    ///
2592    /// # Examples
2593    ///
2594    /// ```
2595    /// use std::rc::Rc;
2596    /// use std::cmp::Ordering;
2597    ///
2598    /// let five = Rc::new(5);
2599    ///
2600    /// assert_eq!(Some(Ordering::Less), five.partial_cmp(&Rc::new(6)));
2601    /// ```
2602    #[inline(always)]
2603    fn partial_cmp(&self, other: &Rc<T, A>) -> Option<Ordering> {
2604        (**self).partial_cmp(&**other)
2605    }
2606
2607    /// Less-than comparison for two `Rc`s.
2608    ///
2609    /// The two are compared by calling `<` on their inner values.
2610    ///
2611    /// # Examples
2612    ///
2613    /// ```
2614    /// use std::rc::Rc;
2615    ///
2616    /// let five = Rc::new(5);
2617    ///
2618    /// assert!(five < Rc::new(6));
2619    /// ```
2620    #[inline(always)]
2621    fn lt(&self, other: &Rc<T, A>) -> bool {
2622        **self < **other
2623    }
2624
2625    /// 'Less than or equal to' comparison for two `Rc`s.
2626    ///
2627    /// The two are compared by calling `<=` on their inner values.
2628    ///
2629    /// # Examples
2630    ///
2631    /// ```
2632    /// use std::rc::Rc;
2633    ///
2634    /// let five = Rc::new(5);
2635    ///
2636    /// assert!(five <= Rc::new(5));
2637    /// ```
2638    #[inline(always)]
2639    fn le(&self, other: &Rc<T, A>) -> bool {
2640        **self <= **other
2641    }
2642
2643    /// Greater-than comparison for two `Rc`s.
2644    ///
2645    /// The two are compared by calling `>` on their inner values.
2646    ///
2647    /// # Examples
2648    ///
2649    /// ```
2650    /// use std::rc::Rc;
2651    ///
2652    /// let five = Rc::new(5);
2653    ///
2654    /// assert!(five > Rc::new(4));
2655    /// ```
2656    #[inline(always)]
2657    fn gt(&self, other: &Rc<T, A>) -> bool {
2658        **self > **other
2659    }
2660
2661    /// 'Greater than or equal to' comparison for two `Rc`s.
2662    ///
2663    /// The two are compared by calling `>=` on their inner values.
2664    ///
2665    /// # Examples
2666    ///
2667    /// ```
2668    /// use std::rc::Rc;
2669    ///
2670    /// let five = Rc::new(5);
2671    ///
2672    /// assert!(five >= Rc::new(5));
2673    /// ```
2674    #[inline(always)]
2675    fn ge(&self, other: &Rc<T, A>) -> bool {
2676        **self >= **other
2677    }
2678}
2679
2680#[stable(feature = "rust1", since = "1.0.0")]
2681impl<T: ?Sized + Ord, A: Allocator> Ord for Rc<T, A> {
2682    /// Comparison for two `Rc`s.
2683    ///
2684    /// The two are compared by calling `cmp()` on their inner values.
2685    ///
2686    /// # Examples
2687    ///
2688    /// ```
2689    /// use std::rc::Rc;
2690    /// use std::cmp::Ordering;
2691    ///
2692    /// let five = Rc::new(5);
2693    ///
2694    /// assert_eq!(Ordering::Less, five.cmp(&Rc::new(6)));
2695    /// ```
2696    #[inline]
2697    fn cmp(&self, other: &Rc<T, A>) -> Ordering {
2698        (**self).cmp(&**other)
2699    }
2700}
2701
2702#[stable(feature = "rust1", since = "1.0.0")]
2703impl<T: ?Sized + Hash, A: Allocator> Hash for Rc<T, A> {
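    /// Hashes the inner value, delegating to `T`'s `Hash` implementation.
    ///
    /// A small illustrative check, using `DefaultHasher` purely for the sketch:
    ///
    /// ```
    /// use std::collections::hash_map::DefaultHasher;
    /// use std::hash::{Hash, Hasher};
    /// use std::rc::Rc;
    ///
    /// fn hash_of(x: impl Hash) -> u64 {
    ///     let mut hasher = DefaultHasher::new();
    ///     x.hash(&mut hasher);
    ///     hasher.finish()
    /// }
    ///
    /// // `Rc<T>` hashes exactly like the `T` it points to.
    /// assert_eq!(hash_of(Rc::new(5)), hash_of(5));
    /// ```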
2704    fn hash<H: Hasher>(&self, state: &mut H) {
2705        (**self).hash(state);
2706    }
2707}
2708
2709#[stable(feature = "rust1", since = "1.0.0")]
2710impl<T: ?Sized + fmt::Display, A: Allocator> fmt::Display for Rc<T, A> {
2711    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
2712        fmt::Display::fmt(&**self, f)
2713    }
2714}
2715
2716#[stable(feature = "rust1", since = "1.0.0")]
2717impl<T: ?Sized + fmt::Debug, A: Allocator> fmt::Debug for Rc<T, A> {
2718    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
2719        fmt::Debug::fmt(&**self, f)
2720    }
2721}
2722
2723#[stable(feature = "rust1", since = "1.0.0")]
2724impl<T: ?Sized, A: Allocator> fmt::Pointer for Rc<T, A> {
2725    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
2726        fmt::Pointer::fmt(&(&raw const **self), f)
2727    }
2728}
2729
2730#[cfg(not(no_global_oom_handling))]
2731#[stable(feature = "from_for_ptrs", since = "1.6.0")]
2732impl<T> From<T> for Rc<T> {
2733    /// Converts a generic type `T` into an `Rc<T>`.
2734    ///
2735    /// The conversion allocates on the heap and moves `t`
2736    /// from the stack into it.
2737    ///
2738    /// # Example
2739    /// ```rust
2740    /// # use std::rc::Rc;
2741    /// let x = 5;
2742    /// let rc = Rc::new(5);
2743    ///
2744    /// assert_eq!(Rc::from(x), rc);
2745    /// ```
2746    fn from(t: T) -> Self {
2747        Rc::new(t)
2748    }
2749}
2750
2751#[cfg(not(no_global_oom_handling))]
2752#[stable(feature = "shared_from_array", since = "1.74.0")]
2753impl<T, const N: usize> From<[T; N]> for Rc<[T]> {
2754    /// Converts a [`[T; N]`](prim@array) into an `Rc<[T]>`.
2755    ///
2756    /// The conversion moves the array into a newly allocated `Rc`.
2757    ///
2758    /// # Example
2759    ///
2760    /// ```
2761    /// # use std::rc::Rc;
2762    /// let original: [i32; 3] = [1, 2, 3];
2763    /// let shared: Rc<[i32]> = Rc::from(original);
2764    /// assert_eq!(&[1, 2, 3], &shared[..]);
2765    /// ```
2766    #[inline]
2767    fn from(v: [T; N]) -> Rc<[T]> {
2768        Rc::<[T; N]>::from(v)
2769    }
2770}
2771
2772#[cfg(not(no_global_oom_handling))]
2773#[stable(feature = "shared_from_slice", since = "1.21.0")]
2774impl<T: Clone> From<&[T]> for Rc<[T]> {
2775    /// Allocates a reference-counted slice and fills it by cloning `v`'s items.
2776    ///
2777    /// # Example
2778    ///
2779    /// ```
2780    /// # use std::rc::Rc;
2781    /// let original: &[i32] = &[1, 2, 3];
2782    /// let shared: Rc<[i32]> = Rc::from(original);
2783    /// assert_eq!(&[1, 2, 3], &shared[..]);
2784    /// ```
2785    #[inline]
2786    fn from(v: &[T]) -> Rc<[T]> {
2787        <Self as RcFromSlice<T>>::from_slice(v)
2788    }
2789}
2790
2791#[cfg(not(no_global_oom_handling))]
2792#[stable(feature = "shared_from_mut_slice", since = "1.84.0")]
2793impl<T: Clone> From<&mut [T]> for Rc<[T]> {
2794    /// Allocates a reference-counted slice and fills it by cloning `v`'s items.
2795    ///
2796    /// # Example
2797    ///
2798    /// ```
2799    /// # use std::rc::Rc;
2800    /// let mut original = [1, 2, 3];
2801    /// let original: &mut [i32] = &mut original;
2802    /// let shared: Rc<[i32]> = Rc::from(original);
2803    /// assert_eq!(&[1, 2, 3], &shared[..]);
2804    /// ```
2805    #[inline]
2806    fn from(v: &mut [T]) -> Rc<[T]> {
2807        Rc::from(&*v)
2808    }
2809}
2810
2811#[cfg(not(no_global_oom_handling))]
2812#[stable(feature = "shared_from_slice", since = "1.21.0")]
2813impl From<&str> for Rc<str> {
2814    /// Allocates a reference-counted string slice and copies `v` into it.
2815    ///
2816    /// # Example
2817    ///
2818    /// ```
2819    /// # use std::rc::Rc;
2820    /// let shared: Rc<str> = Rc::from("statue");
2821    /// assert_eq!("statue", &shared[..]);
2822    /// ```
2823    #[inline]
2824    fn from(v: &str) -> Rc<str> {
2825        let rc = Rc::<[u8]>::from(v.as_bytes());
2826        unsafe { Rc::from_raw(Rc::into_raw(rc) as *const str) }
2827    }
2828}
2829
2830#[cfg(not(no_global_oom_handling))]
2831#[stable(feature = "shared_from_mut_slice", since = "1.84.0")]
2832impl From<&mut str> for Rc<str> {
2833    /// Allocates a reference-counted string slice and copies `v` into it.
2834    ///
2835    /// # Example
2836    ///
2837    /// ```
2838    /// # use std::rc::Rc;
2839    /// let mut original = String::from("statue");
2840    /// let original: &mut str = &mut original;
2841    /// let shared: Rc<str> = Rc::from(original);
2842    /// assert_eq!("statue", &shared[..]);
2843    /// ```
2844    #[inline]
2845    fn from(v: &mut str) -> Rc<str> {
2846        Rc::from(&*v)
2847    }
2848}
2849
2850#[cfg(not(no_global_oom_handling))]
2851#[stable(feature = "shared_from_slice", since = "1.21.0")]
2852impl From<String> for Rc<str> {
2853    /// Allocates a reference-counted string slice and copies `v` into it.
2854    ///
2855    /// # Example
2856    ///
2857    /// ```
2858    /// # use std::rc::Rc;
2859    /// let original: String = "statue".to_owned();
2860    /// let shared: Rc<str> = Rc::from(original);
2861    /// assert_eq!("statue", &shared[..]);
2862    /// ```
2863    #[inline]
2864    fn from(v: String) -> Rc<str> {
2865        Rc::from(&v[..])
2866    }
2867}
2868
2869#[cfg(not(no_global_oom_handling))]
2870#[stable(feature = "shared_from_slice", since = "1.21.0")]
2871impl<T: ?Sized, A: Allocator> From<Box<T, A>> for Rc<T, A> {
2872    /// Moves a boxed object to a new, reference-counted allocation.
2873    ///
2874    /// # Example
2875    ///
2876    /// ```
2877    /// # use std::rc::Rc;
2878    /// let original: Box<i32> = Box::new(1);
2879    /// let shared: Rc<i32> = Rc::from(original);
2880    /// assert_eq!(1, *shared);
2881    /// ```
2882    #[inline]
2883    fn from(v: Box<T, A>) -> Rc<T, A> {
2884        Rc::from_box_in(v)
2885    }
2886}
2887
2888#[cfg(not(no_global_oom_handling))]
2889#[stable(feature = "shared_from_slice", since = "1.21.0")]
2890impl<T, A: Allocator> From<Vec<T, A>> for Rc<[T], A> {
2891    /// Allocates a reference-counted slice and moves `v`'s items into it.
2892    ///
2893    /// # Example
2894    ///
2895    /// ```
2896    /// # use std::rc::Rc;
2897    /// let unique: Vec<i32> = vec![1, 2, 3];
2898    /// let shared: Rc<[i32]> = Rc::from(unique);
2899    /// assert_eq!(&[1, 2, 3], &shared[..]);
2900    /// ```
2901    #[inline]
2902    fn from(v: Vec<T, A>) -> Rc<[T], A> {
2903        unsafe {
2904            let (vec_ptr, len, cap, alloc) = v.into_raw_parts_with_alloc();
2905
2906            let rc_ptr = Self::allocate_for_slice_in(len, &alloc);
2907            ptr::copy_nonoverlapping(vec_ptr, (&raw mut (*rc_ptr).value) as *mut T, len);
2908
2909            // Create a `Vec<T, &A>` with length 0, to deallocate the buffer
2910            // without dropping its contents or the allocator
2911            let _ = Vec::from_raw_parts_in(vec_ptr, 0, cap, &alloc);
2912
2913            Self::from_ptr_in(rc_ptr, alloc)
2914        }
2915    }
2916}
2917
2918#[stable(feature = "shared_from_cow", since = "1.45.0")]
2919impl<'a, B> From<Cow<'a, B>> for Rc<B>
2920where
2921    B: ToOwned + ?Sized,
2922    Rc<B>: From<&'a B> + From<B::Owned>,
2923{
2924    /// Creates a reference-counted pointer from a clone-on-write pointer by
2925    /// copying its content.
2926    ///
2927    /// # Example
2928    ///
2929    /// ```rust
2930    /// # use std::rc::Rc;
2931    /// # use std::borrow::Cow;
2932    /// let cow: Cow<'_, str> = Cow::Borrowed("eggplant");
2933    /// let shared: Rc<str> = Rc::from(cow);
2934    /// assert_eq!("eggplant", &shared[..]);
2935    /// ```
2936    #[inline]
2937    fn from(cow: Cow<'a, B>) -> Rc<B> {
2938        match cow {
2939            Cow::Borrowed(s) => Rc::from(s),
2940            Cow::Owned(s) => Rc::from(s),
2941        }
2942    }
2943}
2944
2945#[stable(feature = "shared_from_str", since = "1.62.0")]
2946impl From<Rc<str>> for Rc<[u8]> {
2947    /// Converts a reference-counted string slice into a byte slice.
2948    ///
2949    /// # Example
2950    ///
2951    /// ```
2952    /// # use std::rc::Rc;
2953    /// let string: Rc<str> = Rc::from("eggplant");
2954    /// let bytes: Rc<[u8]> = Rc::from(string);
2955    /// assert_eq!("eggplant".as_bytes(), bytes.as_ref());
2956    /// ```
2957    #[inline]
2958    fn from(rc: Rc<str>) -> Self {
2959        // SAFETY: `str` has the same layout as `[u8]`.
2960        unsafe { Rc::from_raw(Rc::into_raw(rc) as *const [u8]) }
2961    }
2962}
2963
2964#[stable(feature = "boxed_slice_try_from", since = "1.43.0")]
2965impl<T, A: Allocator, const N: usize> TryFrom<Rc<[T], A>> for Rc<[T; N], A> {
2966    type Error = Rc<[T], A>;
2967
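    /// Attempts to convert an `Rc<[T]>` into an `Rc<[T; N]>`, succeeding when
    /// the slice's length is exactly `N` and handing the original `Rc` back
    /// otherwise.
    ///
    /// A short sketch:
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let slice: Rc<[i32]> = Rc::from([1, 2, 3]);
    /// let array: Rc<[i32; 3]> = Rc::try_from(slice).unwrap();
    /// assert_eq!(*array, [1, 2, 3]);
    ///
    /// let too_short: Rc<[i32]> = Rc::from([1, 2]);
    /// assert!(Rc::<[i32; 3]>::try_from(too_short).is_err());
    /// ```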
2968    fn try_from(boxed_slice: Rc<[T], A>) -> Result<Self, Self::Error> {
2969        if boxed_slice.len() == N {
2970            let (ptr, alloc) = Rc::into_inner_with_allocator(boxed_slice);
2971            Ok(unsafe { Rc::from_inner_in(ptr.cast(), alloc) })
2972        } else {
2973            Err(boxed_slice)
2974        }
2975    }
2976}
2977
2978#[cfg(not(no_global_oom_handling))]
2979#[stable(feature = "shared_from_iter", since = "1.37.0")]
2980impl<T> FromIterator<T> for Rc<[T]> {
2981    /// Takes each element in the `Iterator` and collects it into an `Rc<[T]>`.
2982    ///
2983    /// # Performance characteristics
2984    ///
2985    /// ## The general case
2986    ///
2987    /// In the general case, collecting into `Rc<[T]>` is done by first
2988    /// collecting into a `Vec<T>`. That is, when writing the following:
2989    ///
2990    /// ```rust
2991    /// # use std::rc::Rc;
2992    /// let evens: Rc<[u8]> = (0..10).filter(|&x| x % 2 == 0).collect();
2993    /// # assert_eq!(&*evens, &[0, 2, 4, 6, 8]);
2994    /// ```
2995    ///
2996    /// this behaves as if we wrote:
2997    ///
2998    /// ```rust
2999    /// # use std::rc::Rc;
3000    /// let evens: Rc<[u8]> = (0..10).filter(|&x| x % 2 == 0)
3001    ///     .collect::<Vec<_>>() // The first set of allocations happens here.
3002    ///     .into(); // A second allocation for `Rc<[T]>` happens here.
3003    /// # assert_eq!(&*evens, &[0, 2, 4, 6, 8]);
3004    /// ```
3005    ///
3006    /// This will allocate as many times as needed for constructing the `Vec<T>`
3007    /// and then it will allocate once for turning the `Vec<T>` into the `Rc<[T]>`.
3008    ///
3009    /// ## Iterators of known length
3010    ///
3011    /// When your `Iterator` implements `TrustedLen` and is of an exact size,
3012    /// a single allocation will be made for the `Rc<[T]>`. For example:
3013    ///
3014    /// ```rust
3015    /// # use std::rc::Rc;
3016    /// let evens: Rc<[u8]> = (0..10).collect(); // Just a single allocation happens here.
3017    /// # assert_eq!(&*evens, &*(0..10).collect::<Vec<_>>());
3018    /// ```
3019    fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
3020        ToRcSlice::to_rc_slice(iter.into_iter())
3021    }
3022}
3023
3024/// Specialization trait used for collecting into `Rc<[T]>`.
3025#[cfg(not(no_global_oom_handling))]
3026trait ToRcSlice<T>: Iterator<Item = T> + Sized {
3027    fn to_rc_slice(self) -> Rc<[T]>;
3028}
3029
3030#[cfg(not(no_global_oom_handling))]
3031impl<T, I: Iterator<Item = T>> ToRcSlice<T> for I {
3032    default fn to_rc_slice(self) -> Rc<[T]> {
3033        self.collect::<Vec<T>>().into()
3034    }
3035}
3036
3037#[cfg(not(no_global_oom_handling))]
3038impl<T, I: iter::TrustedLen<Item = T>> ToRcSlice<T> for I {
3039    fn to_rc_slice(self) -> Rc<[T]> {
3040        // This is the case for a `TrustedLen` iterator.
3041        let (low, high) = self.size_hint();
3042        if let Some(high) = high {
3043            debug_assert_eq!(
3044                low,
3045                high,
3046                "TrustedLen iterator's size hint is not exact: {:?}",
3047                (low, high)
3048            );
3049
3050            unsafe {
3051                // SAFETY: `TrustedLen` guarantees that the iterator's length is exactly `low`.
3052                Rc::from_iter_exact(self, low)
3053            }
3054        } else {
3055            // TrustedLen contract guarantees that `upper_bound == None` implies an iterator
3056            // length exceeding `usize::MAX`.
3057            // The default implementation would collect into a vec which would panic.
3058            // Thus we panic here immediately without invoking `Vec` code.
3059            panic!("capacity overflow");
3060        }
3061    }
3062}
3063
3064/// `Weak` is a version of [`Rc`] that holds a non-owning reference to the
3065/// managed allocation.
3066///
3067/// The allocation is accessed by calling [`upgrade`] on the `Weak`
3068/// pointer, which returns an <code>[Option]<[Rc]\<T>></code>.
3069///
3070/// Since a `Weak` reference does not count towards ownership, it will not
3071/// prevent the value stored in the allocation from being dropped, and `Weak` itself makes no
3072/// guarantees about the value still being present. Thus it may return [`None`]
3073/// when [`upgrade`]d. Note however that a `Weak` reference *does* prevent the allocation
3074/// itself (the backing store) from being deallocated.
3075///
3076/// A `Weak` pointer is useful for keeping a temporary reference to the allocation
3077/// managed by [`Rc`] without preventing its inner value from being dropped. It is also used to
3078/// prevent circular references between [`Rc`] pointers, since mutual owning references
3079/// would never allow either [`Rc`] to be dropped. For example, a tree could
3080/// have strong [`Rc`] pointers from parent nodes to children, and `Weak`
3081/// pointers from children back to their parents.
3082///
3083/// The typical way to obtain a `Weak` pointer is to call [`Rc::downgrade`].
3084///
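/// # Examples
///
/// A minimal sketch of the downgrade/upgrade round trip:
///
/// ```
/// use std::rc::Rc;
///
/// let strong = Rc::new("hello".to_owned());
/// let weak = Rc::downgrade(&strong);
///
/// // While a strong pointer exists, `upgrade` succeeds.
/// assert_eq!(weak.upgrade().unwrap().as_str(), "hello");
///
/// drop(strong);
/// // Once every strong pointer is gone, the value is dropped and `upgrade` fails.
/// assert!(weak.upgrade().is_none());
/// ```
///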
3085/// [`upgrade`]: Weak::upgrade
3086#[stable(feature = "rc_weak", since = "1.4.0")]
3087#[rustc_diagnostic_item = "RcWeak"]
3088pub struct Weak<
3089    T: ?Sized,
3090    #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
3091> {
3092    // This is a `NonNull` to allow optimizing the size of this type in enums,
3093    // but it is not necessarily a valid pointer.
3094    // `Weak::new` sets this to `usize::MAX` so that it doesn’t need
3095    // to allocate space on the heap. That's not a value a real pointer
3096    // will ever have because RcInner has alignment at least 2.
3097    ptr: NonNull<RcInner<T>>,
3098    alloc: A,
3099}
3100
3101#[stable(feature = "rc_weak", since = "1.4.0")]
3102impl<T: ?Sized, A: Allocator> !Send for Weak<T, A> {}
3103#[stable(feature = "rc_weak", since = "1.4.0")]
3104impl<T: ?Sized, A: Allocator> !Sync for Weak<T, A> {}
3105
3106#[unstable(feature = "coerce_unsized", issue = "18598")]
3107impl<T: ?Sized + Unsize<U>, U: ?Sized, A: Allocator> CoerceUnsized<Weak<U, A>> for Weak<T, A> {}
3108
3109#[unstable(feature = "dispatch_from_dyn", issue = "none")]
3110impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Weak<U>> for Weak<T> {}
3111
3112// SAFETY: `Weak::clone` doesn't access any `Cell`s which could contain the `Weak` being cloned.
3113#[unstable(feature = "cell_get_cloned", issue = "145329")]
3114unsafe impl<T: ?Sized> CloneFromCell for Weak<T> {}
3115
3116impl<T> Weak<T> {
3117    /// Constructs a new `Weak<T>`, without allocating any memory.
3118    /// Calling [`upgrade`] on the return value always gives [`None`].
3119    ///
3120    /// [`upgrade`]: Weak::upgrade
3121    ///
3122    /// # Examples
3123    ///
3124    /// ```
3125    /// use std::rc::Weak;
3126    ///
3127    /// let empty: Weak<i64> = Weak::new();
3128    /// assert!(empty.upgrade().is_none());
3129    /// ```
3130    #[inline]
3131    #[stable(feature = "downgraded_weak", since = "1.10.0")]
3132    #[rustc_const_stable(feature = "const_weak_new", since = "1.73.0")]
3133    #[must_use]
3134    pub const fn new() -> Weak<T> {
3135        Weak { ptr: NonNull::without_provenance(NonZeroUsize::MAX), alloc: Global }
3136    }
3137}
3138
3139impl<T, A: Allocator> Weak<T, A> {
3140    /// Constructs a new `Weak<T>` in the provided allocator, without allocating any
3141    /// memory.
3142    /// Calling [`upgrade`] on the return value always gives [`None`].
3143    ///
3144    /// [`upgrade`]: Weak::upgrade
3145    ///
3146    /// # Examples
3147    ///
3148    /// ```
3149    /// #![feature(allocator_api)]
3150    /// use std::{alloc::System, rc::Weak};
3151    /// let empty: Weak<i64, _> = Weak::new_in(System);
3152    /// assert!(empty.upgrade().is_none());
3153    /// ```
3154    #[inline]
3155    #[unstable(feature = "allocator_api", issue = "32838")]
3156    pub fn new_in(alloc: A) -> Weak<T, A> {
3157        Weak { ptr: NonNull::without_provenance(NonZeroUsize::MAX), alloc }
3158    }
3159}
3160
3161pub(crate) fn is_dangling<T: ?Sized>(ptr: *const T) -> bool {
3162    (ptr.cast::<()>()).addr() == usize::MAX
3163}
3164
3165/// Helper type to allow accessing the reference counts without
3166/// making any assertions about the data field.
3167struct WeakInner<'a> {
3168    weak: &'a Cell<usize>,
3169    strong: &'a Cell<usize>,
3170}
3171
3172impl<T: ?Sized> Weak<T> {
3173    /// Converts a raw pointer previously created by [`into_raw`] back into `Weak<T>`.
3174    ///
3175    /// This can be used to safely get a strong reference (by calling [`upgrade`]
3176    /// later) or to release the weak reference by dropping the `Weak<T>`.
3177    ///
3178    /// It takes ownership of one weak reference (with the exception of pointers created by [`new`],
3179    /// as these don't own anything; the method still works on them).
3180    ///
3181    /// # Safety
3182    ///
3183    /// The pointer must have originated from [`into_raw`] and must still own its potential
3184    /// weak reference, and `ptr` must point to a block of memory allocated by the global allocator.
3185    ///
3186    /// It is allowed for the strong count to be 0 at the time of calling this. Nevertheless, this
3187    /// takes ownership of one weak reference currently represented as a raw pointer (the weak
3188    /// count is not modified by this operation) and therefore it must be paired with a previous
3189    /// call to [`into_raw`].
3190    ///
3191    /// # Examples
3192    ///
3193    /// ```
3194    /// use std::rc::{Rc, Weak};
3195    ///
3196    /// let strong = Rc::new("hello".to_owned());
3197    ///
3198    /// let raw_1 = Rc::downgrade(&strong).into_raw();
3199    /// let raw_2 = Rc::downgrade(&strong).into_raw();
3200    ///
3201    /// assert_eq!(2, Rc::weak_count(&strong));
3202    ///
3203    /// assert_eq!("hello", &*unsafe { Weak::from_raw(raw_1) }.upgrade().unwrap());
3204    /// assert_eq!(1, Rc::weak_count(&strong));
3205    ///
3206    /// drop(strong);
3207    ///
3208    /// // Decrement the last weak count.
3209    /// assert!(unsafe { Weak::from_raw(raw_2) }.upgrade().is_none());
3210    /// ```
3211    ///
3212    /// [`into_raw`]: Weak::into_raw
3213    /// [`upgrade`]: Weak::upgrade
3214    /// [`new`]: Weak::new
3215    #[inline]
3216    #[stable(feature = "weak_into_raw", since = "1.45.0")]
3217    pub unsafe fn from_raw(ptr: *const T) -> Self {
3218        unsafe { Self::from_raw_in(ptr, Global) }
3219    }
3220
3221    /// Consumes the `Weak<T>` and turns it into a raw pointer.
3222    ///
3223    /// This converts the weak pointer into a raw pointer, while still preserving the ownership of
3224    /// one weak reference (the weak count is not modified by this operation). It can be turned
3225    /// back into the `Weak<T>` with [`from_raw`].
3226    ///
3227    /// The same restrictions of accessing the target of the pointer as with
3228    /// [`as_ptr`] apply.
3229    ///
3230    /// # Examples
3231    ///
3232    /// ```
3233    /// use std::rc::{Rc, Weak};
3234    ///
3235    /// let strong = Rc::new("hello".to_owned());
3236    /// let weak = Rc::downgrade(&strong);
3237    /// let raw = weak.into_raw();
3238    ///
3239    /// assert_eq!(1, Rc::weak_count(&strong));
3240    /// assert_eq!("hello", unsafe { &*raw });
3241    ///
3242    /// drop(unsafe { Weak::from_raw(raw) });
3243    /// assert_eq!(0, Rc::weak_count(&strong));
3244    /// ```
3245    ///
3246    /// [`from_raw`]: Weak::from_raw
3247    /// [`as_ptr`]: Weak::as_ptr
3248    #[must_use = "losing the pointer will leak memory"]
3249    #[stable(feature = "weak_into_raw", since = "1.45.0")]
3250    pub fn into_raw(self) -> *const T {
3251        mem::ManuallyDrop::new(self).as_ptr()
3252    }
3253}
3254
3255impl<T: ?Sized, A: Allocator> Weak<T, A> {
3256    /// Returns a reference to the underlying allocator.
3257    #[inline]
3258    #[unstable(feature = "allocator_api", issue = "32838")]
3259    pub fn allocator(&self) -> &A {
3260        &self.alloc
3261    }
3262
3263    /// Returns a raw pointer to the object `T` pointed to by this `Weak<T>`.
3264    ///
3265    /// The pointer is valid only if there are some strong references. The pointer may be dangling,
3266    /// unaligned or even [`null`] otherwise.
3267    ///
3268    /// # Examples
3269    ///
3270    /// ```
3271    /// use std::rc::Rc;
3272    /// use std::ptr;
3273    ///
3274    /// let strong = Rc::new("hello".to_owned());
3275    /// let weak = Rc::downgrade(&strong);
3276    /// // Both point to the same object
3277    /// assert!(ptr::eq(&*strong, weak.as_ptr()));
3278    /// // The strong here keeps it alive, so we can still access the object.
3279    /// assert_eq!("hello", unsafe { &*weak.as_ptr() });
3280    ///
3281    /// drop(strong);
3282    /// // But not any more. We can do weak.as_ptr(), but accessing the pointer would lead to
3283    /// // undefined behavior.
3284    /// // assert_eq!("hello", unsafe { &*weak.as_ptr() });
3285    /// ```
3286    ///
3287    /// [`null`]: ptr::null
3288    #[must_use]
3289    #[stable(feature = "rc_as_ptr", since = "1.45.0")]
3290    pub fn as_ptr(&self) -> *const T {
3291        let ptr: *mut RcInner<T> = NonNull::as_ptr(self.ptr);
3292
3293        if is_dangling(ptr) {
3294            // If the pointer is dangling, we return the sentinel directly. This cannot be
3295            // a valid payload address, as the payload is at least as aligned as RcInner (usize).
3296            ptr as *const T
3297        } else {
3298            // SAFETY: if is_dangling returns false, then the pointer is dereferenceable.
3299            // The payload may be dropped at this point, and we have to maintain provenance,
3300            // so use raw pointer manipulation.
3301            unsafe { &raw mut (*ptr).value }
3302        }
3303    }
3304
3305    /// Consumes the `Weak<T>`, returning the wrapped pointer and allocator.
3306    ///
3307    /// This converts the weak pointer into a raw pointer, while still preserving the ownership of
3308    /// one weak reference (the weak count is not modified by this operation). It can be turned
3309    /// back into the `Weak<T>` with [`from_raw_in`].
3310    ///
3311    /// The same restrictions of accessing the target of the pointer as with
3312    /// [`as_ptr`] apply.
3313    ///
3314    /// # Examples
3315    ///
3316    /// ```
3317    /// #![feature(allocator_api)]
3318    /// use std::rc::{Rc, Weak};
3319    /// use std::alloc::System;
3320    ///
3321    /// let strong = Rc::new_in("hello".to_owned(), System);
3322    /// let weak = Rc::downgrade(&strong);
3323    /// let (raw, alloc) = weak.into_raw_with_allocator();
3324    ///
3325    /// assert_eq!(1, Rc::weak_count(&strong));
3326    /// assert_eq!("hello", unsafe { &*raw });
3327    ///
3328    /// drop(unsafe { Weak::from_raw_in(raw, alloc) });
3329    /// assert_eq!(0, Rc::weak_count(&strong));
3330    /// ```
3331    ///
3332    /// [`from_raw_in`]: Weak::from_raw_in
3333    /// [`as_ptr`]: Weak::as_ptr
3334    #[must_use = "losing the pointer will leak memory"]
3335    #[inline]
3336    #[unstable(feature = "allocator_api", issue = "32838")]
3337    pub fn into_raw_with_allocator(self) -> (*const T, A) {
3338        let this = mem::ManuallyDrop::new(self);
3339        let result = this.as_ptr();
3340        // Safety: `this` is ManuallyDrop so the allocator will not be double-dropped
3341        let alloc = unsafe { ptr::read(&this.alloc) };
3342        (result, alloc)
3343    }
3344
3345    /// Converts a raw pointer previously created by [`into_raw`] back into `Weak<T>`.
3346    ///
3347    /// This can be used to safely get a strong reference (by calling [`upgrade`]
3348    /// later) or to release the weak reference by dropping the `Weak<T>`.
3349    ///
3350    /// It takes ownership of one weak reference (with the exception of pointers created by [`new`],
3351    /// as these don't own anything; the method still works on them).
3352    ///
3353    /// # Safety
3354    ///
3355    /// The pointer must have originated from [`into_raw`] and must still own its potential
3356    /// weak reference, and `ptr` must point to a block of memory allocated by `alloc`.
3357    ///
3358    /// It is allowed for the strong count to be 0 at the time of calling this. Nevertheless, this
3359    /// takes ownership of one weak reference currently represented as a raw pointer (the weak
3360    /// count is not modified by this operation) and therefore it must be paired with a previous
3361    /// call to [`into_raw`].
3362    ///
3363    /// # Examples
3364    ///
3365    /// ```
3366    /// use std::rc::{Rc, Weak};
3367    ///
3368    /// let strong = Rc::new("hello".to_owned());
3369    ///
3370    /// let raw_1 = Rc::downgrade(&strong).into_raw();
3371    /// let raw_2 = Rc::downgrade(&strong).into_raw();
3372    ///
3373    /// assert_eq!(2, Rc::weak_count(&strong));
3374    ///
3375    /// assert_eq!("hello", &*unsafe { Weak::from_raw(raw_1) }.upgrade().unwrap());
3376    /// assert_eq!(1, Rc::weak_count(&strong));
3377    ///
3378    /// drop(strong);
3379    ///
3380    /// // Decrement the last weak count.
3381    /// assert!(unsafe { Weak::from_raw(raw_2) }.upgrade().is_none());
3382    /// ```
3383    ///
3384    /// [`into_raw`]: Weak::into_raw
3385    /// [`upgrade`]: Weak::upgrade
3386    /// [`new`]: Weak::new
3387    #[inline]
3388    #[unstable(feature = "allocator_api", issue = "32838")]
3389    pub unsafe fn from_raw_in(ptr: *const T, alloc: A) -> Self {
3390        // See Weak::as_ptr for context on how the input pointer is derived.
3391
3392        let ptr = if is_dangling(ptr) {
3393            // This is a dangling Weak.
3394            ptr as *mut RcInner<T>
3395        } else {
3396            // Otherwise, we're guaranteed the pointer came from a nondangling Weak.
3397            // SAFETY: data_offset is safe to call, as ptr references a real (potentially dropped) T.
3398            let offset = unsafe { data_offset(ptr) };
3399            // Thus, we reverse the offset to get the whole RcInner.
3400            // SAFETY: the pointer originated from a Weak, so this offset is safe.
3401            unsafe { ptr.byte_sub(offset) as *mut RcInner<T> }
3402        };
3403
3404        // SAFETY: we now have recovered the original Weak pointer, so can create the Weak.
3405        Weak { ptr: unsafe { NonNull::new_unchecked(ptr) }, alloc }
3406    }
3407
3408    /// Attempts to upgrade the `Weak` pointer to an [`Rc`], delaying
3409    /// dropping of the inner value if successful.
3410    ///
3411    /// Returns [`None`] if the inner value has since been dropped.
3412    ///
3413    /// # Examples
3414    ///
3415    /// ```
3416    /// use std::rc::Rc;
3417    ///
3418    /// let five = Rc::new(5);
3419    ///
3420    /// let weak_five = Rc::downgrade(&five);
3421    ///
3422    /// let strong_five: Option<Rc<_>> = weak_five.upgrade();
3423    /// assert!(strong_five.is_some());
3424    ///
3425    /// // Destroy all strong pointers.
3426    /// drop(strong_five);
3427    /// drop(five);
3428    ///
3429    /// assert!(weak_five.upgrade().is_none());
3430    /// ```
3431    #[must_use = "this returns a new `Rc`, \
3432                  without modifying the original weak pointer"]
3433    #[stable(feature = "rc_weak", since = "1.4.0")]
3434    pub fn upgrade(&self) -> Option<Rc<T, A>>
3435    where
3436        A: Clone,
3437    {
3438        let inner = self.inner()?;
3439
3440        if inner.strong() == 0 {
3441            None
3442        } else {
3443            unsafe {
3444                inner.inc_strong();
3445                Some(Rc::from_inner_in(self.ptr, self.alloc.clone()))
3446            }
3447        }
3448    }
3449
3450    /// Gets the number of strong (`Rc`) pointers pointing to this allocation.
3451    ///
3452    /// If `self` was created using [`Weak::new`], this will return 0.
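    ///
    /// # Examples
    ///
    /// A brief illustration:
    ///
    /// ```
    /// use std::rc::{Rc, Weak};
    ///
    /// let five = Rc::new(5);
    /// let weak_five = Rc::downgrade(&five);
    /// assert_eq!(weak_five.strong_count(), 1);
    ///
    /// let empty: Weak<i32> = Weak::new();
    /// assert_eq!(empty.strong_count(), 0);
    /// ```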
3453    #[must_use]
3454    #[stable(feature = "weak_counts", since = "1.41.0")]
3455    pub fn strong_count(&self) -> usize {
3456        if let Some(inner) = self.inner() { inner.strong() } else { 0 }
3457    }
3458
3459    /// Gets the number of `Weak` pointers pointing to this allocation.
3460    ///
3461    /// If no strong pointers remain, this will return zero.
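    ///
    /// # Examples
    ///
    /// A brief illustration:
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let five = Rc::new(5);
    /// let weak_five = Rc::downgrade(&five);
    /// assert_eq!(weak_five.weak_count(), 1);
    ///
    /// let also_weak = weak_five.clone();
    /// assert_eq!(weak_five.weak_count(), 2);
    /// ```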
3462    #[must_use]
3463    #[stable(feature = "weak_counts", since = "1.41.0")]
3464    pub fn weak_count(&self) -> usize {
3465        if let Some(inner) = self.inner() {
3466            if inner.strong() > 0 {
3467                inner.weak() - 1 // subtract the implicit weak ptr
3468            } else {
3469                0
3470            }
3471        } else {
3472            0
3473        }
3474    }
3475
3476    /// Returns `None` when the pointer is dangling and there is no allocated `RcInner`
3477    /// (i.e., when this `Weak` was created by `Weak::new`).
3478    #[inline]
3479    fn inner(&self) -> Option<WeakInner<'_>> {
3480        if is_dangling(self.ptr.as_ptr()) {
3481            None
3482        } else {
3483            // We are careful to *not* create a reference covering the "data" field, as
3484            // the field may be mutated concurrently (for example, if the last `Rc`
3485            // is dropped, the data field will be dropped in-place).
3486            Some(unsafe {
3487                let ptr = self.ptr.as_ptr();
3488                WeakInner { strong: &(*ptr).strong, weak: &(*ptr).weak }
3489            })
3490        }
3491    }
3492
3493    /// Returns `true` if the two `Weak`s point to the same allocation (similar to [`ptr::eq`]), or if
3494    /// both don't point to any allocation (because they were created with `Weak::new()`). However,
3495    /// this function ignores the metadata of `dyn Trait` pointers.
3496    ///
3497    /// # Notes
3498    ///
3499    /// Since this compares pointers, any two `Weak`s created by `Weak::new()` will compare
3500    /// equal to each other, even though they don't point to any allocation.
3501    ///
3502    /// # Examples
3503    ///
3504    /// ```
3505    /// use std::rc::Rc;
3506    ///
3507    /// let first_rc = Rc::new(5);
3508    /// let first = Rc::downgrade(&first_rc);
3509    /// let second = Rc::downgrade(&first_rc);
3510    ///
3511    /// assert!(first.ptr_eq(&second));
3512    ///
3513    /// let third_rc = Rc::new(5);
3514    /// let third = Rc::downgrade(&third_rc);
3515    ///
3516    /// assert!(!first.ptr_eq(&third));
3517    /// ```
3518    ///
3519    /// Comparing `Weak::new`.
3520    ///
3521    /// ```
3522    /// use std::rc::{Rc, Weak};
3523    ///
3524    /// let first = Weak::new();
3525    /// let second = Weak::new();
3526    /// assert!(first.ptr_eq(&second));
3527    ///
3528    /// let third_rc = Rc::new(());
3529    /// let third = Rc::downgrade(&third_rc);
3530    /// assert!(!first.ptr_eq(&third));
3531    /// ```
3532    #[inline]
3533    #[must_use]
3534    #[stable(feature = "weak_ptr_eq", since = "1.39.0")]
3535    pub fn ptr_eq(&self, other: &Self) -> bool {
3536        ptr::addr_eq(self.ptr.as_ptr(), other.ptr.as_ptr())
3537    }
3538}
3539
3540#[stable(feature = "rc_weak", since = "1.4.0")]
3541unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for Weak<T, A> {
3542    /// Drops the `Weak` pointer.
3543    ///
3544    /// # Examples
3545    ///
3546    /// ```
3547    /// use std::rc::{Rc, Weak};
3548    ///
3549    /// struct Foo;
3550    ///
3551    /// impl Drop for Foo {
3552    ///     fn drop(&mut self) {
3553    ///         println!("dropped!");
3554    ///     }
3555    /// }
3556    ///
3557    /// let foo = Rc::new(Foo);
3558    /// let weak_foo = Rc::downgrade(&foo);
3559    /// let other_weak_foo = Weak::clone(&weak_foo);
3560    ///
3561    /// drop(weak_foo);   // Doesn't print anything
3562    /// drop(foo);        // Prints "dropped!"
3563    ///
3564    /// assert!(other_weak_foo.upgrade().is_none());
3565    /// ```
3566    fn drop(&mut self) {
3567        let inner = if let Some(inner) = self.inner() { inner } else { return };
3568
3569        inner.dec_weak();
3570        // the weak count starts at 1, and will only go to zero if all
3571        // the strong pointers have disappeared.
3572        if inner.weak() == 0 {
3573            unsafe {
3574                self.alloc.deallocate(self.ptr.cast(), Layout::for_value_raw(self.ptr.as_ptr()));
3575            }
3576        }
3577    }
3578}
3579
3580#[stable(feature = "rc_weak", since = "1.4.0")]
3581impl<T: ?Sized, A: Allocator + Clone> Clone for Weak<T, A> {
3582    /// Makes a clone of the `Weak` pointer that points to the same allocation.
3583    ///
3584    /// # Examples
3585    ///
3586    /// ```
3587    /// use std::rc::{Rc, Weak};
3588    ///
3589    /// let weak_five = Rc::downgrade(&Rc::new(5));
3590    ///
3591    /// let _ = Weak::clone(&weak_five);
3592    /// ```
3593    #[inline]
3594    fn clone(&self) -> Weak<T, A> {
3595        if let Some(inner) = self.inner() {
3596            inner.inc_weak()
3597        }
3598        Weak { ptr: self.ptr, alloc: self.alloc.clone() }
3599    }
3600}
3601
3602#[unstable(feature = "ergonomic_clones", issue = "132290")]
3603impl<T: ?Sized, A: Allocator + Clone> UseCloned for Weak<T, A> {}
3604
3605#[stable(feature = "rc_weak", since = "1.4.0")]
3606impl<T: ?Sized, A: Allocator> fmt::Debug for Weak<T, A> {
3607    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
3608        write!(f, "(Weak)")
3609    }
3610}
3611
3612#[stable(feature = "downgraded_weak", since = "1.10.0")]
3613impl<T> Default for Weak<T> {
3614    /// Constructs a new `Weak<T>`, without allocating any memory.
3615    /// Calling [`upgrade`] on the return value always gives [`None`].
3616    ///
3617    /// [`upgrade`]: Weak::upgrade
3618    ///
3619    /// # Examples
3620    ///
3621    /// ```
3622    /// use std::rc::Weak;
3623    ///
3624    /// let empty: Weak<i64> = Default::default();
3625    /// assert!(empty.upgrade().is_none());
3626    /// ```
3627    fn default() -> Weak<T> {
3628        Weak::new()
3629    }
3630}
3631
3632// NOTE: If you mem::forget Rcs (or Weaks), drop is skipped and the ref-count
3633// is not decremented, meaning the ref-count can overflow, and then you can
3634// free the allocation while outstanding Rcs (or Weaks) exist, which would be
3635// unsound. We abort because this is such a degenerate scenario that we don't
3636// care about what happens -- no real program should ever experience this.
3637//
3638// This should have negligible overhead since you don't actually need to
3639// clone these much in Rust thanks to ownership and move-semantics.
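//
// For illustration only (this is not something real code should do): forgetting
// a clone skips the decrement, so the count can only grow from then on:
//
//     use std::{mem, rc::Rc};
//
//     let rc = Rc::new(());
//     mem::forget(rc.clone()); // the forgotten clone's `drop` never runs
//     assert_eq!(Rc::strong_count(&rc), 2);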
3640
3641#[doc(hidden)]
3642trait RcInnerPtr {
3643    fn weak_ref(&self) -> &Cell<usize>;
3644    fn strong_ref(&self) -> &Cell<usize>;
3645
3646    #[inline]
3647    fn strong(&self) -> usize {
3648        self.strong_ref().get()
3649    }
3650
3651    #[inline]
3652    fn inc_strong(&self) {
3653        let strong = self.strong();
3654
3655        // We insert an `assume` here to hint LLVM at an otherwise
3656        // missed optimization.
3657        // SAFETY: The reference count will never be zero when this is
3658        // called.
3659        unsafe {
3660            hint::assert_unchecked(strong != 0);
3661        }
3662
3663        let strong = strong.wrapping_add(1);
3664        self.strong_ref().set(strong);
3665
3666        // We want to abort on overflow instead of dropping the value.
3667        // Checking for overflow after the store instead of before
3668        // allows for slightly better code generation.
3669        if core::intrinsics::unlikely(strong == 0) {
3670            abort();
3671        }
3672    }
3673
3674    #[inline]
3675    fn dec_strong(&self) {
3676        self.strong_ref().set(self.strong() - 1);
3677    }
3678
3679    #[inline]
3680    fn weak(&self) -> usize {
3681        self.weak_ref().get()
3682    }
3683
3684    #[inline]
3685    fn inc_weak(&self) {
3686        let weak = self.weak();
3687
3688        // We insert an `assume` here to hint LLVM at an otherwise
3689        // missed optimization.
3690        // SAFETY: The reference count will never be zero when this is
3691        // called.
3692        unsafe {
3693            hint::assert_unchecked(weak != 0);
3694        }
3695
3696        let weak = weak.wrapping_add(1);
3697        self.weak_ref().set(weak);
3698
3699        // We want to abort on overflow instead of dropping the value.
3700        // Checking for overflow after the store instead of before
3701        // allows for slightly better code generation.
3702        if core::intrinsics::unlikely(weak == 0) {
3703            abort();
3704        }
3705    }
3706
3707    #[inline]
3708    fn dec_weak(&self) {
3709        self.weak_ref().set(self.weak() - 1);
3710    }
3711}
3712
3713impl<T: ?Sized> RcInnerPtr for RcInner<T> {
3714    #[inline(always)]
3715    fn weak_ref(&self) -> &Cell<usize> {
3716        &self.weak
3717    }
3718
3719    #[inline(always)]
3720    fn strong_ref(&self) -> &Cell<usize> {
3721        &self.strong
3722    }
3723}
3724
3725impl<'a> RcInnerPtr for WeakInner<'a> {
3726    #[inline(always)]
3727    fn weak_ref(&self) -> &Cell<usize> {
3728        self.weak
3729    }
3730
3731    #[inline(always)]
3732    fn strong_ref(&self) -> &Cell<usize> {
3733        self.strong
3734    }
3735}
3736
3737#[stable(feature = "rust1", since = "1.0.0")]
3738impl<T: ?Sized, A: Allocator> borrow::Borrow<T> for Rc<T, A> {
3739    fn borrow(&self) -> &T {
3740        &**self
3741    }
3742}
3743
3744#[stable(since = "1.5.0", feature = "smart_ptr_as_ref")]
3745impl<T: ?Sized, A: Allocator> AsRef<T> for Rc<T, A> {
3746    fn as_ref(&self) -> &T {
3747        &**self
3748    }
3749}
3750
3751#[stable(feature = "pin", since = "1.33.0")]
3752impl<T: ?Sized, A: Allocator> Unpin for Rc<T, A> {}
3753
3754/// Gets the offset within an `RcInner` for the payload behind a pointer.
3755///
3756/// # Safety
3757///
3758/// The pointer must point to (and have valid metadata for) a previously
3759/// valid instance of T, but the T is allowed to be dropped.
3760unsafe fn data_offset<T: ?Sized>(ptr: *const T) -> usize {
3761    // Align the unsized value to the end of the RcInner.
3762    // Because RcInner is repr(C), it will always be the last field in memory.
3763    // SAFETY: since the only unsized types possible are slices, trait objects,
3764    // and extern types, the input safety requirement is currently enough to
3765    // satisfy the requirements of align_of_val_raw; this is an implementation
3766    // detail of the language that must not be relied upon outside of std.
3767    unsafe { data_offset_align(align_of_val_raw(ptr)) }
3768}
3769
3770#[inline]
3771fn data_offset_align(align: usize) -> usize {
3772    let layout = Layout::new::<RcInner<()>>();
3773    layout.size() + layout.padding_needed_for(align)
3774}
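
// A worked instance of the computation above (illustrative, assuming a 64-bit
// target): `RcInner<()>` is two `usize` counters, so `layout.size()` is 16.
// For a payload with alignment 8, `padding_needed_for(8)` is 0 and the data
// offset is 16; for a payload with alignment 32, 16 bytes of padding are added
// and the offset becomes 32.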
3775
3776/// A uniquely owned [`Rc`].
3777///
3778/// This represents an `Rc` that is known to be uniquely owned -- that is, have exactly one strong
3779/// reference. Multiple weak pointers can be created, but attempts to upgrade those to strong
3780/// references will fail unless the `UniqueRc` they point to has been converted into a regular `Rc`.
3781///
3782/// Because they are uniquely owned, the contents of a `UniqueRc` can be freely mutated. A common
3783/// use case is to have an object be mutable during its initialization phase but then have it become
3784/// immutable and converted to a normal `Rc`.
3785///
3786/// This can be used as a flexible way to create cyclic data structures, as in the example below.
3787///
3788/// ```
3789/// #![feature(unique_rc_arc)]
3790/// use std::rc::{Rc, Weak, UniqueRc};
3791///
3792/// struct Gadget {
3793///     #[allow(dead_code)]
3794///     me: Weak<Gadget>,
3795/// }
3796///
3797/// fn create_gadget() -> Option<Rc<Gadget>> {
3798///     let mut rc = UniqueRc::new(Gadget {
3799///         me: Weak::new(),
3800///     });
3801///     rc.me = UniqueRc::downgrade(&rc);
3802///     Some(UniqueRc::into_rc(rc))
3803/// }
3804///
3805/// create_gadget().unwrap();
3806/// ```
3807///
3808/// An advantage of using `UniqueRc` over [`Rc::new_cyclic`] to build cyclic data structures is that
3809/// [`Rc::new_cyclic`]'s `data_fn` parameter cannot be async or return a [`Result`]. As shown in the
3810/// previous example, `UniqueRc` allows for more flexibility in the construction of cyclic data,
3811/// including fallible or async constructors.
3812#[unstable(feature = "unique_rc_arc", issue = "112566")]
3813pub struct UniqueRc<
3814    T: ?Sized,
3815    #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
3816> {
3817    ptr: NonNull<RcInner<T>>,
3818    // Define the ownership of `RcInner<T>` for drop-check
3819    _marker: PhantomData<RcInner<T>>,
3820    // Invariance is necessary for soundness: once other `Weak`
3821    // references exist, we already have a form of shared mutability!
3822    _marker2: PhantomData<*mut T>,
3823    alloc: A,
3824}
3825
3826// Not necessary for correctness since `UniqueRc` contains `NonNull`,
3827// but having an explicit negative impl is nice for documentation purposes
3828// and results in nicer error messages.
3829#[unstable(feature = "unique_rc_arc", issue = "112566")]
3830impl<T: ?Sized, A: Allocator> !Send for UniqueRc<T, A> {}
3831
3832// Not necessary for correctness since `UniqueRc` contains `NonNull`,
3833// but having an explicit negative impl is nice for documentation purposes
3834// and results in nicer error messages.
3835#[unstable(feature = "unique_rc_arc", issue = "112566")]
3836impl<T: ?Sized, A: Allocator> !Sync for UniqueRc<T, A> {}
3837
3838#[unstable(feature = "unique_rc_arc", issue = "112566")]
3839impl<T: ?Sized + Unsize<U>, U: ?Sized, A: Allocator> CoerceUnsized<UniqueRc<U, A>>
3840    for UniqueRc<T, A>
3841{
3842}
3843
3844//#[unstable(feature = "unique_rc_arc", issue = "112566")]
3845#[unstable(feature = "dispatch_from_dyn", issue = "none")]
3846impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<UniqueRc<U>> for UniqueRc<T> {}
3847
3848#[unstable(feature = "unique_rc_arc", issue = "112566")]
3849impl<T: ?Sized + fmt::Display, A: Allocator> fmt::Display for UniqueRc<T, A> {
3850    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
3851        fmt::Display::fmt(&**self, f)
3852    }
3853}
3854
3855#[unstable(feature = "unique_rc_arc", issue = "112566")]
3856impl<T: ?Sized + fmt::Debug, A: Allocator> fmt::Debug for UniqueRc<T, A> {
3857    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
3858        fmt::Debug::fmt(&**self, f)
3859    }
3860}
3861
3862#[unstable(feature = "unique_rc_arc", issue = "112566")]
3863impl<T: ?Sized, A: Allocator> fmt::Pointer for UniqueRc<T, A> {
3864    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
3865        fmt::Pointer::fmt(&(&raw const **self), f)
3866    }
3867}
3868
3869#[unstable(feature = "unique_rc_arc", issue = "112566")]
3870impl<T: ?Sized, A: Allocator> borrow::Borrow<T> for UniqueRc<T, A> {
3871    fn borrow(&self) -> &T {
3872        &**self
3873    }
3874}
3875
3876#[unstable(feature = "unique_rc_arc", issue = "112566")]
3877impl<T: ?Sized, A: Allocator> borrow::BorrowMut<T> for UniqueRc<T, A> {
3878    fn borrow_mut(&mut self) -> &mut T {
3879        &mut **self
3880    }
3881}
3882
3883#[unstable(feature = "unique_rc_arc", issue = "112566")]
3884impl<T: ?Sized, A: Allocator> AsRef<T> for UniqueRc<T, A> {
3885    fn as_ref(&self) -> &T {
3886        &**self
3887    }
3888}
3889
3890#[unstable(feature = "unique_rc_arc", issue = "112566")]
3891impl<T: ?Sized, A: Allocator> AsMut<T> for UniqueRc<T, A> {
3892    fn as_mut(&mut self) -> &mut T {
3893        &mut **self
3894    }
3895}
3896
3897#[unstable(feature = "unique_rc_arc", issue = "112566")]
3898impl<T: ?Sized, A: Allocator> Unpin for UniqueRc<T, A> {}
3899
3900#[unstable(feature = "unique_rc_arc", issue = "112566")]
3901impl<T: ?Sized + PartialEq, A: Allocator> PartialEq for UniqueRc<T, A> {
3902    /// Equality for two `UniqueRc`s.
3903    ///
3904    /// Two `UniqueRc`s are equal if their inner values are equal.
3905    ///
3906    /// # Examples
3907    ///
3908    /// ```
3909    /// #![feature(unique_rc_arc)]
3910    /// use std::rc::UniqueRc;
3911    ///
3912    /// let five = UniqueRc::new(5);
3913    ///
3914    /// assert!(five == UniqueRc::new(5));
3915    /// ```
3916    #[inline]
3917    fn eq(&self, other: &Self) -> bool {
3918        PartialEq::eq(&**self, &**other)
3919    }
3920
3921    /// Inequality for two `UniqueRc`s.
3922    ///
3923    /// Two `UniqueRc`s are not equal if their inner values are not equal.
3924    ///
3925    /// # Examples
3926    ///
3927    /// ```
3928    /// #![feature(unique_rc_arc)]
3929    /// use std::rc::UniqueRc;
3930    ///
3931    /// let five = UniqueRc::new(5);
3932    ///
3933    /// assert!(five != UniqueRc::new(6));
3934    /// ```
3935    #[inline]
3936    fn ne(&self, other: &Self) -> bool {
3937        PartialEq::ne(&**self, &**other)
3938    }
3939}
3940
3941#[unstable(feature = "unique_rc_arc", issue = "112566")]
3942impl<T: ?Sized + PartialOrd, A: Allocator> PartialOrd for UniqueRc<T, A> {
3943    /// Partial comparison for two `UniqueRc`s.
3944    ///
3945    /// The two are compared by calling `partial_cmp()` on their inner values.
3946    ///
3947    /// # Examples
3948    ///
3949    /// ```
3950    /// #![feature(unique_rc_arc)]
3951    /// use std::rc::UniqueRc;
3952    /// use std::cmp::Ordering;
3953    ///
3954    /// let five = UniqueRc::new(5);
3955    ///
3956    /// assert_eq!(Some(Ordering::Less), five.partial_cmp(&UniqueRc::new(6)));
3957    /// ```
3958    #[inline(always)]
3959    fn partial_cmp(&self, other: &UniqueRc<T, A>) -> Option<Ordering> {
3960        (**self).partial_cmp(&**other)
3961    }
3962
3963    /// Less-than comparison for two `UniqueRc`s.
3964    ///
3965    /// The two are compared by calling `<` on their inner values.
3966    ///
3967    /// # Examples
3968    ///
3969    /// ```
3970    /// #![feature(unique_rc_arc)]
3971    /// use std::rc::UniqueRc;
3972    ///
3973    /// let five = UniqueRc::new(5);
3974    ///
3975    /// assert!(five < UniqueRc::new(6));
3976    /// ```
3977    #[inline(always)]
3978    fn lt(&self, other: &UniqueRc<T, A>) -> bool {
3979        **self < **other
3980    }
3981
3982    /// 'Less than or equal to' comparison for two `UniqueRc`s.
3983    ///
3984    /// The two are compared by calling `<=` on their inner values.
3985    ///
3986    /// # Examples
3987    ///
3988    /// ```
3989    /// #![feature(unique_rc_arc)]
3990    /// use std::rc::UniqueRc;
3991    ///
3992    /// let five = UniqueRc::new(5);
3993    ///
3994    /// assert!(five <= UniqueRc::new(5));
3995    /// ```
3996    #[inline(always)]
3997    fn le(&self, other: &UniqueRc<T, A>) -> bool {
3998        **self <= **other
3999    }
4000
4001    /// Greater-than comparison for two `UniqueRc`s.
4002    ///
4003    /// The two are compared by calling `>` on their inner values.
4004    ///
4005    /// # Examples
4006    ///
4007    /// ```
4008    /// #![feature(unique_rc_arc)]
4009    /// use std::rc::UniqueRc;
4010    ///
4011    /// let five = UniqueRc::new(5);
4012    ///
4013    /// assert!(five > UniqueRc::new(4));
4014    /// ```
4015    #[inline(always)]
4016    fn gt(&self, other: &UniqueRc<T, A>) -> bool {
4017        **self > **other
4018    }
4019
4020    /// 'Greater than or equal to' comparison for two `UniqueRc`s.
4021    ///
4022    /// The two are compared by calling `>=` on their inner values.
4023    ///
4024    /// # Examples
4025    ///
4026    /// ```
4027    /// #![feature(unique_rc_arc)]
4028    /// use std::rc::UniqueRc;
4029    ///
4030    /// let five = UniqueRc::new(5);
4031    ///
4032    /// assert!(five >= UniqueRc::new(5));
4033    /// ```
4034    #[inline(always)]
4035    fn ge(&self, other: &UniqueRc<T, A>) -> bool {
4036        **self >= **other
4037    }
4038}
4039
4040#[unstable(feature = "unique_rc_arc", issue = "112566")]
4041impl<T: ?Sized + Ord, A: Allocator> Ord for UniqueRc<T, A> {
4042    /// Comparison for two `UniqueRc`s.
4043    ///
4044    /// The two are compared by calling `cmp()` on their inner values.
4045    ///
4046    /// # Examples
4047    ///
4048    /// ```
4049    /// #![feature(unique_rc_arc)]
4050    /// use std::rc::UniqueRc;
4051    /// use std::cmp::Ordering;
4052    ///
4053    /// let five = UniqueRc::new(5);
4054    ///
4055    /// assert_eq!(Ordering::Less, five.cmp(&UniqueRc::new(6)));
4056    /// ```
4057    #[inline]
4058    fn cmp(&self, other: &UniqueRc<T, A>) -> Ordering {
4059        (**self).cmp(&**other)
4060    }
4061}
4062
4063#[unstable(feature = "unique_rc_arc", issue = "112566")]
4064impl<T: ?Sized + Eq, A: Allocator> Eq for UniqueRc<T, A> {}
4065
4066#[unstable(feature = "unique_rc_arc", issue = "112566")]
4067impl<T: ?Sized + Hash, A: Allocator> Hash for UniqueRc<T, A> {
4068    fn hash<H: Hasher>(&self, state: &mut H) {
4069        (**self).hash(state);
4070    }
4071}
4072
4073// Depends on A = Global
4074impl<T> UniqueRc<T> {
4075    /// Creates a new `UniqueRc`.
4076    ///
4077    /// Weak references to this `UniqueRc` can be created with [`UniqueRc::downgrade`]. Upgrading
4078    /// these weak references will fail before the `UniqueRc` has been converted into an [`Rc`].
4079    /// After converting the `UniqueRc` into an [`Rc`], any weak references created beforehand will
4080    /// point to the new [`Rc`].
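    ///
    /// # Examples
    ///
    /// A small sketch of the mutate-then-freeze pattern described above:
    ///
    /// ```
    /// #![feature(unique_rc_arc)]
    /// use std::rc::UniqueRc;
    ///
    /// let mut five = UniqueRc::new(5);
    /// *five += 1; // unique ownership permits direct mutation
    ///
    /// let rc = UniqueRc::into_rc(five);
    /// assert_eq!(*rc, 6);
    /// ```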
4081    #[cfg(not(no_global_oom_handling))]
4082    #[unstable(feature = "unique_rc_arc", issue = "112566")]
4083    pub fn new(value: T) -> Self {
4084        Self::new_in(value, Global)
4085    }
4086
4087    /// Maps the value in a `UniqueRc`, reusing the allocation if possible.
4088    ///
4089    /// `f` is called on the value in the `UniqueRc`, and the result is returned,
4090    /// also in a `UniqueRc`.
4091    ///
4092    /// Note: this is an associated function, which means that you have
4093    /// to call it as `UniqueRc::map(u, f)` instead of `u.map(f)`. This
4094    /// is so that there is no conflict with a method on the inner type.
4095    ///
4096    /// # Examples
4097    ///
4098    /// ```
4099    /// #![feature(smart_pointer_try_map)]
4100    /// #![feature(unique_rc_arc)]
4101    ///
4102    /// use std::rc::UniqueRc;
4103    ///
4104    /// let r = UniqueRc::new(7);
4105    /// let new = UniqueRc::map(r, |i| i + 7);
4106    /// assert_eq!(*new, 14);
4107    /// ```
4108    #[cfg(not(no_global_oom_handling))]
4109    #[unstable(feature = "smart_pointer_try_map", issue = "144419")]
4110    pub fn map<U>(this: Self, f: impl FnOnce(T) -> U) -> UniqueRc<U> {
4111        if size_of::<T>() == size_of::<U>()
4112            && align_of::<T>() == align_of::<U>()
4113            && UniqueRc::weak_count(&this) == 0
4114        {
4115            unsafe {
4116                let ptr = UniqueRc::into_raw(this);
4117                let value = ptr.read();
4118                let mut allocation = UniqueRc::from_raw(ptr.cast::<mem::MaybeUninit<U>>());
4119
4120                allocation.write(f(value));
4121                allocation.assume_init()
4122            }
4123        } else {
4124            UniqueRc::new(f(UniqueRc::unwrap(this)))
4125        }
4126    }
4127
4128    /// Attempts to map the value in a `UniqueRc`, reusing the allocation if possible.
4129    ///
4130    /// `f` is called on the value in the `UniqueRc`, and if the operation succeeds,
4131    /// the result is returned, also in a `UniqueRc`.
4132    ///
4133    /// Note: this is an associated function, which means that you have
4134    /// to call it as `UniqueRc::try_map(u, f)` instead of `u.try_map(f)`. This
4135    /// is so that there is no conflict with a method on the inner type.
4136    ///
4137    /// # Examples
4138    ///
4139    /// ```
4140    /// #![feature(smart_pointer_try_map)]
4141    /// #![feature(unique_rc_arc)]
4142    ///
4143    /// use std::rc::UniqueRc;
4144    ///
4145    /// let b = UniqueRc::new(7);
4146    /// let new = UniqueRc::try_map(b, u32::try_from).unwrap();
4147    /// assert_eq!(*new, 7);
4148    /// ```
4149    #[cfg(not(no_global_oom_handling))]
4150    #[unstable(feature = "smart_pointer_try_map", issue = "144419")]
4151    pub fn try_map<R>(
4152        this: Self,
4153        f: impl FnOnce(T) -> R,
4154    ) -> <R::Residual as Residual<UniqueRc<R::Output>>>::TryType
4155    where
4156        R: Try,
4157        R::Residual: Residual<UniqueRc<R::Output>>,
4158    {
4159        if size_of::<T>() == size_of::<R::Output>()
4160            && align_of::<T>() == align_of::<R::Output>()
4161            && UniqueRc::weak_count(&this) == 0
4162        {
4163            unsafe {
4164                let ptr = UniqueRc::into_raw(this);
4165                let value = ptr.read();
4166                let mut allocation = UniqueRc::from_raw(ptr.cast::<mem::MaybeUninit<R::Output>>());
4167
4168                allocation.write(f(value)?);
4169                try { allocation.assume_init() }
4170            }
4171        } else {
4172            try { UniqueRc::new(f(UniqueRc::unwrap(this))?) }
4173        }
4174    }
4175
4176    #[cfg(not(no_global_oom_handling))]
4177    fn unwrap(this: Self) -> T {
4178        let this = ManuallyDrop::new(this);
4179        let val: T = unsafe { ptr::read(&**this) };
4180
4181        let _weak = Weak { ptr: this.ptr, alloc: Global };
4182
4183        val
4184    }
4185}
4186
4187impl<T: ?Sized> UniqueRc<T> {
4188    #[cfg(not(no_global_oom_handling))]
4189    unsafe fn from_raw(ptr: *const T) -> Self {
4190        let offset = unsafe { data_offset(ptr) };
4191
4192        // Reverse the offset to find the original RcInner.
4193        let rc_ptr = unsafe { ptr.byte_sub(offset) as *mut RcInner<T> };
4194
4195        Self {
4196            ptr: unsafe { NonNull::new_unchecked(rc_ptr) },
4197            _marker: PhantomData,
4198            _marker2: PhantomData,
4199            alloc: Global,
4200        }
4201    }
4202
4203    #[cfg(not(no_global_oom_handling))]
4204    fn into_raw(this: Self) -> *const T {
4205        let this = ManuallyDrop::new(this);
4206        Self::as_ptr(&*this)
4207    }
4208}
4209
4210impl<T, A: Allocator> UniqueRc<T, A> {
4211    /// Creates a new `UniqueRc` in the provided allocator.
4212    ///
4213    /// Weak references to this `UniqueRc` can be created with [`UniqueRc::downgrade`]. Upgrading
4214    /// these weak references will fail before the `UniqueRc` has been converted into an [`Rc`].
4215    /// After converting the `UniqueRc` into an [`Rc`], any weak references created beforehand will
4216    /// point to the new [`Rc`].
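    ///
    /// # Examples
    ///
    /// A brief sketch using the `System` allocator:
    ///
    /// ```
    /// #![feature(allocator_api)]
    /// #![feature(unique_rc_arc)]
    /// use std::alloc::System;
    /// use std::rc::UniqueRc;
    ///
    /// let five = UniqueRc::new_in(5, System);
    /// assert_eq!(*five, 5);
    /// ```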
4217    #[cfg(not(no_global_oom_handling))]
4218    #[unstable(feature = "unique_rc_arc", issue = "112566")]
4219    pub fn new_in(value: T, alloc: A) -> Self {
4220        let (ptr, alloc) = Box::into_unique(Box::new_in(
4221            RcInner {
4222                strong: Cell::new(0),
4223                // keep one weak reference so that, if all the weak pointers that are created
4224                // are dropped, the UniqueRc still stays valid.
4225                weak: Cell::new(1),
4226                value,
4227            },
4228            alloc,
4229        ));
4230        Self { ptr: ptr.into(), _marker: PhantomData, _marker2: PhantomData, alloc }
4231    }
4232}
4233
4234impl<T: ?Sized, A: Allocator> UniqueRc<T, A> {
4235    /// Converts the `UniqueRc` into a regular [`Rc`].
4236    ///
4237    /// This consumes the `UniqueRc` and returns a regular [`Rc`] that contains the value
4238    /// held by the `UniqueRc` that is passed to `into_rc`.
4239    ///
4240    /// Any weak references created before this method is called can now be upgraded to strong
4241    /// references.
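    ///
    /// # Examples
    ///
    /// A minimal sketch: a weak reference taken beforehand becomes upgradable
    /// after the conversion.
    ///
    /// ```
    /// #![feature(unique_rc_arc)]
    /// use std::rc::UniqueRc;
    ///
    /// let unique = UniqueRc::new(5);
    /// let weak = UniqueRc::downgrade(&unique);
    /// assert!(weak.upgrade().is_none());
    ///
    /// let rc = UniqueRc::into_rc(unique);
    /// assert_eq!(*weak.upgrade().unwrap(), 5);
    /// ```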
4242    #[unstable(feature = "unique_rc_arc", issue = "112566")]
4243    pub fn into_rc(this: Self) -> Rc<T, A> {
4244        let mut this = ManuallyDrop::new(this);
4245
4246        // Move the allocator out.
4247        // SAFETY: `this.alloc` will not be accessed again, nor dropped because it is in
4248        // a `ManuallyDrop`.
4249        let alloc: A = unsafe { ptr::read(&this.alloc) };
4250
4251        // SAFETY: This pointer was allocated at creation time so we know it is valid.
4252        unsafe {
4253            // Convert our weak reference into a strong reference
4254            this.ptr.as_mut().strong.set(1);
4255            Rc::from_inner_in(this.ptr, alloc)
4256        }
4257    }
4258
4259    #[cfg(not(no_global_oom_handling))]
4260    fn weak_count(this: &Self) -> usize {
4261        this.inner().weak() - 1
4262    }
4263
4264    #[cfg(not(no_global_oom_handling))]
4265    fn inner(&self) -> &RcInner<T> {
4266        // SAFETY: while this UniqueRc is alive we're guaranteed that the inner pointer is valid.
4267        unsafe { self.ptr.as_ref() }
4268    }
4269
4270    #[cfg(not(no_global_oom_handling))]
4271    fn as_ptr(this: &Self) -> *const T {
4272        let ptr: *mut RcInner<T> = NonNull::as_ptr(this.ptr);
4273
4274        // SAFETY: This cannot go through Deref::deref or UniqueRc::inner because
4275        // this is required to retain raw/mut provenance such that e.g. `get_mut` can
4276        // write through the pointer after the Rc is recovered through `from_raw`.
4277        unsafe { &raw mut (*ptr).value }
4278    }
4279
4280    #[inline]
4281    #[cfg(not(no_global_oom_handling))]
4282    fn into_inner_with_allocator(this: Self) -> (NonNull<RcInner<T>>, A) {
4283        let this = mem::ManuallyDrop::new(this);
4284        (this.ptr, unsafe { ptr::read(&this.alloc) })
4285    }
4286
4287    #[inline]
4288    #[cfg(not(no_global_oom_handling))]
4289    unsafe fn from_inner_in(ptr: NonNull<RcInner<T>>, alloc: A) -> Self {
4290        Self { ptr, _marker: PhantomData, _marker2: PhantomData, alloc }
4291    }
4292}
4293
4294impl<T: ?Sized, A: Allocator + Clone> UniqueRc<T, A> {
4295    /// Creates a new weak reference to the `UniqueRc`.
4296    ///
4297    /// Attempting to upgrade this weak reference will fail before the `UniqueRc` has been converted
4298    /// into an [`Rc`] using [`UniqueRc::into_rc`].
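    ///
    /// # Examples
    ///
    /// A brief sketch of the pre-conversion behavior:
    ///
    /// ```
    /// #![feature(unique_rc_arc)]
    /// use std::rc::UniqueRc;
    ///
    /// let unique = UniqueRc::new("hello".to_owned());
    /// let weak = UniqueRc::downgrade(&unique);
    ///
    /// // The `UniqueRc` has not been converted into an `Rc` yet.
    /// assert!(weak.upgrade().is_none());
    /// ```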
4299    #[unstable(feature = "unique_rc_arc", issue = "112566")]
4300    pub fn downgrade(this: &Self) -> Weak<T, A> {
4301        // SAFETY: This pointer was allocated at creation time and we guarantee that we only have
4302        // one strong reference before converting to a regular Rc.
4303        unsafe {
4304            this.ptr.as_ref().inc_weak();
4305        }
4306        Weak { ptr: this.ptr, alloc: this.alloc.clone() }
4307    }
4308}
4309
4310#[cfg(not(no_global_oom_handling))]
4311impl<T, A: Allocator> UniqueRc<mem::MaybeUninit<T>, A> {
4312    unsafe fn assume_init(self) -> UniqueRc<T, A> {
4313        let (ptr, alloc) = UniqueRc::into_inner_with_allocator(self);
4314        unsafe { UniqueRc::from_inner_in(ptr.cast(), alloc) }
4315    }
4316}
4317
4318#[unstable(feature = "unique_rc_arc", issue = "112566")]
4319impl<T: ?Sized, A: Allocator> Deref for UniqueRc<T, A> {
4320    type Target = T;
4321
4322    fn deref(&self) -> &T {
4323        // SAFETY: This pointer was allocated at creation time so we know it is valid.
4324        unsafe { &self.ptr.as_ref().value }
4325    }
4326}
4327
4328#[unstable(feature = "unique_rc_arc", issue = "112566")]
4329impl<T: ?Sized, A: Allocator> DerefMut for UniqueRc<T, A> {
4330    fn deref_mut(&mut self) -> &mut T {
4331        // SAFETY: This pointer was allocated at creation time so we know it is valid. We know we
4332        // have unique ownership and therefore it's safe to make a mutable reference because
4333        // `UniqueRc` owns the only strong reference to itself.
4334        unsafe { &mut (*self.ptr.as_ptr()).value }
4335    }
4336}
4337
4338#[unstable(feature = "unique_rc_arc", issue = "112566")]
4339unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for UniqueRc<T, A> {
4340    fn drop(&mut self) {
4341        unsafe {
4342            // destroy the contained object
4343            drop_in_place(DerefMut::deref_mut(self));
4344
4345            // remove the implicit "strong weak" pointer now that we've destroyed the contents.
4346            self.ptr.as_ref().dec_weak();
4347
4348            if self.ptr.as_ref().weak() == 0 {
4349                self.alloc.deallocate(self.ptr.cast(), Layout::for_value_raw(self.ptr.as_ptr()));
4350            }
4351        }
4352    }
4353}
4354
4355/// A unique owning pointer to a [`RcInner`] **that does not imply the contents are initialized,**
4356/// but will deallocate it (without dropping the value) when dropped.
4357///
4358/// This is a helper for [`Rc::make_mut()`] to ensure correct cleanup on panic.
4359/// It is nearly a duplicate of `UniqueRc<MaybeUninit<T>, A>` except that it allows `T: !Sized`,
4360/// which `MaybeUninit` does not.
4361#[cfg(not(no_global_oom_handling))]
4362struct UniqueRcUninit<T: ?Sized, A: Allocator> {
4363    ptr: NonNull<RcInner<T>>,
4364    layout_for_value: Layout,
4365    alloc: Option<A>,
4366}
4367
4368#[cfg(not(no_global_oom_handling))]
4369impl<T: ?Sized, A: Allocator> UniqueRcUninit<T, A> {
4370    /// Allocates an `RcInner` with layout suitable to contain `for_value` or a clone of it.
4371    fn new(for_value: &T, alloc: A) -> UniqueRcUninit<T, A> {
4372        let layout = Layout::for_value(for_value);
4373        let ptr = unsafe {
4374            Rc::allocate_for_layout(
4375                layout,
4376                |layout_for_rc_inner| alloc.allocate(layout_for_rc_inner),
4377                |mem| mem.with_metadata_of(ptr::from_ref(for_value) as *const RcInner<T>),
4378            )
4379        };
4380        Self { ptr: NonNull::new(ptr).unwrap(), layout_for_value: layout, alloc: Some(alloc) }
4381    }
4382
4383    /// Returns the pointer to be written into to initialize the [`Rc`].
4384    fn data_ptr(&mut self) -> *mut T {
4385        let offset = data_offset_align(self.layout_for_value.align());
4386        unsafe { self.ptr.as_ptr().byte_add(offset) as *mut T }
4387    }
4388
4389    /// Upgrade this into a normal [`Rc`].
4390    ///
4391    /// # Safety
4392    ///
4393    /// The data must have been initialized (by writing to [`Self::data_ptr()`]).
4394    unsafe fn into_rc(self) -> Rc<T, A> {
4395        let mut this = ManuallyDrop::new(self);
4396        let ptr = this.ptr;
4397        let alloc = this.alloc.take().unwrap();
4398
4399        // SAFETY: The pointer is valid as per `UniqueRcUninit::new`, and the caller is responsible
4400        // for having initialized the data.
4401        unsafe { Rc::from_ptr_in(ptr.as_ptr(), alloc) }
4402    }
4403}
4404
4405#[cfg(not(no_global_oom_handling))]
4406impl<T: ?Sized, A: Allocator> Drop for UniqueRcUninit<T, A> {
4407    fn drop(&mut self) {
4408        // SAFETY:
4409        // * new() produced a pointer safe to deallocate.
4410        // * We own the pointer unless into_rc() was called, which forgets us.
4411        unsafe {
4412            self.alloc.take().unwrap().deallocate(
4413                self.ptr.cast(),
4414                rc_inner_layout_for_value_layout(self.layout_for_value),
4415            );
4416        }
4417    }
4418}
4419
4420#[unstable(feature = "allocator_api", issue = "32838")]
4421unsafe impl<T: ?Sized + Allocator, A: Allocator> Allocator for Rc<T, A> {
4422    #[inline]
4423    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
4424        (**self).allocate(layout)
4425    }
4426
4427    #[inline]
4428    fn allocate_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
4429        (**self).allocate_zeroed(layout)
4430    }
4431
4432    #[inline]
4433    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
4434        // SAFETY: the safety contract must be upheld by the caller
4435        unsafe { (**self).deallocate(ptr, layout) }
4436    }
4437
4438    #[inline]
4439    unsafe fn grow(
4440        &self,
4441        ptr: NonNull<u8>,
4442        old_layout: Layout,
4443        new_layout: Layout,
4444    ) -> Result<NonNull<[u8]>, AllocError> {
4445        // SAFETY: the safety contract must be upheld by the caller
4446        unsafe { (**self).grow(ptr, old_layout, new_layout) }
4447    }
4448
4449    #[inline]
4450    unsafe fn grow_zeroed(
4451        &self,
4452        ptr: NonNull<u8>,
4453        old_layout: Layout,
4454        new_layout: Layout,
4455    ) -> Result<NonNull<[u8]>, AllocError> {
4456        // SAFETY: the safety contract must be upheld by the caller
4457        unsafe { (**self).grow_zeroed(ptr, old_layout, new_layout) }
4458    }
4459
4460    #[inline]
4461    unsafe fn shrink(
4462        &self,
4463        ptr: NonNull<u8>,
4464        old_layout: Layout,
4465        new_layout: Layout,
4466    ) -> Result<NonNull<[u8]>, AllocError> {
4467        // SAFETY: the safety contract must be upheld by the caller
4468        unsafe { (**self).shrink(ptr, old_layout, new_layout) }
4469    }
4470}