
alloc/rc.rs

//! Single-threaded reference-counting pointers. 'Rc' stands for 'Reference
//! Counted'.
//!
//! The type [`Rc<T>`][`Rc`] provides shared ownership of a value of type `T`,
//! allocated in the heap. Invoking [`clone`][clone] on [`Rc`] produces a new
//! pointer to the same allocation in the heap. When the last [`Rc`] pointer to a
//! given allocation is destroyed, the value stored in that allocation (often
//! referred to as "inner value") is also dropped.
//!
//! Shared references in Rust disallow mutation by default, and [`Rc`]
//! is no exception: you cannot generally obtain a mutable reference to
//! something inside an [`Rc`]. If you need mutability, put a [`Cell`]
//! or [`RefCell`] inside the [`Rc`]; see [an example of mutability
//! inside an `Rc`][mutability].
//!
//! [`Rc`] uses non-atomic reference counting. This means that overhead is very
//! low, but an [`Rc`] cannot be sent between threads, and consequently [`Rc`]
//! does not implement [`Send`]. As a result, the Rust compiler
//! will check *at compile time* that you are not sending [`Rc`]s between
//! threads. If you need multi-threaded, atomic reference counting, use
//! [`sync::Arc`][arc].
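//!
//! For example, the compiler rejects a program that tries to send an [`Rc`]
//! to another thread:
//!
//! ```compile_fail,E0277
//! use std::rc::Rc;
//! use std::thread;
//!
//! let rc = Rc::new(5);
//! // `Rc<i32>` is not `Send`, so it cannot be moved into the spawned thread.
//! thread::spawn(move || println!("{rc}"));
//! ```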
//!
//! The [`downgrade`][downgrade] method can be used to create a non-owning
//! [`Weak`] pointer. A [`Weak`] pointer can be [`upgrade`][upgrade]d
//! to an [`Rc`], but this will return [`None`] if the value stored in the allocation has
//! already been dropped. In other words, `Weak` pointers do not keep the value
//! inside the allocation alive; however, they *do* keep the allocation
//! (the backing store for the inner value) alive.
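//!
//! For example, a [`Weak`] pointer can be upgraded only while the inner value
//! is still alive:
//!
//! ```
//! use std::rc::Rc;
//!
//! let strong = Rc::new(5);
//! let weak = Rc::downgrade(&strong);
//! assert_eq!(weak.upgrade().map(|rc| *rc), Some(5));
//! drop(strong);
//! // The inner value has been dropped, so the `Weak` can no longer be upgraded.
//! assert!(weak.upgrade().is_none());
//! ```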
//!
//! A cycle between [`Rc`] pointers will never be deallocated. For this reason,
//! [`Weak`] is used to break cycles. For example, a tree could have strong
//! [`Rc`] pointers from parent nodes to children, and [`Weak`] pointers from
//! children back to their parents.
//!
//! `Rc<T>` automatically dereferences to `T` (via the [`Deref`] trait),
//! so you can call `T`'s methods on a value of type [`Rc<T>`][`Rc`]. To avoid name
//! clashes with `T`'s methods, the methods of [`Rc<T>`][`Rc`] itself are associated
//! functions, called using [fully qualified syntax]:
//!
//! ```
//! use std::rc::Rc;
//!
//! let my_rc = Rc::new(());
//! let my_weak = Rc::downgrade(&my_rc);
//! ```
//!
//! `Rc<T>`'s implementations of traits like `Clone` may also be called using
//! fully qualified syntax. Some people prefer to use fully qualified syntax,
//! while others prefer using method-call syntax.
//!
//! ```
//! use std::rc::Rc;
//!
//! let rc = Rc::new(());
//! // Method-call syntax
//! let rc2 = rc.clone();
//! // Fully qualified syntax
//! let rc3 = Rc::clone(&rc);
//! ```
//!
//! [`Weak<T>`][`Weak`] does not auto-dereference to `T`, because the inner value may have
//! already been dropped.
//!
//! # Cloning references
//!
//! Creating a new reference to the same allocation as an existing reference counted pointer
//! is done using the `Clone` trait implemented for [`Rc<T>`][`Rc`] and [`Weak<T>`][`Weak`].
//!
//! ```
//! use std::rc::Rc;
//!
//! let foo = Rc::new(vec![1.0, 2.0, 3.0]);
//! // The two syntaxes below are equivalent.
//! let a = foo.clone();
//! let b = Rc::clone(&foo);
//! // a and b both point to the same memory location as foo.
//! ```
//!
//! The `Rc::clone(&from)` syntax is the most idiomatic because it conveys more explicitly
//! the meaning of the code. In the example above, this syntax makes it easier to see that
//! this code is creating a new reference rather than copying the whole content of `foo`.
//!
//! # Examples
//!
//! Consider a scenario where a set of `Gadget`s are owned by a given `Owner`.
//! We want to have our `Gadget`s point to their `Owner`. We can't do this with
//! unique ownership, because more than one gadget may belong to the same
//! `Owner`. [`Rc`] allows us to share an `Owner` between multiple `Gadget`s,
//! and have the `Owner` remain allocated as long as any `Gadget` points at it.
//!
//! ```
//! use std::rc::Rc;
//!
//! struct Owner {
//!     name: String,
//!     // ...other fields
//! }
//!
//! struct Gadget {
//!     id: i32,
//!     owner: Rc<Owner>,
//!     // ...other fields
//! }
//!
//! fn main() {
//!     // Create a reference-counted `Owner`.
//!     let gadget_owner: Rc<Owner> = Rc::new(
//!         Owner {
//!             name: "Gadget Man".to_string(),
//!         }
//!     );
//!
//!     // Create `Gadget`s belonging to `gadget_owner`. Cloning the `Rc<Owner>`
//!     // gives us a new pointer to the same `Owner` allocation, incrementing
//!     // the reference count in the process.
//!     let gadget1 = Gadget {
//!         id: 1,
//!         owner: Rc::clone(&gadget_owner),
//!     };
//!     let gadget2 = Gadget {
//!         id: 2,
//!         owner: Rc::clone(&gadget_owner),
//!     };
//!
//!     // Dispose of our local variable `gadget_owner`.
//!     drop(gadget_owner);
//!
//!     // Despite dropping `gadget_owner`, we're still able to print out the name
//!     // of the `Owner` of the `Gadget`s. This is because we've only dropped a
//!     // single `Rc<Owner>`, not the `Owner` it points to. As long as there are
//!     // other `Rc<Owner>` pointing at the same `Owner` allocation, it will remain
//!     // live. The field projection `gadget1.owner.name` works because
//!     // `Rc<Owner>` automatically dereferences to `Owner`.
//!     println!("Gadget {} owned by {}", gadget1.id, gadget1.owner.name);
//!     println!("Gadget {} owned by {}", gadget2.id, gadget2.owner.name);
//!
//!     // At the end of the function, `gadget1` and `gadget2` are destroyed, and
//!     // with them the last counted references to our `Owner`. Gadget Man now
//!     // gets destroyed as well.
//! }
//! ```
//!
//! If our requirements change, and we also need to be able to traverse from
//! `Owner` to `Gadget`, we will run into problems. An [`Rc`] pointer from `Owner`
//! to `Gadget` introduces a cycle. This means that their
//! reference counts can never reach 0, and the allocation will never be destroyed:
//! a memory leak. In order to get around this, we can use [`Weak`]
//! pointers.
//!
//! Rust actually makes it somewhat difficult to produce this loop in the first
//! place. In order to end up with two values that point at each other, one of
//! them needs to be mutable. This is difficult because [`Rc`] enforces
//! memory safety by only giving out shared references to the value it wraps,
//! and these don't allow direct mutation. We need to wrap the part of the
//! value we wish to mutate in a [`RefCell`], which provides *interior
//! mutability*: a method to achieve mutability through a shared reference.
//! [`RefCell`] enforces Rust's borrowing rules at runtime.
//!
//! ```
//! use std::rc::Rc;
//! use std::rc::Weak;
//! use std::cell::RefCell;
//!
//! struct Owner {
//!     name: String,
//!     gadgets: RefCell<Vec<Weak<Gadget>>>,
//!     // ...other fields
//! }
//!
//! struct Gadget {
//!     id: i32,
//!     owner: Rc<Owner>,
//!     // ...other fields
//! }
//!
//! fn main() {
//!     // Create a reference-counted `Owner`. Note that we've put the `Owner`'s
//!     // vector of `Gadget`s inside a `RefCell` so that we can mutate it through
//!     // a shared reference.
//!     let gadget_owner: Rc<Owner> = Rc::new(
//!         Owner {
//!             name: "Gadget Man".to_string(),
//!             gadgets: RefCell::new(vec![]),
//!         }
//!     );
//!
//!     // Create `Gadget`s belonging to `gadget_owner`, as before.
//!     let gadget1 = Rc::new(
//!         Gadget {
//!             id: 1,
//!             owner: Rc::clone(&gadget_owner),
//!         }
//!     );
//!     let gadget2 = Rc::new(
//!         Gadget {
//!             id: 2,
//!             owner: Rc::clone(&gadget_owner),
//!         }
//!     );
//!
//!     // Add the `Gadget`s to their `Owner`.
//!     {
//!         let mut gadgets = gadget_owner.gadgets.borrow_mut();
//!         gadgets.push(Rc::downgrade(&gadget1));
//!         gadgets.push(Rc::downgrade(&gadget2));
//!
//!         // `RefCell` dynamic borrow ends here.
//!     }
//!
//!     // Iterate over our `Gadget`s, printing their details out.
//!     for gadget_weak in gadget_owner.gadgets.borrow().iter() {
//!
//!         // `gadget_weak` is a `Weak<Gadget>`. Since `Weak` pointers can't
//!         // guarantee the allocation still exists, we need to call
//!         // `upgrade`, which returns an `Option<Rc<Gadget>>`.
//!         //
//!         // In this case we know the allocation still exists, so we simply
//!         // `unwrap` the `Option`. In a more complicated program, you might
//!         // need graceful error handling for a `None` result.
//!
//!         let gadget = gadget_weak.upgrade().unwrap();
//!         println!("Gadget {} owned by {}", gadget.id, gadget.owner.name);
//!     }
//!
//!     // At the end of the function, `gadget_owner`, `gadget1`, and `gadget2`
//!     // are destroyed. There are now no strong (`Rc`) pointers to the
//!     // gadgets, so they are destroyed. This zeroes the reference count on
//!     // Gadget Man, so he gets destroyed as well.
//! }
//! ```
//!
//! [clone]: Clone::clone
//! [`Cell`]: core::cell::Cell
//! [`RefCell`]: core::cell::RefCell
//! [arc]: crate::sync::Arc
//! [`Deref`]: core::ops::Deref
//! [downgrade]: Rc::downgrade
//! [upgrade]: Weak::upgrade
//! [mutability]: core::cell#introducing-mutability-inside-of-something-immutable
//! [fully qualified syntax]: https://doc.rust-lang.org/book/ch19-03-advanced-traits.html#fully-qualified-syntax-for-disambiguation-calling-methods-with-the-same-name

#![stable(feature = "rust1", since = "1.0.0")]

use core::any::Any;
use core::cell::Cell;
#[cfg(not(no_global_oom_handling))]
use core::clone::CloneToUninit;
use core::clone::UseCloned;
use core::cmp::Ordering;
use core::hash::{Hash, Hasher};
use core::intrinsics::abort;
#[cfg(not(no_global_oom_handling))]
use core::iter;
use core::marker::{PhantomData, Unsize};
use core::mem::{self, ManuallyDrop, align_of_val_raw};
use core::num::NonZeroUsize;
use core::ops::{CoerceUnsized, Deref, DerefMut, DerefPure, DispatchFromDyn, LegacyReceiver};
use core::panic::{RefUnwindSafe, UnwindSafe};
#[cfg(not(no_global_oom_handling))]
use core::pin::Pin;
use core::pin::PinCoerceUnsized;
use core::ptr::{self, NonNull, drop_in_place};
#[cfg(not(no_global_oom_handling))]
use core::slice::from_raw_parts_mut;
use core::{borrow, fmt, hint};

#[cfg(not(no_global_oom_handling))]
use crate::alloc::handle_alloc_error;
use crate::alloc::{AllocError, Allocator, Global, Layout};
use crate::borrow::{Cow, ToOwned};
use crate::boxed::Box;
#[cfg(not(no_global_oom_handling))]
use crate::string::String;
#[cfg(not(no_global_oom_handling))]
use crate::vec::Vec;

// This is repr(C) to future-proof against possible field-reordering, which
// would interfere with otherwise safe [into|from]_raw() of transmutable
// inner types.
#[repr(C)]
struct RcInner<T: ?Sized> {
    strong: Cell<usize>,
    weak: Cell<usize>,
    value: T,
}

/// Calculate layout for `RcInner<T>` using the inner value's layout
fn rc_inner_layout_for_value_layout(layout: Layout) -> Layout {
    // Calculate layout using the given value layout.
    // Previously, layout was calculated on the expression
    // `&*(ptr as *const RcInner<T>)`, but this created a misaligned
    // reference (see #54908).
    Layout::new::<RcInner<()>>().extend(layout).unwrap().0.pad_to_align()
}

/// A single-threaded reference-counting pointer. 'Rc' stands for 'Reference
/// Counted'.
///
/// See the [module-level documentation](./index.html) for more details.
///
/// The inherent methods of `Rc` are all associated functions, which means
/// that you have to call them as e.g., [`Rc::get_mut(&mut value)`][get_mut] instead of
/// `value.get_mut()`. This avoids conflicts with methods of the inner type `T`.
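///
/// # Examples
///
/// A minimal sketch of the calling convention:
///
/// ```
/// use std::rc::Rc;
///
/// let mut value = Rc::new(5);
/// // Associated function, called through the type rather than the value:
/// *Rc::get_mut(&mut value).unwrap() += 1;
/// assert_eq!(*value, 6);
/// ```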
///
/// [get_mut]: Rc::get_mut
#[doc(search_unbox)]
#[rustc_diagnostic_item = "Rc"]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_insignificant_dtor]
pub struct Rc<
    T: ?Sized,
    #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
> {
    ptr: NonNull<RcInner<T>>,
    phantom: PhantomData<RcInner<T>>,
    alloc: A,
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized, A: Allocator> !Send for Rc<T, A> {}

// Note that this negative impl isn't strictly necessary for correctness,
// as `Rc` transitively contains a `Cell`, which is itself `!Sync`.
// However, given how important `Rc`'s `!Sync`-ness is,
// having an explicit negative impl is nice for documentation purposes
// and results in nicer error messages.
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized, A: Allocator> !Sync for Rc<T, A> {}

#[stable(feature = "catch_unwind", since = "1.9.0")]
impl<T: RefUnwindSafe + ?Sized, A: Allocator + UnwindSafe> UnwindSafe for Rc<T, A> {}
#[stable(feature = "rc_ref_unwind_safe", since = "1.58.0")]
impl<T: RefUnwindSafe + ?Sized, A: Allocator + UnwindSafe> RefUnwindSafe for Rc<T, A> {}

#[unstable(feature = "coerce_unsized", issue = "18598")]
impl<T: ?Sized + Unsize<U>, U: ?Sized, A: Allocator> CoerceUnsized<Rc<U, A>> for Rc<T, A> {}

#[unstable(feature = "dispatch_from_dyn", issue = "none")]
impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Rc<U>> for Rc<T> {}

impl<T: ?Sized> Rc<T> {
    #[inline]
    unsafe fn from_inner(ptr: NonNull<RcInner<T>>) -> Self {
        unsafe { Self::from_inner_in(ptr, Global) }
    }

    #[inline]
    unsafe fn from_ptr(ptr: *mut RcInner<T>) -> Self {
        unsafe { Self::from_inner(NonNull::new_unchecked(ptr)) }
    }
}

impl<T: ?Sized, A: Allocator> Rc<T, A> {
    #[inline(always)]
    fn inner(&self) -> &RcInner<T> {
        // This unsafety is ok because while this Rc is alive we're guaranteed
        // that the inner pointer is valid.
        unsafe { self.ptr.as_ref() }
    }

    #[inline]
    fn into_inner_with_allocator(this: Self) -> (NonNull<RcInner<T>>, A) {
        let this = mem::ManuallyDrop::new(this);
        (this.ptr, unsafe { ptr::read(&this.alloc) })
    }

    #[inline]
    unsafe fn from_inner_in(ptr: NonNull<RcInner<T>>, alloc: A) -> Self {
        Self { ptr, phantom: PhantomData, alloc }
    }

    #[inline]
    unsafe fn from_ptr_in(ptr: *mut RcInner<T>, alloc: A) -> Self {
        unsafe { Self::from_inner_in(NonNull::new_unchecked(ptr), alloc) }
    }

    // Non-inlined part of `drop`.
    #[inline(never)]
    unsafe fn drop_slow(&mut self) {
        // Reconstruct the "strong weak" pointer and drop it when this
        // variable goes out of scope. This ensures that the memory is
        // deallocated even if the destructor of `T` panics.
        let _weak = Weak { ptr: self.ptr, alloc: &self.alloc };

        // Destroy the contained object.
        // We cannot use `get_mut_unchecked` here, because `self.alloc` is borrowed.
        unsafe {
            ptr::drop_in_place(&mut (*self.ptr.as_ptr()).value);
        }
    }
}

impl<T> Rc<T> {
    /// Constructs a new `Rc<T>`.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let five = Rc::new(5);
    /// ```
    #[cfg(not(no_global_oom_handling))]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn new(value: T) -> Rc<T> {
        // There is an implicit weak pointer owned by all the strong
        // pointers, which ensures that the weak destructor never frees
        // the allocation while the strong destructor is running, even
        // if the weak pointer is stored inside the strong one.
        unsafe {
            Self::from_inner(
                Box::leak(Box::new(RcInner { strong: Cell::new(1), weak: Cell::new(1), value }))
                    .into(),
            )
        }
    }

    /// Constructs a new `Rc<T>` while giving you a `Weak<T>` to the allocation,
    /// to allow you to construct a `T` which holds a weak pointer to itself.
    ///
    /// Generally, a structure circularly referencing itself, either directly or
    /// indirectly, should not hold a strong reference to itself to prevent a memory leak.
    /// Using this function, you get access to the weak pointer during the
    /// initialization of `T`, before the `Rc<T>` is created, such that you can
    /// clone and store it inside the `T`.
    ///
    /// `new_cyclic` first allocates the managed allocation for the `Rc<T>`,
    /// then calls your closure, giving it a `Weak<T>` to this allocation,
    /// and only afterwards completes the construction of the `Rc<T>` by placing
    /// the `T` returned from your closure into the allocation.
    ///
    /// Since the new `Rc<T>` is not fully-constructed until `Rc<T>::new_cyclic`
    /// returns, calling [`upgrade`] on the weak reference inside your closure will
    /// fail and result in a `None` value.
    ///
    /// # Panics
    ///
    /// If `data_fn` panics, the panic is propagated to the caller, and the
    /// temporary [`Weak<T>`] is dropped normally.
    ///
    /// # Examples
    ///
    /// ```
    /// # #![allow(dead_code)]
    /// use std::rc::{Rc, Weak};
    ///
    /// struct Gadget {
    ///     me: Weak<Gadget>,
    /// }
    ///
    /// impl Gadget {
    ///     /// Constructs a reference counted Gadget.
    ///     fn new() -> Rc<Self> {
    ///         // `me` is a `Weak<Gadget>` pointing at the new allocation of the
    ///         // `Rc` we're constructing.
    ///         Rc::new_cyclic(|me| {
    ///             // Create the actual struct here.
    ///             Gadget { me: me.clone() }
    ///         })
    ///     }
    ///
    ///     /// Returns a reference counted pointer to Self.
    ///     fn me(&self) -> Rc<Self> {
    ///         self.me.upgrade().unwrap()
    ///     }
    /// }
    /// ```
    /// [`upgrade`]: Weak::upgrade
    #[cfg(not(no_global_oom_handling))]
    #[stable(feature = "arc_new_cyclic", since = "1.60.0")]
    pub fn new_cyclic<F>(data_fn: F) -> Rc<T>
    where
        F: FnOnce(&Weak<T>) -> T,
    {
        Self::new_cyclic_in(data_fn, Global)
    }

    /// Constructs a new `Rc` with uninitialized contents.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let mut five = Rc::<u32>::new_uninit();
    ///
    /// // Deferred initialization:
    /// Rc::get_mut(&mut five).unwrap().write(5);
    ///
    /// let five = unsafe { five.assume_init() };
    ///
    /// assert_eq!(*five, 5)
    /// ```
    #[cfg(not(no_global_oom_handling))]
    #[stable(feature = "new_uninit", since = "1.82.0")]
    #[must_use]
    pub fn new_uninit() -> Rc<mem::MaybeUninit<T>> {
        unsafe {
            Rc::from_ptr(Rc::allocate_for_layout(
                Layout::new::<T>(),
                |layout| Global.allocate(layout),
                <*mut u8>::cast,
            ))
        }
    }

    /// Constructs a new `Rc` with uninitialized contents, with the memory
    /// being filled with `0` bytes.
    ///
    /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
    /// incorrect usage of this method.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let zero = Rc::<u32>::new_zeroed();
    /// let zero = unsafe { zero.assume_init() };
    ///
    /// assert_eq!(*zero, 0)
    /// ```
    ///
    /// [zeroed]: mem::MaybeUninit::zeroed
    #[cfg(not(no_global_oom_handling))]
    #[stable(feature = "new_zeroed_alloc", since = "CURRENT_RUSTC_VERSION")]
    #[must_use]
    pub fn new_zeroed() -> Rc<mem::MaybeUninit<T>> {
        unsafe {
            Rc::from_ptr(Rc::allocate_for_layout(
                Layout::new::<T>(),
                |layout| Global.allocate_zeroed(layout),
                <*mut u8>::cast,
            ))
        }
    }

    /// Constructs a new `Rc<T>`, returning an error if the allocation fails.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    /// use std::rc::Rc;
    ///
    /// let five = Rc::try_new(5);
    /// # Ok::<(), std::alloc::AllocError>(())
    /// ```
    #[unstable(feature = "allocator_api", issue = "32838")]
    pub fn try_new(value: T) -> Result<Rc<T>, AllocError> {
        // There is an implicit weak pointer owned by all the strong
        // pointers, which ensures that the weak destructor never frees
        // the allocation while the strong destructor is running, even
        // if the weak pointer is stored inside the strong one.
        unsafe {
            Ok(Self::from_inner(
                Box::leak(Box::try_new(RcInner {
                    strong: Cell::new(1),
                    weak: Cell::new(1),
                    value,
                })?)
                .into(),
            ))
        }
    }

    /// Constructs a new `Rc` with uninitialized contents, returning an error if the allocation fails.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::rc::Rc;
    ///
    /// let mut five = Rc::<u32>::try_new_uninit()?;
    ///
    /// // Deferred initialization:
    /// Rc::get_mut(&mut five).unwrap().write(5);
    ///
    /// let five = unsafe { five.assume_init() };
    ///
    /// assert_eq!(*five, 5);
    /// # Ok::<(), std::alloc::AllocError>(())
    /// ```
    #[unstable(feature = "allocator_api", issue = "32838")]
    pub fn try_new_uninit() -> Result<Rc<mem::MaybeUninit<T>>, AllocError> {
        unsafe {
            Ok(Rc::from_ptr(Rc::try_allocate_for_layout(
                Layout::new::<T>(),
                |layout| Global.allocate(layout),
                <*mut u8>::cast,
            )?))
        }
    }

    /// Constructs a new `Rc` with uninitialized contents, with the memory
    /// being filled with `0` bytes, returning an error if the allocation fails.
    ///
    /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
    /// incorrect usage of this method.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::rc::Rc;
    ///
    /// let zero = Rc::<u32>::try_new_zeroed()?;
    /// let zero = unsafe { zero.assume_init() };
    ///
    /// assert_eq!(*zero, 0);
    /// # Ok::<(), std::alloc::AllocError>(())
    /// ```
    ///
    /// [zeroed]: mem::MaybeUninit::zeroed
    #[unstable(feature = "allocator_api", issue = "32838")]
    pub fn try_new_zeroed() -> Result<Rc<mem::MaybeUninit<T>>, AllocError> {
        unsafe {
            Ok(Rc::from_ptr(Rc::try_allocate_for_layout(
                Layout::new::<T>(),
                |layout| Global.allocate_zeroed(layout),
                <*mut u8>::cast,
            )?))
        }
    }

    /// Constructs a new `Pin<Rc<T>>`. If `T` does not implement `Unpin`, then
    /// `value` will be pinned in memory and unable to be moved.
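    ///
    /// # Examples
    ///
    /// A minimal sketch of pinning a value:
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let pinned = Rc::pin(5);
    /// assert_eq!(*pinned, 5);
    /// ```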
    #[cfg(not(no_global_oom_handling))]
    #[stable(feature = "pin", since = "1.33.0")]
    #[must_use]
    pub fn pin(value: T) -> Pin<Rc<T>> {
        unsafe { Pin::new_unchecked(Rc::new(value)) }
    }
}

impl<T, A: Allocator> Rc<T, A> {
    /// Constructs a new `Rc` in the provided allocator.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let five = Rc::new_in(5, System);
    /// ```
    #[cfg(not(no_global_oom_handling))]
    #[unstable(feature = "allocator_api", issue = "32838")]
    #[inline]
    pub fn new_in(value: T, alloc: A) -> Rc<T, A> {
        // NOTE: Prefer match over unwrap_or_else since closure sometimes not inlineable.
        // That would make code size bigger.
        match Self::try_new_in(value, alloc) {
            Ok(m) => m,
            Err(_) => handle_alloc_error(Layout::new::<RcInner<T>>()),
        }
    }

    /// Constructs a new `Rc` with uninitialized contents in the provided allocator.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(get_mut_unchecked)]
    /// #![feature(allocator_api)]
    ///
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let mut five = Rc::<u32, _>::new_uninit_in(System);
    ///
    /// let five = unsafe {
    ///     // Deferred initialization:
    ///     Rc::get_mut_unchecked(&mut five).as_mut_ptr().write(5);
    ///
    ///     five.assume_init()
    /// };
    ///
    /// assert_eq!(*five, 5)
    /// ```
    #[cfg(not(no_global_oom_handling))]
    #[unstable(feature = "allocator_api", issue = "32838")]
    #[inline]
    pub fn new_uninit_in(alloc: A) -> Rc<mem::MaybeUninit<T>, A> {
        unsafe {
            Rc::from_ptr_in(
                Rc::allocate_for_layout(
                    Layout::new::<T>(),
                    |layout| alloc.allocate(layout),
                    <*mut u8>::cast,
                ),
                alloc,
            )
        }
    }

    /// Constructs a new `Rc` with uninitialized contents, with the memory
    /// being filled with `0` bytes, in the provided allocator.
    ///
    /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
    /// incorrect usage of this method.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let zero = Rc::<u32, _>::new_zeroed_in(System);
    /// let zero = unsafe { zero.assume_init() };
    ///
    /// assert_eq!(*zero, 0)
    /// ```
    ///
    /// [zeroed]: mem::MaybeUninit::zeroed
    #[cfg(not(no_global_oom_handling))]
    #[unstable(feature = "allocator_api", issue = "32838")]
    #[inline]
    pub fn new_zeroed_in(alloc: A) -> Rc<mem::MaybeUninit<T>, A> {
        unsafe {
            Rc::from_ptr_in(
                Rc::allocate_for_layout(
                    Layout::new::<T>(),
                    |layout| alloc.allocate_zeroed(layout),
                    <*mut u8>::cast,
                ),
                alloc,
            )
        }
    }

    /// Constructs a new `Rc<T, A>` in the given allocator while giving you a `Weak<T, A>` to the allocation,
    /// to allow you to construct a `T` which holds a weak pointer to itself.
    ///
    /// Generally, a structure circularly referencing itself, either directly or
    /// indirectly, should not hold a strong reference to itself to prevent a memory leak.
    /// Using this function, you get access to the weak pointer during the
    /// initialization of `T`, before the `Rc<T, A>` is created, such that you can
    /// clone and store it inside the `T`.
    ///
    /// `new_cyclic_in` first allocates the managed allocation for the `Rc<T, A>`,
    /// then calls your closure, giving it a `Weak<T, A>` to this allocation,
    /// and only afterwards completes the construction of the `Rc<T, A>` by placing
    /// the `T` returned from your closure into the allocation.
    ///
    /// Since the new `Rc<T, A>` is not fully-constructed until `Rc<T, A>::new_cyclic_in`
    /// returns, calling [`upgrade`] on the weak reference inside your closure will
    /// fail and result in a `None` value.
    ///
    /// # Panics
    ///
    /// If `data_fn` panics, the panic is propagated to the caller, and the
    /// temporary [`Weak<T, A>`] is dropped normally.
    ///
    /// # Examples
    ///
    /// See [`new_cyclic`].
    ///
    /// [`new_cyclic`]: Rc::new_cyclic
    /// [`upgrade`]: Weak::upgrade
    #[cfg(not(no_global_oom_handling))]
    #[unstable(feature = "allocator_api", issue = "32838")]
    pub fn new_cyclic_in<F>(data_fn: F, alloc: A) -> Rc<T, A>
    where
        F: FnOnce(&Weak<T, A>) -> T,
    {
        // Construct the inner in the "uninitialized" state with a single
        // weak reference.
        let (uninit_raw_ptr, alloc) = Box::into_raw_with_allocator(Box::new_in(
            RcInner {
                strong: Cell::new(0),
                weak: Cell::new(1),
                value: mem::MaybeUninit::<T>::uninit(),
            },
            alloc,
        ));
        let uninit_ptr: NonNull<_> = (unsafe { &mut *uninit_raw_ptr }).into();
        let init_ptr: NonNull<RcInner<T>> = uninit_ptr.cast();

        let weak = Weak { ptr: init_ptr, alloc };

        // It's important we don't give up ownership of the weak pointer, or
        // else the memory might be freed by the time `data_fn` returns. If
        // we really wanted to pass ownership, we could create an additional
        // weak pointer for ourselves, but this would result in additional
        // updates to the weak reference count which might not be necessary
        // otherwise.
        let data = data_fn(&weak);

        let strong = unsafe {
            let inner = init_ptr.as_ptr();
            ptr::write(&raw mut (*inner).value, data);

            let prev_value = (*inner).strong.get();
            debug_assert_eq!(prev_value, 0, "No prior strong references should exist");
            (*inner).strong.set(1);

            // Strong references should collectively own a shared weak reference,
            // so don't run the destructor for our old weak reference.
            // Calling into_raw_with_allocator has the double effect of giving us back the allocator,
            // and forgetting the weak reference.
            let alloc = weak.into_raw_with_allocator().1;

            Rc::from_inner_in(init_ptr, alloc)
        };

        strong
    }

    /// Constructs a new `Rc<T>` in the provided allocator, returning an error if the allocation
    /// fails.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let five = Rc::try_new_in(5, System);
    /// # Ok::<(), std::alloc::AllocError>(())
    /// ```
    #[unstable(feature = "allocator_api", issue = "32838")]
    #[inline]
    pub fn try_new_in(value: T, alloc: A) -> Result<Self, AllocError> {
        // There is an implicit weak pointer owned by all the strong
        // pointers, which ensures that the weak destructor never frees
        // the allocation while the strong destructor is running, even
        // if the weak pointer is stored inside the strong one.
        let (ptr, alloc) = Box::into_unique(Box::try_new_in(
            RcInner { strong: Cell::new(1), weak: Cell::new(1), value },
            alloc,
        )?);
        Ok(unsafe { Self::from_inner_in(ptr.into(), alloc) })
    }

    /// Constructs a new `Rc` with uninitialized contents, in the provided allocator, returning an
    /// error if the allocation fails.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    /// #![feature(get_mut_unchecked)]
    ///
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let mut five = Rc::<u32, _>::try_new_uninit_in(System)?;
    ///
    /// let five = unsafe {
    ///     // Deferred initialization:
    ///     Rc::get_mut_unchecked(&mut five).as_mut_ptr().write(5);
    ///
    ///     five.assume_init()
    /// };
    ///
    /// assert_eq!(*five, 5);
    /// # Ok::<(), std::alloc::AllocError>(())
    /// ```
    #[unstable(feature = "allocator_api", issue = "32838")]
    #[inline]
    pub fn try_new_uninit_in(alloc: A) -> Result<Rc<mem::MaybeUninit<T>, A>, AllocError> {
        unsafe {
            Ok(Rc::from_ptr_in(
                Rc::try_allocate_for_layout(
                    Layout::new::<T>(),
                    |layout| alloc.allocate(layout),
                    <*mut u8>::cast,
                )?,
                alloc,
            ))
        }
    }

    /// Constructs a new `Rc` with uninitialized contents, with the memory
    /// being filled with `0` bytes, in the provided allocator, returning an error if the allocation
    /// fails.
    ///
    /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
    /// incorrect usage of this method.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let zero = Rc::<u32, _>::try_new_zeroed_in(System)?;
    /// let zero = unsafe { zero.assume_init() };
    ///
    /// assert_eq!(*zero, 0);
    /// # Ok::<(), std::alloc::AllocError>(())
    /// ```
    ///
    /// [zeroed]: mem::MaybeUninit::zeroed
    #[unstable(feature = "allocator_api", issue = "32838")]
    #[inline]
    pub fn try_new_zeroed_in(alloc: A) -> Result<Rc<mem::MaybeUninit<T>, A>, AllocError> {
        unsafe {
            Ok(Rc::from_ptr_in(
                Rc::try_allocate_for_layout(
                    Layout::new::<T>(),
                    |layout| alloc.allocate_zeroed(layout),
                    <*mut u8>::cast,
                )?,
                alloc,
            ))
        }
    }

    /// Constructs a new `Pin<Rc<T>>` in the provided allocator. If `T` does not implement `Unpin`, then
    /// `value` will be pinned in memory and unable to be moved.
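    ///
    /// # Examples
    ///
    /// A minimal sketch using the `System` allocator:
    ///
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let pinned = Rc::pin_in(5, System);
    /// assert_eq!(*pinned, 5);
    /// ```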
    #[cfg(not(no_global_oom_handling))]
    #[unstable(feature = "allocator_api", issue = "32838")]
    #[inline]
    pub fn pin_in(value: T, alloc: A) -> Pin<Self>
    where
        A: 'static,
    {
        unsafe { Pin::new_unchecked(Rc::new_in(value, alloc)) }
    }

    /// Returns the inner value, if the `Rc` has exactly one strong reference.
    ///
    /// Otherwise, an [`Err`] is returned with the same `Rc` that was
    /// passed in.
    ///
    /// This will succeed even if there are outstanding weak references.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let x = Rc::new(3);
    /// assert_eq!(Rc::try_unwrap(x), Ok(3));
    ///
    /// let x = Rc::new(4);
    /// let _y = Rc::clone(&x);
    /// assert_eq!(*Rc::try_unwrap(x).unwrap_err(), 4);
    /// ```
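    ///
    /// Outstanding [`Weak`] references do not prevent unwrapping:
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let x = Rc::new(5);
    /// let weak = Rc::downgrade(&x);
    /// assert_eq!(Rc::try_unwrap(x), Ok(5));
    /// // With the value gone, the weak reference can no longer be upgraded.
    /// assert!(weak.upgrade().is_none());
    /// ```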
    #[inline]
    #[stable(feature = "rc_unique", since = "1.4.0")]
    pub fn try_unwrap(this: Self) -> Result<T, Self> {
        if Rc::strong_count(&this) == 1 {
            let this = ManuallyDrop::new(this);

            let val: T = unsafe { ptr::read(&**this) }; // copy the contained object
            let alloc: A = unsafe { ptr::read(&this.alloc) }; // copy the allocator

            // Indicate to Weaks that they can't be promoted by decrementing
            // the strong count, and then remove the implicit "strong weak"
            // pointer while also handling drop logic by just crafting a
            // fake Weak.
            this.inner().dec_strong();
            let _weak = Weak { ptr: this.ptr, alloc };
            Ok(val)
        } else {
            Err(this)
        }
    }

    /// Returns the inner value, if the `Rc` has exactly one strong reference.
    ///
    /// Otherwise, [`None`] is returned and the `Rc` is dropped.
    ///
    /// This will succeed even if there are outstanding weak references.
    ///
    /// If `Rc::into_inner` is called on every clone of this `Rc`,
    /// it is guaranteed that exactly one of the calls returns the inner value.
    /// This means in particular that the inner value is not dropped.
    ///
    /// [`Rc::try_unwrap`] is conceptually similar to `Rc::into_inner`.
    /// And while they are meant for different use-cases, `Rc::into_inner(this)`
    /// is in fact equivalent to <code>[Rc::try_unwrap]\(this).[ok][Result::ok]()</code>.
    /// (Note that the same kind of equivalence does **not** hold true for
    /// [`Arc`](crate::sync::Arc), due to race conditions that do not apply to `Rc`!)
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let x = Rc::new(3);
    /// assert_eq!(Rc::into_inner(x), Some(3));
    ///
    /// let x = Rc::new(4);
    /// let y = Rc::clone(&x);
    ///
    /// assert_eq!(Rc::into_inner(y), None);
    /// assert_eq!(Rc::into_inner(x), Some(4));
    /// ```
    #[inline]
    #[stable(feature = "rc_into_inner", since = "1.70.0")]
    pub fn into_inner(this: Self) -> Option<T> {
        Rc::try_unwrap(this).ok()
    }
}

impl<T> Rc<[T]> {
    /// Constructs a new reference-counted slice with uninitialized contents.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let mut values = Rc::<[u32]>::new_uninit_slice(3);
    ///
    /// // Deferred initialization:
    /// let data = Rc::get_mut(&mut values).unwrap();
    /// data[0].write(1);
    /// data[1].write(2);
    /// data[2].write(3);
    ///
    /// let values = unsafe { values.assume_init() };
    ///
    /// assert_eq!(*values, [1, 2, 3])
    /// ```
    #[cfg(not(no_global_oom_handling))]
    #[stable(feature = "new_uninit", since = "1.82.0")]
    #[must_use]
    pub fn new_uninit_slice(len: usize) -> Rc<[mem::MaybeUninit<T>]> {
        unsafe { Rc::from_ptr(Rc::allocate_for_slice(len)) }
    }

    /// Constructs a new reference-counted slice with uninitialized contents, with the memory being
    /// filled with `0` bytes.
    ///
    /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
    /// incorrect usage of this method.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let values = Rc::<[u32]>::new_zeroed_slice(3);
    /// let values = unsafe { values.assume_init() };
    ///
    /// assert_eq!(*values, [0, 0, 0])
    /// ```
    ///
    /// [zeroed]: mem::MaybeUninit::zeroed
    #[cfg(not(no_global_oom_handling))]
    #[stable(feature = "new_zeroed_alloc", since = "CURRENT_RUSTC_VERSION")]
    #[must_use]
    pub fn new_zeroed_slice(len: usize) -> Rc<[mem::MaybeUninit<T>]> {
        unsafe {
            Rc::from_ptr(Rc::allocate_for_layout(
                Layout::array::<T>(len).unwrap(),
                |layout| Global.allocate_zeroed(layout),
                |mem| {
                    ptr::slice_from_raw_parts_mut(mem.cast::<T>(), len)
                        as *mut RcInner<[mem::MaybeUninit<T>]>
                },
            ))
        }
    }

    /// Converts the reference-counted slice into a reference-counted array.
    ///
    /// This operation does not reallocate; the underlying array of the slice is simply reinterpreted as an array type.
    ///
    /// If `N` is not exactly equal to the length of `self`, then this method returns `None`.
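    ///
    /// # Examples
    ///
    /// A minimal sketch of both outcomes:
    ///
    /// ```
    /// #![feature(slice_as_array)]
    ///
    /// use std::rc::Rc;
    ///
    /// let slice: Rc<[u32]> = Rc::new([1, 2, 3]);
    /// let array: Rc<[u32; 3]> = slice.into_array().unwrap();
    /// assert_eq!(*array, [1, 2, 3]);
    ///
    /// // A mismatched length yields `None`.
    /// let slice: Rc<[u32]> = Rc::new([1, 2, 3]);
    /// assert!(slice.into_array::<4>().is_none());
    /// ```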
    #[unstable(feature = "slice_as_array", issue = "133508")]
    #[inline]
    #[must_use]
    pub fn into_array<const N: usize>(self) -> Option<Rc<[T; N]>> {
        if self.len() == N {
            let ptr = Self::into_raw(self) as *const [T; N];

            // SAFETY: The underlying array of a slice has the exact same layout as an actual array `[T; N]` if `N` is equal to the slice's length.
            let me = unsafe { Rc::from_raw(ptr) };
            Some(me)
        } else {
            None
        }
    }
}

impl<T, A: Allocator> Rc<[T], A> {
    /// Constructs a new reference-counted slice with uninitialized contents.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(get_mut_unchecked)]
    /// #![feature(allocator_api)]
    ///
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let mut values = Rc::<[u32], _>::new_uninit_slice_in(3, System);
    ///
    /// let values = unsafe {
    ///     // Deferred initialization:
    ///     Rc::get_mut_unchecked(&mut values)[0].as_mut_ptr().write(1);
    ///     Rc::get_mut_unchecked(&mut values)[1].as_mut_ptr().write(2);
    ///     Rc::get_mut_unchecked(&mut values)[2].as_mut_ptr().write(3);
    ///
    ///     values.assume_init()
    /// };
    ///
    /// assert_eq!(*values, [1, 2, 3])
    /// ```
    #[cfg(not(no_global_oom_handling))]
    #[unstable(feature = "allocator_api", issue = "32838")]
    #[inline]
    pub fn new_uninit_slice_in(len: usize, alloc: A) -> Rc<[mem::MaybeUninit<T>], A> {
        unsafe { Rc::from_ptr_in(Rc::allocate_for_slice_in(len, &alloc), alloc) }
    }

    /// Constructs a new reference-counted slice with uninitialized contents, with the memory being
    /// filled with `0` bytes.
    ///
    /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
    /// incorrect usage of this method.
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let values = Rc::<[u32], _>::new_zeroed_slice_in(3, System);
    /// let values = unsafe { values.assume_init() };
    ///
    /// assert_eq!(*values, [0, 0, 0])
    /// ```
    ///
    /// [zeroed]: mem::MaybeUninit::zeroed
    #[cfg(not(no_global_oom_handling))]
    #[unstable(feature = "allocator_api", issue = "32838")]
    #[inline]
    pub fn new_zeroed_slice_in(len: usize, alloc: A) -> Rc<[mem::MaybeUninit<T>], A> {
        unsafe {
            Rc::from_ptr_in(
                Rc::allocate_for_layout(
                    Layout::array::<T>(len).unwrap(),
                    |layout| alloc.allocate_zeroed(layout),
                    |mem| {
                        ptr::slice_from_raw_parts_mut(mem.cast::<T>(), len)
                            as *mut RcInner<[mem::MaybeUninit<T>]>
                    },
                ),
                alloc,
            )
        }
    }
}

impl<T, A: Allocator> Rc<mem::MaybeUninit<T>, A> {
    /// Converts to `Rc<T>`.
    ///
    /// # Safety
    ///
    /// As with [`MaybeUninit::assume_init`],
    /// it is up to the caller to guarantee that the inner value
    /// really is in an initialized state.
    /// Calling this when the content is not yet fully initialized
    /// causes immediate undefined behavior.
    ///
    /// [`MaybeUninit::assume_init`]: mem::MaybeUninit::assume_init
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let mut five = Rc::<u32>::new_uninit();
    ///
    /// // Deferred initialization:
    /// Rc::get_mut(&mut five).unwrap().write(5);
    ///
    /// let five = unsafe { five.assume_init() };
    ///
    /// assert_eq!(*five, 5)
    /// ```
    #[stable(feature = "new_uninit", since = "1.82.0")]
    #[inline]
    pub unsafe fn assume_init(self) -> Rc<T, A> {
        let (ptr, alloc) = Rc::into_inner_with_allocator(self);
        unsafe { Rc::from_inner_in(ptr.cast(), alloc) }
    }
}

impl<T, A: Allocator> Rc<[mem::MaybeUninit<T>], A> {
    /// Converts to `Rc<[T]>`.
    ///
    /// # Safety
    ///
    /// As with [`MaybeUninit::assume_init`],
    /// it is up to the caller to guarantee that the inner value
    /// really is in an initialized state.
    /// Calling this when the content is not yet fully initialized
    /// causes immediate undefined behavior.
    ///
    /// [`MaybeUninit::assume_init`]: mem::MaybeUninit::assume_init
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let mut values = Rc::<[u32]>::new_uninit_slice(3);
    ///
    /// // Deferred initialization:
    /// let data = Rc::get_mut(&mut values).unwrap();
    /// data[0].write(1);
    /// data[1].write(2);
    /// data[2].write(3);
    ///
    /// let values = unsafe { values.assume_init() };
    ///
    /// assert_eq!(*values, [1, 2, 3])
    /// ```
    #[stable(feature = "new_uninit", since = "1.82.0")]
    #[inline]
    pub unsafe fn assume_init(self) -> Rc<[T], A> {
        let (ptr, alloc) = Rc::into_inner_with_allocator(self);
        unsafe { Rc::from_ptr_in(ptr.as_ptr() as _, alloc) }
    }
}

impl<T: ?Sized> Rc<T> {
    /// Constructs an `Rc<T>` from a raw pointer.
    ///
    /// The raw pointer must have been previously returned by a call to
    /// [`Rc<U>::into_raw`][into_raw] with the following requirements:
    ///
    /// * If `U` is sized, it must have the same size and alignment as `T`. This
    ///   is trivially true if `U` is `T`.
    /// * If `U` is unsized, its data pointer must have the same size and
    ///   alignment as `T`. This is trivially true if `Rc<U>` was constructed
    ///   through `Rc<T>` and then converted to `Rc<U>` through an [unsized
    ///   coercion].
    ///
    /// Note that if `U` or `U`'s data pointer is not `T` but has the same size
    /// and alignment, this is basically like transmuting references of
    /// different types. See [`mem::transmute`][transmute] for more information
    /// on what restrictions apply in this case.
    ///
    /// The raw pointer must point to a block of memory allocated by the global allocator.
    ///
    /// The user of `from_raw` has to make sure a specific value of `T` is only
    /// dropped once.
    ///
    /// This function is unsafe because improper use may lead to memory unsafety,
    /// even if the returned `Rc<T>` is never accessed.
    ///
    /// [into_raw]: Rc::into_raw
    /// [transmute]: core::mem::transmute
    /// [unsized coercion]: https://doc.rust-lang.org/reference/type-coercions.html#unsized-coercions
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let x = Rc::new("hello".to_owned());
    /// let x_ptr = Rc::into_raw(x);
    ///
    /// unsafe {
    ///     // Convert back to an `Rc` to prevent leak.
    ///     let x = Rc::from_raw(x_ptr);
    ///     assert_eq!(&*x, "hello");
    ///
    ///     // Further calls to `Rc::from_raw(x_ptr)` would be memory-unsafe.
    /// }
    ///
    /// // The memory was freed when `x` went out of scope above, so `x_ptr` is now dangling!
    /// ```
    ///
    /// Convert a slice back into its original array:
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let x: Rc<[u32]> = Rc::new([1, 2, 3]);
    /// let x_ptr: *const [u32] = Rc::into_raw(x);
    ///
    /// unsafe {
    ///     let x: Rc<[u32; 3]> = Rc::from_raw(x_ptr.cast::<[u32; 3]>());
    ///     assert_eq!(&*x, &[1, 2, 3]);
    /// }
    /// ```
    #[inline]
    #[stable(feature = "rc_raw", since = "1.17.0")]
    pub unsafe fn from_raw(ptr: *const T) -> Self {
        unsafe { Self::from_raw_in(ptr, Global) }
    }

    /// Consumes the `Rc`, returning the wrapped pointer.
    ///
    /// To avoid a memory leak the pointer must be converted back to an `Rc` using
    /// [`Rc::from_raw`].
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let x = Rc::new("hello".to_owned());
    /// let x_ptr = Rc::into_raw(x);
    /// assert_eq!(unsafe { &*x_ptr }, "hello");
    /// # // Prevent leaks for Miri.
    /// # drop(unsafe { Rc::from_raw(x_ptr) });
    /// ```
    #[must_use = "losing the pointer will leak memory"]
    #[stable(feature = "rc_raw", since = "1.17.0")]
    #[rustc_never_returns_null_ptr]
    pub fn into_raw(this: Self) -> *const T {
        let this = ManuallyDrop::new(this);
        Self::as_ptr(&*this)
    }

    /// Increments the strong reference count on the `Rc<T>` associated with the
    /// provided pointer by one.
    ///
    /// # Safety
    ///
    /// The pointer must have been obtained through `Rc::into_raw` and must satisfy the
    /// same layout requirements specified in [`Rc::from_raw_in`][from_raw_in].
    /// The associated `Rc` instance must be valid (i.e. the strong count must be at
    /// least 1) for the duration of this method, and `ptr` must point to a block of memory
    /// allocated by the global allocator.
    ///
    /// [from_raw_in]: Rc::from_raw_in
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let five = Rc::new(5);
    ///
    /// unsafe {
    ///     let ptr = Rc::into_raw(five);
    ///     Rc::increment_strong_count(ptr);
    ///
    ///     let five = Rc::from_raw(ptr);
    ///     assert_eq!(2, Rc::strong_count(&five));
    /// #   // Prevent leaks for Miri.
    /// #   Rc::decrement_strong_count(ptr);
    /// }
    /// ```
    #[inline]
    #[stable(feature = "rc_mutate_strong_count", since = "1.53.0")]
    pub unsafe fn increment_strong_count(ptr: *const T) {
        unsafe { Self::increment_strong_count_in(ptr, Global) }
    }

    /// Decrements the strong reference count on the `Rc<T>` associated with the
    /// provided pointer by one.
    ///
    /// # Safety
    ///
    /// The pointer must have been obtained through `Rc::into_raw` and must satisfy the
    /// same layout requirements specified in [`Rc::from_raw_in`][from_raw_in].
    /// The associated `Rc` instance must be valid (i.e. the strong count must be at
    /// least 1) when invoking this method, and `ptr` must point to a block of memory
    /// allocated by the global allocator. This method can be used to release the final `Rc` and
    /// backing storage, but **should not** be called after the final `Rc` has been released.
    ///
    /// [from_raw_in]: Rc::from_raw_in
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let five = Rc::new(5);
    ///
    /// unsafe {
    ///     let ptr = Rc::into_raw(five);
    ///     Rc::increment_strong_count(ptr);
    ///
    ///     let five = Rc::from_raw(ptr);
    ///     assert_eq!(2, Rc::strong_count(&five));
    ///     Rc::decrement_strong_count(ptr);
    ///     assert_eq!(1, Rc::strong_count(&five));
    /// }
    /// ```
    #[inline]
    #[stable(feature = "rc_mutate_strong_count", since = "1.53.0")]
    pub unsafe fn decrement_strong_count(ptr: *const T) {
        unsafe { Self::decrement_strong_count_in(ptr, Global) }
    }
}

impl<T: ?Sized, A: Allocator> Rc<T, A> {
    /// Returns a reference to the underlying allocator.
    ///
    /// Note: this is an associated function, which means that you have
    /// to call it as `Rc::allocator(&r)` instead of `r.allocator()`. This
    /// is so that there is no conflict with a method on the inner type.
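    ///
    /// # Examples
    ///
    /// A minimal sketch with the `System` allocator:
    ///
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let rc = Rc::new_in(5, System);
    /// let _alloc: &System = Rc::allocator(&rc);
    /// ```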
    #[inline]
    #[unstable(feature = "allocator_api", issue = "32838")]
    pub fn allocator(this: &Self) -> &A {
        &this.alloc
    }

    /// Consumes the `Rc`, returning the wrapped pointer and allocator.
    ///
    /// To avoid a memory leak the pointer must be converted back to an `Rc` using
    /// [`Rc::from_raw_in`].
    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let x = Rc::new_in("hello".to_owned(), System);
    /// let (ptr, alloc) = Rc::into_raw_with_allocator(x);
    /// assert_eq!(unsafe { &*ptr }, "hello");
    /// let x = unsafe { Rc::from_raw_in(ptr, alloc) };
    /// assert_eq!(&*x, "hello");
    /// ```
    #[must_use = "losing the pointer will leak memory"]
    #[unstable(feature = "allocator_api", issue = "32838")]
    pub fn into_raw_with_allocator(this: Self) -> (*const T, A) {
        let this = mem::ManuallyDrop::new(this);
        let ptr = Self::as_ptr(&this);
        // Safety: `this` is ManuallyDrop so the allocator will not be double-dropped
        let alloc = unsafe { ptr::read(&this.alloc) };
        (ptr, alloc)
    }

    /// Provides a raw pointer to the data.
    ///
    /// The counts are not affected in any way and the `Rc` is not consumed. The pointer is valid
    /// for as long as there are strong counts in the `Rc`.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let x = Rc::new(0);
    /// let y = Rc::clone(&x);
    /// let x_ptr = Rc::as_ptr(&x);
    /// assert_eq!(x_ptr, Rc::as_ptr(&y));
    /// assert_eq!(unsafe { *x_ptr }, 0);
    /// ```
    #[stable(feature = "weak_into_raw", since = "1.45.0")]
    #[rustc_never_returns_null_ptr]
    pub fn as_ptr(this: &Self) -> *const T {
        let ptr: *mut RcInner<T> = NonNull::as_ptr(this.ptr);

        // SAFETY: This cannot go through Deref::deref or Rc::inner because
        // this is required to retain raw/mut provenance such that e.g. `get_mut` can
        // write through the pointer after the Rc is recovered through `from_raw`.
        unsafe { &raw mut (*ptr).value }
    }

    /// Constructs an `Rc<T, A>` from a raw pointer in the provided allocator.
    ///
    /// The raw pointer must have been previously returned by a call to [`Rc<U,
    /// A>::into_raw`][into_raw] with the following requirements:
    ///
    /// * If `U` is sized, it must have the same size and alignment as `T`. This
    ///   is trivially true if `U` is `T`.
    /// * If `U` is unsized, its data pointer must have the same size and
    ///   alignment as `T`. This is trivially true if `Rc<U>` was constructed
    ///   through `Rc<T>` and then converted to `Rc<U>` through an [unsized
    ///   coercion].
    ///
    /// Note that if `U` or `U`'s data pointer is not `T` but has the same size
    /// and alignment, this is basically like transmuting references of
    /// different types. See [`mem::transmute`][transmute] for more information
    /// on what restrictions apply in this case.
    ///
1486    /// The raw pointer must point to a block of memory allocated by `alloc`
1487    ///
1488    /// The user of `from_raw` has to make sure a specific value of `T` is only
1489    /// dropped once.
1490    ///
1491    /// This function is unsafe because improper use may lead to memory unsafety,
1492    /// even if the returned `Rc<T>` is never accessed.
1493    ///
1494    /// [into_raw]: Rc::into_raw
1495    /// [transmute]: core::mem::transmute
1496    /// [unsized coercion]: https://doc.rust-lang.org/reference/type-coercions.html#unsized-coercions
1497    ///
1498    /// # Examples
1499    ///
1500    /// ```
1501    /// #![feature(allocator_api)]
1502    ///
1503    /// use std::rc::Rc;
1504    /// use std::alloc::System;
1505    ///
1506    /// let x = Rc::new_in("hello".to_owned(), System);
1507    /// let (x_ptr, _alloc) = Rc::into_raw_with_allocator(x);
1508    ///
1509    /// unsafe {
1510    ///     // Convert back to an `Rc` to prevent leak.
1511    ///     let x = Rc::from_raw_in(x_ptr, System);
1512    ///     assert_eq!(&*x, "hello");
1513    ///
1514    ///     // Further calls to `Rc::from_raw_in(x_ptr, System)` would be memory-unsafe.
1515    /// }
1516    ///
1517    /// // The memory was freed when `x` went out of scope above, so `x_ptr` is now dangling!
1518    /// ```
1519    ///
1520    /// Convert a slice back into its original array:
1521    ///
1522    /// ```
1523    /// #![feature(allocator_api)]
1524    ///
1525    /// use std::rc::Rc;
1526    /// use std::alloc::System;
1527    ///
1528    /// let x: Rc<[u32], _> = Rc::new_in([1, 2, 3], System);
1529    /// let x_ptr: *const [u32] = Rc::into_raw_with_allocator(x).0;
1530    ///
1531    /// unsafe {
1532    ///     let x: Rc<[u32; 3], _> = Rc::from_raw_in(x_ptr.cast::<[u32; 3]>(), System);
1533    ///     assert_eq!(&*x, &[1, 2, 3]);
1534    /// }
1535    /// ```
1536    #[unstable(feature = "allocator_api", issue = "32838")]
1537    pub unsafe fn from_raw_in(ptr: *const T, alloc: A) -> Self {
1538        let offset = unsafe { data_offset(ptr) };
1539
1540        // Reverse the offset to find the original RcInner.
1541        let rc_ptr = unsafe { ptr.byte_sub(offset) as *mut RcInner<T> };
1542
1543        unsafe { Self::from_ptr_in(rc_ptr, alloc) }
1544    }
1545
1546    /// Creates a new [`Weak`] pointer to this allocation.
1547    ///
1548    /// # Examples
1549    ///
1550    /// ```
1551    /// use std::rc::Rc;
1552    ///
1553    /// let five = Rc::new(5);
1554    ///
1555    /// let weak_five = Rc::downgrade(&five);
1556    /// ```
1557    #[must_use = "this returns a new `Weak` pointer, \
1558                  without modifying the original `Rc`"]
1559    #[stable(feature = "rc_weak", since = "1.4.0")]
1560    pub fn downgrade(this: &Self) -> Weak<T, A>
1561    where
1562        A: Clone,
1563    {
1564        this.inner().inc_weak();
1565        // Make sure we do not create a dangling Weak
1566        debug_assert!(!is_dangling(this.ptr.as_ptr()));
1567        Weak { ptr: this.ptr, alloc: this.alloc.clone() }
1568    }
1569
1570    /// Gets the number of [`Weak`] pointers to this allocation.
1571    ///
1572    /// # Examples
1573    ///
1574    /// ```
1575    /// use std::rc::Rc;
1576    ///
1577    /// let five = Rc::new(5);
1578    /// let _weak_five = Rc::downgrade(&five);
1579    ///
1580    /// assert_eq!(1, Rc::weak_count(&five));
1581    /// ```
1582    #[inline]
1583    #[stable(feature = "rc_counts", since = "1.15.0")]
1584    pub fn weak_count(this: &Self) -> usize {
1585        this.inner().weak() - 1
1586    }
1587
1588    /// Gets the number of strong (`Rc`) pointers to this allocation.
1589    ///
1590    /// # Examples
1591    ///
1592    /// ```
1593    /// use std::rc::Rc;
1594    ///
1595    /// let five = Rc::new(5);
1596    /// let _also_five = Rc::clone(&five);
1597    ///
1598    /// assert_eq!(2, Rc::strong_count(&five));
1599    /// ```
1600    #[inline]
1601    #[stable(feature = "rc_counts", since = "1.15.0")]
1602    pub fn strong_count(this: &Self) -> usize {
1603        this.inner().strong()
1604    }
1605
1606    /// Increments the strong reference count on the `Rc<T>` associated with the
1607    /// provided pointer by one.
1608    ///
1609    /// # Safety
1610    ///
1611    /// The pointer must have been obtained through `Rc::into_raw` and must satisfy the
1612    /// same layout requirements specified in [`Rc::from_raw_in`][from_raw_in].
1613    /// The associated `Rc` instance must be valid (i.e. the strong count must be at
1614    /// least 1) for the duration of this method, and `ptr` must point to a block of memory
1615    /// allocated by `alloc`.
1616    ///
1617    /// [from_raw_in]: Rc::from_raw_in
1618    ///
1619    /// # Examples
1620    ///
1621    /// ```
1622    /// #![feature(allocator_api)]
1623    ///
1624    /// use std::rc::Rc;
1625    /// use std::alloc::System;
1626    ///
1627    /// let five = Rc::new_in(5, System);
1628    ///
1629    /// unsafe {
1630    ///     let (ptr, _alloc) = Rc::into_raw_with_allocator(five);
1631    ///     Rc::increment_strong_count_in(ptr, System);
1632    ///
1633    ///     let five = Rc::from_raw_in(ptr, System);
1634    ///     assert_eq!(2, Rc::strong_count(&five));
1635    /// #   // Prevent leaks for Miri.
1636    /// #   Rc::decrement_strong_count_in(ptr, System);
1637    /// }
1638    /// ```
1639    #[inline]
1640    #[unstable(feature = "allocator_api", issue = "32838")]
1641    pub unsafe fn increment_strong_count_in(ptr: *const T, alloc: A)
1642    where
1643        A: Clone,
1644    {
1645        // Retain Rc, but don't touch refcount by wrapping in ManuallyDrop
1646        let rc = unsafe { mem::ManuallyDrop::new(Rc::<T, A>::from_raw_in(ptr, alloc)) };
1647        // Now increase refcount, but don't drop new refcount either
1648        let _rc_clone: mem::ManuallyDrop<_> = rc.clone();
1649    }
1650
1651    /// Decrements the strong reference count on the `Rc<T>` associated with the
1652    /// provided pointer by one.
1653    ///
1654    /// # Safety
1655    ///
1656    /// The pointer must have been obtained through `Rc::into_raw` and must satisfy the
1657    /// same layout requirements specified in [`Rc::from_raw_in`][from_raw_in].
1658    /// The associated `Rc` instance must be valid (i.e. the strong count must be at
1659    /// least 1) when invoking this method, and `ptr` must point to a block of memory
1660    /// allocated by `alloc`. This method can be used to release the final `Rc` and
1661    /// backing storage, but **should not** be called after the final `Rc` has been released.
1662    ///
1663    /// [from_raw_in]: Rc::from_raw_in
1664    ///
1665    /// # Examples
1666    ///
1667    /// ```
1668    /// #![feature(allocator_api)]
1669    ///
1670    /// use std::rc::Rc;
1671    /// use std::alloc::System;
1672    ///
1673    /// let five = Rc::new_in(5, System);
1674    ///
1675    /// unsafe {
1676    ///     let (ptr, _alloc) = Rc::into_raw_with_allocator(five);
1677    ///     Rc::increment_strong_count_in(ptr, System);
1678    ///
1679    ///     let five = Rc::from_raw_in(ptr, System);
1680    ///     assert_eq!(2, Rc::strong_count(&five));
1681    ///     Rc::decrement_strong_count_in(ptr, System);
1682    ///     assert_eq!(1, Rc::strong_count(&five));
1683    /// }
1684    /// ```
1685    #[inline]
1686    #[unstable(feature = "allocator_api", issue = "32838")]
1687    pub unsafe fn decrement_strong_count_in(ptr: *const T, alloc: A) {
1688        unsafe { drop(Rc::from_raw_in(ptr, alloc)) };
1689    }
1690
1691    /// Returns `true` if there are no other `Rc` or [`Weak`] pointers to
1692    /// this allocation.
1693    #[inline]
1694    fn is_unique(this: &Self) -> bool {
1695        Rc::weak_count(this) == 0 && Rc::strong_count(this) == 1
1696    }
1697
1698    /// Returns a mutable reference into the given `Rc`, if there are
1699    /// no other `Rc` or [`Weak`] pointers to the same allocation.
1700    ///
1701    /// Returns [`None`] otherwise, because it is not safe to
1702    /// mutate a shared value.
1703    ///
1704    /// See also [`make_mut`][make_mut], which will [`clone`][clone]
1705    /// the inner value when there are other `Rc` pointers.
1706    ///
1707    /// [make_mut]: Rc::make_mut
1708    /// [clone]: Clone::clone
1709    ///
1710    /// # Examples
1711    ///
1712    /// ```
1713    /// use std::rc::Rc;
1714    ///
1715    /// let mut x = Rc::new(3);
1716    /// *Rc::get_mut(&mut x).unwrap() = 4;
1717    /// assert_eq!(*x, 4);
1718    ///
1719    /// let _y = Rc::clone(&x);
1720    /// assert!(Rc::get_mut(&mut x).is_none());
1721    /// ```
1722    #[inline]
1723    #[stable(feature = "rc_unique", since = "1.4.0")]
1724    pub fn get_mut(this: &mut Self) -> Option<&mut T> {
1725        if Rc::is_unique(this) { unsafe { Some(Rc::get_mut_unchecked(this)) } } else { None }
1726    }
1727
1728    /// Returns a mutable reference into the given `Rc`,
1729    /// without any check.
1730    ///
1731    /// See also [`get_mut`], which is safe and does appropriate checks.
1732    ///
1733    /// [`get_mut`]: Rc::get_mut
1734    ///
1735    /// # Safety
1736    ///
1737    /// If any other `Rc` or [`Weak`] pointers to the same allocation exist, then
1738    /// they must not be dereferenced or have active borrows for the duration
1739    /// of the returned borrow, and their inner type must be exactly the same as the
1740    /// inner type of this Rc (including lifetimes). This is trivially the case if no
1741    /// such pointers exist, for example immediately after `Rc::new`.
1742    ///
1743    /// # Examples
1744    ///
1745    /// ```
1746    /// #![feature(get_mut_unchecked)]
1747    ///
1748    /// use std::rc::Rc;
1749    ///
1750    /// let mut x = Rc::new(String::new());
1751    /// unsafe {
1752    ///     Rc::get_mut_unchecked(&mut x).push_str("foo")
1753    /// }
1754    /// assert_eq!(*x, "foo");
1755    /// ```
1756    /// Other `Rc` pointers to the same allocation must be to the same type.
1757    /// ```no_run
1758    /// #![feature(get_mut_unchecked)]
1759    ///
1760    /// use std::rc::Rc;
1761    ///
1762    /// let x: Rc<str> = Rc::from("Hello, world!");
1763    /// let mut y: Rc<[u8]> = x.clone().into();
1764    /// unsafe {
1765    ///     // this is Undefined Behavior, because x's inner type is str, not [u8]
1766    ///     Rc::get_mut_unchecked(&mut y).fill(0xff); // 0xff is invalid in UTF-8
1767    /// }
1768    /// println!("{}", &*x); // Invalid UTF-8 in a str
1769    /// ```
1770    /// Other `Rc` pointers to the same allocation must be to the exact same type, including lifetimes.
1771    /// ```no_run
1772    /// #![feature(get_mut_unchecked)]
1773    ///
1774    /// use std::rc::Rc;
1775    ///
1776    /// let x: Rc<&str> = Rc::new("Hello, world!");
1777    /// {
1778    ///     let s = String::from("Oh, no!");
1779    ///     let mut y: Rc<&str> = x.clone();
1780    ///     unsafe {
1781    ///         // this is Undefined Behavior, because x's inner type
1782    ///         // is &'long str, not &'short str
1783    ///         *Rc::get_mut_unchecked(&mut y) = &s;
1784    ///     }
1785    /// }
1786    /// println!("{}", &*x); // Use-after-free
1787    /// ```
1788    #[inline]
1789    #[unstable(feature = "get_mut_unchecked", issue = "63292")]
1790    pub unsafe fn get_mut_unchecked(this: &mut Self) -> &mut T {
1791        // We are careful to *not* create a reference covering the "count" fields, as
1792        // this would conflict with accesses to the reference counts (e.g. by `Weak`).
1793        unsafe { &mut (*this.ptr.as_ptr()).value }
1794    }
1795
1796    #[inline]
1797    #[stable(feature = "ptr_eq", since = "1.17.0")]
1798    /// Returns `true` if the two `Rc`s point to the same allocation in a vein similar to
1799    /// [`ptr::eq`]. This function ignores the metadata of `dyn Trait` pointers.
1800    ///
1801    /// # Examples
1802    ///
1803    /// ```
1804    /// use std::rc::Rc;
1805    ///
1806    /// let five = Rc::new(5);
1807    /// let same_five = Rc::clone(&five);
1808    /// let other_five = Rc::new(5);
1809    ///
1810    /// assert!(Rc::ptr_eq(&five, &same_five));
1811    /// assert!(!Rc::ptr_eq(&five, &other_five));
1812    /// ```
1813    pub fn ptr_eq(this: &Self, other: &Self) -> bool {
1814        ptr::addr_eq(this.ptr.as_ptr(), other.ptr.as_ptr())
1815    }
1816}
1817
1818#[cfg(not(no_global_oom_handling))]
1819impl<T: ?Sized + CloneToUninit, A: Allocator + Clone> Rc<T, A> {
1820    /// Makes a mutable reference into the given `Rc`.
1821    ///
1822    /// If there are other `Rc` pointers to the same allocation, then `make_mut` will
1823    /// [`clone`] the inner value to a new allocation to ensure unique ownership.  This is also
1824    /// referred to as clone-on-write.
1825    ///
1826    /// However, if there are no other `Rc` pointers to this allocation, but some [`Weak`]
1827    /// pointers, then the [`Weak`] pointers will be disassociated and the inner value will not
1828    /// be cloned.
1829    ///
1830    /// See also [`get_mut`], which will fail rather than cloning the inner value
1831    /// or disassociating [`Weak`] pointers.
1832    ///
1833    /// [`clone`]: Clone::clone
1834    /// [`get_mut`]: Rc::get_mut
1835    ///
1836    /// # Examples
1837    ///
1838    /// ```
1839    /// use std::rc::Rc;
1840    ///
1841    /// let mut data = Rc::new(5);
1842    ///
1843    /// *Rc::make_mut(&mut data) += 1;         // Won't clone anything
1844    /// let mut other_data = Rc::clone(&data); // Won't clone inner data
1845    /// *Rc::make_mut(&mut data) += 1;         // Clones inner data
1846    /// *Rc::make_mut(&mut data) += 1;         // Won't clone anything
1847    /// *Rc::make_mut(&mut other_data) *= 2;   // Won't clone anything
1848    ///
1849    /// // Now `data` and `other_data` point to different allocations.
1850    /// assert_eq!(*data, 8);
1851    /// assert_eq!(*other_data, 12);
1852    /// ```
1853    ///
1854    /// [`Weak`] pointers will be disassociated:
1855    ///
1856    /// ```
1857    /// use std::rc::Rc;
1858    ///
1859    /// let mut data = Rc::new(75);
1860    /// let weak = Rc::downgrade(&data);
1861    ///
1862    /// assert!(75 == *data);
1863    /// assert!(75 == *weak.upgrade().unwrap());
1864    ///
1865    /// *Rc::make_mut(&mut data) += 1;
1866    ///
1867    /// assert!(76 == *data);
1868    /// assert!(weak.upgrade().is_none());
1869    /// ```
1870    #[inline]
1871    #[stable(feature = "rc_unique", since = "1.4.0")]
1872    pub fn make_mut(this: &mut Self) -> &mut T {
1873        let size_of_val = size_of_val::<T>(&**this);
1874
1875        if Rc::strong_count(this) != 1 {
1876            // Gotta clone the data, there are other Rcs.
1877
1878            let this_data_ref: &T = &**this;
1879            // `in_progress` drops the allocation if we panic before finishing initializing it.
1880            let mut in_progress: UniqueRcUninit<T, A> =
1881                UniqueRcUninit::new(this_data_ref, this.alloc.clone());
1882
1883            // Initialize with clone of this.
1884            let initialized_clone = unsafe {
1885                // Clone. If the clone panics, `in_progress` will be dropped and clean up.
1886                this_data_ref.clone_to_uninit(in_progress.data_ptr().cast());
1887                // Cast type of pointer, now that it is initialized.
1888                in_progress.into_rc()
1889            };
1890
1891            // Replace `this` with newly constructed Rc.
1892            *this = initialized_clone;
1893        } else if Rc::weak_count(this) != 0 {
1894            // Can just steal the data, all that's left is Weaks
1895
1896            // We don't need panic-protection like the above branch does, but we might as well
1897            // use the same mechanism.
1898            let mut in_progress: UniqueRcUninit<T, A> =
1899                UniqueRcUninit::new(&**this, this.alloc.clone());
1900            unsafe {
1901                // Initialize `in_progress` with move of **this.
1902                // We have to express this in terms of bytes because `T: ?Sized`; there is no
1903                // operation that just copies a value based on its `size_of_val()`.
1904                ptr::copy_nonoverlapping(
1905                    ptr::from_ref(&**this).cast::<u8>(),
1906                    in_progress.data_ptr().cast::<u8>(),
1907                    size_of_val,
1908                );
1909
1910                this.inner().dec_strong();
1911                // Remove implicit strong-weak ref (no need to craft a fake
1912                // Weak here -- we know other Weaks can clean up for us)
1913                this.inner().dec_weak();
1914                // Replace `this` with newly constructed Rc that has the moved data.
1915                ptr::write(this, in_progress.into_rc());
1916            }
1917        }
1918        // This unsafety is ok because we're guaranteed that the pointer
1919        // returned is the *only* pointer that will ever be returned to T. Our
1920        // reference count is guaranteed to be 1 at this point, and we required
1921        // the `Rc<T>` itself to be `mut`, so we're returning the only possible
1922        // reference to the allocation.
1923        unsafe { &mut this.ptr.as_mut().value }
1924    }
1925}
1926
1927impl<T: Clone, A: Allocator> Rc<T, A> {
1928    /// If we have the only reference to `T` then unwrap it. Otherwise, clone `T` and return the
1929    /// clone.
1930    ///
1931    /// Assuming `rc_t` is of type `Rc<T>`, this function is functionally equivalent to
1932    /// `(*rc_t).clone()`, but will avoid cloning the inner value where possible.
1933    ///
1934    /// # Examples
1935    ///
1936    /// ```
1937    /// # use std::{ptr, rc::Rc};
1938    /// let inner = String::from("test");
1939    /// let ptr = inner.as_ptr();
1940    ///
1941    /// let rc = Rc::new(inner);
1942    /// let inner = Rc::unwrap_or_clone(rc);
1943    /// // The inner value was not cloned
1944    /// assert!(ptr::eq(ptr, inner.as_ptr()));
1945    ///
1946    /// let rc = Rc::new(inner);
1947    /// let rc2 = rc.clone();
1948    /// let inner = Rc::unwrap_or_clone(rc);
1949    /// // Because there were 2 references, we had to clone the inner value.
1950    /// assert!(!ptr::eq(ptr, inner.as_ptr()));
1951    /// // `rc2` is the last reference, so when we unwrap it we get back
1952    /// // the original `String`.
1953    /// let inner = Rc::unwrap_or_clone(rc2);
1954    /// assert!(ptr::eq(ptr, inner.as_ptr()));
1955    /// ```
1956    #[inline]
1957    #[stable(feature = "arc_unwrap_or_clone", since = "1.76.0")]
1958    pub fn unwrap_or_clone(this: Self) -> T {
1959        Rc::try_unwrap(this).unwrap_or_else(|rc| (*rc).clone())
1960    }
1961}
1962
1963impl<A: Allocator> Rc<dyn Any, A> {
1964    /// Attempts to downcast the `Rc<dyn Any>` to a concrete type.
1965    ///
1966    /// # Examples
1967    ///
1968    /// ```
1969    /// use std::any::Any;
1970    /// use std::rc::Rc;
1971    ///
1972    /// fn print_if_string(value: Rc<dyn Any>) {
1973    ///     if let Ok(string) = value.downcast::<String>() {
1974    ///         println!("String ({}): {}", string.len(), string);
1975    ///     }
1976    /// }
1977    ///
1978    /// let my_string = "Hello World".to_string();
1979    /// print_if_string(Rc::new(my_string));
1980    /// print_if_string(Rc::new(0i8));
1981    /// ```
1982    #[inline]
1983    #[stable(feature = "rc_downcast", since = "1.29.0")]
1984    pub fn downcast<T: Any>(self) -> Result<Rc<T, A>, Self> {
1985        if (*self).is::<T>() {
1986            unsafe {
1987                let (ptr, alloc) = Rc::into_inner_with_allocator(self);
1988                Ok(Rc::from_inner_in(ptr.cast(), alloc))
1989            }
1990        } else {
1991            Err(self)
1992        }
1993    }
1994
1995    /// Downcasts the `Rc<dyn Any>` to a concrete type.
1996    ///
1997    /// For a safe alternative see [`downcast`].
1998    ///
1999    /// # Examples
2000    ///
2001    /// ```
2002    /// #![feature(downcast_unchecked)]
2003    ///
2004    /// use std::any::Any;
2005    /// use std::rc::Rc;
2006    ///
2007    /// let x: Rc<dyn Any> = Rc::new(1_usize);
2008    ///
2009    /// unsafe {
2010    ///     assert_eq!(*x.downcast_unchecked::<usize>(), 1);
2011    /// }
2012    /// ```
2013    ///
2014    /// # Safety
2015    ///
2016    /// The contained value must be of type `T`. Calling this method
2017    /// with the incorrect type is *undefined behavior*.
2018    ///
2020    /// [`downcast`]: Self::downcast
2021    #[inline]
2022    #[unstable(feature = "downcast_unchecked", issue = "90850")]
2023    pub unsafe fn downcast_unchecked<T: Any>(self) -> Rc<T, A> {
2024        unsafe {
2025            let (ptr, alloc) = Rc::into_inner_with_allocator(self);
2026            Rc::from_inner_in(ptr.cast(), alloc)
2027        }
2028    }
2029}
2030
2031impl<T: ?Sized> Rc<T> {
2032    /// Allocates an `RcInner<T>` with sufficient space for
2033    /// a possibly-unsized inner value where the value has the layout provided.
2034    ///
2035    /// The function `mem_to_rc_inner` is called with the data pointer
2036    /// and must return a (potentially fat) pointer for the `RcInner<T>`.
2037    #[cfg(not(no_global_oom_handling))]
2038    unsafe fn allocate_for_layout(
2039        value_layout: Layout,
2040        allocate: impl FnOnce(Layout) -> Result<NonNull<[u8]>, AllocError>,
2041        mem_to_rc_inner: impl FnOnce(*mut u8) -> *mut RcInner<T>,
2042    ) -> *mut RcInner<T> {
2043        let layout = rc_inner_layout_for_value_layout(value_layout);
2044        unsafe {
2045            Rc::try_allocate_for_layout(value_layout, allocate, mem_to_rc_inner)
2046                .unwrap_or_else(|_| handle_alloc_error(layout))
2047        }
2048    }
2049
2050    /// Allocates an `RcInner<T>` with sufficient space for
2051    /// a possibly-unsized inner value where the value has the layout provided,
2052    /// returning an error if allocation fails.
2053    ///
2054    /// The function `mem_to_rc_inner` is called with the data pointer
2055    /// and must return a (potentially fat) pointer for the `RcInner<T>`.
2056    #[inline]
2057    unsafe fn try_allocate_for_layout(
2058        value_layout: Layout,
2059        allocate: impl FnOnce(Layout) -> Result<NonNull<[u8]>, AllocError>,
2060        mem_to_rc_inner: impl FnOnce(*mut u8) -> *mut RcInner<T>,
2061    ) -> Result<*mut RcInner<T>, AllocError> {
2062        let layout = rc_inner_layout_for_value_layout(value_layout);
2063
2064        // Allocate for the layout.
2065        let ptr = allocate(layout)?;
2066
2067        // Initialize the RcInner
2068        let inner = mem_to_rc_inner(ptr.as_non_null_ptr().as_ptr());
2069        unsafe {
2070            debug_assert_eq!(Layout::for_value_raw(inner), layout);
2071
2072            (&raw mut (*inner).strong).write(Cell::new(1));
2073            (&raw mut (*inner).weak).write(Cell::new(1));
2074        }
2075
2076        Ok(inner)
2077    }
2078}
2079
2080impl<T: ?Sized, A: Allocator> Rc<T, A> {
2081    /// Allocates an `RcInner<T>` with sufficient space for an unsized inner value
2082    #[cfg(not(no_global_oom_handling))]
2083    unsafe fn allocate_for_ptr_in(ptr: *const T, alloc: &A) -> *mut RcInner<T> {
2084        // Allocate for the `RcInner<T>` using the given value.
2085        unsafe {
2086            Rc::<T>::allocate_for_layout(
2087                Layout::for_value_raw(ptr),
2088                |layout| alloc.allocate(layout),
2089                |mem| mem.with_metadata_of(ptr as *const RcInner<T>),
2090            )
2091        }
2092    }
2093
2094    #[cfg(not(no_global_oom_handling))]
2095    fn from_box_in(src: Box<T, A>) -> Rc<T, A> {
2096        unsafe {
2097            let value_size = size_of_val(&*src);
2098            let ptr = Self::allocate_for_ptr_in(&*src, Box::allocator(&src));
2099
2100            // Copy value as bytes
2101            ptr::copy_nonoverlapping(
2102                (&raw const *src) as *const u8,
2103                (&raw mut (*ptr).value) as *mut u8,
2104                value_size,
2105            );
2106
2107            // Free the allocation without dropping its contents
2108            let (bptr, alloc) = Box::into_raw_with_allocator(src);
2109            let src = Box::from_raw_in(bptr as *mut mem::ManuallyDrop<T>, alloc.by_ref());
2110            drop(src);
2111
2112            Self::from_ptr_in(ptr, alloc)
2113        }
2114    }
2115}
2116
2117impl<T> Rc<[T]> {
2118    /// Allocates an `RcInner<[T]>` with the given length.
2119    #[cfg(not(no_global_oom_handling))]
2120    unsafe fn allocate_for_slice(len: usize) -> *mut RcInner<[T]> {
2121        unsafe {
2122            Self::allocate_for_layout(
2123                Layout::array::<T>(len).unwrap(),
2124                |layout| Global.allocate(layout),
2125                |mem| ptr::slice_from_raw_parts_mut(mem.cast::<T>(), len) as *mut RcInner<[T]>,
2126            )
2127        }
2128    }
2129
2130    /// Copy elements from slice into newly allocated `Rc<[T]>`
2131    ///
2132    /// Unsafe because the caller must either take ownership or bind `T: Copy`
2133    #[cfg(not(no_global_oom_handling))]
2134    unsafe fn copy_from_slice(v: &[T]) -> Rc<[T]> {
2135        unsafe {
2136            let ptr = Self::allocate_for_slice(v.len());
2137            ptr::copy_nonoverlapping(v.as_ptr(), (&raw mut (*ptr).value) as *mut T, v.len());
2138            Self::from_ptr(ptr)
2139        }
2140    }
2141
2142    /// Constructs an `Rc<[T]>` from an iterator known to be of a certain size.
2143    ///
2144    /// Behavior is undefined should the size be wrong.
2145    #[cfg(not(no_global_oom_handling))]
2146    unsafe fn from_iter_exact(iter: impl Iterator<Item = T>, len: usize) -> Rc<[T]> {
2147        // Panic guard while cloning T elements.
2148        // In the event of a panic, elements that have been written
2149        // into the new RcInner will be dropped, then the memory freed.
2150        struct Guard<T> {
2151            mem: NonNull<u8>,
2152            elems: *mut T,
2153            layout: Layout,
2154            n_elems: usize,
2155        }
2156
2157        impl<T> Drop for Guard<T> {
2158            fn drop(&mut self) {
2159                unsafe {
2160                    let slice = from_raw_parts_mut(self.elems, self.n_elems);
2161                    ptr::drop_in_place(slice);
2162
2163                    Global.deallocate(self.mem, self.layout);
2164                }
2165            }
2166        }
2167
2168        unsafe {
2169            let ptr = Self::allocate_for_slice(len);
2170
2171            let mem = ptr as *mut _ as *mut u8;
2172            let layout = Layout::for_value_raw(ptr);
2173
2174            // Pointer to first element
2175            let elems = (&raw mut (*ptr).value) as *mut T;
2176
2177            let mut guard = Guard { mem: NonNull::new_unchecked(mem), elems, layout, n_elems: 0 };
2178
2179            for (i, item) in iter.enumerate() {
2180                ptr::write(elems.add(i), item);
2181                guard.n_elems += 1;
2182            }
2183
2184            // All clear. Forget the guard so it doesn't free the new RcInner.
2185            mem::forget(guard);
2186
2187            Self::from_ptr(ptr)
2188        }
2189    }
2190}
2191
2192impl<T, A: Allocator> Rc<[T], A> {
2193    /// Allocates an `RcInner<[T]>` with the given length.
2194    #[inline]
2195    #[cfg(not(no_global_oom_handling))]
2196    unsafe fn allocate_for_slice_in(len: usize, alloc: &A) -> *mut RcInner<[T]> {
2197        unsafe {
2198            Rc::<[T]>::allocate_for_layout(
2199                Layout::array::<T>(len).unwrap(),
2200                |layout| alloc.allocate(layout),
2201                |mem| ptr::slice_from_raw_parts_mut(mem.cast::<T>(), len) as *mut RcInner<[T]>,
2202            )
2203        }
2204    }
2205}
2206
2207#[cfg(not(no_global_oom_handling))]
2208/// Specialization trait used for `From<&[T]>`.
2209trait RcFromSlice<T> {
2210    fn from_slice(slice: &[T]) -> Self;
2211}
2212
2213#[cfg(not(no_global_oom_handling))]
2214impl<T: Clone> RcFromSlice<T> for Rc<[T]> {
2215    #[inline]
2216    default fn from_slice(v: &[T]) -> Self {
2217        unsafe { Self::from_iter_exact(v.iter().cloned(), v.len()) }
2218    }
2219}
2220
2221#[cfg(not(no_global_oom_handling))]
2222impl<T: Copy> RcFromSlice<T> for Rc<[T]> {
2223    #[inline]
2224    fn from_slice(v: &[T]) -> Self {
2225        unsafe { Rc::copy_from_slice(v) }
2226    }
2227}
2228
2229#[stable(feature = "rust1", since = "1.0.0")]
2230impl<T: ?Sized, A: Allocator> Deref for Rc<T, A> {
2231    type Target = T;
2232
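    /// Dereferences the `Rc` to a shared reference to the inner value.
    ///
    /// A brief sketch of how `Deref` lets `Rc<T>` expose `T`'s API directly:
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let s: Rc<String> = Rc::new("deref".to_owned());
    /// // Method calls auto-deref through the `Rc` to the inner `String`.
    /// assert_eq!(s.len(), 5);
    /// assert_eq!(&*s, "deref");
    /// ```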
2233    #[inline(always)]
2234    fn deref(&self) -> &T {
2235        &self.inner().value
2236    }
2237}
2238
2239#[unstable(feature = "pin_coerce_unsized_trait", issue = "123430")]
2240unsafe impl<T: ?Sized, A: Allocator> PinCoerceUnsized for Rc<T, A> {}
2241
2242//#[unstable(feature = "unique_rc_arc", issue = "112566")]
2243#[unstable(feature = "pin_coerce_unsized_trait", issue = "123430")]
2244unsafe impl<T: ?Sized, A: Allocator> PinCoerceUnsized for UniqueRc<T, A> {}
2245
2246#[unstable(feature = "pin_coerce_unsized_trait", issue = "123430")]
2247unsafe impl<T: ?Sized, A: Allocator> PinCoerceUnsized for Weak<T, A> {}
2248
2249#[unstable(feature = "deref_pure_trait", issue = "87121")]
2250unsafe impl<T: ?Sized, A: Allocator> DerefPure for Rc<T, A> {}
2251
2252//#[unstable(feature = "unique_rc_arc", issue = "112566")]
2253#[unstable(feature = "deref_pure_trait", issue = "87121")]
2254unsafe impl<T: ?Sized, A: Allocator> DerefPure for UniqueRc<T, A> {}
2255
2256#[unstable(feature = "legacy_receiver_trait", issue = "none")]
2257impl<T: ?Sized> LegacyReceiver for Rc<T> {}
2258
2259#[stable(feature = "rust1", since = "1.0.0")]
2260unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for Rc<T, A> {
2261    /// Drops the `Rc`.
2262    ///
2263    /// This will decrement the strong reference count. If the strong reference
2264    /// count reaches zero then the only other references (if any) are
2265    /// [`Weak`], so we `drop` the inner value.
2266    ///
2267    /// # Examples
2268    ///
2269    /// ```
2270    /// use std::rc::Rc;
2271    ///
2272    /// struct Foo;
2273    ///
2274    /// impl Drop for Foo {
2275    ///     fn drop(&mut self) {
2276    ///         println!("dropped!");
2277    ///     }
2278    /// }
2279    ///
2280    /// let foo  = Rc::new(Foo);
2281    /// let foo2 = Rc::clone(&foo);
2282    ///
2283    /// drop(foo);    // Doesn't print anything
2284    /// drop(foo2);   // Prints "dropped!"
2285    /// ```
2286    #[inline]
2287    fn drop(&mut self) {
2288        unsafe {
2289            self.inner().dec_strong();
2290            if self.inner().strong() == 0 {
2291                self.drop_slow();
2292            }
2293        }
2294    }
2295}
2296
2297#[stable(feature = "rust1", since = "1.0.0")]
2298impl<T: ?Sized, A: Allocator + Clone> Clone for Rc<T, A> {
2299    /// Makes a clone of the `Rc` pointer.
2300    ///
2301    /// This creates another pointer to the same allocation, increasing the
2302    /// strong reference count.
2303    ///
2304    /// # Examples
2305    ///
2306    /// ```
2307    /// use std::rc::Rc;
2308    ///
2309    /// let five = Rc::new(5);
2310    ///
2311    /// let _ = Rc::clone(&five);
2312    /// ```
2313    #[inline]
2314    fn clone(&self) -> Self {
2315        unsafe {
2316            self.inner().inc_strong();
2317            Self::from_inner_in(self.ptr, self.alloc.clone())
2318        }
2319    }
2320}
2321
2322#[unstable(feature = "ergonomic_clones", issue = "132290")]
2323impl<T: ?Sized, A: Allocator + Clone> UseCloned for Rc<T, A> {}
2324
2325#[cfg(not(no_global_oom_handling))]
2326#[stable(feature = "rust1", since = "1.0.0")]
2327impl<T: Default> Default for Rc<T> {
2328    /// Creates a new `Rc<T>`, with the `Default` value for `T`.
2329    ///
2330    /// # Examples
2331    ///
2332    /// ```
2333    /// use std::rc::Rc;
2334    ///
2335    /// let x: Rc<i32> = Default::default();
2336    /// assert_eq!(*x, 0);
2337    /// ```
2338    #[inline]
2339    fn default() -> Self {
2340        unsafe {
2341            Self::from_inner(
2342                Box::leak(Box::write(
2343                    Box::new_uninit(),
2344                    RcInner { strong: Cell::new(1), weak: Cell::new(1), value: T::default() },
2345                ))
2346                .into(),
2347            )
2348        }
2349    }
2350}
2351
2352#[cfg(not(no_global_oom_handling))]
2353#[stable(feature = "more_rc_default_impls", since = "1.80.0")]
2354impl Default for Rc<str> {
2355    /// Creates an empty `str` inside an `Rc`.
2356    ///
2357    /// This may or may not share an allocation with other Rcs on the same thread.
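    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let s: Rc<str> = Default::default();
    /// assert_eq!("", &*s);
    /// ```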
2358    #[inline]
2359    fn default() -> Self {
2360        let rc = Rc::<[u8]>::default();
2361        // `[u8]` has the same layout as `str`.
2362        unsafe { Rc::from_raw(Rc::into_raw(rc) as *const str) }
2363    }
2364}
2365
2366#[cfg(not(no_global_oom_handling))]
2367#[stable(feature = "more_rc_default_impls", since = "1.80.0")]
2368impl<T> Default for Rc<[T]> {
2369    /// Creates an empty `[T]` inside an `Rc`.
2370    ///
2371    /// This may or may not share an allocation with other Rcs on the same thread.
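    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let v: Rc<[i32]> = Default::default();
    /// assert!(v.is_empty());
    /// ```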
2372    #[inline]
2373    fn default() -> Self {
2374        let arr: [T; 0] = [];
2375        Rc::from(arr)
2376    }
2377}
2378
2379#[cfg(not(no_global_oom_handling))]
2380#[stable(feature = "pin_default_impls", since = "CURRENT_RUSTC_VERSION")]
2381impl<T> Default for Pin<Rc<T>>
2382where
2383    T: ?Sized,
2384    Rc<T>: Default,
2385{
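    /// Creates a pinned `Rc` holding `T`'s default value.
    ///
    /// A small sketch of the behavior:
    ///
    /// ```
    /// use std::pin::Pin;
    /// use std::rc::Rc;
    ///
    /// let pinned: Pin<Rc<u32>> = Default::default();
    /// assert_eq!(*pinned, 0);
    /// ```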
2386    #[inline]
2387    fn default() -> Self {
2388        unsafe { Pin::new_unchecked(Rc::<T>::default()) }
2389    }
2390}
2391
2392#[stable(feature = "rust1", since = "1.0.0")]
2393trait RcEqIdent<T: ?Sized + PartialEq, A: Allocator> {
2394    fn eq(&self, other: &Rc<T, A>) -> bool;
2395    fn ne(&self, other: &Rc<T, A>) -> bool;
2396}
2397
2398#[stable(feature = "rust1", since = "1.0.0")]
2399impl<T: ?Sized + PartialEq, A: Allocator> RcEqIdent<T, A> for Rc<T, A> {
2400    #[inline]
2401    default fn eq(&self, other: &Rc<T, A>) -> bool {
2402        **self == **other
2403    }
2404
2405    #[inline]
2406    default fn ne(&self, other: &Rc<T, A>) -> bool {
2407        **self != **other
2408    }
2409}
2410
2411// Hack to allow specializing on `Eq` even though `Eq` has a method.
2412#[rustc_unsafe_specialization_marker]
2413pub(crate) trait MarkerEq: PartialEq<Self> {}
2414
2415impl<T: Eq> MarkerEq for T {}
2416
2417/// We're doing this specialization here, and not as a more general optimization on `&T`, because it
2418/// would otherwise add a cost to all equality checks on refs. We assume that `Rc`s are used to
2419/// store large values, that are slow to clone, but also heavy to check for equality, causing this
2420/// cost to pay off more easily. It's also more likely to have two `Rc` clones that point to
2421/// the same value, than two `&T`s.
2422///
2423/// We can only do this when `T: Eq` as a `PartialEq` might be deliberately irreflexive.
2424#[stable(feature = "rust1", since = "1.0.0")]
2425impl<T: ?Sized + MarkerEq, A: Allocator> RcEqIdent<T, A> for Rc<T, A> {
2426    #[inline]
2427    fn eq(&self, other: &Rc<T, A>) -> bool {
2428        Rc::ptr_eq(self, other) || **self == **other
2429    }
2430
2431    #[inline]
2432    fn ne(&self, other: &Rc<T, A>) -> bool {
2433        !Rc::ptr_eq(self, other) && **self != **other
2434    }
2435}
2436
2437#[stable(feature = "rust1", since = "1.0.0")]
2438impl<T: ?Sized + PartialEq, A: Allocator> PartialEq for Rc<T, A> {
2439    /// Equality for two `Rc`s.
2440    ///
2441    /// Two `Rc`s are equal if their inner values are equal, even if they are
2442    /// stored in different allocations.
2443    ///
2444    /// If `T` also implements `Eq` (implying reflexivity of equality),
2445    /// two `Rc`s that point to the same allocation are
2446    /// always equal.
2447    ///
2448    /// # Examples
2449    ///
2450    /// ```
2451    /// use std::rc::Rc;
2452    ///
2453    /// let five = Rc::new(5);
2454    ///
2455    /// assert!(five == Rc::new(5));
2456    /// ```
2457    #[inline]
2458    fn eq(&self, other: &Rc<T, A>) -> bool {
2459        RcEqIdent::eq(self, other)
2460    }
2461
2462    /// Inequality for two `Rc`s.
2463    ///
2464    /// Two `Rc`s are not equal if their inner values are not equal.
2465    ///
2466    /// If `T` also implements `Eq` (implying reflexivity of equality),
2467    /// two `Rc`s that point to the same allocation are
2468    /// always equal.
2469    ///
2470    /// # Examples
2471    ///
2472    /// ```
2473    /// use std::rc::Rc;
2474    ///
2475    /// let five = Rc::new(5);
2476    ///
2477    /// assert!(five != Rc::new(6));
2478    /// ```
2479    #[inline]
2480    fn ne(&self, other: &Rc<T, A>) -> bool {
2481        RcEqIdent::ne(self, other)
2482    }
2483}
2484
2485#[stable(feature = "rust1", since = "1.0.0")]
2486impl<T: ?Sized + Eq, A: Allocator> Eq for Rc<T, A> {}
2487
2488#[stable(feature = "rust1", since = "1.0.0")]
2489impl<T: ?Sized + PartialOrd, A: Allocator> PartialOrd for Rc<T, A> {
2490    /// Partial comparison for two `Rc`s.
2491    ///
2492    /// The two are compared by calling `partial_cmp()` on their inner values.
2493    ///
2494    /// # Examples
2495    ///
2496    /// ```
2497    /// use std::rc::Rc;
2498    /// use std::cmp::Ordering;
2499    ///
2500    /// let five = Rc::new(5);
2501    ///
2502    /// assert_eq!(Some(Ordering::Less), five.partial_cmp(&Rc::new(6)));
2503    /// ```
2504    #[inline(always)]
2505    fn partial_cmp(&self, other: &Rc<T, A>) -> Option<Ordering> {
2506        (**self).partial_cmp(&**other)
2507    }
2508
2509    /// Less-than comparison for two `Rc`s.
2510    ///
2511    /// The two are compared by calling `<` on their inner values.
2512    ///
2513    /// # Examples
2514    ///
2515    /// ```
2516    /// use std::rc::Rc;
2517    ///
2518    /// let five = Rc::new(5);
2519    ///
2520    /// assert!(five < Rc::new(6));
2521    /// ```
2522    #[inline(always)]
2523    fn lt(&self, other: &Rc<T, A>) -> bool {
2524        **self < **other
2525    }
2526
2527    /// 'Less than or equal to' comparison for two `Rc`s.
2528    ///
2529    /// The two are compared by calling `<=` on their inner values.
2530    ///
2531    /// # Examples
2532    ///
2533    /// ```
2534    /// use std::rc::Rc;
2535    ///
2536    /// let five = Rc::new(5);
2537    ///
2538    /// assert!(five <= Rc::new(5));
2539    /// ```
2540    #[inline(always)]
2541    fn le(&self, other: &Rc<T, A>) -> bool {
2542        **self <= **other
2543    }
2544
2545    /// Greater-than comparison for two `Rc`s.
2546    ///
2547    /// The two are compared by calling `>` on their inner values.
2548    ///
2549    /// # Examples
2550    ///
2551    /// ```
2552    /// use std::rc::Rc;
2553    ///
2554    /// let five = Rc::new(5);
2555    ///
2556    /// assert!(five > Rc::new(4));
2557    /// ```
2558    #[inline(always)]
2559    fn gt(&self, other: &Rc<T, A>) -> bool {
2560        **self > **other
2561    }
2562
2563    /// 'Greater than or equal to' comparison for two `Rc`s.
2564    ///
2565    /// The two are compared by calling `>=` on their inner values.
2566    ///
2567    /// # Examples
2568    ///
2569    /// ```
2570    /// use std::rc::Rc;
2571    ///
2572    /// let five = Rc::new(5);
2573    ///
2574    /// assert!(five >= Rc::new(5));
2575    /// ```
2576    #[inline(always)]
2577    fn ge(&self, other: &Rc<T, A>) -> bool {
2578        **self >= **other
2579    }
2580}
2581
2582#[stable(feature = "rust1", since = "1.0.0")]
2583impl<T: ?Sized + Ord, A: Allocator> Ord for Rc<T, A> {
2584    /// Comparison for two `Rc`s.
2585    ///
2586    /// The two are compared by calling `cmp()` on their inner values.
2587    ///
2588    /// # Examples
2589    ///
2590    /// ```
2591    /// use std::rc::Rc;
2592    /// use std::cmp::Ordering;
2593    ///
2594    /// let five = Rc::new(5);
2595    ///
2596    /// assert_eq!(Ordering::Less, five.cmp(&Rc::new(6)));
2597    /// ```
2598    #[inline]
2599    fn cmp(&self, other: &Rc<T, A>) -> Ordering {
2600        (**self).cmp(&**other)
2601    }
2602}
2603
2604#[stable(feature = "rust1", since = "1.0.0")]
2605impl<T: ?Sized + Hash, A: Allocator> Hash for Rc<T, A> {
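    /// Hashes the inner value, so an `Rc<T>` hashes identically to the `T` it wraps.
    ///
    /// A quick sketch of this property:
    ///
    /// ```
    /// use std::collections::hash_map::DefaultHasher;
    /// use std::hash::{Hash, Hasher};
    /// use std::rc::Rc;
    ///
    /// fn hash_of(v: impl Hash) -> u64 {
    ///     let mut hasher = DefaultHasher::new();
    ///     v.hash(&mut hasher);
    ///     hasher.finish()
    /// }
    ///
    /// assert_eq!(hash_of(Rc::new(7_u32)), hash_of(7_u32));
    /// ```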
2606    fn hash<H: Hasher>(&self, state: &mut H) {
2607        (**self).hash(state);
2608    }
2609}
2610
2611#[stable(feature = "rust1", since = "1.0.0")]
2612impl<T: ?Sized + fmt::Display, A: Allocator> fmt::Display for Rc<T, A> {
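    /// Formats the inner value with `Display`, so an `Rc<T>` prints exactly like `T`.
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let rc = Rc::new(5);
    /// assert_eq!(format!("{rc}"), "5");
    /// ```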
2613    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
2614        fmt::Display::fmt(&**self, f)
2615    }
2616}
2617
2618#[stable(feature = "rust1", since = "1.0.0")]
2619impl<T: ?Sized + fmt::Debug, A: Allocator> fmt::Debug for Rc<T, A> {
2620    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
2621        fmt::Debug::fmt(&**self, f)
2622    }
2623}
2624
2625#[stable(feature = "rust1", since = "1.0.0")]
2626impl<T: ?Sized, A: Allocator> fmt::Pointer for Rc<T, A> {
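    /// Formats the address of the inner value, as `{:p}` would for a plain reference.
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let rc = Rc::new(5);
    /// let clone = Rc::clone(&rc);
    /// // Clones share one allocation, so they format to the same address.
    /// assert_eq!(format!("{:p}", rc), format!("{:p}", clone));
    /// ```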
2627    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
2628        fmt::Pointer::fmt(&(&raw const **self), f)
2629    }
2630}
2631
2632#[cfg(not(no_global_oom_handling))]
2633#[stable(feature = "from_for_ptrs", since = "1.6.0")]
2634impl<T> From<T> for Rc<T> {
2635    /// Converts a generic type `T` into an `Rc<T>`.
2636    ///
2637    /// The conversion allocates on the heap and moves `t`
2638    /// from the stack into it.
2639    ///
2640    /// # Example
2641    /// ```rust
2642    /// # use std::rc::Rc;
2643    /// let x = 5;
2644    /// let rc = Rc::new(5);
2645    ///
2646    /// assert_eq!(Rc::from(x), rc);
2647    /// ```
2648    fn from(t: T) -> Self {
2649        Rc::new(t)
2650    }
2651}
2652
2653#[cfg(not(no_global_oom_handling))]
2654#[stable(feature = "shared_from_array", since = "1.74.0")]
2655impl<T, const N: usize> From<[T; N]> for Rc<[T]> {
2656    /// Converts a [`[T; N]`](prim@array) into an `Rc<[T]>`.
2657    ///
2658    /// The conversion moves the array into a newly allocated `Rc`.
2659    ///
2660    /// # Example
2661    ///
2662    /// ```
2663    /// # use std::rc::Rc;
2664    /// let original: [i32; 3] = [1, 2, 3];
2665    /// let shared: Rc<[i32]> = Rc::from(original);
2666    /// assert_eq!(&[1, 2, 3], &shared[..]);
2667    /// ```
2668    #[inline]
2669    fn from(v: [T; N]) -> Rc<[T]> {
2670        Rc::<[T; N]>::from(v)
2671    }
2672}
2673
2674#[cfg(not(no_global_oom_handling))]
2675#[stable(feature = "shared_from_slice", since = "1.21.0")]
2676impl<T: Clone> From<&[T]> for Rc<[T]> {
2677    /// Allocates a reference-counted slice and fills it by cloning `v`'s items.
2678    ///
2679    /// # Example
2680    ///
2681    /// ```
2682    /// # use std::rc::Rc;
2683    /// let original: &[i32] = &[1, 2, 3];
2684    /// let shared: Rc<[i32]> = Rc::from(original);
2685    /// assert_eq!(&[1, 2, 3], &shared[..]);
2686    /// ```
2687    #[inline]
2688    fn from(v: &[T]) -> Rc<[T]> {
2689        <Self as RcFromSlice<T>>::from_slice(v)
2690    }
2691}
2692
2693#[cfg(not(no_global_oom_handling))]
2694#[stable(feature = "shared_from_mut_slice", since = "1.84.0")]
2695impl<T: Clone> From<&mut [T]> for Rc<[T]> {
2696    /// Allocates a reference-counted slice and fills it by cloning `v`'s items.
2697    ///
2698    /// # Example
2699    ///
2700    /// ```
2701    /// # use std::rc::Rc;
2702    /// let mut original = [1, 2, 3];
2703    /// let original: &mut [i32] = &mut original;
2704    /// let shared: Rc<[i32]> = Rc::from(original);
2705    /// assert_eq!(&[1, 2, 3], &shared[..]);
2706    /// ```
2707    #[inline]
2708    fn from(v: &mut [T]) -> Rc<[T]> {
2709        Rc::from(&*v)
2710    }
2711}
2712
2713#[cfg(not(no_global_oom_handling))]
2714#[stable(feature = "shared_from_slice", since = "1.21.0")]
2715impl From<&str> for Rc<str> {
2716    /// Allocates a reference-counted string slice and copies `v` into it.
2717    ///
2718    /// # Example
2719    ///
2720    /// ```
2721    /// # use std::rc::Rc;
2722    /// let shared: Rc<str> = Rc::from("statue");
2723    /// assert_eq!("statue", &shared[..]);
2724    /// ```
2725    #[inline]
2726    fn from(v: &str) -> Rc<str> {
2727        let rc = Rc::<[u8]>::from(v.as_bytes());
2728        unsafe { Rc::from_raw(Rc::into_raw(rc) as *const str) }
2729    }
2730}
2731
2732#[cfg(not(no_global_oom_handling))]
2733#[stable(feature = "shared_from_mut_slice", since = "1.84.0")]
2734impl From<&mut str> for Rc<str> {
2735    /// Allocates a reference-counted string slice and copies `v` into it.
2736    ///
2737    /// # Example
2738    ///
2739    /// ```
2740    /// # use std::rc::Rc;
2741    /// let mut original = String::from("statue");
2742    /// let original: &mut str = &mut original;
2743    /// let shared: Rc<str> = Rc::from(original);
2744    /// assert_eq!("statue", &shared[..]);
2745    /// ```
2746    #[inline]
2747    fn from(v: &mut str) -> Rc<str> {
2748        Rc::from(&*v)
2749    }
2750}
2751
2752#[cfg(not(no_global_oom_handling))]
2753#[stable(feature = "shared_from_slice", since = "1.21.0")]
2754impl From<String> for Rc<str> {
2755    /// Allocates a reference-counted string slice and copies `v` into it.
2756    ///
2757    /// # Example
2758    ///
2759    /// ```
2760    /// # use std::rc::Rc;
2761    /// let original: String = "statue".to_owned();
2762    /// let shared: Rc<str> = Rc::from(original);
2763    /// assert_eq!("statue", &shared[..]);
2764    /// ```
2765    #[inline]
2766    fn from(v: String) -> Rc<str> {
2767        Rc::from(&v[..])
2768    }
2769}
2770
2771#[cfg(not(no_global_oom_handling))]
2772#[stable(feature = "shared_from_slice", since = "1.21.0")]
2773impl<T: ?Sized, A: Allocator> From<Box<T, A>> for Rc<T, A> {
2774    /// Moves a boxed object to a new, reference-counted allocation.
2775    ///
2776    /// # Example
2777    ///
2778    /// ```
2779    /// # use std::rc::Rc;
2780    /// let original: Box<i32> = Box::new(1);
2781    /// let shared: Rc<i32> = Rc::from(original);
2782    /// assert_eq!(1, *shared);
2783    /// ```
2784    #[inline]
2785    fn from(v: Box<T, A>) -> Rc<T, A> {
2786        Rc::from_box_in(v)
2787    }
2788}
2789
2790#[cfg(not(no_global_oom_handling))]
2791#[stable(feature = "shared_from_slice", since = "1.21.0")]
2792impl<T, A: Allocator> From<Vec<T, A>> for Rc<[T], A> {
2793    /// Allocates a reference-counted slice and moves `v`'s items into it.
2794    ///
2795    /// # Example
2796    ///
2797    /// ```
2798    /// # use std::rc::Rc;
2799    /// let unique: Vec<i32> = vec![1, 2, 3];
2800    /// let shared: Rc<[i32]> = Rc::from(unique);
2801    /// assert_eq!(&[1, 2, 3], &shared[..]);
2802    /// ```
2803    #[inline]
2804    fn from(v: Vec<T, A>) -> Rc<[T], A> {
2805        unsafe {
2806            let (vec_ptr, len, cap, alloc) = v.into_raw_parts_with_alloc();
2807
2808            let rc_ptr = Self::allocate_for_slice_in(len, &alloc);
2809            ptr::copy_nonoverlapping(vec_ptr, (&raw mut (*rc_ptr).value) as *mut T, len);
2810
2811            // Create a `Vec<T, &A>` with length 0, to deallocate the buffer
2812            // without dropping its contents or the allocator
2813            let _ = Vec::from_raw_parts_in(vec_ptr, 0, cap, &alloc);
2814
2815            Self::from_ptr_in(rc_ptr, alloc)
2816        }
2817    }
2818}
2819
2820#[stable(feature = "shared_from_cow", since = "1.45.0")]
2821impl<'a, B> From<Cow<'a, B>> for Rc<B>
2822where
2823    B: ToOwned + ?Sized,
2824    Rc<B>: From<&'a B> + From<B::Owned>,
2825{
2826    /// Creates a reference-counted pointer from a clone-on-write pointer by
2827    /// copying its content.
2828    ///
2829    /// # Example
2830    ///
2831    /// ```rust
2832    /// # use std::rc::Rc;
2833    /// # use std::borrow::Cow;
2834    /// let cow: Cow<'_, str> = Cow::Borrowed("eggplant");
2835    /// let shared: Rc<str> = Rc::from(cow);
2836    /// assert_eq!("eggplant", &shared[..]);
2837    /// ```
2838    #[inline]
2839    fn from(cow: Cow<'a, B>) -> Rc<B> {
2840        match cow {
2841            Cow::Borrowed(s) => Rc::from(s),
2842            Cow::Owned(s) => Rc::from(s),
2843        }
2844    }
2845}
2846
2847#[stable(feature = "shared_from_str", since = "1.62.0")]
2848impl From<Rc<str>> for Rc<[u8]> {
2849    /// Converts a reference-counted string slice into a byte slice.
2850    ///
2851    /// # Example
2852    ///
2853    /// ```
2854    /// # use std::rc::Rc;
2855    /// let string: Rc<str> = Rc::from("eggplant");
2856    /// let bytes: Rc<[u8]> = Rc::from(string);
2857    /// assert_eq!("eggplant".as_bytes(), bytes.as_ref());
2858    /// ```
2859    #[inline]
2860    fn from(rc: Rc<str>) -> Self {
2861        // SAFETY: `str` has the same layout as `[u8]`.
2862        unsafe { Rc::from_raw(Rc::into_raw(rc) as *const [u8]) }
2863    }
2864}
2865
2866#[stable(feature = "boxed_slice_try_from", since = "1.43.0")]
2867impl<T, A: Allocator, const N: usize> TryFrom<Rc<[T], A>> for Rc<[T; N], A> {
2868    type Error = Rc<[T], A>;
2869
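    /// Attempts to convert a reference-counted slice into a reference-counted
    /// array, returning the original `Rc<[T]>` if the length is not exactly `N`.
    ///
    /// A short sketch:
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let slice: Rc<[i32]> = Rc::from([1, 2, 3]);
    /// let array: Rc<[i32; 3]> = slice.try_into().unwrap();
    /// assert_eq!(*array, [1, 2, 3]);
    /// ```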
2870    fn try_from(boxed_slice: Rc<[T], A>) -> Result<Self, Self::Error> {
2871        if boxed_slice.len() == N {
2872            let (ptr, alloc) = Rc::into_inner_with_allocator(boxed_slice);
2873            Ok(unsafe { Rc::from_inner_in(ptr.cast(), alloc) })
2874        } else {
2875            Err(boxed_slice)
2876        }
2877    }
2878}
2879
2880#[cfg(not(no_global_oom_handling))]
2881#[stable(feature = "shared_from_iter", since = "1.37.0")]
2882impl<T> FromIterator<T> for Rc<[T]> {
2883    /// Takes each element in the `Iterator` and collects it into an `Rc<[T]>`.
2884    ///
2885    /// # Performance characteristics
2886    ///
2887    /// ## The general case
2888    ///
2889    /// In the general case, collecting into `Rc<[T]>` is done by first
2890    /// collecting into a `Vec<T>`. That is, when writing the following:
2891    ///
2892    /// ```rust
2893    /// # use std::rc::Rc;
2894    /// let evens: Rc<[u8]> = (0..10).filter(|&x| x % 2 == 0).collect();
2895    /// # assert_eq!(&*evens, &[0, 2, 4, 6, 8]);
2896    /// ```
2897    ///
2898    /// this behaves as if we wrote:
2899    ///
2900    /// ```rust
2901    /// # use std::rc::Rc;
2902    /// let evens: Rc<[u8]> = (0..10).filter(|&x| x % 2 == 0)
2903    ///     .collect::<Vec<_>>() // The first set of allocations happens here.
2904    ///     .into(); // A second allocation for `Rc<[T]>` happens here.
2905    /// # assert_eq!(&*evens, &[0, 2, 4, 6, 8]);
2906    /// ```
2907    ///
2908    /// This will allocate as many times as needed for constructing the `Vec<T>`
2909    /// and then it will allocate once for turning the `Vec<T>` into the `Rc<[T]>`.
2910    ///
2911    /// ## Iterators of known length
2912    ///
2913    /// When your `Iterator` implements `TrustedLen` and is of an exact size,
2914    /// a single allocation will be made for the `Rc<[T]>`. For example:
2915    ///
2916    /// ```rust
2917    /// # use std::rc::Rc;
2918    /// let evens: Rc<[u8]> = (0..10).collect(); // Just a single allocation happens here.
2919    /// # assert_eq!(&*evens, &*(0..10).collect::<Vec<_>>());
2920    /// ```
2921    fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
2922        ToRcSlice::to_rc_slice(iter.into_iter())
2923    }
2924}
2925
2926/// Specialization trait used for collecting into `Rc<[T]>`.
2927#[cfg(not(no_global_oom_handling))]
2928trait ToRcSlice<T>: Iterator<Item = T> + Sized {
2929    fn to_rc_slice(self) -> Rc<[T]>;
2930}
2931
2932#[cfg(not(no_global_oom_handling))]
2933impl<T, I: Iterator<Item = T>> ToRcSlice<T> for I {
2934    default fn to_rc_slice(self) -> Rc<[T]> {
2935        self.collect::<Vec<T>>().into()
2936    }
2937}
2938
2939#[cfg(not(no_global_oom_handling))]
2940impl<T, I: iter::TrustedLen<Item = T>> ToRcSlice<T> for I {
2941    fn to_rc_slice(self) -> Rc<[T]> {
2942        // This is the case for a `TrustedLen` iterator.
2943        let (low, high) = self.size_hint();
2944        if let Some(high) = high {
2945            debug_assert_eq!(
2946                low,
2947                high,
2948                "TrustedLen iterator's size hint is not exact: {:?}",
2949                (low, high)
2950            );
2951
2952            unsafe {
2953                // SAFETY: The iterator has an exact length: `TrustedLen` guarantees that `low` is exact here.
2954                Rc::from_iter_exact(self, low)
2955            }
2956        } else {
2957            // TrustedLen contract guarantees that `upper_bound == None` implies an iterator
2958            // length exceeding `usize::MAX`.
2959            // The default implementation would collect into a vec which would panic.
2960            // Thus we panic here immediately without invoking `Vec` code.
2961            panic!("capacity overflow");
2962        }
2963    }
2964}
2965
2966/// `Weak` is a version of [`Rc`] that holds a non-owning reference to the
2967/// managed allocation.
2968///
2969/// The allocation is accessed by calling [`upgrade`] on the `Weak`
2970/// pointer, which returns an <code>[Option]<[Rc]\<T>></code>.
2971///
2972/// Since a `Weak` reference does not count towards ownership, it will not
2973/// prevent the value stored in the allocation from being dropped, and `Weak` itself makes no
2974/// guarantees about the value still being present. Thus it may return [`None`]
2975/// when [`upgrade`]d. Note however that a `Weak` reference *does* prevent the allocation
2976/// itself (the backing store) from being deallocated.
2977///
2978/// A `Weak` pointer is useful for keeping a temporary reference to the allocation
2979/// managed by [`Rc`] without preventing its inner value from being dropped. It is also used to
2980/// prevent circular references between [`Rc`] pointers, since mutual owning references
2981/// would never allow either [`Rc`] to be dropped. For example, a tree could
2982/// have strong [`Rc`] pointers from parent nodes to children, and `Weak`
2983/// pointers from children back to their parents.
2984///
2985/// The typical way to obtain a `Weak` pointer is to call [`Rc::downgrade`].
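///
/// A minimal sketch of the downgrade/upgrade life cycle:
///
/// ```
/// use std::rc::Rc;
///
/// let strong = Rc::new(42);
/// let weak = Rc::downgrade(&strong);
///
/// // While a strong pointer exists, `upgrade` succeeds.
/// assert_eq!(*weak.upgrade().unwrap(), 42);
///
/// // After the last `Rc` is dropped, the value is gone and `upgrade` fails.
/// drop(strong);
/// assert!(weak.upgrade().is_none());
/// ```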
2986///
2987/// [`upgrade`]: Weak::upgrade
2988#[stable(feature = "rc_weak", since = "1.4.0")]
2989#[rustc_diagnostic_item = "RcWeak"]
2990pub struct Weak<
2991    T: ?Sized,
2992    #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
2993> {
2994    // This is a `NonNull` to allow optimizing the size of this type in enums,
2995    // but it is not necessarily a valid pointer.
2996    // `Weak::new` sets this to `usize::MAX` so that it doesn’t need
2997    // to allocate space on the heap. That's not a value a real pointer
2998    // will ever have because RcInner has alignment at least 2.
2999    ptr: NonNull<RcInner<T>>,
3000    alloc: A,
3001}
3002
3003#[stable(feature = "rc_weak", since = "1.4.0")]
3004impl<T: ?Sized, A: Allocator> !Send for Weak<T, A> {}
3005#[stable(feature = "rc_weak", since = "1.4.0")]
3006impl<T: ?Sized, A: Allocator> !Sync for Weak<T, A> {}
3007
3008#[unstable(feature = "coerce_unsized", issue = "18598")]
3009impl<T: ?Sized + Unsize<U>, U: ?Sized, A: Allocator> CoerceUnsized<Weak<U, A>> for Weak<T, A> {}
3010
3011#[unstable(feature = "dispatch_from_dyn", issue = "none")]
3012impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Weak<U>> for Weak<T> {}
3013
3014impl<T> Weak<T> {
3015    /// Constructs a new `Weak<T>`, without allocating any memory.
3016    /// Calling [`upgrade`] on the return value always gives [`None`].
3017    ///
3018    /// [`upgrade`]: Weak::upgrade
3019    ///
3020    /// # Examples
3021    ///
3022    /// ```
3023    /// use std::rc::Weak;
3024    ///
3025    /// let empty: Weak<i64> = Weak::new();
3026    /// assert!(empty.upgrade().is_none());
3027    /// ```
3028    #[inline]
3029    #[stable(feature = "downgraded_weak", since = "1.10.0")]
3030    #[rustc_const_stable(feature = "const_weak_new", since = "1.73.0")]
3031    #[must_use]
3032    pub const fn new() -> Weak<T> {
3033        Weak { ptr: NonNull::without_provenance(NonZeroUsize::MAX), alloc: Global }
3034    }
3035}
3036
3037impl<T, A: Allocator> Weak<T, A> {
3038    /// Constructs a new `Weak<T>`, without allocating any memory, technically in the provided
3039    /// allocator.
3040    /// Calling [`upgrade`] on the return value always gives [`None`].
3041    ///
3042    /// [`upgrade`]: Weak::upgrade
3043    ///
3044    /// # Examples
3045    ///
3046    /// ```
3047    /// #![feature(allocator_api)]
3048    /// use std::{alloc::System, rc::Weak};
3049    /// let empty: Weak<i64, _> = Weak::new_in(System);
3050    /// assert!(empty.upgrade().is_none());
3051    /// ```
3052    #[inline]
3053    #[unstable(feature = "allocator_api", issue = "32838")]
3054    pub fn new_in(alloc: A) -> Weak<T, A> {
3055        Weak { ptr: NonNull::without_provenance(NonZeroUsize::MAX), alloc }
3056    }
3057}
3058
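/// Returns `true` if `ptr` holds the sentinel address (`usize::MAX`) that
/// `Weak::new` uses in place of a real allocation, i.e. if this pointer does
/// not point to an allocated `RcInner`.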
3059pub(crate) fn is_dangling<T: ?Sized>(ptr: *const T) -> bool {
3060    (ptr.cast::<()>()).addr() == usize::MAX
3061}
3062
3063/// Helper type to allow accessing the reference counts without
3064/// making any assertions about the data field.
3065struct WeakInner<'a> {
3066    weak: &'a Cell<usize>,
3067    strong: &'a Cell<usize>,
3068}
3069
3070impl<T: ?Sized> Weak<T> {
3071    /// Converts a raw pointer previously created by [`into_raw`] back into `Weak<T>`.
3072    ///
3073    /// This can be used to safely get a strong reference (by calling [`upgrade`]
3074    /// later) or to deallocate the weak count by dropping the `Weak<T>`.
3075    ///
3076    /// It takes ownership of one weak reference (with the exception of pointers created by [`new`],
3077    /// as these don't own anything; the method still works on them).
3078    ///
3079    /// # Safety
3080    ///
3081    /// The pointer must have originated from a call to [`into_raw`] and must still own its
3082    /// potential weak reference, and `ptr` must point to a block of memory allocated by the global allocator.
3083    ///
3084    /// It is allowed for the strong count to be 0 at the time of calling this. Nevertheless, this
3085    /// takes ownership of one weak reference currently represented as a raw pointer (the weak
3086    /// count is not modified by this operation) and therefore it must be paired with a previous
3087    /// call to [`into_raw`].
3088    ///
3089    /// # Examples
3090    ///
3091    /// ```
3092    /// use std::rc::{Rc, Weak};
3093    ///
3094    /// let strong = Rc::new("hello".to_owned());
3095    ///
3096    /// let raw_1 = Rc::downgrade(&strong).into_raw();
3097    /// let raw_2 = Rc::downgrade(&strong).into_raw();
3098    ///
3099    /// assert_eq!(2, Rc::weak_count(&strong));
3100    ///
3101    /// assert_eq!("hello", &*unsafe { Weak::from_raw(raw_1) }.upgrade().unwrap());
3102    /// assert_eq!(1, Rc::weak_count(&strong));
3103    ///
3104    /// drop(strong);
3105    ///
3106    /// // Decrement the last weak count.
3107    /// assert!(unsafe { Weak::from_raw(raw_2) }.upgrade().is_none());
3108    /// ```
3109    ///
3110    /// [`into_raw`]: Weak::into_raw
3111    /// [`upgrade`]: Weak::upgrade
3112    /// [`new`]: Weak::new
3113    #[inline]
3114    #[stable(feature = "weak_into_raw", since = "1.45.0")]
3115    pub unsafe fn from_raw(ptr: *const T) -> Self {
3116        unsafe { Self::from_raw_in(ptr, Global) }
3117    }
3118
3119    /// Consumes the `Weak<T>` and turns it into a raw pointer.
3120    ///
3121    /// This converts the weak pointer into a raw pointer, while still preserving the ownership of
3122    /// one weak reference (the weak count is not modified by this operation). It can be turned
3123    /// back into the `Weak<T>` with [`from_raw`].
3124    ///
3125    /// The same restrictions on accessing the target of the pointer as with
3126    /// [`as_ptr`] apply.
3127    ///
3128    /// # Examples
3129    ///
3130    /// ```
3131    /// use std::rc::{Rc, Weak};
3132    ///
3133    /// let strong = Rc::new("hello".to_owned());
3134    /// let weak = Rc::downgrade(&strong);
3135    /// let raw = weak.into_raw();
3136    ///
3137    /// assert_eq!(1, Rc::weak_count(&strong));
3138    /// assert_eq!("hello", unsafe { &*raw });
3139    ///
3140    /// drop(unsafe { Weak::from_raw(raw) });
3141    /// assert_eq!(0, Rc::weak_count(&strong));
3142    /// ```
3143    ///
3144    /// [`from_raw`]: Weak::from_raw
3145    /// [`as_ptr`]: Weak::as_ptr
3146    #[must_use = "losing the pointer will leak memory"]
3147    #[stable(feature = "weak_into_raw", since = "1.45.0")]
3148    pub fn into_raw(self) -> *const T {
3149        mem::ManuallyDrop::new(self).as_ptr()
3150    }
3151}
3152
3153impl<T: ?Sized, A: Allocator> Weak<T, A> {
3154    /// Returns a reference to the underlying allocator.
3155    #[inline]
3156    #[unstable(feature = "allocator_api", issue = "32838")]
3157    pub fn allocator(&self) -> &A {
3158        &self.alloc
3159    }
3160
3161    /// Returns a raw pointer to the object `T` pointed to by this `Weak<T>`.
3162    ///
3163    /// The pointer is valid only if there are some strong references. The pointer may be dangling,
3164    /// unaligned or even [`null`] otherwise.
3165    ///
3166    /// # Examples
3167    ///
3168    /// ```
3169    /// use std::rc::Rc;
3170    /// use std::ptr;
3171    ///
3172    /// let strong = Rc::new("hello".to_owned());
3173    /// let weak = Rc::downgrade(&strong);
3174    /// // Both point to the same object
3175    /// assert!(ptr::eq(&*strong, weak.as_ptr()));
3176    /// // The strong here keeps it alive, so we can still access the object.
3177    /// assert_eq!("hello", unsafe { &*weak.as_ptr() });
3178    ///
3179    /// drop(strong);
3180    /// // But not any more. We can do weak.as_ptr(), but accessing the pointer would lead to
3181    /// // undefined behavior.
3182    /// // assert_eq!("hello", unsafe { &*weak.as_ptr() });
3183    /// ```
3184    ///
3185    /// [`null`]: ptr::null
3186    #[must_use]
3187    #[stable(feature = "rc_as_ptr", since = "1.45.0")]
3188    pub fn as_ptr(&self) -> *const T {
3189        let ptr: *mut RcInner<T> = NonNull::as_ptr(self.ptr);
3190
3191        if is_dangling(ptr) {
3192            // If the pointer is dangling, we return the sentinel directly. This cannot be
3193            // a valid payload address, as the payload is at least as aligned as RcInner (usize).
3194            ptr as *const T
3195        } else {
3196            // SAFETY: if is_dangling returns false, then the pointer is dereferenceable.
3197            // The payload may be dropped at this point, and we have to maintain provenance,
3198            // so use raw pointer manipulation.
3199            unsafe { &raw mut (*ptr).value }
3200        }
3201    }
3202
3203    /// Consumes the `Weak<T>`, returning the wrapped pointer and allocator.
3204    ///
3205    /// This converts the weak pointer into a raw pointer, while still preserving the ownership of
3206    /// one weak reference (the weak count is not modified by this operation). It can be turned
3207    /// back into the `Weak<T>` with [`from_raw_in`].
3208    ///
3209    /// The same restrictions on accessing the target of the pointer as with
3210    /// [`as_ptr`] apply.
3211    ///
3212    /// # Examples
3213    ///
3214    /// ```
3215    /// #![feature(allocator_api)]
3216    /// use std::rc::{Rc, Weak};
3217    /// use std::alloc::System;
3218    ///
3219    /// let strong = Rc::new_in("hello".to_owned(), System);
3220    /// let weak = Rc::downgrade(&strong);
3221    /// let (raw, alloc) = weak.into_raw_with_allocator();
3222    ///
3223    /// assert_eq!(1, Rc::weak_count(&strong));
3224    /// assert_eq!("hello", unsafe { &*raw });
3225    ///
3226    /// drop(unsafe { Weak::from_raw_in(raw, alloc) });
3227    /// assert_eq!(0, Rc::weak_count(&strong));
3228    /// ```
3229    ///
3230    /// [`from_raw_in`]: Weak::from_raw_in
3231    /// [`as_ptr`]: Weak::as_ptr
3232    #[must_use = "losing the pointer will leak memory"]
3233    #[inline]
3234    #[unstable(feature = "allocator_api", issue = "32838")]
3235    pub fn into_raw_with_allocator(self) -> (*const T, A) {
3236        let this = mem::ManuallyDrop::new(self);
3237        let result = this.as_ptr();
3238        // Safety: `this` is ManuallyDrop so the allocator will not be double-dropped
3239        let alloc = unsafe { ptr::read(&this.alloc) };
3240        (result, alloc)
3241    }
3242
3243    /// Converts a raw pointer previously created by [`into_raw`] back into `Weak<T>`.
3244    ///
3245    /// This can be used to safely get a strong reference (by calling [`upgrade`]
3246    /// later) or to deallocate the weak count by dropping the `Weak<T>`.
3247    ///
3248    /// It takes ownership of one weak reference (with the exception of pointers created by [`new`],
3249    /// as these don't own anything; the method still works on them).
3250    ///
3251    /// # Safety
3252    ///
3253    /// The pointer must have originated from a call to [`into_raw`] and must still own its
3254    /// potential weak reference, and `ptr` must point to a block of memory allocated by `alloc`.
3255    ///
3256    /// It is allowed for the strong count to be 0 at the time of calling this. Nevertheless, this
3257    /// takes ownership of one weak reference currently represented as a raw pointer (the weak
3258    /// count is not modified by this operation) and therefore it must be paired with a previous
3259    /// call to [`into_raw`].
3260    ///
3261    /// # Examples
3262    ///
3263    /// ```
3264    /// use std::rc::{Rc, Weak};
3265    ///
3266    /// let strong = Rc::new("hello".to_owned());
3267    ///
3268    /// let raw_1 = Rc::downgrade(&strong).into_raw();
3269    /// let raw_2 = Rc::downgrade(&strong).into_raw();
3270    ///
3271    /// assert_eq!(2, Rc::weak_count(&strong));
3272    ///
3273    /// assert_eq!("hello", &*unsafe { Weak::from_raw(raw_1) }.upgrade().unwrap());
3274    /// assert_eq!(1, Rc::weak_count(&strong));
3275    ///
3276    /// drop(strong);
3277    ///
3278    /// // Decrement the last weak count.
3279    /// assert!(unsafe { Weak::from_raw(raw_2) }.upgrade().is_none());
3280    /// ```
3281    ///
3282    /// [`into_raw`]: Weak::into_raw
3283    /// [`upgrade`]: Weak::upgrade
3284    /// [`new`]: Weak::new
3285    #[inline]
3286    #[unstable(feature = "allocator_api", issue = "32838")]
3287    pub unsafe fn from_raw_in(ptr: *const T, alloc: A) -> Self {
3288        // See Weak::as_ptr for context on how the input pointer is derived.
3289
3290        let ptr = if is_dangling(ptr) {
3291            // This is a dangling Weak.
3292            ptr as *mut RcInner<T>
3293        } else {
3294            // Otherwise, we're guaranteed the pointer came from a nondangling Weak.
3295            // SAFETY: data_offset is safe to call, as ptr references a real (potentially dropped) T.
3296            let offset = unsafe { data_offset(ptr) };
3297            // Thus, we reverse the offset to get the whole RcInner.
3298            // SAFETY: the pointer originated from a Weak, so this offset is safe.
3299            unsafe { ptr.byte_sub(offset) as *mut RcInner<T> }
3300        };
3301
3302        // SAFETY: we now have recovered the original Weak pointer, so can create the Weak.
3303        Weak { ptr: unsafe { NonNull::new_unchecked(ptr) }, alloc }
3304    }
3305
3306    /// Attempts to upgrade the `Weak` pointer to an [`Rc`], delaying
3307    /// dropping of the inner value if successful.
3308    ///
3309    /// Returns [`None`] if the inner value has since been dropped.
3310    ///
3311    /// # Examples
3312    ///
3313    /// ```
3314    /// use std::rc::Rc;
3315    ///
3316    /// let five = Rc::new(5);
3317    ///
3318    /// let weak_five = Rc::downgrade(&five);
3319    ///
3320    /// let strong_five: Option<Rc<_>> = weak_five.upgrade();
3321    /// assert!(strong_five.is_some());
3322    ///
3323    /// // Destroy all strong pointers.
3324    /// drop(strong_five);
3325    /// drop(five);
3326    ///
3327    /// assert!(weak_five.upgrade().is_none());
3328    /// ```
3329    #[must_use = "this returns a new `Rc`, \
3330                  without modifying the original weak pointer"]
3331    #[stable(feature = "rc_weak", since = "1.4.0")]
3332    pub fn upgrade(&self) -> Option<Rc<T, A>>
3333    where
3334        A: Clone,
3335    {
3336        let inner = self.inner()?;
3337
3338        if inner.strong() == 0 {
3339            None
3340        } else {
3341            unsafe {
3342                inner.inc_strong();
3343                Some(Rc::from_inner_in(self.ptr, self.alloc.clone()))
3344            }
3345        }
3346    }
3347
3348    /// Gets the number of strong (`Rc`) pointers pointing to this allocation.
3349    ///
3350    /// If `self` was created using [`Weak::new`], this will return 0.
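    ///
    /// # Examples
    ///
    /// A small sketch of the counts described above:
    ///
    /// ```
    /// use std::rc::{Rc, Weak};
    ///
    /// let five = Rc::new(5);
    /// let weak_five = Rc::downgrade(&five);
    /// assert_eq!(1, weak_five.strong_count());
    ///
    /// let empty: Weak<i32> = Weak::new();
    /// assert_eq!(0, empty.strong_count());
    /// ```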
3351    #[must_use]
3352    #[stable(feature = "weak_counts", since = "1.41.0")]
3353    pub fn strong_count(&self) -> usize {
3354        if let Some(inner) = self.inner() { inner.strong() } else { 0 }
3355    }
3356
3357    /// Gets the number of `Weak` pointers pointing to this allocation.
3358    ///
3359    /// If no strong pointers remain, this will return zero.
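    ///
    /// # Examples
    ///
    /// A small sketch of the counting rules described above:
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let five = Rc::new(5);
    /// let weak_five = Rc::downgrade(&five);
    /// assert_eq!(1, weak_five.weak_count());
    ///
    /// drop(five);
    /// // No strong pointers remain, so zero is reported.
    /// assert_eq!(0, weak_five.weak_count());
    /// ```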
3360    #[must_use]
3361    #[stable(feature = "weak_counts", since = "1.41.0")]
3362    pub fn weak_count(&self) -> usize {
3363        if let Some(inner) = self.inner() {
3364            if inner.strong() > 0 {
3365                inner.weak() - 1 // subtract the implicit weak ptr
3366            } else {
3367                0
3368            }
3369        } else {
3370            0
3371        }
3372    }
3373
3374    /// Returns `None` when the pointer is dangling and there is no allocated `RcInner`
3375    /// (i.e., when this `Weak` was created by `Weak::new`).
3376    #[inline]
3377    fn inner(&self) -> Option<WeakInner<'_>> {
3378        if is_dangling(self.ptr.as_ptr()) {
3379            None
3380        } else {
3381            // We are careful to *not* create a reference covering the "data" field, as
3382            // the field may be mutated concurrently (for example, if the last `Rc`
3383            // is dropped, the data field will be dropped in-place).
3384            Some(unsafe {
3385                let ptr = self.ptr.as_ptr();
3386                WeakInner { strong: &(*ptr).strong, weak: &(*ptr).weak }
3387            })
3388        }
3389    }
3390
3391    /// Returns `true` if the two `Weak`s point to the same allocation, in a vein similar to
3392    /// [`ptr::eq`], or if both don't point to any allocation (because they were created with
3393    /// `Weak::new()`). However, this function ignores the metadata of `dyn Trait` pointers.
3394    ///
3395    /// # Notes
3396    ///
3397    /// Since this compares pointers it means that two `Weak`s created by `Weak::new()` will
3398    /// compare equal to each other, even though they don't point to any allocation.
3399    ///
3400    /// # Examples
3401    ///
3402    /// ```
3403    /// use std::rc::Rc;
3404    ///
3405    /// let first_rc = Rc::new(5);
3406    /// let first = Rc::downgrade(&first_rc);
3407    /// let second = Rc::downgrade(&first_rc);
3408    ///
3409    /// assert!(first.ptr_eq(&second));
3410    ///
3411    /// let third_rc = Rc::new(5);
3412    /// let third = Rc::downgrade(&third_rc);
3413    ///
3414    /// assert!(!first.ptr_eq(&third));
3415    /// ```
3416    ///
3417    /// Comparing `Weak::new`.
3418    ///
3419    /// ```
3420    /// use std::rc::{Rc, Weak};
3421    ///
3422    /// let first = Weak::new();
3423    /// let second = Weak::new();
3424    /// assert!(first.ptr_eq(&second));
3425    ///
3426    /// let third_rc = Rc::new(());
3427    /// let third = Rc::downgrade(&third_rc);
3428    /// assert!(!first.ptr_eq(&third));
3429    /// ```
3430    #[inline]
3431    #[must_use]
3432    #[stable(feature = "weak_ptr_eq", since = "1.39.0")]
3433    pub fn ptr_eq(&self, other: &Self) -> bool {
3434        ptr::addr_eq(self.ptr.as_ptr(), other.ptr.as_ptr())
3435    }
3436}
3437
3438#[stable(feature = "rc_weak", since = "1.4.0")]
3439unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for Weak<T, A> {
3440    /// Drops the `Weak` pointer.
3441    ///
3442    /// # Examples
3443    ///
3444    /// ```
3445    /// use std::rc::{Rc, Weak};
3446    ///
3447    /// struct Foo;
3448    ///
3449    /// impl Drop for Foo {
3450    ///     fn drop(&mut self) {
3451    ///         println!("dropped!");
3452    ///     }
3453    /// }
3454    ///
3455    /// let foo = Rc::new(Foo);
3456    /// let weak_foo = Rc::downgrade(&foo);
3457    /// let other_weak_foo = Weak::clone(&weak_foo);
3458    ///
3459    /// drop(weak_foo);   // Doesn't print anything
3460    /// drop(foo);        // Prints "dropped!"
3461    ///
3462    /// assert!(other_weak_foo.upgrade().is_none());
3463    /// ```
3464    fn drop(&mut self) {
3465        let inner = if let Some(inner) = self.inner() { inner } else { return };
3466
3467        inner.dec_weak();
3468        // the weak count starts at 1, and will only go to zero if all
3469        // the strong pointers have disappeared.
3470        if inner.weak() == 0 {
3471            unsafe {
3472                self.alloc.deallocate(self.ptr.cast(), Layout::for_value_raw(self.ptr.as_ptr()));
3473            }
3474        }
3475    }
3476}
3477
3478#[stable(feature = "rc_weak", since = "1.4.0")]
3479impl<T: ?Sized, A: Allocator + Clone> Clone for Weak<T, A> {
3480    /// Makes a clone of the `Weak` pointer that points to the same allocation.
3481    ///
3482    /// # Examples
3483    ///
3484    /// ```
3485    /// use std::rc::{Rc, Weak};
3486    ///
3487    /// let weak_five = Rc::downgrade(&Rc::new(5));
3488    ///
3489    /// let _ = Weak::clone(&weak_five);
3490    /// ```
3491    #[inline]
3492    fn clone(&self) -> Weak<T, A> {
3493        if let Some(inner) = self.inner() {
3494            inner.inc_weak()
3495        }
3496        Weak { ptr: self.ptr, alloc: self.alloc.clone() }
3497    }
3498}
3499
3500#[unstable(feature = "ergonomic_clones", issue = "132290")]
3501impl<T: ?Sized, A: Allocator + Clone> UseCloned for Weak<T, A> {}
3502
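/// # Examples
///
/// A small sketch of the output, which is always the placeholder `(Weak)`:
///
/// ```
/// use std::rc::{Rc, Weak};
///
/// let weak: Weak<i32> = Weak::new();
/// assert_eq!(format!("{weak:?}"), "(Weak)");
///
/// let strong = Rc::new(5);
/// assert_eq!(format!("{:?}", Rc::downgrade(&strong)), "(Weak)");
/// ```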
3503#[stable(feature = "rc_weak", since = "1.4.0")]
3504impl<T: ?Sized, A: Allocator> fmt::Debug for Weak<T, A> {
3505    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
3506        write!(f, "(Weak)")
3507    }
3508}
3509
3510#[stable(feature = "downgraded_weak", since = "1.10.0")]
3511impl<T> Default for Weak<T> {
3512    /// Constructs a new `Weak<T>`, without allocating any memory.
3513    /// Calling [`upgrade`] on the return value always gives [`None`].
3514    ///
3515    /// [`upgrade`]: Weak::upgrade
3516    ///
3517    /// # Examples
3518    ///
3519    /// ```
3520    /// use std::rc::Weak;
3521    ///
3522    /// let empty: Weak<i64> = Default::default();
3523    /// assert!(empty.upgrade().is_none());
3524    /// ```
3525    fn default() -> Weak<T> {
3526        Weak::new()
3527    }
3528}
3529
3530// NOTE: If you mem::forget Rcs (or Weaks), drop is skipped and the ref-count
3531// is not decremented, meaning the ref-count can overflow, and then you can
3532// free the allocation while outstanding Rcs (or Weaks) exist, which would be
3533// unsound. We abort because this is such a degenerate scenario that we don't
3534// care about what happens -- no real program should ever experience this.
3535//
3536// This should have negligible overhead since you don't actually need to
3537// clone these much in Rust thanks to ownership and move-semantics.
3538
3539#[doc(hidden)]
3540trait RcInnerPtr {
3541    fn weak_ref(&self) -> &Cell<usize>;
3542    fn strong_ref(&self) -> &Cell<usize>;
3543
3544    #[inline]
3545    fn strong(&self) -> usize {
3546        self.strong_ref().get()
3547    }
3548
3549    #[inline]
3550    fn inc_strong(&self) {
3551        let strong = self.strong();
3552
3553        // We insert an `assume` here to hint LLVM at an otherwise
3554        // missed optimization.
3555        // SAFETY: The reference count will never be zero when this is
3556        // called.
3557        unsafe {
3558            hint::assert_unchecked(strong != 0);
3559        }
3560
3561        let strong = strong.wrapping_add(1);
3562        self.strong_ref().set(strong);
3563
3564        // We want to abort on overflow instead of dropping the value.
3565        // Checking for overflow after the store instead of before
3566        // allows for slightly better code generation.
3567        if core::intrinsics::unlikely(strong == 0) {
3568            abort();
3569        }
3570    }
3571
3572    #[inline]
3573    fn dec_strong(&self) {
3574        self.strong_ref().set(self.strong() - 1);
3575    }
3576
3577    #[inline]
3578    fn weak(&self) -> usize {
3579        self.weak_ref().get()
3580    }
3581
3582    #[inline]
3583    fn inc_weak(&self) {
3584        let weak = self.weak();
3585
3586        // We insert an `assume` here to hint LLVM at an otherwise
3587        // missed optimization.
3588        // SAFETY: The reference count will never be zero when this is
3589        // called.
3590        unsafe {
3591            hint::assert_unchecked(weak != 0);
3592        }
3593
3594        let weak = weak.wrapping_add(1);
3595        self.weak_ref().set(weak);
3596
3597        // We want to abort on overflow instead of dropping the value.
3598        // Checking for overflow after the store instead of before
3599        // allows for slightly better code generation.
3600        if core::intrinsics::unlikely(weak == 0) {
3601            abort();
3602        }
3603    }
3604
3605    #[inline]
3606    fn dec_weak(&self) {
3607        self.weak_ref().set(self.weak() - 1);
3608    }
3609}
3610
3611impl<T: ?Sized> RcInnerPtr for RcInner<T> {
3612    #[inline(always)]
3613    fn weak_ref(&self) -> &Cell<usize> {
3614        &self.weak
3615    }
3616
3617    #[inline(always)]
3618    fn strong_ref(&self) -> &Cell<usize> {
3619        &self.strong
3620    }
3621}
3622
3623impl<'a> RcInnerPtr for WeakInner<'a> {
3624    #[inline(always)]
3625    fn weak_ref(&self) -> &Cell<usize> {
3626        self.weak
3627    }
3628
3629    #[inline(always)]
3630    fn strong_ref(&self) -> &Cell<usize> {
3631        self.strong
3632    }
3633}
3634
3635#[stable(feature = "rust1", since = "1.0.0")]
3636impl<T: ?Sized, A: Allocator> borrow::Borrow<T> for Rc<T, A> {
3637    fn borrow(&self) -> &T {
3638        &**self
3639    }
3640}
3641
3642#[stable(since = "1.5.0", feature = "smart_ptr_as_ref")]
3643impl<T: ?Sized, A: Allocator> AsRef<T> for Rc<T, A> {
3644    fn as_ref(&self) -> &T {
3645        &**self
3646    }
3647}
3648
3649#[stable(feature = "pin", since = "1.33.0")]
3650impl<T: ?Sized, A: Allocator> Unpin for Rc<T, A> {}
3651
3652/// Gets the offset within an `RcInner` for the payload behind a pointer.
3653///
3654/// # Safety
3655///
3656/// The pointer must point to (and have valid metadata for) a previously
3657/// valid instance of T, but the T is allowed to be dropped.
3658unsafe fn data_offset<T: ?Sized>(ptr: *const T) -> usize {
3659    // Align the unsized value to the end of the RcInner.
3660    // Because RcInner is repr(C), it will always be the last field in memory.
3661    // SAFETY: since the only unsized types possible are slices, trait objects,
3662    // and extern types, the input safety requirement is currently enough to
3663    // satisfy the requirements of align_of_val_raw; this is an implementation
3664    // detail of the language that must not be relied upon outside of std.
3665    unsafe { data_offset_align(align_of_val_raw(ptr)) }
3666}
3667
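// For illustration (assuming a 64-bit target): `RcInner<()>` is two `usize`
// counters, so `layout.size()` is 16 with alignment 8. A payload with
// alignment <= 8 needs no padding and lands at offset 16, while a payload
// with alignment 32 would start at offset 32.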
3668#[inline]
3669fn data_offset_align(align: usize) -> usize {
3670    let layout = Layout::new::<RcInner<()>>();
3671    layout.size() + layout.padding_needed_for(align)
3672}
3673
3674/// A uniquely owned [`Rc`].
3675///
3676/// This represents an `Rc` that is known to be uniquely owned -- that is, have exactly one strong
3677/// reference. Multiple weak pointers can be created, but attempts to upgrade those to strong
3678/// references will fail unless the `UniqueRc` they point to has been converted into a regular `Rc`.
3679///
3680/// Because they are uniquely owned, the contents of a `UniqueRc` can be freely mutated. A common
3681/// use case is to have an object be mutable during its initialization phase but then have it become
3682/// immutable and be converted to a normal `Rc`.
3683///
3684/// This can be used as a flexible way to create cyclic data structures, as in the example below.
3685///
3686/// ```
3687/// #![feature(unique_rc_arc)]
3688/// use std::rc::{Rc, Weak, UniqueRc};
3689///
3690/// struct Gadget {
3691///     #[allow(dead_code)]
3692///     me: Weak<Gadget>,
3693/// }
3694///
3695/// fn create_gadget() -> Option<Rc<Gadget>> {
3696///     let mut rc = UniqueRc::new(Gadget {
3697///         me: Weak::new(),
3698///     });
3699///     rc.me = UniqueRc::downgrade(&rc);
3700///     Some(UniqueRc::into_rc(rc))
3701/// }
3702///
3703/// create_gadget().unwrap();
3704/// ```
3705///
3706/// An advantage of using `UniqueRc` over [`Rc::new_cyclic`] to build cyclic data structures is that
3707/// [`Rc::new_cyclic`]'s `data_fn` parameter cannot be async or return a [`Result`]. As shown in the
3708/// previous example, `UniqueRc` allows for more flexibility in the construction of cyclic data,
3709/// including fallible or async constructors.
3710#[unstable(feature = "unique_rc_arc", issue = "112566")]
3711pub struct UniqueRc<
3712    T: ?Sized,
3713    #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
3714> {
3715    ptr: NonNull<RcInner<T>>,
3716    // Define the ownership of `RcInner<T>` for drop-check
3717    _marker: PhantomData<RcInner<T>>,
3718    // Invariance is necessary for soundness: once other `Weak`
3719    // references exist, we already have a form of shared mutability!
3720    _marker2: PhantomData<*mut T>,
3721    alloc: A,
3722}
3723
3724// Not necessary for correctness since `UniqueRc` contains `NonNull`,
3725// but having an explicit negative impl is nice for documentation purposes
3726// and results in nicer error messages.
3727#[unstable(feature = "unique_rc_arc", issue = "112566")]
3728impl<T: ?Sized, A: Allocator> !Send for UniqueRc<T, A> {}
3729
3730// Not necessary for correctness since `UniqueRc` contains `NonNull`,
3731// but having an explicit negative impl is nice for documentation purposes
3732// and results in nicer error messages.
3733#[unstable(feature = "unique_rc_arc", issue = "112566")]
3734impl<T: ?Sized, A: Allocator> !Sync for UniqueRc<T, A> {}
3735
3736#[unstable(feature = "unique_rc_arc", issue = "112566")]
3737impl<T: ?Sized + Unsize<U>, U: ?Sized, A: Allocator> CoerceUnsized<UniqueRc<U, A>>
3738    for UniqueRc<T, A>
3739{
3740}
3741
3743#[unstable(feature = "dispatch_from_dyn", issue = "none")]
3744impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<UniqueRc<U>> for UniqueRc<T> {}
3745
3746#[unstable(feature = "unique_rc_arc", issue = "112566")]
3747impl<T: ?Sized + fmt::Display, A: Allocator> fmt::Display for UniqueRc<T, A> {
3748    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
3749        fmt::Display::fmt(&**self, f)
3750    }
3751}
3752
3753#[unstable(feature = "unique_rc_arc", issue = "112566")]
3754impl<T: ?Sized + fmt::Debug, A: Allocator> fmt::Debug for UniqueRc<T, A> {
3755    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
3756        fmt::Debug::fmt(&**self, f)
3757    }
3758}
3759
3760#[unstable(feature = "unique_rc_arc", issue = "112566")]
3761impl<T: ?Sized, A: Allocator> fmt::Pointer for UniqueRc<T, A> {
3762    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
3763        fmt::Pointer::fmt(&(&raw const **self), f)
3764    }
3765}
3766
3767#[unstable(feature = "unique_rc_arc", issue = "112566")]
3768impl<T: ?Sized, A: Allocator> borrow::Borrow<T> for UniqueRc<T, A> {
3769    fn borrow(&self) -> &T {
3770        &**self
3771    }
3772}
3773
3774#[unstable(feature = "unique_rc_arc", issue = "112566")]
3775impl<T: ?Sized, A: Allocator> borrow::BorrowMut<T> for UniqueRc<T, A> {
3776    fn borrow_mut(&mut self) -> &mut T {
3777        &mut **self
3778    }
3779}
3780
3781#[unstable(feature = "unique_rc_arc", issue = "112566")]
3782impl<T: ?Sized, A: Allocator> AsRef<T> for UniqueRc<T, A> {
3783    fn as_ref(&self) -> &T {
3784        &**self
3785    }
3786}
3787
3788#[unstable(feature = "unique_rc_arc", issue = "112566")]
3789impl<T: ?Sized, A: Allocator> AsMut<T> for UniqueRc<T, A> {
3790    fn as_mut(&mut self) -> &mut T {
3791        &mut **self
3792    }
3793}
3794
3795#[unstable(feature = "unique_rc_arc", issue = "112566")]
3796impl<T: ?Sized, A: Allocator> Unpin for UniqueRc<T, A> {}
3797
3798#[unstable(feature = "unique_rc_arc", issue = "112566")]
3799impl<T: ?Sized + PartialEq, A: Allocator> PartialEq for UniqueRc<T, A> {
3800    /// Equality for two `UniqueRc`s.
3801    ///
3802    /// Two `UniqueRc`s are equal if their inner values are equal.
3803    ///
3804    /// # Examples
3805    ///
3806    /// ```
3807    /// #![feature(unique_rc_arc)]
3808    /// use std::rc::UniqueRc;
3809    ///
3810    /// let five = UniqueRc::new(5);
3811    ///
3812    /// assert!(five == UniqueRc::new(5));
3813    /// ```
3814    #[inline]
3815    fn eq(&self, other: &Self) -> bool {
3816        PartialEq::eq(&**self, &**other)
3817    }
3818
3819    /// Inequality for two `UniqueRc`s.
3820    ///
3821    /// Two `UniqueRc`s are not equal if their inner values are not equal.
3822    ///
3823    /// # Examples
3824    ///
3825    /// ```
3826    /// #![feature(unique_rc_arc)]
3827    /// use std::rc::UniqueRc;
3828    ///
3829    /// let five = UniqueRc::new(5);
3830    ///
3831    /// assert!(five != UniqueRc::new(6));
3832    /// ```
3833    #[inline]
3834    fn ne(&self, other: &Self) -> bool {
3835        PartialEq::ne(&**self, &**other)
3836    }
3837}
3838
3839#[unstable(feature = "unique_rc_arc", issue = "112566")]
3840impl<T: ?Sized + PartialOrd, A: Allocator> PartialOrd for UniqueRc<T, A> {
3841    /// Partial comparison for two `UniqueRc`s.
3842    ///
3843    /// The two are compared by calling `partial_cmp()` on their inner values.
3844    ///
3845    /// # Examples
3846    ///
3847    /// ```
3848    /// #![feature(unique_rc_arc)]
3849    /// use std::rc::UniqueRc;
3850    /// use std::cmp::Ordering;
3851    ///
3852    /// let five = UniqueRc::new(5);
3853    ///
3854    /// assert_eq!(Some(Ordering::Less), five.partial_cmp(&UniqueRc::new(6)));
3855    /// ```
3856    #[inline(always)]
3857    fn partial_cmp(&self, other: &UniqueRc<T, A>) -> Option<Ordering> {
3858        (**self).partial_cmp(&**other)
3859    }
3860
3861    /// Less-than comparison for two `UniqueRc`s.
3862    ///
3863    /// The two are compared by calling `<` on their inner values.
3864    ///
3865    /// # Examples
3866    ///
3867    /// ```
3868    /// #![feature(unique_rc_arc)]
3869    /// use std::rc::UniqueRc;
3870    ///
3871    /// let five = UniqueRc::new(5);
3872    ///
3873    /// assert!(five < UniqueRc::new(6));
3874    /// ```
3875    #[inline(always)]
3876    fn lt(&self, other: &UniqueRc<T, A>) -> bool {
3877        **self < **other
3878    }
3879
3880    /// 'Less than or equal to' comparison for two `UniqueRc`s.
3881    ///
3882    /// The two are compared by calling `<=` on their inner values.
3883    ///
3884    /// # Examples
3885    ///
3886    /// ```
3887    /// #![feature(unique_rc_arc)]
3888    /// use std::rc::UniqueRc;
3889    ///
3890    /// let five = UniqueRc::new(5);
3891    ///
3892    /// assert!(five <= UniqueRc::new(5));
3893    /// ```
3894    #[inline(always)]
3895    fn le(&self, other: &UniqueRc<T, A>) -> bool {
3896        **self <= **other
3897    }
3898
3899    /// Greater-than comparison for two `UniqueRc`s.
3900    ///
3901    /// The two are compared by calling `>` on their inner values.
3902    ///
3903    /// # Examples
3904    ///
3905    /// ```
3906    /// #![feature(unique_rc_arc)]
3907    /// use std::rc::UniqueRc;
3908    ///
3909    /// let five = UniqueRc::new(5);
3910    ///
3911    /// assert!(five > UniqueRc::new(4));
3912    /// ```
3913    #[inline(always)]
3914    fn gt(&self, other: &UniqueRc<T, A>) -> bool {
3915        **self > **other
3916    }
3917
3918    /// 'Greater than or equal to' comparison for two `UniqueRc`s.
3919    ///
3920    /// The two are compared by calling `>=` on their inner values.
3921    ///
3922    /// # Examples
3923    ///
3924    /// ```
3925    /// #![feature(unique_rc_arc)]
3926    /// use std::rc::UniqueRc;
3927    ///
3928    /// let five = UniqueRc::new(5);
3929    ///
3930    /// assert!(five >= UniqueRc::new(5));
3931    /// ```
3932    #[inline(always)]
3933    fn ge(&self, other: &UniqueRc<T, A>) -> bool {
3934        **self >= **other
3935    }
3936}
3937
3938#[unstable(feature = "unique_rc_arc", issue = "112566")]
3939impl<T: ?Sized + Ord, A: Allocator> Ord for UniqueRc<T, A> {
3940    /// Comparison for two `UniqueRc`s.
3941    ///
3942    /// The two are compared by calling `cmp()` on their inner values.
3943    ///
3944    /// # Examples
3945    ///
3946    /// ```
3947    /// #![feature(unique_rc_arc)]
3948    /// use std::rc::UniqueRc;
3949    /// use std::cmp::Ordering;
3950    ///
3951    /// let five = UniqueRc::new(5);
3952    ///
3953    /// assert_eq!(Ordering::Less, five.cmp(&UniqueRc::new(6)));
3954    /// ```
3955    #[inline]
3956    fn cmp(&self, other: &UniqueRc<T, A>) -> Ordering {
3957        (**self).cmp(&**other)
3958    }
3959}
3960
3961#[unstable(feature = "unique_rc_arc", issue = "112566")]
3962impl<T: ?Sized + Eq, A: Allocator> Eq for UniqueRc<T, A> {}
3963
3964#[unstable(feature = "unique_rc_arc", issue = "112566")]
3965impl<T: ?Sized + Hash, A: Allocator> Hash for UniqueRc<T, A> {
3966    fn hash<H: Hasher>(&self, state: &mut H) {
3967        (**self).hash(state);
3968    }
3969}
3970
3971// Depends on A = Global
3972impl<T> UniqueRc<T> {
3973    /// Creates a new `UniqueRc`.
3974    ///
3975    /// Weak references to this `UniqueRc` can be created with [`UniqueRc::downgrade`]. Upgrading
3976    /// these weak references will fail before the `UniqueRc` has been converted into an [`Rc`].
3977    /// After converting the `UniqueRc` into an [`Rc`], any weak references created beforehand will
3978    /// point to the new [`Rc`].
3979    #[cfg(not(no_global_oom_handling))]
3980    #[unstable(feature = "unique_rc_arc", issue = "112566")]
3981    pub fn new(value: T) -> Self {
3982        Self::new_in(value, Global)
3983    }
3984}
3985
3986impl<T, A: Allocator> UniqueRc<T, A> {
3987    /// Creates a new `UniqueRc` in the provided allocator.
3988    ///
3989    /// Weak references to this `UniqueRc` can be created with [`UniqueRc::downgrade`]. Upgrading
3990    /// these weak references will fail before the `UniqueRc` has been converted into an [`Rc`].
3991    /// After converting the `UniqueRc` into an [`Rc`], any weak references created beforehand will
3992    /// point to the new [`Rc`].
3993    #[cfg(not(no_global_oom_handling))]
3994    #[unstable(feature = "unique_rc_arc", issue = "112566")]
3995    pub fn new_in(value: T, alloc: A) -> Self {
3996        let (ptr, alloc) = Box::into_unique(Box::new_in(
3997            RcInner {
3998                strong: Cell::new(0),
3999            // Keep one implicit weak reference so that the `UniqueRc` stays valid even if
4000            // all the weak pointers that are created from it are dropped.
4001                weak: Cell::new(1),
4002                value,
4003            },
4004            alloc,
4005        ));
4006        Self { ptr: ptr.into(), _marker: PhantomData, _marker2: PhantomData, alloc }
4007    }
4008}
4009
4010impl<T: ?Sized, A: Allocator> UniqueRc<T, A> {
4011    /// Converts the `UniqueRc` into a regular [`Rc`].
4012    ///
4013    /// This consumes the `UniqueRc` and returns a regular [`Rc`] that contains the value held
4014    /// by the `UniqueRc`.
4015    ///
4016    /// Any weak references created before this method is called can now be upgraded to strong
4017    /// references.
4018    #[unstable(feature = "unique_rc_arc", issue = "112566")]
4019    pub fn into_rc(this: Self) -> Rc<T, A> {
4020        let mut this = ManuallyDrop::new(this);
4021
4022        // Move the allocator out.
4023        // SAFETY: `this.alloc` will not be accessed again, nor dropped because it is in
4024        // a `ManuallyDrop`.
4025        let alloc: A = unsafe { ptr::read(&this.alloc) };
4026
4027        // SAFETY: This pointer was allocated at creation time so we know it is valid.
4028        unsafe {
4029            // Convert our weak reference into a strong reference
4030            this.ptr.as_mut().strong.set(1);
4031            Rc::from_inner_in(this.ptr, alloc)
4032        }
4033    }
4034}
4035
4036impl<T: ?Sized, A: Allocator + Clone> UniqueRc<T, A> {
4037    /// Creates a new weak reference to the `UniqueRc`.
4038    ///
4039    /// Attempting to upgrade this weak reference will fail before the `UniqueRc` has been converted
4040    /// to an [`Rc`] using [`UniqueRc::into_rc`].
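    ///
    /// # Examples
    ///
    /// A minimal sketch of the behavior described above (using the unstable
    /// `unique_rc_arc` feature):
    ///
    /// ```
    /// #![feature(unique_rc_arc)]
    /// use std::rc::UniqueRc;
    ///
    /// let unique = UniqueRc::new(5);
    /// let weak = UniqueRc::downgrade(&unique);
    /// // Upgrading fails while the value is still uniquely owned.
    /// assert!(weak.upgrade().is_none());
    ///
    /// let rc = UniqueRc::into_rc(unique);
    /// // After conversion, the same weak pointer upgrades successfully.
    /// assert_eq!(5, *weak.upgrade().unwrap());
    /// assert_eq!(5, *rc);
    /// ```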
4041    #[unstable(feature = "unique_rc_arc", issue = "112566")]
4042    pub fn downgrade(this: &Self) -> Weak<T, A> {
4043        // SAFETY: This pointer was allocated at creation time and we guarantee that we only have
4044        // one strong reference before converting to a regular Rc.
4045        unsafe {
4046            this.ptr.as_ref().inc_weak();
4047        }
4048        Weak { ptr: this.ptr, alloc: this.alloc.clone() }
4049    }
4050}
4051
4052#[unstable(feature = "unique_rc_arc", issue = "112566")]
4053impl<T: ?Sized, A: Allocator> Deref for UniqueRc<T, A> {
4054    type Target = T;
4055
4056    fn deref(&self) -> &T {
4057        // SAFETY: This pointer was allocated at creation time so we know it is valid.
4058        unsafe { &self.ptr.as_ref().value }
4059    }
4060}
4061
4062#[unstable(feature = "unique_rc_arc", issue = "112566")]
4063impl<T: ?Sized, A: Allocator> DerefMut for UniqueRc<T, A> {
4064    fn deref_mut(&mut self) -> &mut T {
4065        // SAFETY: This pointer was allocated at creation time so we know it is valid. We know we
4066        // have unique ownership and therefore it's safe to make a mutable reference because
4067        // `UniqueRc` owns the only strong reference to itself.
4068        unsafe { &mut (*self.ptr.as_ptr()).value }
4069    }
4070}
4071
4072#[unstable(feature = "unique_rc_arc", issue = "112566")]
4073unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for UniqueRc<T, A> {
4074    fn drop(&mut self) {
4075        unsafe {
4076            // destroy the contained object
4077            drop_in_place(DerefMut::deref_mut(self));
4078
4079            // remove the implicit "strong weak" pointer now that we've destroyed the contents.
4080            self.ptr.as_ref().dec_weak();
4081
4082            if self.ptr.as_ref().weak() == 0 {
4083                self.alloc.deallocate(self.ptr.cast(), Layout::for_value_raw(self.ptr.as_ptr()));
4084            }
4085        }
4086    }
4087}
4088
4089/// A unique owning pointer to an [`RcInner`] **that does not imply the contents are initialized,**
4090/// but will deallocate it (without dropping the value) when dropped.
4091///
4092/// This is a helper for [`Rc::make_mut()`] to ensure correct cleanup on panic.
4093/// It is nearly a duplicate of `UniqueRc<MaybeUninit<T>, A>` except that it allows `T: !Sized`,
4094/// which `MaybeUninit` does not.
4095#[cfg(not(no_global_oom_handling))]
4096struct UniqueRcUninit<T: ?Sized, A: Allocator> {
4097    ptr: NonNull<RcInner<T>>,
4098    layout_for_value: Layout,
4099    alloc: Option<A>,
4100}
4101
4102#[cfg(not(no_global_oom_handling))]
4103impl<T: ?Sized, A: Allocator> UniqueRcUninit<T, A> {
4104    /// Allocates an `RcInner` with a layout suitable to contain `for_value` or a clone of it.
4105    fn new(for_value: &T, alloc: A) -> UniqueRcUninit<T, A> {
4106        let layout = Layout::for_value(for_value);
4107        let ptr = unsafe {
4108            Rc::allocate_for_layout(
4109                layout,
4110                |layout_for_rc_inner| alloc.allocate(layout_for_rc_inner),
4111                |mem| mem.with_metadata_of(ptr::from_ref(for_value) as *const RcInner<T>),
4112            )
4113        };
4114        Self { ptr: NonNull::new(ptr).unwrap(), layout_for_value: layout, alloc: Some(alloc) }
4115    }
4116
4117    /// Returns the pointer to be written into to initialize the [`Rc`].
4118    fn data_ptr(&mut self) -> *mut T {
4119        let offset = data_offset_align(self.layout_for_value.align());
4120        unsafe { self.ptr.as_ptr().byte_add(offset) as *mut T }
4121    }
4122
4123    /// Upgrade this into a normal [`Rc`].
4124    ///
4125    /// # Safety
4126    ///
4127    /// The data must have been initialized (by writing to [`Self::data_ptr()`]).
4128    unsafe fn into_rc(self) -> Rc<T, A> {
4129        let mut this = ManuallyDrop::new(self);
4130        let ptr = this.ptr;
4131        let alloc = this.alloc.take().unwrap();
4132
4133        // SAFETY: The pointer is valid as per `UniqueRcUninit::new`, and the caller is responsible
4134        // for having initialized the data.
4135        unsafe { Rc::from_ptr_in(ptr.as_ptr(), alloc) }
4136    }
4137}
4138
4139#[cfg(not(no_global_oom_handling))]
4140impl<T: ?Sized, A: Allocator> Drop for UniqueRcUninit<T, A> {
4141    fn drop(&mut self) {
4142        // SAFETY:
4143        // * new() produced a pointer safe to deallocate.
4144        // * We own the pointer unless into_rc() was called, which forgets us.
4145        unsafe {
4146            self.alloc.take().unwrap().deallocate(
4147                self.ptr.cast(),
4148                rc_inner_layout_for_value_layout(self.layout_for_value),
4149            );
4150        }
4151    }
4152}