alloc/rc.rs
1//! Single-threaded reference-counting pointers. 'Rc' stands for 'Reference
2//! Counted'.
3//!
4//! The type [`Rc<T>`][`Rc`] provides shared ownership of a value of type `T`,
5//! allocated in the heap. Invoking [`clone`][clone] on [`Rc`] produces a new
6//! pointer to the same allocation in the heap. When the last [`Rc`] pointer to a
7//! given allocation is destroyed, the value stored in that allocation (often
8//! referred to as "inner value") is also dropped.
9//!
10//! Shared references in Rust disallow mutation by default, and [`Rc`]
11//! is no exception: you cannot generally obtain a mutable reference to
12//! something inside an [`Rc`]. If you need mutability, put a [`Cell`]
13//! or [`RefCell`] inside the [`Rc`]; see [an example of mutability
14//! inside an `Rc`][mutability].
15//!
16//! [`Rc`] uses non-atomic reference counting. This means that overhead is very
17//! low, but an [`Rc`] cannot be sent between threads, and consequently [`Rc`]
18//! does not implement [`Send`]. As a result, the Rust compiler
19//! will check *at compile time* that you are not sending [`Rc`]s between
20//! threads. If you need multi-threaded, atomic reference counting, use
21//! [`sync::Arc`][arc].
22//!
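//! For example, attempting to move an `Rc` into another thread is rejected at
//! compile time (a minimal sketch; the compiler error points at the non-`Send`
//! closure capture):
//!
//! ```compile_fail
//! use std::rc::Rc;
//! use std::thread;
//!
//! let rc = Rc::new(5);
//! // ERROR: `Rc<i32>` cannot be sent between threads safely.
//! thread::spawn(move || println!("{}", rc));
//! ```
//!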
23//! The [`downgrade`][downgrade] method can be used to create a non-owning
24//! [`Weak`] pointer. A [`Weak`] pointer can be [`upgrade`][upgrade]d
25//! to an [`Rc`], but this will return [`None`] if the value stored in the allocation has
26//! already been dropped. In other words, `Weak` pointers do not keep the value
27//! inside the allocation alive; however, they *do* keep the allocation
28//! (the backing store for the inner value) alive.
29//!
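//! For example, a short sketch of [`upgrade`][upgrade] returning [`None`] once the
//! last strong pointer has been dropped:
//!
//! ```
//! use std::rc::Rc;
//!
//! let strong = Rc::new(5);
//! let weak = Rc::downgrade(&strong);
//! // While a strong pointer exists, the weak pointer can be upgraded.
//! assert!(weak.upgrade().is_some());
//! drop(strong);
//! // The inner value has been dropped, so upgrading now yields `None`.
//! assert!(weak.upgrade().is_none());
//! ```
//!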
30//! A cycle between [`Rc`] pointers will never be deallocated. For this reason,
31//! [`Weak`] is used to break cycles. For example, a tree could have strong
32//! [`Rc`] pointers from parent nodes to children, and [`Weak`] pointers from
33//! children back to their parents.
34//!
35//! `Rc<T>` automatically dereferences to `T` (via the [`Deref`] trait),
36//! so you can call `T`'s methods on a value of type [`Rc<T>`][`Rc`]. To avoid name
37//! clashes with `T`'s methods, the methods of [`Rc<T>`][`Rc`] itself are associated
38//! functions, called using [fully qualified syntax]:
39//!
40//! ```
41//! use std::rc::Rc;
42//!
43//! let my_rc = Rc::new(());
44//! let my_weak = Rc::downgrade(&my_rc);
45//! ```
46//!
47//! `Rc<T>`'s implementations of traits like `Clone` may also be called using
48//! fully qualified syntax. Some people prefer to use fully qualified syntax,
49//! while others prefer using method-call syntax.
50//!
51//! ```
52//! use std::rc::Rc;
53//!
54//! let rc = Rc::new(());
55//! // Method-call syntax
56//! let rc2 = rc.clone();
57//! // Fully qualified syntax
58//! let rc3 = Rc::clone(&rc);
59//! ```
60//!
61//! [`Weak<T>`][`Weak`] does not auto-dereference to `T`, because the inner value may have
62//! already been dropped.
63//!
64//! # Cloning references
65//!
66//! Creating a new reference to the same allocation as an existing reference counted pointer
67//! is done using the `Clone` trait implemented for [`Rc<T>`][`Rc`] and [`Weak<T>`][`Weak`].
68//!
69//! ```
70//! use std::rc::Rc;
71//!
72//! let foo = Rc::new(vec![1.0, 2.0, 3.0]);
73//! // The two syntaxes below are equivalent.
74//! let a = foo.clone();
75//! let b = Rc::clone(&foo);
76//! // a and b both point to the same memory location as foo.
77//! ```
78//!
79//! The `Rc::clone(&from)` syntax is the most idiomatic because it conveys more explicitly
80//! the meaning of the code. In the example above, this syntax makes it easier to see that
81//! this code is creating a new reference rather than copying the whole content of `foo`.
82//!
83//! # Examples
84//!
85//! Consider a scenario where a set of `Gadget`s are owned by a given `Owner`.
86//! We want to have our `Gadget`s point to their `Owner`. We can't do this with
87//! unique ownership, because more than one gadget may belong to the same
88//! `Owner`. [`Rc`] allows us to share an `Owner` between multiple `Gadget`s,
89//! and have the `Owner` remain allocated as long as any `Gadget` points at it.
90//!
91//! ```
92//! use std::rc::Rc;
93//!
94//! struct Owner {
95//! name: String,
96//! // ...other fields
97//! }
98//!
99//! struct Gadget {
100//! id: i32,
101//! owner: Rc<Owner>,
102//! // ...other fields
103//! }
104//!
105//! fn main() {
106//! // Create a reference-counted `Owner`.
107//! let gadget_owner: Rc<Owner> = Rc::new(
108//! Owner {
109//! name: "Gadget Man".to_string(),
110//! }
111//! );
112//!
113//! // Create `Gadget`s belonging to `gadget_owner`. Cloning the `Rc<Owner>`
114//! // gives us a new pointer to the same `Owner` allocation, incrementing
115//! // the reference count in the process.
116//! let gadget1 = Gadget {
117//! id: 1,
118//! owner: Rc::clone(&gadget_owner),
119//! };
120//! let gadget2 = Gadget {
121//! id: 2,
122//! owner: Rc::clone(&gadget_owner),
123//! };
124//!
125//! // Dispose of our local variable `gadget_owner`.
126//! drop(gadget_owner);
127//!
128//! // Despite dropping `gadget_owner`, we're still able to print out the name
129//! // of the `Owner` of the `Gadget`s. This is because we've only dropped a
130//! // single `Rc<Owner>`, not the `Owner` it points to. As long as there are
131//! // other `Rc<Owner>` pointing at the same `Owner` allocation, it will remain
132//! // live. The field projection `gadget1.owner.name` works because
133//! // `Rc<Owner>` automatically dereferences to `Owner`.
134//! println!("Gadget {} owned by {}", gadget1.id, gadget1.owner.name);
135//! println!("Gadget {} owned by {}", gadget2.id, gadget2.owner.name);
136//!
137//! // At the end of the function, `gadget1` and `gadget2` are destroyed, and
138//! // with them the last counted references to our `Owner`. Gadget Man now
139//! // gets destroyed as well.
140//! }
141//! ```
142//!
143//! If our requirements change, and we also need to be able to traverse from
144//! `Owner` to `Gadget`, we will run into problems. An [`Rc`] pointer from `Owner`
145//! to `Gadget` introduces a cycle. This means that their
146//! reference counts can never reach 0, and the allocation will never be destroyed:
147//! a memory leak. In order to get around this, we can use [`Weak`]
148//! pointers.
149//!
150//! Rust actually makes it somewhat difficult to produce this loop in the first
151//! place. In order to end up with two values that point at each other, one of
152//! them needs to be mutable. This is difficult because [`Rc`] enforces
153//! memory safety by only giving out shared references to the value it wraps,
154//! and these don't allow direct mutation. We need to wrap the part of the
155//! value we wish to mutate in a [`RefCell`], which provides *interior
156//! mutability*: a method to achieve mutability through a shared reference.
157//! [`RefCell`] enforces Rust's borrowing rules at runtime.
158//!
159//! ```
160//! use std::rc::Rc;
161//! use std::rc::Weak;
162//! use std::cell::RefCell;
163//!
164//! struct Owner {
165//! name: String,
166//! gadgets: RefCell<Vec<Weak<Gadget>>>,
167//! // ...other fields
168//! }
169//!
170//! struct Gadget {
171//! id: i32,
172//! owner: Rc<Owner>,
173//! // ...other fields
174//! }
175//!
176//! fn main() {
177//! // Create a reference-counted `Owner`. Note that we've put the `Owner`'s
178//! // vector of `Gadget`s inside a `RefCell` so that we can mutate it through
179//! // a shared reference.
180//! let gadget_owner: Rc<Owner> = Rc::new(
181//! Owner {
182//! name: "Gadget Man".to_string(),
183//! gadgets: RefCell::new(vec![]),
184//! }
185//! );
186//!
187//! // Create `Gadget`s belonging to `gadget_owner`, as before.
188//! let gadget1 = Rc::new(
189//! Gadget {
190//! id: 1,
191//! owner: Rc::clone(&gadget_owner),
192//! }
193//! );
194//! let gadget2 = Rc::new(
195//! Gadget {
196//! id: 2,
197//! owner: Rc::clone(&gadget_owner),
198//! }
199//! );
200//!
201//! // Add the `Gadget`s to their `Owner`.
202//! {
203//! let mut gadgets = gadget_owner.gadgets.borrow_mut();
204//! gadgets.push(Rc::downgrade(&gadget1));
205//! gadgets.push(Rc::downgrade(&gadget2));
206//!
207//! // `RefCell` dynamic borrow ends here.
208//! }
209//!
210//! // Iterate over our `Gadget`s, printing their details out.
211//! for gadget_weak in gadget_owner.gadgets.borrow().iter() {
212//!
213//! // `gadget_weak` is a `Weak<Gadget>`. Since `Weak` pointers can't
214//! // guarantee the allocation still exists, we need to call
215//! // `upgrade`, which returns an `Option<Rc<Gadget>>`.
216//! //
217//! // In this case we know the allocation still exists, so we simply
218//! // `unwrap` the `Option`. In a more complicated program, you might
219//! // need graceful error handling for a `None` result.
220//!
221//! let gadget = gadget_weak.upgrade().unwrap();
222//! println!("Gadget {} owned by {}", gadget.id, gadget.owner.name);
223//! }
224//!
225//! // At the end of the function, `gadget_owner`, `gadget1`, and `gadget2`
226//! // are destroyed. There are now no strong (`Rc`) pointers to the
227//! // gadgets, so they are destroyed. This zeroes the reference count on
228//! // Gadget Man, so he gets destroyed as well.
229//! }
230//! ```
231//!
232//! [clone]: Clone::clone
233//! [`Cell`]: core::cell::Cell
234//! [`RefCell`]: core::cell::RefCell
235//! [arc]: crate::sync::Arc
236//! [`Deref`]: core::ops::Deref
237//! [downgrade]: Rc::downgrade
238//! [upgrade]: Weak::upgrade
239//! [mutability]: core::cell#introducing-mutability-inside-of-something-immutable
240//! [fully qualified syntax]: https://doc.rust-lang.org/book/ch19-03-advanced-traits.html#fully-qualified-syntax-for-disambiguation-calling-methods-with-the-same-name
241
242#![stable(feature = "rust1", since = "1.0.0")]
243
244use core::any::Any;
245use core::cell::Cell;
246#[cfg(not(no_global_oom_handling))]
247use core::clone::CloneToUninit;
248use core::clone::UseCloned;
249use core::cmp::Ordering;
250use core::hash::{Hash, Hasher};
251use core::intrinsics::abort;
252#[cfg(not(no_global_oom_handling))]
253use core::iter;
254use core::marker::{PhantomData, Unsize};
255use core::mem::{self, ManuallyDrop, align_of_val_raw};
256use core::num::NonZeroUsize;
257use core::ops::{CoerceUnsized, Deref, DerefMut, DerefPure, DispatchFromDyn, LegacyReceiver};
258use core::panic::{RefUnwindSafe, UnwindSafe};
259#[cfg(not(no_global_oom_handling))]
260use core::pin::Pin;
261use core::pin::PinCoerceUnsized;
262use core::ptr::{self, NonNull, drop_in_place};
263#[cfg(not(no_global_oom_handling))]
264use core::slice::from_raw_parts_mut;
265use core::{borrow, fmt, hint};
266
267#[cfg(not(no_global_oom_handling))]
268use crate::alloc::handle_alloc_error;
269use crate::alloc::{AllocError, Allocator, Global, Layout};
270use crate::borrow::{Cow, ToOwned};
271use crate::boxed::Box;
272#[cfg(not(no_global_oom_handling))]
273use crate::string::String;
274#[cfg(not(no_global_oom_handling))]
275use crate::vec::Vec;
276
277// This is repr(C) to future-proof against possible field-reordering, which
278// would interfere with otherwise safe [into|from]_raw() of transmutable
279// inner types.
280#[repr(C)]
281struct RcInner<T: ?Sized> {
282 strong: Cell<usize>,
283 weak: Cell<usize>,
284 value: T,
285}
286
287/// Calculate layout for `RcInner<T>` using the inner value's layout
288fn rc_inner_layout_for_value_layout(layout: Layout) -> Layout {
289 // Calculate layout using the given value layout.
290 // Previously, layout was calculated on the expression
291 // `&*(ptr as *const RcInner<T>)`, but this created a misaligned
292 // reference (see #54908).
293 Layout::new::<RcInner<()>>().extend(layout).unwrap().0.pad_to_align()
294}
295
296/// A single-threaded reference-counting pointer. 'Rc' stands for 'Reference
297/// Counted'.
298///
299/// See the [module-level documentation](./index.html) for more details.
300///
301/// The inherent methods of `Rc` are all associated functions, which means
302/// that you have to call them as e.g., [`Rc::get_mut(&mut value)`][get_mut] instead of
303/// `value.get_mut()`. This avoids conflicts with methods of the inner type `T`.
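///
/// For example, a brief sketch contrasting the two call styles:
///
/// ```
/// use std::rc::Rc;
///
/// let mut value = Rc::new(String::from("hello"));
/// // Methods of the inner `String` are reached through `Deref`...
/// assert_eq!(value.len(), 5);
/// // ...while `Rc`'s own functionality uses associated-function syntax.
/// Rc::get_mut(&mut value).unwrap().push_str(", world");
/// assert_eq!(*value, "hello, world");
/// ```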
304///
305/// [get_mut]: Rc::get_mut
306#[doc(search_unbox)]
307#[rustc_diagnostic_item = "Rc"]
308#[stable(feature = "rust1", since = "1.0.0")]
309#[rustc_insignificant_dtor]
310pub struct Rc<
311 T: ?Sized,
312 #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
313> {
314 ptr: NonNull<RcInner<T>>,
315 phantom: PhantomData<RcInner<T>>,
316 alloc: A,
317}
318
319#[stable(feature = "rust1", since = "1.0.0")]
320impl<T: ?Sized, A: Allocator> !Send for Rc<T, A> {}
321
322// Note that this negative impl isn't strictly necessary for correctness,
323// as `Rc` transitively contains a `Cell`, which is itself `!Sync`.
324// However, given how important `Rc`'s `!Sync`-ness is,
325// having an explicit negative impl is nice for documentation purposes
326// and results in nicer error messages.
327#[stable(feature = "rust1", since = "1.0.0")]
328impl<T: ?Sized, A: Allocator> !Sync for Rc<T, A> {}
329
330#[stable(feature = "catch_unwind", since = "1.9.0")]
331impl<T: RefUnwindSafe + ?Sized, A: Allocator + UnwindSafe> UnwindSafe for Rc<T, A> {}
332#[stable(feature = "rc_ref_unwind_safe", since = "1.58.0")]
333impl<T: RefUnwindSafe + ?Sized, A: Allocator + UnwindSafe> RefUnwindSafe for Rc<T, A> {}
334
335#[unstable(feature = "coerce_unsized", issue = "18598")]
336impl<T: ?Sized + Unsize<U>, U: ?Sized, A: Allocator> CoerceUnsized<Rc<U, A>> for Rc<T, A> {}
337
338#[unstable(feature = "dispatch_from_dyn", issue = "none")]
339impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Rc<U>> for Rc<T> {}
340
341impl<T: ?Sized> Rc<T> {
342 #[inline]
343 unsafe fn from_inner(ptr: NonNull<RcInner<T>>) -> Self {
344 unsafe { Self::from_inner_in(ptr, Global) }
345 }
346
347 #[inline]
348 unsafe fn from_ptr(ptr: *mut RcInner<T>) -> Self {
349 unsafe { Self::from_inner(NonNull::new_unchecked(ptr)) }
350 }
351}
352
353impl<T: ?Sized, A: Allocator> Rc<T, A> {
354 #[inline(always)]
355 fn inner(&self) -> &RcInner<T> {
356 // This unsafety is ok because while this Rc is alive we're guaranteed
357 // that the inner pointer is valid.
358 unsafe { self.ptr.as_ref() }
359 }
360
361 #[inline]
362 fn into_inner_with_allocator(this: Self) -> (NonNull<RcInner<T>>, A) {
363 let this = mem::ManuallyDrop::new(this);
364 (this.ptr, unsafe { ptr::read(&this.alloc) })
365 }
366
367 #[inline]
368 unsafe fn from_inner_in(ptr: NonNull<RcInner<T>>, alloc: A) -> Self {
369 Self { ptr, phantom: PhantomData, alloc }
370 }
371
372 #[inline]
373 unsafe fn from_ptr_in(ptr: *mut RcInner<T>, alloc: A) -> Self {
374 unsafe { Self::from_inner_in(NonNull::new_unchecked(ptr), alloc) }
375 }
376
377 // Non-inlined part of `drop`.
378 #[inline(never)]
379 unsafe fn drop_slow(&mut self) {
380 // Reconstruct the "strong weak" pointer and drop it when this
381 // variable goes out of scope. This ensures that the memory is
382 // deallocated even if the destructor of `T` panics.
383 let _weak = Weak { ptr: self.ptr, alloc: &self.alloc };
384
385 // Destroy the contained object.
386 // We cannot use `get_mut_unchecked` here, because `self.alloc` is borrowed.
387 unsafe {
388 ptr::drop_in_place(&mut (*self.ptr.as_ptr()).value);
389 }
390 }
391}
392
393impl<T> Rc<T> {
394 /// Constructs a new `Rc<T>`.
395 ///
396 /// # Examples
397 ///
398 /// ```
399 /// use std::rc::Rc;
400 ///
401 /// let five = Rc::new(5);
402 /// ```
403 #[cfg(not(no_global_oom_handling))]
404 #[stable(feature = "rust1", since = "1.0.0")]
405 pub fn new(value: T) -> Rc<T> {
406 // There is an implicit weak pointer owned by all the strong
407 // pointers, which ensures that the weak destructor never frees
408 // the allocation while the strong destructor is running, even
409 // if the weak pointer is stored inside the strong one.
410 unsafe {
411 Self::from_inner(
412 Box::leak(Box::new(RcInner { strong: Cell::new(1), weak: Cell::new(1), value }))
413 .into(),
414 )
415 }
416 }
417
418 /// Constructs a new `Rc<T>` while giving you a `Weak<T>` to the allocation,
419 /// to allow you to construct a `T` which holds a weak pointer to itself.
420 ///
421 /// Generally, a structure circularly referencing itself, either directly or
422 /// indirectly, should not hold a strong reference to itself, in order to prevent a memory leak.
423 /// Using this function, you get access to the weak pointer during the
424 /// initialization of `T`, before the `Rc<T>` is created, such that you can
425 /// clone and store it inside the `T`.
426 ///
427 /// `new_cyclic` first allocates the managed allocation for the `Rc<T>`,
428 /// then calls your closure, giving it a `Weak<T>` to this allocation,
429 /// and only afterwards completes the construction of the `Rc<T>` by placing
430 /// the `T` returned from your closure into the allocation.
431 ///
432 /// Since the new `Rc<T>` is not fully-constructed until `Rc<T>::new_cyclic`
433 /// returns, calling [`upgrade`] on the weak reference inside your closure will
434 /// fail and result in a `None` value.
435 ///
436 /// # Panics
437 ///
438 /// If `data_fn` panics, the panic is propagated to the caller, and the
439 /// temporary [`Weak<T>`] is dropped normally.
440 ///
441 /// # Examples
442 ///
443 /// ```
444 /// # #![allow(dead_code)]
445 /// use std::rc::{Rc, Weak};
446 ///
447 /// struct Gadget {
448 /// me: Weak<Gadget>,
449 /// }
450 ///
451 /// impl Gadget {
452 /// /// Constructs a reference counted Gadget.
453 /// fn new() -> Rc<Self> {
454 /// // `me` is a `Weak<Gadget>` pointing at the new allocation of the
455 /// // `Rc` we're constructing.
456 /// Rc::new_cyclic(|me| {
457 /// // Create the actual struct here.
458 /// Gadget { me: me.clone() }
459 /// })
460 /// }
461 ///
462 /// /// Returns a reference counted pointer to Self.
463 /// fn me(&self) -> Rc<Self> {
464 /// self.me.upgrade().unwrap()
465 /// }
466 /// }
467 /// ```
468 /// [`upgrade`]: Weak::upgrade
469 #[cfg(not(no_global_oom_handling))]
470 #[stable(feature = "arc_new_cyclic", since = "1.60.0")]
471 pub fn new_cyclic<F>(data_fn: F) -> Rc<T>
472 where
473 F: FnOnce(&Weak<T>) -> T,
474 {
475 Self::new_cyclic_in(data_fn, Global)
476 }
477
478 /// Constructs a new `Rc` with uninitialized contents.
479 ///
480 /// # Examples
481 ///
482 /// ```
483 /// #![feature(get_mut_unchecked)]
484 ///
485 /// use std::rc::Rc;
486 ///
487 /// let mut five = Rc::<u32>::new_uninit();
488 ///
489 /// // Deferred initialization:
490 /// Rc::get_mut(&mut five).unwrap().write(5);
491 ///
492 /// let five = unsafe { five.assume_init() };
493 ///
494 /// assert_eq!(*five, 5)
495 /// ```
496 #[cfg(not(no_global_oom_handling))]
497 #[stable(feature = "new_uninit", since = "1.82.0")]
498 #[must_use]
499 pub fn new_uninit() -> Rc<mem::MaybeUninit<T>> {
500 unsafe {
501 Rc::from_ptr(Rc::allocate_for_layout(
502 Layout::new::<T>(),
503 |layout| Global.allocate(layout),
504 <*mut u8>::cast,
505 ))
506 }
507 }
508
509 /// Constructs a new `Rc` with uninitialized contents, with the memory
510 /// being filled with `0` bytes.
511 ///
512 /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
513 /// incorrect usage of this method.
514 ///
515 /// # Examples
516 ///
517 /// ```
518 /// use std::rc::Rc;
519 ///
520 /// let zero = Rc::<u32>::new_zeroed();
521 /// let zero = unsafe { zero.assume_init() };
522 ///
523 /// assert_eq!(*zero, 0)
524 /// ```
525 ///
526 /// [zeroed]: mem::MaybeUninit::zeroed
527 #[cfg(not(no_global_oom_handling))]
528 #[stable(feature = "new_zeroed_alloc", since = "CURRENT_RUSTC_VERSION")]
529 #[must_use]
530 pub fn new_zeroed() -> Rc<mem::MaybeUninit<T>> {
531 unsafe {
532 Rc::from_ptr(Rc::allocate_for_layout(
533 Layout::new::<T>(),
534 |layout| Global.allocate_zeroed(layout),
535 <*mut u8>::cast,
536 ))
537 }
538 }
539
540 /// Constructs a new `Rc<T>`, returning an error if the allocation fails
541 ///
542 /// # Examples
543 ///
544 /// ```
545 /// #![feature(allocator_api)]
546 /// use std::rc::Rc;
547 ///
548 /// let five = Rc::try_new(5);
549 /// # Ok::<(), std::alloc::AllocError>(())
550 /// ```
551 #[unstable(feature = "allocator_api", issue = "32838")]
552 pub fn try_new(value: T) -> Result<Rc<T>, AllocError> {
553 // There is an implicit weak pointer owned by all the strong
554 // pointers, which ensures that the weak destructor never frees
555 // the allocation while the strong destructor is running, even
556 // if the weak pointer is stored inside the strong one.
557 unsafe {
558 Ok(Self::from_inner(
559 Box::leak(Box::try_new(RcInner {
560 strong: Cell::new(1),
561 weak: Cell::new(1),
562 value,
563 })?)
564 .into(),
565 ))
566 }
567 }
568
569 /// Constructs a new `Rc` with uninitialized contents, returning an error if the allocation fails
570 ///
571 /// # Examples
572 ///
573 /// ```
574 /// #![feature(allocator_api)]
575 /// #![feature(get_mut_unchecked)]
576 ///
577 /// use std::rc::Rc;
578 ///
579 /// let mut five = Rc::<u32>::try_new_uninit()?;
580 ///
581 /// // Deferred initialization:
582 /// Rc::get_mut(&mut five).unwrap().write(5);
583 ///
584 /// let five = unsafe { five.assume_init() };
585 ///
586 /// assert_eq!(*five, 5);
587 /// # Ok::<(), std::alloc::AllocError>(())
588 /// ```
589 #[unstable(feature = "allocator_api", issue = "32838")]
590 pub fn try_new_uninit() -> Result<Rc<mem::MaybeUninit<T>>, AllocError> {
591 unsafe {
592 Ok(Rc::from_ptr(Rc::try_allocate_for_layout(
593 Layout::new::<T>(),
594 |layout| Global.allocate(layout),
595 <*mut u8>::cast,
596 )?))
597 }
598 }
599
600 /// Constructs a new `Rc` with uninitialized contents, with the memory
601 /// being filled with `0` bytes, returning an error if the allocation fails
602 ///
603 /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
604 /// incorrect usage of this method.
605 ///
606 /// # Examples
607 ///
608 /// ```
609 /// #![feature(allocator_api)]
610 ///
611 /// use std::rc::Rc;
612 ///
613 /// let zero = Rc::<u32>::try_new_zeroed()?;
614 /// let zero = unsafe { zero.assume_init() };
615 ///
616 /// assert_eq!(*zero, 0);
617 /// # Ok::<(), std::alloc::AllocError>(())
618 /// ```
619 ///
620 /// [zeroed]: mem::MaybeUninit::zeroed
621 #[unstable(feature = "allocator_api", issue = "32838")]
622 pub fn try_new_zeroed() -> Result<Rc<mem::MaybeUninit<T>>, AllocError> {
623 unsafe {
624 Ok(Rc::from_ptr(Rc::try_allocate_for_layout(
625 Layout::new::<T>(),
626 |layout| Global.allocate_zeroed(layout),
627 <*mut u8>::cast,
628 )?))
629 }
630 }
631 /// Constructs a new `Pin<Rc<T>>`. If `T` does not implement `Unpin`, then
632 /// `value` will be pinned in memory and unable to be moved.
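    ///
    /// # Examples
    ///
    /// A minimal sketch; the pinned value can still be read through the pointer:
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let pinned = Rc::pin(5);
    /// assert_eq!(*pinned, 5);
    /// ```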
633 #[cfg(not(no_global_oom_handling))]
634 #[stable(feature = "pin", since = "1.33.0")]
635 #[must_use]
636 pub fn pin(value: T) -> Pin<Rc<T>> {
637 unsafe { Pin::new_unchecked(Rc::new(value)) }
638 }
639}
640
641impl<T, A: Allocator> Rc<T, A> {
642 /// Constructs a new `Rc` in the provided allocator.
643 ///
644 /// # Examples
645 ///
646 /// ```
647 /// #![feature(allocator_api)]
648 /// use std::rc::Rc;
649 /// use std::alloc::System;
650 ///
651 /// let five = Rc::new_in(5, System);
652 /// ```
653 #[cfg(not(no_global_oom_handling))]
654 #[unstable(feature = "allocator_api", issue = "32838")]
655 #[inline]
656 pub fn new_in(value: T, alloc: A) -> Rc<T, A> {
657 // NOTE: Prefer match over unwrap_or_else since closure sometimes not inlineable.
658 // That would make code size bigger.
659 match Self::try_new_in(value, alloc) {
660 Ok(m) => m,
661 Err(_) => handle_alloc_error(Layout::new::<RcInner<T>>()),
662 }
663 }
664
665 /// Constructs a new `Rc` with uninitialized contents in the provided allocator.
666 ///
667 /// # Examples
668 ///
669 /// ```
670 /// #![feature(get_mut_unchecked)]
671 /// #![feature(allocator_api)]
672 ///
673 /// use std::rc::Rc;
674 /// use std::alloc::System;
675 ///
676 /// let mut five = Rc::<u32, _>::new_uninit_in(System);
677 ///
678 /// let five = unsafe {
679 /// // Deferred initialization:
680 /// Rc::get_mut_unchecked(&mut five).as_mut_ptr().write(5);
681 ///
682 /// five.assume_init()
683 /// };
684 ///
685 /// assert_eq!(*five, 5)
686 /// ```
687 #[cfg(not(no_global_oom_handling))]
688 #[unstable(feature = "allocator_api", issue = "32838")]
689 #[inline]
690 pub fn new_uninit_in(alloc: A) -> Rc<mem::MaybeUninit<T>, A> {
691 unsafe {
692 Rc::from_ptr_in(
693 Rc::allocate_for_layout(
694 Layout::new::<T>(),
695 |layout| alloc.allocate(layout),
696 <*mut u8>::cast,
697 ),
698 alloc,
699 )
700 }
701 }
702
703 /// Constructs a new `Rc` with uninitialized contents, with the memory
704 /// being filled with `0` bytes, in the provided allocator.
705 ///
706 /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
707 /// incorrect usage of this method.
708 ///
709 /// # Examples
710 ///
711 /// ```
712 /// #![feature(allocator_api)]
713 ///
714 /// use std::rc::Rc;
715 /// use std::alloc::System;
716 ///
717 /// let zero = Rc::<u32, _>::new_zeroed_in(System);
718 /// let zero = unsafe { zero.assume_init() };
719 ///
720 /// assert_eq!(*zero, 0)
721 /// ```
722 ///
723 /// [zeroed]: mem::MaybeUninit::zeroed
724 #[cfg(not(no_global_oom_handling))]
725 #[unstable(feature = "allocator_api", issue = "32838")]
726 #[inline]
727 pub fn new_zeroed_in(alloc: A) -> Rc<mem::MaybeUninit<T>, A> {
728 unsafe {
729 Rc::from_ptr_in(
730 Rc::allocate_for_layout(
731 Layout::new::<T>(),
732 |layout| alloc.allocate_zeroed(layout),
733 <*mut u8>::cast,
734 ),
735 alloc,
736 )
737 }
738 }
739
740 /// Constructs a new `Rc<T, A>` in the given allocator while giving you a `Weak<T, A>` to the allocation,
741 /// to allow you to construct a `T` which holds a weak pointer to itself.
742 ///
743 /// Generally, a structure circularly referencing itself, either directly or
744 /// indirectly, should not hold a strong reference to itself, in order to prevent a memory leak.
745 /// Using this function, you get access to the weak pointer during the
746 /// initialization of `T`, before the `Rc<T, A>` is created, such that you can
747 /// clone and store it inside the `T`.
748 ///
749 /// `new_cyclic_in` first allocates the managed allocation for the `Rc<T, A>`,
750 /// then calls your closure, giving it a `Weak<T, A>` to this allocation,
751 /// and only afterwards completes the construction of the `Rc<T, A>` by placing
752 /// the `T` returned from your closure into the allocation.
753 ///
754 /// Since the new `Rc<T, A>` is not fully-constructed until `Rc<T, A>::new_cyclic_in`
755 /// returns, calling [`upgrade`] on the weak reference inside your closure will
756 /// fail and result in a `None` value.
757 ///
758 /// # Panics
759 ///
760 /// If `data_fn` panics, the panic is propagated to the caller, and the
761 /// temporary [`Weak<T, A>`] is dropped normally.
762 ///
763 /// # Examples
764 ///
765 /// See [`new_cyclic`].
766 ///
767 /// [`new_cyclic`]: Rc::new_cyclic
768 /// [`upgrade`]: Weak::upgrade
769 #[cfg(not(no_global_oom_handling))]
770 #[unstable(feature = "allocator_api", issue = "32838")]
771 pub fn new_cyclic_in<F>(data_fn: F, alloc: A) -> Rc<T, A>
772 where
773 F: FnOnce(&Weak<T, A>) -> T,
774 {
775 // Construct the inner in the "uninitialized" state with a single
776 // weak reference.
777 let (uninit_raw_ptr, alloc) = Box::into_raw_with_allocator(Box::new_in(
778 RcInner {
779 strong: Cell::new(0),
780 weak: Cell::new(1),
781 value: mem::MaybeUninit::<T>::uninit(),
782 },
783 alloc,
784 ));
785 let uninit_ptr: NonNull<_> = (unsafe { &mut *uninit_raw_ptr }).into();
786 let init_ptr: NonNull<RcInner<T>> = uninit_ptr.cast();
787
788 let weak = Weak { ptr: init_ptr, alloc };
789
790 // It's important we don't give up ownership of the weak pointer, or
791 // else the memory might be freed by the time `data_fn` returns. If
792 // we really wanted to pass ownership, we could create an additional
793 // weak pointer for ourselves, but this would result in additional
794 // updates to the weak reference count which might not be necessary
795 // otherwise.
796 let data = data_fn(&weak);
797
798 let strong = unsafe {
799 let inner = init_ptr.as_ptr();
800 ptr::write(&raw mut (*inner).value, data);
801
802 let prev_value = (*inner).strong.get();
803 debug_assert_eq!(prev_value, 0, "No prior strong references should exist");
804 (*inner).strong.set(1);
805
806 // Strong references should collectively own a shared weak reference,
807 // so don't run the destructor for our old weak reference.
808 // Calling into_raw_with_allocator has the double effect of giving us back the allocator,
809 // and forgetting the weak reference.
810 let alloc = weak.into_raw_with_allocator().1;
811
812 Rc::from_inner_in(init_ptr, alloc)
813 };
814
815 strong
816 }
817
818 /// Constructs a new `Rc<T>` in the provided allocator, returning an error if the allocation
819 /// fails
820 ///
821 /// # Examples
822 ///
823 /// ```
824 /// #![feature(allocator_api)]
825 /// use std::rc::Rc;
826 /// use std::alloc::System;
827 ///
828 /// let five = Rc::try_new_in(5, System);
829 /// # Ok::<(), std::alloc::AllocError>(())
830 /// ```
831 #[unstable(feature = "allocator_api", issue = "32838")]
832 #[inline]
833 pub fn try_new_in(value: T, alloc: A) -> Result<Self, AllocError> {
834 // There is an implicit weak pointer owned by all the strong
835 // pointers, which ensures that the weak destructor never frees
836 // the allocation while the strong destructor is running, even
837 // if the weak pointer is stored inside the strong one.
838 let (ptr, alloc) = Box::into_unique(Box::try_new_in(
839 RcInner { strong: Cell::new(1), weak: Cell::new(1), value },
840 alloc,
841 )?);
842 Ok(unsafe { Self::from_inner_in(ptr.into(), alloc) })
843 }
844
845 /// Constructs a new `Rc` with uninitialized contents, in the provided allocator, returning an
846 /// error if the allocation fails
847 ///
848 /// # Examples
849 ///
850 /// ```
851 /// #![feature(allocator_api)]
852 /// #![feature(get_mut_unchecked)]
853 ///
854 /// use std::rc::Rc;
855 /// use std::alloc::System;
856 ///
857 /// let mut five = Rc::<u32, _>::try_new_uninit_in(System)?;
858 ///
859 /// let five = unsafe {
860 /// // Deferred initialization:
861 /// Rc::get_mut_unchecked(&mut five).as_mut_ptr().write(5);
862 ///
863 /// five.assume_init()
864 /// };
865 ///
866 /// assert_eq!(*five, 5);
867 /// # Ok::<(), std::alloc::AllocError>(())
868 /// ```
869 #[unstable(feature = "allocator_api", issue = "32838")]
870 #[inline]
871 pub fn try_new_uninit_in(alloc: A) -> Result<Rc<mem::MaybeUninit<T>, A>, AllocError> {
872 unsafe {
873 Ok(Rc::from_ptr_in(
874 Rc::try_allocate_for_layout(
875 Layout::new::<T>(),
876 |layout| alloc.allocate(layout),
877 <*mut u8>::cast,
878 )?,
879 alloc,
880 ))
881 }
882 }
883
884 /// Constructs a new `Rc` with uninitialized contents, with the memory
885 /// being filled with `0` bytes, in the provided allocator, returning an error if the allocation
886 /// fails
887 ///
888 /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
889 /// incorrect usage of this method.
890 ///
891 /// # Examples
892 ///
893 /// ```
894 /// #![feature(allocator_api)]
895 ///
896 /// use std::rc::Rc;
897 /// use std::alloc::System;
898 ///
899 /// let zero = Rc::<u32, _>::try_new_zeroed_in(System)?;
900 /// let zero = unsafe { zero.assume_init() };
901 ///
902 /// assert_eq!(*zero, 0);
903 /// # Ok::<(), std::alloc::AllocError>(())
904 /// ```
905 ///
906 /// [zeroed]: mem::MaybeUninit::zeroed
907 #[unstable(feature = "allocator_api", issue = "32838")]
908 #[inline]
909 pub fn try_new_zeroed_in(alloc: A) -> Result<Rc<mem::MaybeUninit<T>, A>, AllocError> {
910 unsafe {
911 Ok(Rc::from_ptr_in(
912 Rc::try_allocate_for_layout(
913 Layout::new::<T>(),
914 |layout| alloc.allocate_zeroed(layout),
915 <*mut u8>::cast,
916 )?,
917 alloc,
918 ))
919 }
920 }
921
922 /// Constructs a new `Pin<Rc<T>>` in the provided allocator. If `T` does not implement `Unpin`, then
923 /// `value` will be pinned in memory and unable to be moved.
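    ///
    /// # Examples
    ///
    /// A minimal sketch using the `System` allocator (requires the unstable
    /// `allocator_api` feature):
    ///
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let pinned = Rc::pin_in(5, System);
    /// assert_eq!(*pinned, 5);
    /// ```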
924 #[cfg(not(no_global_oom_handling))]
925 #[unstable(feature = "allocator_api", issue = "32838")]
926 #[inline]
927 pub fn pin_in(value: T, alloc: A) -> Pin<Self>
928 where
929 A: 'static,
930 {
931 unsafe { Pin::new_unchecked(Rc::new_in(value, alloc)) }
932 }
933
934 /// Returns the inner value, if the `Rc` has exactly one strong reference.
935 ///
936 /// Otherwise, an [`Err`] is returned with the same `Rc` that was
937 /// passed in.
938 ///
939 /// This will succeed even if there are outstanding weak references.
940 ///
941 /// # Examples
942 ///
943 /// ```
944 /// use std::rc::Rc;
945 ///
946 /// let x = Rc::new(3);
947 /// assert_eq!(Rc::try_unwrap(x), Ok(3));
948 ///
949 /// let x = Rc::new(4);
950 /// let _y = Rc::clone(&x);
951 /// assert_eq!(*Rc::try_unwrap(x).unwrap_err(), 4);
952 /// ```
953 #[inline]
954 #[stable(feature = "rc_unique", since = "1.4.0")]
955 pub fn try_unwrap(this: Self) -> Result<T, Self> {
956 if Rc::strong_count(&this) == 1 {
957 let this = ManuallyDrop::new(this);
958
959 let val: T = unsafe { ptr::read(&**this) }; // copy the contained object
960 let alloc: A = unsafe { ptr::read(&this.alloc) }; // copy the allocator
961
962 // Indicate to Weaks that they can't be promoted by decrementing
963 // the strong count, and then remove the implicit "strong weak"
964 // pointer while also handling drop logic by just crafting a
965 // fake Weak.
966 this.inner().dec_strong();
967 let _weak = Weak { ptr: this.ptr, alloc };
968 Ok(val)
969 } else {
970 Err(this)
971 }
972 }
973
974 /// Returns the inner value, if the `Rc` has exactly one strong reference.
975 ///
976 /// Otherwise, [`None`] is returned and the `Rc` is dropped.
977 ///
978 /// This will succeed even if there are outstanding weak references.
979 ///
980 /// If `Rc::into_inner` is called on every clone of this `Rc`,
981 /// it is guaranteed that exactly one of the calls returns the inner value.
982 /// This means in particular that the inner value is not dropped.
983 ///
984 /// [`Rc::try_unwrap`] is conceptually similar to `Rc::into_inner`.
985 /// And while they are meant for different use-cases, `Rc::into_inner(this)`
986 /// is in fact equivalent to <code>[Rc::try_unwrap]\(this).[ok][Result::ok]()</code>.
987 /// (Note that the same kind of equivalence does **not** hold true for
988 /// [`Arc`](crate::sync::Arc), due to race conditions that do not apply to `Rc`!)
989 ///
990 /// # Examples
991 ///
992 /// ```
993 /// use std::rc::Rc;
994 ///
995 /// let x = Rc::new(3);
996 /// assert_eq!(Rc::into_inner(x), Some(3));
997 ///
998 /// let x = Rc::new(4);
999 /// let y = Rc::clone(&x);
1000 ///
1001 /// assert_eq!(Rc::into_inner(y), None);
1002 /// assert_eq!(Rc::into_inner(x), Some(4));
1003 /// ```
1004 #[inline]
1005 #[stable(feature = "rc_into_inner", since = "1.70.0")]
1006 pub fn into_inner(this: Self) -> Option<T> {
1007 Rc::try_unwrap(this).ok()
1008 }
1009}
1010
1011impl<T> Rc<[T]> {
1012 /// Constructs a new reference-counted slice with uninitialized contents.
1013 ///
1014 /// # Examples
1015 ///
1016 /// ```
1017 /// #![feature(get_mut_unchecked)]
1018 ///
1019 /// use std::rc::Rc;
1020 ///
1021 /// let mut values = Rc::<[u32]>::new_uninit_slice(3);
1022 ///
1023 /// // Deferred initialization:
1024 /// let data = Rc::get_mut(&mut values).unwrap();
1025 /// data[0].write(1);
1026 /// data[1].write(2);
1027 /// data[2].write(3);
1028 ///
1029 /// let values = unsafe { values.assume_init() };
1030 ///
1031 /// assert_eq!(*values, [1, 2, 3])
1032 /// ```
1033 #[cfg(not(no_global_oom_handling))]
1034 #[stable(feature = "new_uninit", since = "1.82.0")]
1035 #[must_use]
1036 pub fn new_uninit_slice(len: usize) -> Rc<[mem::MaybeUninit<T>]> {
1037 unsafe { Rc::from_ptr(Rc::allocate_for_slice(len)) }
1038 }
1039
1040 /// Constructs a new reference-counted slice with uninitialized contents, with the memory being
1041 /// filled with `0` bytes.
1042 ///
1043 /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
1044 /// incorrect usage of this method.
1045 ///
1046 /// # Examples
1047 ///
1048 /// ```
1049 /// use std::rc::Rc;
1050 ///
1051 /// let values = Rc::<[u32]>::new_zeroed_slice(3);
1052 /// let values = unsafe { values.assume_init() };
1053 ///
1054 /// assert_eq!(*values, [0, 0, 0])
1055 /// ```
1056 ///
1057 /// [zeroed]: mem::MaybeUninit::zeroed
1058 #[cfg(not(no_global_oom_handling))]
1059 #[stable(feature = "new_zeroed_alloc", since = "CURRENT_RUSTC_VERSION")]
1060 #[must_use]
1061 pub fn new_zeroed_slice(len: usize) -> Rc<[mem::MaybeUninit<T>]> {
1062 unsafe {
1063 Rc::from_ptr(Rc::allocate_for_layout(
1064 Layout::array::<T>(len).unwrap(),
1065 |layout| Global.allocate_zeroed(layout),
1066 |mem| {
1067 ptr::slice_from_raw_parts_mut(mem.cast::<T>(), len)
1068 as *mut RcInner<[mem::MaybeUninit<T>]>
1069 },
1070 ))
1071 }
1072 }
1073
1074 /// Converts the reference-counted slice into a reference-counted array.
1075 ///
1076 /// This operation does not reallocate; the underlying array of the slice is simply reinterpreted as an array type.
1077 ///
1078 /// If `N` is not exactly equal to the length of `self`, then this method returns `None`.
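    ///
    /// # Examples
    ///
    /// A minimal sketch (requires the unstable `slice_as_array` feature):
    ///
    /// ```
    /// #![feature(slice_as_array)]
    ///
    /// use std::rc::Rc;
    ///
    /// let slice: Rc<[u32]> = Rc::new([1, 2, 3]);
    /// let array: Rc<[u32; 3]> = slice.into_array().unwrap();
    /// assert_eq!(*array, [1, 2, 3]);
    ///
    /// // A mismatched length yields `None`.
    /// let slice: Rc<[u32]> = Rc::new([1, 2, 3]);
    /// assert!(slice.into_array::<4>().is_none());
    /// ```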
1079 #[unstable(feature = "slice_as_array", issue = "133508")]
1080 #[inline]
1081 #[must_use]
1082 pub fn into_array<const N: usize>(self) -> Option<Rc<[T; N]>> {
1083 if self.len() == N {
1084 let ptr = Self::into_raw(self) as *const [T; N];
1085
1086 // SAFETY: The underlying array of a slice has the exact same layout as an actual array `[T; N]` if `N` is equal to the slice's length.
1087 let me = unsafe { Rc::from_raw(ptr) };
1088 Some(me)
1089 } else {
1090 None
1091 }
1092 }
1093}
1094
1095impl<T, A: Allocator> Rc<[T], A> {
1096 /// Constructs a new reference-counted slice with uninitialized contents.
1097 ///
1098 /// # Examples
1099 ///
1100 /// ```
1101 /// #![feature(get_mut_unchecked)]
1102 /// #![feature(allocator_api)]
1103 ///
1104 /// use std::rc::Rc;
1105 /// use std::alloc::System;
1106 ///
1107 /// let mut values = Rc::<[u32], _>::new_uninit_slice_in(3, System);
1108 ///
1109 /// let values = unsafe {
1110 /// // Deferred initialization:
1111 /// Rc::get_mut_unchecked(&mut values)[0].as_mut_ptr().write(1);
1112 /// Rc::get_mut_unchecked(&mut values)[1].as_mut_ptr().write(2);
1113 /// Rc::get_mut_unchecked(&mut values)[2].as_mut_ptr().write(3);
1114 ///
1115 /// values.assume_init()
1116 /// };
1117 ///
1118 /// assert_eq!(*values, [1, 2, 3])
1119 /// ```
1120 #[cfg(not(no_global_oom_handling))]
1121 #[unstable(feature = "allocator_api", issue = "32838")]
1122 #[inline]
1123 pub fn new_uninit_slice_in(len: usize, alloc: A) -> Rc<[mem::MaybeUninit<T>], A> {
1124 unsafe { Rc::from_ptr_in(Rc::allocate_for_slice_in(len, &alloc), alloc) }
1125 }
1126
1127 /// Constructs a new reference-counted slice with uninitialized contents, with the memory being
1128 /// filled with `0` bytes.
1129 ///
1130 /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
1131 /// incorrect usage of this method.
1132 ///
1133 /// # Examples
1134 ///
1135 /// ```
1136 /// #![feature(allocator_api)]
1137 ///
1138 /// use std::rc::Rc;
1139 /// use std::alloc::System;
1140 ///
1141 /// let values = Rc::<[u32], _>::new_zeroed_slice_in(3, System);
1142 /// let values = unsafe { values.assume_init() };
1143 ///
1144 /// assert_eq!(*values, [0, 0, 0])
1145 /// ```
1146 ///
1147 /// [zeroed]: mem::MaybeUninit::zeroed
1148 #[cfg(not(no_global_oom_handling))]
1149 #[unstable(feature = "allocator_api", issue = "32838")]
1150 #[inline]
1151 pub fn new_zeroed_slice_in(len: usize, alloc: A) -> Rc<[mem::MaybeUninit<T>], A> {
1152 unsafe {
1153 Rc::from_ptr_in(
1154 Rc::allocate_for_layout(
1155 Layout::array::<T>(len).unwrap(),
1156 |layout| alloc.allocate_zeroed(layout),
1157 |mem| {
1158 ptr::slice_from_raw_parts_mut(mem.cast::<T>(), len)
1159 as *mut RcInner<[mem::MaybeUninit<T>]>
1160 },
1161 ),
1162 alloc,
1163 )
1164 }
1165 }
1166}
1167
1168impl<T, A: Allocator> Rc<mem::MaybeUninit<T>, A> {
1169 /// Converts to `Rc<T>`.
1170 ///
1171 /// # Safety
1172 ///
1173 /// As with [`MaybeUninit::assume_init`],
1174 /// it is up to the caller to guarantee that the inner value
1175 /// really is in an initialized state.
1176 /// Calling this when the content is not yet fully initialized
1177 /// causes immediate undefined behavior.
1178 ///
1179 /// [`MaybeUninit::assume_init`]: mem::MaybeUninit::assume_init
1180 ///
1181 /// # Examples
1182 ///
1183 /// ```
1184 /// #![feature(get_mut_unchecked)]
1185 ///
1186 /// use std::rc::Rc;
1187 ///
1188 /// let mut five = Rc::<u32>::new_uninit();
1189 ///
1190 /// // Deferred initialization:
1191 /// Rc::get_mut(&mut five).unwrap().write(5);
1192 ///
1193 /// let five = unsafe { five.assume_init() };
1194 ///
1195 /// assert_eq!(*five, 5)
1196 /// ```
1197 #[stable(feature = "new_uninit", since = "1.82.0")]
1198 #[inline]
1199 pub unsafe fn assume_init(self) -> Rc<T, A> {
1200 let (ptr, alloc) = Rc::into_inner_with_allocator(self);
1201 unsafe { Rc::from_inner_in(ptr.cast(), alloc) }
1202 }
1203}
1204
1205impl<T, A: Allocator> Rc<[mem::MaybeUninit<T>], A> {
1206 /// Converts to `Rc<[T]>`.
1207 ///
1208 /// # Safety
1209 ///
1210 /// As with [`MaybeUninit::assume_init`],
1211 /// it is up to the caller to guarantee that the inner value
1212 /// really is in an initialized state.
1213 /// Calling this when the content is not yet fully initialized
1214 /// causes immediate undefined behavior.
1215 ///
1216 /// [`MaybeUninit::assume_init`]: mem::MaybeUninit::assume_init
1217 ///
1218 /// # Examples
1219 ///
1220 /// ```
1221 /// #![feature(get_mut_unchecked)]
1222 ///
1223 /// use std::rc::Rc;
1224 ///
1225 /// let mut values = Rc::<[u32]>::new_uninit_slice(3);
1226 ///
1227 /// // Deferred initialization:
1228 /// let data = Rc::get_mut(&mut values).unwrap();
1229 /// data[0].write(1);
1230 /// data[1].write(2);
1231 /// data[2].write(3);
1232 ///
1233 /// let values = unsafe { values.assume_init() };
1234 ///
1235 /// assert_eq!(*values, [1, 2, 3])
1236 /// ```
1237 #[stable(feature = "new_uninit", since = "1.82.0")]
1238 #[inline]
1239 pub unsafe fn assume_init(self) -> Rc<[T], A> {
1240 let (ptr, alloc) = Rc::into_inner_with_allocator(self);
1241 unsafe { Rc::from_ptr_in(ptr.as_ptr() as _, alloc) }
1242 }
1243}
1244
1245impl<T: ?Sized> Rc<T> {
1246 /// Constructs an `Rc<T>` from a raw pointer.
1247 ///
1248 /// The raw pointer must have been previously returned by a call to
1249 /// [`Rc<U>::into_raw`][into_raw] with the following requirements:
1250 ///
1251 /// * If `U` is sized, it must have the same size and alignment as `T`. This
1252 /// is trivially true if `U` is `T`.
1253 /// * If `U` is unsized, its data pointer must have the same size and
1254 /// alignment as `T`. This is trivially true if `Rc<U>` was constructed
1255 /// through `Rc<T>` and then converted to `Rc<U>` through an [unsized
1256 /// coercion].
1257 ///
1258 /// Note that if `U` or `U`'s data pointer is not `T` but has the same size
1259 /// and alignment, this is basically like transmuting references of
1260 /// different types. See [`mem::transmute`][transmute] for more information
1261 /// on what restrictions apply in this case.
1262 ///
1263 /// The raw pointer must point to a block of memory allocated by the global allocator.
1264 ///
1265 /// The user of `from_raw` has to make sure a specific value of `T` is only
1266 /// dropped once.
1267 ///
1268 /// This function is unsafe because improper use may lead to memory unsafety,
1269 /// even if the returned `Rc<T>` is never accessed.
1270 ///
1271 /// [into_raw]: Rc::into_raw
1272 /// [transmute]: core::mem::transmute
1273 /// [unsized coercion]: https://doc.rust-lang.org/reference/type-coercions.html#unsized-coercions
1274 ///
1275 /// # Examples
1276 ///
1277 /// ```
1278 /// use std::rc::Rc;
1279 ///
1280 /// let x = Rc::new("hello".to_owned());
1281 /// let x_ptr = Rc::into_raw(x);
1282 ///
1283 /// unsafe {
1284 /// // Convert back to an `Rc` to prevent leak.
1285 /// let x = Rc::from_raw(x_ptr);
1286 /// assert_eq!(&*x, "hello");
1287 ///
1288 /// // Further calls to `Rc::from_raw(x_ptr)` would be memory-unsafe.
1289 /// }
1290 ///
1291 /// // The memory was freed when `x` went out of scope above, so `x_ptr` is now dangling!
1292 /// ```
1293 ///
1294 /// Convert a slice back into its original array:
1295 ///
1296 /// ```
1297 /// use std::rc::Rc;
1298 ///
1299 /// let x: Rc<[u32]> = Rc::new([1, 2, 3]);
1300 /// let x_ptr: *const [u32] = Rc::into_raw(x);
1301 ///
1302 /// unsafe {
1303 /// let x: Rc<[u32; 3]> = Rc::from_raw(x_ptr.cast::<[u32; 3]>());
1304 /// assert_eq!(&*x, &[1, 2, 3]);
1305 /// }
1306 /// ```
1307 #[inline]
1308 #[stable(feature = "rc_raw", since = "1.17.0")]
1309 pub unsafe fn from_raw(ptr: *const T) -> Self {
1310 unsafe { Self::from_raw_in(ptr, Global) }
1311 }
1312
1313 /// Consumes the `Rc`, returning the wrapped pointer.
1314 ///
1315 /// To avoid a memory leak the pointer must be converted back to an `Rc` using
1316 /// [`Rc::from_raw`].
1317 ///
1318 /// # Examples
1319 ///
1320 /// ```
1321 /// use std::rc::Rc;
1322 ///
1323 /// let x = Rc::new("hello".to_owned());
1324 /// let x_ptr = Rc::into_raw(x);
1325 /// assert_eq!(unsafe { &*x_ptr }, "hello");
1326 /// # // Prevent leaks for Miri.
1327 /// # drop(unsafe { Rc::from_raw(x_ptr) });
1328 /// ```
1329 #[must_use = "losing the pointer will leak memory"]
1330 #[stable(feature = "rc_raw", since = "1.17.0")]
1331 #[rustc_never_returns_null_ptr]
1332 pub fn into_raw(this: Self) -> *const T {
1333 let this = ManuallyDrop::new(this);
1334 Self::as_ptr(&*this)
1335 }
1336
1337 /// Increments the strong reference count on the `Rc<T>` associated with the
1338 /// provided pointer by one.
1339 ///
1340 /// # Safety
1341 ///
1342 /// The pointer must have been obtained through `Rc::into_raw` and must satisfy the
1343 /// same layout requirements specified in [`Rc::from_raw_in`][from_raw_in].
1344 /// The associated `Rc` instance must be valid (i.e. the strong count must be at
1345 /// least 1) for the duration of this method, and `ptr` must point to a block of memory
1346 /// allocated by the global allocator.
1347 ///
1348 /// [from_raw_in]: Rc::from_raw_in
1349 ///
1350 /// # Examples
1351 ///
1352 /// ```
1353 /// use std::rc::Rc;
1354 ///
1355 /// let five = Rc::new(5);
1356 ///
1357 /// unsafe {
1358 /// let ptr = Rc::into_raw(five);
1359 /// Rc::increment_strong_count(ptr);
1360 ///
1361 /// let five = Rc::from_raw(ptr);
1362 /// assert_eq!(2, Rc::strong_count(&five));
1363 /// # // Prevent leaks for Miri.
1364 /// # Rc::decrement_strong_count(ptr);
1365 /// }
1366 /// ```
1367 #[inline]
1368 #[stable(feature = "rc_mutate_strong_count", since = "1.53.0")]
1369 pub unsafe fn increment_strong_count(ptr: *const T) {
1370 unsafe { Self::increment_strong_count_in(ptr, Global) }
1371 }
1372
1373 /// Decrements the strong reference count on the `Rc<T>` associated with the
1374 /// provided pointer by one.
1375 ///
1376 /// # Safety
1377 ///
1378 /// The pointer must have been obtained through `Rc::into_raw` and must satisfy the
1379 /// same layout requirements specified in [`Rc::from_raw_in`][from_raw_in].
1380 /// The associated `Rc` instance must be valid (i.e. the strong count must be at
1381 /// least 1) when invoking this method, and `ptr` must point to a block of memory
1382 /// allocated by the global allocator. This method can be used to release the final `Rc` and
1383 /// backing storage, but **should not** be called after the final `Rc` has been released.
1384 ///
1385 /// [from_raw_in]: Rc::from_raw_in
1386 ///
1387 /// # Examples
1388 ///
1389 /// ```
1390 /// use std::rc::Rc;
1391 ///
1392 /// let five = Rc::new(5);
1393 ///
1394 /// unsafe {
1395 /// let ptr = Rc::into_raw(five);
1396 /// Rc::increment_strong_count(ptr);
1397 ///
1398 /// let five = Rc::from_raw(ptr);
1399 /// assert_eq!(2, Rc::strong_count(&five));
1400 /// Rc::decrement_strong_count(ptr);
1401 /// assert_eq!(1, Rc::strong_count(&five));
1402 /// }
1403 /// ```
1404 #[inline]
1405 #[stable(feature = "rc_mutate_strong_count", since = "1.53.0")]
1406 pub unsafe fn decrement_strong_count(ptr: *const T) {
1407 unsafe { Self::decrement_strong_count_in(ptr, Global) }
1408 }
1409}
1410
1411impl<T: ?Sized, A: Allocator> Rc<T, A> {
1412 /// Returns a reference to the underlying allocator.
1413 ///
1414 /// Note: this is an associated function, which means that you have
1415 /// to call it as `Rc::allocator(&r)` instead of `r.allocator()`. This
1416 /// is so that there is no conflict with a method on the inner type.
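    ///
    /// # Examples
    ///
    /// A minimal sketch (requires the unstable `allocator_api` feature):
    ///
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let rc = Rc::new_in(5, System);
    /// let _allocator: &System = Rc::allocator(&rc);
    /// ```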
1417 #[inline]
1418 #[unstable(feature = "allocator_api", issue = "32838")]
1419 pub fn allocator(this: &Self) -> &A {
1420 &this.alloc
1421 }
1422
1423 /// Consumes the `Rc`, returning the wrapped pointer and allocator.
1424 ///
1425 /// To avoid a memory leak the pointer must be converted back to an `Rc` using
1426 /// [`Rc::from_raw_in`].
1427 ///
1428 /// # Examples
1429 ///
1430 /// ```
1431 /// #![feature(allocator_api)]
1432 /// use std::rc::Rc;
1433 /// use std::alloc::System;
1434 ///
1435 /// let x = Rc::new_in("hello".to_owned(), System);
1436 /// let (ptr, alloc) = Rc::into_raw_with_allocator(x);
1437 /// assert_eq!(unsafe { &*ptr }, "hello");
1438 /// let x = unsafe { Rc::from_raw_in(ptr, alloc) };
1439 /// assert_eq!(&*x, "hello");
1440 /// ```
1441 #[must_use = "losing the pointer will leak memory"]
1442 #[unstable(feature = "allocator_api", issue = "32838")]
1443 pub fn into_raw_with_allocator(this: Self) -> (*const T, A) {
1444 let this = mem::ManuallyDrop::new(this);
1445 let ptr = Self::as_ptr(&this);
1446 // Safety: `this` is ManuallyDrop so the allocator will not be double-dropped
1447 let alloc = unsafe { ptr::read(&this.alloc) };
1448 (ptr, alloc)
1449 }
1450
1451 /// Provides a raw pointer to the data.
1452 ///
1453 /// The counts are not affected in any way and the `Rc` is not consumed. The pointer is valid
1454 /// for as long as there are strong counts in the `Rc`.
1455 ///
1456 /// # Examples
1457 ///
1458 /// ```
1459 /// use std::rc::Rc;
1460 ///
1461 /// let x = Rc::new(0);
1462 /// let y = Rc::clone(&x);
1463 /// let x_ptr = Rc::as_ptr(&x);
1464 /// assert_eq!(x_ptr, Rc::as_ptr(&y));
1465 /// assert_eq!(unsafe { *x_ptr }, 0);
1466 /// ```
1467 #[stable(feature = "weak_into_raw", since = "1.45.0")]
1468 #[rustc_never_returns_null_ptr]
1469 pub fn as_ptr(this: &Self) -> *const T {
1470 let ptr: *mut RcInner<T> = NonNull::as_ptr(this.ptr);
1471
1472 // SAFETY: This cannot go through Deref::deref or Rc::inner because
1473 // this is required to retain raw/mut provenance such that e.g. `get_mut` can
1474 // write through the pointer after the Rc is recovered through `from_raw`.
1475 unsafe { &raw mut (*ptr).value }
1476 }
1477
1478 /// Constructs an `Rc<T, A>` from a raw pointer in the provided allocator.
1479 ///
1480 /// The raw pointer must have been previously returned by a call to [`Rc<U,
1481 /// A>::into_raw`][into_raw] with the following requirements:
1482 ///
1483 /// * If `U` is sized, it must have the same size and alignment as `T`. This
1484 /// is trivially true if `U` is `T`.
1485 /// * If `U` is unsized, its data pointer must have the same size and
1486 /// alignment as `T`. This is trivially true if `Rc<U>` was constructed
1487 /// through `Rc<T>` and then converted to `Rc<U>` through an [unsized
1488 /// coercion].
1489 ///
1490 /// Note that if `U` or `U`'s data pointer is not `T` but has the same size
1491 /// and alignment, this is basically like transmuting references of
1492 /// different types. See [`mem::transmute`][transmute] for more information
1493 /// on what restrictions apply in this case.
1494 ///
1495 /// The raw pointer must point to a block of memory allocated by `alloc`.
1496 ///
1497 /// The user of `from_raw` has to make sure a specific value of `T` is only
1498 /// dropped once.
1499 ///
1500 /// This function is unsafe because improper use may lead to memory unsafety,
1501 /// even if the returned `Rc<T>` is never accessed.
1502 ///
1503 /// [into_raw]: Rc::into_raw
1504 /// [transmute]: core::mem::transmute
1505 /// [unsized coercion]: https://doc.rust-lang.org/reference/type-coercions.html#unsized-coercions
1506 ///
1507 /// # Examples
1508 ///
1509 /// ```
1510 /// #![feature(allocator_api)]
1511 ///
1512 /// use std::rc::Rc;
1513 /// use std::alloc::System;
1514 ///
1515 /// let x = Rc::new_in("hello".to_owned(), System);
1516 /// let (x_ptr, _alloc) = Rc::into_raw_with_allocator(x);
1517 ///
1518 /// unsafe {
1519 /// // Convert back to an `Rc` to prevent leak.
1520 /// let x = Rc::from_raw_in(x_ptr, System);
1521 /// assert_eq!(&*x, "hello");
1522 ///
1523 /// // Further calls to `Rc::from_raw(x_ptr)` would be memory-unsafe.
1524 /// }
1525 ///
1526 /// // The memory was freed when `x` went out of scope above, so `x_ptr` is now dangling!
1527 /// ```
1528 ///
1529 /// Convert a slice back into its original array:
1530 ///
1531 /// ```
1532 /// #![feature(allocator_api)]
1533 ///
1534 /// use std::rc::Rc;
1535 /// use std::alloc::System;
1536 ///
1537 /// let x: Rc<[u32], _> = Rc::new_in([1, 2, 3], System);
1538 /// let x_ptr: *const [u32] = Rc::into_raw_with_allocator(x).0;
1539 ///
1540 /// unsafe {
1541 /// let x: Rc<[u32; 3], _> = Rc::from_raw_in(x_ptr.cast::<[u32; 3]>(), System);
1542 /// assert_eq!(&*x, &[1, 2, 3]);
1543 /// }
1544 /// ```
1545 #[unstable(feature = "allocator_api", issue = "32838")]
1546 pub unsafe fn from_raw_in(ptr: *const T, alloc: A) -> Self {
1547 let offset = unsafe { data_offset(ptr) };
1548
1549 // Reverse the offset to find the original RcInner.
1550 let rc_ptr = unsafe { ptr.byte_sub(offset) as *mut RcInner<T> };
1551
1552 unsafe { Self::from_ptr_in(rc_ptr, alloc) }
1553 }
1554
1555 /// Creates a new [`Weak`] pointer to this allocation.
1556 ///
1557 /// # Examples
1558 ///
1559 /// ```
1560 /// use std::rc::Rc;
1561 ///
1562 /// let five = Rc::new(5);
1563 ///
1564 /// let weak_five = Rc::downgrade(&five);
1565 /// ```
1566 #[must_use = "this returns a new `Weak` pointer, \
1567 without modifying the original `Rc`"]
1568 #[stable(feature = "rc_weak", since = "1.4.0")]
1569 pub fn downgrade(this: &Self) -> Weak<T, A>
1570 where
1571 A: Clone,
1572 {
1573 this.inner().inc_weak();
1574 // Make sure we do not create a dangling Weak
1575 debug_assert!(!is_dangling(this.ptr.as_ptr()));
1576 Weak { ptr: this.ptr, alloc: this.alloc.clone() }
1577 }
1578
1579 /// Gets the number of [`Weak`] pointers to this allocation.
1580 ///
1581 /// # Examples
1582 ///
1583 /// ```
1584 /// use std::rc::Rc;
1585 ///
1586 /// let five = Rc::new(5);
1587 /// let _weak_five = Rc::downgrade(&five);
1588 ///
1589 /// assert_eq!(1, Rc::weak_count(&five));
1590 /// ```
1591 #[inline]
1592 #[stable(feature = "rc_counts", since = "1.15.0")]
1593 pub fn weak_count(this: &Self) -> usize {
1594 this.inner().weak() - 1
1595 }
1596
1597 /// Gets the number of strong (`Rc`) pointers to this allocation.
1598 ///
1599 /// # Examples
1600 ///
1601 /// ```
1602 /// use std::rc::Rc;
1603 ///
1604 /// let five = Rc::new(5);
1605 /// let _also_five = Rc::clone(&five);
1606 ///
1607 /// assert_eq!(2, Rc::strong_count(&five));
1608 /// ```
1609 #[inline]
1610 #[stable(feature = "rc_counts", since = "1.15.0")]
1611 pub fn strong_count(this: &Self) -> usize {
1612 this.inner().strong()
1613 }
1614
1615 /// Increments the strong reference count on the `Rc<T>` associated with the
1616 /// provided pointer by one.
1617 ///
1618 /// # Safety
1619 ///
1620 /// The pointer must have been obtained through `Rc::into_raw` and must satisfy the
1621 /// same layout requirements specified in [`Rc::from_raw_in`][from_raw_in].
1622 /// The associated `Rc` instance must be valid (i.e. the strong count must be at
1623 /// least 1) for the duration of this method, and `ptr` must point to a block of memory
1624 /// allocated by `alloc`.
1625 ///
1626 /// [from_raw_in]: Rc::from_raw_in
1627 ///
1628 /// # Examples
1629 ///
1630 /// ```
1631 /// #![feature(allocator_api)]
1632 ///
1633 /// use std::rc::Rc;
1634 /// use std::alloc::System;
1635 ///
1636 /// let five = Rc::new_in(5, System);
1637 ///
1638 /// unsafe {
1639 /// let (ptr, _alloc) = Rc::into_raw_with_allocator(five);
1640 /// Rc::increment_strong_count_in(ptr, System);
1641 ///
1642 /// let five = Rc::from_raw_in(ptr, System);
1643 /// assert_eq!(2, Rc::strong_count(&five));
1644 /// # // Prevent leaks for Miri.
1645 /// # Rc::decrement_strong_count_in(ptr, System);
1646 /// }
1647 /// ```
1648 #[inline]
1649 #[unstable(feature = "allocator_api", issue = "32838")]
1650 pub unsafe fn increment_strong_count_in(ptr: *const T, alloc: A)
1651 where
1652 A: Clone,
1653 {
1654 // Retain Rc, but don't touch refcount by wrapping in ManuallyDrop
1655 let rc = unsafe { mem::ManuallyDrop::new(Rc::<T, A>::from_raw_in(ptr, alloc)) };
1656 // Now increase refcount, but don't drop new refcount either
1657 let _rc_clone: mem::ManuallyDrop<_> = rc.clone();
1658 }
1659
1660 /// Decrements the strong reference count on the `Rc<T>` associated with the
1661 /// provided pointer by one.
1662 ///
1663 /// # Safety
1664 ///
    /// The pointer must have been obtained through `Rc::into_raw` and must satisfy the
1666 /// same layout requirements specified in [`Rc::from_raw_in`][from_raw_in].
1667 /// The associated `Rc` instance must be valid (i.e. the strong count must be at
1668 /// least 1) when invoking this method, and `ptr` must point to a block of memory
1669 /// allocated by `alloc`. This method can be used to release the final `Rc` and
1670 /// backing storage, but **should not** be called after the final `Rc` has been released.
1671 ///
1672 /// [from_raw_in]: Rc::from_raw_in
1673 ///
1674 /// # Examples
1675 ///
1676 /// ```
1677 /// #![feature(allocator_api)]
1678 ///
1679 /// use std::rc::Rc;
1680 /// use std::alloc::System;
1681 ///
1682 /// let five = Rc::new_in(5, System);
1683 ///
1684 /// unsafe {
1685 /// let (ptr, _alloc) = Rc::into_raw_with_allocator(five);
1686 /// Rc::increment_strong_count_in(ptr, System);
1687 ///
1688 /// let five = Rc::from_raw_in(ptr, System);
1689 /// assert_eq!(2, Rc::strong_count(&five));
1690 /// Rc::decrement_strong_count_in(ptr, System);
1691 /// assert_eq!(1, Rc::strong_count(&five));
1692 /// }
1693 /// ```
1694 #[inline]
1695 #[unstable(feature = "allocator_api", issue = "32838")]
1696 pub unsafe fn decrement_strong_count_in(ptr: *const T, alloc: A) {
1697 unsafe { drop(Rc::from_raw_in(ptr, alloc)) };
1698 }
1699
1700 /// Returns `true` if there are no other `Rc` or [`Weak`] pointers to
1701 /// this allocation.
1702 #[inline]
1703 fn is_unique(this: &Self) -> bool {
1704 Rc::weak_count(this) == 0 && Rc::strong_count(this) == 1
1705 }
1706
1707 /// Returns a mutable reference into the given `Rc`, if there are
1708 /// no other `Rc` or [`Weak`] pointers to the same allocation.
1709 ///
1710 /// Returns [`None`] otherwise, because it is not safe to
1711 /// mutate a shared value.
1712 ///
1713 /// See also [`make_mut`][make_mut], which will [`clone`][clone]
1714 /// the inner value when there are other `Rc` pointers.
1715 ///
1716 /// [make_mut]: Rc::make_mut
1717 /// [clone]: Clone::clone
1718 ///
1719 /// # Examples
1720 ///
1721 /// ```
1722 /// use std::rc::Rc;
1723 ///
1724 /// let mut x = Rc::new(3);
1725 /// *Rc::get_mut(&mut x).unwrap() = 4;
1726 /// assert_eq!(*x, 4);
1727 ///
1728 /// let _y = Rc::clone(&x);
1729 /// assert!(Rc::get_mut(&mut x).is_none());
1730 /// ```
1731 #[inline]
1732 #[stable(feature = "rc_unique", since = "1.4.0")]
1733 pub fn get_mut(this: &mut Self) -> Option<&mut T> {
1734 if Rc::is_unique(this) { unsafe { Some(Rc::get_mut_unchecked(this)) } } else { None }
1735 }
1736
1737 /// Returns a mutable reference into the given `Rc`,
1738 /// without any check.
1739 ///
1740 /// See also [`get_mut`], which is safe and does appropriate checks.
1741 ///
1742 /// [`get_mut`]: Rc::get_mut
1743 ///
1744 /// # Safety
1745 ///
1746 /// If any other `Rc` or [`Weak`] pointers to the same allocation exist, then
1747 /// they must not be dereferenced or have active borrows for the duration
1748 /// of the returned borrow, and their inner type must be exactly the same as the
1749 /// inner type of this Rc (including lifetimes). This is trivially the case if no
1750 /// such pointers exist, for example immediately after `Rc::new`.
1751 ///
1752 /// # Examples
1753 ///
1754 /// ```
1755 /// #![feature(get_mut_unchecked)]
1756 ///
1757 /// use std::rc::Rc;
1758 ///
1759 /// let mut x = Rc::new(String::new());
1760 /// unsafe {
1761 /// Rc::get_mut_unchecked(&mut x).push_str("foo")
1762 /// }
1763 /// assert_eq!(*x, "foo");
1764 /// ```
1765 /// Other `Rc` pointers to the same allocation must be to the same type.
1766 /// ```no_run
1767 /// #![feature(get_mut_unchecked)]
1768 ///
1769 /// use std::rc::Rc;
1770 ///
1771 /// let x: Rc<str> = Rc::from("Hello, world!");
1772 /// let mut y: Rc<[u8]> = x.clone().into();
1773 /// unsafe {
1774 /// // this is Undefined Behavior, because x's inner type is str, not [u8]
1775 /// Rc::get_mut_unchecked(&mut y).fill(0xff); // 0xff is invalid in UTF-8
1776 /// }
1777 /// println!("{}", &*x); // Invalid UTF-8 in a str
1778 /// ```
1779 /// Other `Rc` pointers to the same allocation must be to the exact same type, including lifetimes.
1780 /// ```no_run
1781 /// #![feature(get_mut_unchecked)]
1782 ///
1783 /// use std::rc::Rc;
1784 ///
1785 /// let x: Rc<&str> = Rc::new("Hello, world!");
1786 /// {
1787 /// let s = String::from("Oh, no!");
1788 /// let mut y: Rc<&str> = x.clone();
1789 /// unsafe {
1790 /// // this is Undefined Behavior, because x's inner type
1791 /// // is &'long str, not &'short str
1792 /// *Rc::get_mut_unchecked(&mut y) = &s;
1793 /// }
1794 /// }
1795 /// println!("{}", &*x); // Use-after-free
1796 /// ```
1797 #[inline]
1798 #[unstable(feature = "get_mut_unchecked", issue = "63292")]
1799 pub unsafe fn get_mut_unchecked(this: &mut Self) -> &mut T {
1800 // We are careful to *not* create a reference covering the "count" fields, as
1801 // this would conflict with accesses to the reference counts (e.g. by `Weak`).
1802 unsafe { &mut (*this.ptr.as_ptr()).value }
1803 }
1804
1805 #[inline]
1806 #[stable(feature = "ptr_eq", since = "1.17.0")]
1807 /// Returns `true` if the two `Rc`s point to the same allocation in a vein similar to
1808 /// [`ptr::eq`]. This function ignores the metadata of `dyn Trait` pointers.
1809 ///
1810 /// # Examples
1811 ///
1812 /// ```
1813 /// use std::rc::Rc;
1814 ///
1815 /// let five = Rc::new(5);
1816 /// let same_five = Rc::clone(&five);
1817 /// let other_five = Rc::new(5);
1818 ///
1819 /// assert!(Rc::ptr_eq(&five, &same_five));
1820 /// assert!(!Rc::ptr_eq(&five, &other_five));
1821 /// ```
1822 pub fn ptr_eq(this: &Self, other: &Self) -> bool {
1823 ptr::addr_eq(this.ptr.as_ptr(), other.ptr.as_ptr())
1824 }
1825}
1826
1827#[cfg(not(no_global_oom_handling))]
1828impl<T: ?Sized + CloneToUninit, A: Allocator + Clone> Rc<T, A> {
1829 /// Makes a mutable reference into the given `Rc`.
1830 ///
1831 /// If there are other `Rc` pointers to the same allocation, then `make_mut` will
1832 /// [`clone`] the inner value to a new allocation to ensure unique ownership. This is also
1833 /// referred to as clone-on-write.
1834 ///
1835 /// However, if there are no other `Rc` pointers to this allocation, but some [`Weak`]
1836 /// pointers, then the [`Weak`] pointers will be disassociated and the inner value will not
1837 /// be cloned.
1838 ///
1839 /// See also [`get_mut`], which will fail rather than cloning the inner value
1840 /// or disassociating [`Weak`] pointers.
1841 ///
1842 /// [`clone`]: Clone::clone
1843 /// [`get_mut`]: Rc::get_mut
1844 ///
1845 /// # Examples
1846 ///
1847 /// ```
1848 /// use std::rc::Rc;
1849 ///
1850 /// let mut data = Rc::new(5);
1851 ///
1852 /// *Rc::make_mut(&mut data) += 1; // Won't clone anything
1853 /// let mut other_data = Rc::clone(&data); // Won't clone inner data
1854 /// *Rc::make_mut(&mut data) += 1; // Clones inner data
1855 /// *Rc::make_mut(&mut data) += 1; // Won't clone anything
1856 /// *Rc::make_mut(&mut other_data) *= 2; // Won't clone anything
1857 ///
1858 /// // Now `data` and `other_data` point to different allocations.
1859 /// assert_eq!(*data, 8);
1860 /// assert_eq!(*other_data, 12);
1861 /// ```
1862 ///
1863 /// [`Weak`] pointers will be disassociated:
1864 ///
1865 /// ```
1866 /// use std::rc::Rc;
1867 ///
1868 /// let mut data = Rc::new(75);
1869 /// let weak = Rc::downgrade(&data);
1870 ///
1871 /// assert!(75 == *data);
1872 /// assert!(75 == *weak.upgrade().unwrap());
1873 ///
1874 /// *Rc::make_mut(&mut data) += 1;
1875 ///
1876 /// assert!(76 == *data);
1877 /// assert!(weak.upgrade().is_none());
1878 /// ```
1879 #[inline]
1880 #[stable(feature = "rc_unique", since = "1.4.0")]
1881 pub fn make_mut(this: &mut Self) -> &mut T {
1882 let size_of_val = size_of_val::<T>(&**this);
1883
1884 if Rc::strong_count(this) != 1 {
1885 // Gotta clone the data, there are other Rcs.
1886
1887 let this_data_ref: &T = &**this;
1888 // `in_progress` drops the allocation if we panic before finishing initializing it.
1889 let mut in_progress: UniqueRcUninit<T, A> =
1890 UniqueRcUninit::new(this_data_ref, this.alloc.clone());
1891
1892 // Initialize with clone of this.
1893 let initialized_clone = unsafe {
1894 // Clone. If the clone panics, `in_progress` will be dropped and clean up.
1895 this_data_ref.clone_to_uninit(in_progress.data_ptr().cast());
1896 // Cast type of pointer, now that it is initialized.
1897 in_progress.into_rc()
1898 };
1899
1900 // Replace `this` with newly constructed Rc.
1901 *this = initialized_clone;
1902 } else if Rc::weak_count(this) != 0 {
1903 // Can just steal the data, all that's left is Weaks
1904
1905 // We don't need panic-protection like the above branch does, but we might as well
1906 // use the same mechanism.
1907 let mut in_progress: UniqueRcUninit<T, A> =
1908 UniqueRcUninit::new(&**this, this.alloc.clone());
1909 unsafe {
1910 // Initialize `in_progress` with move of **this.
1911 // We have to express this in terms of bytes because `T: ?Sized`; there is no
1912 // operation that just copies a value based on its `size_of_val()`.
1913 ptr::copy_nonoverlapping(
1914 ptr::from_ref(&**this).cast::<u8>(),
1915 in_progress.data_ptr().cast::<u8>(),
1916 size_of_val,
1917 );
1918
1919 this.inner().dec_strong();
1920 // Remove implicit strong-weak ref (no need to craft a fake
1921 // Weak here -- we know other Weaks can clean up for us)
1922 this.inner().dec_weak();
1923 // Replace `this` with newly constructed Rc that has the moved data.
1924 ptr::write(this, in_progress.into_rc());
1925 }
1926 }
1927 // This unsafety is ok because we're guaranteed that the pointer
1928 // returned is the *only* pointer that will ever be returned to T. Our
1929 // reference count is guaranteed to be 1 at this point, and we required
1930 // the `Rc<T>` itself to be `mut`, so we're returning the only possible
1931 // reference to the allocation.
1932 unsafe { &mut this.ptr.as_mut().value }
1933 }
1934}
1935
1936impl<T: Clone, A: Allocator> Rc<T, A> {
1937 /// If we have the only reference to `T` then unwrap it. Otherwise, clone `T` and return the
1938 /// clone.
1939 ///
1940 /// Assuming `rc_t` is of type `Rc<T>`, this function is functionally equivalent to
1941 /// `(*rc_t).clone()`, but will avoid cloning the inner value where possible.
1942 ///
1943 /// # Examples
1944 ///
1945 /// ```
1946 /// # use std::{ptr, rc::Rc};
1947 /// let inner = String::from("test");
1948 /// let ptr = inner.as_ptr();
1949 ///
1950 /// let rc = Rc::new(inner);
1951 /// let inner = Rc::unwrap_or_clone(rc);
1952 /// // The inner value was not cloned
1953 /// assert!(ptr::eq(ptr, inner.as_ptr()));
1954 ///
1955 /// let rc = Rc::new(inner);
1956 /// let rc2 = rc.clone();
1957 /// let inner = Rc::unwrap_or_clone(rc);
1958 /// // Because there were 2 references, we had to clone the inner value.
1959 /// assert!(!ptr::eq(ptr, inner.as_ptr()));
1960 /// // `rc2` is the last reference, so when we unwrap it we get back
1961 /// // the original `String`.
1962 /// let inner = Rc::unwrap_or_clone(rc2);
1963 /// assert!(ptr::eq(ptr, inner.as_ptr()));
1964 /// ```
1965 #[inline]
1966 #[stable(feature = "arc_unwrap_or_clone", since = "1.76.0")]
1967 pub fn unwrap_or_clone(this: Self) -> T {
1968 Rc::try_unwrap(this).unwrap_or_else(|rc| (*rc).clone())
1969 }
1970}
1971
1972impl<A: Allocator> Rc<dyn Any, A> {
1973 /// Attempts to downcast the `Rc<dyn Any>` to a concrete type.
1974 ///
1975 /// # Examples
1976 ///
1977 /// ```
1978 /// use std::any::Any;
1979 /// use std::rc::Rc;
1980 ///
1981 /// fn print_if_string(value: Rc<dyn Any>) {
1982 /// if let Ok(string) = value.downcast::<String>() {
1983 /// println!("String ({}): {}", string.len(), string);
1984 /// }
1985 /// }
1986 ///
1987 /// let my_string = "Hello World".to_string();
1988 /// print_if_string(Rc::new(my_string));
1989 /// print_if_string(Rc::new(0i8));
1990 /// ```
1991 #[inline]
1992 #[stable(feature = "rc_downcast", since = "1.29.0")]
1993 pub fn downcast<T: Any>(self) -> Result<Rc<T, A>, Self> {
1994 if (*self).is::<T>() {
1995 unsafe {
1996 let (ptr, alloc) = Rc::into_inner_with_allocator(self);
1997 Ok(Rc::from_inner_in(ptr.cast(), alloc))
1998 }
1999 } else {
2000 Err(self)
2001 }
2002 }
2003
2004 /// Downcasts the `Rc<dyn Any>` to a concrete type.
2005 ///
2006 /// For a safe alternative see [`downcast`].
2007 ///
2008 /// # Examples
2009 ///
2010 /// ```
2011 /// #![feature(downcast_unchecked)]
2012 ///
2013 /// use std::any::Any;
2014 /// use std::rc::Rc;
2015 ///
2016 /// let x: Rc<dyn Any> = Rc::new(1_usize);
2017 ///
2018 /// unsafe {
2019 /// assert_eq!(*x.downcast_unchecked::<usize>(), 1);
2020 /// }
2021 /// ```
2022 ///
2023 /// # Safety
2024 ///
2025 /// The contained value must be of type `T`. Calling this method
2026 /// with the incorrect type is *undefined behavior*.
    ///
2029 /// [`downcast`]: Self::downcast
2030 #[inline]
2031 #[unstable(feature = "downcast_unchecked", issue = "90850")]
2032 pub unsafe fn downcast_unchecked<T: Any>(self) -> Rc<T, A> {
2033 unsafe {
2034 let (ptr, alloc) = Rc::into_inner_with_allocator(self);
2035 Rc::from_inner_in(ptr.cast(), alloc)
2036 }
2037 }
2038}
2039
2040impl<T: ?Sized> Rc<T> {
2041 /// Allocates an `RcInner<T>` with sufficient space for
2042 /// a possibly-unsized inner value where the value has the layout provided.
2043 ///
2044 /// The function `mem_to_rc_inner` is called with the data pointer
    /// and must return a (potentially fat) pointer to the `RcInner<T>`.
2046 #[cfg(not(no_global_oom_handling))]
2047 unsafe fn allocate_for_layout(
2048 value_layout: Layout,
2049 allocate: impl FnOnce(Layout) -> Result<NonNull<[u8]>, AllocError>,
2050 mem_to_rc_inner: impl FnOnce(*mut u8) -> *mut RcInner<T>,
2051 ) -> *mut RcInner<T> {
2052 let layout = rc_inner_layout_for_value_layout(value_layout);
2053 unsafe {
2054 Rc::try_allocate_for_layout(value_layout, allocate, mem_to_rc_inner)
2055 .unwrap_or_else(|_| handle_alloc_error(layout))
2056 }
2057 }
2058
2059 /// Allocates an `RcInner<T>` with sufficient space for
2060 /// a possibly-unsized inner value where the value has the layout provided,
2061 /// returning an error if allocation fails.
2062 ///
2063 /// The function `mem_to_rc_inner` is called with the data pointer
    /// and must return a (potentially fat) pointer to the `RcInner<T>`.
2065 #[inline]
2066 unsafe fn try_allocate_for_layout(
2067 value_layout: Layout,
2068 allocate: impl FnOnce(Layout) -> Result<NonNull<[u8]>, AllocError>,
2069 mem_to_rc_inner: impl FnOnce(*mut u8) -> *mut RcInner<T>,
2070 ) -> Result<*mut RcInner<T>, AllocError> {
2071 let layout = rc_inner_layout_for_value_layout(value_layout);
2072
2073 // Allocate for the layout.
2074 let ptr = allocate(layout)?;
2075
2076 // Initialize the RcInner
2077 let inner = mem_to_rc_inner(ptr.as_non_null_ptr().as_ptr());
2078 unsafe {
2079 debug_assert_eq!(Layout::for_value_raw(inner), layout);
2080
2081 (&raw mut (*inner).strong).write(Cell::new(1));
2082 (&raw mut (*inner).weak).write(Cell::new(1));
2083 }
2084
2085 Ok(inner)
2086 }
2087}
2088
2089impl<T: ?Sized, A: Allocator> Rc<T, A> {
2090 /// Allocates an `RcInner<T>` with sufficient space for an unsized inner value
2091 #[cfg(not(no_global_oom_handling))]
2092 unsafe fn allocate_for_ptr_in(ptr: *const T, alloc: &A) -> *mut RcInner<T> {
2093 // Allocate for the `RcInner<T>` using the given value.
2094 unsafe {
2095 Rc::<T>::allocate_for_layout(
2096 Layout::for_value_raw(ptr),
2097 |layout| alloc.allocate(layout),
2098 |mem| mem.with_metadata_of(ptr as *const RcInner<T>),
2099 )
2100 }
2101 }
2102
2103 #[cfg(not(no_global_oom_handling))]
2104 fn from_box_in(src: Box<T, A>) -> Rc<T, A> {
2105 unsafe {
2106 let value_size = size_of_val(&*src);
2107 let ptr = Self::allocate_for_ptr_in(&*src, Box::allocator(&src));
2108
2109 // Copy value as bytes
2110 ptr::copy_nonoverlapping(
2111 (&raw const *src) as *const u8,
2112 (&raw mut (*ptr).value) as *mut u8,
2113 value_size,
2114 );
2115
2116 // Free the allocation without dropping its contents
2117 let (bptr, alloc) = Box::into_raw_with_allocator(src);
2118 let src = Box::from_raw_in(bptr as *mut mem::ManuallyDrop<T>, alloc.by_ref());
2119 drop(src);
2120
2121 Self::from_ptr_in(ptr, alloc)
2122 }
2123 }
2124}
2125
2126impl<T> Rc<[T]> {
2127 /// Allocates an `RcInner<[T]>` with the given length.
2128 #[cfg(not(no_global_oom_handling))]
2129 unsafe fn allocate_for_slice(len: usize) -> *mut RcInner<[T]> {
2130 unsafe {
2131 Self::allocate_for_layout(
2132 Layout::array::<T>(len).unwrap(),
2133 |layout| Global.allocate(layout),
2134 |mem| ptr::slice_from_raw_parts_mut(mem.cast::<T>(), len) as *mut RcInner<[T]>,
2135 )
2136 }
2137 }
2138
2139 /// Copy elements from slice into newly allocated `Rc<[T]>`
2140 ///
2141 /// Unsafe because the caller must either take ownership or bind `T: Copy`
2142 #[cfg(not(no_global_oom_handling))]
2143 unsafe fn copy_from_slice(v: &[T]) -> Rc<[T]> {
2144 unsafe {
2145 let ptr = Self::allocate_for_slice(v.len());
2146 ptr::copy_nonoverlapping(v.as_ptr(), (&raw mut (*ptr).value) as *mut T, v.len());
2147 Self::from_ptr(ptr)
2148 }
2149 }
2150
2151 /// Constructs an `Rc<[T]>` from an iterator known to be of a certain size.
2152 ///
2153 /// Behavior is undefined should the size be wrong.
2154 #[cfg(not(no_global_oom_handling))]
2155 unsafe fn from_iter_exact(iter: impl Iterator<Item = T>, len: usize) -> Rc<[T]> {
2156 // Panic guard while cloning T elements.
2157 // In the event of a panic, elements that have been written
2158 // into the new RcInner will be dropped, then the memory freed.
2159 struct Guard<T> {
2160 mem: NonNull<u8>,
2161 elems: *mut T,
2162 layout: Layout,
2163 n_elems: usize,
2164 }
2165
2166 impl<T> Drop for Guard<T> {
2167 fn drop(&mut self) {
2168 unsafe {
2169 let slice = from_raw_parts_mut(self.elems, self.n_elems);
2170 ptr::drop_in_place(slice);
2171
2172 Global.deallocate(self.mem, self.layout);
2173 }
2174 }
2175 }
2176
2177 unsafe {
2178 let ptr = Self::allocate_for_slice(len);
2179
2180 let mem = ptr as *mut _ as *mut u8;
2181 let layout = Layout::for_value_raw(ptr);
2182
2183 // Pointer to first element
2184 let elems = (&raw mut (*ptr).value) as *mut T;
2185
2186 let mut guard = Guard { mem: NonNull::new_unchecked(mem), elems, layout, n_elems: 0 };
2187
2188 for (i, item) in iter.enumerate() {
2189 ptr::write(elems.add(i), item);
2190 guard.n_elems += 1;
2191 }
2192
2193 // All clear. Forget the guard so it doesn't free the new RcInner.
2194 mem::forget(guard);
2195
2196 Self::from_ptr(ptr)
2197 }
2198 }
2199}
2200
2201impl<T, A: Allocator> Rc<[T], A> {
2202 /// Allocates an `RcInner<[T]>` with the given length.
2203 #[inline]
2204 #[cfg(not(no_global_oom_handling))]
2205 unsafe fn allocate_for_slice_in(len: usize, alloc: &A) -> *mut RcInner<[T]> {
2206 unsafe {
2207 Rc::<[T]>::allocate_for_layout(
2208 Layout::array::<T>(len).unwrap(),
2209 |layout| alloc.allocate(layout),
2210 |mem| ptr::slice_from_raw_parts_mut(mem.cast::<T>(), len) as *mut RcInner<[T]>,
2211 )
2212 }
2213 }
2214}
2215
2216#[cfg(not(no_global_oom_handling))]
2217/// Specialization trait used for `From<&[T]>`.
2218trait RcFromSlice<T> {
2219 fn from_slice(slice: &[T]) -> Self;
2220}
2221
2222#[cfg(not(no_global_oom_handling))]
2223impl<T: Clone> RcFromSlice<T> for Rc<[T]> {
2224 #[inline]
2225 default fn from_slice(v: &[T]) -> Self {
2226 unsafe { Self::from_iter_exact(v.iter().cloned(), v.len()) }
2227 }
2228}
2229
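// `T: Copy` lets the elements be duplicated with a single bitwise copy
// (see `Rc::copy_from_slice`) instead of cloning them one at a time.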
2230#[cfg(not(no_global_oom_handling))]
2231impl<T: Copy> RcFromSlice<T> for Rc<[T]> {
2232 #[inline]
2233 fn from_slice(v: &[T]) -> Self {
2234 unsafe { Rc::copy_from_slice(v) }
2235 }
2236}
2237
2238#[stable(feature = "rust1", since = "1.0.0")]
2239impl<T: ?Sized, A: Allocator> Deref for Rc<T, A> {
2240 type Target = T;
2241
2242 #[inline(always)]
2243 fn deref(&self) -> &T {
2244 &self.inner().value
2245 }
2246}
2247
2248#[unstable(feature = "pin_coerce_unsized_trait", issue = "123430")]
2249unsafe impl<T: ?Sized, A: Allocator> PinCoerceUnsized for Rc<T, A> {}
2250
2251//#[unstable(feature = "unique_rc_arc", issue = "112566")]
2252#[unstable(feature = "pin_coerce_unsized_trait", issue = "123430")]
2253unsafe impl<T: ?Sized, A: Allocator> PinCoerceUnsized for UniqueRc<T, A> {}
2254
2255#[unstable(feature = "pin_coerce_unsized_trait", issue = "123430")]
2256unsafe impl<T: ?Sized, A: Allocator> PinCoerceUnsized for Weak<T, A> {}
2257
2258#[unstable(feature = "deref_pure_trait", issue = "87121")]
2259unsafe impl<T: ?Sized, A: Allocator> DerefPure for Rc<T, A> {}
2260
2261//#[unstable(feature = "unique_rc_arc", issue = "112566")]
2262#[unstable(feature = "deref_pure_trait", issue = "87121")]
2263unsafe impl<T: ?Sized, A: Allocator> DerefPure for UniqueRc<T, A> {}
2264
2265#[unstable(feature = "legacy_receiver_trait", issue = "none")]
2266impl<T: ?Sized> LegacyReceiver for Rc<T> {}
2267
2268#[stable(feature = "rust1", since = "1.0.0")]
2269unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for Rc<T, A> {
2270 /// Drops the `Rc`.
2271 ///
2272 /// This will decrement the strong reference count. If the strong reference
2273 /// count reaches zero then the only other references (if any) are
2274 /// [`Weak`], so we `drop` the inner value.
2275 ///
2276 /// # Examples
2277 ///
2278 /// ```
2279 /// use std::rc::Rc;
2280 ///
2281 /// struct Foo;
2282 ///
2283 /// impl Drop for Foo {
2284 /// fn drop(&mut self) {
2285 /// println!("dropped!");
2286 /// }
2287 /// }
2288 ///
2289 /// let foo = Rc::new(Foo);
2290 /// let foo2 = Rc::clone(&foo);
2291 ///
2292 /// drop(foo); // Doesn't print anything
2293 /// drop(foo2); // Prints "dropped!"
2294 /// ```
2295 #[inline]
2296 fn drop(&mut self) {
2297 unsafe {
2298 self.inner().dec_strong();
2299 if self.inner().strong() == 0 {
2300 self.drop_slow();
2301 }
2302 }
2303 }
2304}
2305
2306#[stable(feature = "rust1", since = "1.0.0")]
2307impl<T: ?Sized, A: Allocator + Clone> Clone for Rc<T, A> {
2308 /// Makes a clone of the `Rc` pointer.
2309 ///
2310 /// This creates another pointer to the same allocation, increasing the
2311 /// strong reference count.
2312 ///
2313 /// # Examples
2314 ///
2315 /// ```
2316 /// use std::rc::Rc;
2317 ///
2318 /// let five = Rc::new(5);
2319 ///
2320 /// let _ = Rc::clone(&five);
2321 /// ```
2322 #[inline]
2323 fn clone(&self) -> Self {
2324 unsafe {
2325 self.inner().inc_strong();
2326 Self::from_inner_in(self.ptr, self.alloc.clone())
2327 }
2328 }
2329}
2330
2331#[unstable(feature = "ergonomic_clones", issue = "132290")]
2332impl<T: ?Sized, A: Allocator + Clone> UseCloned for Rc<T, A> {}
2333
2334#[cfg(not(no_global_oom_handling))]
2335#[stable(feature = "rust1", since = "1.0.0")]
2336impl<T: Default> Default for Rc<T> {
2337 /// Creates a new `Rc<T>`, with the `Default` value for `T`.
2338 ///
2339 /// # Examples
2340 ///
2341 /// ```
2342 /// use std::rc::Rc;
2343 ///
2344 /// let x: Rc<i32> = Default::default();
2345 /// assert_eq!(*x, 0);
2346 /// ```
2347 #[inline]
2348 fn default() -> Self {
2349 unsafe {
2350 Self::from_inner(
2351 Box::leak(Box::write(
2352 Box::new_uninit(),
2353 RcInner { strong: Cell::new(1), weak: Cell::new(1), value: T::default() },
2354 ))
2355 .into(),
2356 )
2357 }
2358 }
2359}
2360
2361#[cfg(not(no_global_oom_handling))]
2362#[stable(feature = "more_rc_default_impls", since = "1.80.0")]
2363impl Default for Rc<str> {
2364 /// Creates an empty `str` inside an `Rc`.
2365 ///
2366 /// This may or may not share an allocation with other Rcs on the same thread.
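    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let empty: Rc<str> = Default::default();
    /// assert_eq!("", &empty[..]);
    /// ```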
2367 #[inline]
2368 fn default() -> Self {
2369 let rc = Rc::<[u8]>::default();
2370 // `[u8]` has the same layout as `str`.
2371 unsafe { Rc::from_raw(Rc::into_raw(rc) as *const str) }
2372 }
2373}
2374
2375#[cfg(not(no_global_oom_handling))]
2376#[stable(feature = "more_rc_default_impls", since = "1.80.0")]
2377impl<T> Default for Rc<[T]> {
2378 /// Creates an empty `[T]` inside an `Rc`.
2379 ///
2380 /// This may or may not share an allocation with other Rcs on the same thread.
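    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let empty: Rc<[i32]> = Default::default();
    /// assert!(empty.is_empty());
    /// ```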
2381 #[inline]
2382 fn default() -> Self {
2383 let arr: [T; 0] = [];
2384 Rc::from(arr)
2385 }
2386}
2387
2388#[cfg(not(no_global_oom_handling))]
2389#[stable(feature = "pin_default_impls", since = "CURRENT_RUSTC_VERSION")]
2390impl<T> Default for Pin<Rc<T>>
2391where
2392 T: ?Sized,
2393 Rc<T>: Default,
2394{
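    /// Creates the default value of `Rc<T>`, pinned.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::pin::Pin;
    /// use std::rc::Rc;
    ///
    /// let pinned: Pin<Rc<u32>> = Default::default();
    /// assert_eq!(*pinned, 0);
    /// ```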
2395 #[inline]
2396 fn default() -> Self {
2397 unsafe { Pin::new_unchecked(Rc::<T>::default()) }
2398 }
2399}
2400
2401#[stable(feature = "rust1", since = "1.0.0")]
2402trait RcEqIdent<T: ?Sized + PartialEq, A: Allocator> {
2403 fn eq(&self, other: &Rc<T, A>) -> bool;
2404 fn ne(&self, other: &Rc<T, A>) -> bool;
2405}
2406
2407#[stable(feature = "rust1", since = "1.0.0")]
2408impl<T: ?Sized + PartialEq, A: Allocator> RcEqIdent<T, A> for Rc<T, A> {
2409 #[inline]
2410 default fn eq(&self, other: &Rc<T, A>) -> bool {
2411 **self == **other
2412 }
2413
2414 #[inline]
2415 default fn ne(&self, other: &Rc<T, A>) -> bool {
2416 **self != **other
2417 }
2418}
2419
2420// Hack to allow specializing on `Eq` even though `Eq` has a method.
2421#[rustc_unsafe_specialization_marker]
2422pub(crate) trait MarkerEq: PartialEq<Self> {}
2423
2424impl<T: Eq> MarkerEq for T {}
2425
/// We're doing this specialization here, and not as a more general optimization on `&T`, because it
/// would otherwise add a cost to all equality checks on refs. We assume that `Rc`s are used to
/// store large values that are slow to clone but also expensive to compare for equality, so this
/// cost pays off more easily. It's also more likely for two `Rc` clones to point to the same value
/// than for two `&T`s to do so.
2431///
2432/// We can only do this when `T: Eq` as a `PartialEq` might be deliberately irreflexive.
2433#[stable(feature = "rust1", since = "1.0.0")]
2434impl<T: ?Sized + MarkerEq, A: Allocator> RcEqIdent<T, A> for Rc<T, A> {
2435 #[inline]
2436 fn eq(&self, other: &Rc<T, A>) -> bool {
2437 Rc::ptr_eq(self, other) || **self == **other
2438 }
2439
2440 #[inline]
2441 fn ne(&self, other: &Rc<T, A>) -> bool {
2442 !Rc::ptr_eq(self, other) && **self != **other
2443 }
2444}
2445
2446#[stable(feature = "rust1", since = "1.0.0")]
2447impl<T: ?Sized + PartialEq, A: Allocator> PartialEq for Rc<T, A> {
2448 /// Equality for two `Rc`s.
2449 ///
2450 /// Two `Rc`s are equal if their inner values are equal, even if they are
    /// stored in different allocations.
2452 ///
2453 /// If `T` also implements `Eq` (implying reflexivity of equality),
2454 /// two `Rc`s that point to the same allocation are
2455 /// always equal.
2456 ///
2457 /// # Examples
2458 ///
2459 /// ```
2460 /// use std::rc::Rc;
2461 ///
2462 /// let five = Rc::new(5);
2463 ///
2464 /// assert!(five == Rc::new(5));
2465 /// ```
2466 #[inline]
2467 fn eq(&self, other: &Rc<T, A>) -> bool {
2468 RcEqIdent::eq(self, other)
2469 }
2470
2471 /// Inequality for two `Rc`s.
2472 ///
2473 /// Two `Rc`s are not equal if their inner values are not equal.
2474 ///
2475 /// If `T` also implements `Eq` (implying reflexivity of equality),
2476 /// two `Rc`s that point to the same allocation are
    /// never unequal.
2478 ///
2479 /// # Examples
2480 ///
2481 /// ```
2482 /// use std::rc::Rc;
2483 ///
2484 /// let five = Rc::new(5);
2485 ///
2486 /// assert!(five != Rc::new(6));
2487 /// ```
2488 #[inline]
2489 fn ne(&self, other: &Rc<T, A>) -> bool {
2490 RcEqIdent::ne(self, other)
2491 }
2492}
2493
2494#[stable(feature = "rust1", since = "1.0.0")]
2495impl<T: ?Sized + Eq, A: Allocator> Eq for Rc<T, A> {}
2496
2497#[stable(feature = "rust1", since = "1.0.0")]
2498impl<T: ?Sized + PartialOrd, A: Allocator> PartialOrd for Rc<T, A> {
2499 /// Partial comparison for two `Rc`s.
2500 ///
2501 /// The two are compared by calling `partial_cmp()` on their inner values.
2502 ///
2503 /// # Examples
2504 ///
2505 /// ```
2506 /// use std::rc::Rc;
2507 /// use std::cmp::Ordering;
2508 ///
2509 /// let five = Rc::new(5);
2510 ///
2511 /// assert_eq!(Some(Ordering::Less), five.partial_cmp(&Rc::new(6)));
2512 /// ```
2513 #[inline(always)]
2514 fn partial_cmp(&self, other: &Rc<T, A>) -> Option<Ordering> {
2515 (**self).partial_cmp(&**other)
2516 }
2517
2518 /// Less-than comparison for two `Rc`s.
2519 ///
2520 /// The two are compared by calling `<` on their inner values.
2521 ///
2522 /// # Examples
2523 ///
2524 /// ```
2525 /// use std::rc::Rc;
2526 ///
2527 /// let five = Rc::new(5);
2528 ///
2529 /// assert!(five < Rc::new(6));
2530 /// ```
2531 #[inline(always)]
2532 fn lt(&self, other: &Rc<T, A>) -> bool {
2533 **self < **other
2534 }
2535
2536 /// 'Less than or equal to' comparison for two `Rc`s.
2537 ///
2538 /// The two are compared by calling `<=` on their inner values.
2539 ///
2540 /// # Examples
2541 ///
2542 /// ```
2543 /// use std::rc::Rc;
2544 ///
2545 /// let five = Rc::new(5);
2546 ///
2547 /// assert!(five <= Rc::new(5));
2548 /// ```
2549 #[inline(always)]
2550 fn le(&self, other: &Rc<T, A>) -> bool {
2551 **self <= **other
2552 }
2553
2554 /// Greater-than comparison for two `Rc`s.
2555 ///
2556 /// The two are compared by calling `>` on their inner values.
2557 ///
2558 /// # Examples
2559 ///
2560 /// ```
2561 /// use std::rc::Rc;
2562 ///
2563 /// let five = Rc::new(5);
2564 ///
2565 /// assert!(five > Rc::new(4));
2566 /// ```
2567 #[inline(always)]
2568 fn gt(&self, other: &Rc<T, A>) -> bool {
2569 **self > **other
2570 }
2571
2572 /// 'Greater than or equal to' comparison for two `Rc`s.
2573 ///
2574 /// The two are compared by calling `>=` on their inner values.
2575 ///
2576 /// # Examples
2577 ///
2578 /// ```
2579 /// use std::rc::Rc;
2580 ///
2581 /// let five = Rc::new(5);
2582 ///
2583 /// assert!(five >= Rc::new(5));
2584 /// ```
2585 #[inline(always)]
2586 fn ge(&self, other: &Rc<T, A>) -> bool {
2587 **self >= **other
2588 }
2589}
2590
2591#[stable(feature = "rust1", since = "1.0.0")]
2592impl<T: ?Sized + Ord, A: Allocator> Ord for Rc<T, A> {
2593 /// Comparison for two `Rc`s.
2594 ///
2595 /// The two are compared by calling `cmp()` on their inner values.
2596 ///
2597 /// # Examples
2598 ///
2599 /// ```
2600 /// use std::rc::Rc;
2601 /// use std::cmp::Ordering;
2602 ///
2603 /// let five = Rc::new(5);
2604 ///
2605 /// assert_eq!(Ordering::Less, five.cmp(&Rc::new(6)));
2606 /// ```
2607 #[inline]
2608 fn cmp(&self, other: &Rc<T, A>) -> Ordering {
2609 (**self).cmp(&**other)
2610 }
2611}
2612
2613#[stable(feature = "rust1", since = "1.0.0")]
2614impl<T: ?Sized + Hash, A: Allocator> Hash for Rc<T, A> {
2615 fn hash<H: Hasher>(&self, state: &mut H) {
2616 (**self).hash(state);
2617 }
2618}
2619
2620#[stable(feature = "rust1", since = "1.0.0")]
2621impl<T: ?Sized + fmt::Display, A: Allocator> fmt::Display for Rc<T, A> {
2622 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
2623 fmt::Display::fmt(&**self, f)
2624 }
2625}
2626
2627#[stable(feature = "rust1", since = "1.0.0")]
2628impl<T: ?Sized + fmt::Debug, A: Allocator> fmt::Debug for Rc<T, A> {
2629 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
2630 fmt::Debug::fmt(&**self, f)
2631 }
2632}
2633
2634#[stable(feature = "rust1", since = "1.0.0")]
2635impl<T: ?Sized, A: Allocator> fmt::Pointer for Rc<T, A> {
2636 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
2637 fmt::Pointer::fmt(&(&raw const **self), f)
2638 }
2639}
2640
2641#[cfg(not(no_global_oom_handling))]
2642#[stable(feature = "from_for_ptrs", since = "1.6.0")]
2643impl<T> From<T> for Rc<T> {
2644 /// Converts a generic type `T` into an `Rc<T>`
2645 ///
2646 /// The conversion allocates on the heap and moves `t`
2647 /// from the stack into it.
2648 ///
2649 /// # Example
2650 /// ```rust
2651 /// # use std::rc::Rc;
2652 /// let x = 5;
2653 /// let rc = Rc::new(5);
2654 ///
2655 /// assert_eq!(Rc::from(x), rc);
2656 /// ```
2657 fn from(t: T) -> Self {
2658 Rc::new(t)
2659 }
2660}
2661
2662#[cfg(not(no_global_oom_handling))]
2663#[stable(feature = "shared_from_array", since = "1.74.0")]
2664impl<T, const N: usize> From<[T; N]> for Rc<[T]> {
2665 /// Converts a [`[T; N]`](prim@array) into an `Rc<[T]>`.
2666 ///
2667 /// The conversion moves the array into a newly allocated `Rc`.
2668 ///
2669 /// # Example
2670 ///
2671 /// ```
2672 /// # use std::rc::Rc;
2673 /// let original: [i32; 3] = [1, 2, 3];
2674 /// let shared: Rc<[i32]> = Rc::from(original);
2675 /// assert_eq!(&[1, 2, 3], &shared[..]);
2676 /// ```
2677 #[inline]
2678 fn from(v: [T; N]) -> Rc<[T]> {
2679 Rc::<[T; N]>::from(v)
2680 }
2681}
2682
2683#[cfg(not(no_global_oom_handling))]
2684#[stable(feature = "shared_from_slice", since = "1.21.0")]
2685impl<T: Clone> From<&[T]> for Rc<[T]> {
2686 /// Allocates a reference-counted slice and fills it by cloning `v`'s items.
2687 ///
2688 /// # Example
2689 ///
2690 /// ```
2691 /// # use std::rc::Rc;
2692 /// let original: &[i32] = &[1, 2, 3];
2693 /// let shared: Rc<[i32]> = Rc::from(original);
2694 /// assert_eq!(&[1, 2, 3], &shared[..]);
2695 /// ```
2696 #[inline]
2697 fn from(v: &[T]) -> Rc<[T]> {
2698 <Self as RcFromSlice<T>>::from_slice(v)
2699 }
2700}
2701
2702#[cfg(not(no_global_oom_handling))]
2703#[stable(feature = "shared_from_mut_slice", since = "1.84.0")]
2704impl<T: Clone> From<&mut [T]> for Rc<[T]> {
2705 /// Allocates a reference-counted slice and fills it by cloning `v`'s items.
2706 ///
2707 /// # Example
2708 ///
2709 /// ```
2710 /// # use std::rc::Rc;
2711 /// let mut original = [1, 2, 3];
2712 /// let original: &mut [i32] = &mut original;
2713 /// let shared: Rc<[i32]> = Rc::from(original);
2714 /// assert_eq!(&[1, 2, 3], &shared[..]);
2715 /// ```
2716 #[inline]
2717 fn from(v: &mut [T]) -> Rc<[T]> {
2718 Rc::from(&*v)
2719 }
2720}
2721
2722#[cfg(not(no_global_oom_handling))]
2723#[stable(feature = "shared_from_slice", since = "1.21.0")]
2724impl From<&str> for Rc<str> {
2725 /// Allocates a reference-counted string slice and copies `v` into it.
2726 ///
2727 /// # Example
2728 ///
2729 /// ```
2730 /// # use std::rc::Rc;
2731 /// let shared: Rc<str> = Rc::from("statue");
2732 /// assert_eq!("statue", &shared[..]);
2733 /// ```
2734 #[inline]
2735 fn from(v: &str) -> Rc<str> {
2736 let rc = Rc::<[u8]>::from(v.as_bytes());
2737 unsafe { Rc::from_raw(Rc::into_raw(rc) as *const str) }
2738 }
2739}
2740
2741#[cfg(not(no_global_oom_handling))]
2742#[stable(feature = "shared_from_mut_slice", since = "1.84.0")]
2743impl From<&mut str> for Rc<str> {
2744 /// Allocates a reference-counted string slice and copies `v` into it.
2745 ///
2746 /// # Example
2747 ///
2748 /// ```
2749 /// # use std::rc::Rc;
2750 /// let mut original = String::from("statue");
2751 /// let original: &mut str = &mut original;
2752 /// let shared: Rc<str> = Rc::from(original);
2753 /// assert_eq!("statue", &shared[..]);
2754 /// ```
2755 #[inline]
2756 fn from(v: &mut str) -> Rc<str> {
2757 Rc::from(&*v)
2758 }
2759}
2760
2761#[cfg(not(no_global_oom_handling))]
2762#[stable(feature = "shared_from_slice", since = "1.21.0")]
2763impl From<String> for Rc<str> {
2764 /// Allocates a reference-counted string slice and copies `v` into it.
2765 ///
2766 /// # Example
2767 ///
2768 /// ```
2769 /// # use std::rc::Rc;
2770 /// let original: String = "statue".to_owned();
2771 /// let shared: Rc<str> = Rc::from(original);
2772 /// assert_eq!("statue", &shared[..]);
2773 /// ```
2774 #[inline]
2775 fn from(v: String) -> Rc<str> {
2776 Rc::from(&v[..])
2777 }
2778}
2779
2780#[cfg(not(no_global_oom_handling))]
2781#[stable(feature = "shared_from_slice", since = "1.21.0")]
2782impl<T: ?Sized, A: Allocator> From<Box<T, A>> for Rc<T, A> {
    /// Moves a boxed object to a new, reference-counted allocation.
2784 ///
2785 /// # Example
2786 ///
2787 /// ```
2788 /// # use std::rc::Rc;
2789 /// let original: Box<i32> = Box::new(1);
2790 /// let shared: Rc<i32> = Rc::from(original);
2791 /// assert_eq!(1, *shared);
2792 /// ```
2793 #[inline]
2794 fn from(v: Box<T, A>) -> Rc<T, A> {
2795 Rc::from_box_in(v)
2796 }
2797}
2798
2799#[cfg(not(no_global_oom_handling))]
2800#[stable(feature = "shared_from_slice", since = "1.21.0")]
2801impl<T, A: Allocator> From<Vec<T, A>> for Rc<[T], A> {
2802 /// Allocates a reference-counted slice and moves `v`'s items into it.
2803 ///
2804 /// # Example
2805 ///
2806 /// ```
2807 /// # use std::rc::Rc;
2808 /// let unique: Vec<i32> = vec![1, 2, 3];
2809 /// let shared: Rc<[i32]> = Rc::from(unique);
2810 /// assert_eq!(&[1, 2, 3], &shared[..]);
2811 /// ```
2812 #[inline]
2813 fn from(v: Vec<T, A>) -> Rc<[T], A> {
2814 unsafe {
2815 let (vec_ptr, len, cap, alloc) = v.into_raw_parts_with_alloc();
2816
2817 let rc_ptr = Self::allocate_for_slice_in(len, &alloc);
2818 ptr::copy_nonoverlapping(vec_ptr, (&raw mut (*rc_ptr).value) as *mut T, len);
2819
2820 // Create a `Vec<T, &A>` with length 0, to deallocate the buffer
2821 // without dropping its contents or the allocator
2822 let _ = Vec::from_raw_parts_in(vec_ptr, 0, cap, &alloc);
2823
2824 Self::from_ptr_in(rc_ptr, alloc)
2825 }
2826 }
2827}
2828
2829#[stable(feature = "shared_from_cow", since = "1.45.0")]
2830impl<'a, B> From<Cow<'a, B>> for Rc<B>
2831where
2832 B: ToOwned + ?Sized,
2833 Rc<B>: From<&'a B> + From<B::Owned>,
2834{
2835 /// Creates a reference-counted pointer from a clone-on-write pointer by
2836 /// copying its content.
2837 ///
2838 /// # Example
2839 ///
2840 /// ```rust
2841 /// # use std::rc::Rc;
2842 /// # use std::borrow::Cow;
2843 /// let cow: Cow<'_, str> = Cow::Borrowed("eggplant");
2844 /// let shared: Rc<str> = Rc::from(cow);
2845 /// assert_eq!("eggplant", &shared[..]);
2846 /// ```
2847 #[inline]
2848 fn from(cow: Cow<'a, B>) -> Rc<B> {
2849 match cow {
2850 Cow::Borrowed(s) => Rc::from(s),
2851 Cow::Owned(s) => Rc::from(s),
2852 }
2853 }
2854}
2855
2856#[stable(feature = "shared_from_str", since = "1.62.0")]
2857impl From<Rc<str>> for Rc<[u8]> {
2858 /// Converts a reference-counted string slice into a byte slice.
2859 ///
2860 /// # Example
2861 ///
2862 /// ```
2863 /// # use std::rc::Rc;
2864 /// let string: Rc<str> = Rc::from("eggplant");
2865 /// let bytes: Rc<[u8]> = Rc::from(string);
2866 /// assert_eq!("eggplant".as_bytes(), bytes.as_ref());
2867 /// ```
2868 #[inline]
2869 fn from(rc: Rc<str>) -> Self {
2870 // SAFETY: `str` has the same layout as `[u8]`.
2871 unsafe { Rc::from_raw(Rc::into_raw(rc) as *const [u8]) }
2872 }
2873}
2874
2875#[stable(feature = "boxed_slice_try_from", since = "1.43.0")]
2876impl<T, A: Allocator, const N: usize> TryFrom<Rc<[T], A>> for Rc<[T; N], A> {
2877 type Error = Rc<[T], A>;
2878
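    /// Attempts to convert a reference-counted slice into a reference-counted array of length `N`.
    ///
    /// Succeeds only if the slice's length is exactly `N`; otherwise the original
    /// `Rc<[T]>` is returned unchanged as the error value.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let slice: Rc<[i32]> = Rc::from([1, 2, 3]);
    /// let array: Rc<[i32; 3]> = slice.try_into().unwrap();
    /// assert_eq!(*array, [1, 2, 3]);
    ///
    /// let slice: Rc<[i32]> = Rc::from([1, 2, 3]);
    /// assert!(Rc::<[i32; 4]>::try_from(slice).is_err());
    /// ```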
2879 fn try_from(boxed_slice: Rc<[T], A>) -> Result<Self, Self::Error> {
2880 if boxed_slice.len() == N {
2881 let (ptr, alloc) = Rc::into_inner_with_allocator(boxed_slice);
2882 Ok(unsafe { Rc::from_inner_in(ptr.cast(), alloc) })
2883 } else {
2884 Err(boxed_slice)
2885 }
2886 }
2887}
2888
2889#[cfg(not(no_global_oom_handling))]
2890#[stable(feature = "shared_from_iter", since = "1.37.0")]
2891impl<T> FromIterator<T> for Rc<[T]> {
2892 /// Takes each element in the `Iterator` and collects it into an `Rc<[T]>`.
2893 ///
2894 /// # Performance characteristics
2895 ///
2896 /// ## The general case
2897 ///
2898 /// In the general case, collecting into `Rc<[T]>` is done by first
2899 /// collecting into a `Vec<T>`. That is, when writing the following:
2900 ///
2901 /// ```rust
2902 /// # use std::rc::Rc;
2903 /// let evens: Rc<[u8]> = (0..10).filter(|&x| x % 2 == 0).collect();
2904 /// # assert_eq!(&*evens, &[0, 2, 4, 6, 8]);
2905 /// ```
2906 ///
2907 /// this behaves as if we wrote:
2908 ///
2909 /// ```rust
2910 /// # use std::rc::Rc;
2911 /// let evens: Rc<[u8]> = (0..10).filter(|&x| x % 2 == 0)
2912 /// .collect::<Vec<_>>() // The first set of allocations happens here.
2913 /// .into(); // A second allocation for `Rc<[T]>` happens here.
2914 /// # assert_eq!(&*evens, &[0, 2, 4, 6, 8]);
2915 /// ```
2916 ///
2917 /// This will allocate as many times as needed for constructing the `Vec<T>`
2918 /// and then it will allocate once for turning the `Vec<T>` into the `Rc<[T]>`.
2919 ///
2920 /// ## Iterators of known length
2921 ///
2922 /// When your `Iterator` implements `TrustedLen` and is of an exact size,
2923 /// a single allocation will be made for the `Rc<[T]>`. For example:
2924 ///
2925 /// ```rust
2926 /// # use std::rc::Rc;
2927 /// let evens: Rc<[u8]> = (0..10).collect(); // Just a single allocation happens here.
2928 /// # assert_eq!(&*evens, &*(0..10).collect::<Vec<_>>());
2929 /// ```
2930 fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
2931 ToRcSlice::to_rc_slice(iter.into_iter())
2932 }
2933}
2934
2935/// Specialization trait used for collecting into `Rc<[T]>`.
2936#[cfg(not(no_global_oom_handling))]
2937trait ToRcSlice<T>: Iterator<Item = T> + Sized {
2938 fn to_rc_slice(self) -> Rc<[T]>;
2939}
2940
2941#[cfg(not(no_global_oom_handling))]
2942impl<T, I: Iterator<Item = T>> ToRcSlice<T> for I {
2943 default fn to_rc_slice(self) -> Rc<[T]> {
2944 self.collect::<Vec<T>>().into()
2945 }
2946}
2947
2948#[cfg(not(no_global_oom_handling))]
2949impl<T, I: iter::TrustedLen<Item = T>> ToRcSlice<T> for I {
2950 fn to_rc_slice(self) -> Rc<[T]> {
2951 // This is the case for a `TrustedLen` iterator.
2952 let (low, high) = self.size_hint();
2953 if let Some(high) = high {
2954 debug_assert_eq!(
2955 low,
2956 high,
2957 "TrustedLen iterator's size hint is not exact: {:?}",
2958 (low, high)
2959 );
2960
2961 unsafe {
2962 // SAFETY: We need to ensure that the iterator has an exact length and we have.
2963 Rc::from_iter_exact(self, low)
2964 }
2965 } else {
2966 // TrustedLen contract guarantees that `upper_bound == None` implies an iterator
2967 // length exceeding `usize::MAX`.
2968 // The default implementation would collect into a vec which would panic.
2969 // Thus we panic here immediately without invoking `Vec` code.
2970 panic!("capacity overflow");
2971 }
2972 }
2973}
2974
2975/// `Weak` is a version of [`Rc`] that holds a non-owning reference to the
2976/// managed allocation.
2977///
2978/// The allocation is accessed by calling [`upgrade`] on the `Weak`
2979/// pointer, which returns an <code>[Option]<[Rc]\<T>></code>.
2980///
2981/// Since a `Weak` reference does not count towards ownership, it will not
2982/// prevent the value stored in the allocation from being dropped, and `Weak` itself makes no
2983/// guarantees about the value still being present. Thus it may return [`None`]
2984/// when [`upgrade`]d. Note however that a `Weak` reference *does* prevent the allocation
2985/// itself (the backing store) from being deallocated.
2986///
2987/// A `Weak` pointer is useful for keeping a temporary reference to the allocation
2988/// managed by [`Rc`] without preventing its inner value from being dropped. It is also used to
2989/// prevent circular references between [`Rc`] pointers, since mutual owning references
2990/// would never allow either [`Rc`] to be dropped. For example, a tree could
2991/// have strong [`Rc`] pointers from parent nodes to children, and `Weak`
2992/// pointers from children back to their parents.
2993///
2994/// The typical way to obtain a `Weak` pointer is to call [`Rc::downgrade`].
2995///
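/// # Examples
///
/// ```
/// use std::rc::Rc;
///
/// let strong = Rc::new(5);
/// let weak = Rc::downgrade(&strong);
///
/// // While a strong pointer exists, the weak pointer can be upgraded.
/// assert_eq!(weak.upgrade().map(|rc| *rc), Some(5));
///
/// drop(strong);
/// // Once the last strong pointer is gone, upgrading fails.
/// assert!(weak.upgrade().is_none());
/// ```
///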
2996/// [`upgrade`]: Weak::upgrade
2997#[stable(feature = "rc_weak", since = "1.4.0")]
2998#[rustc_diagnostic_item = "RcWeak"]
2999pub struct Weak<
3000 T: ?Sized,
3001 #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
3002> {
3003 // This is a `NonNull` to allow optimizing the size of this type in enums,
3004 // but it is not necessarily a valid pointer.
3005 // `Weak::new` sets this to `usize::MAX` so that it doesn’t need
3006 // to allocate space on the heap. That's not a value a real pointer
3007 // will ever have because RcInner has alignment at least 2.
3008 ptr: NonNull<RcInner<T>>,
3009 alloc: A,
3010}
3011
3012#[stable(feature = "rc_weak", since = "1.4.0")]
3013impl<T: ?Sized, A: Allocator> !Send for Weak<T, A> {}
3014#[stable(feature = "rc_weak", since = "1.4.0")]
3015impl<T: ?Sized, A: Allocator> !Sync for Weak<T, A> {}
3016
3017#[unstable(feature = "coerce_unsized", issue = "18598")]
3018impl<T: ?Sized + Unsize<U>, U: ?Sized, A: Allocator> CoerceUnsized<Weak<U, A>> for Weak<T, A> {}
3019
3020#[unstable(feature = "dispatch_from_dyn", issue = "none")]
3021impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Weak<U>> for Weak<T> {}
3022
3023impl<T> Weak<T> {
3024 /// Constructs a new `Weak<T>`, without allocating any memory.
3025 /// Calling [`upgrade`] on the return value always gives [`None`].
3026 ///
3027 /// [`upgrade`]: Weak::upgrade
3028 ///
3029 /// # Examples
3030 ///
3031 /// ```
3032 /// use std::rc::Weak;
3033 ///
3034 /// let empty: Weak<i64> = Weak::new();
3035 /// assert!(empty.upgrade().is_none());
3036 /// ```
3037 #[inline]
3038 #[stable(feature = "downgraded_weak", since = "1.10.0")]
3039 #[rustc_const_stable(feature = "const_weak_new", since = "1.73.0")]
3040 #[must_use]
3041 pub const fn new() -> Weak<T> {
3042 Weak { ptr: NonNull::without_provenance(NonZeroUsize::MAX), alloc: Global }
3043 }
3044}
3045
3046impl<T, A: Allocator> Weak<T, A> {
3047 /// Constructs a new `Weak<T>`, without allocating any memory, technically in the provided
3048 /// allocator.
3049 /// Calling [`upgrade`] on the return value always gives [`None`].
3050 ///
3051 /// [`upgrade`]: Weak::upgrade
3052 ///
3053 /// # Examples
3054 ///
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::rc::Weak;
    /// use std::alloc::System;
    ///
    /// let empty: Weak<i64, System> = Weak::new_in(System);
    /// assert!(empty.upgrade().is_none());
    /// ```
3061 #[inline]
3062 #[unstable(feature = "allocator_api", issue = "32838")]
3063 pub fn new_in(alloc: A) -> Weak<T, A> {
3064 Weak { ptr: NonNull::without_provenance(NonZeroUsize::MAX), alloc }
3065 }
3066}
3067
3068pub(crate) fn is_dangling<T: ?Sized>(ptr: *const T) -> bool {
3069 (ptr.cast::<()>()).addr() == usize::MAX
3070}
3071
3072/// Helper type to allow accessing the reference counts without
3073/// making any assertions about the data field.
3074struct WeakInner<'a> {
3075 weak: &'a Cell<usize>,
3076 strong: &'a Cell<usize>,
3077}
3078
3079impl<T: ?Sized> Weak<T> {
3080 /// Converts a raw pointer previously created by [`into_raw`] back into `Weak<T>`.
3081 ///
3082 /// This can be used to safely get a strong reference (by calling [`upgrade`]
3083 /// later) or to deallocate the weak count by dropping the `Weak<T>`.
3084 ///
3085 /// It takes ownership of one weak reference (with the exception of pointers created by [`new`],
3086 /// as these don't own anything; the method still works on them).
3087 ///
3088 /// # Safety
3089 ///
3090 /// The pointer must have originated from the [`into_raw`] and must still own its potential
3091 /// weak reference, and `ptr` must point to a block of memory allocated by the global allocator.
3092 ///
3093 /// It is allowed for the strong count to be 0 at the time of calling this. Nevertheless, this
3094 /// takes ownership of one weak reference currently represented as a raw pointer (the weak
3095 /// count is not modified by this operation) and therefore it must be paired with a previous
3096 /// call to [`into_raw`].
3097 ///
3098 /// # Examples
3099 ///
3100 /// ```
3101 /// use std::rc::{Rc, Weak};
3102 ///
3103 /// let strong = Rc::new("hello".to_owned());
3104 ///
3105 /// let raw_1 = Rc::downgrade(&strong).into_raw();
3106 /// let raw_2 = Rc::downgrade(&strong).into_raw();
3107 ///
3108 /// assert_eq!(2, Rc::weak_count(&strong));
3109 ///
3110 /// assert_eq!("hello", &*unsafe { Weak::from_raw(raw_1) }.upgrade().unwrap());
3111 /// assert_eq!(1, Rc::weak_count(&strong));
3112 ///
3113 /// drop(strong);
3114 ///
3115 /// // Decrement the last weak count.
3116 /// assert!(unsafe { Weak::from_raw(raw_2) }.upgrade().is_none());
3117 /// ```
3118 ///
3119 /// [`into_raw`]: Weak::into_raw
3120 /// [`upgrade`]: Weak::upgrade
3121 /// [`new`]: Weak::new
3122 #[inline]
3123 #[stable(feature = "weak_into_raw", since = "1.45.0")]
3124 pub unsafe fn from_raw(ptr: *const T) -> Self {
3125 unsafe { Self::from_raw_in(ptr, Global) }
3126 }
3127
3128 /// Consumes the `Weak<T>` and turns it into a raw pointer.
3129 ///
3130 /// This converts the weak pointer into a raw pointer, while still preserving the ownership of
3131 /// one weak reference (the weak count is not modified by this operation). It can be turned
3132 /// back into the `Weak<T>` with [`from_raw`].
3133 ///
3134 /// The same restrictions of accessing the target of the pointer as with
3135 /// [`as_ptr`] apply.
3136 ///
3137 /// # Examples
3138 ///
3139 /// ```
3140 /// use std::rc::{Rc, Weak};
3141 ///
3142 /// let strong = Rc::new("hello".to_owned());
3143 /// let weak = Rc::downgrade(&strong);
3144 /// let raw = weak.into_raw();
3145 ///
3146 /// assert_eq!(1, Rc::weak_count(&strong));
3147 /// assert_eq!("hello", unsafe { &*raw });
3148 ///
3149 /// drop(unsafe { Weak::from_raw(raw) });
3150 /// assert_eq!(0, Rc::weak_count(&strong));
3151 /// ```
3152 ///
3153 /// [`from_raw`]: Weak::from_raw
3154 /// [`as_ptr`]: Weak::as_ptr
3155 #[must_use = "losing the pointer will leak memory"]
3156 #[stable(feature = "weak_into_raw", since = "1.45.0")]
3157 pub fn into_raw(self) -> *const T {
3158 mem::ManuallyDrop::new(self).as_ptr()
3159 }
3160}
3161
3162impl<T: ?Sized, A: Allocator> Weak<T, A> {
3163 /// Returns a reference to the underlying allocator.
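    ///
    /// # Examples
    ///
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::rc::Rc;
    /// use std::alloc::System;
    ///
    /// let strong = Rc::new_in(5, System);
    /// let weak = Rc::downgrade(&strong);
    /// let _alloc: &System = weak.allocator();
    /// ```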
3164 #[inline]
3165 #[unstable(feature = "allocator_api", issue = "32838")]
3166 pub fn allocator(&self) -> &A {
3167 &self.alloc
3168 }
3169
3170 /// Returns a raw pointer to the object `T` pointed to by this `Weak<T>`.
3171 ///
3172 /// The pointer is valid only if there are some strong references. The pointer may be dangling,
3173 /// unaligned or even [`null`] otherwise.
3174 ///
3175 /// # Examples
3176 ///
3177 /// ```
3178 /// use std::rc::Rc;
3179 /// use std::ptr;
3180 ///
3181 /// let strong = Rc::new("hello".to_owned());
3182 /// let weak = Rc::downgrade(&strong);
3183 /// // Both point to the same object
3184 /// assert!(ptr::eq(&*strong, weak.as_ptr()));
3185 /// // The strong here keeps it alive, so we can still access the object.
3186 /// assert_eq!("hello", unsafe { &*weak.as_ptr() });
3187 ///
3188 /// drop(strong);
3189 /// // But not any more. We can do weak.as_ptr(), but accessing the pointer would lead to
3190 /// // undefined behavior.
3191 /// // assert_eq!("hello", unsafe { &*weak.as_ptr() });
3192 /// ```
3193 ///
3194 /// [`null`]: ptr::null
3195 #[must_use]
3196 #[stable(feature = "rc_as_ptr", since = "1.45.0")]
3197 pub fn as_ptr(&self) -> *const T {
3198 let ptr: *mut RcInner<T> = NonNull::as_ptr(self.ptr);
3199
3200 if is_dangling(ptr) {
3201 // If the pointer is dangling, we return the sentinel directly. This cannot be
3202 // a valid payload address, as the payload is at least as aligned as RcInner (usize).
3203 ptr as *const T
3204 } else {
3205 // SAFETY: if is_dangling returns false, then the pointer is dereferenceable.
3206 // The payload may be dropped at this point, and we have to maintain provenance,
3207 // so use raw pointer manipulation.
3208 unsafe { &raw mut (*ptr).value }
3209 }
3210 }
3211
3212 /// Consumes the `Weak<T>`, returning the wrapped pointer and allocator.
3213 ///
3214 /// This converts the weak pointer into a raw pointer, while still preserving the ownership of
3215 /// one weak reference (the weak count is not modified by this operation). It can be turned
3216 /// back into the `Weak<T>` with [`from_raw_in`].
3217 ///
3218 /// The same restrictions of accessing the target of the pointer as with
3219 /// [`as_ptr`] apply.
3220 ///
3221 /// # Examples
3222 ///
3223 /// ```
3224 /// #![feature(allocator_api)]
3225 /// use std::rc::{Rc, Weak};
3226 /// use std::alloc::System;
3227 ///
3228 /// let strong = Rc::new_in("hello".to_owned(), System);
3229 /// let weak = Rc::downgrade(&strong);
3230 /// let (raw, alloc) = weak.into_raw_with_allocator();
3231 ///
3232 /// assert_eq!(1, Rc::weak_count(&strong));
3233 /// assert_eq!("hello", unsafe { &*raw });
3234 ///
3235 /// drop(unsafe { Weak::from_raw_in(raw, alloc) });
3236 /// assert_eq!(0, Rc::weak_count(&strong));
3237 /// ```
3238 ///
3239 /// [`from_raw_in`]: Weak::from_raw_in
3240 /// [`as_ptr`]: Weak::as_ptr
3241 #[must_use = "losing the pointer will leak memory"]
3242 #[inline]
3243 #[unstable(feature = "allocator_api", issue = "32838")]
3244 pub fn into_raw_with_allocator(self) -> (*const T, A) {
3245 let this = mem::ManuallyDrop::new(self);
3246 let result = this.as_ptr();
3247 // Safety: `this` is ManuallyDrop so the allocator will not be double-dropped
3248 let alloc = unsafe { ptr::read(&this.alloc) };
3249 (result, alloc)
3250 }
3251
3252 /// Converts a raw pointer previously created by [`into_raw`] back into `Weak<T>`.
3253 ///
3254 /// This can be used to safely get a strong reference (by calling [`upgrade`]
3255 /// later) or to deallocate the weak count by dropping the `Weak<T>`.
3256 ///
3257 /// It takes ownership of one weak reference (with the exception of pointers created by [`new`],
3258 /// as these don't own anything; the method still works on them).
3259 ///
3260 /// # Safety
3261 ///
3262 /// The pointer must have originated from the [`into_raw`] and must still own its potential
3263 /// weak reference, and `ptr` must point to a block of memory allocated by `alloc`.
3264 ///
3265 /// It is allowed for the strong count to be 0 at the time of calling this. Nevertheless, this
3266 /// takes ownership of one weak reference currently represented as a raw pointer (the weak
3267 /// count is not modified by this operation) and therefore it must be paired with a previous
3268 /// call to [`into_raw`].
3269 ///
3270 /// # Examples
3271 ///
3272 /// ```
3273 /// use std::rc::{Rc, Weak};
3274 ///
3275 /// let strong = Rc::new("hello".to_owned());
3276 ///
3277 /// let raw_1 = Rc::downgrade(&strong).into_raw();
3278 /// let raw_2 = Rc::downgrade(&strong).into_raw();
3279 ///
3280 /// assert_eq!(2, Rc::weak_count(&strong));
3281 ///
3282 /// assert_eq!("hello", &*unsafe { Weak::from_raw(raw_1) }.upgrade().unwrap());
3283 /// assert_eq!(1, Rc::weak_count(&strong));
3284 ///
3285 /// drop(strong);
3286 ///
3287 /// // Decrement the last weak count.
3288 /// assert!(unsafe { Weak::from_raw(raw_2) }.upgrade().is_none());
3289 /// ```
3290 ///
3291 /// [`into_raw`]: Weak::into_raw
3292 /// [`upgrade`]: Weak::upgrade
3293 /// [`new`]: Weak::new
3294 #[inline]
3295 #[unstable(feature = "allocator_api", issue = "32838")]
3296 pub unsafe fn from_raw_in(ptr: *const T, alloc: A) -> Self {
3297 // See Weak::as_ptr for context on how the input pointer is derived.
3298
3299 let ptr = if is_dangling(ptr) {
3300 // This is a dangling Weak.
3301 ptr as *mut RcInner<T>
3302 } else {
3303 // Otherwise, we're guaranteed the pointer came from a nondangling Weak.
3304 // SAFETY: data_offset is safe to call, as ptr references a real (potentially dropped) T.
3305 let offset = unsafe { data_offset(ptr) };
3306 // Thus, we reverse the offset to get the whole RcInner.
3307 // SAFETY: the pointer originated from a Weak, so this offset is safe.
3308 unsafe { ptr.byte_sub(offset) as *mut RcInner<T> }
3309 };
3310
3311 // SAFETY: we now have recovered the original Weak pointer, so can create the Weak.
3312 Weak { ptr: unsafe { NonNull::new_unchecked(ptr) }, alloc }
3313 }
3314
3315 /// Attempts to upgrade the `Weak` pointer to an [`Rc`], delaying
3316 /// dropping of the inner value if successful.
3317 ///
3318 /// Returns [`None`] if the inner value has since been dropped.
3319 ///
3320 /// # Examples
3321 ///
3322 /// ```
3323 /// use std::rc::Rc;
3324 ///
3325 /// let five = Rc::new(5);
3326 ///
3327 /// let weak_five = Rc::downgrade(&five);
3328 ///
3329 /// let strong_five: Option<Rc<_>> = weak_five.upgrade();
3330 /// assert!(strong_five.is_some());
3331 ///
3332 /// // Destroy all strong pointers.
3333 /// drop(strong_five);
3334 /// drop(five);
3335 ///
3336 /// assert!(weak_five.upgrade().is_none());
3337 /// ```
3338 #[must_use = "this returns a new `Rc`, \
3339 without modifying the original weak pointer"]
3340 #[stable(feature = "rc_weak", since = "1.4.0")]
3341 pub fn upgrade(&self) -> Option<Rc<T, A>>
3342 where
3343 A: Clone,
3344 {
3345 let inner = self.inner()?;
3346
3347 if inner.strong() == 0 {
3348 None
3349 } else {
3350 unsafe {
3351 inner.inc_strong();
3352 Some(Rc::from_inner_in(self.ptr, self.alloc.clone()))
3353 }
3354 }
3355 }
3356
3357 /// Gets the number of strong (`Rc`) pointers pointing to this allocation.
3358 ///
3359 /// If `self` was created using [`Weak::new`], this will return 0.
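    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let five = Rc::new(5);
    /// let weak_five = Rc::downgrade(&five);
    /// assert_eq!(1, weak_five.strong_count());
    ///
    /// drop(five);
    /// assert_eq!(0, weak_five.strong_count());
    /// ```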
3360 #[must_use]
3361 #[stable(feature = "weak_counts", since = "1.41.0")]
3362 pub fn strong_count(&self) -> usize {
3363 if let Some(inner) = self.inner() { inner.strong() } else { 0 }
3364 }
3365
3366 /// Gets the number of `Weak` pointers pointing to this allocation.
3367 ///
3368 /// If no strong pointers remain, this will return zero.
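    ///
    /// # Examples
    ///
    /// A short sketch; note that the reported count does not include the implicit weak
    /// reference held collectively by the strong pointers:
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let five = Rc::new(5);
    /// let weak_five = Rc::downgrade(&five);
    /// assert_eq!(1, weak_five.weak_count());
    ///
    /// let also_weak = weak_five.clone();
    /// assert_eq!(2, weak_five.weak_count());
    ///
    /// drop(also_weak);
    /// assert_eq!(1, weak_five.weak_count());
    ///
    /// // Once no strong pointers remain, the count is reported as zero.
    /// drop(five);
    /// assert_eq!(0, weak_five.weak_count());
    /// ```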
3369 #[must_use]
3370 #[stable(feature = "weak_counts", since = "1.41.0")]
3371 pub fn weak_count(&self) -> usize {
3372 if let Some(inner) = self.inner() {
3373 if inner.strong() > 0 {
3374 inner.weak() - 1 // subtract the implicit weak ptr
3375 } else {
3376 0
3377 }
3378 } else {
3379 0
3380 }
3381 }
3382
3383    /// Returns `None` when the pointer is dangling and there is no allocated `RcInner`
3384    /// (i.e., when this `Weak` was created by `Weak::new`).
3385 #[inline]
3386 fn inner(&self) -> Option<WeakInner<'_>> {
3387 if is_dangling(self.ptr.as_ptr()) {
3388 None
3389 } else {
3390 // We are careful to *not* create a reference covering the "data" field, as
3391 // the field may be mutated concurrently (for example, if the last `Rc`
3392 // is dropped, the data field will be dropped in-place).
3393 Some(unsafe {
3394 let ptr = self.ptr.as_ptr();
3395 WeakInner { strong: &(*ptr).strong, weak: &(*ptr).weak }
3396 })
3397 }
3398 }
3399
3400    /// Returns `true` if the two `Weak`s point to the same allocation (similar to [`ptr::eq`]), or if
3401    /// both don't point to any allocation (because they were created with `Weak::new()`). However,
3402    /// this function ignores the metadata of `dyn Trait` pointers.
3403    ///
3404    /// # Notes
3405    ///
3406    /// Since this compares pointers, two `Weak`s created with `Weak::new()` will compare equal to
3407    /// each other, even though they don't point to any allocation.
3408 ///
3409 /// # Examples
3410 ///
3411 /// ```
3412 /// use std::rc::Rc;
3413 ///
3414 /// let first_rc = Rc::new(5);
3415 /// let first = Rc::downgrade(&first_rc);
3416 /// let second = Rc::downgrade(&first_rc);
3417 ///
3418 /// assert!(first.ptr_eq(&second));
3419 ///
3420 /// let third_rc = Rc::new(5);
3421 /// let third = Rc::downgrade(&third_rc);
3422 ///
3423 /// assert!(!first.ptr_eq(&third));
3424 /// ```
3425 ///
3426 /// Comparing `Weak::new`.
3427 ///
3428 /// ```
3429 /// use std::rc::{Rc, Weak};
3430 ///
3431 /// let first = Weak::new();
3432 /// let second = Weak::new();
3433 /// assert!(first.ptr_eq(&second));
3434 ///
3435 /// let third_rc = Rc::new(());
3436 /// let third = Rc::downgrade(&third_rc);
3437 /// assert!(!first.ptr_eq(&third));
3438 /// ```
3439 #[inline]
3440 #[must_use]
3441 #[stable(feature = "weak_ptr_eq", since = "1.39.0")]
3442 pub fn ptr_eq(&self, other: &Self) -> bool {
3443 ptr::addr_eq(self.ptr.as_ptr(), other.ptr.as_ptr())
3444 }
3445}
3446
3447#[stable(feature = "rc_weak", since = "1.4.0")]
3448unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for Weak<T, A> {
3449 /// Drops the `Weak` pointer.
3450 ///
3451 /// # Examples
3452 ///
3453 /// ```
3454 /// use std::rc::{Rc, Weak};
3455 ///
3456 /// struct Foo;
3457 ///
3458 /// impl Drop for Foo {
3459 /// fn drop(&mut self) {
3460 /// println!("dropped!");
3461 /// }
3462 /// }
3463 ///
3464 /// let foo = Rc::new(Foo);
3465 /// let weak_foo = Rc::downgrade(&foo);
3466 /// let other_weak_foo = Weak::clone(&weak_foo);
3467 ///
3468 /// drop(weak_foo); // Doesn't print anything
3469 /// drop(foo); // Prints "dropped!"
3470 ///
3471 /// assert!(other_weak_foo.upgrade().is_none());
3472 /// ```
3473 fn drop(&mut self) {
3474 let inner = if let Some(inner) = self.inner() { inner } else { return };
3475
3476 inner.dec_weak();
3477 // the weak count starts at 1, and will only go to zero if all
3478 // the strong pointers have disappeared.
3479 if inner.weak() == 0 {
3480 unsafe {
3481 self.alloc.deallocate(self.ptr.cast(), Layout::for_value_raw(self.ptr.as_ptr()));
3482 }
3483 }
3484 }
3485}
3486
3487#[stable(feature = "rc_weak", since = "1.4.0")]
3488impl<T: ?Sized, A: Allocator + Clone> Clone for Weak<T, A> {
3489 /// Makes a clone of the `Weak` pointer that points to the same allocation.
3490 ///
3491 /// # Examples
3492 ///
3493 /// ```
3494 /// use std::rc::{Rc, Weak};
3495 ///
3496 /// let weak_five = Rc::downgrade(&Rc::new(5));
3497 ///
3498 /// let _ = Weak::clone(&weak_five);
3499 /// ```
3500 #[inline]
3501 fn clone(&self) -> Weak<T, A> {
3502 if let Some(inner) = self.inner() {
3503 inner.inc_weak()
3504 }
3505 Weak { ptr: self.ptr, alloc: self.alloc.clone() }
3506 }
3507}
3508
3509#[unstable(feature = "ergonomic_clones", issue = "132290")]
3510impl<T: ?Sized, A: Allocator + Clone> UseCloned for Weak<T, A> {}
3511
3512#[stable(feature = "rc_weak", since = "1.4.0")]
3513impl<T: ?Sized, A: Allocator> fmt::Debug for Weak<T, A> {
3514 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
3515 write!(f, "(Weak)")
3516 }
3517}
3518
3519#[stable(feature = "downgraded_weak", since = "1.10.0")]
3520impl<T> Default for Weak<T> {
3521 /// Constructs a new `Weak<T>`, without allocating any memory.
3522 /// Calling [`upgrade`] on the return value always gives [`None`].
3523 ///
3524 /// [`upgrade`]: Weak::upgrade
3525 ///
3526 /// # Examples
3527 ///
3528 /// ```
3529 /// use std::rc::Weak;
3530 ///
3531 /// let empty: Weak<i64> = Default::default();
3532 /// assert!(empty.upgrade().is_none());
3533 /// ```
3534 fn default() -> Weak<T> {
3535 Weak::new()
3536 }
3537}
3538
3539// NOTE: If you mem::forget Rcs (or Weaks), drop is skipped and the ref-count
3540// is not decremented, meaning the ref-count can overflow, and then you can
3541// free the allocation while outstanding Rcs (or Weaks) exist, which would be
3542// unsound. We abort because this is such a degenerate scenario that we don't
3543// care about what happens -- no real program should ever experience this.
3544//
3545// This should have negligible overhead since you don't actually need to
3546// clone these much in Rust thanks to ownership and move-semantics.
3547
3548#[doc(hidden)]
3549trait RcInnerPtr {
3550 fn weak_ref(&self) -> &Cell<usize>;
3551 fn strong_ref(&self) -> &Cell<usize>;
3552
3553 #[inline]
3554 fn strong(&self) -> usize {
3555 self.strong_ref().get()
3556 }
3557
3558 #[inline]
3559 fn inc_strong(&self) {
3560 let strong = self.strong();
3561
3562 // We insert an `assume` here to hint LLVM at an otherwise
3563 // missed optimization.
3564 // SAFETY: The reference count will never be zero when this is
3565 // called.
3566 unsafe {
3567 hint::assert_unchecked(strong != 0);
3568 }
3569
3570 let strong = strong.wrapping_add(1);
3571 self.strong_ref().set(strong);
3572
3573 // We want to abort on overflow instead of dropping the value.
3574 // Checking for overflow after the store instead of before
3575 // allows for slightly better code generation.
3576 if core::intrinsics::unlikely(strong == 0) {
3577 abort();
3578 }
3579 }
3580
3581 #[inline]
3582 fn dec_strong(&self) {
3583 self.strong_ref().set(self.strong() - 1);
3584 }
3585
3586 #[inline]
3587 fn weak(&self) -> usize {
3588 self.weak_ref().get()
3589 }
3590
3591 #[inline]
3592 fn inc_weak(&self) {
3593 let weak = self.weak();
3594
3595 // We insert an `assume` here to hint LLVM at an otherwise
3596 // missed optimization.
3597 // SAFETY: The reference count will never be zero when this is
3598 // called.
3599 unsafe {
3600 hint::assert_unchecked(weak != 0);
3601 }
3602
3603 let weak = weak.wrapping_add(1);
3604 self.weak_ref().set(weak);
3605
3606 // We want to abort on overflow instead of dropping the value.
3607 // Checking for overflow after the store instead of before
3608 // allows for slightly better code generation.
3609 if core::intrinsics::unlikely(weak == 0) {
3610 abort();
3611 }
3612 }
3613
3614 #[inline]
3615 fn dec_weak(&self) {
3616 self.weak_ref().set(self.weak() - 1);
3617 }
3618}
3619
3620impl<T: ?Sized> RcInnerPtr for RcInner<T> {
3621 #[inline(always)]
3622 fn weak_ref(&self) -> &Cell<usize> {
3623 &self.weak
3624 }
3625
3626 #[inline(always)]
3627 fn strong_ref(&self) -> &Cell<usize> {
3628 &self.strong
3629 }
3630}
3631
3632impl<'a> RcInnerPtr for WeakInner<'a> {
3633 #[inline(always)]
3634 fn weak_ref(&self) -> &Cell<usize> {
3635 self.weak
3636 }
3637
3638 #[inline(always)]
3639 fn strong_ref(&self) -> &Cell<usize> {
3640 self.strong
3641 }
3642}
3643
3644#[stable(feature = "rust1", since = "1.0.0")]
3645impl<T: ?Sized, A: Allocator> borrow::Borrow<T> for Rc<T, A> {
3646 fn borrow(&self) -> &T {
3647 &**self
3648 }
3649}
3650
3651#[stable(since = "1.5.0", feature = "smart_ptr_as_ref")]
3652impl<T: ?Sized, A: Allocator> AsRef<T> for Rc<T, A> {
3653 fn as_ref(&self) -> &T {
3654 &**self
3655 }
3656}
3657
3658#[stable(feature = "pin", since = "1.33.0")]
3659impl<T: ?Sized, A: Allocator> Unpin for Rc<T, A> {}
3660
3661/// Gets the offset within an `RcInner` for the payload behind a pointer.
3662///
3663/// # Safety
3664///
3665/// The pointer must point to (and have valid metadata for) a previously
3666/// valid instance of T, but the T is allowed to be dropped.
3667unsafe fn data_offset<T: ?Sized>(ptr: *const T) -> usize {
3668 // Align the unsized value to the end of the RcInner.
3669 // Because RcInner is repr(C), it will always be the last field in memory.
3670 // SAFETY: since the only unsized types possible are slices, trait objects,
3671 // and extern types, the input safety requirement is currently enough to
3672 // satisfy the requirements of align_of_val_raw; this is an implementation
3673 // detail of the language that must not be relied upon outside of std.
3674 unsafe { data_offset_align(align_of_val_raw(ptr)) }
3675}
3676
3677#[inline]
3678fn data_offset_align(align: usize) -> usize {
3679 let layout = Layout::new::<RcInner<()>>();
3680 layout.size() + layout.padding_needed_for(align)
3681}
3682
3683/// A uniquely owned [`Rc`].
3684///
3685/// This represents an `Rc` that is known to be uniquely owned -- that is, to have exactly one strong
3686/// reference. Multiple weak pointers can be created, but attempts to upgrade those to strong
3687/// references will fail unless the `UniqueRc` they point to has been converted into a regular `Rc`.
3688///
3689/// Because it is uniquely owned, the contents of a `UniqueRc` can be freely mutated. A common
3690/// use case is to have an object be mutable during its initialization phase and then, once
3691/// initialization is complete, make it immutable by converting it into a normal `Rc`.
3692///
3693/// This can be used as a flexible way to create cyclic data structures, as in the example below.
3694///
3695/// ```
3696/// #![feature(unique_rc_arc)]
3697/// use std::rc::{Rc, Weak, UniqueRc};
3698///
3699/// struct Gadget {
3700/// #[allow(dead_code)]
3701/// me: Weak<Gadget>,
3702/// }
3703///
3704/// fn create_gadget() -> Option<Rc<Gadget>> {
3705/// let mut rc = UniqueRc::new(Gadget {
3706/// me: Weak::new(),
3707/// });
3708/// rc.me = UniqueRc::downgrade(&rc);
3709/// Some(UniqueRc::into_rc(rc))
3710/// }
3711///
3712/// create_gadget().unwrap();
3713/// ```
3714///
3715/// An advantage of using `UniqueRc` over [`Rc::new_cyclic`] to build cyclic data structures is that
3716/// [`Rc::new_cyclic`]'s `data_fn` parameter cannot be async or return a [`Result`]. As shown in the
3717/// previous example, `UniqueRc` allows for more flexibility in the construction of cyclic data,
3718/// including fallible or async constructors.
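///
/// For instance, a fallible constructor is straightforward to write; the `Gadget` variant and
/// error type below are illustrative placeholders, not part of the API:
///
/// ```
/// #![feature(unique_rc_arc)]
/// use std::rc::{Rc, Weak, UniqueRc};
///
/// struct Gadget {
///     #[allow(dead_code)]
///     me: Weak<Gadget>,
///     #[allow(dead_code)]
///     id: u32,
/// }
///
/// fn try_create_gadget(id: u32) -> Result<Rc<Gadget>, String> {
///     if id == 0 {
///         return Err("id must be nonzero".to_string());
///     }
///     let mut rc = UniqueRc::new(Gadget { me: Weak::new(), id });
///     rc.me = UniqueRc::downgrade(&rc);
///     Ok(UniqueRc::into_rc(rc))
/// }
///
/// assert!(try_create_gadget(0).is_err());
/// assert!(try_create_gadget(7).is_ok());
/// ```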
3719#[unstable(feature = "unique_rc_arc", issue = "112566")]
3720pub struct UniqueRc<
3721 T: ?Sized,
3722 #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
3723> {
3724 ptr: NonNull<RcInner<T>>,
3725 // Define the ownership of `RcInner<T>` for drop-check
3726 _marker: PhantomData<RcInner<T>>,
3727 // Invariance is necessary for soundness: once other `Weak`
3728 // references exist, we already have a form of shared mutability!
3729 _marker2: PhantomData<*mut T>,
3730 alloc: A,
3731}
3732
3733// Not necessary for correctness since `UniqueRc` contains `NonNull`,
3734// but having an explicit negative impl is nice for documentation purposes
3735// and results in nicer error messages.
3736#[unstable(feature = "unique_rc_arc", issue = "112566")]
3737impl<T: ?Sized, A: Allocator> !Send for UniqueRc<T, A> {}
3738
3739// Not necessary for correctness since `UniqueRc` contains `NonNull`,
3740// but having an explicit negative impl is nice for documentation purposes
3741// and results in nicer error messages.
3742#[unstable(feature = "unique_rc_arc", issue = "112566")]
3743impl<T: ?Sized, A: Allocator> !Sync for UniqueRc<T, A> {}
3744
3745#[unstable(feature = "unique_rc_arc", issue = "112566")]
3746impl<T: ?Sized + Unsize<U>, U: ?Sized, A: Allocator> CoerceUnsized<UniqueRc<U, A>>
3747 for UniqueRc<T, A>
3748{
3749}
3750
3751//#[unstable(feature = "unique_rc_arc", issue = "112566")]
3752#[unstable(feature = "dispatch_from_dyn", issue = "none")]
3753impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<UniqueRc<U>> for UniqueRc<T> {}
3754
3755#[unstable(feature = "unique_rc_arc", issue = "112566")]
3756impl<T: ?Sized + fmt::Display, A: Allocator> fmt::Display for UniqueRc<T, A> {
3757 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
3758 fmt::Display::fmt(&**self, f)
3759 }
3760}
3761
3762#[unstable(feature = "unique_rc_arc", issue = "112566")]
3763impl<T: ?Sized + fmt::Debug, A: Allocator> fmt::Debug for UniqueRc<T, A> {
3764 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
3765 fmt::Debug::fmt(&**self, f)
3766 }
3767}
3768
3769#[unstable(feature = "unique_rc_arc", issue = "112566")]
3770impl<T: ?Sized, A: Allocator> fmt::Pointer for UniqueRc<T, A> {
3771 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
3772 fmt::Pointer::fmt(&(&raw const **self), f)
3773 }
3774}
3775
3776#[unstable(feature = "unique_rc_arc", issue = "112566")]
3777impl<T: ?Sized, A: Allocator> borrow::Borrow<T> for UniqueRc<T, A> {
3778 fn borrow(&self) -> &T {
3779 &**self
3780 }
3781}
3782
3783#[unstable(feature = "unique_rc_arc", issue = "112566")]
3784impl<T: ?Sized, A: Allocator> borrow::BorrowMut<T> for UniqueRc<T, A> {
3785 fn borrow_mut(&mut self) -> &mut T {
3786 &mut **self
3787 }
3788}
3789
3790#[unstable(feature = "unique_rc_arc", issue = "112566")]
3791impl<T: ?Sized, A: Allocator> AsRef<T> for UniqueRc<T, A> {
3792 fn as_ref(&self) -> &T {
3793 &**self
3794 }
3795}
3796
3797#[unstable(feature = "unique_rc_arc", issue = "112566")]
3798impl<T: ?Sized, A: Allocator> AsMut<T> for UniqueRc<T, A> {
3799 fn as_mut(&mut self) -> &mut T {
3800 &mut **self
3801 }
3802}
3803
3804#[unstable(feature = "unique_rc_arc", issue = "112566")]
3805impl<T: ?Sized, A: Allocator> Unpin for UniqueRc<T, A> {}
3806
3807#[unstable(feature = "unique_rc_arc", issue = "112566")]
3808impl<T: ?Sized + PartialEq, A: Allocator> PartialEq for UniqueRc<T, A> {
3809 /// Equality for two `UniqueRc`s.
3810 ///
3811 /// Two `UniqueRc`s are equal if their inner values are equal.
3812 ///
3813 /// # Examples
3814 ///
3815 /// ```
3816 /// #![feature(unique_rc_arc)]
3817 /// use std::rc::UniqueRc;
3818 ///
3819 /// let five = UniqueRc::new(5);
3820 ///
3821 /// assert!(five == UniqueRc::new(5));
3822 /// ```
3823 #[inline]
3824 fn eq(&self, other: &Self) -> bool {
3825 PartialEq::eq(&**self, &**other)
3826 }
3827
3828 /// Inequality for two `UniqueRc`s.
3829 ///
3830 /// Two `UniqueRc`s are not equal if their inner values are not equal.
3831 ///
3832 /// # Examples
3833 ///
3834 /// ```
3835 /// #![feature(unique_rc_arc)]
3836 /// use std::rc::UniqueRc;
3837 ///
3838 /// let five = UniqueRc::new(5);
3839 ///
3840 /// assert!(five != UniqueRc::new(6));
3841 /// ```
3842 #[inline]
3843 fn ne(&self, other: &Self) -> bool {
3844 PartialEq::ne(&**self, &**other)
3845 }
3846}
3847
3848#[unstable(feature = "unique_rc_arc", issue = "112566")]
3849impl<T: ?Sized + PartialOrd, A: Allocator> PartialOrd for UniqueRc<T, A> {
3850 /// Partial comparison for two `UniqueRc`s.
3851 ///
3852 /// The two are compared by calling `partial_cmp()` on their inner values.
3853 ///
3854 /// # Examples
3855 ///
3856 /// ```
3857 /// #![feature(unique_rc_arc)]
3858 /// use std::rc::UniqueRc;
3859 /// use std::cmp::Ordering;
3860 ///
3861 /// let five = UniqueRc::new(5);
3862 ///
3863 /// assert_eq!(Some(Ordering::Less), five.partial_cmp(&UniqueRc::new(6)));
3864 /// ```
3865 #[inline(always)]
3866 fn partial_cmp(&self, other: &UniqueRc<T, A>) -> Option<Ordering> {
3867 (**self).partial_cmp(&**other)
3868 }
3869
3870 /// Less-than comparison for two `UniqueRc`s.
3871 ///
3872 /// The two are compared by calling `<` on their inner values.
3873 ///
3874 /// # Examples
3875 ///
3876 /// ```
3877 /// #![feature(unique_rc_arc)]
3878 /// use std::rc::UniqueRc;
3879 ///
3880 /// let five = UniqueRc::new(5);
3881 ///
3882 /// assert!(five < UniqueRc::new(6));
3883 /// ```
3884 #[inline(always)]
3885 fn lt(&self, other: &UniqueRc<T, A>) -> bool {
3886 **self < **other
3887 }
3888
3889 /// 'Less than or equal to' comparison for two `UniqueRc`s.
3890 ///
3891 /// The two are compared by calling `<=` on their inner values.
3892 ///
3893 /// # Examples
3894 ///
3895 /// ```
3896 /// #![feature(unique_rc_arc)]
3897 /// use std::rc::UniqueRc;
3898 ///
3899 /// let five = UniqueRc::new(5);
3900 ///
3901 /// assert!(five <= UniqueRc::new(5));
3902 /// ```
3903 #[inline(always)]
3904 fn le(&self, other: &UniqueRc<T, A>) -> bool {
3905 **self <= **other
3906 }
3907
3908 /// Greater-than comparison for two `UniqueRc`s.
3909 ///
3910 /// The two are compared by calling `>` on their inner values.
3911 ///
3912 /// # Examples
3913 ///
3914 /// ```
3915 /// #![feature(unique_rc_arc)]
3916 /// use std::rc::UniqueRc;
3917 ///
3918 /// let five = UniqueRc::new(5);
3919 ///
3920 /// assert!(five > UniqueRc::new(4));
3921 /// ```
3922 #[inline(always)]
3923 fn gt(&self, other: &UniqueRc<T, A>) -> bool {
3924 **self > **other
3925 }
3926
3927 /// 'Greater than or equal to' comparison for two `UniqueRc`s.
3928 ///
3929 /// The two are compared by calling `>=` on their inner values.
3930 ///
3931 /// # Examples
3932 ///
3933 /// ```
3934 /// #![feature(unique_rc_arc)]
3935 /// use std::rc::UniqueRc;
3936 ///
3937 /// let five = UniqueRc::new(5);
3938 ///
3939 /// assert!(five >= UniqueRc::new(5));
3940 /// ```
3941 #[inline(always)]
3942 fn ge(&self, other: &UniqueRc<T, A>) -> bool {
3943 **self >= **other
3944 }
3945}
3946
3947#[unstable(feature = "unique_rc_arc", issue = "112566")]
3948impl<T: ?Sized + Ord, A: Allocator> Ord for UniqueRc<T, A> {
3949 /// Comparison for two `UniqueRc`s.
3950 ///
3951 /// The two are compared by calling `cmp()` on their inner values.
3952 ///
3953 /// # Examples
3954 ///
3955 /// ```
3956 /// #![feature(unique_rc_arc)]
3957 /// use std::rc::UniqueRc;
3958 /// use std::cmp::Ordering;
3959 ///
3960 /// let five = UniqueRc::new(5);
3961 ///
3962 /// assert_eq!(Ordering::Less, five.cmp(&UniqueRc::new(6)));
3963 /// ```
3964 #[inline]
3965 fn cmp(&self, other: &UniqueRc<T, A>) -> Ordering {
3966 (**self).cmp(&**other)
3967 }
3968}
3969
3970#[unstable(feature = "unique_rc_arc", issue = "112566")]
3971impl<T: ?Sized + Eq, A: Allocator> Eq for UniqueRc<T, A> {}
3972
3973#[unstable(feature = "unique_rc_arc", issue = "112566")]
3974impl<T: ?Sized + Hash, A: Allocator> Hash for UniqueRc<T, A> {
3975 fn hash<H: Hasher>(&self, state: &mut H) {
3976 (**self).hash(state);
3977 }
3978}
3979
3980// Depends on A = Global
3981impl<T> UniqueRc<T> {
3982 /// Creates a new `UniqueRc`.
3983 ///
3984 /// Weak references to this `UniqueRc` can be created with [`UniqueRc::downgrade`]. Upgrading
3985 /// these weak references will fail before the `UniqueRc` has been converted into an [`Rc`].
3986 /// After converting the `UniqueRc` into an [`Rc`], any weak references created beforehand will
3987 /// point to the new [`Rc`].
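    ///
    /// # Examples
    ///
    /// A minimal sketch of the intended lifecycle (requires the unstable `unique_rc_arc` feature):
    ///
    /// ```
    /// #![feature(unique_rc_arc)]
    /// use std::rc::UniqueRc;
    ///
    /// let mut unique = UniqueRc::new(5);
    /// // The value can be mutated freely while it is uniquely owned.
    /// *unique += 1;
    ///
    /// let weak = UniqueRc::downgrade(&unique);
    /// // Upgrading fails until the `UniqueRc` becomes a regular `Rc`.
    /// assert!(weak.upgrade().is_none());
    ///
    /// let rc = UniqueRc::into_rc(unique);
    /// assert_eq!(*rc, 6);
    /// assert!(weak.upgrade().is_some());
    /// ```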
3988 #[cfg(not(no_global_oom_handling))]
3989 #[unstable(feature = "unique_rc_arc", issue = "112566")]
3990 pub fn new(value: T) -> Self {
3991 Self::new_in(value, Global)
3992 }
3993}
3994
3995impl<T, A: Allocator> UniqueRc<T, A> {
3996 /// Creates a new `UniqueRc` in the provided allocator.
3997 ///
3998 /// Weak references to this `UniqueRc` can be created with [`UniqueRc::downgrade`]. Upgrading
3999 /// these weak references will fail before the `UniqueRc` has been converted into an [`Rc`].
4000 /// After converting the `UniqueRc` into an [`Rc`], any weak references created beforehand will
4001 /// point to the new [`Rc`].
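    ///
    /// # Examples
    ///
    /// A sketch using the standard library's `System` allocator (requires the unstable
    /// `unique_rc_arc` and `allocator_api` features):
    ///
    /// ```
    /// #![feature(unique_rc_arc, allocator_api)]
    /// use std::alloc::System;
    /// use std::rc::UniqueRc;
    ///
    /// let unique = UniqueRc::new_in(5, System);
    /// let rc = UniqueRc::into_rc(unique);
    /// assert_eq!(*rc, 5);
    /// ```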
4002 #[cfg(not(no_global_oom_handling))]
4003 #[unstable(feature = "unique_rc_arc", issue = "112566")]
4004 pub fn new_in(value: T, alloc: A) -> Self {
4005 let (ptr, alloc) = Box::into_unique(Box::new_in(
4006 RcInner {
4007 strong: Cell::new(0),
4008 // keep one weak reference so if all the weak pointers that are created are dropped
4009 // the UniqueRc still stays valid.
4010 weak: Cell::new(1),
4011 value,
4012 },
4013 alloc,
4014 ));
4015 Self { ptr: ptr.into(), _marker: PhantomData, _marker2: PhantomData, alloc }
4016 }
4017}
4018
4019impl<T: ?Sized, A: Allocator> UniqueRc<T, A> {
4020 /// Converts the `UniqueRc` into a regular [`Rc`].
4021 ///
4022    /// This consumes the `UniqueRc` and returns a regular [`Rc`] containing the value held by the
4023    /// `UniqueRc` passed to `into_rc`.
4024 ///
4025 /// Any weak references created before this method is called can now be upgraded to strong
4026 /// references.
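    ///
    /// # Examples
    ///
    /// A short sketch of finishing construction and handing out shared ownership:
    ///
    /// ```
    /// #![feature(unique_rc_arc)]
    /// use std::rc::{Rc, UniqueRc};
    ///
    /// let unique = UniqueRc::new(String::from("hello"));
    /// let weak = UniqueRc::downgrade(&unique);
    ///
    /// let rc: Rc<String> = UniqueRc::into_rc(unique);
    ///
    /// // The weak reference created earlier can now be upgraded.
    /// let upgraded = weak.upgrade().unwrap();
    /// assert!(Rc::ptr_eq(&rc, &upgraded));
    /// assert_eq!(*upgraded, "hello");
    /// ```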
4027 #[unstable(feature = "unique_rc_arc", issue = "112566")]
4028 pub fn into_rc(this: Self) -> Rc<T, A> {
4029 let mut this = ManuallyDrop::new(this);
4030
4031 // Move the allocator out.
4032 // SAFETY: `this.alloc` will not be accessed again, nor dropped because it is in
4033 // a `ManuallyDrop`.
4034 let alloc: A = unsafe { ptr::read(&this.alloc) };
4035
4036 // SAFETY: This pointer was allocated at creation time so we know it is valid.
4037 unsafe {
4038 // Convert our weak reference into a strong reference
4039 this.ptr.as_mut().strong.set(1);
4040 Rc::from_inner_in(this.ptr, alloc)
4041 }
4042 }
4043}
4044
4045impl<T: ?Sized, A: Allocator + Clone> UniqueRc<T, A> {
4046 /// Creates a new weak reference to the `UniqueRc`.
4047 ///
4048 /// Attempting to upgrade this weak reference will fail before the `UniqueRc` has been converted
4049    /// to an [`Rc`] using [`UniqueRc::into_rc`].
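    ///
    /// # Examples
    ///
    /// A brief sketch of the upgrade behavior before and after conversion:
    ///
    /// ```
    /// #![feature(unique_rc_arc)]
    /// use std::rc::UniqueRc;
    ///
    /// let unique = UniqueRc::new(5);
    /// let weak = UniqueRc::downgrade(&unique);
    ///
    /// // No strong references exist yet, so the upgrade fails.
    /// assert!(weak.upgrade().is_none());
    ///
    /// let _rc = UniqueRc::into_rc(unique);
    /// assert_eq!(*weak.upgrade().unwrap(), 5);
    /// ```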
4050 #[unstable(feature = "unique_rc_arc", issue = "112566")]
4051 pub fn downgrade(this: &Self) -> Weak<T, A> {
4052 // SAFETY: This pointer was allocated at creation time and we guarantee that we only have
4053 // one strong reference before converting to a regular Rc.
4054 unsafe {
4055 this.ptr.as_ref().inc_weak();
4056 }
4057 Weak { ptr: this.ptr, alloc: this.alloc.clone() }
4058 }
4059}
4060
4061#[unstable(feature = "unique_rc_arc", issue = "112566")]
4062impl<T: ?Sized, A: Allocator> Deref for UniqueRc<T, A> {
4063 type Target = T;
4064
4065 fn deref(&self) -> &T {
4066 // SAFETY: This pointer was allocated at creation time so we know it is valid.
4067 unsafe { &self.ptr.as_ref().value }
4068 }
4069}
4070
4071#[unstable(feature = "unique_rc_arc", issue = "112566")]
4072impl<T: ?Sized, A: Allocator> DerefMut for UniqueRc<T, A> {
4073 fn deref_mut(&mut self) -> &mut T {
4074 // SAFETY: This pointer was allocated at creation time so we know it is valid. We know we
4075 // have unique ownership and therefore it's safe to make a mutable reference because
4076 // `UniqueRc` owns the only strong reference to itself.
4077 unsafe { &mut (*self.ptr.as_ptr()).value }
4078 }
4079}
4080
4081#[unstable(feature = "unique_rc_arc", issue = "112566")]
4082unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for UniqueRc<T, A> {
4083 fn drop(&mut self) {
4084 unsafe {
4085 // destroy the contained object
4086 drop_in_place(DerefMut::deref_mut(self));
4087
4088 // remove the implicit "strong weak" pointer now that we've destroyed the contents.
4089 self.ptr.as_ref().dec_weak();
4090
4091 if self.ptr.as_ref().weak() == 0 {
4092 self.alloc.deallocate(self.ptr.cast(), Layout::for_value_raw(self.ptr.as_ptr()));
4093 }
4094 }
4095 }
4096}
4097
4098/// A unique owning pointer to a [`RcInner`] **that does not imply the contents are initialized,**
4099/// but will deallocate it (without dropping the value) when dropped.
4100///
4101/// This is a helper for [`Rc::make_mut()`] to ensure correct cleanup on panic.
4102/// It is nearly a duplicate of `UniqueRc<MaybeUninit<T>, A>` except that it allows `T: !Sized`,
4103/// which `MaybeUninit` does not.
4104#[cfg(not(no_global_oom_handling))]
4105struct UniqueRcUninit<T: ?Sized, A: Allocator> {
4106 ptr: NonNull<RcInner<T>>,
4107 layout_for_value: Layout,
4108 alloc: Option<A>,
4109}
4110
4111#[cfg(not(no_global_oom_handling))]
4112impl<T: ?Sized, A: Allocator> UniqueRcUninit<T, A> {
4113 /// Allocates a RcInner with layout suitable to contain `for_value` or a clone of it.
4114 fn new(for_value: &T, alloc: A) -> UniqueRcUninit<T, A> {
4115 let layout = Layout::for_value(for_value);
4116 let ptr = unsafe {
4117 Rc::allocate_for_layout(
4118 layout,
4119 |layout_for_rc_inner| alloc.allocate(layout_for_rc_inner),
4120 |mem| mem.with_metadata_of(ptr::from_ref(for_value) as *const RcInner<T>),
4121 )
4122 };
4123 Self { ptr: NonNull::new(ptr).unwrap(), layout_for_value: layout, alloc: Some(alloc) }
4124 }
4125
4126 /// Returns the pointer to be written into to initialize the [`Rc`].
4127 fn data_ptr(&mut self) -> *mut T {
4128 let offset = data_offset_align(self.layout_for_value.align());
4129 unsafe { self.ptr.as_ptr().byte_add(offset) as *mut T }
4130 }
4131
4132 /// Upgrade this into a normal [`Rc`].
4133 ///
4134 /// # Safety
4135 ///
4136 /// The data must have been initialized (by writing to [`Self::data_ptr()`]).
4137 unsafe fn into_rc(self) -> Rc<T, A> {
4138 let mut this = ManuallyDrop::new(self);
4139 let ptr = this.ptr;
4140 let alloc = this.alloc.take().unwrap();
4141
4142 // SAFETY: The pointer is valid as per `UniqueRcUninit::new`, and the caller is responsible
4143 // for having initialized the data.
4144 unsafe { Rc::from_ptr_in(ptr.as_ptr(), alloc) }
4145 }
4146}
4147
4148#[cfg(not(no_global_oom_handling))]
4149impl<T: ?Sized, A: Allocator> Drop for UniqueRcUninit<T, A> {
4150 fn drop(&mut self) {
4151 // SAFETY:
4152 // * new() produced a pointer safe to deallocate.
4153 // * We own the pointer unless into_rc() was called, which forgets us.
4154 unsafe {
4155 self.alloc.take().unwrap().deallocate(
4156 self.ptr.cast(),
4157 rc_inner_layout_for_value_layout(self.layout_for_value),
4158 );
4159 }
4160 }
4161}