alloc/rc.rs
1//! Single-threaded reference-counting pointers. 'Rc' stands for 'Reference
2//! Counted'.
3//!
4//! The type [`Rc<T>`][`Rc`] provides shared ownership of a value of type `T`,
5//! allocated in the heap. Invoking [`clone`][clone] on [`Rc`] produces a new
6//! pointer to the same allocation in the heap. When the last [`Rc`] pointer to a
7//! given allocation is destroyed, the value stored in that allocation (often
8//! referred to as the "inner value") is also dropped.
9//!
10//! Shared references in Rust disallow mutation by default, and [`Rc`]
11//! is no exception: you cannot generally obtain a mutable reference to
12//! something inside an [`Rc`]. If you need mutability, put a [`Cell`]
13//! or [`RefCell`] inside the [`Rc`]; see [an example of mutability
14//! inside an `Rc`][mutability].
15//!
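//! As a brief illustrative sketch (separate from the linked example), a [`RefCell`]
//! inside an [`Rc`] allows mutation through any of the shared handles:
//!
//! ```
//! use std::cell::RefCell;
//! use std::rc::Rc;
//!
//! let shared = Rc::new(RefCell::new(0));
//! let also_shared = Rc::clone(&shared);
//!
//! // Mutate through one handle; the change is visible through the other.
//! *also_shared.borrow_mut() += 1;
//! assert_eq!(*shared.borrow(), 1);
//! ```
//!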
16//! [`Rc`] uses non-atomic reference counting. This means that overhead is very
17//! low, but an [`Rc`] cannot be sent between threads, and consequently [`Rc`]
18//! does not implement [`Send`]. As a result, the Rust compiler
19//! will check *at compile time* that you are not sending [`Rc`]s between
20//! threads. If you need multi-threaded, atomic reference counting, use
21//! [`sync::Arc`][arc].
22//!
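//! As an illustrative sketch, the following fails to compile because the closure
//! passed to `thread::spawn` must be [`Send`], and it captures an [`Rc`]
//! (the doctest is intentionally marked `compile_fail`):
//!
//! ```compile_fail
//! use std::rc::Rc;
//! use std::thread;
//!
//! let rc = Rc::new(5);
//! // error: `Rc<i32>` cannot be sent between threads safely
//! thread::spawn(move || println!("{rc}"));
//! ```
//!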
23//! The [`downgrade`][downgrade] method can be used to create a non-owning
24//! [`Weak`] pointer. A [`Weak`] pointer can be [`upgrade`][upgrade]d
25//! to an [`Rc`], but this will return [`None`] if the value stored in the allocation has
26//! already been dropped. In other words, `Weak` pointers do not keep the value
27//! inside the allocation alive; however, they *do* keep the allocation
28//! (the backing store for the inner value) alive.
29//!
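//! A short sketch of that behavior:
//!
//! ```
//! use std::rc::Rc;
//!
//! let strong = Rc::new("hello".to_owned());
//! let weak = Rc::downgrade(&strong);
//! // The value is still alive, so the `Weak` pointer can be upgraded.
//! assert!(weak.upgrade().is_some());
//!
//! drop(strong);
//! // The last `Rc` is gone, so upgrading now returns `None`.
//! assert!(weak.upgrade().is_none());
//! ```
//!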
30//! A cycle between [`Rc`] pointers will never be deallocated. For this reason,
31//! [`Weak`] is used to break cycles. For example, a tree could have strong
32//! [`Rc`] pointers from parent nodes to children, and [`Weak`] pointers from
33//! children back to their parents.
34//!
35//! `Rc<T>` automatically dereferences to `T` (via the [`Deref`] trait),
36//! so you can call `T`'s methods on a value of type [`Rc<T>`][`Rc`]. To avoid name
37//! clashes with `T`'s methods, the methods of [`Rc<T>`][`Rc`] itself are associated
38//! functions, called using [fully qualified syntax]:
39//!
40//! ```
41//! use std::rc::Rc;
42//!
43//! let my_rc = Rc::new(());
44//! let my_weak = Rc::downgrade(&my_rc);
45//! ```
46//!
47//! `Rc<T>`'s implementations of traits like `Clone` may also be called using
48//! fully qualified syntax. Some people prefer to use fully qualified syntax,
49//! while others prefer using method-call syntax.
50//!
51//! ```
52//! use std::rc::Rc;
53//!
54//! let rc = Rc::new(());
55//! // Method-call syntax
56//! let rc2 = rc.clone();
57//! // Fully qualified syntax
58//! let rc3 = Rc::clone(&rc);
59//! ```
60//!
61//! [`Weak<T>`][`Weak`] does not auto-dereference to `T`, because the inner value may have
62//! already been dropped.
63//!
64//! # Cloning references
65//!
66//! Creating a new reference to the same allocation as an existing reference counted pointer
67//! is done using the `Clone` trait implemented for [`Rc<T>`][`Rc`] and [`Weak<T>`][`Weak`].
68//!
69//! ```
70//! use std::rc::Rc;
71//!
72//! let foo = Rc::new(vec![1.0, 2.0, 3.0]);
73//! // The two syntaxes below are equivalent.
74//! let a = foo.clone();
75//! let b = Rc::clone(&foo);
76//! // a and b both point to the same memory location as foo.
77//! ```
78//!
79//! The `Rc::clone(&from)` syntax is the most idiomatic because it conveys the meaning
80//! of the code more explicitly. In the example above, this syntax makes it easier to see that
81//! this code is creating a new reference rather than copying the whole content of `foo`.
82//!
83//! # Examples
84//!
85//! Consider a scenario where a set of `Gadget`s are owned by a given `Owner`.
86//! We want to have our `Gadget`s point to their `Owner`. We can't do this with
87//! unique ownership, because more than one gadget may belong to the same
88//! `Owner`. [`Rc`] allows us to share an `Owner` between multiple `Gadget`s,
89//! and have the `Owner` remain allocated as long as any `Gadget` points at it.
90//!
91//! ```
92//! use std::rc::Rc;
93//!
94//! struct Owner {
95//! name: String,
96//! // ...other fields
97//! }
98//!
99//! struct Gadget {
100//! id: i32,
101//! owner: Rc<Owner>,
102//! // ...other fields
103//! }
104//!
105//! fn main() {
106//! // Create a reference-counted `Owner`.
107//! let gadget_owner: Rc<Owner> = Rc::new(
108//! Owner {
109//! name: "Gadget Man".to_string(),
110//! }
111//! );
112//!
113//! // Create `Gadget`s belonging to `gadget_owner`. Cloning the `Rc<Owner>`
114//! // gives us a new pointer to the same `Owner` allocation, incrementing
115//! // the reference count in the process.
116//! let gadget1 = Gadget {
117//! id: 1,
118//! owner: Rc::clone(&gadget_owner),
119//! };
120//! let gadget2 = Gadget {
121//! id: 2,
122//! owner: Rc::clone(&gadget_owner),
123//! };
124//!
125//! // Dispose of our local variable `gadget_owner`.
126//! drop(gadget_owner);
127//!
128//! // Despite dropping `gadget_owner`, we're still able to print out the name
129//! // of the `Owner` of the `Gadget`s. This is because we've only dropped a
130//! // single `Rc<Owner>`, not the `Owner` it points to. As long as there are
131//! // other `Rc<Owner>` pointing at the same `Owner` allocation, it will remain
132//! // live. The field projection `gadget1.owner.name` works because
133//! // `Rc<Owner>` automatically dereferences to `Owner`.
134//! println!("Gadget {} owned by {}", gadget1.id, gadget1.owner.name);
135//! println!("Gadget {} owned by {}", gadget2.id, gadget2.owner.name);
136//!
137//! // At the end of the function, `gadget1` and `gadget2` are destroyed, and
138//! // with them the last counted references to our `Owner`. Gadget Man now
139//! // gets destroyed as well.
140//! }
141//! ```
142//!
143//! If our requirements change, and we also need to be able to traverse from
144//! `Owner` to `Gadget`, we will run into problems. An [`Rc`] pointer from `Owner`
145//! to `Gadget` introduces a cycle. This means that their
146//! reference counts can never reach 0, and the allocation will never be destroyed:
147//! a memory leak. In order to get around this, we can use [`Weak`]
148//! pointers.
149//!
150//! Rust actually makes it somewhat difficult to produce this loop in the first
151//! place. In order to end up with two values that point at each other, one of
152//! them needs to be mutable. This is difficult because [`Rc`] enforces
153//! memory safety by only giving out shared references to the value it wraps,
154//! and these don't allow direct mutation. We need to wrap the part of the
155//! value we wish to mutate in a [`RefCell`], which provides *interior
156//! mutability*: a method to achieve mutability through a shared reference.
157//! [`RefCell`] enforces Rust's borrowing rules at runtime.
158//!
159//! ```
160//! use std::rc::Rc;
161//! use std::rc::Weak;
162//! use std::cell::RefCell;
163//!
164//! struct Owner {
165//! name: String,
166//! gadgets: RefCell<Vec<Weak<Gadget>>>,
167//! // ...other fields
168//! }
169//!
170//! struct Gadget {
171//! id: i32,
172//! owner: Rc<Owner>,
173//! // ...other fields
174//! }
175//!
176//! fn main() {
177//! // Create a reference-counted `Owner`. Note that we've put the `Owner`'s
178//! // vector of `Gadget`s inside a `RefCell` so that we can mutate it through
179//! // a shared reference.
180//! let gadget_owner: Rc<Owner> = Rc::new(
181//! Owner {
182//! name: "Gadget Man".to_string(),
183//! gadgets: RefCell::new(vec![]),
184//! }
185//! );
186//!
187//! // Create `Gadget`s belonging to `gadget_owner`, as before.
188//! let gadget1 = Rc::new(
189//! Gadget {
190//! id: 1,
191//! owner: Rc::clone(&gadget_owner),
192//! }
193//! );
194//! let gadget2 = Rc::new(
195//! Gadget {
196//! id: 2,
197//! owner: Rc::clone(&gadget_owner),
198//! }
199//! );
200//!
201//! // Add the `Gadget`s to their `Owner`.
202//! {
203//! let mut gadgets = gadget_owner.gadgets.borrow_mut();
204//! gadgets.push(Rc::downgrade(&gadget1));
205//! gadgets.push(Rc::downgrade(&gadget2));
206//!
207//! // `RefCell` dynamic borrow ends here.
208//! }
209//!
210//! // Iterate over our `Gadget`s, printing their details out.
211//! for gadget_weak in gadget_owner.gadgets.borrow().iter() {
212//!
213//! // `gadget_weak` is a `Weak<Gadget>`. Since `Weak` pointers can't
214//! // guarantee the allocation still exists, we need to call
215//! // `upgrade`, which returns an `Option<Rc<Gadget>>`.
216//! //
217//! // In this case we know the allocation still exists, so we simply
218//! // `unwrap` the `Option`. In a more complicated program, you might
219//! // need graceful error handling for a `None` result.
220//!
221//! let gadget = gadget_weak.upgrade().unwrap();
222//! println!("Gadget {} owned by {}", gadget.id, gadget.owner.name);
223//! }
224//!
225//! // At the end of the function, `gadget_owner`, `gadget1`, and `gadget2`
226//! // are destroyed. There are now no strong (`Rc`) pointers to the
227//! // gadgets, so they are destroyed. This zeroes the reference count on
228//! // Gadget Man, so he gets destroyed as well.
229//! }
230//! ```
231//!
232//! [clone]: Clone::clone
233//! [`Cell`]: core::cell::Cell
234//! [`RefCell`]: core::cell::RefCell
235//! [arc]: crate::sync::Arc
236//! [`Deref`]: core::ops::Deref
237//! [downgrade]: Rc::downgrade
238//! [upgrade]: Weak::upgrade
239//! [mutability]: core::cell#introducing-mutability-inside-of-something-immutable
240//! [fully qualified syntax]: https://doc.rust-lang.org/book/ch19-03-advanced-traits.html#fully-qualified-syntax-for-disambiguation-calling-methods-with-the-same-name
241
242#![stable(feature = "rust1", since = "1.0.0")]
243
244use core::any::Any;
245use core::cell::Cell;
246#[cfg(not(no_global_oom_handling))]
247use core::clone::CloneToUninit;
248use core::clone::UseCloned;
249use core::cmp::Ordering;
250use core::hash::{Hash, Hasher};
251use core::intrinsics::abort;
252#[cfg(not(no_global_oom_handling))]
253use core::iter;
254use core::marker::{PhantomData, Unsize};
255use core::mem::{self, ManuallyDrop, align_of_val_raw};
256use core::num::NonZeroUsize;
257use core::ops::{CoerceUnsized, Deref, DerefMut, DerefPure, DispatchFromDyn, LegacyReceiver};
258use core::panic::{RefUnwindSafe, UnwindSafe};
259#[cfg(not(no_global_oom_handling))]
260use core::pin::Pin;
261use core::pin::PinCoerceUnsized;
262use core::ptr::{self, NonNull, drop_in_place};
263#[cfg(not(no_global_oom_handling))]
264use core::slice::from_raw_parts_mut;
265use core::{borrow, fmt, hint};
266
267#[cfg(not(no_global_oom_handling))]
268use crate::alloc::handle_alloc_error;
269use crate::alloc::{AllocError, Allocator, Global, Layout};
270use crate::borrow::{Cow, ToOwned};
271use crate::boxed::Box;
272#[cfg(not(no_global_oom_handling))]
273use crate::string::String;
274#[cfg(not(no_global_oom_handling))]
275use crate::vec::Vec;
276
277// This is repr(C) to future-proof against possible field-reordering, which
278// would interfere with otherwise safe [into|from]_raw() of transmutable
279// inner types.
280// repr(align(2)) (forcing alignment to at least 2) is required because usize
281// has 1-byte alignment on AVR.
282#[repr(C, align(2))]
283struct RcInner<T: ?Sized> {
284 strong: Cell<usize>,
285 weak: Cell<usize>,
286 value: T,
287}
288
289/// Calculate layout for `RcInner<T>` using the inner value's layout
290fn rc_inner_layout_for_value_layout(layout: Layout) -> Layout {
291 // Calculate layout using the given value layout.
292 // Previously, layout was calculated on the expression
293 // `&*(ptr as *const RcInner<T>)`, but this created a misaligned
294 // reference (see #54908).
295 Layout::new::<RcInner<()>>().extend(layout).unwrap().0.pad_to_align()
296}
297
298/// A single-threaded reference-counting pointer. 'Rc' stands for 'Reference
299/// Counted'.
300///
301/// See the [module-level documentation](./index.html) for more details.
302///
303/// The inherent methods of `Rc` are all associated functions, which means
304/// that you have to call them as e.g., [`Rc::get_mut(&mut value)`][get_mut] instead of
305/// `value.get_mut()`. This avoids conflicts with methods of the inner type `T`.
306///
307/// [get_mut]: Rc::get_mut
308#[doc(search_unbox)]
309#[rustc_diagnostic_item = "Rc"]
310#[stable(feature = "rust1", since = "1.0.0")]
311#[rustc_insignificant_dtor]
312pub struct Rc<
313 T: ?Sized,
314 #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
315> {
316 ptr: NonNull<RcInner<T>>,
317 phantom: PhantomData<RcInner<T>>,
318 alloc: A,
319}
320
321#[stable(feature = "rust1", since = "1.0.0")]
322impl<T: ?Sized, A: Allocator> !Send for Rc<T, A> {}
323
324// Note that this negative impl isn't strictly necessary for correctness,
325// as `Rc` transitively contains a `Cell`, which is itself `!Sync`.
326// However, given how important `Rc`'s `!Sync`-ness is,
327// having an explicit negative impl is nice for documentation purposes
328// and results in nicer error messages.
329#[stable(feature = "rust1", since = "1.0.0")]
330impl<T: ?Sized, A: Allocator> !Sync for Rc<T, A> {}
331
332#[stable(feature = "catch_unwind", since = "1.9.0")]
333impl<T: RefUnwindSafe + ?Sized, A: Allocator + UnwindSafe> UnwindSafe for Rc<T, A> {}
334#[stable(feature = "rc_ref_unwind_safe", since = "1.58.0")]
335impl<T: RefUnwindSafe + ?Sized, A: Allocator + UnwindSafe> RefUnwindSafe for Rc<T, A> {}
336
337#[unstable(feature = "coerce_unsized", issue = "18598")]
338impl<T: ?Sized + Unsize<U>, U: ?Sized, A: Allocator> CoerceUnsized<Rc<U, A>> for Rc<T, A> {}
339
340#[unstable(feature = "dispatch_from_dyn", issue = "none")]
341impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Rc<U>> for Rc<T> {}
342
343impl<T: ?Sized> Rc<T> {
344 #[inline]
345 unsafe fn from_inner(ptr: NonNull<RcInner<T>>) -> Self {
346 unsafe { Self::from_inner_in(ptr, Global) }
347 }
348
349 #[inline]
350 unsafe fn from_ptr(ptr: *mut RcInner<T>) -> Self {
351 unsafe { Self::from_inner(NonNull::new_unchecked(ptr)) }
352 }
353}
354
355impl<T: ?Sized, A: Allocator> Rc<T, A> {
356 #[inline(always)]
357 fn inner(&self) -> &RcInner<T> {
358 // This unsafety is ok because while this Rc is alive we're guaranteed
359 // that the inner pointer is valid.
360 unsafe { self.ptr.as_ref() }
361 }
362
363 #[inline]
364 fn into_inner_with_allocator(this: Self) -> (NonNull<RcInner<T>>, A) {
365 let this = mem::ManuallyDrop::new(this);
366 (this.ptr, unsafe { ptr::read(&this.alloc) })
367 }
368
369 #[inline]
370 unsafe fn from_inner_in(ptr: NonNull<RcInner<T>>, alloc: A) -> Self {
371 Self { ptr, phantom: PhantomData, alloc }
372 }
373
374 #[inline]
375 unsafe fn from_ptr_in(ptr: *mut RcInner<T>, alloc: A) -> Self {
376 unsafe { Self::from_inner_in(NonNull::new_unchecked(ptr), alloc) }
377 }
378
379 // Non-inlined part of `drop`.
380 #[inline(never)]
381 unsafe fn drop_slow(&mut self) {
382 // Reconstruct the "strong weak" pointer and drop it when this
383 // variable goes out of scope. This ensures that the memory is
384 // deallocated even if the destructor of `T` panics.
385 let _weak = Weak { ptr: self.ptr, alloc: &self.alloc };
386
387 // Destroy the contained object.
388 // We cannot use `get_mut_unchecked` here, because `self.alloc` is borrowed.
389 unsafe {
390 ptr::drop_in_place(&mut (*self.ptr.as_ptr()).value);
391 }
392 }
393}
394
395impl<T> Rc<T> {
396 /// Constructs a new `Rc<T>`.
397 ///
398 /// # Examples
399 ///
400 /// ```
401 /// use std::rc::Rc;
402 ///
403 /// let five = Rc::new(5);
404 /// ```
405 #[cfg(not(no_global_oom_handling))]
406 #[stable(feature = "rust1", since = "1.0.0")]
407 pub fn new(value: T) -> Rc<T> {
408 // There is an implicit weak pointer owned by all the strong
409 // pointers, which ensures that the weak destructor never frees
410 // the allocation while the strong destructor is running, even
411 // if the weak pointer is stored inside the strong one.
412 unsafe {
413 Self::from_inner(
414 Box::leak(Box::new(RcInner { strong: Cell::new(1), weak: Cell::new(1), value }))
415 .into(),
416 )
417 }
418 }
419
420 /// Constructs a new `Rc<T>` while giving you a `Weak<T>` to the allocation,
421 /// to allow you to construct a `T` which holds a weak pointer to itself.
422 ///
423 /// Generally, a structure circularly referencing itself, either directly or
424 /// indirectly, should not hold a strong reference to itself in order to prevent a memory leak.
425 /// Using this function, you get access to the weak pointer during the
426 /// initialization of `T`, before the `Rc<T>` is created, such that you can
427 /// clone and store it inside the `T`.
428 ///
429 /// `new_cyclic` first allocates the managed allocation for the `Rc<T>`,
430 /// then calls your closure, giving it a `Weak<T>` to this allocation,
431 /// and only afterwards completes the construction of the `Rc<T>` by placing
432 /// the `T` returned from your closure into the allocation.
433 ///
434 /// Since the new `Rc<T>` is not fully-constructed until `Rc<T>::new_cyclic`
435 /// returns, calling [`upgrade`] on the weak reference inside your closure will
436 /// fail and result in a `None` value.
437 ///
438 /// # Panics
439 ///
440 /// If `data_fn` panics, the panic is propagated to the caller, and the
441 /// temporary [`Weak<T>`] is dropped normally.
442 ///
443 /// # Examples
444 ///
445 /// ```
446 /// # #![allow(dead_code)]
447 /// use std::rc::{Rc, Weak};
448 ///
449 /// struct Gadget {
450 /// me: Weak<Gadget>,
451 /// }
452 ///
453 /// impl Gadget {
454 /// /// Constructs a reference counted Gadget.
455 /// fn new() -> Rc<Self> {
456 /// // `me` is a `Weak<Gadget>` pointing at the new allocation of the
457 /// // `Rc` we're constructing.
458 /// Rc::new_cyclic(|me| {
459 /// // Create the actual struct here.
460 /// Gadget { me: me.clone() }
461 /// })
462 /// }
463 ///
464 /// /// Returns a reference counted pointer to Self.
465 /// fn me(&self) -> Rc<Self> {
466 /// self.me.upgrade().unwrap()
467 /// }
468 /// }
469 /// ```
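///
/// As an extra sketch of the behavior described above: upgrading the `Weak`
/// inside the closure yields `None`, because the `Rc` is not fully constructed yet.
///
/// ```
/// use std::rc::Rc;
///
/// let rc = Rc::new_cyclic(|weak| {
///     // The strong count is still 0 at this point, so `upgrade` fails.
///     assert!(weak.upgrade().is_none());
///     5
/// });
/// assert_eq!(*rc, 5);
/// ```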
470 /// [`upgrade`]: Weak::upgrade
471 #[cfg(not(no_global_oom_handling))]
472 #[stable(feature = "arc_new_cyclic", since = "1.60.0")]
473 pub fn new_cyclic<F>(data_fn: F) -> Rc<T>
474 where
475 F: FnOnce(&Weak<T>) -> T,
476 {
477 Self::new_cyclic_in(data_fn, Global)
478 }
479
480 /// Constructs a new `Rc` with uninitialized contents.
481 ///
482 /// # Examples
483 ///
484 /// ```
485 /// use std::rc::Rc;
486 ///
487 /// let mut five = Rc::<u32>::new_uninit();
488 ///
489 /// // Deferred initialization:
490 /// Rc::get_mut(&mut five).unwrap().write(5);
491 ///
492 /// let five = unsafe { five.assume_init() };
493 ///
494 /// assert_eq!(*five, 5)
495 /// ```
496 #[cfg(not(no_global_oom_handling))]
497 #[stable(feature = "new_uninit", since = "1.82.0")]
498 #[must_use]
499 pub fn new_uninit() -> Rc<mem::MaybeUninit<T>> {
500 unsafe {
501 Rc::from_ptr(Rc::allocate_for_layout(
502 Layout::new::<T>(),
503 |layout| Global.allocate(layout),
504 <*mut u8>::cast,
505 ))
506 }
507 }
508
509 /// Constructs a new `Rc` with uninitialized contents, with the memory
510 /// being filled with `0` bytes.
511 ///
512 /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
513 /// incorrect usage of this method.
514 ///
515 /// # Examples
516 ///
517 /// ```
518 /// use std::rc::Rc;
519 ///
520 /// let zero = Rc::<u32>::new_zeroed();
521 /// let zero = unsafe { zero.assume_init() };
522 ///
523 /// assert_eq!(*zero, 0)
524 /// ```
525 ///
526 /// [zeroed]: mem::MaybeUninit::zeroed
527 #[cfg(not(no_global_oom_handling))]
528 #[stable(feature = "new_zeroed_alloc", since = "CURRENT_RUSTC_VERSION")]
529 #[must_use]
530 pub fn new_zeroed() -> Rc<mem::MaybeUninit<T>> {
531 unsafe {
532 Rc::from_ptr(Rc::allocate_for_layout(
533 Layout::new::<T>(),
534 |layout| Global.allocate_zeroed(layout),
535 <*mut u8>::cast,
536 ))
537 }
538 }
539
540 /// Constructs a new `Rc<T>`, returning an error if the allocation fails.
541 ///
542 /// # Examples
543 ///
544 /// ```
545 /// #![feature(allocator_api)]
546 /// use std::rc::Rc;
547 ///
548 /// let five = Rc::try_new(5);
549 /// # Ok::<(), std::alloc::AllocError>(())
550 /// ```
551 #[unstable(feature = "allocator_api", issue = "32838")]
552 pub fn try_new(value: T) -> Result<Rc<T>, AllocError> {
553 // There is an implicit weak pointer owned by all the strong
554 // pointers, which ensures that the weak destructor never frees
555 // the allocation while the strong destructor is running, even
556 // if the weak pointer is stored inside the strong one.
557 unsafe {
558 Ok(Self::from_inner(
559 Box::leak(Box::try_new(RcInner {
560 strong: Cell::new(1),
561 weak: Cell::new(1),
562 value,
563 })?)
564 .into(),
565 ))
566 }
567 }
568
569 /// Constructs a new `Rc` with uninitialized contents, returning an error if the allocation fails.
570 ///
571 /// # Examples
572 ///
573 /// ```
574 /// #![feature(allocator_api)]
575 ///
576 /// use std::rc::Rc;
577 ///
578 /// let mut five = Rc::<u32>::try_new_uninit()?;
579 ///
580 /// // Deferred initialization:
581 /// Rc::get_mut(&mut five).unwrap().write(5);
582 ///
583 /// let five = unsafe { five.assume_init() };
584 ///
585 /// assert_eq!(*five, 5);
586 /// # Ok::<(), std::alloc::AllocError>(())
587 /// ```
588 #[unstable(feature = "allocator_api", issue = "32838")]
589 pub fn try_new_uninit() -> Result<Rc<mem::MaybeUninit<T>>, AllocError> {
590 unsafe {
591 Ok(Rc::from_ptr(Rc::try_allocate_for_layout(
592 Layout::new::<T>(),
593 |layout| Global.allocate(layout),
594 <*mut u8>::cast,
595 )?))
596 }
597 }
598
599 /// Constructs a new `Rc` with uninitialized contents, with the memory
600 /// being filled with `0` bytes, returning an error if the allocation fails.
601 ///
602 /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
603 /// incorrect usage of this method.
604 ///
605 /// # Examples
606 ///
607 /// ```
608 /// #![feature(allocator_api)]
609 ///
610 /// use std::rc::Rc;
611 ///
612 /// let zero = Rc::<u32>::try_new_zeroed()?;
613 /// let zero = unsafe { zero.assume_init() };
614 ///
615 /// assert_eq!(*zero, 0);
616 /// # Ok::<(), std::alloc::AllocError>(())
617 /// ```
618 ///
619 /// [zeroed]: mem::MaybeUninit::zeroed
620 #[unstable(feature = "allocator_api", issue = "32838")]
621 pub fn try_new_zeroed() -> Result<Rc<mem::MaybeUninit<T>>, AllocError> {
622 unsafe {
623 Ok(Rc::from_ptr(Rc::try_allocate_for_layout(
624 Layout::new::<T>(),
625 |layout| Global.allocate_zeroed(layout),
626 <*mut u8>::cast,
627 )?))
628 }
629 }
630 /// Constructs a new `Pin<Rc<T>>`. If `T` does not implement `Unpin`, then
631 /// `value` will be pinned in memory and unable to be moved.
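///
/// # Examples
///
/// A minimal sketch of pinning a value:
///
/// ```
/// use std::rc::Rc;
///
/// let pinned = Rc::pin(5);
/// assert_eq!(*pinned, 5);
/// ```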
632 #[cfg(not(no_global_oom_handling))]
633 #[stable(feature = "pin", since = "1.33.0")]
634 #[must_use]
635 pub fn pin(value: T) -> Pin<Rc<T>> {
636 unsafe { Pin::new_unchecked(Rc::new(value)) }
637 }
638}
639
640impl<T, A: Allocator> Rc<T, A> {
641 /// Constructs a new `Rc` in the provided allocator.
642 ///
643 /// # Examples
644 ///
645 /// ```
646 /// #![feature(allocator_api)]
647 /// use std::rc::Rc;
648 /// use std::alloc::System;
649 ///
650 /// let five = Rc::new_in(5, System);
651 /// ```
652 #[cfg(not(no_global_oom_handling))]
653 #[unstable(feature = "allocator_api", issue = "32838")]
654 #[inline]
655 pub fn new_in(value: T, alloc: A) -> Rc<T, A> {
656 // NOTE: Prefer match over unwrap_or_else since the closure is sometimes not
657 // inlinable, which would make the code size bigger.
658 match Self::try_new_in(value, alloc) {
659 Ok(m) => m,
660 Err(_) => handle_alloc_error(Layout::new::<RcInner<T>>()),
661 }
662 }
663
664 /// Constructs a new `Rc` with uninitialized contents in the provided allocator.
665 ///
666 /// # Examples
667 ///
668 /// ```
669 /// #![feature(get_mut_unchecked)]
670 /// #![feature(allocator_api)]
671 ///
672 /// use std::rc::Rc;
673 /// use std::alloc::System;
674 ///
675 /// let mut five = Rc::<u32, _>::new_uninit_in(System);
676 ///
677 /// let five = unsafe {
678 /// // Deferred initialization:
679 /// Rc::get_mut_unchecked(&mut five).as_mut_ptr().write(5);
680 ///
681 /// five.assume_init()
682 /// };
683 ///
684 /// assert_eq!(*five, 5)
685 /// ```
686 #[cfg(not(no_global_oom_handling))]
687 #[unstable(feature = "allocator_api", issue = "32838")]
688 #[inline]
689 pub fn new_uninit_in(alloc: A) -> Rc<mem::MaybeUninit<T>, A> {
690 unsafe {
691 Rc::from_ptr_in(
692 Rc::allocate_for_layout(
693 Layout::new::<T>(),
694 |layout| alloc.allocate(layout),
695 <*mut u8>::cast,
696 ),
697 alloc,
698 )
699 }
700 }
701
702 /// Constructs a new `Rc` with uninitialized contents, with the memory
703 /// being filled with `0` bytes, in the provided allocator.
704 ///
705 /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
706 /// incorrect usage of this method.
707 ///
708 /// # Examples
709 ///
710 /// ```
711 /// #![feature(allocator_api)]
712 ///
713 /// use std::rc::Rc;
714 /// use std::alloc::System;
715 ///
716 /// let zero = Rc::<u32, _>::new_zeroed_in(System);
717 /// let zero = unsafe { zero.assume_init() };
718 ///
719 /// assert_eq!(*zero, 0)
720 /// ```
721 ///
722 /// [zeroed]: mem::MaybeUninit::zeroed
723 #[cfg(not(no_global_oom_handling))]
724 #[unstable(feature = "allocator_api", issue = "32838")]
725 #[inline]
726 pub fn new_zeroed_in(alloc: A) -> Rc<mem::MaybeUninit<T>, A> {
727 unsafe {
728 Rc::from_ptr_in(
729 Rc::allocate_for_layout(
730 Layout::new::<T>(),
731 |layout| alloc.allocate_zeroed(layout),
732 <*mut u8>::cast,
733 ),
734 alloc,
735 )
736 }
737 }
738
739 /// Constructs a new `Rc<T, A>` in the given allocator while giving you a `Weak<T, A>` to the allocation,
740 /// to allow you to construct a `T` which holds a weak pointer to itself.
741 ///
742 /// Generally, a structure circularly referencing itself, either directly or
743 /// indirectly, should not hold a strong reference to itself in order to prevent a memory leak.
744 /// Using this function, you get access to the weak pointer during the
745 /// initialization of `T`, before the `Rc<T, A>` is created, such that you can
746 /// clone and store it inside the `T`.
747 ///
748 /// `new_cyclic_in` first allocates the managed allocation for the `Rc<T, A>`,
749 /// then calls your closure, giving it a `Weak<T, A>` to this allocation,
750 /// and only afterwards completes the construction of the `Rc<T, A>` by placing
751 /// the `T` returned from your closure into the allocation.
752 ///
753 /// Since the new `Rc<T, A>` is not fully-constructed until `Rc<T, A>::new_cyclic_in`
754 /// returns, calling [`upgrade`] on the weak reference inside your closure will
755 /// fail and result in a `None` value.
756 ///
757 /// # Panics
758 ///
759 /// If `data_fn` panics, the panic is propagated to the caller, and the
760 /// temporary [`Weak<T, A>`] is dropped normally.
761 ///
762 /// # Examples
763 ///
764 /// See [`new_cyclic`].
765 ///
766 /// [`new_cyclic`]: Rc::new_cyclic
767 /// [`upgrade`]: Weak::upgrade
768 #[cfg(not(no_global_oom_handling))]
769 #[unstable(feature = "allocator_api", issue = "32838")]
770 pub fn new_cyclic_in<F>(data_fn: F, alloc: A) -> Rc<T, A>
771 where
772 F: FnOnce(&Weak<T, A>) -> T,
773 {
774 // Construct the inner in the "uninitialized" state with a single
775 // weak reference.
776 let (uninit_raw_ptr, alloc) = Box::into_raw_with_allocator(Box::new_in(
777 RcInner {
778 strong: Cell::new(0),
779 weak: Cell::new(1),
780 value: mem::MaybeUninit::<T>::uninit(),
781 },
782 alloc,
783 ));
784 let uninit_ptr: NonNull<_> = (unsafe { &mut *uninit_raw_ptr }).into();
785 let init_ptr: NonNull<RcInner<T>> = uninit_ptr.cast();
786
787 let weak = Weak { ptr: init_ptr, alloc };
788
789 // It's important we don't give up ownership of the weak pointer, or
790 // else the memory might be freed by the time `data_fn` returns. If
791 // we really wanted to pass ownership, we could create an additional
792 // weak pointer for ourselves, but this would result in additional
793 // updates to the weak reference count which might not be necessary
794 // otherwise.
795 let data = data_fn(&weak);
796
797 let strong = unsafe {
798 let inner = init_ptr.as_ptr();
799 ptr::write(&raw mut (*inner).value, data);
800
801 let prev_value = (*inner).strong.get();
802 debug_assert_eq!(prev_value, 0, "No prior strong references should exist");
803 (*inner).strong.set(1);
804
805 // Strong references should collectively own a shared weak reference,
806 // so don't run the destructor for our old weak reference.
807 // Calling into_raw_with_allocator has the double effect of giving us back the allocator,
808 // and forgetting the weak reference.
809 let alloc = weak.into_raw_with_allocator().1;
810
811 Rc::from_inner_in(init_ptr, alloc)
812 };
813
814 strong
815 }
816
817 /// Constructs a new `Rc<T>` in the provided allocator, returning an error if the allocation
818 /// fails.
819 ///
820 /// # Examples
821 ///
822 /// ```
823 /// #![feature(allocator_api)]
824 /// use std::rc::Rc;
825 /// use std::alloc::System;
826 ///
827 /// let five = Rc::try_new_in(5, System);
828 /// # Ok::<(), std::alloc::AllocError>(())
829 /// ```
830 #[unstable(feature = "allocator_api", issue = "32838")]
831 #[inline]
832 pub fn try_new_in(value: T, alloc: A) -> Result<Self, AllocError> {
833 // There is an implicit weak pointer owned by all the strong
834 // pointers, which ensures that the weak destructor never frees
835 // the allocation while the strong destructor is running, even
836 // if the weak pointer is stored inside the strong one.
837 let (ptr, alloc) = Box::into_unique(Box::try_new_in(
838 RcInner { strong: Cell::new(1), weak: Cell::new(1), value },
839 alloc,
840 )?);
841 Ok(unsafe { Self::from_inner_in(ptr.into(), alloc) })
842 }
843
844 /// Constructs a new `Rc` with uninitialized contents, in the provided allocator, returning an
845 /// error if the allocation fails.
846 ///
847 /// # Examples
848 ///
849 /// ```
850 /// #![feature(allocator_api)]
851 /// #![feature(get_mut_unchecked)]
852 ///
853 /// use std::rc::Rc;
854 /// use std::alloc::System;
855 ///
856 /// let mut five = Rc::<u32, _>::try_new_uninit_in(System)?;
857 ///
858 /// let five = unsafe {
859 /// // Deferred initialization:
860 /// Rc::get_mut_unchecked(&mut five).as_mut_ptr().write(5);
861 ///
862 /// five.assume_init()
863 /// };
864 ///
865 /// assert_eq!(*five, 5);
866 /// # Ok::<(), std::alloc::AllocError>(())
867 /// ```
868 #[unstable(feature = "allocator_api", issue = "32838")]
869 #[inline]
870 pub fn try_new_uninit_in(alloc: A) -> Result<Rc<mem::MaybeUninit<T>, A>, AllocError> {
871 unsafe {
872 Ok(Rc::from_ptr_in(
873 Rc::try_allocate_for_layout(
874 Layout::new::<T>(),
875 |layout| alloc.allocate(layout),
876 <*mut u8>::cast,
877 )?,
878 alloc,
879 ))
880 }
881 }
882
883 /// Constructs a new `Rc` with uninitialized contents, with the memory
884 /// being filled with `0` bytes, in the provided allocator, returning an error if the allocation
885 /// fails.
886 ///
887 /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
888 /// incorrect usage of this method.
889 ///
890 /// # Examples
891 ///
892 /// ```
893 /// #![feature(allocator_api)]
894 ///
895 /// use std::rc::Rc;
896 /// use std::alloc::System;
897 ///
898 /// let zero = Rc::<u32, _>::try_new_zeroed_in(System)?;
899 /// let zero = unsafe { zero.assume_init() };
900 ///
901 /// assert_eq!(*zero, 0);
902 /// # Ok::<(), std::alloc::AllocError>(())
903 /// ```
904 ///
905 /// [zeroed]: mem::MaybeUninit::zeroed
906 #[unstable(feature = "allocator_api", issue = "32838")]
907 #[inline]
908 pub fn try_new_zeroed_in(alloc: A) -> Result<Rc<mem::MaybeUninit<T>, A>, AllocError> {
909 unsafe {
910 Ok(Rc::from_ptr_in(
911 Rc::try_allocate_for_layout(
912 Layout::new::<T>(),
913 |layout| alloc.allocate_zeroed(layout),
914 <*mut u8>::cast,
915 )?,
916 alloc,
917 ))
918 }
919 }
920
921 /// Constructs a new `Pin<Rc<T>>` in the provided allocator. If `T` does not implement `Unpin`, then
922 /// `value` will be pinned in memory and unable to be moved.
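///
/// # Examples
///
/// A minimal sketch using the `System` allocator (requires `allocator_api`):
///
/// ```
/// #![feature(allocator_api)]
///
/// use std::rc::Rc;
/// use std::alloc::System;
///
/// let pinned = Rc::pin_in(5, System);
/// assert_eq!(*pinned, 5);
/// ```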
923 #[cfg(not(no_global_oom_handling))]
924 #[unstable(feature = "allocator_api", issue = "32838")]
925 #[inline]
926 pub fn pin_in(value: T, alloc: A) -> Pin<Self>
927 where
928 A: 'static,
929 {
930 unsafe { Pin::new_unchecked(Rc::new_in(value, alloc)) }
931 }
932
933 /// Returns the inner value, if the `Rc` has exactly one strong reference.
934 ///
935 /// Otherwise, an [`Err`] is returned with the same `Rc` that was
936 /// passed in.
937 ///
938 /// This will succeed even if there are outstanding weak references.
939 ///
940 /// # Examples
941 ///
942 /// ```
943 /// use std::rc::Rc;
944 ///
945 /// let x = Rc::new(3);
946 /// assert_eq!(Rc::try_unwrap(x), Ok(3));
947 ///
948 /// let x = Rc::new(4);
949 /// let _y = Rc::clone(&x);
950 /// assert_eq!(*Rc::try_unwrap(x).unwrap_err(), 4);
951 /// ```
952 #[inline]
953 #[stable(feature = "rc_unique", since = "1.4.0")]
954 pub fn try_unwrap(this: Self) -> Result<T, Self> {
955 if Rc::strong_count(&this) == 1 {
956 let this = ManuallyDrop::new(this);
957
958 let val: T = unsafe { ptr::read(&**this) }; // copy the contained object
959 let alloc: A = unsafe { ptr::read(&this.alloc) }; // copy the allocator
960
961 // Indicate to Weaks that they can't be promoted by decrementing
962 // the strong count, and then remove the implicit "strong weak"
963 // pointer while also handling drop logic by just crafting a
964 // fake Weak.
965 this.inner().dec_strong();
966 let _weak = Weak { ptr: this.ptr, alloc };
967 Ok(val)
968 } else {
969 Err(this)
970 }
971 }
972
973 /// Returns the inner value, if the `Rc` has exactly one strong reference.
974 ///
975 /// Otherwise, [`None`] is returned and the `Rc` is dropped.
976 ///
977 /// This will succeed even if there are outstanding weak references.
978 ///
979 /// If `Rc::into_inner` is called on every clone of this `Rc`,
980 /// it is guaranteed that exactly one of the calls returns the inner value.
981 /// This means in particular that the inner value is not dropped.
982 ///
983 /// [`Rc::try_unwrap`] is conceptually similar to `Rc::into_inner`.
984 /// And while they are meant for different use-cases, `Rc::into_inner(this)`
985 /// is in fact equivalent to <code>[Rc::try_unwrap]\(this).[ok][Result::ok]()</code>.
986 /// (Note that the same kind of equivalence does **not** hold true for
987 /// [`Arc`](crate::sync::Arc), due to race conditions that do not apply to `Rc`!)
988 ///
989 /// # Examples
990 ///
991 /// ```
992 /// use std::rc::Rc;
993 ///
994 /// let x = Rc::new(3);
995 /// assert_eq!(Rc::into_inner(x), Some(3));
996 ///
997 /// let x = Rc::new(4);
998 /// let y = Rc::clone(&x);
999 ///
1000 /// assert_eq!(Rc::into_inner(y), None);
1001 /// assert_eq!(Rc::into_inner(x), Some(4));
1002 /// ```
1003 #[inline]
1004 #[stable(feature = "rc_into_inner", since = "1.70.0")]
1005 pub fn into_inner(this: Self) -> Option<T> {
1006 Rc::try_unwrap(this).ok()
1007 }
1008}
1009
1010impl<T> Rc<[T]> {
1011 /// Constructs a new reference-counted slice with uninitialized contents.
1012 ///
1013 /// # Examples
1014 ///
1015 /// ```
1016 /// use std::rc::Rc;
1017 ///
1018 /// let mut values = Rc::<[u32]>::new_uninit_slice(3);
1019 ///
1020 /// // Deferred initialization:
1021 /// let data = Rc::get_mut(&mut values).unwrap();
1022 /// data[0].write(1);
1023 /// data[1].write(2);
1024 /// data[2].write(3);
1025 ///
1026 /// let values = unsafe { values.assume_init() };
1027 ///
1028 /// assert_eq!(*values, [1, 2, 3])
1029 /// ```
1030 #[cfg(not(no_global_oom_handling))]
1031 #[stable(feature = "new_uninit", since = "1.82.0")]
1032 #[must_use]
1033 pub fn new_uninit_slice(len: usize) -> Rc<[mem::MaybeUninit<T>]> {
1034 unsafe { Rc::from_ptr(Rc::allocate_for_slice(len)) }
1035 }
1036
1037 /// Constructs a new reference-counted slice with uninitialized contents, with the memory being
1038 /// filled with `0` bytes.
1039 ///
1040 /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
1041 /// incorrect usage of this method.
1042 ///
1043 /// # Examples
1044 ///
1045 /// ```
1046 /// use std::rc::Rc;
1047 ///
1048 /// let values = Rc::<[u32]>::new_zeroed_slice(3);
1049 /// let values = unsafe { values.assume_init() };
1050 ///
1051 /// assert_eq!(*values, [0, 0, 0])
1052 /// ```
1053 ///
1054 /// [zeroed]: mem::MaybeUninit::zeroed
1055 #[cfg(not(no_global_oom_handling))]
1056 #[stable(feature = "new_zeroed_alloc", since = "CURRENT_RUSTC_VERSION")]
1057 #[must_use]
1058 pub fn new_zeroed_slice(len: usize) -> Rc<[mem::MaybeUninit<T>]> {
1059 unsafe {
1060 Rc::from_ptr(Rc::allocate_for_layout(
1061 Layout::array::<T>(len).unwrap(),
1062 |layout| Global.allocate_zeroed(layout),
1063 |mem| {
1064 ptr::slice_from_raw_parts_mut(mem.cast::<T>(), len)
1065 as *mut RcInner<[mem::MaybeUninit<T>]>
1066 },
1067 ))
1068 }
1069 }
1070
1071 /// Converts the reference-counted slice into a reference-counted array.
1072 ///
1073 /// This operation does not reallocate; the underlying array of the slice is simply reinterpreted as an array type.
1074 ///
1075 /// If `N` is not exactly equal to the length of `self`, then this method returns `None`.
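///
/// # Examples
///
/// A minimal sketch (this method is unstable, gated behind `slice_as_array`):
///
/// ```
/// #![feature(slice_as_array)]
///
/// use std::rc::Rc;
///
/// let slice: Rc<[u32]> = Rc::new([1, 2, 3]);
/// let array: Rc<[u32; 3]> = slice.into_array().unwrap();
/// assert_eq!(*array, [1, 2, 3]);
/// ```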
1076 #[unstable(feature = "slice_as_array", issue = "133508")]
1077 #[inline]
1078 #[must_use]
1079 pub fn into_array<const N: usize>(self) -> Option<Rc<[T; N]>> {
1080 if self.len() == N {
1081 let ptr = Self::into_raw(self) as *const [T; N];
1082
1083 // SAFETY: The underlying array of a slice has the exact same layout as an actual array `[T; N]` if `N` is equal to the slice's length.
1084 let me = unsafe { Rc::from_raw(ptr) };
1085 Some(me)
1086 } else {
1087 None
1088 }
1089 }
1090}
1091
1092impl<T, A: Allocator> Rc<[T], A> {
1093 /// Constructs a new reference-counted slice with uninitialized contents.
1094 ///
1095 /// # Examples
1096 ///
1097 /// ```
1098 /// #![feature(get_mut_unchecked)]
1099 /// #![feature(allocator_api)]
1100 ///
1101 /// use std::rc::Rc;
1102 /// use std::alloc::System;
1103 ///
1104 /// let mut values = Rc::<[u32], _>::new_uninit_slice_in(3, System);
1105 ///
1106 /// let values = unsafe {
1107 /// // Deferred initialization:
1108 /// Rc::get_mut_unchecked(&mut values)[0].as_mut_ptr().write(1);
1109 /// Rc::get_mut_unchecked(&mut values)[1].as_mut_ptr().write(2);
1110 /// Rc::get_mut_unchecked(&mut values)[2].as_mut_ptr().write(3);
1111 ///
1112 /// values.assume_init()
1113 /// };
1114 ///
1115 /// assert_eq!(*values, [1, 2, 3])
1116 /// ```
1117 #[cfg(not(no_global_oom_handling))]
1118 #[unstable(feature = "allocator_api", issue = "32838")]
1119 #[inline]
1120 pub fn new_uninit_slice_in(len: usize, alloc: A) -> Rc<[mem::MaybeUninit<T>], A> {
1121 unsafe { Rc::from_ptr_in(Rc::allocate_for_slice_in(len, &alloc), alloc) }
1122 }
1123
1124 /// Constructs a new reference-counted slice with uninitialized contents, with the memory being
1125 /// filled with `0` bytes.
1126 ///
1127 /// See [`MaybeUninit::zeroed`][zeroed] for examples of correct and
1128 /// incorrect usage of this method.
1129 ///
1130 /// # Examples
1131 ///
1132 /// ```
1133 /// #![feature(allocator_api)]
1134 ///
1135 /// use std::rc::Rc;
1136 /// use std::alloc::System;
1137 ///
1138 /// let values = Rc::<[u32], _>::new_zeroed_slice_in(3, System);
1139 /// let values = unsafe { values.assume_init() };
1140 ///
1141 /// assert_eq!(*values, [0, 0, 0])
1142 /// ```
1143 ///
1144 /// [zeroed]: mem::MaybeUninit::zeroed
1145 #[cfg(not(no_global_oom_handling))]
1146 #[unstable(feature = "allocator_api", issue = "32838")]
1147 #[inline]
1148 pub fn new_zeroed_slice_in(len: usize, alloc: A) -> Rc<[mem::MaybeUninit<T>], A> {
1149 unsafe {
1150 Rc::from_ptr_in(
1151 Rc::allocate_for_layout(
1152 Layout::array::<T>(len).unwrap(),
1153 |layout| alloc.allocate_zeroed(layout),
1154 |mem| {
1155 ptr::slice_from_raw_parts_mut(mem.cast::<T>(), len)
1156 as *mut RcInner<[mem::MaybeUninit<T>]>
1157 },
1158 ),
1159 alloc,
1160 )
1161 }
1162 }
1163}
1164
1165impl<T, A: Allocator> Rc<mem::MaybeUninit<T>, A> {
1166 /// Converts to `Rc<T>`.
1167 ///
1168 /// # Safety
1169 ///
1170 /// As with [`MaybeUninit::assume_init`],
1171 /// it is up to the caller to guarantee that the inner value
1172 /// really is in an initialized state.
1173 /// Calling this when the content is not yet fully initialized
1174 /// causes immediate undefined behavior.
1175 ///
1176 /// [`MaybeUninit::assume_init`]: mem::MaybeUninit::assume_init
1177 ///
1178 /// # Examples
1179 ///
1180 /// ```
1181 /// use std::rc::Rc;
1182 ///
1183 /// let mut five = Rc::<u32>::new_uninit();
1184 ///
1185 /// // Deferred initialization:
1186 /// Rc::get_mut(&mut five).unwrap().write(5);
1187 ///
1188 /// let five = unsafe { five.assume_init() };
1189 ///
1190 /// assert_eq!(*five, 5)
1191 /// ```
1192 #[stable(feature = "new_uninit", since = "1.82.0")]
1193 #[inline]
1194 pub unsafe fn assume_init(self) -> Rc<T, A> {
1195 let (ptr, alloc) = Rc::into_inner_with_allocator(self);
1196 unsafe { Rc::from_inner_in(ptr.cast(), alloc) }
1197 }
1198}
1199
1200impl<T, A: Allocator> Rc<[mem::MaybeUninit<T>], A> {
1201 /// Converts to `Rc<[T]>`.
1202 ///
1203 /// # Safety
1204 ///
1205 /// As with [`MaybeUninit::assume_init`],
1206 /// it is up to the caller to guarantee that the inner value
1207 /// really is in an initialized state.
1208 /// Calling this when the content is not yet fully initialized
1209 /// causes immediate undefined behavior.
1210 ///
1211 /// [`MaybeUninit::assume_init`]: mem::MaybeUninit::assume_init
1212 ///
1213 /// # Examples
1214 ///
1215 /// ```
1216 /// use std::rc::Rc;
1217 ///
1218 /// let mut values = Rc::<[u32]>::new_uninit_slice(3);
1219 ///
1220 /// // Deferred initialization:
1221 /// let data = Rc::get_mut(&mut values).unwrap();
1222 /// data[0].write(1);
1223 /// data[1].write(2);
1224 /// data[2].write(3);
1225 ///
1226 /// let values = unsafe { values.assume_init() };
1227 ///
1228 /// assert_eq!(*values, [1, 2, 3])
1229 /// ```
1230 #[stable(feature = "new_uninit", since = "1.82.0")]
1231 #[inline]
1232 pub unsafe fn assume_init(self) -> Rc<[T], A> {
1233 let (ptr, alloc) = Rc::into_inner_with_allocator(self);
1234 unsafe { Rc::from_ptr_in(ptr.as_ptr() as _, alloc) }
1235 }
1236}
1237
1238impl<T: ?Sized> Rc<T> {
1239 /// Constructs an `Rc<T>` from a raw pointer.
1240 ///
1241 /// The raw pointer must have been previously returned by a call to
1242 /// [`Rc<U>::into_raw`][into_raw] with the following requirements:
1243 ///
1244 /// * If `U` is sized, it must have the same size and alignment as `T`. This
1245 /// is trivially true if `U` is `T`.
1246 /// * If `U` is unsized, its data pointer must have the same size and
1247 /// alignment as `T`. This is trivially true if `Rc<U>` was constructed
1248 /// through `Rc<T>` and then converted to `Rc<U>` through an [unsized
1249 /// coercion].
1250 ///
1251 /// Note that if `U` or `U`'s data pointer is not `T` but has the same size
1252 /// and alignment, this is basically like transmuting references of
1253 /// different types. See [`mem::transmute`][transmute] for more information
1254 /// on what restrictions apply in this case.
1255 ///
1256 /// The raw pointer must point to a block of memory allocated by the global allocator.
1257 ///
1258 /// The user of `from_raw` has to make sure a specific value of `T` is only
1259 /// dropped once.
1260 ///
1261 /// This function is unsafe because improper use may lead to memory unsafety,
1262 /// even if the returned `Rc<T>` is never accessed.
1263 ///
1264 /// [into_raw]: Rc::into_raw
1265 /// [transmute]: core::mem::transmute
1266 /// [unsized coercion]: https://doc.rust-lang.org/reference/type-coercions.html#unsized-coercions
1267 ///
1268 /// # Examples
1269 ///
1270 /// ```
1271 /// use std::rc::Rc;
1272 ///
1273 /// let x = Rc::new("hello".to_owned());
1274 /// let x_ptr = Rc::into_raw(x);
1275 ///
1276 /// unsafe {
1277 /// // Convert back to an `Rc` to prevent leak.
1278 /// let x = Rc::from_raw(x_ptr);
1279 /// assert_eq!(&*x, "hello");
1280 ///
1281 /// // Further calls to `Rc::from_raw(x_ptr)` would be memory-unsafe.
1282 /// }
1283 ///
1284 /// // The memory was freed when `x` went out of scope above, so `x_ptr` is now dangling!
1285 /// ```
1286 ///
1287 /// Convert a slice back into its original array:
1288 ///
1289 /// ```
1290 /// use std::rc::Rc;
1291 ///
1292 /// let x: Rc<[u32]> = Rc::new([1, 2, 3]);
1293 /// let x_ptr: *const [u32] = Rc::into_raw(x);
1294 ///
1295 /// unsafe {
1296 /// let x: Rc<[u32; 3]> = Rc::from_raw(x_ptr.cast::<[u32; 3]>());
1297 /// assert_eq!(&*x, &[1, 2, 3]);
1298 /// }
1299 /// ```
1300 #[inline]
1301 #[stable(feature = "rc_raw", since = "1.17.0")]
1302 pub unsafe fn from_raw(ptr: *const T) -> Self {
1303 unsafe { Self::from_raw_in(ptr, Global) }
1304 }
1305
1306 /// Consumes the `Rc`, returning the wrapped pointer.
1307 ///
1308 /// To avoid a memory leak the pointer must be converted back to an `Rc` using
1309 /// [`Rc::from_raw`].
1310 ///
1311 /// # Examples
1312 ///
1313 /// ```
1314 /// use std::rc::Rc;
1315 ///
1316 /// let x = Rc::new("hello".to_owned());
1317 /// let x_ptr = Rc::into_raw(x);
1318 /// assert_eq!(unsafe { &*x_ptr }, "hello");
1319 /// # // Prevent leaks for Miri.
1320 /// # drop(unsafe { Rc::from_raw(x_ptr) });
1321 /// ```
1322 #[must_use = "losing the pointer will leak memory"]
1323 #[stable(feature = "rc_raw", since = "1.17.0")]
1324 #[rustc_never_returns_null_ptr]
1325 pub fn into_raw(this: Self) -> *const T {
1326 let this = ManuallyDrop::new(this);
1327 Self::as_ptr(&*this)
1328 }
1329
1330 /// Increments the strong reference count on the `Rc<T>` associated with the
1331 /// provided pointer by one.
1332 ///
1333 /// # Safety
1334 ///
1335 /// The pointer must have been obtained through `Rc::into_raw` and must satisfy the
1336 /// same layout requirements specified in [`Rc::from_raw_in`][from_raw_in].
1337 /// The associated `Rc` instance must be valid (i.e. the strong count must be at
1338 /// least 1) for the duration of this method, and `ptr` must point to a block of memory
1339 /// allocated by the global allocator.
1340 ///
1341 /// [from_raw_in]: Rc::from_raw_in
1342 ///
1343 /// # Examples
1344 ///
1345 /// ```
1346 /// use std::rc::Rc;
1347 ///
1348 /// let five = Rc::new(5);
1349 ///
1350 /// unsafe {
1351 /// let ptr = Rc::into_raw(five);
1352 /// Rc::increment_strong_count(ptr);
1353 ///
1354 /// let five = Rc::from_raw(ptr);
1355 /// assert_eq!(2, Rc::strong_count(&five));
1356 /// # // Prevent leaks for Miri.
1357 /// # Rc::decrement_strong_count(ptr);
1358 /// }
1359 /// ```
1360 #[inline]
1361 #[stable(feature = "rc_mutate_strong_count", since = "1.53.0")]
1362 pub unsafe fn increment_strong_count(ptr: *const T) {
1363 unsafe { Self::increment_strong_count_in(ptr, Global) }
1364 }
1365
1366 /// Decrements the strong reference count on the `Rc<T>` associated with the
1367 /// provided pointer by one.
1368 ///
1369 /// # Safety
1370 ///
1371 /// The pointer must have been obtained through `Rc::into_raw` and must satisfy the
1372 /// same layout requirements specified in [`Rc::from_raw_in`][from_raw_in].
1373 /// The associated `Rc` instance must be valid (i.e. the strong count must be at
1374 /// least 1) when invoking this method, and `ptr` must point to a block of memory
1375 /// allocated by the global allocator. This method can be used to release the final `Rc` and
1376 /// backing storage, but **should not** be called after the final `Rc` has been released.
1377 ///
1378 /// [from_raw_in]: Rc::from_raw_in
1379 ///
1380 /// # Examples
1381 ///
1382 /// ```
1383 /// use std::rc::Rc;
1384 ///
1385 /// let five = Rc::new(5);
1386 ///
1387 /// unsafe {
1388 /// let ptr = Rc::into_raw(five);
1389 /// Rc::increment_strong_count(ptr);
1390 ///
1391 /// let five = Rc::from_raw(ptr);
1392 /// assert_eq!(2, Rc::strong_count(&five));
1393 /// Rc::decrement_strong_count(ptr);
1394 /// assert_eq!(1, Rc::strong_count(&five));
1395 /// }
1396 /// ```
1397 #[inline]
1398 #[stable(feature = "rc_mutate_strong_count", since = "1.53.0")]
1399 pub unsafe fn decrement_strong_count(ptr: *const T) {
1400 unsafe { Self::decrement_strong_count_in(ptr, Global) }
1401 }
1402}
1403
1404impl<T: ?Sized, A: Allocator> Rc<T, A> {
1405 /// Returns a reference to the underlying allocator.
1406 ///
1407 /// Note: this is an associated function, which means that you have
1408 /// to call it as `Rc::allocator(&r)` instead of `r.allocator()`. This
1409 /// is so that there is no conflict with a method on the inner type.
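///
/// # Examples
///
/// A minimal sketch using the `System` allocator (requires `allocator_api`):
///
/// ```
/// #![feature(allocator_api)]
///
/// use std::rc::Rc;
/// use std::alloc::System;
///
/// let five = Rc::new_in(5, System);
/// let _allocator: &System = Rc::allocator(&five);
/// ```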
1410 #[inline]
1411 #[unstable(feature = "allocator_api", issue = "32838")]
1412 pub fn allocator(this: &Self) -> &A {
1413 &this.alloc
1414 }
1415
1416 /// Consumes the `Rc`, returning the wrapped pointer and allocator.
1417 ///
1418 /// To avoid a memory leak the pointer must be converted back to an `Rc` using
1419 /// [`Rc::from_raw_in`].
1420 ///
1421 /// # Examples
1422 ///
1423 /// ```
1424 /// #![feature(allocator_api)]
1425 /// use std::rc::Rc;
1426 /// use std::alloc::System;
1427 ///
1428 /// let x = Rc::new_in("hello".to_owned(), System);
1429 /// let (ptr, alloc) = Rc::into_raw_with_allocator(x);
1430 /// assert_eq!(unsafe { &*ptr }, "hello");
1431 /// let x = unsafe { Rc::from_raw_in(ptr, alloc) };
1432 /// assert_eq!(&*x, "hello");
1433 /// ```
1434 #[must_use = "losing the pointer will leak memory"]
1435 #[unstable(feature = "allocator_api", issue = "32838")]
1436 pub fn into_raw_with_allocator(this: Self) -> (*const T, A) {
1437 let this = mem::ManuallyDrop::new(this);
1438 let ptr = Self::as_ptr(&this);
1439 // Safety: `this` is ManuallyDrop so the allocator will not be double-dropped
1440 let alloc = unsafe { ptr::read(&this.alloc) };
1441 (ptr, alloc)
1442 }
1443
1444 /// Provides a raw pointer to the data.
1445 ///
1446 /// The counts are not affected in any way and the `Rc` is not consumed. The pointer is valid
1447 /// for as long as there are strong counts in the `Rc`.
1448 ///
1449 /// # Examples
1450 ///
1451 /// ```
1452 /// use std::rc::Rc;
1453 ///
1454 /// let x = Rc::new(0);
1455 /// let y = Rc::clone(&x);
1456 /// let x_ptr = Rc::as_ptr(&x);
1457 /// assert_eq!(x_ptr, Rc::as_ptr(&y));
1458 /// assert_eq!(unsafe { *x_ptr }, 0);
1459 /// ```
1460 #[stable(feature = "weak_into_raw", since = "1.45.0")]
1461 #[rustc_never_returns_null_ptr]
1462 pub fn as_ptr(this: &Self) -> *const T {
1463 let ptr: *mut RcInner<T> = NonNull::as_ptr(this.ptr);
1464
1465 // SAFETY: This cannot go through Deref::deref or Rc::inner because
1466 // this is required to retain raw/mut provenance such that e.g. `get_mut` can
1467 // write through the pointer after the Rc is recovered through `from_raw`.
1468 unsafe { &raw mut (*ptr).value }
1469 }
1470
1471 /// Constructs an `Rc<T, A>` from a raw pointer in the provided allocator.
1472 ///
1473 /// The raw pointer must have been previously returned by a call to [`Rc<U,
1474 /// A>::into_raw`][into_raw] with the following requirements:
1475 ///
1476 /// * If `U` is sized, it must have the same size and alignment as `T`. This
1477 /// is trivially true if `U` is `T`.
1478 /// * If `U` is unsized, its data pointer must have the same size and
1479 /// alignment as `T`. This is trivially true if `Rc<U>` was constructed
1480 /// through `Rc<T>` and then converted to `Rc<U>` through an [unsized
1481 /// coercion].
1482 ///
1483 /// Note that if `U` or `U`'s data pointer is not `T` but has the same size
1484 /// and alignment, this is basically like transmuting references of
1485 /// different types. See [`mem::transmute`][transmute] for more information
1486 /// on what restrictions apply in this case.
1487 ///
1488 /// The raw pointer must point to a block of memory allocated by `alloc`.
1489 ///
1490 /// The user of `from_raw` has to make sure a specific value of `T` is only
1491 /// dropped once.
1492 ///
1493 /// This function is unsafe because improper use may lead to memory unsafety,
1494 /// even if the returned `Rc<T>` is never accessed.
1495 ///
1496 /// [into_raw]: Rc::into_raw
1497 /// [transmute]: core::mem::transmute
1498 /// [unsized coercion]: https://doc.rust-lang.org/reference/type-coercions.html#unsized-coercions
1499 ///
1500 /// # Examples
1501 ///
1502 /// ```
1503 /// #![feature(allocator_api)]
1504 ///
1505 /// use std::rc::Rc;
1506 /// use std::alloc::System;
1507 ///
1508 /// let x = Rc::new_in("hello".to_owned(), System);
1509 /// let (x_ptr, _alloc) = Rc::into_raw_with_allocator(x);
1510 ///
1511 /// unsafe {
1512 /// // Convert back to an `Rc` to prevent leak.
1513 /// let x = Rc::from_raw_in(x_ptr, System);
1514 /// assert_eq!(&*x, "hello");
1515 ///
1516 /// // Further calls to `Rc::from_raw(x_ptr)` would be memory-unsafe.
1517 /// }
1518 ///
1519 /// // The memory was freed when `x` went out of scope above, so `x_ptr` is now dangling!
1520 /// ```
1521 ///
1522 /// Convert a slice back into its original array:
1523 ///
1524 /// ```
1525 /// #![feature(allocator_api)]
1526 ///
1527 /// use std::rc::Rc;
1528 /// use std::alloc::System;
1529 ///
1530 /// let x: Rc<[u32], _> = Rc::new_in([1, 2, 3], System);
1531 /// let x_ptr: *const [u32] = Rc::into_raw_with_allocator(x).0;
1532 ///
1533 /// unsafe {
1534 /// let x: Rc<[u32; 3], _> = Rc::from_raw_in(x_ptr.cast::<[u32; 3]>(), System);
1535 /// assert_eq!(&*x, &[1, 2, 3]);
1536 /// }
1537 /// ```
1538 #[unstable(feature = "allocator_api", issue = "32838")]
1539 pub unsafe fn from_raw_in(ptr: *const T, alloc: A) -> Self {
1540 let offset = unsafe { data_offset(ptr) };
1541
1542 // Reverse the offset to find the original RcInner.
1543 let rc_ptr = unsafe { ptr.byte_sub(offset) as *mut RcInner<T> };
1544
1545 unsafe { Self::from_ptr_in(rc_ptr, alloc) }
1546 }
1547
1548 /// Creates a new [`Weak`] pointer to this allocation.
1549 ///
1550 /// # Examples
1551 ///
1552 /// ```
1553 /// use std::rc::Rc;
1554 ///
1555 /// let five = Rc::new(5);
1556 ///
1557 /// let weak_five = Rc::downgrade(&five);
1558 /// ```
1559 #[must_use = "this returns a new `Weak` pointer, \
1560 without modifying the original `Rc`"]
1561 #[stable(feature = "rc_weak", since = "1.4.0")]
1562 pub fn downgrade(this: &Self) -> Weak<T, A>
1563 where
1564 A: Clone,
1565 {
1566 this.inner().inc_weak();
1567 // Make sure we do not create a dangling Weak
1568 debug_assert!(!is_dangling(this.ptr.as_ptr()));
1569 Weak { ptr: this.ptr, alloc: this.alloc.clone() }
1570 }
1571
1572 /// Gets the number of [`Weak`] pointers to this allocation.
1573 ///
1574 /// # Examples
1575 ///
1576 /// ```
1577 /// use std::rc::Rc;
1578 ///
1579 /// let five = Rc::new(5);
1580 /// let _weak_five = Rc::downgrade(&five);
1581 ///
1582 /// assert_eq!(1, Rc::weak_count(&five));
1583 /// ```
1584 #[inline]
1585 #[stable(feature = "rc_counts", since = "1.15.0")]
1586 pub fn weak_count(this: &Self) -> usize {
1587 this.inner().weak() - 1
1588 }
1589
1590 /// Gets the number of strong (`Rc`) pointers to this allocation.
1591 ///
1592 /// # Examples
1593 ///
1594 /// ```
1595 /// use std::rc::Rc;
1596 ///
1597 /// let five = Rc::new(5);
1598 /// let _also_five = Rc::clone(&five);
1599 ///
1600 /// assert_eq!(2, Rc::strong_count(&five));
1601 /// ```
1602 #[inline]
1603 #[stable(feature = "rc_counts", since = "1.15.0")]
1604 pub fn strong_count(this: &Self) -> usize {
1605 this.inner().strong()
1606 }
1607
1608 /// Increments the strong reference count on the `Rc<T>` associated with the
1609 /// provided pointer by one.
1610 ///
1611 /// # Safety
1612 ///
1613 /// The pointer must have been obtained through `Rc::into_raw` and must satisfy the
1614 /// same layout requirements specified in [`Rc::from_raw_in`][from_raw_in].
1615 /// The associated `Rc` instance must be valid (i.e. the strong count must be at
1616 /// least 1) for the duration of this method, and `ptr` must point to a block of memory
1617 /// allocated by `alloc`.
1618 ///
1619 /// [from_raw_in]: Rc::from_raw_in
1620 ///
1621 /// # Examples
1622 ///
1623 /// ```
1624 /// #![feature(allocator_api)]
1625 ///
1626 /// use std::rc::Rc;
1627 /// use std::alloc::System;
1628 ///
1629 /// let five = Rc::new_in(5, System);
1630 ///
1631 /// unsafe {
1632 /// let (ptr, _alloc) = Rc::into_raw_with_allocator(five);
1633 /// Rc::increment_strong_count_in(ptr, System);
1634 ///
1635 /// let five = Rc::from_raw_in(ptr, System);
1636 /// assert_eq!(2, Rc::strong_count(&five));
1637 /// # // Prevent leaks for Miri.
1638 /// # Rc::decrement_strong_count_in(ptr, System);
1639 /// }
1640 /// ```
1641 #[inline]
1642 #[unstable(feature = "allocator_api", issue = "32838")]
1643 pub unsafe fn increment_strong_count_in(ptr: *const T, alloc: A)
1644 where
1645 A: Clone,
1646 {
1647 // Retain Rc, but don't touch refcount by wrapping in ManuallyDrop
1648 let rc = unsafe { mem::ManuallyDrop::new(Rc::<T, A>::from_raw_in(ptr, alloc)) };
1649 // Now increase refcount, but don't drop new refcount either
1650 let _rc_clone: mem::ManuallyDrop<_> = rc.clone();
1651 }
1652
1653 /// Decrements the strong reference count on the `Rc<T>` associated with the
1654 /// provided pointer by one.
1655 ///
1656 /// # Safety
1657 ///
/// The pointer must have been obtained through `Rc::into_raw` and must satisfy the
1659 /// same layout requirements specified in [`Rc::from_raw_in`][from_raw_in].
1660 /// The associated `Rc` instance must be valid (i.e. the strong count must be at
1661 /// least 1) when invoking this method, and `ptr` must point to a block of memory
1662 /// allocated by `alloc`. This method can be used to release the final `Rc` and
1663 /// backing storage, but **should not** be called after the final `Rc` has been released.
1664 ///
1665 /// [from_raw_in]: Rc::from_raw_in
1666 ///
1667 /// # Examples
1668 ///
1669 /// ```
1670 /// #![feature(allocator_api)]
1671 ///
1672 /// use std::rc::Rc;
1673 /// use std::alloc::System;
1674 ///
1675 /// let five = Rc::new_in(5, System);
1676 ///
1677 /// unsafe {
1678 /// let (ptr, _alloc) = Rc::into_raw_with_allocator(five);
1679 /// Rc::increment_strong_count_in(ptr, System);
1680 ///
1681 /// let five = Rc::from_raw_in(ptr, System);
1682 /// assert_eq!(2, Rc::strong_count(&five));
1683 /// Rc::decrement_strong_count_in(ptr, System);
1684 /// assert_eq!(1, Rc::strong_count(&five));
1685 /// }
1686 /// ```
1687 #[inline]
1688 #[unstable(feature = "allocator_api", issue = "32838")]
1689 pub unsafe fn decrement_strong_count_in(ptr: *const T, alloc: A) {
1690 unsafe { drop(Rc::from_raw_in(ptr, alloc)) };
1691 }
1692
1693 /// Returns `true` if there are no other `Rc` or [`Weak`] pointers to
1694 /// this allocation.
1695 #[inline]
1696 fn is_unique(this: &Self) -> bool {
1697 Rc::weak_count(this) == 0 && Rc::strong_count(this) == 1
1698 }
1699
1700 /// Returns a mutable reference into the given `Rc`, if there are
1701 /// no other `Rc` or [`Weak`] pointers to the same allocation.
1702 ///
1703 /// Returns [`None`] otherwise, because it is not safe to
1704 /// mutate a shared value.
1705 ///
1706 /// See also [`make_mut`][make_mut], which will [`clone`][clone]
1707 /// the inner value when there are other `Rc` pointers.
1708 ///
1709 /// [make_mut]: Rc::make_mut
1710 /// [clone]: Clone::clone
1711 ///
1712 /// # Examples
1713 ///
1714 /// ```
1715 /// use std::rc::Rc;
1716 ///
1717 /// let mut x = Rc::new(3);
1718 /// *Rc::get_mut(&mut x).unwrap() = 4;
1719 /// assert_eq!(*x, 4);
1720 ///
1721 /// let _y = Rc::clone(&x);
1722 /// assert!(Rc::get_mut(&mut x).is_none());
1723 /// ```
1724 #[inline]
1725 #[stable(feature = "rc_unique", since = "1.4.0")]
1726 pub fn get_mut(this: &mut Self) -> Option<&mut T> {
1727 if Rc::is_unique(this) { unsafe { Some(Rc::get_mut_unchecked(this)) } } else { None }
1728 }
1729
1730 /// Returns a mutable reference into the given `Rc`,
1731 /// without any check.
1732 ///
1733 /// See also [`get_mut`], which is safe and does appropriate checks.
1734 ///
1735 /// [`get_mut`]: Rc::get_mut
1736 ///
1737 /// # Safety
1738 ///
1739 /// If any other `Rc` or [`Weak`] pointers to the same allocation exist, then
1740 /// they must not be dereferenced or have active borrows for the duration
1741 /// of the returned borrow, and their inner type must be exactly the same as the
/// inner type of this `Rc` (including lifetimes). This is trivially the case if no
1743 /// such pointers exist, for example immediately after `Rc::new`.
1744 ///
1745 /// # Examples
1746 ///
1747 /// ```
1748 /// #![feature(get_mut_unchecked)]
1749 ///
1750 /// use std::rc::Rc;
1751 ///
1752 /// let mut x = Rc::new(String::new());
1753 /// unsafe {
1754 /// Rc::get_mut_unchecked(&mut x).push_str("foo")
1755 /// }
1756 /// assert_eq!(*x, "foo");
1757 /// ```
1758 /// Other `Rc` pointers to the same allocation must be to the same type.
1759 /// ```no_run
1760 /// #![feature(get_mut_unchecked)]
1761 ///
1762 /// use std::rc::Rc;
1763 ///
1764 /// let x: Rc<str> = Rc::from("Hello, world!");
1765 /// let mut y: Rc<[u8]> = x.clone().into();
1766 /// unsafe {
1767 /// // this is Undefined Behavior, because x's inner type is str, not [u8]
1768 /// Rc::get_mut_unchecked(&mut y).fill(0xff); // 0xff is invalid in UTF-8
1769 /// }
1770 /// println!("{}", &*x); // Invalid UTF-8 in a str
1771 /// ```
1772 /// Other `Rc` pointers to the same allocation must be to the exact same type, including lifetimes.
1773 /// ```no_run
1774 /// #![feature(get_mut_unchecked)]
1775 ///
1776 /// use std::rc::Rc;
1777 ///
1778 /// let x: Rc<&str> = Rc::new("Hello, world!");
1779 /// {
1780 /// let s = String::from("Oh, no!");
1781 /// let mut y: Rc<&str> = x.clone();
1782 /// unsafe {
1783 /// // this is Undefined Behavior, because x's inner type
1784 /// // is &'long str, not &'short str
1785 /// *Rc::get_mut_unchecked(&mut y) = &s;
1786 /// }
1787 /// }
1788 /// println!("{}", &*x); // Use-after-free
1789 /// ```
1790 #[inline]
1791 #[unstable(feature = "get_mut_unchecked", issue = "63292")]
1792 pub unsafe fn get_mut_unchecked(this: &mut Self) -> &mut T {
1793 // We are careful to *not* create a reference covering the "count" fields, as
1794 // this would conflict with accesses to the reference counts (e.g. by `Weak`).
1795 unsafe { &mut (*this.ptr.as_ptr()).value }
1796 }
1797
1798 #[inline]
1799 #[stable(feature = "ptr_eq", since = "1.17.0")]
/// Returns `true` if the two `Rc`s point to the same allocation, in a manner similar to
/// [`ptr::eq`]. This function ignores the metadata of `dyn Trait` pointers.
1802 ///
1803 /// # Examples
1804 ///
1805 /// ```
1806 /// use std::rc::Rc;
1807 ///
1808 /// let five = Rc::new(5);
1809 /// let same_five = Rc::clone(&five);
1810 /// let other_five = Rc::new(5);
1811 ///
1812 /// assert!(Rc::ptr_eq(&five, &same_five));
1813 /// assert!(!Rc::ptr_eq(&five, &other_five));
1814 /// ```
1815 pub fn ptr_eq(this: &Self, other: &Self) -> bool {
1816 ptr::addr_eq(this.ptr.as_ptr(), other.ptr.as_ptr())
1817 }
1818}
1819
1820#[cfg(not(no_global_oom_handling))]
1821impl<T: ?Sized + CloneToUninit, A: Allocator + Clone> Rc<T, A> {
1822 /// Makes a mutable reference into the given `Rc`.
1823 ///
1824 /// If there are other `Rc` pointers to the same allocation, then `make_mut` will
1825 /// [`clone`] the inner value to a new allocation to ensure unique ownership. This is also
1826 /// referred to as clone-on-write.
1827 ///
1828 /// However, if there are no other `Rc` pointers to this allocation, but some [`Weak`]
1829 /// pointers, then the [`Weak`] pointers will be disassociated and the inner value will not
1830 /// be cloned.
1831 ///
1832 /// See also [`get_mut`], which will fail rather than cloning the inner value
1833 /// or disassociating [`Weak`] pointers.
1834 ///
1835 /// [`clone`]: Clone::clone
1836 /// [`get_mut`]: Rc::get_mut
1837 ///
1838 /// # Examples
1839 ///
1840 /// ```
1841 /// use std::rc::Rc;
1842 ///
1843 /// let mut data = Rc::new(5);
1844 ///
1845 /// *Rc::make_mut(&mut data) += 1; // Won't clone anything
1846 /// let mut other_data = Rc::clone(&data); // Won't clone inner data
1847 /// *Rc::make_mut(&mut data) += 1; // Clones inner data
1848 /// *Rc::make_mut(&mut data) += 1; // Won't clone anything
1849 /// *Rc::make_mut(&mut other_data) *= 2; // Won't clone anything
1850 ///
1851 /// // Now `data` and `other_data` point to different allocations.
1852 /// assert_eq!(*data, 8);
1853 /// assert_eq!(*other_data, 12);
1854 /// ```
1855 ///
1856 /// [`Weak`] pointers will be disassociated:
1857 ///
1858 /// ```
1859 /// use std::rc::Rc;
1860 ///
1861 /// let mut data = Rc::new(75);
1862 /// let weak = Rc::downgrade(&data);
1863 ///
1864 /// assert!(75 == *data);
1865 /// assert!(75 == *weak.upgrade().unwrap());
1866 ///
1867 /// *Rc::make_mut(&mut data) += 1;
1868 ///
1869 /// assert!(76 == *data);
1870 /// assert!(weak.upgrade().is_none());
1871 /// ```
1872 #[inline]
1873 #[stable(feature = "rc_unique", since = "1.4.0")]
1874 pub fn make_mut(this: &mut Self) -> &mut T {
1875 let size_of_val = size_of_val::<T>(&**this);
1876
1877 if Rc::strong_count(this) != 1 {
1878 // Gotta clone the data, there are other Rcs.
1879
1880 let this_data_ref: &T = &**this;
1881 // `in_progress` drops the allocation if we panic before finishing initializing it.
1882 let mut in_progress: UniqueRcUninit<T, A> =
1883 UniqueRcUninit::new(this_data_ref, this.alloc.clone());
1884
1885 // Initialize with clone of this.
1886 let initialized_clone = unsafe {
1887 // Clone. If the clone panics, `in_progress` will be dropped and clean up.
1888 this_data_ref.clone_to_uninit(in_progress.data_ptr().cast());
1889 // Cast type of pointer, now that it is initialized.
1890 in_progress.into_rc()
1891 };
1892
1893 // Replace `this` with newly constructed Rc.
1894 *this = initialized_clone;
1895 } else if Rc::weak_count(this) != 0 {
1896 // Can just steal the data, all that's left is Weaks
1897
1898 // We don't need panic-protection like the above branch does, but we might as well
1899 // use the same mechanism.
1900 let mut in_progress: UniqueRcUninit<T, A> =
1901 UniqueRcUninit::new(&**this, this.alloc.clone());
1902 unsafe {
1903 // Initialize `in_progress` with move of **this.
1904 // We have to express this in terms of bytes because `T: ?Sized`; there is no
1905 // operation that just copies a value based on its `size_of_val()`.
1906 ptr::copy_nonoverlapping(
1907 ptr::from_ref(&**this).cast::<u8>(),
1908 in_progress.data_ptr().cast::<u8>(),
1909 size_of_val,
1910 );
1911
1912 this.inner().dec_strong();
1913 // Remove implicit strong-weak ref (no need to craft a fake
1914 // Weak here -- we know other Weaks can clean up for us)
1915 this.inner().dec_weak();
1916 // Replace `this` with newly constructed Rc that has the moved data.
1917 ptr::write(this, in_progress.into_rc());
1918 }
1919 }
1920 // This unsafety is ok because we're guaranteed that the pointer
// returned is the *only* pointer that will ever be returned to the inner value. Our
1922 // reference count is guaranteed to be 1 at this point, and we required
1923 // the `Rc<T>` itself to be `mut`, so we're returning the only possible
1924 // reference to the allocation.
1925 unsafe { &mut this.ptr.as_mut().value }
1926 }
1927}
1928
1929impl<T: Clone, A: Allocator> Rc<T, A> {
1930 /// If we have the only reference to `T` then unwrap it. Otherwise, clone `T` and return the
1931 /// clone.
1932 ///
1933 /// Assuming `rc_t` is of type `Rc<T>`, this function is functionally equivalent to
1934 /// `(*rc_t).clone()`, but will avoid cloning the inner value where possible.
1935 ///
1936 /// # Examples
1937 ///
1938 /// ```
1939 /// # use std::{ptr, rc::Rc};
1940 /// let inner = String::from("test");
1941 /// let ptr = inner.as_ptr();
1942 ///
1943 /// let rc = Rc::new(inner);
1944 /// let inner = Rc::unwrap_or_clone(rc);
1945 /// // The inner value was not cloned
1946 /// assert!(ptr::eq(ptr, inner.as_ptr()));
1947 ///
1948 /// let rc = Rc::new(inner);
1949 /// let rc2 = rc.clone();
1950 /// let inner = Rc::unwrap_or_clone(rc);
1951 /// // Because there were 2 references, we had to clone the inner value.
1952 /// assert!(!ptr::eq(ptr, inner.as_ptr()));
1953 /// // `rc2` is the last reference, so when we unwrap it we get back
1954 /// // the original `String`.
1955 /// let inner = Rc::unwrap_or_clone(rc2);
1956 /// assert!(ptr::eq(ptr, inner.as_ptr()));
1957 /// ```
1958 #[inline]
1959 #[stable(feature = "arc_unwrap_or_clone", since = "1.76.0")]
1960 pub fn unwrap_or_clone(this: Self) -> T {
1961 Rc::try_unwrap(this).unwrap_or_else(|rc| (*rc).clone())
1962 }
1963}
1964
1965impl<A: Allocator> Rc<dyn Any, A> {
1966 /// Attempts to downcast the `Rc<dyn Any>` to a concrete type.
1967 ///
1968 /// # Examples
1969 ///
1970 /// ```
1971 /// use std::any::Any;
1972 /// use std::rc::Rc;
1973 ///
1974 /// fn print_if_string(value: Rc<dyn Any>) {
1975 /// if let Ok(string) = value.downcast::<String>() {
1976 /// println!("String ({}): {}", string.len(), string);
1977 /// }
1978 /// }
1979 ///
1980 /// let my_string = "Hello World".to_string();
1981 /// print_if_string(Rc::new(my_string));
1982 /// print_if_string(Rc::new(0i8));
1983 /// ```
1984 #[inline]
1985 #[stable(feature = "rc_downcast", since = "1.29.0")]
1986 pub fn downcast<T: Any>(self) -> Result<Rc<T, A>, Self> {
1987 if (*self).is::<T>() {
1988 unsafe {
1989 let (ptr, alloc) = Rc::into_inner_with_allocator(self);
1990 Ok(Rc::from_inner_in(ptr.cast(), alloc))
1991 }
1992 } else {
1993 Err(self)
1994 }
1995 }
1996
1997 /// Downcasts the `Rc<dyn Any>` to a concrete type.
1998 ///
1999 /// For a safe alternative see [`downcast`].
2000 ///
2001 /// # Examples
2002 ///
2003 /// ```
2004 /// #![feature(downcast_unchecked)]
2005 ///
2006 /// use std::any::Any;
2007 /// use std::rc::Rc;
2008 ///
2009 /// let x: Rc<dyn Any> = Rc::new(1_usize);
2010 ///
2011 /// unsafe {
2012 /// assert_eq!(*x.downcast_unchecked::<usize>(), 1);
2013 /// }
2014 /// ```
2015 ///
2016 /// # Safety
2017 ///
2018 /// The contained value must be of type `T`. Calling this method
2019 /// with the incorrect type is *undefined behavior*.
///
2022 /// [`downcast`]: Self::downcast
2023 #[inline]
2024 #[unstable(feature = "downcast_unchecked", issue = "90850")]
2025 pub unsafe fn downcast_unchecked<T: Any>(self) -> Rc<T, A> {
2026 unsafe {
2027 let (ptr, alloc) = Rc::into_inner_with_allocator(self);
2028 Rc::from_inner_in(ptr.cast(), alloc)
2029 }
2030 }
2031}
2032
2033impl<T: ?Sized> Rc<T> {
2034 /// Allocates an `RcInner<T>` with sufficient space for
2035 /// a possibly-unsized inner value where the value has the layout provided.
2036 ///
2037 /// The function `mem_to_rc_inner` is called with the data pointer
/// and must return a (potentially fat) pointer for the `RcInner<T>`.
2039 #[cfg(not(no_global_oom_handling))]
2040 unsafe fn allocate_for_layout(
2041 value_layout: Layout,
2042 allocate: impl FnOnce(Layout) -> Result<NonNull<[u8]>, AllocError>,
2043 mem_to_rc_inner: impl FnOnce(*mut u8) -> *mut RcInner<T>,
2044 ) -> *mut RcInner<T> {
2045 let layout = rc_inner_layout_for_value_layout(value_layout);
2046 unsafe {
2047 Rc::try_allocate_for_layout(value_layout, allocate, mem_to_rc_inner)
2048 .unwrap_or_else(|_| handle_alloc_error(layout))
2049 }
2050 }
2051
2052 /// Allocates an `RcInner<T>` with sufficient space for
2053 /// a possibly-unsized inner value where the value has the layout provided,
2054 /// returning an error if allocation fails.
2055 ///
2056 /// The function `mem_to_rc_inner` is called with the data pointer
/// and must return a (potentially fat) pointer for the `RcInner<T>`.
2058 #[inline]
2059 unsafe fn try_allocate_for_layout(
2060 value_layout: Layout,
2061 allocate: impl FnOnce(Layout) -> Result<NonNull<[u8]>, AllocError>,
2062 mem_to_rc_inner: impl FnOnce(*mut u8) -> *mut RcInner<T>,
2063 ) -> Result<*mut RcInner<T>, AllocError> {
2064 let layout = rc_inner_layout_for_value_layout(value_layout);
2065
2066 // Allocate for the layout.
2067 let ptr = allocate(layout)?;
2068
2069 // Initialize the RcInner
2070 let inner = mem_to_rc_inner(ptr.as_non_null_ptr().as_ptr());
2071 unsafe {
2072 debug_assert_eq!(Layout::for_value_raw(inner), layout);
2073
2074 (&raw mut (*inner).strong).write(Cell::new(1));
2075 (&raw mut (*inner).weak).write(Cell::new(1));
2076 }
2077
2078 Ok(inner)
2079 }
2080}
2081
2082impl<T: ?Sized, A: Allocator> Rc<T, A> {
2083 /// Allocates an `RcInner<T>` with sufficient space for an unsized inner value
2084 #[cfg(not(no_global_oom_handling))]
2085 unsafe fn allocate_for_ptr_in(ptr: *const T, alloc: &A) -> *mut RcInner<T> {
2086 // Allocate for the `RcInner<T>` using the given value.
2087 unsafe {
2088 Rc::<T>::allocate_for_layout(
2089 Layout::for_value_raw(ptr),
2090 |layout| alloc.allocate(layout),
2091 |mem| mem.with_metadata_of(ptr as *const RcInner<T>),
2092 )
2093 }
2094 }
2095
2096 #[cfg(not(no_global_oom_handling))]
2097 fn from_box_in(src: Box<T, A>) -> Rc<T, A> {
2098 unsafe {
2099 let value_size = size_of_val(&*src);
2100 let ptr = Self::allocate_for_ptr_in(&*src, Box::allocator(&src));
2101
2102 // Copy value as bytes
2103 ptr::copy_nonoverlapping(
2104 (&raw const *src) as *const u8,
2105 (&raw mut (*ptr).value) as *mut u8,
2106 value_size,
2107 );
2108
2109 // Free the allocation without dropping its contents
2110 let (bptr, alloc) = Box::into_raw_with_allocator(src);
2111 let src = Box::from_raw_in(bptr as *mut mem::ManuallyDrop<T>, alloc.by_ref());
2112 drop(src);
2113
2114 Self::from_ptr_in(ptr, alloc)
2115 }
2116 }
2117}
2118
2119impl<T> Rc<[T]> {
2120 /// Allocates an `RcInner<[T]>` with the given length.
2121 #[cfg(not(no_global_oom_handling))]
2122 unsafe fn allocate_for_slice(len: usize) -> *mut RcInner<[T]> {
2123 unsafe {
2124 Self::allocate_for_layout(
2125 Layout::array::<T>(len).unwrap(),
2126 |layout| Global.allocate(layout),
2127 |mem| ptr::slice_from_raw_parts_mut(mem.cast::<T>(), len) as *mut RcInner<[T]>,
2128 )
2129 }
2130 }
2131
2132 /// Copy elements from slice into newly allocated `Rc<[T]>`
2133 ///
2134 /// Unsafe because the caller must either take ownership or bind `T: Copy`
2135 #[cfg(not(no_global_oom_handling))]
2136 unsafe fn copy_from_slice(v: &[T]) -> Rc<[T]> {
2137 unsafe {
2138 let ptr = Self::allocate_for_slice(v.len());
2139 ptr::copy_nonoverlapping(v.as_ptr(), (&raw mut (*ptr).value) as *mut T, v.len());
2140 Self::from_ptr(ptr)
2141 }
2142 }
2143
2144 /// Constructs an `Rc<[T]>` from an iterator known to be of a certain size.
2145 ///
2146 /// Behavior is undefined should the size be wrong.
2147 #[cfg(not(no_global_oom_handling))]
2148 unsafe fn from_iter_exact(iter: impl Iterator<Item = T>, len: usize) -> Rc<[T]> {
2149 // Panic guard while cloning T elements.
2150 // In the event of a panic, elements that have been written
2151 // into the new RcInner will be dropped, then the memory freed.
2152 struct Guard<T> {
2153 mem: NonNull<u8>,
2154 elems: *mut T,
2155 layout: Layout,
2156 n_elems: usize,
2157 }
2158
2159 impl<T> Drop for Guard<T> {
2160 fn drop(&mut self) {
2161 unsafe {
2162 let slice = from_raw_parts_mut(self.elems, self.n_elems);
2163 ptr::drop_in_place(slice);
2164
2165 Global.deallocate(self.mem, self.layout);
2166 }
2167 }
2168 }
2169
2170 unsafe {
2171 let ptr = Self::allocate_for_slice(len);
2172
2173 let mem = ptr as *mut _ as *mut u8;
2174 let layout = Layout::for_value_raw(ptr);
2175
2176 // Pointer to first element
2177 let elems = (&raw mut (*ptr).value) as *mut T;
2178
2179 let mut guard = Guard { mem: NonNull::new_unchecked(mem), elems, layout, n_elems: 0 };
2180
2181 for (i, item) in iter.enumerate() {
2182 ptr::write(elems.add(i), item);
2183 guard.n_elems += 1;
2184 }
2185
2186 // All clear. Forget the guard so it doesn't free the new RcInner.
2187 mem::forget(guard);
2188
2189 Self::from_ptr(ptr)
2190 }
2191 }
2192}
2193
2194impl<T, A: Allocator> Rc<[T], A> {
2195 /// Allocates an `RcInner<[T]>` with the given length.
2196 #[inline]
2197 #[cfg(not(no_global_oom_handling))]
2198 unsafe fn allocate_for_slice_in(len: usize, alloc: &A) -> *mut RcInner<[T]> {
2199 unsafe {
2200 Rc::<[T]>::allocate_for_layout(
2201 Layout::array::<T>(len).unwrap(),
2202 |layout| alloc.allocate(layout),
2203 |mem| ptr::slice_from_raw_parts_mut(mem.cast::<T>(), len) as *mut RcInner<[T]>,
2204 )
2205 }
2206 }
2207}
2208
2209#[cfg(not(no_global_oom_handling))]
2210/// Specialization trait used for `From<&[T]>`.
2211trait RcFromSlice<T> {
2212 fn from_slice(slice: &[T]) -> Self;
2213}
2214
2215#[cfg(not(no_global_oom_handling))]
2216impl<T: Clone> RcFromSlice<T> for Rc<[T]> {
2217 #[inline]
2218 default fn from_slice(v: &[T]) -> Self {
2219 unsafe { Self::from_iter_exact(v.iter().cloned(), v.len()) }
2220 }
2221}
2222
2223#[cfg(not(no_global_oom_handling))]
2224impl<T: Copy> RcFromSlice<T> for Rc<[T]> {
2225 #[inline]
2226 fn from_slice(v: &[T]) -> Self {
2227 unsafe { Rc::copy_from_slice(v) }
2228 }
2229}
2230
2231#[stable(feature = "rust1", since = "1.0.0")]
2232impl<T: ?Sized, A: Allocator> Deref for Rc<T, A> {
2233 type Target = T;
2234
2235 #[inline(always)]
2236 fn deref(&self) -> &T {
2237 &self.inner().value
2238 }
2239}
2240
2241#[unstable(feature = "pin_coerce_unsized_trait", issue = "123430")]
2242unsafe impl<T: ?Sized, A: Allocator> PinCoerceUnsized for Rc<T, A> {}
2243
2244//#[unstable(feature = "unique_rc_arc", issue = "112566")]
2245#[unstable(feature = "pin_coerce_unsized_trait", issue = "123430")]
2246unsafe impl<T: ?Sized, A: Allocator> PinCoerceUnsized for UniqueRc<T, A> {}
2247
2248#[unstable(feature = "pin_coerce_unsized_trait", issue = "123430")]
2249unsafe impl<T: ?Sized, A: Allocator> PinCoerceUnsized for Weak<T, A> {}
2250
2251#[unstable(feature = "deref_pure_trait", issue = "87121")]
2252unsafe impl<T: ?Sized, A: Allocator> DerefPure for Rc<T, A> {}
2253
2254//#[unstable(feature = "unique_rc_arc", issue = "112566")]
2255#[unstable(feature = "deref_pure_trait", issue = "87121")]
2256unsafe impl<T: ?Sized, A: Allocator> DerefPure for UniqueRc<T, A> {}
2257
2258#[unstable(feature = "legacy_receiver_trait", issue = "none")]
2259impl<T: ?Sized> LegacyReceiver for Rc<T> {}
2260
2261#[stable(feature = "rust1", since = "1.0.0")]
2262unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for Rc<T, A> {
2263 /// Drops the `Rc`.
2264 ///
2265 /// This will decrement the strong reference count. If the strong reference
2266 /// count reaches zero then the only other references (if any) are
2267 /// [`Weak`], so we `drop` the inner value.
2268 ///
2269 /// # Examples
2270 ///
2271 /// ```
2272 /// use std::rc::Rc;
2273 ///
2274 /// struct Foo;
2275 ///
2276 /// impl Drop for Foo {
2277 /// fn drop(&mut self) {
2278 /// println!("dropped!");
2279 /// }
2280 /// }
2281 ///
2282 /// let foo = Rc::new(Foo);
2283 /// let foo2 = Rc::clone(&foo);
2284 ///
2285 /// drop(foo); // Doesn't print anything
2286 /// drop(foo2); // Prints "dropped!"
2287 /// ```
2288 #[inline]
2289 fn drop(&mut self) {
2290 unsafe {
2291 self.inner().dec_strong();
2292 if self.inner().strong() == 0 {
2293 self.drop_slow();
2294 }
2295 }
2296 }
2297}
2298
2299#[stable(feature = "rust1", since = "1.0.0")]
2300impl<T: ?Sized, A: Allocator + Clone> Clone for Rc<T, A> {
2301 /// Makes a clone of the `Rc` pointer.
2302 ///
2303 /// This creates another pointer to the same allocation, increasing the
2304 /// strong reference count.
2305 ///
2306 /// # Examples
2307 ///
2308 /// ```
2309 /// use std::rc::Rc;
2310 ///
2311 /// let five = Rc::new(5);
2312 ///
2313 /// let _ = Rc::clone(&five);
2314 /// ```
2315 #[inline]
2316 fn clone(&self) -> Self {
2317 unsafe {
2318 self.inner().inc_strong();
2319 Self::from_inner_in(self.ptr, self.alloc.clone())
2320 }
2321 }
2322}
2323
2324#[unstable(feature = "ergonomic_clones", issue = "132290")]
2325impl<T: ?Sized, A: Allocator + Clone> UseCloned for Rc<T, A> {}
2326
2327#[cfg(not(no_global_oom_handling))]
2328#[stable(feature = "rust1", since = "1.0.0")]
2329impl<T: Default> Default for Rc<T> {
2330 /// Creates a new `Rc<T>`, with the `Default` value for `T`.
2331 ///
2332 /// # Examples
2333 ///
2334 /// ```
2335 /// use std::rc::Rc;
2336 ///
2337 /// let x: Rc<i32> = Default::default();
2338 /// assert_eq!(*x, 0);
2339 /// ```
2340 #[inline]
2341 fn default() -> Self {
2342 unsafe {
2343 Self::from_inner(
2344 Box::leak(Box::write(
2345 Box::new_uninit(),
2346 RcInner { strong: Cell::new(1), weak: Cell::new(1), value: T::default() },
2347 ))
2348 .into(),
2349 )
2350 }
2351 }
2352}
2353
2354#[cfg(not(no_global_oom_handling))]
2355#[stable(feature = "more_rc_default_impls", since = "1.80.0")]
2356impl Default for Rc<str> {
2357 /// Creates an empty `str` inside an `Rc`.
2358 ///
2359 /// This may or may not share an allocation with other Rcs on the same thread.
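    ///
    /// # Examples
    ///
    /// A minimal check that the default value is the empty string:
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let empty: Rc<str> = Default::default();
    /// assert_eq!("", &empty[..]);
    /// ```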
2360 #[inline]
2361 fn default() -> Self {
2362 let rc = Rc::<[u8]>::default();
2363 // `[u8]` has the same layout as `str`.
2364 unsafe { Rc::from_raw(Rc::into_raw(rc) as *const str) }
2365 }
2366}
2367
2368#[cfg(not(no_global_oom_handling))]
2369#[stable(feature = "more_rc_default_impls", since = "1.80.0")]
2370impl<T> Default for Rc<[T]> {
2371 /// Creates an empty `[T]` inside an `Rc`.
2372 ///
2373 /// This may or may not share an allocation with other Rcs on the same thread.
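    ///
    /// # Examples
    ///
    /// A minimal check that the default value is an empty slice:
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let empty: Rc<[i32]> = Default::default();
    /// assert!(empty.is_empty());
    /// ```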
2374 #[inline]
2375 fn default() -> Self {
2376 let arr: [T; 0] = [];
2377 Rc::from(arr)
2378 }
2379}
2380
2381#[cfg(not(no_global_oom_handling))]
2382#[stable(feature = "pin_default_impls", since = "1.91.0")]
2383impl<T> Default for Pin<Rc<T>>
2384where
2385 T: ?Sized,
2386 Rc<T>: Default,
2387{
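    /// Pins the `Default` value of `Rc<T>`.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::pin::Pin;
    /// use std::rc::Rc;
    ///
    /// let zero: Pin<Rc<u32>> = Default::default();
    /// assert_eq!(*zero, 0);
    /// ```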
2388 #[inline]
2389 fn default() -> Self {
2390 unsafe { Pin::new_unchecked(Rc::<T>::default()) }
2391 }
2392}
2393
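/// Specialization helper for `PartialEq` on `Rc`: the default implementation below compares the
/// inner values, while the `MarkerEq` specialization short-circuits with `Rc::ptr_eq` first.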
2394#[stable(feature = "rust1", since = "1.0.0")]
2395trait RcEqIdent<T: ?Sized + PartialEq, A: Allocator> {
2396 fn eq(&self, other: &Rc<T, A>) -> bool;
2397 fn ne(&self, other: &Rc<T, A>) -> bool;
2398}
2399
2400#[stable(feature = "rust1", since = "1.0.0")]
2401impl<T: ?Sized + PartialEq, A: Allocator> RcEqIdent<T, A> for Rc<T, A> {
2402 #[inline]
2403 default fn eq(&self, other: &Rc<T, A>) -> bool {
2404 **self == **other
2405 }
2406
2407 #[inline]
2408 default fn ne(&self, other: &Rc<T, A>) -> bool {
2409 **self != **other
2410 }
2411}
2412
2413// Hack to allow specializing on `Eq` even though `Eq` has a method.
2414#[rustc_unsafe_specialization_marker]
2415pub(crate) trait MarkerEq: PartialEq<Self> {}
2416
2417impl<T: Eq> MarkerEq for T {}
2418
2419/// We're doing this specialization here, and not as a more general optimization on `&T`, because it
2420/// would otherwise add a cost to all equality checks on refs. We assume that `Rc`s are used to
/// store large values that are slow to clone but also expensive to check for equality, causing this
/// cost to pay off more easily. It's also more likely to have two `Rc` clones that point to
/// the same value than two `&T`s.
2424///
2425/// We can only do this when `T: Eq` as a `PartialEq` might be deliberately irreflexive.
2426#[stable(feature = "rust1", since = "1.0.0")]
2427impl<T: ?Sized + MarkerEq, A: Allocator> RcEqIdent<T, A> for Rc<T, A> {
2428 #[inline]
2429 fn eq(&self, other: &Rc<T, A>) -> bool {
2430 Rc::ptr_eq(self, other) || **self == **other
2431 }
2432
2433 #[inline]
2434 fn ne(&self, other: &Rc<T, A>) -> bool {
2435 !Rc::ptr_eq(self, other) && **self != **other
2436 }
2437}
2438
2439#[stable(feature = "rust1", since = "1.0.0")]
2440impl<T: ?Sized + PartialEq, A: Allocator> PartialEq for Rc<T, A> {
2441 /// Equality for two `Rc`s.
2442 ///
2443 /// Two `Rc`s are equal if their inner values are equal, even if they are
    /// stored in different allocations.
2445 ///
2446 /// If `T` also implements `Eq` (implying reflexivity of equality),
2447 /// two `Rc`s that point to the same allocation are
2448 /// always equal.
2449 ///
2450 /// # Examples
2451 ///
2452 /// ```
2453 /// use std::rc::Rc;
2454 ///
2455 /// let five = Rc::new(5);
2456 ///
2457 /// assert!(five == Rc::new(5));
2458 /// ```
2459 #[inline]
2460 fn eq(&self, other: &Rc<T, A>) -> bool {
2461 RcEqIdent::eq(self, other)
2462 }
2463
2464 /// Inequality for two `Rc`s.
2465 ///
2466 /// Two `Rc`s are not equal if their inner values are not equal.
2467 ///
2468 /// If `T` also implements `Eq` (implying reflexivity of equality),
2469 /// two `Rc`s that point to the same allocation are
2470 /// always equal.
2471 ///
2472 /// # Examples
2473 ///
2474 /// ```
2475 /// use std::rc::Rc;
2476 ///
2477 /// let five = Rc::new(5);
2478 ///
2479 /// assert!(five != Rc::new(6));
2480 /// ```
2481 #[inline]
2482 fn ne(&self, other: &Rc<T, A>) -> bool {
2483 RcEqIdent::ne(self, other)
2484 }
2485}
2486
2487#[stable(feature = "rust1", since = "1.0.0")]
2488impl<T: ?Sized + Eq, A: Allocator> Eq for Rc<T, A> {}
2489
2490#[stable(feature = "rust1", since = "1.0.0")]
2491impl<T: ?Sized + PartialOrd, A: Allocator> PartialOrd for Rc<T, A> {
2492 /// Partial comparison for two `Rc`s.
2493 ///
2494 /// The two are compared by calling `partial_cmp()` on their inner values.
2495 ///
2496 /// # Examples
2497 ///
2498 /// ```
2499 /// use std::rc::Rc;
2500 /// use std::cmp::Ordering;
2501 ///
2502 /// let five = Rc::new(5);
2503 ///
2504 /// assert_eq!(Some(Ordering::Less), five.partial_cmp(&Rc::new(6)));
2505 /// ```
2506 #[inline(always)]
2507 fn partial_cmp(&self, other: &Rc<T, A>) -> Option<Ordering> {
2508 (**self).partial_cmp(&**other)
2509 }
2510
2511 /// Less-than comparison for two `Rc`s.
2512 ///
2513 /// The two are compared by calling `<` on their inner values.
2514 ///
2515 /// # Examples
2516 ///
2517 /// ```
2518 /// use std::rc::Rc;
2519 ///
2520 /// let five = Rc::new(5);
2521 ///
2522 /// assert!(five < Rc::new(6));
2523 /// ```
2524 #[inline(always)]
2525 fn lt(&self, other: &Rc<T, A>) -> bool {
2526 **self < **other
2527 }
2528
2529 /// 'Less than or equal to' comparison for two `Rc`s.
2530 ///
2531 /// The two are compared by calling `<=` on their inner values.
2532 ///
2533 /// # Examples
2534 ///
2535 /// ```
2536 /// use std::rc::Rc;
2537 ///
2538 /// let five = Rc::new(5);
2539 ///
2540 /// assert!(five <= Rc::new(5));
2541 /// ```
2542 #[inline(always)]
2543 fn le(&self, other: &Rc<T, A>) -> bool {
2544 **self <= **other
2545 }
2546
2547 /// Greater-than comparison for two `Rc`s.
2548 ///
2549 /// The two are compared by calling `>` on their inner values.
2550 ///
2551 /// # Examples
2552 ///
2553 /// ```
2554 /// use std::rc::Rc;
2555 ///
2556 /// let five = Rc::new(5);
2557 ///
2558 /// assert!(five > Rc::new(4));
2559 /// ```
2560 #[inline(always)]
2561 fn gt(&self, other: &Rc<T, A>) -> bool {
2562 **self > **other
2563 }
2564
2565 /// 'Greater than or equal to' comparison for two `Rc`s.
2566 ///
2567 /// The two are compared by calling `>=` on their inner values.
2568 ///
2569 /// # Examples
2570 ///
2571 /// ```
2572 /// use std::rc::Rc;
2573 ///
2574 /// let five = Rc::new(5);
2575 ///
2576 /// assert!(five >= Rc::new(5));
2577 /// ```
2578 #[inline(always)]
2579 fn ge(&self, other: &Rc<T, A>) -> bool {
2580 **self >= **other
2581 }
2582}
2583
2584#[stable(feature = "rust1", since = "1.0.0")]
2585impl<T: ?Sized + Ord, A: Allocator> Ord for Rc<T, A> {
2586 /// Comparison for two `Rc`s.
2587 ///
2588 /// The two are compared by calling `cmp()` on their inner values.
2589 ///
2590 /// # Examples
2591 ///
2592 /// ```
2593 /// use std::rc::Rc;
2594 /// use std::cmp::Ordering;
2595 ///
2596 /// let five = Rc::new(5);
2597 ///
2598 /// assert_eq!(Ordering::Less, five.cmp(&Rc::new(6)));
2599 /// ```
2600 #[inline]
2601 fn cmp(&self, other: &Rc<T, A>) -> Ordering {
2602 (**self).cmp(&**other)
2603 }
2604}
2605
2606#[stable(feature = "rust1", since = "1.0.0")]
2607impl<T: ?Sized + Hash, A: Allocator> Hash for Rc<T, A> {
2608 fn hash<H: Hasher>(&self, state: &mut H) {
2609 (**self).hash(state);
2610 }
2611}
2612
2613#[stable(feature = "rust1", since = "1.0.0")]
2614impl<T: ?Sized + fmt::Display, A: Allocator> fmt::Display for Rc<T, A> {
2615 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
2616 fmt::Display::fmt(&**self, f)
2617 }
2618}
2619
2620#[stable(feature = "rust1", since = "1.0.0")]
2621impl<T: ?Sized + fmt::Debug, A: Allocator> fmt::Debug for Rc<T, A> {
2622 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
2623 fmt::Debug::fmt(&**self, f)
2624 }
2625}
2626
2627#[stable(feature = "rust1", since = "1.0.0")]
2628impl<T: ?Sized, A: Allocator> fmt::Pointer for Rc<T, A> {
2629 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
2630 fmt::Pointer::fmt(&(&raw const **self), f)
2631 }
2632}
2633
2634#[cfg(not(no_global_oom_handling))]
2635#[stable(feature = "from_for_ptrs", since = "1.6.0")]
2636impl<T> From<T> for Rc<T> {
    /// Converts a generic type `T` into an `Rc<T>`.
2638 ///
2639 /// The conversion allocates on the heap and moves `t`
2640 /// from the stack into it.
2641 ///
2642 /// # Example
2643 /// ```rust
2644 /// # use std::rc::Rc;
2645 /// let x = 5;
2646 /// let rc = Rc::new(5);
2647 ///
2648 /// assert_eq!(Rc::from(x), rc);
2649 /// ```
2650 fn from(t: T) -> Self {
2651 Rc::new(t)
2652 }
2653}
2654
2655#[cfg(not(no_global_oom_handling))]
2656#[stable(feature = "shared_from_array", since = "1.74.0")]
2657impl<T, const N: usize> From<[T; N]> for Rc<[T]> {
2658 /// Converts a [`[T; N]`](prim@array) into an `Rc<[T]>`.
2659 ///
2660 /// The conversion moves the array into a newly allocated `Rc`.
2661 ///
2662 /// # Example
2663 ///
2664 /// ```
2665 /// # use std::rc::Rc;
2666 /// let original: [i32; 3] = [1, 2, 3];
2667 /// let shared: Rc<[i32]> = Rc::from(original);
2668 /// assert_eq!(&[1, 2, 3], &shared[..]);
2669 /// ```
2670 #[inline]
2671 fn from(v: [T; N]) -> Rc<[T]> {
2672 Rc::<[T; N]>::from(v)
2673 }
2674}
2675
2676#[cfg(not(no_global_oom_handling))]
2677#[stable(feature = "shared_from_slice", since = "1.21.0")]
2678impl<T: Clone> From<&[T]> for Rc<[T]> {
2679 /// Allocates a reference-counted slice and fills it by cloning `v`'s items.
2680 ///
2681 /// # Example
2682 ///
2683 /// ```
2684 /// # use std::rc::Rc;
2685 /// let original: &[i32] = &[1, 2, 3];
2686 /// let shared: Rc<[i32]> = Rc::from(original);
2687 /// assert_eq!(&[1, 2, 3], &shared[..]);
2688 /// ```
2689 #[inline]
2690 fn from(v: &[T]) -> Rc<[T]> {
2691 <Self as RcFromSlice<T>>::from_slice(v)
2692 }
2693}
2694
2695#[cfg(not(no_global_oom_handling))]
2696#[stable(feature = "shared_from_mut_slice", since = "1.84.0")]
2697impl<T: Clone> From<&mut [T]> for Rc<[T]> {
2698 /// Allocates a reference-counted slice and fills it by cloning `v`'s items.
2699 ///
2700 /// # Example
2701 ///
2702 /// ```
2703 /// # use std::rc::Rc;
2704 /// let mut original = [1, 2, 3];
2705 /// let original: &mut [i32] = &mut original;
2706 /// let shared: Rc<[i32]> = Rc::from(original);
2707 /// assert_eq!(&[1, 2, 3], &shared[..]);
2708 /// ```
2709 #[inline]
2710 fn from(v: &mut [T]) -> Rc<[T]> {
2711 Rc::from(&*v)
2712 }
2713}
2714
2715#[cfg(not(no_global_oom_handling))]
2716#[stable(feature = "shared_from_slice", since = "1.21.0")]
2717impl From<&str> for Rc<str> {
2718 /// Allocates a reference-counted string slice and copies `v` into it.
2719 ///
2720 /// # Example
2721 ///
2722 /// ```
2723 /// # use std::rc::Rc;
2724 /// let shared: Rc<str> = Rc::from("statue");
2725 /// assert_eq!("statue", &shared[..]);
2726 /// ```
2727 #[inline]
2728 fn from(v: &str) -> Rc<str> {
2729 let rc = Rc::<[u8]>::from(v.as_bytes());
2730 unsafe { Rc::from_raw(Rc::into_raw(rc) as *const str) }
2731 }
2732}
2733
2734#[cfg(not(no_global_oom_handling))]
2735#[stable(feature = "shared_from_mut_slice", since = "1.84.0")]
2736impl From<&mut str> for Rc<str> {
2737 /// Allocates a reference-counted string slice and copies `v` into it.
2738 ///
2739 /// # Example
2740 ///
2741 /// ```
2742 /// # use std::rc::Rc;
2743 /// let mut original = String::from("statue");
2744 /// let original: &mut str = &mut original;
2745 /// let shared: Rc<str> = Rc::from(original);
2746 /// assert_eq!("statue", &shared[..]);
2747 /// ```
2748 #[inline]
2749 fn from(v: &mut str) -> Rc<str> {
2750 Rc::from(&*v)
2751 }
2752}
2753
2754#[cfg(not(no_global_oom_handling))]
2755#[stable(feature = "shared_from_slice", since = "1.21.0")]
2756impl From<String> for Rc<str> {
2757 /// Allocates a reference-counted string slice and copies `v` into it.
2758 ///
2759 /// # Example
2760 ///
2761 /// ```
2762 /// # use std::rc::Rc;
2763 /// let original: String = "statue".to_owned();
2764 /// let shared: Rc<str> = Rc::from(original);
2765 /// assert_eq!("statue", &shared[..]);
2766 /// ```
2767 #[inline]
2768 fn from(v: String) -> Rc<str> {
2769 Rc::from(&v[..])
2770 }
2771}
2772
2773#[cfg(not(no_global_oom_handling))]
2774#[stable(feature = "shared_from_slice", since = "1.21.0")]
2775impl<T: ?Sized, A: Allocator> From<Box<T, A>> for Rc<T, A> {
    /// Moves a boxed object to a new, reference-counted allocation.
2777 ///
2778 /// # Example
2779 ///
2780 /// ```
2781 /// # use std::rc::Rc;
2782 /// let original: Box<i32> = Box::new(1);
2783 /// let shared: Rc<i32> = Rc::from(original);
2784 /// assert_eq!(1, *shared);
2785 /// ```
2786 #[inline]
2787 fn from(v: Box<T, A>) -> Rc<T, A> {
2788 Rc::from_box_in(v)
2789 }
2790}
2791
2792#[cfg(not(no_global_oom_handling))]
2793#[stable(feature = "shared_from_slice", since = "1.21.0")]
2794impl<T, A: Allocator> From<Vec<T, A>> for Rc<[T], A> {
2795 /// Allocates a reference-counted slice and moves `v`'s items into it.
2796 ///
2797 /// # Example
2798 ///
2799 /// ```
2800 /// # use std::rc::Rc;
2801 /// let unique: Vec<i32> = vec![1, 2, 3];
2802 /// let shared: Rc<[i32]> = Rc::from(unique);
2803 /// assert_eq!(&[1, 2, 3], &shared[..]);
2804 /// ```
2805 #[inline]
2806 fn from(v: Vec<T, A>) -> Rc<[T], A> {
2807 unsafe {
2808 let (vec_ptr, len, cap, alloc) = v.into_raw_parts_with_alloc();
2809
2810 let rc_ptr = Self::allocate_for_slice_in(len, &alloc);
2811 ptr::copy_nonoverlapping(vec_ptr, (&raw mut (*rc_ptr).value) as *mut T, len);
2812
2813 // Create a `Vec<T, &A>` with length 0, to deallocate the buffer
2814 // without dropping its contents or the allocator
2815 let _ = Vec::from_raw_parts_in(vec_ptr, 0, cap, &alloc);
2816
2817 Self::from_ptr_in(rc_ptr, alloc)
2818 }
2819 }
2820}
2821
2822#[stable(feature = "shared_from_cow", since = "1.45.0")]
2823impl<'a, B> From<Cow<'a, B>> for Rc<B>
2824where
2825 B: ToOwned + ?Sized,
2826 Rc<B>: From<&'a B> + From<B::Owned>,
2827{
2828 /// Creates a reference-counted pointer from a clone-on-write pointer by
2829 /// copying its content.
2830 ///
2831 /// # Example
2832 ///
2833 /// ```rust
2834 /// # use std::rc::Rc;
2835 /// # use std::borrow::Cow;
2836 /// let cow: Cow<'_, str> = Cow::Borrowed("eggplant");
2837 /// let shared: Rc<str> = Rc::from(cow);
2838 /// assert_eq!("eggplant", &shared[..]);
2839 /// ```
2840 #[inline]
2841 fn from(cow: Cow<'a, B>) -> Rc<B> {
2842 match cow {
2843 Cow::Borrowed(s) => Rc::from(s),
2844 Cow::Owned(s) => Rc::from(s),
2845 }
2846 }
2847}
2848
2849#[stable(feature = "shared_from_str", since = "1.62.0")]
2850impl From<Rc<str>> for Rc<[u8]> {
2851 /// Converts a reference-counted string slice into a byte slice.
2852 ///
2853 /// # Example
2854 ///
2855 /// ```
2856 /// # use std::rc::Rc;
2857 /// let string: Rc<str> = Rc::from("eggplant");
2858 /// let bytes: Rc<[u8]> = Rc::from(string);
2859 /// assert_eq!("eggplant".as_bytes(), bytes.as_ref());
2860 /// ```
2861 #[inline]
2862 fn from(rc: Rc<str>) -> Self {
2863 // SAFETY: `str` has the same layout as `[u8]`.
2864 unsafe { Rc::from_raw(Rc::into_raw(rc) as *const [u8]) }
2865 }
2866}
2867
2868#[stable(feature = "boxed_slice_try_from", since = "1.43.0")]
2869impl<T, A: Allocator, const N: usize> TryFrom<Rc<[T], A>> for Rc<[T; N], A> {
2870 type Error = Rc<[T], A>;
2871
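    /// Tries to convert the reference-counted slice into a reference-counted array of length `N`.
    ///
    /// This succeeds only when the slice length matches `N`; otherwise the original `Rc<[T]>`
    /// is returned unchanged in the `Err` variant.
    ///
    /// # Examples
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let slice: Rc<[i32]> = Rc::from([1, 2, 3]);
    /// let array = Rc::<[i32; 3]>::try_from(slice).unwrap();
    /// assert_eq!(*array, [1, 2, 3]);
    ///
    /// let slice: Rc<[i32]> = Rc::from([1, 2, 3]);
    /// assert!(Rc::<[i32; 4]>::try_from(slice).is_err());
    /// ```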
2872 fn try_from(boxed_slice: Rc<[T], A>) -> Result<Self, Self::Error> {
2873 if boxed_slice.len() == N {
2874 let (ptr, alloc) = Rc::into_inner_with_allocator(boxed_slice);
2875 Ok(unsafe { Rc::from_inner_in(ptr.cast(), alloc) })
2876 } else {
2877 Err(boxed_slice)
2878 }
2879 }
2880}
2881
2882#[cfg(not(no_global_oom_handling))]
2883#[stable(feature = "shared_from_iter", since = "1.37.0")]
2884impl<T> FromIterator<T> for Rc<[T]> {
2885 /// Takes each element in the `Iterator` and collects it into an `Rc<[T]>`.
2886 ///
2887 /// # Performance characteristics
2888 ///
2889 /// ## The general case
2890 ///
2891 /// In the general case, collecting into `Rc<[T]>` is done by first
2892 /// collecting into a `Vec<T>`. That is, when writing the following:
2893 ///
2894 /// ```rust
2895 /// # use std::rc::Rc;
2896 /// let evens: Rc<[u8]> = (0..10).filter(|&x| x % 2 == 0).collect();
2897 /// # assert_eq!(&*evens, &[0, 2, 4, 6, 8]);
2898 /// ```
2899 ///
2900 /// this behaves as if we wrote:
2901 ///
2902 /// ```rust
2903 /// # use std::rc::Rc;
2904 /// let evens: Rc<[u8]> = (0..10).filter(|&x| x % 2 == 0)
2905 /// .collect::<Vec<_>>() // The first set of allocations happens here.
2906 /// .into(); // A second allocation for `Rc<[T]>` happens here.
2907 /// # assert_eq!(&*evens, &[0, 2, 4, 6, 8]);
2908 /// ```
2909 ///
2910 /// This will allocate as many times as needed for constructing the `Vec<T>`
2911 /// and then it will allocate once for turning the `Vec<T>` into the `Rc<[T]>`.
2912 ///
2913 /// ## Iterators of known length
2914 ///
2915 /// When your `Iterator` implements `TrustedLen` and is of an exact size,
2916 /// a single allocation will be made for the `Rc<[T]>`. For example:
2917 ///
2918 /// ```rust
2919 /// # use std::rc::Rc;
2920 /// let evens: Rc<[u8]> = (0..10).collect(); // Just a single allocation happens here.
2921 /// # assert_eq!(&*evens, &*(0..10).collect::<Vec<_>>());
2922 /// ```
2923 fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
2924 ToRcSlice::to_rc_slice(iter.into_iter())
2925 }
2926}
2927
2928/// Specialization trait used for collecting into `Rc<[T]>`.
2929#[cfg(not(no_global_oom_handling))]
2930trait ToRcSlice<T>: Iterator<Item = T> + Sized {
2931 fn to_rc_slice(self) -> Rc<[T]>;
2932}
2933
2934#[cfg(not(no_global_oom_handling))]
2935impl<T, I: Iterator<Item = T>> ToRcSlice<T> for I {
2936 default fn to_rc_slice(self) -> Rc<[T]> {
2937 self.collect::<Vec<T>>().into()
2938 }
2939}
2940
2941#[cfg(not(no_global_oom_handling))]
2942impl<T, I: iter::TrustedLen<Item = T>> ToRcSlice<T> for I {
2943 fn to_rc_slice(self) -> Rc<[T]> {
2944 // This is the case for a `TrustedLen` iterator.
2945 let (low, high) = self.size_hint();
2946 if let Some(high) = high {
2947 debug_assert_eq!(
2948 low,
2949 high,
2950 "TrustedLen iterator's size hint is not exact: {:?}",
2951 (low, high)
2952 );
2953
2954 unsafe {
                // SAFETY: `TrustedLen` guarantees the size hint is exact, so the iterator yields exactly `low` items.
2956 Rc::from_iter_exact(self, low)
2957 }
2958 } else {
2959 // TrustedLen contract guarantees that `upper_bound == None` implies an iterator
2960 // length exceeding `usize::MAX`.
2961 // The default implementation would collect into a vec which would panic.
2962 // Thus we panic here immediately without invoking `Vec` code.
2963 panic!("capacity overflow");
2964 }
2965 }
2966}
2967
2968/// `Weak` is a version of [`Rc`] that holds a non-owning reference to the
2969/// managed allocation.
2970///
2971/// The allocation is accessed by calling [`upgrade`] on the `Weak`
2972/// pointer, which returns an <code>[Option]<[Rc]\<T>></code>.
2973///
2974/// Since a `Weak` reference does not count towards ownership, it will not
2975/// prevent the value stored in the allocation from being dropped, and `Weak` itself makes no
2976/// guarantees about the value still being present. Thus it may return [`None`]
2977/// when [`upgrade`]d. Note however that a `Weak` reference *does* prevent the allocation
2978/// itself (the backing store) from being deallocated.
2979///
2980/// A `Weak` pointer is useful for keeping a temporary reference to the allocation
2981/// managed by [`Rc`] without preventing its inner value from being dropped. It is also used to
2982/// prevent circular references between [`Rc`] pointers, since mutual owning references
2983/// would never allow either [`Rc`] to be dropped. For example, a tree could
2984/// have strong [`Rc`] pointers from parent nodes to children, and `Weak`
2985/// pointers from children back to their parents.
2986///
2987/// The typical way to obtain a `Weak` pointer is to call [`Rc::downgrade`].
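///
/// # Examples
///
/// Downgrading an [`Rc`] and observing that [`upgrade`] fails once the last strong
/// pointer is gone:
///
/// ```
/// use std::rc::Rc;
///
/// let strong = Rc::new("hello".to_owned());
/// let weak = Rc::downgrade(&strong);
///
/// assert_eq!("hello", &*weak.upgrade().unwrap());
///
/// drop(strong);
/// assert!(weak.upgrade().is_none());
/// ```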
2988///
2989/// [`upgrade`]: Weak::upgrade
2990#[stable(feature = "rc_weak", since = "1.4.0")]
2991#[rustc_diagnostic_item = "RcWeak"]
2992pub struct Weak<
2993 T: ?Sized,
2994 #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
2995> {
2996 // This is a `NonNull` to allow optimizing the size of this type in enums,
2997 // but it is not necessarily a valid pointer.
2998 // `Weak::new` sets this to `usize::MAX` so that it doesn’t need
2999 // to allocate space on the heap. That's not a value a real pointer
3000 // will ever have because RcInner has alignment at least 2.
3001 ptr: NonNull<RcInner<T>>,
3002 alloc: A,
3003}
3004
3005#[stable(feature = "rc_weak", since = "1.4.0")]
3006impl<T: ?Sized, A: Allocator> !Send for Weak<T, A> {}
3007#[stable(feature = "rc_weak", since = "1.4.0")]
3008impl<T: ?Sized, A: Allocator> !Sync for Weak<T, A> {}
3009
3010#[unstable(feature = "coerce_unsized", issue = "18598")]
3011impl<T: ?Sized + Unsize<U>, U: ?Sized, A: Allocator> CoerceUnsized<Weak<U, A>> for Weak<T, A> {}
3012
3013#[unstable(feature = "dispatch_from_dyn", issue = "none")]
3014impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<Weak<U>> for Weak<T> {}
3015
3016impl<T> Weak<T> {
3017 /// Constructs a new `Weak<T>`, without allocating any memory.
3018 /// Calling [`upgrade`] on the return value always gives [`None`].
3019 ///
3020 /// [`upgrade`]: Weak::upgrade
3021 ///
3022 /// # Examples
3023 ///
3024 /// ```
3025 /// use std::rc::Weak;
3026 ///
3027 /// let empty: Weak<i64> = Weak::new();
3028 /// assert!(empty.upgrade().is_none());
3029 /// ```
3030 #[inline]
3031 #[stable(feature = "downgraded_weak", since = "1.10.0")]
3032 #[rustc_const_stable(feature = "const_weak_new", since = "1.73.0")]
3033 #[must_use]
3034 pub const fn new() -> Weak<T> {
3035 Weak { ptr: NonNull::without_provenance(NonZeroUsize::MAX), alloc: Global }
3036 }
3037}
3038
3039impl<T, A: Allocator> Weak<T, A> {
    /// Constructs a new `Weak<T>` associated with the provided allocator, without allocating
    /// any memory.
3042 /// Calling [`upgrade`] on the return value always gives [`None`].
3043 ///
3044 /// [`upgrade`]: Weak::upgrade
3045 ///
3046 /// # Examples
3047 ///
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::alloc::System;
    /// use std::rc::Weak;
    ///
    /// let empty: Weak<i64, System> = Weak::new_in(System);
    /// assert!(empty.upgrade().is_none());
    /// ```
3054 #[inline]
3055 #[unstable(feature = "allocator_api", issue = "32838")]
3056 pub fn new_in(alloc: A) -> Weak<T, A> {
3057 Weak { ptr: NonNull::without_provenance(NonZeroUsize::MAX), alloc }
3058 }
3059}
3060
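/// Returns `true` if the pointer carries the sentinel address (`usize::MAX`) used by
/// `Weak::new`, i.e. it does not point to an allocated `RcInner`.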
3061pub(crate) fn is_dangling<T: ?Sized>(ptr: *const T) -> bool {
3062 (ptr.cast::<()>()).addr() == usize::MAX
3063}
3064
3065/// Helper type to allow accessing the reference counts without
3066/// making any assertions about the data field.
3067struct WeakInner<'a> {
3068 weak: &'a Cell<usize>,
3069 strong: &'a Cell<usize>,
3070}
3071
3072impl<T: ?Sized> Weak<T> {
3073 /// Converts a raw pointer previously created by [`into_raw`] back into `Weak<T>`.
3074 ///
3075 /// This can be used to safely get a strong reference (by calling [`upgrade`]
    /// later) or to release the weak count by dropping the `Weak<T>`.
3077 ///
3078 /// It takes ownership of one weak reference (with the exception of pointers created by [`new`],
3079 /// as these don't own anything; the method still works on them).
3080 ///
3081 /// # Safety
3082 ///
    /// The pointer must have originated from [`into_raw`] and must still own its potential
3084 /// weak reference, and `ptr` must point to a block of memory allocated by the global allocator.
3085 ///
3086 /// It is allowed for the strong count to be 0 at the time of calling this. Nevertheless, this
3087 /// takes ownership of one weak reference currently represented as a raw pointer (the weak
3088 /// count is not modified by this operation) and therefore it must be paired with a previous
3089 /// call to [`into_raw`].
3090 ///
3091 /// # Examples
3092 ///
3093 /// ```
3094 /// use std::rc::{Rc, Weak};
3095 ///
3096 /// let strong = Rc::new("hello".to_owned());
3097 ///
3098 /// let raw_1 = Rc::downgrade(&strong).into_raw();
3099 /// let raw_2 = Rc::downgrade(&strong).into_raw();
3100 ///
3101 /// assert_eq!(2, Rc::weak_count(&strong));
3102 ///
3103 /// assert_eq!("hello", &*unsafe { Weak::from_raw(raw_1) }.upgrade().unwrap());
3104 /// assert_eq!(1, Rc::weak_count(&strong));
3105 ///
3106 /// drop(strong);
3107 ///
3108 /// // Decrement the last weak count.
3109 /// assert!(unsafe { Weak::from_raw(raw_2) }.upgrade().is_none());
3110 /// ```
3111 ///
3112 /// [`into_raw`]: Weak::into_raw
3113 /// [`upgrade`]: Weak::upgrade
3114 /// [`new`]: Weak::new
3115 #[inline]
3116 #[stable(feature = "weak_into_raw", since = "1.45.0")]
3117 pub unsafe fn from_raw(ptr: *const T) -> Self {
3118 unsafe { Self::from_raw_in(ptr, Global) }
3119 }
3120
3121 /// Consumes the `Weak<T>` and turns it into a raw pointer.
3122 ///
3123 /// This converts the weak pointer into a raw pointer, while still preserving the ownership of
3124 /// one weak reference (the weak count is not modified by this operation). It can be turned
3125 /// back into the `Weak<T>` with [`from_raw`].
3126 ///
    /// The same restrictions on accessing the target of the pointer as with
3128 /// [`as_ptr`] apply.
3129 ///
3130 /// # Examples
3131 ///
3132 /// ```
3133 /// use std::rc::{Rc, Weak};
3134 ///
3135 /// let strong = Rc::new("hello".to_owned());
3136 /// let weak = Rc::downgrade(&strong);
3137 /// let raw = weak.into_raw();
3138 ///
3139 /// assert_eq!(1, Rc::weak_count(&strong));
3140 /// assert_eq!("hello", unsafe { &*raw });
3141 ///
3142 /// drop(unsafe { Weak::from_raw(raw) });
3143 /// assert_eq!(0, Rc::weak_count(&strong));
3144 /// ```
3145 ///
3146 /// [`from_raw`]: Weak::from_raw
3147 /// [`as_ptr`]: Weak::as_ptr
3148 #[must_use = "losing the pointer will leak memory"]
3149 #[stable(feature = "weak_into_raw", since = "1.45.0")]
3150 pub fn into_raw(self) -> *const T {
3151 mem::ManuallyDrop::new(self).as_ptr()
3152 }
3153}
3154
3155impl<T: ?Sized, A: Allocator> Weak<T, A> {
3156 /// Returns a reference to the underlying allocator.
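    ///
    /// # Examples
    ///
    /// A brief sketch with the `System` allocator (requires the unstable `allocator_api`
    /// feature):
    ///
    /// ```
    /// #![feature(allocator_api)]
    ///
    /// use std::alloc::System;
    /// use std::rc::Rc;
    ///
    /// let strong = Rc::new_in("hello".to_owned(), System);
    /// let weak = Rc::downgrade(&strong);
    /// let _alloc: &System = weak.allocator();
    /// ```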
3157 #[inline]
3158 #[unstable(feature = "allocator_api", issue = "32838")]
3159 pub fn allocator(&self) -> &A {
3160 &self.alloc
3161 }
3162
3163 /// Returns a raw pointer to the object `T` pointed to by this `Weak<T>`.
3164 ///
3165 /// The pointer is valid only if there are some strong references. The pointer may be dangling,
3166 /// unaligned or even [`null`] otherwise.
3167 ///
3168 /// # Examples
3169 ///
3170 /// ```
3171 /// use std::rc::Rc;
3172 /// use std::ptr;
3173 ///
3174 /// let strong = Rc::new("hello".to_owned());
3175 /// let weak = Rc::downgrade(&strong);
3176 /// // Both point to the same object
3177 /// assert!(ptr::eq(&*strong, weak.as_ptr()));
3178 /// // The strong here keeps it alive, so we can still access the object.
3179 /// assert_eq!("hello", unsafe { &*weak.as_ptr() });
3180 ///
3181 /// drop(strong);
3182 /// // But not any more. We can do weak.as_ptr(), but accessing the pointer would lead to
3183 /// // undefined behavior.
3184 /// // assert_eq!("hello", unsafe { &*weak.as_ptr() });
3185 /// ```
3186 ///
3187 /// [`null`]: ptr::null
3188 #[must_use]
3189 #[stable(feature = "rc_as_ptr", since = "1.45.0")]
3190 pub fn as_ptr(&self) -> *const T {
3191 let ptr: *mut RcInner<T> = NonNull::as_ptr(self.ptr);
3192
3193 if is_dangling(ptr) {
3194 // If the pointer is dangling, we return the sentinel directly. This cannot be
3195 // a valid payload address, as the payload is at least as aligned as RcInner (usize).
3196 ptr as *const T
3197 } else {
3198 // SAFETY: if is_dangling returns false, then the pointer is dereferenceable.
3199 // The payload may be dropped at this point, and we have to maintain provenance,
3200 // so use raw pointer manipulation.
3201 unsafe { &raw mut (*ptr).value }
3202 }
3203 }
3204
3205 /// Consumes the `Weak<T>`, returning the wrapped pointer and allocator.
3206 ///
3207 /// This converts the weak pointer into a raw pointer, while still preserving the ownership of
3208 /// one weak reference (the weak count is not modified by this operation). It can be turned
3209 /// back into the `Weak<T>` with [`from_raw_in`].
3210 ///
    /// The same restrictions on accessing the target of the pointer as with
3212 /// [`as_ptr`] apply.
3213 ///
3214 /// # Examples
3215 ///
3216 /// ```
3217 /// #![feature(allocator_api)]
3218 /// use std::rc::{Rc, Weak};
3219 /// use std::alloc::System;
3220 ///
3221 /// let strong = Rc::new_in("hello".to_owned(), System);
3222 /// let weak = Rc::downgrade(&strong);
3223 /// let (raw, alloc) = weak.into_raw_with_allocator();
3224 ///
3225 /// assert_eq!(1, Rc::weak_count(&strong));
3226 /// assert_eq!("hello", unsafe { &*raw });
3227 ///
3228 /// drop(unsafe { Weak::from_raw_in(raw, alloc) });
3229 /// assert_eq!(0, Rc::weak_count(&strong));
3230 /// ```
3231 ///
3232 /// [`from_raw_in`]: Weak::from_raw_in
3233 /// [`as_ptr`]: Weak::as_ptr
3234 #[must_use = "losing the pointer will leak memory"]
3235 #[inline]
3236 #[unstable(feature = "allocator_api", issue = "32838")]
3237 pub fn into_raw_with_allocator(self) -> (*const T, A) {
3238 let this = mem::ManuallyDrop::new(self);
3239 let result = this.as_ptr();
3240 // Safety: `this` is ManuallyDrop so the allocator will not be double-dropped
3241 let alloc = unsafe { ptr::read(&this.alloc) };
3242 (result, alloc)
3243 }
3244
3245 /// Converts a raw pointer previously created by [`into_raw`] back into `Weak<T>`.
3246 ///
3247 /// This can be used to safely get a strong reference (by calling [`upgrade`]
    /// later) or to release the weak count by dropping the `Weak<T>`.
3249 ///
3250 /// It takes ownership of one weak reference (with the exception of pointers created by [`new`],
3251 /// as these don't own anything; the method still works on them).
3252 ///
3253 /// # Safety
3254 ///
    /// The pointer must have originated from [`into_raw`] and must still own its potential
3256 /// weak reference, and `ptr` must point to a block of memory allocated by `alloc`.
3257 ///
3258 /// It is allowed for the strong count to be 0 at the time of calling this. Nevertheless, this
3259 /// takes ownership of one weak reference currently represented as a raw pointer (the weak
3260 /// count is not modified by this operation) and therefore it must be paired with a previous
3261 /// call to [`into_raw`].
3262 ///
3263 /// # Examples
3264 ///
3265 /// ```
3266 /// use std::rc::{Rc, Weak};
3267 ///
3268 /// let strong = Rc::new("hello".to_owned());
3269 ///
3270 /// let raw_1 = Rc::downgrade(&strong).into_raw();
3271 /// let raw_2 = Rc::downgrade(&strong).into_raw();
3272 ///
3273 /// assert_eq!(2, Rc::weak_count(&strong));
3274 ///
3275 /// assert_eq!("hello", &*unsafe { Weak::from_raw(raw_1) }.upgrade().unwrap());
3276 /// assert_eq!(1, Rc::weak_count(&strong));
3277 ///
3278 /// drop(strong);
3279 ///
3280 /// // Decrement the last weak count.
3281 /// assert!(unsafe { Weak::from_raw(raw_2) }.upgrade().is_none());
3282 /// ```
3283 ///
3284 /// [`into_raw`]: Weak::into_raw
3285 /// [`upgrade`]: Weak::upgrade
3286 /// [`new`]: Weak::new
3287 #[inline]
3288 #[unstable(feature = "allocator_api", issue = "32838")]
3289 pub unsafe fn from_raw_in(ptr: *const T, alloc: A) -> Self {
3290 // See Weak::as_ptr for context on how the input pointer is derived.
3291
3292 let ptr = if is_dangling(ptr) {
3293 // This is a dangling Weak.
3294 ptr as *mut RcInner<T>
3295 } else {
3296 // Otherwise, we're guaranteed the pointer came from a nondangling Weak.
3297 // SAFETY: data_offset is safe to call, as ptr references a real (potentially dropped) T.
3298 let offset = unsafe { data_offset(ptr) };
3299 // Thus, we reverse the offset to get the whole RcInner.
3300 // SAFETY: the pointer originated from a Weak, so this offset is safe.
3301 unsafe { ptr.byte_sub(offset) as *mut RcInner<T> }
3302 };
3303
3304 // SAFETY: we now have recovered the original Weak pointer, so can create the Weak.
3305 Weak { ptr: unsafe { NonNull::new_unchecked(ptr) }, alloc }
3306 }
3307
3308 /// Attempts to upgrade the `Weak` pointer to an [`Rc`], delaying
3309 /// dropping of the inner value if successful.
3310 ///
3311 /// Returns [`None`] if the inner value has since been dropped.
3312 ///
3313 /// # Examples
3314 ///
3315 /// ```
3316 /// use std::rc::Rc;
3317 ///
3318 /// let five = Rc::new(5);
3319 ///
3320 /// let weak_five = Rc::downgrade(&five);
3321 ///
3322 /// let strong_five: Option<Rc<_>> = weak_five.upgrade();
3323 /// assert!(strong_five.is_some());
3324 ///
3325 /// // Destroy all strong pointers.
3326 /// drop(strong_five);
3327 /// drop(five);
3328 ///
3329 /// assert!(weak_five.upgrade().is_none());
3330 /// ```
3331 #[must_use = "this returns a new `Rc`, \
3332 without modifying the original weak pointer"]
3333 #[stable(feature = "rc_weak", since = "1.4.0")]
3334 pub fn upgrade(&self) -> Option<Rc<T, A>>
3335 where
3336 A: Clone,
3337 {
3338 let inner = self.inner()?;
3339
3340 if inner.strong() == 0 {
3341 None
3342 } else {
3343 unsafe {
3344 inner.inc_strong();
3345 Some(Rc::from_inner_in(self.ptr, self.alloc.clone()))
3346 }
3347 }
3348 }
3349
3350 /// Gets the number of strong (`Rc`) pointers pointing to this allocation.
3351 ///
3352 /// If `self` was created using [`Weak::new`], this will return 0.
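    ///
    /// # Examples
    ///
    /// Counting the strong pointers before and after the last `Rc` is dropped:
    ///
    /// ```
    /// use std::rc::{Rc, Weak};
    ///
    /// let five = Rc::new(5);
    /// let weak_five = Rc::downgrade(&five);
    /// assert_eq!(1, weak_five.strong_count());
    ///
    /// drop(five);
    /// assert_eq!(0, weak_five.strong_count());
    ///
    /// let empty: Weak<i32> = Weak::new();
    /// assert_eq!(0, empty.strong_count());
    /// ```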
3353 #[must_use]
3354 #[stable(feature = "weak_counts", since = "1.41.0")]
3355 pub fn strong_count(&self) -> usize {
3356 if let Some(inner) = self.inner() { inner.strong() } else { 0 }
3357 }
3358
3359 /// Gets the number of `Weak` pointers pointing to this allocation.
3360 ///
3361 /// If no strong pointers remain, this will return zero.
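    ///
    /// # Examples
    ///
    /// Counting `Weak` pointers while a strong pointer is alive, and after it is gone:
    ///
    /// ```
    /// use std::rc::Rc;
    ///
    /// let five = Rc::new(5);
    /// let weak_five = Rc::downgrade(&five);
    /// let _also_weak = weak_five.clone();
    ///
    /// assert_eq!(2, weak_five.weak_count());
    ///
    /// // Once the last strong pointer is gone, the count is reported as zero.
    /// drop(five);
    /// assert_eq!(0, weak_five.weak_count());
    /// ```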
3362 #[must_use]
3363 #[stable(feature = "weak_counts", since = "1.41.0")]
3364 pub fn weak_count(&self) -> usize {
3365 if let Some(inner) = self.inner() {
3366 if inner.strong() > 0 {
3367 inner.weak() - 1 // subtract the implicit weak ptr
3368 } else {
3369 0
3370 }
3371 } else {
3372 0
3373 }
3374 }
3375
    /// Returns `None` when the pointer is dangling and there is no allocated `RcInner`
    /// (i.e., when this `Weak` was created by `Weak::new`).
3378 #[inline]
3379 fn inner(&self) -> Option<WeakInner<'_>> {
3380 if is_dangling(self.ptr.as_ptr()) {
3381 None
3382 } else {
3383 // We are careful to *not* create a reference covering the "data" field, as
3384 // the field may be mutated concurrently (for example, if the last `Rc`
3385 // is dropped, the data field will be dropped in-place).
3386 Some(unsafe {
3387 let ptr = self.ptr.as_ptr();
3388 WeakInner { strong: &(*ptr).strong, weak: &(*ptr).weak }
3389 })
3390 }
3391 }
3392
    /// Returns `true` if the two `Weak`s point to the same allocation (similar to [`ptr::eq`]), or
    /// if both don't point to any allocation (because they were created with `Weak::new()`).
    /// However, this function ignores the metadata of `dyn Trait` pointers.
3396 ///
3397 /// # Notes
3398 ///
    /// Since this compares pointers, any two `Weak`s created by `Weak::new()` will compare
    /// equal to each other, even though they don't point to any allocation.
3401 ///
3402 /// # Examples
3403 ///
3404 /// ```
3405 /// use std::rc::Rc;
3406 ///
3407 /// let first_rc = Rc::new(5);
3408 /// let first = Rc::downgrade(&first_rc);
3409 /// let second = Rc::downgrade(&first_rc);
3410 ///
3411 /// assert!(first.ptr_eq(&second));
3412 ///
3413 /// let third_rc = Rc::new(5);
3414 /// let third = Rc::downgrade(&third_rc);
3415 ///
3416 /// assert!(!first.ptr_eq(&third));
3417 /// ```
3418 ///
3419 /// Comparing `Weak::new`.
3420 ///
3421 /// ```
3422 /// use std::rc::{Rc, Weak};
3423 ///
3424 /// let first = Weak::new();
3425 /// let second = Weak::new();
3426 /// assert!(first.ptr_eq(&second));
3427 ///
3428 /// let third_rc = Rc::new(());
3429 /// let third = Rc::downgrade(&third_rc);
3430 /// assert!(!first.ptr_eq(&third));
3431 /// ```
3432 #[inline]
3433 #[must_use]
3434 #[stable(feature = "weak_ptr_eq", since = "1.39.0")]
3435 pub fn ptr_eq(&self, other: &Self) -> bool {
3436 ptr::addr_eq(self.ptr.as_ptr(), other.ptr.as_ptr())
3437 }
3438}
3439
3440#[stable(feature = "rc_weak", since = "1.4.0")]
3441unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for Weak<T, A> {
3442 /// Drops the `Weak` pointer.
3443 ///
3444 /// # Examples
3445 ///
3446 /// ```
3447 /// use std::rc::{Rc, Weak};
3448 ///
3449 /// struct Foo;
3450 ///
3451 /// impl Drop for Foo {
3452 /// fn drop(&mut self) {
3453 /// println!("dropped!");
3454 /// }
3455 /// }
3456 ///
3457 /// let foo = Rc::new(Foo);
3458 /// let weak_foo = Rc::downgrade(&foo);
3459 /// let other_weak_foo = Weak::clone(&weak_foo);
3460 ///
3461 /// drop(weak_foo); // Doesn't print anything
3462 /// drop(foo); // Prints "dropped!"
3463 ///
3464 /// assert!(other_weak_foo.upgrade().is_none());
3465 /// ```
3466 fn drop(&mut self) {
3467 let inner = if let Some(inner) = self.inner() { inner } else { return };
3468
3469 inner.dec_weak();
3470 // the weak count starts at 1, and will only go to zero if all
3471 // the strong pointers have disappeared.
3472 if inner.weak() == 0 {
3473 unsafe {
3474 self.alloc.deallocate(self.ptr.cast(), Layout::for_value_raw(self.ptr.as_ptr()));
3475 }
3476 }
3477 }
3478}
3479
3480#[stable(feature = "rc_weak", since = "1.4.0")]
3481impl<T: ?Sized, A: Allocator + Clone> Clone for Weak<T, A> {
3482 /// Makes a clone of the `Weak` pointer that points to the same allocation.
3483 ///
3484 /// # Examples
3485 ///
3486 /// ```
3487 /// use std::rc::{Rc, Weak};
3488 ///
3489 /// let weak_five = Rc::downgrade(&Rc::new(5));
3490 ///
3491 /// let _ = Weak::clone(&weak_five);
3492 /// ```
3493 #[inline]
3494 fn clone(&self) -> Weak<T, A> {
3495 if let Some(inner) = self.inner() {
3496 inner.inc_weak()
3497 }
3498 Weak { ptr: self.ptr, alloc: self.alloc.clone() }
3499 }
3500}
3501
3502#[unstable(feature = "ergonomic_clones", issue = "132290")]
3503impl<T: ?Sized, A: Allocator + Clone> UseCloned for Weak<T, A> {}
3504
3505#[stable(feature = "rc_weak", since = "1.4.0")]
3506impl<T: ?Sized, A: Allocator> fmt::Debug for Weak<T, A> {
3507 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
3508 write!(f, "(Weak)")
3509 }
3510}
3511
3512#[stable(feature = "downgraded_weak", since = "1.10.0")]
3513impl<T> Default for Weak<T> {
3514 /// Constructs a new `Weak<T>`, without allocating any memory.
3515 /// Calling [`upgrade`] on the return value always gives [`None`].
3516 ///
3517 /// [`upgrade`]: Weak::upgrade
3518 ///
3519 /// # Examples
3520 ///
3521 /// ```
3522 /// use std::rc::Weak;
3523 ///
3524 /// let empty: Weak<i64> = Default::default();
3525 /// assert!(empty.upgrade().is_none());
3526 /// ```
3527 fn default() -> Weak<T> {
3528 Weak::new()
3529 }
3530}
3531
// NOTE: If you mem::forget Rcs (or Weaks), drop is skipped and the ref-count
// is not decremented, meaning the ref-count can overflow. A wrapped-around
// count would let the allocation be freed while outstanding Rcs (or Weaks)
// still exist, which would be unsound. We abort because this is such a
// degenerate scenario that we don't care about what happens -- no real
// program should ever experience this.
3537//
3538// This should have negligible overhead since you don't actually need to
3539// clone these much in Rust thanks to ownership and move-semantics.
3540
3541#[doc(hidden)]
3542trait RcInnerPtr {
3543 fn weak_ref(&self) -> &Cell<usize>;
3544 fn strong_ref(&self) -> &Cell<usize>;
3545
3546 #[inline]
3547 fn strong(&self) -> usize {
3548 self.strong_ref().get()
3549 }
3550
3551 #[inline]
3552 fn inc_strong(&self) {
3553 let strong = self.strong();
3554
3555 // We insert an `assume` here to hint LLVM at an otherwise
3556 // missed optimization.
3557 // SAFETY: The reference count will never be zero when this is
3558 // called.
3559 unsafe {
3560 hint::assert_unchecked(strong != 0);
3561 }
3562
3563 let strong = strong.wrapping_add(1);
3564 self.strong_ref().set(strong);
3565
3566 // We want to abort on overflow instead of dropping the value.
3567 // Checking for overflow after the store instead of before
3568 // allows for slightly better code generation.
3569 if core::intrinsics::unlikely(strong == 0) {
3570 abort();
3571 }
3572 }
3573
3574 #[inline]
3575 fn dec_strong(&self) {
3576 self.strong_ref().set(self.strong() - 1);
3577 }
3578
3579 #[inline]
3580 fn weak(&self) -> usize {
3581 self.weak_ref().get()
3582 }
3583
3584 #[inline]
3585 fn inc_weak(&self) {
3586 let weak = self.weak();
3587
3588 // We insert an `assume` here to hint LLVM at an otherwise
3589 // missed optimization.
3590 // SAFETY: The reference count will never be zero when this is
3591 // called.
3592 unsafe {
3593 hint::assert_unchecked(weak != 0);
3594 }
3595
3596 let weak = weak.wrapping_add(1);
3597 self.weak_ref().set(weak);
3598
3599 // We want to abort on overflow instead of dropping the value.
3600 // Checking for overflow after the store instead of before
3601 // allows for slightly better code generation.
3602 if core::intrinsics::unlikely(weak == 0) {
3603 abort();
3604 }
3605 }
3606
3607 #[inline]
3608 fn dec_weak(&self) {
3609 self.weak_ref().set(self.weak() - 1);
3610 }
3611}
3612
3613impl<T: ?Sized> RcInnerPtr for RcInner<T> {
3614 #[inline(always)]
3615 fn weak_ref(&self) -> &Cell<usize> {
3616 &self.weak
3617 }
3618
3619 #[inline(always)]
3620 fn strong_ref(&self) -> &Cell<usize> {
3621 &self.strong
3622 }
3623}
3624
3625impl<'a> RcInnerPtr for WeakInner<'a> {
3626 #[inline(always)]
3627 fn weak_ref(&self) -> &Cell<usize> {
3628 self.weak
3629 }
3630
3631 #[inline(always)]
3632 fn strong_ref(&self) -> &Cell<usize> {
3633 self.strong
3634 }
3635}
3636
3637#[stable(feature = "rust1", since = "1.0.0")]
3638impl<T: ?Sized, A: Allocator> borrow::Borrow<T> for Rc<T, A> {
3639 fn borrow(&self) -> &T {
3640 &**self
3641 }
3642}
3643
3644#[stable(since = "1.5.0", feature = "smart_ptr_as_ref")]
3645impl<T: ?Sized, A: Allocator> AsRef<T> for Rc<T, A> {
3646 fn as_ref(&self) -> &T {
3647 &**self
3648 }
3649}
3650
3651#[stable(feature = "pin", since = "1.33.0")]
3652impl<T: ?Sized, A: Allocator> Unpin for Rc<T, A> {}
3653
3654/// Gets the offset within an `RcInner` for the payload behind a pointer.
3655///
3656/// # Safety
3657///
3658/// The pointer must point to (and have valid metadata for) a previously
3659/// valid instance of T, but the T is allowed to be dropped.
3660unsafe fn data_offset<T: ?Sized>(ptr: *const T) -> usize {
3661 // Align the unsized value to the end of the RcInner.
3662 // Because RcInner is repr(C), it will always be the last field in memory.
3663 // SAFETY: since the only unsized types possible are slices, trait objects,
3664 // and extern types, the input safety requirement is currently enough to
3665 // satisfy the requirements of align_of_val_raw; this is an implementation
3666 // detail of the language that must not be relied upon outside of std.
3667 unsafe { data_offset_align(align_of_val_raw(ptr)) }
3668}
3669
3670#[inline]
3671fn data_offset_align(align: usize) -> usize {
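    // The payload lives after the strong/weak counters (`RcInner` is `repr(C)`), so its offset
    // is the size of the header (`RcInner<()>`) rounded up to the payload's alignment.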
3672 let layout = Layout::new::<RcInner<()>>();
3673 layout.size() + layout.padding_needed_for(align)
3674}
3675
3676/// A uniquely owned [`Rc`].
3677///
/// This represents an `Rc` that is known to be uniquely owned -- that is, to have exactly one
/// strong reference. Multiple weak pointers can be created, but attempts to upgrade those to
/// strong references will fail unless the `UniqueRc` they point to has been converted into a
/// regular `Rc`.
3681///
/// Because they are uniquely owned, the contents of a `UniqueRc` can be freely mutated. A common
/// use case is to have an object be mutable during its initialization phase and then have it
/// become immutable by converting it into a normal `Rc`.
3685///
3686/// This can be used as a flexible way to create cyclic data structures, as in the example below.
3687///
3688/// ```
3689/// #![feature(unique_rc_arc)]
3690/// use std::rc::{Rc, Weak, UniqueRc};
3691///
3692/// struct Gadget {
3693/// #[allow(dead_code)]
3694/// me: Weak<Gadget>,
3695/// }
3696///
3697/// fn create_gadget() -> Option<Rc<Gadget>> {
3698/// let mut rc = UniqueRc::new(Gadget {
3699/// me: Weak::new(),
3700/// });
3701/// rc.me = UniqueRc::downgrade(&rc);
3702/// Some(UniqueRc::into_rc(rc))
3703/// }
3704///
3705/// create_gadget().unwrap();
3706/// ```
3707///
3708/// An advantage of using `UniqueRc` over [`Rc::new_cyclic`] to build cyclic data structures is that
3709/// [`Rc::new_cyclic`]'s `data_fn` parameter cannot be async or return a [`Result`]. As shown in the
3710/// previous example, `UniqueRc` allows for more flexibility in the construction of cyclic data,
3711/// including fallible or async constructors.
3712#[unstable(feature = "unique_rc_arc", issue = "112566")]
3713pub struct UniqueRc<
3714 T: ?Sized,
3715 #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
3716> {
3717 ptr: NonNull<RcInner<T>>,
3718 // Define the ownership of `RcInner<T>` for drop-check
3719 _marker: PhantomData<RcInner<T>>,
3720 // Invariance is necessary for soundness: once other `Weak`
3721 // references exist, we already have a form of shared mutability!
3722 _marker2: PhantomData<*mut T>,
3723 alloc: A,
3724}
3725
3726// Not necessary for correctness since `UniqueRc` contains `NonNull`,
3727// but having an explicit negative impl is nice for documentation purposes
3728// and results in nicer error messages.
3729#[unstable(feature = "unique_rc_arc", issue = "112566")]
3730impl<T: ?Sized, A: Allocator> !Send for UniqueRc<T, A> {}
3731
3732// Not necessary for correctness since `UniqueRc` contains `NonNull`,
3733// but having an explicit negative impl is nice for documentation purposes
3734// and results in nicer error messages.
3735#[unstable(feature = "unique_rc_arc", issue = "112566")]
3736impl<T: ?Sized, A: Allocator> !Sync for UniqueRc<T, A> {}
3737
3738#[unstable(feature = "unique_rc_arc", issue = "112566")]
3739impl<T: ?Sized + Unsize<U>, U: ?Sized, A: Allocator> CoerceUnsized<UniqueRc<U, A>>
3740 for UniqueRc<T, A>
3741{
3742}
3743
3744//#[unstable(feature = "unique_rc_arc", issue = "112566")]
3745#[unstable(feature = "dispatch_from_dyn", issue = "none")]
3746impl<T: ?Sized + Unsize<U>, U: ?Sized> DispatchFromDyn<UniqueRc<U>> for UniqueRc<T> {}
3747
3748#[unstable(feature = "unique_rc_arc", issue = "112566")]
3749impl<T: ?Sized + fmt::Display, A: Allocator> fmt::Display for UniqueRc<T, A> {
3750 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
3751 fmt::Display::fmt(&**self, f)
3752 }
3753}
3754
3755#[unstable(feature = "unique_rc_arc", issue = "112566")]
3756impl<T: ?Sized + fmt::Debug, A: Allocator> fmt::Debug for UniqueRc<T, A> {
3757 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
3758 fmt::Debug::fmt(&**self, f)
3759 }
3760}
3761
3762#[unstable(feature = "unique_rc_arc", issue = "112566")]
3763impl<T: ?Sized, A: Allocator> fmt::Pointer for UniqueRc<T, A> {
3764 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
3765 fmt::Pointer::fmt(&(&raw const **self), f)
3766 }
3767}
3768
3769#[unstable(feature = "unique_rc_arc", issue = "112566")]
3770impl<T: ?Sized, A: Allocator> borrow::Borrow<T> for UniqueRc<T, A> {
3771 fn borrow(&self) -> &T {
3772 &**self
3773 }
3774}
3775
3776#[unstable(feature = "unique_rc_arc", issue = "112566")]
3777impl<T: ?Sized, A: Allocator> borrow::BorrowMut<T> for UniqueRc<T, A> {
3778 fn borrow_mut(&mut self) -> &mut T {
3779 &mut **self
3780 }
3781}
3782
3783#[unstable(feature = "unique_rc_arc", issue = "112566")]
3784impl<T: ?Sized, A: Allocator> AsRef<T> for UniqueRc<T, A> {
3785 fn as_ref(&self) -> &T {
3786 &**self
3787 }
3788}
3789
3790#[unstable(feature = "unique_rc_arc", issue = "112566")]
3791impl<T: ?Sized, A: Allocator> AsMut<T> for UniqueRc<T, A> {
3792 fn as_mut(&mut self) -> &mut T {
3793 &mut **self
3794 }
3795}
3796
3797#[unstable(feature = "unique_rc_arc", issue = "112566")]
3798impl<T: ?Sized, A: Allocator> Unpin for UniqueRc<T, A> {}
3799
3800#[unstable(feature = "unique_rc_arc", issue = "112566")]
3801impl<T: ?Sized + PartialEq, A: Allocator> PartialEq for UniqueRc<T, A> {
3802 /// Equality for two `UniqueRc`s.
3803 ///
3804 /// Two `UniqueRc`s are equal if their inner values are equal.
3805 ///
3806 /// # Examples
3807 ///
3808 /// ```
3809 /// #![feature(unique_rc_arc)]
3810 /// use std::rc::UniqueRc;
3811 ///
3812 /// let five = UniqueRc::new(5);
3813 ///
3814 /// assert!(five == UniqueRc::new(5));
3815 /// ```
3816 #[inline]
3817 fn eq(&self, other: &Self) -> bool {
3818 PartialEq::eq(&**self, &**other)
3819 }
3820
3821 /// Inequality for two `UniqueRc`s.
3822 ///
3823 /// Two `UniqueRc`s are not equal if their inner values are not equal.
3824 ///
3825 /// # Examples
3826 ///
3827 /// ```
3828 /// #![feature(unique_rc_arc)]
3829 /// use std::rc::UniqueRc;
3830 ///
3831 /// let five = UniqueRc::new(5);
3832 ///
3833 /// assert!(five != UniqueRc::new(6));
3834 /// ```
3835 #[inline]
3836 fn ne(&self, other: &Self) -> bool {
3837 PartialEq::ne(&**self, &**other)
3838 }
3839}
3840
3841#[unstable(feature = "unique_rc_arc", issue = "112566")]
3842impl<T: ?Sized + PartialOrd, A: Allocator> PartialOrd for UniqueRc<T, A> {
3843 /// Partial comparison for two `UniqueRc`s.
3844 ///
3845 /// The two are compared by calling `partial_cmp()` on their inner values.
3846 ///
3847 /// # Examples
3848 ///
3849 /// ```
3850 /// #![feature(unique_rc_arc)]
3851 /// use std::rc::UniqueRc;
3852 /// use std::cmp::Ordering;
3853 ///
3854 /// let five = UniqueRc::new(5);
3855 ///
3856 /// assert_eq!(Some(Ordering::Less), five.partial_cmp(&UniqueRc::new(6)));
3857 /// ```
3858 #[inline(always)]
3859 fn partial_cmp(&self, other: &UniqueRc<T, A>) -> Option<Ordering> {
3860 (**self).partial_cmp(&**other)
3861 }
3862
3863 /// Less-than comparison for two `UniqueRc`s.
3864 ///
3865 /// The two are compared by calling `<` on their inner values.
3866 ///
3867 /// # Examples
3868 ///
3869 /// ```
3870 /// #![feature(unique_rc_arc)]
3871 /// use std::rc::UniqueRc;
3872 ///
3873 /// let five = UniqueRc::new(5);
3874 ///
3875 /// assert!(five < UniqueRc::new(6));
3876 /// ```
3877 #[inline(always)]
3878 fn lt(&self, other: &UniqueRc<T, A>) -> bool {
3879 **self < **other
3880 }
3881
3882 /// 'Less than or equal to' comparison for two `UniqueRc`s.
3883 ///
3884 /// The two are compared by calling `<=` on their inner values.
3885 ///
3886 /// # Examples
3887 ///
3888 /// ```
3889 /// #![feature(unique_rc_arc)]
3890 /// use std::rc::UniqueRc;
3891 ///
3892 /// let five = UniqueRc::new(5);
3893 ///
3894 /// assert!(five <= UniqueRc::new(5));
3895 /// ```
3896 #[inline(always)]
3897 fn le(&self, other: &UniqueRc<T, A>) -> bool {
3898 **self <= **other
3899 }
3900
3901 /// Greater-than comparison for two `UniqueRc`s.
3902 ///
3903 /// The two are compared by calling `>` on their inner values.
3904 ///
3905 /// # Examples
3906 ///
3907 /// ```
3908 /// #![feature(unique_rc_arc)]
3909 /// use std::rc::UniqueRc;
3910 ///
3911 /// let five = UniqueRc::new(5);
3912 ///
3913 /// assert!(five > UniqueRc::new(4));
3914 /// ```
3915 #[inline(always)]
3916 fn gt(&self, other: &UniqueRc<T, A>) -> bool {
3917 **self > **other
3918 }
3919
3920 /// 'Greater than or equal to' comparison for two `UniqueRc`s.
3921 ///
3922 /// The two are compared by calling `>=` on their inner values.
3923 ///
3924 /// # Examples
3925 ///
3926 /// ```
3927 /// #![feature(unique_rc_arc)]
3928 /// use std::rc::UniqueRc;
3929 ///
3930 /// let five = UniqueRc::new(5);
3931 ///
3932 /// assert!(five >= UniqueRc::new(5));
3933 /// ```
3934 #[inline(always)]
3935 fn ge(&self, other: &UniqueRc<T, A>) -> bool {
3936 **self >= **other
3937 }
3938}
3939
3940#[unstable(feature = "unique_rc_arc", issue = "112566")]
3941impl<T: ?Sized + Ord, A: Allocator> Ord for UniqueRc<T, A> {
3942 /// Comparison for two `UniqueRc`s.
3943 ///
3944 /// The two are compared by calling `cmp()` on their inner values.
3945 ///
3946 /// # Examples
3947 ///
3948 /// ```
3949 /// #![feature(unique_rc_arc)]
3950 /// use std::rc::UniqueRc;
3951 /// use std::cmp::Ordering;
3952 ///
3953 /// let five = UniqueRc::new(5);
3954 ///
3955 /// assert_eq!(Ordering::Less, five.cmp(&UniqueRc::new(6)));
3956 /// ```
3957 #[inline]
3958 fn cmp(&self, other: &UniqueRc<T, A>) -> Ordering {
3959 (**self).cmp(&**other)
3960 }
3961}
3962
3963#[unstable(feature = "unique_rc_arc", issue = "112566")]
3964impl<T: ?Sized + Eq, A: Allocator> Eq for UniqueRc<T, A> {}
3965
3966#[unstable(feature = "unique_rc_arc", issue = "112566")]
3967impl<T: ?Sized + Hash, A: Allocator> Hash for UniqueRc<T, A> {
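    /// Hashes the inner value.
    ///
    /// A minimal sketch showing that hashing delegates to the inner value (`DefaultHasher` is
    /// used here purely for illustration):
    ///
    /// ```
    /// #![feature(unique_rc_arc)]
    /// use std::hash::{DefaultHasher, Hash, Hasher};
    /// use std::rc::UniqueRc;
    ///
    /// let five = UniqueRc::new(5);
    ///
    /// let mut unique_hasher = DefaultHasher::new();
    /// five.hash(&mut unique_hasher);
    ///
    /// let mut plain_hasher = DefaultHasher::new();
    /// 5.hash(&mut plain_hasher);
    ///
    /// assert_eq!(unique_hasher.finish(), plain_hasher.finish());
    /// ```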
3968 fn hash<H: Hasher>(&self, state: &mut H) {
3969 (**self).hash(state);
3970 }
3971}
3972
3973// Depends on A = Global
3974impl<T> UniqueRc<T> {
3975 /// Creates a new `UniqueRc`.
3976 ///
3977 /// Weak references to this `UniqueRc` can be created with [`UniqueRc::downgrade`]. Upgrading
3978 /// these weak references will fail before the `UniqueRc` has been converted into an [`Rc`].
3979 /// After converting the `UniqueRc` into an [`Rc`], any weak references created beforehand will
3980 /// point to the new [`Rc`].
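    ///
    /// # Examples
    ///
    /// A minimal sketch of the intended flow (mutate while uniquely owned, then freeze into an
    /// [`Rc`]):
    ///
    /// ```
    /// #![feature(unique_rc_arc)]
    /// use std::rc::UniqueRc;
    ///
    /// let mut rc = UniqueRc::new(5);
    /// // The contents can be mutated freely while uniquely owned.
    /// *rc += 1;
    ///
    /// let rc = UniqueRc::into_rc(rc);
    /// assert_eq!(*rc, 6);
    /// ```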
3981 #[cfg(not(no_global_oom_handling))]
3982 #[unstable(feature = "unique_rc_arc", issue = "112566")]
3983 pub fn new(value: T) -> Self {
3984 Self::new_in(value, Global)
3985 }
3986}
3987
3988impl<T, A: Allocator> UniqueRc<T, A> {
3989 /// Creates a new `UniqueRc` in the provided allocator.
3990 ///
3991 /// Weak references to this `UniqueRc` can be created with [`UniqueRc::downgrade`]. Upgrading
3992 /// these weak references will fail before the `UniqueRc` has been converted into an [`Rc`].
3993 /// After converting the `UniqueRc` into an [`Rc`], any weak references created beforehand will
3994 /// point to the new [`Rc`].
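    ///
    /// # Examples
    ///
    /// A minimal sketch that names the global allocator explicitly (this also assumes the
    /// unstable `allocator_api` feature):
    ///
    /// ```
    /// #![feature(unique_rc_arc, allocator_api)]
    /// use std::alloc::Global;
    /// use std::rc::UniqueRc;
    ///
    /// let rc = UniqueRc::new_in(5, Global);
    /// assert_eq!(*rc, 5);
    /// ```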
3995 #[cfg(not(no_global_oom_handling))]
3996 #[unstable(feature = "unique_rc_arc", issue = "112566")]
3997 pub fn new_in(value: T, alloc: A) -> Self {
3998 let (ptr, alloc) = Box::into_unique(Box::new_in(
3999 RcInner {
4000 strong: Cell::new(0),
                // keep one weak reference so that the allocation stays valid while the
                // `UniqueRc` exists, even if every weak pointer created from it is dropped.
4003 weak: Cell::new(1),
4004 value,
4005 },
4006 alloc,
4007 ));
4008 Self { ptr: ptr.into(), _marker: PhantomData, _marker2: PhantomData, alloc }
4009 }
4010}
4011
4012impl<T: ?Sized, A: Allocator> UniqueRc<T, A> {
4013 /// Converts the `UniqueRc` into a regular [`Rc`].
4014 ///
    /// This consumes the `UniqueRc` and returns a regular [`Rc`] that contains the value held
    /// by the `UniqueRc` passed to `into_rc`.
4017 ///
4018 /// Any weak references created before this method is called can now be upgraded to strong
4019 /// references.
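    ///
    /// # Examples
    ///
    /// A minimal sketch of the conversion and its effect on weak references:
    ///
    /// ```
    /// #![feature(unique_rc_arc)]
    /// use std::rc::UniqueRc;
    ///
    /// let unique = UniqueRc::new(String::from("hello"));
    /// let weak = UniqueRc::downgrade(&unique);
    /// // Upgrading fails while the value is still uniquely owned.
    /// assert!(weak.upgrade().is_none());
    ///
    /// let rc = UniqueRc::into_rc(unique);
    /// // After the conversion, the same weak reference can be upgraded.
    /// assert!(weak.upgrade().is_some());
    /// assert_eq!(*rc, "hello");
    /// ```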
4020 #[unstable(feature = "unique_rc_arc", issue = "112566")]
4021 pub fn into_rc(this: Self) -> Rc<T, A> {
4022 let mut this = ManuallyDrop::new(this);
4023
4024 // Move the allocator out.
4025 // SAFETY: `this.alloc` will not be accessed again, nor dropped because it is in
4026 // a `ManuallyDrop`.
4027 let alloc: A = unsafe { ptr::read(&this.alloc) };
4028
4029 // SAFETY: This pointer was allocated at creation time so we know it is valid.
4030 unsafe {
4031 // Convert our weak reference into a strong reference
4032 this.ptr.as_mut().strong.set(1);
4033 Rc::from_inner_in(this.ptr, alloc)
4034 }
4035 }
4036}
4037
4038impl<T: ?Sized, A: Allocator + Clone> UniqueRc<T, A> {
4039 /// Creates a new weak reference to the `UniqueRc`.
4040 ///
    /// Attempting to upgrade this weak reference will fail before the `UniqueRc` has been
    /// converted into an [`Rc`] using [`UniqueRc::into_rc`].
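    ///
    /// # Examples
    ///
    /// A minimal sketch (upgrading only succeeds after the conversion to [`Rc`]):
    ///
    /// ```
    /// #![feature(unique_rc_arc)]
    /// use std::rc::UniqueRc;
    ///
    /// let unique = UniqueRc::new(5);
    /// let weak = UniqueRc::downgrade(&unique);
    /// assert!(weak.upgrade().is_none());
    ///
    /// let rc = UniqueRc::into_rc(unique);
    /// assert_eq!(*weak.upgrade().unwrap(), 5);
    /// ```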
4043 #[unstable(feature = "unique_rc_arc", issue = "112566")]
4044 pub fn downgrade(this: &Self) -> Weak<T, A> {
4045 // SAFETY: This pointer was allocated at creation time and we guarantee that we only have
4046 // one strong reference before converting to a regular Rc.
4047 unsafe {
4048 this.ptr.as_ref().inc_weak();
4049 }
4050 Weak { ptr: this.ptr, alloc: this.alloc.clone() }
4051 }
4052}
4053
4054#[unstable(feature = "unique_rc_arc", issue = "112566")]
4055impl<T: ?Sized, A: Allocator> Deref for UniqueRc<T, A> {
4056 type Target = T;
4057
4058 fn deref(&self) -> &T {
4059 // SAFETY: This pointer was allocated at creation time so we know it is valid.
4060 unsafe { &self.ptr.as_ref().value }
4061 }
4062}
4063
4064#[unstable(feature = "unique_rc_arc", issue = "112566")]
4065impl<T: ?Sized, A: Allocator> DerefMut for UniqueRc<T, A> {
4066 fn deref_mut(&mut self) -> &mut T {
        // SAFETY: This pointer was allocated at creation time so we know it is valid. We have
        // unique ownership, so it's safe to hand out a mutable reference: a `UniqueRc` is the
        // sole owner of the allocation, no strong references exist yet, and outstanding weak
        // references cannot be upgraded.
4070 unsafe { &mut (*self.ptr.as_ptr()).value }
4071 }
4072}
4073
4074#[unstable(feature = "unique_rc_arc", issue = "112566")]
4075unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for UniqueRc<T, A> {
4076 fn drop(&mut self) {
4077 unsafe {
4078 // destroy the contained object
4079 drop_in_place(DerefMut::deref_mut(self));
4080
4081 // remove the implicit "strong weak" pointer now that we've destroyed the contents.
4082 self.ptr.as_ref().dec_weak();
4083
4084 if self.ptr.as_ref().weak() == 0 {
4085 self.alloc.deallocate(self.ptr.cast(), Layout::for_value_raw(self.ptr.as_ptr()));
4086 }
4087 }
4088 }
4089}
4090
4091/// A unique owning pointer to a [`RcInner`] **that does not imply the contents are initialized,**
4092/// but will deallocate it (without dropping the value) when dropped.
4093///
4094/// This is a helper for [`Rc::make_mut()`] to ensure correct cleanup on panic.
4095/// It is nearly a duplicate of `UniqueRc<MaybeUninit<T>, A>` except that it allows `T: !Sized`,
4096/// which `MaybeUninit` does not.
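///
/// Intended flow (a sketch of how `Rc::make_mut` is expected to use this): allocate with `new`,
/// write the cloned value through `data_ptr()`, then call `into_rc()`. If the clone panics in
/// between, `Drop` deallocates the buffer without attempting to drop the uninitialized value.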
4097#[cfg(not(no_global_oom_handling))]
4098struct UniqueRcUninit<T: ?Sized, A: Allocator> {
4099 ptr: NonNull<RcInner<T>>,
4100 layout_for_value: Layout,
4101 alloc: Option<A>,
4102}
4103
4104#[cfg(not(no_global_oom_handling))]
4105impl<T: ?Sized, A: Allocator> UniqueRcUninit<T, A> {
    /// Allocates an `RcInner` with a layout suitable to contain `for_value` or a clone of it.
4107 fn new(for_value: &T, alloc: A) -> UniqueRcUninit<T, A> {
4108 let layout = Layout::for_value(for_value);
4109 let ptr = unsafe {
4110 Rc::allocate_for_layout(
4111 layout,
4112 |layout_for_rc_inner| alloc.allocate(layout_for_rc_inner),
4113 |mem| mem.with_metadata_of(ptr::from_ref(for_value) as *const RcInner<T>),
4114 )
4115 };
4116 Self { ptr: NonNull::new(ptr).unwrap(), layout_for_value: layout, alloc: Some(alloc) }
4117 }
4118
    /// Returns the pointer to write the value into in order to initialize the [`Rc`].
4120 fn data_ptr(&mut self) -> *mut T {
4121 let offset = data_offset_align(self.layout_for_value.align());
4122 unsafe { self.ptr.as_ptr().byte_add(offset) as *mut T }
4123 }
4124
4125 /// Upgrade this into a normal [`Rc`].
4126 ///
4127 /// # Safety
4128 ///
4129 /// The data must have been initialized (by writing to [`Self::data_ptr()`]).
4130 unsafe fn into_rc(self) -> Rc<T, A> {
4131 let mut this = ManuallyDrop::new(self);
4132 let ptr = this.ptr;
4133 let alloc = this.alloc.take().unwrap();
4134
4135 // SAFETY: The pointer is valid as per `UniqueRcUninit::new`, and the caller is responsible
4136 // for having initialized the data.
4137 unsafe { Rc::from_ptr_in(ptr.as_ptr(), alloc) }
4138 }
4139}
4140
4141#[cfg(not(no_global_oom_handling))]
4142impl<T: ?Sized, A: Allocator> Drop for UniqueRcUninit<T, A> {
4143 fn drop(&mut self) {
4144 // SAFETY:
4145 // * new() produced a pointer safe to deallocate.
4146 // * We own the pointer unless into_rc() was called, which forgets us.
4147 unsafe {
4148 self.alloc.take().unwrap().deallocate(
4149 self.ptr.cast(),
4150 rc_inner_layout_for_value_layout(self.layout_for_value),
4151 );
4152 }
4153 }
4154}