From eb7eb6215110324df393bff089603e79629deb2d Mon Sep 17 00:00:00 2001
From: Kevin Robert Stravers
Date: Sun, 4 May 2025 12:40:29 -0400
Subject: [PATCH] Implement RcUninit (#112566)

RcUninit was discussed in
https://github.com/rust-lang/libs-team/issues/90
https://internals.rust-lang.org/t/an-alternative-to-rc-new-cyclic/22849/6
https://github.com/rust-lang/rust/issues/112566

RcUninit allows the user to construct cyclic data structures without using
`Rc::new_cyclic`, which allows cyclic constructions across await points. It
also allows you to create long linked lists without overflowing the stack.

This is an alternative to `UniqueRc`. While `UniqueRc` does allow for cyclic
data structures to be created, it must be done by mutating the UniqueRc.
Mutation is prone to creating reference cycles. Construction-only assignment
of fields, without any mutation to "set" the struct afterwards cannot generate
reference cycles. It's also more cumbersome to work with. For instance, if we
have objects A, B, and C, and we want these to connect as `A => B => C -> A`
(`=>` being strong, `->` being weak), then we must do something along the
following lines.

    let mut a_uniq = UniqueRc::new(A::new());
    let a_weak = UniqueRc::downgrade(&a_uniq);
    let c = Rc::new(C::new(a_weak));
    let b = Rc::new(B::new(c));
    a_uniq.set_b(b);
    let a = a_uniq.into_rc();

To implement `A::set_b`, the field `A::b` must either be

- `Option<Rc<B>>`: Requiring unwrap/clone for each access.
- `MaybeUninit<Rc<B>>`: Requiring unsafe.
- `Weak<B>`: Requiring upgrade for every access.

The above also makes it easier to make mistakes in more complex programs where
we don't have the full picture. It is not hard to change the above into
`Rc<RefCell<A>>`, and then provide this pointer to `C`, which would cause a
reference cycle to be created once `a.borrow_mut().set_b(b)` gets called. On
the other hand RcUninit doesn't have this problem, since initialization is
deferred. The equivalent would look like the following.
    let a_uninit = RcUninit::new();
    let b_uninit = RcUninit::new();
    let c_uninit = RcUninit::new();
    let c = c_uninit.init(C::new(a_uninit.weak()));
    let b = b_uninit.init(B::new(c));
    let a = a_uninit.init(b);

This creates the structure (A => B => C -> A)
---
 library/alloc/src/rc.rs        | 82 ++++++++++++++++++++++++++++++++++
 library/alloctests/tests/rc.rs | 26 ++++++++++-
 2 files changed, 107 insertions(+), 1 deletion(-)

diff --git a/library/alloc/src/rc.rs b/library/alloc/src/rc.rs
index 619d9f258e342..b7116f2914685 100644
--- a/library/alloc/src/rc.rs
+++ b/library/alloc/src/rc.rs
@@ -4159,3 +4159,85 @@ impl<T, A: Allocator> Drop for UniqueRcUninit<T, A> {
         }
     }
 }
+
+/// An uninitialized Rc that allows deferred construction whilst exposing weak pointers before
+/// being constructed.
+///
+/// Weak pointers will return `None` on `upgrade` as long as [RcUninit::init] has not been called.
+#[unstable(feature = "unique_rc_arc", issue = "112566")]
+#[cfg(not(no_global_oom_handling))]
+pub struct RcUninit<
+    T,
+    #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
+> {
+    ptr: NonNull<RcInner<T>>,
+    weak: Weak<T, A>,
+}
+
+impl<T> RcUninit<T> {
+    /// Creates new RcUninit.
+    #[unstable(feature = "unique_rc_arc", issue = "112566")]
+    pub fn new() -> Self {
+        Self::new_in(Global)
+    }
+}
+
+impl<T, A: Allocator + Clone> RcUninit<T, A> {
+    /// Creates new RcUninit.
+    #[unstable(feature = "unique_rc_arc", issue = "112566")]
+    pub fn new_in(alloc: A) -> Self {
+        let ptr = unsafe {
+            Rc::allocate_for_layout(
+                Layout::new::<T>(),
+                |layout| alloc.allocate(layout),
+                <*mut u8>::cast,
+            )
+        };
+
+        unsafe {
+            (*ptr).strong.set(0);
+            (*ptr).weak.set(2);
+        };
+        let ptr = NonNull::new(ptr).unwrap();
+
+        Self { ptr, weak: Weak { ptr, alloc } }
+    }
+
+    /// Get a weak reference.
+    #[unstable(feature = "unique_rc_arc", issue = "112566")]
+    pub fn weak(&self) -> &Weak<T, A> {
+        &self.weak
+    }
+
+    /// Write a value and return Rc.
+    #[unstable(feature = "unique_rc_arc", issue = "112566")]
+    pub fn init(self, value: T) -> Rc<T, A> {
+        unsafe {
+            let ptr = self.weak.ptr.as_ptr();
+            (*ptr).strong.set(1);
+            let weak = &(*ptr).weak;
+            weak.set(weak.get() - 1);
+            ptr::write(&raw mut (*ptr).value, value);
+        }
+
+        let ptr = self.ptr;
+        let alloc = self.weak.alloc.clone();
+        mem::forget(self);
+
+        Rc { ptr, phantom: PhantomData, alloc }
+    }
+}
+
+#[unstable(feature = "unique_rc_arc", issue = "112566")]
+impl<T, A: Allocator> fmt::Debug for RcUninit<T, A> {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        write!(f, "(RcUninit)")
+    }
+}
+
+#[unstable(feature = "unique_rc_arc", issue = "112566")]
+impl<T, A: Allocator> Drop for RcUninit<T, A> {
+    fn drop(&mut self) {
+        unsafe { Rc::from_inner(self.ptr) };
+    }
+}
diff --git a/library/alloctests/tests/rc.rs b/library/alloctests/tests/rc.rs
index bb68eb4ac9e3d..b0209050ac4ce 100644
--- a/library/alloctests/tests/rc.rs
+++ b/library/alloctests/tests/rc.rs
@@ -2,7 +2,7 @@ use std::any::Any;
 use std::cell::{Cell, RefCell};
 use std::iter::TrustedLen;
 use std::mem;
-use std::rc::{Rc, UniqueRc, Weak};
+use std::rc::{Rc, RcUninit, UniqueRc, Weak};
 
 #[test]
 fn uninhabited() {
@@ -922,3 +922,27 @@ fn test_unique_rc_unsizing_coercion() {
     let rc: Rc<[u8]> = UniqueRc::into_rc(rc);
     assert_eq!(*rc, [123, 0, 0]);
 }
+
+#[test]
+fn test_rc_uninit() {
+    RcUninit::<()>::new();
+    RcUninit::<u32>::new();
+    RcUninit::<String>::new();
+}
+
+#[test]
+fn test_rc_uninit_init() {
+    let x: RcUninit<i32> = RcUninit::new();
+    assert_eq!(Weak::strong_count(x.weak()), 0);
+    assert_eq!(Weak::weak_count(x.weak()), 0);
+    let weak = x.weak().clone();
+    assert!(weak.upgrade().is_none());
+
+    let rc = x.init(123);
+    assert_eq!(Rc::strong_count(&rc), 1);
+    assert_eq!(Rc::weak_count(&rc), 1);
+
+    assert_eq!(*rc, 123);
+    assert_eq!(weak.upgrade().map(|x| *x), Some(123));
+    assert!(Rc::ptr_eq(&weak.upgrade().unwrap(), &rc));
+}