@@ -612,21 +612,10 @@ impl<T> Rc<T> {
     #[inline]
     #[stable(feature = "rc_unique", since = "1.4.0")]
     pub fn try_unwrap(this: Self) -> Result<T, Self> {
-        if Rc::strong_count(&this) == 1 {
-            unsafe {
-                let val = ptr::read(&*this); // copy the contained object
-
-                // Indicate to Weaks that they can't be promoted by decrementing
-                // the strong count, and then remove the implicit "strong weak"
-                // pointer while also handling drop logic by just crafting a
-                // fake Weak.
-                this.inner().dec_strong();
-                let _weak = Weak { ptr: this.ptr };
-                forget(this);
-                Ok(val)
-            }
-        } else {
-            Err(this)
+        let weak = Self::leak_as_owning_weak(this)?;
+        unsafe {
+            let val = ptr::read(weak.as_ptr()); // copy the contained object
+            Ok(val)
         }
     }
 }
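
For context, a minimal sketch (not part of the diff) of the `try_unwrap` contract this refactor preserves, using only stable `Rc` APIs:

    use std::rc::Rc;

    fn main() {
        // Succeeds only while exactly one strong reference exists.
        let unique = Rc::new(3);
        assert_eq!(Rc::try_unwrap(unique), Ok(3));

        // A second strong reference makes it hand the same Rc back as Err.
        let shared = Rc::new(4);
        let _also_shared = Rc::clone(&shared);
        assert_eq!(*Rc::try_unwrap(shared).unwrap_err(), 4);

        // Outstanding weak references do not block unwrapping...
        let strong = Rc::new(5);
        let weak = Rc::downgrade(&strong);
        assert_eq!(Rc::try_unwrap(strong), Ok(5));
        // ...and afterwards they can no longer be upgraded.
        assert!(weak.upgrade().is_none());
    }
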
@@ -997,6 +986,72 @@ impl<T: ?Sized> Rc<T> {
         unsafe { mem::drop(Rc::from_raw(ptr)) };
     }
 
+    /// Reduces the strong count, if the `Rc` has exactly one strong reference.
+    ///
+    /// Otherwise, an [`Err`] is returned with the same `Rc` that was passed in.
+    ///
+    /// This will succeed even if there are outstanding weak references.
+    ///
+    /// After this operation succeeds, no more strong references to the allocation can be created,
+    /// making the caller the owner of the contained value. This returns a `Weak` that manages the
+    /// allocation while the caller can (unsafely) take advantage of their ownership. In contrast
+    /// to `try_unwrap`, this also works for unsized pointees.
+    fn leak_as_owning_weak(this: Self) -> Result<Weak<T>, Self> {
+        if Rc::strong_count(&this) == 1 {
+            // Indicate to Weaks that they can't be promoted by decrementing
+            // the strong count, and then produce the implicit "strong weak"
+            // pointer that still handles dropping of the allocation.
+            this.inner().dec_strong();
+            let this = mem::ManuallyDrop::new(this);
+            let weak = Weak { ptr: this.ptr };
+            // Return the 'fake weak'.
+            Ok(weak)
+        } else {
+            Err(this)
+        }
+    }
+
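
The helper's ownership argument relies on the invariant that a `Weak` can no longer be promoted once the strong count has reached zero. A small stable-API illustration of that invariant (not part of the diff; unlike the helper, it also drops the value):

    use std::rc::Rc;

    fn main() {
        let strong = Rc::new(String::from("owned"));
        let weak = Rc::downgrade(&strong);

        // While a strong reference exists, the Weak can still be upgraded.
        assert!(weak.upgrade().is_some());

        // Once the strong count reaches zero, no new strong references can be
        // created, which is what makes the remaining holder the sole owner.
        drop(strong);
        assert!(weak.upgrade().is_none());
    }
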
+    /// Returns the boxed inner value, if the `Rc` has exactly one strong reference.
+    ///
+    /// Otherwise, an [`Err`] is returned with the same `Rc` that was passed in.
+    ///
+    /// This will succeed even if there are outstanding weak references.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(unwrap_rc_as_box)]
+    ///
+    /// use std::rc::Rc;
+    ///
+    /// let x: Rc<str> = Rc::from("Hello, world");
+    /// assert!(matches!(
+    ///     Rc::try_unwrap_as_box(x),
+    ///     Ok(b) if &b[..2] == "He"
+    /// ));
+    /// ```
+    #[cfg(not(no_global_oom_handling))]
+    #[unstable(feature = "unwrap_rc_as_box", reason = "recently added", issue = "none")]
+    pub fn try_unwrap_as_box(this: Self) -> Result<Box<T>, Self> {
+        let owning_weak = Self::leak_as_owning_weak(this)?;
+        let src_ptr = owning_weak.as_ptr();
+
+        unsafe {
+            // We 'own' this value right now, so it is still initialized.
+            let size = mem::size_of_val(&*src_ptr);
+            // The raw allocation for our Box. From this point on we must not panic,
+            // as otherwise we would leak this memory. We can't use MaybeUninit here
+            // because that is only valid for sized types.
+            let raw_box = Box::<T>::allocate_for_ptr(&Global, src_ptr);
+
+            // This is a new allocation, so it cannot overlap with the one that
+            // `owning_weak` is still holding onto.
+            ptr::copy_nonoverlapping(src_ptr as *const u8, raw_box as *mut u8, size);
+
+            Ok(Box::from_raw(raw_box))
+        }
+    }
+
     /// Returns `true` if there are no other `Rc` or [`Weak`] pointers to
     /// this allocation.
     #[inline]
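
A usage sketch of the new method, assuming the `unwrap_rc_as_box` feature gate introduced in this diff (illustrative only):

    #![feature(unwrap_rc_as_box)]

    use std::rc::Rc;

    fn main() {
        // Works for unsized pointees such as `str`, which `try_unwrap` cannot handle.
        let unique: Rc<str> = Rc::from("Hello, world");
        let weak = Rc::downgrade(&unique);

        // With exactly one strong reference the value is moved into a fresh Box,
        // even though a Weak is still outstanding.
        let boxed: Box<str> = Rc::try_unwrap_as_box(unique).unwrap();
        assert_eq!(&*boxed, "Hello, world");
        assert!(weak.upgrade().is_none());

        // With more than one strong reference, the original Rc is returned.
        let shared: Rc<str> = Rc::from("shared");
        let _other = Rc::clone(&shared);
        assert!(Rc::try_unwrap_as_box(shared).is_err());
    }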