@@ -1,10 +1,11 @@
-use core::alloc::Allocator;
+use core::alloc::{AllocError, Allocator};
 use core::cell::UnsafeCell;
 #[cfg(not(no_global_oom_handling))]
 use core::clone::CloneToUninit;
 use core::marker::PhantomData;
 #[cfg(not(no_global_oom_handling))]
-use core::mem::{self, DropGuard};
+use core::mem;
+use core::mem::DropGuard;
 #[cfg(not(no_global_oom_handling))]
 use core::ops::DerefMut;
 #[cfg(not(no_global_oom_handling))]
@@ -20,7 +21,7 @@ use crate::raw_rc::raw_weak::RawWeak;
 #[cfg(not(no_global_oom_handling))]
 use crate::raw_rc::rc_alloc;
 #[cfg(not(no_global_oom_handling))]
-use crate::raw_rc::rc_layout::RcLayout;
+use crate::raw_rc::rc_layout::{RcLayout, RcLayoutExt};
 use crate::raw_rc::rc_value_pointer::RcValuePointer;
 
 /// Decrements strong reference count in a reference-counted allocation with a value object that is
@@ -355,3 +356,126 @@ where
         unsafe { self.weak.value_ptr_unchecked() }
     }
 }
+
+impl<T, A> RawRc<T, A> {
+    /// # Safety
+    ///
+    /// `weak` must be non-dangling.
+    unsafe fn from_weak_with_value(weak: RawWeak<T, A>, value: T) -> Self {
+        unsafe {
+            weak.as_ptr().write(value);
+
+            Self::from_weak(weak)
+        }
+    }
+
+    #[inline]
+    pub(crate) fn try_new(value: T) -> Result<Self, AllocError>
+    where
+        A: Allocator + Default,
+    {
+        RawWeak::try_new_uninit::<1>()
+            .map(|weak| unsafe { Self::from_weak_with_value(weak, value) })
+    }
+
+    #[inline]
+    pub(crate) fn try_new_in(value: T, alloc: A) -> Result<Self, AllocError>
+    where
+        A: Allocator,
+    {
+        RawWeak::try_new_uninit_in::<1>(alloc)
+            .map(|weak| unsafe { Self::from_weak_with_value(weak, value) })
+    }
+
+    #[cfg(not(no_global_oom_handling))]
+    #[inline]
+    pub(crate) fn new(value: T) -> Self
+    where
+        A: Allocator + Default,
+    {
+        unsafe { Self::from_weak_with_value(RawWeak::new_uninit::<1>(), value) }
+    }
+
+    #[cfg(not(no_global_oom_handling))]
+    #[inline]
+    pub(crate) fn new_in(value: T, alloc: A) -> Self
+    where
+        A: Allocator,
+    {
+        unsafe { Self::from_weak_with_value(RawWeak::new_uninit_in::<1>(alloc), value) }
+    }
+
+    #[cfg(not(no_global_oom_handling))]
+    fn new_with<F>(f: F) -> Self
+    where
+        A: Allocator + Default,
+        F: FnOnce() -> T,
+    {
+        let (ptr, alloc) = rc_alloc::allocate_with::<A, _, 1>(T::RC_LAYOUT, |ptr| unsafe {
+            ptr.as_ptr().cast().write(f())
+        });
+
+        unsafe { Self::from_raw_parts(ptr.as_ptr().cast(), alloc) }
+    }
+
+    /// # Safety
+    ///
+    /// All accesses to `self` must use the same `RefCounter` implementation for `R`.
+    pub(crate) unsafe fn into_inner<R>(self) -> Option<T>
+    where
+        A: Allocator,
+        R: RefCounter,
+    {
+        let is_last_strong_ref = unsafe { decrement_strong_ref_count::<R>(self.value_ptr()) };
+
+        is_last_strong_ref.then(|| unsafe { self.weak.assume_init_into_inner::<R>() })
+    }
+
+    /// # Safety
+    ///
+    /// All accesses to `self` must use the same `RefCounter` implementation for `R`.
+    pub(crate) unsafe fn try_unwrap<R>(self) -> Result<T, RawRc<T, A>>
+    where
+        A: Allocator,
+        R: RefCounter,
+    {
+        unsafe fn inner<R>(value_ptr: RcValuePointer) -> bool
+        where
+            R: RefCounter,
+        {
+            unsafe {
+                R::from_raw_counter(value_ptr.strong_count_ptr().as_ref()).try_lock_strong_count()
+            }
+        }
+
+        let is_last_strong_ref = unsafe { inner::<R>(self.value_ptr()) };
+
+        if is_last_strong_ref {
+            Ok(unsafe { self.weak.assume_init_into_inner::<R>() })
+        } else {
+            Err(self)
+        }
+    }
+
+    /// # Safety
+    ///
+    /// All accesses to `self` must use the same `RefCounter` implementation for `R`.
+    pub(crate) unsafe fn unwrap_or_clone<R>(self) -> T
+    where
+        T: Clone,
+        A: Allocator,
+        R: RefCounter,
+    {
+        // SAFETY: Caller guarantees `self` will only be accessed with the same `RefCounter`
+        // implementation.
+        unsafe { self.try_unwrap::<R>() }.unwrap_or_else(|rc| {
+            // SAFETY: Caller guarantees `rc` will only be accessed with the same `RefCounter`
+            // implementation, and the `rc` local variable will not be accessed again after the
+            // drop guard is triggered.
+            let guard = DropGuard::new(rc, |mut rc| unsafe { rc.drop::<R>() });
+
+            // SAFETY: `RawRc<T, A>` is guaranteed to contain a valid `T` value.
+            T::clone(unsafe { guard.as_ptr().as_ref() })
+        })
+    }
+}