@@ -26,8 +26,8 @@ use core::pin::{Pin, PinCoerceUnsized};
 use core::ptr::{self, NonNull};
 #[cfg(not(no_global_oom_handling))]
 use core::slice::from_raw_parts_mut;
-use core::sync::atomic;
 use core::sync::atomic::Ordering::{Acquire, Relaxed, Release};
+use core::sync::atomic::{self, Atomic};
 use core::{borrow, fmt, hint};

 #[cfg(not(no_global_oom_handling))]
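The only substantive change in this hunk is the import: the `core::sync::atomic` module import is kept via `self`, and the generic `Atomic` alias is added so the reference-count fields can be spelled `Atomic<usize>` instead of `AtomicUsize`. A minimal sketch of that equivalence, assuming a nightly toolchain and that the alias sits behind the unstable `generic_atomic` feature gate:

```rust
#![feature(generic_atomic)] // assumed feature gate for the `Atomic<T>` alias

use std::sync::atomic::{Atomic, AtomicUsize, Ordering::Relaxed};

fn main() {
    // `Atomic<usize>` resolves to `AtomicUsize`, so the two spellings name the
    // same type and expose the same operations.
    let count: Atomic<usize> = AtomicUsize::new(0);
    count.fetch_add(1, Relaxed);
    assert_eq!(count.load(Relaxed), 1);
}
```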
@@ -369,12 +369,12 @@ impl<T: ?Sized, A: Allocator> fmt::Debug for Weak<T, A> {
 // inner types.
 #[repr(C)]
 struct ArcInner<T: ?Sized> {
-    strong: atomic::AtomicUsize,
+    strong: Atomic<usize>,

     // the value usize::MAX acts as a sentinel for temporarily "locking" the
     // ability to upgrade weak pointers or downgrade strong ones; this is used
     // to avoid races in `make_mut` and `get_mut`.
-    weak: atomic::AtomicUsize,
+    weak: Atomic<usize>,

     data: T,
 }
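The comment on `weak` describes the `usize::MAX` sentinel that the patched `is_unique` (later in this diff) uses to briefly lock the weak count. A standalone, illustrative sketch of that lock/unlock pattern, not part of the patch itself:

```rust
use std::sync::atomic::{
    AtomicUsize,
    Ordering::{Acquire, Relaxed, Release},
};

/// "Lock" a weak count by swapping its sole-holder value (1) for the
/// `usize::MAX` sentinel; while locked, no new weak refs can be handed out.
fn try_lock_weak(weak: &AtomicUsize) -> bool {
    weak.compare_exchange(1, usize::MAX, Acquire, Relaxed).is_ok()
}

/// Restore the real count, releasing the "lock".
fn unlock_weak(weak: &AtomicUsize) {
    weak.store(1, Release);
}

fn main() {
    let weak = AtomicUsize::new(1);
    assert!(try_lock_weak(&weak));
    // ...a caller such as `is_unique` would inspect the strong count here...
    unlock_weak(&weak);
    assert_eq!(weak.load(Relaxed), 1);
}
```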
@@ -2446,7 +2446,7 @@ impl<T: ?Sized, A: Allocator> Arc<T, A> {
     #[inline]
     #[stable(feature = "arc_unique", since = "1.4.0")]
     pub fn get_mut(this: &mut Self) -> Option<&mut T> {
-        if this.is_unique() {
+        if Self::is_unique(this) {
             // This unsafety is ok because we're guaranteed that the pointer
             // returned is the *only* pointer that will ever be returned to T. Our
             // reference count is guaranteed to be 1 at this point, and we required
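The call site changes from method syntax to the fully qualified `Self::is_unique(this)` because `is_unique` now takes `this: &Self` rather than a `self` receiver, in line with `Arc`'s convention of using associated functions so they never shadow methods of the pointee. A small illustrative sketch of that convention (the `Widget` type is hypothetical):

```rust
use std::sync::Arc;

struct Widget;

impl Widget {
    // A pointee method whose name a smart pointer might also want to use.
    fn is_unique(&self) -> bool {
        true
    }
}

fn main() {
    let w = Arc::new(Widget);

    // Method syntax auto-derefs to the pointee, so this calls Widget::is_unique.
    assert!(w.is_unique());

    // Associated-function syntax unambiguously targets the Arc itself.
    assert!(Arc::ptr_eq(&w, &Arc::clone(&w)));
}
```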
@@ -2526,28 +2526,81 @@ impl<T: ?Sized, A: Allocator> Arc<T, A> {
         unsafe { &mut (*this.ptr.as_ptr()).data }
     }

-    /// Determine whether this is the unique reference (including weak refs) to
-    /// the underlying data.
+    /// Determine whether this is the unique reference to the underlying data.
     ///
-    /// Note that this requires locking the weak ref count.
-    fn is_unique(&mut self) -> bool {
+    /// Returns `true` if there are no other `Arc` or [`Weak`] pointers to the same allocation;
+    /// returns `false` otherwise.
+    ///
+    /// If this function returns `true`, then it is guaranteed to be safe to call [`get_mut_unchecked`]
+    /// on this `Arc`, so long as no clones occur in between.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(arc_is_unique)]
+    ///
+    /// use std::sync::Arc;
+    ///
+    /// let x = Arc::new(3);
+    /// assert!(Arc::is_unique(&x));
+    ///
+    /// let y = Arc::clone(&x);
+    /// assert!(!Arc::is_unique(&x));
+    /// drop(y);
+    ///
+    /// // Weak references also count, because they could be upgraded at any time.
+    /// let z = Arc::downgrade(&x);
+    /// assert!(!Arc::is_unique(&x));
+    /// ```
+    ///
+    /// # Pointer invalidation
+    ///
+    /// This function will always return the same value as `Arc::get_mut(arc).is_some()`. However,
+    /// unlike that operation it does not produce any mutable references to the underlying data,
+    /// meaning no pointers to the data inside the `Arc` are invalidated by the call. Thus, the
+    /// following code is valid, even though it would be UB if it used `Arc::get_mut`:
+    ///
+    /// ```
+    /// #![feature(arc_is_unique)]
+    ///
+    /// use std::sync::Arc;
+    ///
+    /// let arc = Arc::new(5);
+    /// let pointer: *const i32 = &*arc;
+    /// assert!(Arc::is_unique(&arc));
+    /// assert_eq!(unsafe { *pointer }, 5);
+    /// ```
+    ///
+    /// # Atomic orderings
+    ///
+    /// Concurrent drops to other `Arc` pointers to the same allocation will synchronize with this
+    /// call - that is, this call performs an `Acquire` operation on the underlying strong and weak
+    /// ref counts. This ensures that calling `get_mut_unchecked` is safe.
+    ///
+    /// Note that this operation requires locking the weak ref count, so concurrent calls to
+    /// `downgrade` may spin-loop for a short period of time.
+    ///
+    /// [`get_mut_unchecked`]: Self::get_mut_unchecked
+    #[inline]
+    #[unstable(feature = "arc_is_unique", issue = "138938")]
+    pub fn is_unique(this: &Self) -> bool {
         // lock the weak pointer count if we appear to be the sole weak pointer
         // holder.
         //
         // The acquire label here ensures a happens-before relationship with any
         // writes to `strong` (in particular in `Weak::upgrade`) prior to decrements
         // of the `weak` count (via `Weak::drop`, which uses release). If the upgraded
         // weak ref was never dropped, the CAS here will fail so we do not care to synchronize.
-        if self.inner().weak.compare_exchange(1, usize::MAX, Acquire, Relaxed).is_ok() {
+        if this.inner().weak.compare_exchange(1, usize::MAX, Acquire, Relaxed).is_ok() {
             // This needs to be an `Acquire` to synchronize with the decrement of the `strong`
             // counter in `drop` -- the only access that happens when any but the last reference
             // is being dropped.
-            let unique = self.inner().strong.load(Acquire) == 1;
+            let unique = this.inner().strong.load(Acquire) == 1;

             // The release write here synchronizes with a read in `downgrade`,
             // effectively preventing the above read of `strong` from happening
             // after the write.
-            self.inner().weak.store(1, Release); // release the lock
+            this.inner().weak.store(1, Release); // release the lock
             unique
         } else {
             false
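A short usage sketch of the equivalence stated in the new docs, that `Arc::is_unique(&a)` always agrees with `Arc::get_mut(&mut a).is_some()`; it assumes a nightly toolchain with `#![feature(arc_is_unique)]`:

```rust
#![feature(arc_is_unique)]

use std::sync::Arc;

fn main() {
    let mut a = Arc::new(String::from("unique"));

    // One strong reference, no weak ones: both report uniqueness.
    assert!(Arc::is_unique(&a));
    assert!(Arc::get_mut(&mut a).is_some());

    // Any extra Arc (or Weak) makes both report non-uniqueness.
    let b = Arc::clone(&a);
    assert!(!Arc::is_unique(&a));
    assert!(Arc::get_mut(&mut a).is_none());
    drop(b);

    // Unlike `get_mut`, `is_unique` takes `&Self`, so it never creates a
    // mutable reference and does not invalidate raw pointers to the data.
    assert!(Arc::is_unique(&a));
}
```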
@@ -2760,8 +2813,8 @@ impl<T, A: Allocator> Weak<T, A> {
 /// Helper type to allow accessing the reference counts without
 /// making any assertions about the data field.
 struct WeakInner<'a> {
-    weak: &'a atomic::AtomicUsize,
-    strong: &'a atomic::AtomicUsize,
+    weak: &'a Atomic<usize>,
+    strong: &'a Atomic<usize>,
 }

 impl<T: ?Sized> Weak<T> {