@@ -131,11 +131,12 @@ pub struct Arc<T: ?Sized> {
 }
 
 #[stable(feature = "rust1", since = "1.0.0")]
-unsafe impl<T: ?Sized + Sync + Send> Send for Arc<T> { }
+unsafe impl<T: ?Sized + Sync + Send> Send for Arc<T> {}
 #[stable(feature = "rust1", since = "1.0.0")]
-unsafe impl<T: ?Sized + Sync + Send> Sync for Arc<T> { }
+unsafe impl<T: ?Sized + Sync + Send> Sync for Arc<T> {}
 
-#[cfg(not(stage0))] // remove cfg after new snapshot
+// remove cfg after new snapshot
+#[cfg(not(stage0))]
 #[unstable(feature = "coerce_unsized", issue = "27732")]
 impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Arc<U>> for Arc<T> {}
 
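The `Send`/`Sync` bounds above are why an `Arc<T>` only crosses threads when `T: Sync + Send`: a clone moved to another thread can both read through the shared reference and, if it happens to be the last owner, drop `T` on that thread. A minimal standalone illustration (not part of this diff):

```rust
use std::sync::Arc;
use std::thread;

fn main() {
    // Vec<i32> is Send + Sync, so Arc<Vec<i32>> is Send and a clone may
    // be moved into the spawned thread.
    let data = Arc::new(vec![1, 2, 3]);
    let cloned = data.clone();
    let handle = thread::spawn(move || cloned.iter().sum::<i32>());
    assert_eq!(handle.join().unwrap(), 6);
    // Something like Arc<Cell<i32>> would be rejected here: Cell is not Sync.
}
```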
@@ -152,11 +153,12 @@ pub struct Weak<T: ?Sized> {
 }
 
 #[stable(feature = "rust1", since = "1.0.0")]
-unsafe impl<T: ?Sized + Sync + Send> Send for Weak<T> { }
+unsafe impl<T: ?Sized + Sync + Send> Send for Weak<T> {}
 #[stable(feature = "rust1", since = "1.0.0")]
-unsafe impl<T: ?Sized + Sync + Send> Sync for Weak<T> { }
+unsafe impl<T: ?Sized + Sync + Send> Sync for Weak<T> {}
 
-#[cfg(not(stage0))] // remove cfg after new snapshot
+// remove cfg after new snapshot
+#[cfg(not(stage0))]
 #[unstable(feature = "coerce_unsized", issue = "27732")]
 impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Weak<U>> for Weak<T> {}
 
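The `CoerceUnsized` impls (gated on `coerce_unsized`, issue #27732) are what let an `Arc` or `Weak` of a sized type coerce to one of an unsized type. A sketch of the user-visible effect, written in today's syntax:

```rust
use std::fmt::Display;
use std::sync::Arc;

fn main() {
    // Arc<i32> coerces to Arc<dyn Display>: only the pointer metadata
    // changes; the reference counts are untouched.
    let concrete: Arc<i32> = Arc::new(42);
    let object: Arc<dyn Display> = concrete;
    println!("{}", object);

    // The same coercion turns an Arc of an array into an Arc of a slice.
    let array: Arc<[i32; 3]> = Arc::new([1, 2, 3]);
    let slice: Arc<[i32]> = array;
    assert_eq!(slice.len(), 3);
}
```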
@@ -226,7 +228,7 @@ impl<T> Arc<T> {
     pub fn try_unwrap(this: Self) -> Result<T, Self> {
         // See `drop` for why all these atomics are like this
         if this.inner().strong.compare_and_swap(1, 0, Release) != 1 {
-            return Err(this)
+            return Err(this);
         }
 
         atomic::fence(Acquire);
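`try_unwrap` only succeeds when it can swap the strong count from 1 to 0; otherwise the caller gets the `Arc` back untouched. For example:

```rust
use std::sync::Arc;

fn main() {
    // Sole owner: the CAS from 1 to 0 succeeds and the value moves out.
    let unique = Arc::new(String::from("hello"));
    assert_eq!(Arc::try_unwrap(unique), Ok(String::from("hello")));

    // A second strong reference exists, so ownership cannot be taken and
    // the original Arc is handed back in Err.
    let shared = Arc::new(String::from("world"));
    let other = shared.clone();
    assert!(Arc::try_unwrap(shared).is_err());
    assert_eq!(*other, "world");
}
```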
@@ -265,7 +267,7 @@ impl<T: ?Sized> Arc<T> {
 
             // check if the weak counter is currently "locked"; if so, spin.
             if cur == usize::MAX {
-                continue
+                continue;
             }
 
             // NOTE: this code currently ignores the possibility of overflow
@@ -276,7 +278,7 @@ impl<T: ?Sized> Arc<T> {
             // synchronize with the write coming from `is_unique`, so that the
             // events prior to that write happen before this read.
             if this.inner().weak.compare_and_swap(cur, cur + 1, Acquire) == cur {
-                return Weak { _ptr: this._ptr }
+                return Weak { _ptr: this._ptr };
             }
         }
     }
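So `downgrade` spins while `is_unique` has the weak count "locked" at `usize::MAX`, then CASes it one higher and returns a non-owning pointer. Typical use, shown against the stable API:

```rust
use std::sync::Arc;

fn main() {
    let strong = Arc::new(vec![1, 2, 3]);

    // Bumps the weak count (spinning if it is temporarily locked at
    // usize::MAX) without touching the strong count.
    let weak = Arc::downgrade(&strong);
    assert_eq!(Arc::strong_count(&strong), 1);
    assert_eq!(Arc::weak_count(&strong), 1);
    assert!(weak.upgrade().is_some());
}
```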
@@ -568,14 +570,14 @@ impl<T: ?Sized> Drop for Arc<T> {
         let ptr = *self._ptr;
         // if ptr.is_null() { return }
         if ptr as *mut u8 as usize == 0 || ptr as *mut u8 as usize == mem::POST_DROP_USIZE {
-            return
+            return;
         }
 
         // Because `fetch_sub` is already atomic, we do not need to synchronize
         // with other threads unless we are going to delete the object. This
         // same logic applies to the below `fetch_sub` to the `weak` count.
         if self.inner().strong.fetch_sub(1, Release) != 1 {
-            return
+            return;
         }
 
         // This fence is needed to prevent reordering of use of the data and
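The comments here describe the classic refcount teardown protocol: decrement with `Release`, and only the thread that sees the count hit zero issues an `Acquire` fence before destroying the data. A minimal sketch of the same pattern on a bare `AtomicUsize` (the `RefCount` type and its method names are illustrative, not from this diff):

```rust
use std::sync::atomic::{fence, AtomicUsize, Ordering};

// Illustrative refcount: `release` returns true only for the last owner.
struct RefCount(AtomicUsize);

impl RefCount {
    fn release(&self) -> bool {
        // Release: no prior use of the shared data may be reordered below
        // this decrement.
        if self.0.fetch_sub(1, Ordering::Release) != 1 {
            return false; // other owners remain; the data must survive
        }
        // Acquire: observe every other owner's accesses before destroying.
        fence(Ordering::Acquire);
        true
    }
}

fn main() {
    let rc = RefCount(AtomicUsize::new(2));
    assert!(!rc.release()); // first owner gone, data kept alive
    assert!(rc.release()); // last owner: now safe to destroy the data
}
```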
@@ -634,7 +636,7 @@ impl<T: ?Sized> Weak<T> {
             // confirmed via the CAS below.
             let n = inner.strong.load(Relaxed);
             if n == 0 {
-                return None
+                return None;
             }
 
             // See comments in `Arc::clone` for why we do this (for `mem::forget`).
@@ -645,7 +647,7 @@ impl<T: ?Sized> Weak<T> {
             // Relaxed is valid for the same reason it is on Arc's Clone impl
             let old = inner.strong.compare_and_swap(n, n + 1, Relaxed);
             if old == n {
-                return Some(Arc { _ptr: self._ptr })
+                return Some(Arc { _ptr: self._ptr });
             }
         }
     }
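So `upgrade` loops: it loads the strong count, bails out with `None` if it is already zero, and otherwise tries to CAS it one higher, retrying on contention. The observable behavior:

```rust
use std::sync::Arc;

fn main() {
    let strong = Arc::new(5);
    let weak = Arc::downgrade(&strong);

    // A strong reference still exists, so the CAS raises the count and
    // upgrade hands back a fresh Arc.
    assert_eq!(weak.upgrade().map(|arc| *arc), Some(5));

    // After the last Arc is dropped the load sees 0; the value is not
    // resurrected and upgrade returns None.
    drop(strong);
    assert!(weak.upgrade().is_none());
}
```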
@@ -687,7 +689,7 @@ impl<T: ?Sized> Clone for Weak<T> {
             }
         }
 
-        return Weak { _ptr: self._ptr }
+        return Weak { _ptr: self._ptr };
     }
 }
 
@@ -723,7 +725,7 @@ impl<T: ?Sized> Drop for Weak<T> {
 
         // see comments above for why this check is here
         if ptr as *mut u8 as usize == 0 || ptr as *mut u8 as usize == mem::POST_DROP_USIZE {
-            return
+            return;
         }
 
         // If we find out that we were the last weak pointer, then its time to
@@ -933,8 +935,7 @@ mod tests {
 
     struct Canary(*mut atomic::AtomicUsize);
 
-    impl Drop for Canary
-    {
+    impl Drop for Canary {
         fn drop(&mut self) {
             unsafe {
                 match *self {
@@ -948,7 +949,7 @@ mod tests {
 
     #[test]
     fn manually_share_arc() {
-        let v = vec!(1, 2, 3, 4, 5, 6, 7, 8, 9, 10);
+        let v = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10];
         let arc_v = Arc::new(v);
 
         let (tx, rx) = channel();