@@ -77,11 +77,12 @@ use core::atomic;
 use core::atomic::Ordering::{Relaxed, Release, Acquire, SeqCst};
 use core::fmt;
 use core::cmp::Ordering;
-use core::mem::{min_align_of, size_of};
+use core::mem::{min_align_of_val, size_of_val};
+use core::intrinsics::drop_in_place;
 use core::mem;
 use core::nonzero::NonZero;
-use core::ops::Deref;
-use core::ptr;
+use core::ops::{Deref, CoerceUnsized};
+use core::marker::Unsize;
 use core::hash::{Hash, Hasher};
 use heap::deallocate;
 
@@ -118,15 +119,16 @@ use heap::deallocate;
 /// ```
 #[unsafe_no_drop_flag]
 #[stable(feature = "rust1", since = "1.0.0")]
-pub struct Arc<T> {
+pub struct Arc<T: ?Sized> {
     // FIXME #12808: strange name to try to avoid interfering with
     // field accesses of the contained type via Deref
     _ptr: NonZero<*mut ArcInner<T>>,
 }
 
-unsafe impl<T: Sync + Send> Send for Arc<T> { }
-unsafe impl<T: Sync + Send> Sync for Arc<T> { }
+unsafe impl<T: ?Sized + Sync + Send> Send for Arc<T> { }
+unsafe impl<T: ?Sized + Sync + Send> Sync for Arc<T> { }
 
+impl<T: ?Sized + Unsize<U>, U: ?Sized> CoerceUnsized<Arc<U>> for Arc<T> {}
 
 /// A weak pointer to an `Arc`.
 ///
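The `CoerceUnsized` impl added above is what lets a strong pointer to a sized value be coerced into a strong pointer to an unsized one. A minimal sketch of the user-visible effect on a current toolchain (using today's `dyn Trait` syntax rather than the bare trait-object syntax of this era):

use std::fmt::Debug;
use std::sync::Arc;

fn main() {
    // Arc<[i32; 3]> coerces to Arc<[i32]> because [i32; 3]: Unsize<[i32]>.
    let nums: Arc<[i32]> = Arc::new([1, 2, 3]);
    // Arc<u8> coerces to Arc<dyn Debug> because u8: Unsize<dyn Debug>.
    let any_debug: Arc<dyn Debug> = Arc::new(7u8);
    assert_eq!(nums.len(), 3);
    println!("{:?} / {:?}", nums, any_debug);
}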
@@ -135,30 +137,30 @@ unsafe impl<T: Sync + Send> Sync for Arc<T> { }
 #[unsafe_no_drop_flag]
 #[unstable(feature = "alloc",
            reason = "Weak pointers may not belong in this module.")]
-pub struct Weak<T> {
+pub struct Weak<T: ?Sized> {
     // FIXME #12808: strange name to try to avoid interfering with
     // field accesses of the contained type via Deref
     _ptr: NonZero<*mut ArcInner<T>>,
 }
 
-unsafe impl<T: Sync + Send> Send for Weak<T> { }
-unsafe impl<T: Sync + Send> Sync for Weak<T> { }
+unsafe impl<T: ?Sized + Sync + Send> Send for Weak<T> { }
+unsafe impl<T: ?Sized + Sync + Send> Sync for Weak<T> { }
 
 #[stable(feature = "rust1", since = "1.0.0")]
-impl<T: fmt::Debug> fmt::Debug for Weak<T> {
+impl<T: ?Sized + fmt::Debug> fmt::Debug for Weak<T> {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         write!(f, "(Weak)")
     }
 }
 
-struct ArcInner<T> {
+struct ArcInner<T: ?Sized> {
     strong: atomic::AtomicUsize,
     weak: atomic::AtomicUsize,
     data: T,
 }
 
-unsafe impl<T: Sync + Send> Send for ArcInner<T> { }
-unsafe impl<T: Sync + Send> Sync for ArcInner<T> { }
+unsafe impl<T: ?Sized + Sync + Send> Send for ArcInner<T> { }
+unsafe impl<T: ?Sized + Sync + Send> Sync for ArcInner<T> { }
 
 impl<T> Arc<T> {
     /// Constructs a new `Arc<T>`.
@@ -182,7 +184,9 @@ impl<T> Arc<T> {
         };
         Arc { _ptr: unsafe { NonZero::new(mem::transmute(x)) } }
     }
+}
 
+impl<T: ?Sized> Arc<T> {
     /// Downgrades the `Arc<T>` to a `Weak<T>` reference.
     ///
     /// # Examples
@@ -204,7 +208,7 @@ impl<T> Arc<T> {
     }
 }
 
-impl<T> Arc<T> {
+impl<T: ?Sized> Arc<T> {
     #[inline]
     fn inner(&self) -> &ArcInner<T> {
         // This unsafety is ok because while this arc is alive we're guaranteed
@@ -222,24 +226,24 @@ impl<T> Arc<T> {
 
         // Destroy the data at this time, even though we may not free the box
         // allocation itself (there may still be weak pointers lying around).
-        drop(ptr::read(&self.inner().data));
+        drop_in_place(&mut (*ptr).data);
 
         if self.inner().weak.fetch_sub(1, Release) == 1 {
             atomic::fence(Acquire);
-            deallocate(ptr as *mut u8, size_of::<ArcInner<T>>(), min_align_of::<ArcInner<T>>())
+            deallocate(ptr as *mut u8, size_of_val(&*ptr), min_align_of_val(&*ptr))
         }
     }
 }
 
 /// Get the number of weak references to this value.
 #[inline]
 #[unstable(feature = "alloc")]
-pub fn weak_count<T>(this: &Arc<T>) -> usize { this.inner().weak.load(SeqCst) - 1 }
+pub fn weak_count<T: ?Sized>(this: &Arc<T>) -> usize { this.inner().weak.load(SeqCst) - 1 }
 
 /// Get the number of strong references to this value.
 #[inline]
 #[unstable(feature = "alloc")]
-pub fn strong_count<T>(this: &Arc<T>) -> usize { this.inner().strong.load(SeqCst) }
+pub fn strong_count<T: ?Sized>(this: &Arc<T>) -> usize { this.inner().strong.load(SeqCst) }
 
 
 /// Returns a mutable reference to the contained value if the `Arc<T>` is unique.
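The switch from `size_of::<ArcInner<T>>()` to `size_of_val(&*ptr)` in the hunk above matters once `T` may be unsized: the allocation's size and alignment can then only be read off the value (the slice length, or the trait object's vtable), not computed from the type alone. A small sketch of that distinction, using today's stable `std::mem` names (`align_of_val` rather than the `min_align_of_val` of this era):

use std::mem::{align_of, align_of_val, size_of, size_of_val};

fn main() {
    // For a slice, the element count lives in the fat pointer,
    // so only size_of_val can report the full size.
    let xs: &[i32] = &[1, 2, 3, 4];
    assert_eq!(size_of_val(xs), 4 * size_of::<i32>());

    // For a trait object, size and alignment come from the vtable.
    let shown: &dyn std::fmt::Display = &12_u64;
    assert_eq!(size_of_val(shown), size_of::<u64>());
    assert_eq!(align_of_val(shown), align_of::<u64>());
}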
@@ -264,7 +268,7 @@ pub fn strong_count<T>(this: &Arc<T>) -> usize { this.inner().strong.load(SeqCst
 /// ```
 #[inline]
 #[unstable(feature = "alloc")]
-pub fn get_mut<T>(this: &mut Arc<T>) -> Option<&mut T> {
+pub fn get_mut<T: ?Sized>(this: &mut Arc<T>) -> Option<&mut T> {
     if strong_count(this) == 1 && weak_count(this) == 0 {
         // This unsafety is ok because we're guaranteed that the pointer
         // returned is the *only* pointer that will ever be returned to T. Our
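With the `?Sized` bound, unique-access mutation works through unsized `Arc`s as well. A sketch of the behaviour; note it uses the associated function `Arc::get_mut` from today's std rather than the free `get_mut` defined in this module:

use std::sync::Arc;

fn main() {
    // Uniquely owned, so mutable access to the unsized contents is granted.
    let mut a: Arc<[i32]> = Arc::new([1, 2, 3]);
    if let Some(slice) = Arc::get_mut(&mut a) {
        slice[0] = 10;
    }
    assert_eq!(a[0], 10);

    // Once another strong handle exists, get_mut returns None.
    let b = a.clone();
    assert!(Arc::get_mut(&mut a).is_none());
    drop(b);
}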
@@ -279,7 +283,7 @@ pub fn get_mut<T>(this: &mut Arc<T>) -> Option<&mut T> {
 }
 
 #[stable(feature = "rust1", since = "1.0.0")]
-impl<T> Clone for Arc<T> {
+impl<T: ?Sized> Clone for Arc<T> {
     /// Makes a clone of the `Arc<T>`.
     ///
     /// This increases the strong reference count.
@@ -313,7 +317,7 @@ impl<T> Clone for Arc<T> {
 }
 
 #[stable(feature = "rust1", since = "1.0.0")]
-impl<T> Deref for Arc<T> {
+impl<T: ?Sized> Deref for Arc<T> {
     type Target = T;
 
     #[inline]
@@ -356,7 +360,7 @@ impl<T: Clone> Arc<T> {
 }
 
 #[stable(feature = "rust1", since = "1.0.0")]
-impl<T> Drop for Arc<T> {
+impl<T: ?Sized> Drop for Arc<T> {
     /// Drops the `Arc<T>`.
     ///
     /// This will decrement the strong reference count. If the strong reference
@@ -390,7 +394,7 @@ impl<T> Drop for Arc<T> {
         // it's run more than once)
         let ptr = *self._ptr;
         // if ptr.is_null() { return }
-        if ptr.is_null() || ptr as usize == mem::POST_DROP_USIZE { return }
+        if ptr as usize == 0 || ptr as usize == mem::POST_DROP_USIZE { return }
 
         // Because `fetch_sub` is already atomic, we do not need to synchronize
         // with other threads unless we are going to delete the object. This
@@ -424,7 +428,7 @@ impl<T> Drop for Arc<T> {
 
 #[unstable(feature = "alloc",
            reason = "Weak pointers may not belong in this module.")]
-impl<T> Weak<T> {
+impl<T: ?Sized> Weak<T> {
     /// Upgrades a weak reference to a strong reference.
     ///
     /// Upgrades the `Weak<T>` reference to an `Arc<T>`, if possible.
@@ -465,7 +469,7 @@ impl<T> Weak<T> {
 
 #[unstable(feature = "alloc",
            reason = "Weak pointers may not belong in this module.")]
-impl<T> Clone for Weak<T> {
+impl<T: ?Sized> Clone for Weak<T> {
     /// Makes a clone of the `Weak<T>`.
     ///
     /// This increases the weak reference count.
@@ -489,7 +493,7 @@ impl<T> Clone for Weak<T> {
 }
 
 #[stable(feature = "rust1", since = "1.0.0")]
-impl<T> Drop for Weak<T> {
+impl<T: ?Sized> Drop for Weak<T> {
     /// Drops the `Weak<T>`.
     ///
     /// This will decrement the weak reference count.
@@ -520,21 +524,22 @@ impl<T> Drop for Weak<T> {
         let ptr = *self._ptr;
 
         // see comments above for why this check is here
-        if ptr.is_null() || ptr as usize == mem::POST_DROP_USIZE { return }
+        if ptr as usize == 0 || ptr as usize == mem::POST_DROP_USIZE { return }
 
         // If we find out that we were the last weak pointer, then its time to
         // deallocate the data entirely. See the discussion in Arc::drop() about
         // the memory orderings
         if self.inner().weak.fetch_sub(1, Release) == 1 {
             atomic::fence(Acquire);
-            unsafe { deallocate(ptr as *mut u8, size_of::<ArcInner<T>>(),
-                                min_align_of::<ArcInner<T>>()) }
+            unsafe { deallocate(ptr as *mut u8,
+                                size_of_val(&*ptr),
+                                min_align_of_val(&*ptr)) }
         }
     }
 }
 
 #[stable(feature = "rust1", since = "1.0.0")]
-impl<T: PartialEq> PartialEq for Arc<T> {
+impl<T: ?Sized + PartialEq> PartialEq for Arc<T> {
     /// Equality for two `Arc<T>`s.
     ///
     /// Two `Arc<T>`s are equal if their inner value are equal.
@@ -566,7 +571,7 @@ impl<T: PartialEq> PartialEq for Arc<T> {
     fn ne(&self, other: &Arc<T>) -> bool { *(*self) != *(*other) }
 }
 #[stable(feature = "rust1", since = "1.0.0")]
-impl<T: PartialOrd> PartialOrd for Arc<T> {
+impl<T: ?Sized + PartialOrd> PartialOrd for Arc<T> {
     /// Partial comparison for two `Arc<T>`s.
     ///
     /// The two are compared by calling `partial_cmp()` on their inner values.
@@ -645,21 +650,21 @@ impl<T: PartialOrd> PartialOrd for Arc<T> {
     fn ge(&self, other: &Arc<T>) -> bool { *(*self) >= *(*other) }
 }
 #[stable(feature = "rust1", since = "1.0.0")]
-impl<T: Ord> Ord for Arc<T> {
+impl<T: ?Sized + Ord> Ord for Arc<T> {
     fn cmp(&self, other: &Arc<T>) -> Ordering { (**self).cmp(&**other) }
 }
 #[stable(feature = "rust1", since = "1.0.0")]
-impl<T: Eq> Eq for Arc<T> {}
+impl<T: ?Sized + Eq> Eq for Arc<T> {}
 
 #[stable(feature = "rust1", since = "1.0.0")]
-impl<T: fmt::Display> fmt::Display for Arc<T> {
+impl<T: ?Sized + fmt::Display> fmt::Display for Arc<T> {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         fmt::Display::fmt(&**self, f)
     }
 }
 
 #[stable(feature = "rust1", since = "1.0.0")]
-impl<T: fmt::Debug> fmt::Debug for Arc<T> {
+impl<T: ?Sized + fmt::Debug> fmt::Debug for Arc<T> {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         fmt::Debug::fmt(&**self, f)
     }
@@ -679,7 +684,7 @@ impl<T: Default> Default for Arc<T> {
 }
 
 #[stable(feature = "rust1", since = "1.0.0")]
-impl<T: Hash> Hash for Arc<T> {
+impl<T: ?Sized + Hash> Hash for Arc<T> {
     fn hash<H: Hasher>(&self, state: &mut H) {
         (**self).hash(state)
     }
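Relaxing the comparison and hashing impls to `T: ?Sized` is what makes unsized handles such as `Arc<str>` usable as ordinary keys. A sketch on a current toolchain (where `Arc<str>` can be built with `Arc::from`, a conversion that postdates this diff):

use std::collections::HashMap;
use std::sync::Arc;

fn main() {
    // Arc<str> is Hash + Eq because str is, so it can serve as a map key.
    let key: Arc<str> = Arc::from("hello");
    let mut counts: HashMap<Arc<str>, u32> = HashMap::new();
    counts.insert(key.clone(), 1);
    assert_eq!(counts.get(&key), Some(&1));
}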
@@ -906,4 +911,13 @@ mod tests {
     // Make sure deriving works with Arc<T>
     #[derive(Eq, Ord, PartialEq, PartialOrd, Clone, Debug, Default)]
     struct Foo { inner: Arc<i32> }
+
+    #[test]
+    fn test_unsized() {
+        let x: Arc<[i32]> = Arc::new([1, 2, 3]);
+        assert_eq!(format!("{:?}", x), "[1, 2, 3]");
+        let y = x.clone().downgrade();
+        drop(x);
+        assert!(y.upgrade().is_none());
+    }
 }