 #![feature(rustc_attrs)]
 #![cfg_attr(test, feature(test))]
 #![feature(strict_provenance)]
+#![feature(ptr_const_cast)]
 
 use smallvec::SmallVec;
 
@@ -27,7 +28,7 @@ use std::cell::{Cell, RefCell};
 use std::cmp;
 use std::marker::{PhantomData, Send};
 use std::mem::{self, MaybeUninit};
-use std::ptr;
+use std::ptr::{self, NonNull};
 use std::slice;
 
 #[inline(never)]
@@ -55,15 +56,24 @@ pub struct TypedArena<T> {
 
 struct ArenaChunk<T = u8> {
     /// The raw storage for the arena chunk.
-    storage: Box<[MaybeUninit<T>]>,
+    storage: NonNull<[MaybeUninit<T>]>,
     /// The number of valid entries in the chunk.
     entries: usize,
 }
 
+unsafe impl<#[may_dangle] T> Drop for ArenaChunk<T> {
+    fn drop(&mut self) {
+        unsafe { Box::from_raw(self.storage.as_mut()) };
+    }
+}
+
 impl<T> ArenaChunk<T> {
     #[inline]
     unsafe fn new(capacity: usize) -> ArenaChunk<T> {
-        ArenaChunk { storage: Box::new_uninit_slice(capacity), entries: 0 }
+        ArenaChunk {
+            storage: NonNull::new(Box::into_raw(Box::new_uninit_slice(capacity))).unwrap(),
+            entries: 0,
+        }
     }
 
     /// Destroys this arena chunk.
@@ -72,14 +82,15 @@ impl<T> ArenaChunk<T> {
         // The branch on needs_drop() is an -O1 performance optimization.
         // Without the branch, dropping TypedArena<u8> takes linear time.
         if mem::needs_drop::<T>() {
-            ptr::drop_in_place(MaybeUninit::slice_assume_init_mut(&mut self.storage[..len]));
+            let slice = &mut *(self.storage.as_mut());
+            ptr::drop_in_place(MaybeUninit::slice_assume_init_mut(&mut slice[..len]));
         }
     }
 
     // Returns a pointer to the first allocated object.
     #[inline]
     fn start(&mut self) -> *mut T {
-        MaybeUninit::slice_as_mut_ptr(&mut self.storage)
+        self.storage.as_ptr() as *mut T
     }
 
     // Returns a pointer to the end of the allocated space.
@@ -90,7 +101,7 @@ impl<T> ArenaChunk<T> {
                 // A pointer as large as possible for zero-sized elements.
                 ptr::invalid_mut(!0)
             } else {
-                self.start().add(self.storage.len())
+                self.start().add((*self.storage.as_ptr()).len())
             }
         }
     }
@@ -274,7 +285,7 @@ impl<T> TypedArena<T> {
                 // If the previous chunk's len is less than HUGE_PAGE
                 // bytes, then this chunk will be at least double the previous
                 // chunk's size.
-                new_cap = last_chunk.storage.len().min(HUGE_PAGE / elem_size / 2);
+                new_cap = (*last_chunk.storage.as_ptr()).len().min(HUGE_PAGE / elem_size / 2);
                 new_cap *= 2;
             } else {
                 new_cap = PAGE / elem_size;
@@ -382,7 +393,7 @@ impl DroplessArena {
                 // If the previous chunk's len is less than HUGE_PAGE
                 // bytes, then this chunk will be at least double the previous
                 // chunk's size.
-                new_cap = last_chunk.storage.len().min(HUGE_PAGE / 2);
+                new_cap = (*last_chunk.storage.as_ptr()).len().min(HUGE_PAGE / 2);
                 new_cap *= 2;
             } else {
                 new_cap = PAGE;
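
For readers unfamiliar with the ownership pattern this change switches to, here is a small standalone sketch, separate from the patch itself: `RawChunk` and its methods are hypothetical illustrations, not rustc code. It shows the same idea of keeping an allocation as a raw `NonNull` obtained from `Box::into_raw`, reading its length through the raw pointer, and rebuilding the `Box` only in `Drop` so the memory is freed exactly once. The real patch additionally marks its `Drop` impl with the nightly-only `#[may_dangle]` attribute, which this stable-Rust sketch omits.

```rust
use std::ptr::NonNull;

// Hypothetical stand-in for ArenaChunk: owns its buffer through a raw
// NonNull pointer instead of a Box.
struct RawChunk {
    storage: NonNull<[u8]>,
}

impl RawChunk {
    fn new(capacity: usize) -> RawChunk {
        let boxed: Box<[u8]> = vec![0u8; capacity].into_boxed_slice();
        // Box::into_raw never returns null, so this unwrap cannot fail.
        RawChunk { storage: NonNull::new(Box::into_raw(boxed)).unwrap() }
    }

    fn len(&self) -> usize {
        // Read the length through the raw pointer, mirroring the
        // (*storage.as_ptr()).len() calls in the diff above.
        unsafe { (*self.storage.as_ptr()).len() }
    }
}

impl Drop for RawChunk {
    fn drop(&mut self) {
        // Rebuild the Box so the allocation is deallocated exactly once.
        unsafe { drop(Box::from_raw(self.storage.as_ptr())) };
    }
}

fn main() {
    let chunk = RawChunk::new(4096);
    assert_eq!(chunk.len(), 4096);
}
```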