 #![feature(rustc_attrs)]
 #![cfg_attr(test, feature(test))]
 #![feature(strict_provenance)]
+#![deny(unsafe_op_in_unsafe_fn)]
 #![deny(rustc::untranslatable_diagnostic)]
 #![deny(rustc::diagnostic_outside_of_impl)]
 #![allow(clippy::mut_from_ref)] // Arena allocators are one of the places where this pattern is fine.
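
For readers unfamiliar with the lint being enabled here: `unsafe_op_in_unsafe_fn` removes the implicit unsafe context from the body of an `unsafe fn`, so every unsafe operation must be wrapped in its own `unsafe {}` block (usually with a SAFETY comment), which is exactly what the hunks below do. A minimal standalone sketch of the rule, not taken from this crate (`read_value` is an invented example):

```rust
#![deny(unsafe_op_in_unsafe_fn)]

/// # Safety
/// `ptr` must be non-null, properly aligned, and point to a live `u32`.
unsafe fn read_value(ptr: *const u32) -> u32 {
    // Under the lint, the raw read needs its own `unsafe` block; the
    // `unsafe fn` signature alone no longer provides that context.
    // SAFETY: upheld by the caller, per the contract above.
    unsafe { ptr.read() }
}

fn main() {
    let x = 7u32;
    // SAFETY: `&x` is a valid, aligned pointer to a live `u32`.
    assert_eq!(unsafe { read_value(&x) }, 7);
}
```
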
@@ -74,19 +75,27 @@ impl<T> ArenaChunk<T> {
     #[inline]
     unsafe fn new(capacity: usize) -> ArenaChunk<T> {
         ArenaChunk {
-            storage: NonNull::new_unchecked(Box::into_raw(Box::new_uninit_slice(capacity))),
+            storage: NonNull::from(Box::leak(Box::new_uninit_slice(capacity))),
             entries: 0,
         }
     }

     /// Destroys this arena chunk.
+    ///
+    /// # Safety
+    ///
+    /// The caller must ensure that `len` elements of this chunk have been initialized.
     #[inline]
     unsafe fn destroy(&mut self, len: usize) {
         // The branch on needs_drop() is an -O1 performance optimization.
-        // Without the branch, dropping TypedArena<u8> takes linear time.
+        // Without the branch, dropping TypedArena<T> takes linear time.
         if mem::needs_drop::<T>() {
-            let slice = self.storage.as_mut();
-            ptr::drop_in_place(MaybeUninit::slice_assume_init_mut(&mut slice[..len]));
+            // SAFETY: The caller must ensure that `len` elements of this chunk have
+            // been initialized.
+            unsafe {
+                let slice = self.storage.as_mut();
+                ptr::drop_in_place(MaybeUninit::slice_assume_init_mut(&mut slice[..len]));
+            }
         }
     }

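
A side note on the `storage` line above: `Box::leak` hands back a plain `&mut [MaybeUninit<T>]`, and `NonNull::from` accepts a reference, so the pointer can now be built without `new_unchecked` or any unsafe at all. A rough standalone sketch of that pattern (the `leaked_storage` helper is invented for illustration and assumes a toolchain where `Box::new_uninit_slice` is available):

```rust
use std::mem::MaybeUninit;
use std::ptr::NonNull;

// Hypothetical free function mirroring the pattern in `ArenaChunk::new`: the
// leaked reference is always non-null, so no unsafe is needed to wrap it.
fn leaked_storage(capacity: usize) -> NonNull<[MaybeUninit<u8>]> {
    NonNull::from(Box::leak(Box::new_uninit_slice(capacity)))
}

fn main() {
    // The allocation is intentionally leaked in this toy example.
    let _storage = leaked_storage(16);
}
```
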
@@ -255,7 +264,9 @@ impl<T> TypedArena<T> {
         self.ensure_capacity(len);

         let start_ptr = self.ptr.get();
-        self.ptr.set(start_ptr.add(len));
+        // SAFETY: `self.ensure_capacity` makes sure that there is enough space
+        // for `len` elements.
+        unsafe { self.ptr.set(start_ptr.add(len)) };
         start_ptr
     }

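
The hunk above follows the same shape as the rest of the PR: the SAFETY comment names the invariant (here, the capacity check done by `ensure_capacity`) and the unsafe operation sits in its own block. A toy sketch of that shape, with an invented `bump` helper standing in for the arena's cursor advance:

```rust
#![deny(unsafe_op_in_unsafe_fn)]

/// # Safety
/// The caller must guarantee that `ptr + len` stays within (or one past the
/// end of) the allocation `ptr` points into, which is what `ensure_capacity`
/// establishes for the arena before the cursor is advanced.
unsafe fn bump(ptr: *mut u8, len: usize) -> *mut u8 {
    // SAFETY: the in-bounds offset is guaranteed by the caller.
    unsafe { ptr.add(len) }
}

fn main() {
    let mut buf = [0u8; 16];
    let start = buf.as_mut_ptr();
    // SAFETY: `start + 4` is within the 16-byte stack buffer.
    let next = unsafe { bump(start, 4) };
    assert_eq!(next as usize - start as usize, 4);
}
```
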
@@ -483,6 +494,10 @@ impl DroplessArena {
         }
     }

+    /// # Safety
+    ///
+    /// The caller must ensure that `mem` is valid for writes up to
+    /// `size_of::<T>() * len`.
     #[inline]
     unsafe fn write_from_iter<T, I: Iterator<Item = T>>(
         &self,
@@ -494,13 +509,18 @@ impl DroplessArena {
         // Use a manual loop since LLVM manages to optimize it better for
         // slice iterators
         loop {
-            let value = iter.next();
-            if i >= len || value.is_none() {
-                // We only return as many items as the iterator gave us, even
-                // though it was supposed to give us `len`
-                return slice::from_raw_parts_mut(mem, i);
+            // SAFETY: The caller must ensure that `mem` is valid for writes up to
+            // `size_of::<T>() * len`.
+            unsafe {
+                match iter.next() {
+                    Some(value) if i < len => mem.add(i).write(value),
+                    Some(_) | None => {
+                        // We only return as many items as the iterator gave us, even
+                        // though it was supposed to give us `len`
+                        return slice::from_raw_parts_mut(mem, i);
+                    }
+                }
             }
-            ptr::write(mem.add(i), value.unwrap());
             i += 1;
         }
     }
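
The rewritten loop above folds the old `is_none()`/`unwrap()` pair into a single `match`: each item is written at offset `i` while `i < len`, and otherwise the initialized prefix is returned. The same control flow can be sketched safely against a `MaybeUninit` buffer (a standalone illustration with an invented `write_prefix` function, not the arena's actual signature):

```rust
use std::mem::MaybeUninit;

/// Copy items out of `iter` into `buf` until the buffer is full or the
/// iterator runs dry, and report how many slots were initialized.
fn write_prefix<T, I: Iterator<Item = T>>(buf: &mut [MaybeUninit<T>], mut iter: I) -> usize {
    let mut i = 0;
    loop {
        match iter.next() {
            // Only write while there is still room in the buffer.
            Some(value) if i < buf.len() => {
                buf[i].write(value);
            }
            // Like the arena, hand back only as many items as the iterator gave us.
            Some(_) | None => return i,
        }
        i += 1;
    }
}

fn main() {
    let mut buf = [MaybeUninit::<u32>::uninit(); 8];
    assert_eq!(write_prefix(&mut buf, 0..3), 3); // iterator shorter than the buffer
    assert_eq!(write_prefix(&mut buf, 0..100), 8); // iterator longer than the buffer
}
```
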