@@ -367,7 +367,7 @@ impl DroplessArena {
     /// current memory chunk. Returns `None` if there is no free space left to
     /// satisfy the request.
     #[inline]
-    fn alloc_raw_without_grow(&self, bytes: usize, align: usize) -> Option<&mut [u8]> {
+    fn alloc_raw_without_grow(&self, bytes: usize, align: usize) -> Option<*mut u8> {
         let ptr = self.ptr.get() as usize;
         let end = self.end.get() as usize;
         // The allocation request fits into the current chunk iff:
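The hunk cuts off just before the alignment round-up, so the arithmetic being guarded here is easiest to see in isolation. Below is a minimal, self-contained sketch of the same fit check, assuming the elided lines round `ptr` up with the usual power-of-two mask; `align_up` and `fits` are hypothetical helpers, not arena API:

```rust
// Overflow-checked round-up to a power-of-two alignment:
// (ptr + align - 1) & !(align - 1).
fn align_up(ptr: usize, align: usize) -> Option<usize> {
    debug_assert!(align.is_power_of_two());
    Some(ptr.checked_add(align - 1)? & !(align - 1))
}

// The request fits iff the aligned start plus the size stays within `end`.
fn fits(ptr: usize, end: usize, bytes: usize, align: usize) -> Option<usize> {
    let aligned = align_up(ptr, align)?;
    let new_ptr = aligned.checked_add(bytes)?;
    if new_ptr <= end { Some(aligned) } else { None }
}

fn main() {
    // 16 bytes at alignment 8 in a chunk [0x1003, 0x1020): rounds up to
    // 0x1008 and ends at 0x1018 <= 0x1020, so it fits.
    assert_eq!(fits(0x1003, 0x1020, 16, 8), Some(0x1008));
    // The same request in [0x1003, 0x1010) does not fit.
    assert_eq!(fits(0x1003, 0x1010, 16, 8), None);
}
```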
@@ -383,14 +383,14 @@ impl DroplessArena {
         let new_ptr = aligned.checked_add(bytes)?;
         if new_ptr <= end {
             self.ptr.set(new_ptr as *mut u8);
-            unsafe { Some(slice::from_raw_parts_mut(aligned as *mut u8, bytes)) }
+            Some(aligned as *mut u8)
         } else {
             None
         }
     }

     #[inline]
-    pub fn alloc_raw(&self, bytes: usize, align: usize) -> &mut [u8] {
+    pub fn alloc_raw(&self, bytes: usize, align: usize) -> *mut u8 {
         assert!(bytes != 0);
         loop {
             if let Some(a) = self.alloc_raw_without_grow(bytes, align) {
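The point of returning `*mut u8` is that the old `&mut [u8]` asserted a valid, unique reference to memory that is still uninitialized at this point; a raw pointer defers reference creation until after the caller has written. A self-contained toy version of the same bump scheme with the new signature (a sketch only: `MiniArena` is hypothetical and panics instead of growing, whereas the real arena chains new chunks on demand):

```rust
use std::cell::Cell;

struct MiniArena {
    ptr: Cell<*mut u8>, // bump cursor
    end: *mut u8,       // one past the end of the single chunk
}

impl MiniArena {
    fn new(capacity: usize) -> MiniArena {
        // Leak a boxed buffer so raw writes never alias a live `&` to it.
        let start = Box::into_raw(vec![0u8; capacity].into_boxed_slice()) as *mut u8;
        MiniArena { ptr: Cell::new(start), end: start.wrapping_add(capacity) }
    }

    // Same shape as the patched alloc_raw: return `*mut u8` so that no
    // reference to still-uninitialized memory is ever created.
    fn alloc_raw(&self, bytes: usize, align: usize) -> *mut u8 {
        assert!(bytes != 0);
        let ptr = self.ptr.get() as usize;
        let aligned = ptr.checked_add(align - 1).unwrap() & !(align - 1);
        let new_ptr = aligned.checked_add(bytes).unwrap();
        assert!(new_ptr <= self.end as usize, "toy arena: out of space");
        self.ptr.set(new_ptr as *mut u8);
        aligned as *mut u8
    }
}

fn main() {
    let arena = MiniArena::new(64);
    let p = arena.alloc_raw(std::mem::size_of::<u64>(), std::mem::align_of::<u64>()) as *mut u64;
    unsafe {
        p.write(0xfeed);        // initialize through the raw pointer first
        assert_eq!(*p, 0xfeed); // only then is reading/borrowing fine
    }
}
```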
@@ -406,7 +406,7 @@ impl DroplessArena {
     pub fn alloc<T>(&self, object: T) -> &mut T {
         assert!(!mem::needs_drop::<T>());

-        let mem = self.alloc_raw(mem::size_of::<T>(), mem::align_of::<T>()) as *mut _ as *mut T;
+        let mem = self.alloc_raw(mem::size_of::<T>(), mem::align_of::<T>()) as *mut T;

         unsafe {
             // Write into uninitialized memory.
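With `alloc_raw` now handing back a raw pointer, the old double cast `as *mut _ as *mut T` (reference to raw pointer, then to typed pointer) collapses into a single cast. The caller-side discipline is unchanged: write first, borrow second. A sketch of that order, using the global allocator as a stand-in for an arena chunk:

```rust
use std::alloc::{alloc, dealloc, Layout};
use std::ptr;

fn main() {
    let layout = Layout::new::<u64>();
    unsafe {
        let mem = alloc(layout) as *mut u64; // plays the role of alloc_raw(..) as *mut T
        assert!(!mem.is_null());
        ptr::write(mem, 42);                 // initialize the raw memory first
        let r: &mut u64 = &mut *mem;         // only now form a reference
        assert_eq!(*r, 42);
        dealloc(mem as *mut u8, layout);     // an arena frees whole chunks instead
    }
}
```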
@@ -431,13 +431,11 @@ impl DroplessArena {
         assert!(mem::size_of::<T>() != 0);
         assert!(!slice.is_empty());

-        let mem = self.alloc_raw(slice.len() * mem::size_of::<T>(), mem::align_of::<T>()) as *mut _
-            as *mut T;
+        let mem = self.alloc_raw(slice.len() * mem::size_of::<T>(), mem::align_of::<T>()) as *mut T;

         unsafe {
-            let arena_slice = slice::from_raw_parts_mut(mem, slice.len());
-            arena_slice.copy_from_slice(slice);
-            arena_slice
+            mem.copy_from_nonoverlapping(slice.as_ptr(), slice.len());
+            slice::from_raw_parts_mut(mem, slice.len())
         }
     }

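The slice path gets the same treatment: previously it materialized `&mut [T]` over uninitialized memory and then `copy_from_slice` into it; now it copies through the raw pointer and only forms the slice afterwards. The ordering in miniature, with `MaybeUninit` standing in for fresh arena memory:

```rust
use std::mem::MaybeUninit;
use std::slice;

fn main() {
    let src = [1u32, 2, 3, 4];
    // Stand-in for a fresh (uninitialized) arena allocation.
    let mut backing = MaybeUninit::<[u32; 4]>::uninit();
    let mem = backing.as_mut_ptr() as *mut u32;
    unsafe {
        // Copy before any &mut [u32] to the destination exists.
        mem.copy_from_nonoverlapping(src.as_ptr(), src.len());
        let out: &mut [u32] = slice::from_raw_parts_mut(mem, src.len());
        assert_eq!(&out[..], &src[..]);
    }
}
```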
@@ -480,7 +478,7 @@ impl DroplessArena {
                     return &mut [];
                 }
                 let size = len.checked_mul(mem::size_of::<T>()).unwrap();
-                let mem = self.alloc_raw(size, mem::align_of::<T>()) as *mut _ as *mut T;
+                let mem = self.alloc_raw(size, mem::align_of::<T>()) as *mut T;
                 unsafe { self.write_from_iter(iter, len, mem) }
             }
             (_, _) => {
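`write_from_iter` itself is outside the hunk; presumably it writes each item through the typed pointer. Roughly this shape (a sketch: `backing` stands in for the arena allocation, and the `checked_mul` guard mirrors the line above):

```rust
fn main() {
    let iter = (0u32..4).map(|x| x * x);
    let len = 4;
    // Overflow-guarded byte count, as in the diff.
    let size = len.checked_mul(std::mem::size_of::<u32>()).unwrap();
    assert_eq!(size, 16);

    let mut backing: Vec<u32> = Vec::with_capacity(len);
    let mem = backing.as_mut_ptr();
    unsafe {
        for (i, value) in iter.enumerate() {
            mem.add(i).write(value); // ptr::write: no drop of stale contents
        }
        backing.set_len(len);
    }
    assert_eq!(backing, [0, 1, 4, 9]);
}
```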
@@ -495,7 +493,7 @@ impl DroplessArena {
                     let len = vec.len();
                     let start_ptr = self
                         .alloc_raw(len * mem::size_of::<T>(), mem::align_of::<T>())
-                        as *mut _ as *mut T;
+                        as *mut T;
                     vec.as_ptr().copy_to_nonoverlapping(start_ptr, len);
                     vec.set_len(0);
                     slice::from_raw_parts_mut(start_ptr, len)
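This fallback arm moves the collected elements into the arena bitwise and then calls `set_len(0)` on the source so the originals are never dropped: ownership has moved. The same move-without-drop trick in isolation:

```rust
fn main() {
    let mut vec = vec![String::from("a"), String::from("b")];
    let len = vec.len();
    let mut dst: Vec<String> = Vec::with_capacity(len); // stands in for arena memory
    unsafe {
        vec.as_ptr().copy_to_nonoverlapping(dst.as_mut_ptr(), len);
        vec.set_len(0);  // originals must not drop: they now live in `dst`
        dst.set_len(len);
    }
    assert_eq!(dst, ["a", "b"]);
}
```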
@@ -539,8 +537,7 @@ pub struct DropArena {
 impl DropArena {
     #[inline]
     pub unsafe fn alloc<T>(&self, object: T) -> &mut T {
-        let mem =
-            self.arena.alloc_raw(mem::size_of::<T>(), mem::align_of::<T>()) as *mut _ as *mut T;
+        let mem = self.arena.alloc_raw(mem::size_of::<T>(), mem::align_of::<T>()) as *mut T;
         // Write into uninitialized memory.
         ptr::write(mem, object);
         let result = &mut *mem;
@@ -563,7 +560,7 @@ impl DropArena {
         let start_ptr = self
             .arena
             .alloc_raw(len.checked_mul(mem::size_of::<T>()).unwrap(), mem::align_of::<T>())
-            as *mut _ as *mut T;
+            as *mut T;

         let mut destructors = self.destructors.borrow_mut();
         // Reserve space for the destructors so we can't panic while adding them
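The comment is cut off by the hunk boundary, but the visible part already states the invariant: grow the destructor list before writing any object, so the only step that can fail happens while there is still nothing to leak. The reserve-then-record shape, with hypothetical stand-ins (`DropEntry`, `drop_string`) for the arena's actual record type:

```rust
struct DropEntry {
    obj: *mut u8,
    drop_fn: unsafe fn(*mut u8),
}

unsafe fn drop_string(p: *mut u8) {
    std::ptr::drop_in_place(p as *mut String);
}

fn main() {
    let mut destructors: Vec<DropEntry> = Vec::new();
    let len = 2;

    // Any allocation failure in the destructor list happens here, before
    // the objects exist, so the `push` calls below cannot panic and leave
    // initialized values with no recorded destructor.
    destructors.reserve(len);

    let mut a = String::from("alloc a");
    let mut b = String::from("alloc b");
    destructors.push(DropEntry { obj: &mut a as *mut String as *mut u8, drop_fn: drop_string });
    destructors.push(DropEntry { obj: &mut b as *mut String as *mut u8, drop_fn: drop_string });
    assert_eq!(destructors.len(), len);
    // (`a` and `b` drop normally here; an arena would invoke drop_fn instead.)
}
```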