 #[stable(feature = "alloc_module", since = "1.28.0")]
 #[doc(inline)]
 pub use core::alloc::*;
-use core::hint;
 use core::ptr::{self, NonNull};
+use core::{cmp, hint};
 
 unsafe extern "Rust" {
     // These are the magic symbols to call the global allocator. rustc generates
@@ -182,7 +182,7 @@ pub unsafe fn alloc_zeroed(layout: Layout) -> *mut u8 {
 impl Global {
     #[inline]
     #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
-    fn alloc_impl(&self, layout: Layout, zeroed: bool) -> Result<NonNull<[u8]>, AllocError> {
+    fn alloc_impl_runtime(layout: Layout, zeroed: bool) -> Result<NonNull<[u8]>, AllocError> {
         match layout.size() {
             0 => Ok(NonNull::slice_from_raw_parts(layout.dangling(), 0)),
             // SAFETY: `layout` is non-zero in size,
@@ -194,10 +194,26 @@ impl Global {
         }
     }
 
+    #[inline]
+    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
+    fn deallocate_impl_runtime(ptr: NonNull<u8>, layout: Layout) {
+        if layout.size() != 0 {
+            // SAFETY:
+            // * We have checked that `layout` is non-zero in size.
+            // * The caller is obligated to provide a layout that "fits", and in this case,
+            //   "fit" always means a layout that is equal to the original, because our
+            //   `allocate()`, `grow()`, and `shrink()` implementations never return a larger
+            //   allocation than requested.
+            // * Other conditions must be upheld by the caller, as per `Allocator::deallocate()`'s
+            //   safety documentation.
+            unsafe { dealloc(ptr.as_ptr(), layout) }
+        }
+    }
+
     // SAFETY: Same as `Allocator::grow`
     #[inline]
     #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
-    unsafe fn grow_impl(
+    fn grow_impl_runtime(
         &self,
         ptr: NonNull<u8>,
         old_layout: Layout,
@@ -241,10 +257,176 @@ impl Global {
             },
         }
     }
+
+    // SAFETY: Same as `Allocator::shrink`
+    #[inline]
+    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
+    fn shrink_impl_runtime(
+        &self,
+        ptr: NonNull<u8>,
+        old_layout: Layout,
+        new_layout: Layout,
+        _zeroed: bool,
+    ) -> Result<NonNull<[u8]>, AllocError> {
+        debug_assert!(
+            new_layout.size() <= old_layout.size(),
+            "`new_layout.size()` must be smaller than or equal to `old_layout.size()`"
+        );
+
+        match new_layout.size() {
+            // SAFETY: conditions must be upheld by the caller
+            0 => unsafe {
+                self.deallocate(ptr, old_layout);
+                Ok(NonNull::slice_from_raw_parts(new_layout.dangling(), 0))
+            },
+
+            // SAFETY: `new_size` is non-zero. Other conditions must be upheld by the caller
+            new_size if old_layout.align() == new_layout.align() => unsafe {
+                // `realloc` probably checks for `new_size <= old_layout.size()` or something similar.
+                hint::assert_unchecked(new_size <= old_layout.size());
+
+                let raw_ptr = realloc(ptr.as_ptr(), old_layout, new_size);
+                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
+                Ok(NonNull::slice_from_raw_parts(ptr, new_size))
+            },
+
+            // SAFETY: because `new_size` must be smaller than or equal to `old_layout.size()`,
+            // both the old and new memory allocation are valid for reads and writes for `new_size`
+            // bytes. Also, because the old allocation wasn't yet deallocated, it cannot overlap
+            // `new_ptr`. Thus, the call to `copy_nonoverlapping` is safe. The safety contract
+            // for `dealloc` must be upheld by the caller.
+            new_size => unsafe {
+                let new_ptr = self.allocate(new_layout)?;
+                ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), new_size);
+                self.deallocate(ptr, old_layout);
+                Ok(new_ptr)
+            },
+        }
+    }
+
+    // SAFETY: Same as `Allocator::allocate`
+    #[inline]
+    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
+    #[rustc_const_unstable(feature = "const_heap", issue = "79597")]
+    const fn alloc_impl(&self, layout: Layout, zeroed: bool) -> Result<NonNull<[u8]>, AllocError> {
+        core::intrinsics::const_eval_select(
+            (layout, zeroed),
+            Global::alloc_impl_const,
+            Global::alloc_impl_runtime,
+        )
+    }
+
+    // SAFETY: Same as `Allocator::deallocate`
+    #[inline]
+    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
+    #[rustc_const_unstable(feature = "const_heap", issue = "79597")]
+    const unsafe fn deallocate_impl(&self, ptr: NonNull<u8>, layout: Layout) {
+        core::intrinsics::const_eval_select(
+            (ptr, layout),
+            Global::deallocate_impl_const,
+            Global::deallocate_impl_runtime,
+        )
+    }
+
+    // SAFETY: Same as `Allocator::grow`
+    #[inline]
+    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
+    #[rustc_const_unstable(feature = "const_heap", issue = "79597")]
+    const unsafe fn grow_impl(
+        &self,
+        ptr: NonNull<u8>,
+        old_layout: Layout,
+        new_layout: Layout,
+        zeroed: bool,
+    ) -> Result<NonNull<[u8]>, AllocError> {
+        core::intrinsics::const_eval_select(
+            (self, ptr, old_layout, new_layout, zeroed),
+            Global::grow_shrink_impl_const,
+            Global::grow_impl_runtime,
+        )
+    }
+
+    // SAFETY: Same as `Allocator::shrink`
+    #[inline]
+    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
+    #[rustc_const_unstable(feature = "const_heap", issue = "79597")]
+    const unsafe fn shrink_impl(
+        &self,
+        ptr: NonNull<u8>,
+        old_layout: Layout,
+        new_layout: Layout,
+    ) -> Result<NonNull<[u8]>, AllocError> {
+        core::intrinsics::const_eval_select(
+            (self, ptr, old_layout, new_layout, false),
+            Global::grow_shrink_impl_const,
+            Global::shrink_impl_runtime,
+        )
+    }
+
+    #[inline]
+    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
+    #[rustc_const_unstable(feature = "const_heap", issue = "79597")]
+    const fn alloc_impl_const(layout: Layout, zeroed: bool) -> Result<NonNull<[u8]>, AllocError> {
+        match layout.size() {
+            0 => Ok(NonNull::slice_from_raw_parts(layout.dangling(), 0)),
+            // SAFETY: `layout` is non-zero in size,
+            size => unsafe {
+                let raw_ptr = core::intrinsics::const_allocate(layout.size(), layout.align());
+                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
+                if zeroed {
+                    let mut offset = 0;
+                    while offset < size {
+                        // SAFETY: the pointer returned by `const_allocate` is valid to write to.
+                        ptr.add(offset).write(0);
+                        offset += 1;
+                    }
+                }
+                Ok(NonNull::slice_from_raw_parts(ptr, size))
+            },
+        }
+    }
+
+    #[inline]
+    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
+    #[rustc_const_unstable(feature = "const_heap", issue = "79597")]
+    const fn deallocate_impl_const(ptr: NonNull<u8>, layout: Layout) {
+        if layout.size() != 0 {
+            // SAFETY: We checked for nonzero size; other preconditions must be upheld by the caller.
+            unsafe {
+                core::intrinsics::const_deallocate(ptr.as_ptr(), layout.size(), layout.align());
+            }
+        }
+    }
+
+    #[inline]
+    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
+    #[rustc_const_unstable(feature = "const_heap", issue = "79597")]
+    const fn grow_shrink_impl_const(
+        &self,
+        ptr: NonNull<u8>,
+        old_layout: Layout,
+        new_layout: Layout,
+        zeroed: bool,
+    ) -> Result<NonNull<[u8]>, AllocError> {
+        let new_ptr = self.alloc_impl(new_layout, zeroed)?;
+        // SAFETY: both pointers are valid and this operation is in bounds.
+        unsafe {
+            ptr::copy_nonoverlapping(
+                ptr.as_ptr(),
+                new_ptr.as_mut_ptr(),
+                cmp::min(old_layout.size(), new_layout.size()),
+            );
+        }
+        unsafe {
+            self.deallocate_impl(ptr, old_layout);
+        }
+        Ok(new_ptr)
+    }
 }
 
 #[unstable(feature = "allocator_api", issue = "32838")]
-unsafe impl Allocator for Global {
+#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
+unsafe impl const Allocator for Global {
     #[inline]
     #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
     fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
@@ -260,17 +442,8 @@ unsafe impl Allocator for Global {
     #[inline]
     #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
     unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
-        if layout.size() != 0 {
-            // SAFETY:
-            // * We have checked that `layout` is non-zero in size.
-            // * The caller is obligated to provide a layout that "fits", and in this case,
-            //   "fit" always means a layout that is equal to the original, because our
-            //   `allocate()`, `grow()`, and `shrink()` implementations never returns a larger
-            //   allocation than requested.
-            // * Other conditions must be upheld by the caller, as per `Allocator::deallocate()`'s
-            //   safety documentation.
-            unsafe { dealloc(ptr.as_ptr(), layout) }
-        }
+        // SAFETY: all conditions must be upheld by the caller
+        unsafe { self.deallocate_impl(ptr, layout) }
     }
 
     #[inline]
@@ -305,40 +478,8 @@ unsafe impl Allocator for Global {
         old_layout: Layout,
         new_layout: Layout,
     ) -> Result<NonNull<[u8]>, AllocError> {
-        debug_assert!(
-            new_layout.size() <= old_layout.size(),
-            "`new_layout.size()` must be smaller than or equal to `old_layout.size()`"
-        );
-
-        match new_layout.size() {
-            // SAFETY: conditions must be upheld by the caller
-            0 => unsafe {
-                self.deallocate(ptr, old_layout);
-                Ok(NonNull::slice_from_raw_parts(new_layout.dangling(), 0))
-            },
-
-            // SAFETY: `new_size` is non-zero. Other conditions must be upheld by the caller
-            new_size if old_layout.align() == new_layout.align() => unsafe {
-                // `realloc` probably checks for `new_size <= old_layout.size()` or something similar.
-                hint::assert_unchecked(new_size <= old_layout.size());
-
-                let raw_ptr = realloc(ptr.as_ptr(), old_layout, new_size);
-                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
-                Ok(NonNull::slice_from_raw_parts(ptr, new_size))
-            },
-
-            // SAFETY: because `new_size` must be smaller than or equal to `old_layout.size()`,
-            // both the old and new memory allocation are valid for reads and writes for `new_size`
-            // bytes. Also, because the old allocation wasn't yet deallocated, it cannot overlap
-            // `new_ptr`. Thus, the call to `copy_nonoverlapping` is safe. The safety contract
-            // for `dealloc` must be upheld by the caller.
-            new_size => unsafe {
-                let new_ptr = self.allocate(new_layout)?;
-                ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), new_size);
-                self.deallocate(ptr, old_layout);
-                Ok(new_ptr)
-            },
-        }
+        // SAFETY: all conditions must be upheld by the caller
+        unsafe { self.shrink_impl(ptr, old_layout, new_layout) }
     }
 }
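A minimal usage sketch (not part of the diff): assuming a nightly toolchain where this change has landed and the unstable `allocator_api`, `const_heap`, and `const_trait_impl` features are enabled, the `const` impl above should let `Global` allocate during const evaluation, with `const_eval_select` routing the call to `alloc_impl_const`/`const_allocate`. The feature gates and const-callability details below are assumptions, not something the diff itself shows.

    // Hypothetical example; not part of the commit above.
    #![feature(allocator_api, const_heap, const_trait_impl)]

    use std::alloc::{Allocator, Global, Layout};

    // Allocate, initialize, read back, and free a single byte at compile time.
    const fn roundtrip() -> u8 {
        let layout = Layout::new::<u8>();
        let ptr = match Global.allocate(layout) {
            Ok(block) => block.cast::<u8>(),
            Err(_) => panic!("const allocation failed"),
        };
        // SAFETY: `ptr` was just allocated with `layout` and is not aliased.
        unsafe {
            ptr.as_ptr().write(42);
            let value = ptr.as_ptr().read();
            Global.deallocate(ptr, layout);
            value
        }
    }

    const VALUE: u8 = roundtrip();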