Commit 57e6fd9

Vec::push in consts MVP
1 parent 6380899 commit 57e6fd9
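
This commit's goal, per the title, is an MVP for calling `Vec::push` inside `const` contexts. For orientation, a hypothetical nightly-only sketch of the kind of code this is working toward is shown below; the exact feature gates needed and what may escape a `const` block are assumptions, not taken from this page.

```rust
// Hypothetical sketch of the target use case; the required feature gates
// are an assumption and may differ on the actual branch.
#![feature(const_heap)]

const LEN: usize = {
    let mut v: Vec<u32> = Vec::new();
    v.push(1);
    v.push(2);
    v.push(3);
    // The Vec is dropped at the end of this block, so the compile-time
    // allocation is released again; only a plain `usize` escapes the const.
    v.len()
};

fn main() {
    assert_eq!(LEN, 3);
}
```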

File tree

11 files changed: +351, -77 lines


library/alloc/src/alloc.rs

Lines changed: 190 additions & 49 deletions
@@ -5,8 +5,8 @@
 #[stable(feature = "alloc_module", since = "1.28.0")]
 #[doc(inline)]
 pub use core::alloc::*;
-use core::hint;
 use core::ptr::{self, NonNull};
+use core::{cmp, hint};
 
 unsafe extern "Rust" {
     // These are the magic symbols to call the global allocator. rustc generates
@@ -182,7 +182,7 @@ pub unsafe fn alloc_zeroed(layout: Layout) -> *mut u8 {
 impl Global {
     #[inline]
     #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
-    fn alloc_impl(&self, layout: Layout, zeroed: bool) -> Result<NonNull<[u8]>, AllocError> {
+    fn alloc_impl_runtime(layout: Layout, zeroed: bool) -> Result<NonNull<[u8]>, AllocError> {
         match layout.size() {
             0 => Ok(NonNull::slice_from_raw_parts(layout.dangling(), 0)),
             // SAFETY: `layout` is non-zero in size,
@@ -194,10 +194,26 @@ impl Global {
         }
     }
 
+    #[inline]
+    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
+    fn deallocate_impl_runtime(ptr: NonNull<u8>, layout: Layout) {
+        if layout.size() != 0 {
+            // SAFETY:
+            // * We have checked that `layout` is non-zero in size.
+            // * The caller is obligated to provide a layout that "fits", and in this case,
+            //   "fit" always means a layout that is equal to the original, because our
+            //   `allocate()`, `grow()`, and `shrink()` implementations never return a larger
+            //   allocation than requested.
+            // * Other conditions must be upheld by the caller, as per `Allocator::deallocate()`'s
+            //   safety documentation.
+            unsafe { dealloc(ptr.as_ptr(), layout) }
+        }
+    }
+
     // SAFETY: Same as `Allocator::grow`
     #[inline]
     #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
-    unsafe fn grow_impl(
+    fn grow_impl_runtime(
         &self,
         ptr: NonNull<u8>,
         old_layout: Layout,
@@ -241,10 +257,176 @@ impl Global {
             },
         }
     }
+
+    // SAFETY: Same as `Allocator::shrink`
+    #[inline]
+    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
+    fn shrink_impl_runtime(
+        &self,
+        ptr: NonNull<u8>,
+        old_layout: Layout,
+        new_layout: Layout,
+        _zeroed: bool,
+    ) -> Result<NonNull<[u8]>, AllocError> {
+        debug_assert!(
+            new_layout.size() <= old_layout.size(),
+            "`new_layout.size()` must be smaller than or equal to `old_layout.size()`"
+        );
+
+        match new_layout.size() {
+            // SAFETY: conditions must be upheld by the caller
+            0 => unsafe {
+                self.deallocate(ptr, old_layout);
+                Ok(NonNull::slice_from_raw_parts(new_layout.dangling(), 0))
+            },
+
+            // SAFETY: `new_size` is non-zero. Other conditions must be upheld by the caller
+            new_size if old_layout.align() == new_layout.align() => unsafe {
+                // `realloc` probably checks for `new_size <= old_layout.size()` or something similar.
+                hint::assert_unchecked(new_size <= old_layout.size());
+
+                let raw_ptr = realloc(ptr.as_ptr(), old_layout, new_size);
+                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
+                Ok(NonNull::slice_from_raw_parts(ptr, new_size))
+            },
+
+            // SAFETY: because `new_size` must be smaller than or equal to `old_layout.size()`,
+            // both the old and new memory allocation are valid for reads and writes for `new_size`
+            // bytes. Also, because the old allocation wasn't yet deallocated, it cannot overlap
+            // `new_ptr`. Thus, the call to `copy_nonoverlapping` is safe. The safety contract
+            // for `dealloc` must be upheld by the caller.
+            new_size => unsafe {
+                let new_ptr = self.allocate(new_layout)?;
+                ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), new_size);
+                self.deallocate(ptr, old_layout);
+                Ok(new_ptr)
+            },
+        }
+    }
+
+    // SAFETY: Same as `Allocator::allocate`
+    #[inline]
+    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
+    #[rustc_const_unstable(feature = "const_heap", issue = "79597")]
+    const fn alloc_impl(&self, layout: Layout, zeroed: bool) -> Result<NonNull<[u8]>, AllocError> {
+        core::intrinsics::const_eval_select(
+            (layout, zeroed),
+            Global::alloc_impl_const,
+            Global::alloc_impl_runtime,
+        )
+    }
+
+    // SAFETY: Same as `Allocator::deallocate`
+    #[inline]
+    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
+    #[rustc_const_unstable(feature = "const_heap", issue = "79597")]
+    const unsafe fn deallocate_impl(&self, ptr: NonNull<u8>, layout: Layout) {
+        core::intrinsics::const_eval_select(
+            (ptr, layout),
+            Global::deallocate_impl_const,
+            Global::deallocate_impl_runtime,
+        )
+    }
+
+    // SAFETY: Same as `Allocator::grow`
+    #[inline]
+    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
+    #[rustc_const_unstable(feature = "const_heap", issue = "79597")]
+    const unsafe fn grow_impl(
+        &self,
+        ptr: NonNull<u8>,
+        old_layout: Layout,
+        new_layout: Layout,
+        zeroed: bool,
+    ) -> Result<NonNull<[u8]>, AllocError> {
+        core::intrinsics::const_eval_select(
+            (self, ptr, old_layout, new_layout, zeroed),
+            Global::grow_shrink_impl_const,
+            Global::grow_impl_runtime,
+        )
+    }
+
+    // SAFETY: Same as `Allocator::shrink`
+    #[inline]
+    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
+    #[rustc_const_unstable(feature = "const_heap", issue = "79597")]
+    const unsafe fn shrink_impl(
+        &self,
+        ptr: NonNull<u8>,
+        old_layout: Layout,
+        new_layout: Layout,
+    ) -> Result<NonNull<[u8]>, AllocError> {
+        core::intrinsics::const_eval_select(
+            (self, ptr, old_layout, new_layout, false),
+            Global::grow_shrink_impl_const,
+            Global::shrink_impl_runtime,
+        )
+    }
+
+    #[inline]
+    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
+    #[rustc_const_unstable(feature = "const_heap", issue = "79597")]
+    const fn alloc_impl_const(layout: Layout, zeroed: bool) -> Result<NonNull<[u8]>, AllocError> {
+        match layout.size() {
+            0 => Ok(NonNull::slice_from_raw_parts(layout.dangling(), 0)),
+            // SAFETY: `layout` is non-zero in size,
+            size => unsafe {
+                let raw_ptr = core::intrinsics::const_allocate(layout.size(), layout.align());
+                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
+                if zeroed {
+                    let mut offset = 0;
+                    while offset < size {
+                        // SAFETY: the pointer returned by `const_allocate` is valid to write to.
+                        ptr.add(offset).write(0);
+                        offset += 1;
+                    }
+                }
+                Ok(NonNull::slice_from_raw_parts(ptr, size))
+            },
+        }
+    }
+
+    #[inline]
+    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
+    #[rustc_const_unstable(feature = "const_heap", issue = "79597")]
+    const fn deallocate_impl_const(ptr: NonNull<u8>, layout: Layout) {
+        if layout.size() != 0 {
+            // SAFETY: We checked for nonzero size; other preconditions must be upheld by caller.
+            unsafe {
+                core::intrinsics::const_deallocate(ptr.as_ptr(), layout.size(), layout.align());
+            }
+        }
+    }
+
+    #[inline]
+    #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
+    #[rustc_const_unstable(feature = "const_heap", issue = "79597")]
+    const fn grow_shrink_impl_const(
+        &self,
+        ptr: NonNull<u8>,
+        old_layout: Layout,
+        new_layout: Layout,
+        zeroed: bool,
+    ) -> Result<NonNull<[u8]>, AllocError> {
+        let new_ptr = self.alloc_impl(new_layout, zeroed)?;
+        // SAFETY: both pointers are valid and this operation is in bounds.
+        unsafe {
+            ptr::copy_nonoverlapping(
+                ptr.as_ptr(),
+                new_ptr.as_mut_ptr(),
+                cmp::min(old_layout.size(), new_layout.size()),
+            );
+        }
+        unsafe {
+            self.deallocate_impl(ptr, old_layout);
+        }
+        Ok(new_ptr)
+    }
 }
 
 #[unstable(feature = "allocator_api", issue = "32838")]
-unsafe impl Allocator for Global {
+#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
+unsafe impl const Allocator for Global {
     #[inline]
     #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
     fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
@@ -260,17 +442,8 @@ unsafe impl Allocator for Global {
     #[inline]
     #[cfg_attr(miri, track_caller)] // even without panics, this helps for Miri backtraces
     unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
-        if layout.size() != 0 {
-            // SAFETY:
-            // * We have checked that `layout` is non-zero in size.
-            // * The caller is obligated to provide a layout that "fits", and in this case,
-            //   "fit" always means a layout that is equal to the original, because our
-            //   `allocate()`, `grow()`, and `shrink()` implementations never returns a larger
-            //   allocation than requested.
-            // * Other conditions must be upheld by the caller, as per `Allocator::deallocate()`'s
-            //   safety documentation.
-            unsafe { dealloc(ptr.as_ptr(), layout) }
-        }
+        // SAFETY: all conditions must be upheld by the caller
+        unsafe { self.deallocate_impl(ptr, layout) }
     }
 
     #[inline]
@@ -305,40 +478,8 @@ unsafe impl Allocator for Global {
         old_layout: Layout,
         new_layout: Layout,
     ) -> Result<NonNull<[u8]>, AllocError> {
-        debug_assert!(
-            new_layout.size() <= old_layout.size(),
-            "`new_layout.size()` must be smaller than or equal to `old_layout.size()`"
-        );
-
-        match new_layout.size() {
-            // SAFETY: conditions must be upheld by the caller
-            0 => unsafe {
-                self.deallocate(ptr, old_layout);
-                Ok(NonNull::slice_from_raw_parts(new_layout.dangling(), 0))
-            },
-
-            // SAFETY: `new_size` is non-zero. Other conditions must be upheld by the caller
-            new_size if old_layout.align() == new_layout.align() => unsafe {
-                // `realloc` probably checks for `new_size <= old_layout.size()` or something similar.
-                hint::assert_unchecked(new_size <= old_layout.size());
-
-                let raw_ptr = realloc(ptr.as_ptr(), old_layout, new_size);
-                let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
-                Ok(NonNull::slice_from_raw_parts(ptr, new_size))
-            },
-
-            // SAFETY: because `new_size` must be smaller than or equal to `old_layout.size()`,
-            // both the old and new memory allocation are valid for reads and writes for `new_size`
-            // bytes. Also, because the old allocation wasn't yet deallocated, it cannot overlap
-            // `new_ptr`. Thus, the call to `copy_nonoverlapping` is safe. The safety contract
-            // for `dealloc` must be upheld by the caller.
-            new_size => unsafe {
-                let new_ptr = self.allocate(new_layout)?;
-                ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), new_size);
-                self.deallocate(ptr, old_layout);
-                Ok(new_ptr)
-            },
-        }
+        // SAFETY: all conditions must be upheld by the caller
+        unsafe { self.shrink_impl(ptr, old_layout, new_layout) }
     }
 }

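The new `alloc_impl`, `deallocate_impl`, `grow_impl`, and `shrink_impl` wrappers above all rely on the same mechanism: `core::intrinsics::const_eval_select` takes an argument tuple and two functions, calling the first during const evaluation and the second at runtime. A minimal standalone sketch of that dispatch pattern (not code from this commit) could look like this:

```rust
#![feature(core_intrinsics, const_eval_select)]
#![allow(internal_features)]

// Returns `true` when evaluated at compile time and `false` at runtime.
// Both arms must accept the same argument tuple and share a return type,
// mirroring the paired `*_const` / `*_runtime` methods in the diff above.
const fn evaluated_in_const() -> bool {
    const fn in_const() -> bool {
        true
    }
    fn at_runtime() -> bool {
        false
    }
    core::intrinsics::const_eval_select((), in_const, at_runtime)
}

const IN_CONST: bool = evaluated_in_const(); // const evaluation picks `in_const`

fn main() {
    assert!(IN_CONST);
    assert!(!evaluated_in_const()); // a runtime call picks `at_runtime`
}
```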
library/alloc/src/collections/mod.rs

Lines changed: 21 additions & 2 deletions
@@ -84,13 +84,14 @@ impl TryReserveError {
         reason = "Uncertain how much info should be exposed",
         issue = "48043"
     )]
-    pub fn kind(&self) -> TryReserveErrorKind {
+    #[rustc_const_unstable(feature = "const_heap", issue = "79597")]
+    pub const fn kind(&self) -> TryReserveErrorKind {
         self.kind.clone()
     }
 }
 
 /// Details of the allocation that caused a `TryReserveError`
-#[derive(Clone, PartialEq, Eq, Debug)]
+#[derive(PartialEq, Eq, Debug)]
 #[unstable(
     feature = "try_reserve_kind",
     reason = "Uncertain how much info should be exposed",
@@ -120,6 +121,24 @@ pub enum TryReserveErrorKind {
     },
 }
 
+#[unstable(
+    feature = "try_reserve_kind",
+    reason = "Uncertain how much info should be exposed",
+    issue = "48043"
+)]
+#[rustc_const_unstable(feature = "const_heap", issue = "79597")]
+#[cfg(not(test))]
+impl const Clone for TryReserveErrorKind {
+    fn clone(&self) -> Self {
+        match self {
+            TryReserveErrorKind::CapacityOverflow => TryReserveErrorKind::CapacityOverflow,
+            TryReserveErrorKind::AllocError { layout, non_exhaustive: () } => {
+                TryReserveErrorKind::AllocError { layout: *layout, non_exhaustive: () }
+            }
+        }
+    }
+}
+
 #[cfg(test)]
 pub use realalloc::collections::TryReserveErrorKind;

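The hand-written `impl const Clone` above replaces the derived impl because `kind()` is now a `const fn`, so the `self.kind.clone()` call needs a `Clone` impl that is callable in const context. A reduced illustration of that pattern, using purely hypothetical names, might look like the following:

```rust
#![feature(const_trait_impl, const_clone)]

// Hypothetical stand-in for `TryReserveErrorKind`; the names and the exact
// feature gates are illustrative only, not part of this commit.
#[derive(PartialEq, Eq, Debug)]
enum Kind {
    Overflow,
    Alloc { size: usize },
}

// A `#[derive(Clone)]` impl would not be callable from a `const fn`;
// writing it as `impl const Clone` makes `clone()` usable there.
impl const Clone for Kind {
    fn clone(&self) -> Self {
        match self {
            Kind::Overflow => Kind::Overflow,
            Kind::Alloc { size } => Kind::Alloc { size: *size },
        }
    }
}

const fn duplicate(k: &Kind) -> Kind {
    k.clone()
}

const COPY: Kind = duplicate(&Kind::Alloc { size: 16 });

fn main() {
    assert_eq!(COPY, Kind::Alloc { size: 16 });
}
```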
library/alloc/src/lib.rs

Lines changed: 6 additions & 0 deletions
@@ -101,10 +101,15 @@
 #![feature(char_max_len)]
 #![feature(clone_to_uninit)]
 #![feature(coerce_unsized)]
+#![feature(const_clone)]
+#![feature(const_cmp)]
 #![feature(const_convert)]
 #![feature(const_default)]
+#![feature(const_destruct)]
 #![feature(const_eval_select)]
 #![feature(const_heap)]
+#![feature(const_option_ops)]
+#![feature(const_try)]
 #![feature(core_intrinsics)]
 #![feature(deprecated_suggestion)]
 #![feature(deref_pure_trait)]
@@ -165,6 +170,7 @@
 #![feature(const_trait_impl)]
 #![feature(coroutine_trait)]
 #![feature(decl_macro)]
+#![feature(derive_const)]
 #![feature(dropck_eyepatch)]
 #![feature(fundamental)]
 #![feature(hashmap_internals)]
