Skip to content

Commit c5727d9

Browse files
committed
Implement some more checks for ptr_guaranteed_cmp in consteval:
Pointers with different residues modulo their least common allocation alignment are never equal. Pointers to the same static allocation are equal if and only if they have the same offset. Pointers to different non-zero-sized static allocations are unequal if both point within their allocation, and not on opposite ends.
1 parent 07b7dc9 commit c5727d9

File tree

2 files changed

+256
-7
lines changed

2 files changed

+256
-7
lines changed

compiler/rustc_const_eval/src/const_eval/machine.rs

Lines changed: 85 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -289,13 +289,91 @@ impl<'tcx> CompileTimeInterpCx<'tcx> {
289289
}
290290
// Other ways of comparing integers and pointers can never be known for sure.
291291
(Scalar::Int { .. }, Scalar::Ptr(..)) | (Scalar::Ptr(..), Scalar::Int { .. }) => 2,
292-
// FIXME: return a `1` for when both sides are the same pointer, *except* that
293-
// some things (like functions and vtables) do not have stable addresses
294-
// so we need to be careful around them (see e.g. #73722).
295-
// FIXME: return `0` for at least some comparisons where we can reliably
296-
// determine the result of runtime inequality tests at compile-time.
297-
// Examples include comparison of addresses in different static items.
298-
(Scalar::Ptr(..), Scalar::Ptr(..)) => 2,
292+
(Scalar::Ptr(a, _), Scalar::Ptr(b, _)) => {
293+
let (a_prov, a_offset) = a.prov_and_relative_offset();
294+
let (b_prov, b_offset) = b.prov_and_relative_offset();
295+
let a_allocid = a_prov.alloc_id();
296+
let b_allocid = b_prov.alloc_id();
297+
let a_info = self.get_alloc_info(a_allocid);
298+
let b_info = self.get_alloc_info(b_allocid);
299+
300+
// Check if the pointers cannot be equal due to alignment
301+
if a_info.align > Align::ONE && b_info.align > Align::ONE {
302+
let min_align = Ord::min(a_info.align.bytes(), b_info.align.bytes());
303+
let a_residue = a_offset.bytes() % min_align;
304+
let b_residue = b_offset.bytes() % min_align;
305+
if a_residue != b_residue {
306+
// If the two pointers have a different residue from their
307+
// common alignment, they cannot be equal.
308+
return interp_ok(0);
309+
}
310+
// The pointers have the same residue modulo their common alignment,
311+
// so they could be equal. Try the other checks.
312+
}
313+
314+
if a_allocid == b_allocid {
315+
match self.tcx.try_get_global_alloc(a_allocid) {
316+
None => 2,
317+
// A static cannot be duplicated, so if two pointers are into the same
318+
// static, they are equal if and only if their offsets into the static
319+
// are equal
320+
Some(GlobalAlloc::Static(_)) => (a_offset == b_offset) as u8,
321+
// Functions and vtables can be duplicated (and deduplicated), so we
322+
// cannot be sure of runtime equality of pointers to the same one, (or the
323+
// runtime inequality of pointers to different ones) (see e.g. #73722).
324+
Some(GlobalAlloc::Function { .. } | GlobalAlloc::VTable(..)) => 2,
325+
// FIXME: Can these be duplicated (or deduplicated)?
326+
Some(GlobalAlloc::Memory(..) | GlobalAlloc::TypeId { .. }) => 2,
327+
}
328+
} else {
329+
if let (Some(GlobalAlloc::Static(a_did)), Some(GlobalAlloc::Static(b_did))) = (
330+
self.tcx.try_get_global_alloc(a_allocid),
331+
self.tcx.try_get_global_alloc(b_allocid),
332+
) {
333+
debug_assert_ne!(
334+
a_did, b_did,
335+
"same static item DefId had two different AllocIds? {a_allocid:?} != {b_allocid:?}, {a_did:?} == {b_did:?}"
336+
);
337+
338+
if a_info.size == Size::ZERO || b_info.size == Size::ZERO {
339+
// One or both allocations are zero-sized, so we can't know if the
340+
// pointers are (in)equal.
341+
// FIXME: Can zero-sized static be "within" non-zero-sized statics?
342+
// Conservatively we say yes, since that doesn't cause them to
343+
// "overlap" any bytes, but if not, then we could delete this branch
344+
// and have the other branches handle ZST allocations.
345+
2
346+
} else if a_offset > a_info.size || b_offset > b_info.size {
347+
// One or both pointers are out of bounds of their allocation,
348+
// so conservatively say we don't know.
349+
// FIXME: we could reason about how far out of bounds the pointers are,
350+
// e.g. two pointers cannot be equal if them being equal would require
351+
// their statics to overlap.
352+
2
353+
} else if (a_offset == Size::ZERO && b_offset == b_info.size)
354+
|| (a_offset == a_info.size && b_offset == Size::ZERO)
355+
{
356+
// The pointers are on opposite ends of different allocations, we
357+
// cannot know if they are equal, since the allocations may end up
358+
// adjacent at runtime.
359+
2
360+
} else {
361+
// The pointers are within (or one past the end of) different
362+
// non-zero-sized static allocations, and they are not at opposite
363+
// ends, so we know they are not equal because statics cannot
364+
// overlap or be deduplicated.
365+
0
366+
}
367+
} else {
368+
// Even if one of them is a static, as per https://doc.rust-lang.org/nightly/reference/items/static-items.html#r-items.static.storage-disjointness
369+
// immutable statics can overlap with other kinds of allocations sometimes.
370+
// FIXME: We could be more decisive for mutable statics, which cannot
371+
// overlap with other kinds of allocations.
372+
// FIXME: Can we determine any other cases?
373+
2
374+
}
375+
}
376+
}
299377
})
300378
}
301379
}
Lines changed: 171 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,171 @@
1+
//@ build-pass
2+
//@ edition: 2024
3+
#![feature(const_raw_ptr_comparison)]
4+
#![feature(fn_align)]
5+
// Generally:
6+
// For any `Some` return, `None` would also be valid, unless otherwise noted.
7+
// For any `None` return, only `None` is valid, unless otherwise noted.
8+
9+
macro_rules! do_test {
10+
($a:expr, $b:expr, $expected:pat) => {
11+
const _: () = {
12+
let a: *const _ = $a;
13+
let b: *const _ = $b;
14+
assert!(matches!(<*const u8>::guaranteed_eq(a.cast(), b.cast()), $expected));
15+
};
16+
};
17+
}
18+
19+
#[repr(align(2))]
20+
struct T(#[allow(unused)] u16);
21+
22+
#[repr(align(2))]
23+
struct AlignedZst;
24+
25+
static A: T = T(42);
26+
static B: T = T(42);
27+
static mut MUT_STATIC: T = T(42);
28+
static ZST: () = ();
29+
static ALIGNED_ZST: AlignedZst = AlignedZst;
30+
static LARGE_WORD_ALIGNED: [usize; 2] = [0, 1];
31+
static mut MUT_LARGE_WORD_ALIGNED: [usize; 2] = [0, 1];
32+
33+
const FN_PTR: *const () = {
34+
fn foo() {}
35+
unsafe { std::mem::transmute(foo as fn()) }
36+
};
37+
38+
const ALIGNED_FN_PTR: *const () = {
39+
#[rustc_align(2)]
40+
fn aligned_foo() {}
41+
unsafe { std::mem::transmute(aligned_foo as fn()) }
42+
};
43+
44+
// Only on armv5te-* and armv4t-*
45+
#[cfg(all(
46+
target_arch = "arm",
47+
not(target_feature = "v6"),
48+
))]
49+
const ALIGNED_THUMB_FN_PTR: *const () = {
50+
#[rustc_align(2)]
51+
#[instruction_set(arm::t32)]
52+
fn aligned_thumb_foo() {}
53+
unsafe { std::mem::transmute(aligned_thumb_foo as fn()) }
54+
};
55+
56+
trait Trait {
57+
#[allow(unused)]
58+
fn method(&self) -> u8;
59+
}
60+
impl Trait for u32 {
61+
fn method(&self) -> u8 { 1 }
62+
}
63+
impl Trait for i32 {
64+
fn method(&self) -> u8 { 2 }
65+
}
66+
67+
const VTABLE_PTR_1: *const () = {
68+
let [_data, vtable] = unsafe {
69+
std::mem::transmute::<&dyn Trait, [*const (); 2]>(&42_u32 as &dyn Trait)
70+
};
71+
vtable
72+
};
73+
const VTABLE_PTR_2: *const () = {
74+
let [_data, vtable] = unsafe {
75+
std::mem::transmute::<&dyn Trait, [*const (); 2]>(&42_i32 as &dyn Trait)
76+
};
77+
vtable
78+
};
79+
80+
// Cannot be `None`: static's address, references, and `fn` pointers cannot be null,
81+
// and `is_null` is stable with strong guarantees, and `is_null` is implemented using
82+
// `guaranteed_cmp`.
83+
do_test!(&A, std::ptr::null::<()>(), Some(false));
84+
do_test!(&ZST, std::ptr::null::<()>(), Some(false));
85+
do_test!(&(), std::ptr::null::<()>(), Some(false));
86+
do_test!(const { &() }, std::ptr::null::<()>(), Some(false));
87+
do_test!(FN_PTR, std::ptr::null::<()>(), Some(false));
88+
89+
// Statics cannot be duplicated
90+
do_test!(&A, &A, Some(true));
91+
92+
// Two non-ZST statics cannot have the same address
93+
do_test!(&A, &B, Some(false));
94+
do_test!(&A, &raw const MUT_STATIC, Some(false));
95+
96+
// One-past-the-end of one static can be equal to the address of another static.
97+
do_test!(&A, (&raw const B).wrapping_add(1), None);
98+
99+
// Cannot know if ZST static is at the same address with anything non-null (if alignment allows).
100+
do_test!(&A, &ZST, None);
101+
do_test!(&A, &ALIGNED_ZST, None);
102+
103+
// Unclear if ZST statics can be placed "in the middle of" non-ZST statics.
104+
// For now, we conservatively say they could, and return None here.
105+
do_test!(&ZST, (&raw const A).wrapping_byte_add(1), None);
106+
107+
// As per https://doc.rust-lang.org/nightly/reference/items/static-items.html#r-items.static.storage-disjointness
108+
// immutable statics are allowed to overlap with const items and promoteds.
109+
do_test!(&A, &T(42), None);
110+
do_test!(&A, const { &T(42) }, None);
111+
do_test!(&A, { const X: T = T(42); &X }, None);
112+
113+
// These could return Some(false), since only immutable statics can overlap with const items
114+
// and promoteds.
115+
do_test!(&raw const MUT_STATIC, &T(42), None);
116+
do_test!(&raw const MUT_STATIC, const { &T(42) }, None);
117+
do_test!(&raw const MUT_STATIC, { const X: T = T(42); &X }, None);
118+
119+
// An odd offset from a 2-aligned allocation can never be equal to an even offset from a
120+
// 2-aligned allocation, even if the offsets are out-of-bounds.
121+
do_test!(&A, (&raw const B).wrapping_byte_add(1), Some(false));
122+
do_test!(&A, (&raw const B).wrapping_byte_add(5), Some(false));
123+
do_test!(&A, (&raw const ALIGNED_ZST).wrapping_byte_add(1), Some(false));
124+
do_test!(&ALIGNED_ZST, (&raw const A).wrapping_byte_add(1), Some(false));
125+
do_test!(&A, (&T(42) as *const T).wrapping_byte_add(1), Some(false));
126+
do_test!(&A, (const { &T(42) } as *const T).wrapping_byte_add(1), Some(false));
127+
do_test!(&A, ({ const X: T = T(42); &X } as *const T).wrapping_byte_add(1), Some(false));
128+
129+
// Pointers into the same static are equal if and only if their offset is the same,
130+
// even if either is out-of-bounds.
131+
do_test!(&A, &A, Some(true));
132+
do_test!(&A, &A.0, Some(true));
133+
do_test!(&A, (&raw const A).wrapping_byte_add(1), Some(false));
134+
do_test!(&A, (&raw const A).wrapping_byte_add(2), Some(false));
135+
do_test!(&A, (&raw const A).wrapping_byte_add(51), Some(false));
136+
do_test!((&raw const A).wrapping_byte_add(51), (&raw const A).wrapping_byte_add(51), Some(true));
137+
138+
// Pointers to the same fn may be unequal, since `fn`s can be duplicated.
139+
do_test!(FN_PTR, FN_PTR, None);
140+
do_test!(ALIGNED_FN_PTR, ALIGNED_FN_PTR, None);
141+
142+
// Pointers to different fns may be equal, since `fn`s can be deduplicated.
143+
do_test!(FN_PTR, ALIGNED_FN_PTR, None);
144+
145+
// Pointers to the same vtable may be unequal, since vtables can be duplicated.
146+
do_test!(VTABLE_PTR_1, VTABLE_PTR_1, None);
147+
148+
// Pointers to different vtables may be equal, since vtables can be deduplicated.
149+
do_test!(VTABLE_PTR_1, VTABLE_PTR_2, None);
150+
151+
// Function pointers to aligned function allocations are not necessarily actually aligned,
152+
// due to platform-specific semantics.
153+
// See https://github.com/rust-lang/rust/issues/144661
154+
// FIXME: This could return `Some` on platforms where function pointers' addresses actually
155+
// correspond to function addresses including alignment, or on ARM if t32 function pointers
156+
// have their low bit set for consteval.
157+
do_test!(ALIGNED_FN_PTR, ALIGNED_FN_PTR.wrapping_byte_offset(1), None);
158+
#[cfg(all(
159+
target_arch = "arm",
160+
not(target_feature = "v6"),
161+
))]
162+
do_test!(ALIGNED_THUMB_FN_PTR, ALIGNED_THUMB_FN_PTR.wrapping_byte_offset(1), None);
163+
164+
// Conservatively say we don't know.
165+
do_test!(FN_PTR, VTABLE_PTR_1, None);
166+
do_test!((&raw const LARGE_WORD_ALIGNED).cast::<usize>().wrapping_add(1), VTABLE_PTR_1, None);
167+
do_test!((&raw const MUT_LARGE_WORD_ALIGNED).cast::<usize>().wrapping_add(1), VTABLE_PTR_1, None);
168+
do_test!((&raw const LARGE_WORD_ALIGNED).cast::<usize>().wrapping_add(1), FN_PTR, None);
169+
do_test!((&raw const MUT_LARGE_WORD_ALIGNED).cast::<usize>().wrapping_add(1), FN_PTR, None);
170+
171+
fn main() {}

0 commit comments

Comments
 (0)