@@ -1,15 +1,16 @@
 //! This module is responsible for managing the absolute addresses that allocations are located at,
 //! and for casting between pointers and integers based on those addresses.
 
+mod address_generator;
 mod reuse_pool;
 
 use std::cell::RefCell;
-use std::cmp::max;
 
-use rand::Rng;
 use rustc_abi::{Align, Size};
 use rustc_data_structures::fx::{FxHashMap, FxHashSet};
+use rustc_middle::ty::TyCtxt;
 
+pub use self::address_generator::AddressGenerator;
 use self::reuse_pool::ReusePool;
 use crate::concurrency::VClock;
 use crate::*;
@@ -49,9 +50,8 @@ pub struct GlobalStateInner {
     /// Whether an allocation has been exposed or not. This cannot be put
     /// into `AllocExtra` for the same reason as `base_addr`.
     exposed: FxHashSet<AllocId>,
-    /// This is used as a memory address when a new pointer is casted to an integer. It
-    /// is always larger than any address that was previously made part of a block.
-    next_base_addr: u64,
+    /// The generator for new addresses in a given range.
+    address_generator: AddressGenerator,
     /// The provenance to use for int2ptr casts
     provenance_mode: ProvenanceMode,
 }
@@ -64,7 +64,7 @@ impl VisitProvenance for GlobalStateInner {
             prepared_alloc_bytes: _,
             reuse: _,
             exposed: _,
-            next_base_addr: _,
+            address_generator: _,
             provenance_mode: _,
         } = self;
         // Though base_addr, int_to_ptr_map, and exposed contain AllocIds, we do not want to visit them.
@@ -77,14 +77,14 @@ impl VisitProvenance for GlobalStateInner {
 }
 
 impl GlobalStateInner {
-    pub fn new(config: &MiriConfig, stack_addr: u64) -> Self {
+    pub fn new<'tcx>(config: &MiriConfig, stack_addr: u64, tcx: TyCtxt<'tcx>) -> Self {
         GlobalStateInner {
             int_to_ptr_map: Vec::default(),
             base_addr: FxHashMap::default(),
             prepared_alloc_bytes: FxHashMap::default(),
             reuse: ReusePool::new(config),
             exposed: FxHashSet::default(),
-            next_base_addr: stack_addr,
+            address_generator: AddressGenerator::new(stack_addr..tcx.target_usize_max()),
             provenance_mode: config.provenance_mode,
         }
     }
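The added `TyCtxt` parameter only supplies the upper bound of the generator's range. The caller of `GlobalStateInner::new` is outside this excerpt; under that assumption, the corresponding call-site change would be a one-liner along these lines (variable names hypothetical):

```rust
// Hypothetical caller in the machine setup code; names are assumptions.
let alloc_addresses = RefCell::new(GlobalStateInner::new(config, stack_addr, tcx));
```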
@@ -96,15 +96,6 @@ impl GlobalStateInner {
     }
 }
 
-/// Shifts `addr` to make it aligned with `align` by rounding `addr` to the smallest multiple
-/// of `align` that is larger or equal to `addr`
-fn align_addr(addr: u64, align: u64) -> u64 {
-    match addr % align {
-        0 => addr,
-        rem => addr.strict_add(align) - rem,
-    }
-}
-
 impl<'tcx> EvalContextExtPriv<'tcx> for crate::MiriInterpCx<'tcx> {}
 trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> {
     fn addr_from_alloc_id_uncached(
@@ -194,34 +185,17 @@ trait EvalContextExtPriv<'tcx>: crate::MiriInterpCxExt<'tcx> {
             interp_ok(reuse_addr)
         } else {
             // We have to pick a fresh address.
-            // Leave some space to the previous allocation, to give it some chance to be less aligned.
-            // We ensure that `(global_state.next_base_addr + slack) % 16` is uniformly distributed.
-            let slack = rng.random_range(0..16);
-            // From next_base_addr + slack, round up to adjust for alignment.
-            let base_addr = global_state
-                .next_base_addr
-                .checked_add(slack)
-                .ok_or_else(|| err_exhaust!(AddressSpaceFull))?;
-            let base_addr = align_addr(base_addr, info.align.bytes());
-
-            // Remember next base address. If this allocation is zero-sized, leave a gap of at
-            // least 1 to avoid two allocations having the same base address. (The logic in
-            // `alloc_id_from_addr` assumes unique addresses, and different function/vtable pointers
-            // need to be distinguishable!)
-            global_state.next_base_addr = base_addr
-                .checked_add(max(info.size.bytes(), 1))
-                .ok_or_else(|| err_exhaust!(AddressSpaceFull))?;
-            // Even if `Size` didn't overflow, we might still have filled up the address space.
-            if global_state.next_base_addr > this.target_usize_max() {
-                throw_exhaust!(AddressSpaceFull);
-            }
+            let new_addr =
+                global_state.address_generator.generate(info.size, info.align, &mut rng)?;
+
             // If we filled up more than half the address space, start aggressively reusing
             // addresses to avoid running out.
-            if global_state.next_base_addr > u64::try_from(this.target_isize_max()).unwrap() {
+            let remaining_range = global_state.address_generator.get_remaining();
+            if remaining_range.start > remaining_range.end / 2 {
                 global_state.reuse.address_space_shortage();
             }
 
-            interp_ok(base_addr)
+            interp_ok(new_addr)
         }
     }
 }
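The `address_generator` module itself is not included in this diff; the hunk above only shows the API it is expected to provide: `AddressGenerator::new(range)`, `generate(size, align, rng)`, and `get_remaining()`. A minimal sketch of such a generator, reconstructed from the logic removed above (random slack, alignment rounding, a one-byte minimum gap for zero-sized allocations, and the `AddressSpaceFull` checks), could look roughly like this; the struct layout and internals are assumptions, not the actual implementation:

```rust
// Hypothetical sketch of address_generator.rs; the real module may differ.
use std::ops::Range;

use rand::Rng;
use rustc_abi::{Align, Size};

use crate::*;

pub struct AddressGenerator {
    /// Addresses that have not been handed out yet.
    remaining: Range<u64>,
}

impl AddressGenerator {
    pub fn new(remaining: Range<u64>) -> Self {
        AddressGenerator { remaining }
    }

    /// The range of addresses that are still available.
    pub fn get_remaining(&self) -> Range<u64> {
        self.remaining.clone()
    }

    /// Pick a fresh base address for an allocation with the given size and alignment.
    pub fn generate<'tcx>(
        &mut self,
        size: Size,
        align: Align,
        rng: &mut impl Rng,
    ) -> InterpResult<'tcx, u64> {
        // Leave some random slack so allocations are not always maximally aligned.
        let slack = rng.random_range(0..16);
        let base_addr = self
            .remaining
            .start
            .checked_add(slack)
            .ok_or_else(|| err_exhaust!(AddressSpaceFull))?;
        // Round up to the requested alignment.
        let base_addr = base_addr
            .checked_next_multiple_of(align.bytes())
            .ok_or_else(|| err_exhaust!(AddressSpaceFull))?;
        // Reserve at least one byte so even zero-sized allocations get unique addresses.
        let next = base_addr
            .checked_add(size.bytes().max(1))
            .ok_or_else(|| err_exhaust!(AddressSpaceFull))?;
        if next > self.remaining.end {
            throw_exhaust!(AddressSpaceFull);
        }
        self.remaining.start = next;
        interp_ok(base_addr)
    }
}
```

With the generator spanning `stack_addr..tcx.target_usize_max()`, the shortage check `remaining_range.start > remaining_range.end / 2` fires once the next base address has passed the midpoint of the target's address space, matching the intent of the old comparison against `target_isize_max()`.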
@@ -519,14 +493,3 @@ impl<'tcx> MiriMachine<'tcx> {
         })
     }
 }
-
-#[cfg(test)]
-mod tests {
-    use super::*;
-
-    #[test]
-    fn test_align_addr() {
-        assert_eq!(align_addr(37, 4), 40);
-        assert_eq!(align_addr(44, 4), 44);
-    }
-}
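The deleted assertions document the rounding rule the old helper implemented: 37 % 4 == 1, so 37 rounds up to 40, while 44 is already a multiple of 4 and is returned unchanged. Assuming equivalent coverage moves into the new module, a comparable check written against the standard library's rounding primitive (as used in the sketch above) might read:

```rust
#[cfg(test)]
mod tests {
    #[test]
    fn rounds_up_to_alignment() {
        // Mirrors the removed align_addr assertions: round up to the next multiple of 4.
        assert_eq!(37u64.checked_next_multiple_of(4), Some(40));
        assert_eq!(44u64.checked_next_multiple_of(4), Some(44));
    }
}
```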