@@ -20,7 +20,7 @@ pub struct ReusePool {
     /// allocations as address-size pairs, the list must be sorted by the size and then the thread ID.
     ///
     /// Each of these maps has at most MAX_POOL_SIZE elements, and since alignment is limited to
-    /// less than 64 different possible value, that bounds the overall size of the pool.
+    /// less than 64 different possible values, that bounds the overall size of the pool.
     ///
     /// We also store the ID and the data-race clock of the thread that donated this pool element,
     /// to ensure synchronization with the thread that picks up this address.
@@ -36,6 +36,15 @@ impl ReusePool {
         }
     }
 
+    /// Call this when we are using up a lot of the address space: if memory reuse is enabled at all,
+    /// this will bump the intra-thread reuse rate to 100% so that we can keep running this program as
+    /// long as possible.
+    pub fn address_space_shortage(&mut self) {
+        if self.address_reuse_rate > 0.0 {
+            self.address_reuse_rate = 1.0;
+        }
+    }
+
     fn subpool(&mut self, align: Align) -> &mut Vec<(u64, Size, ThreadId, VClock)> {
         let pool_idx: usize = align.bytes().trailing_zeros().try_into().unwrap();
         if self.pool.len() <= pool_idx {
@@ -55,9 +64,7 @@ impl ReusePool {
         clock: impl FnOnce() -> VClock,
     ) {
         // Let's see if we even want to remember this address.
-        // We don't remember stack addresses: there's a lot of them (so the perf impact is big),
-        // and we only want to reuse stack slots within the same thread or else we'll add a lot of
-        // undesired synchronization.
+        // We don't remember stack addresses since there's so many of them (so the perf impact is big).
         if kind == MemoryKind::Stack || !rng.random_bool(self.address_reuse_rate) {
             return;
         }
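
The `subpool` lookup in the second hunk relies on alignments being powers of two: `align.bytes().trailing_zeros()` is then exactly log2 of the alignment, so the index is always below 64, which is the bound the first hunk's doc comment refers to. A minimal standalone sketch of that indexing, with a plain `u64` standing in for Miri's `Align` type:

```rust
/// Standalone sketch of the per-alignment indexing in `subpool`
/// (a plain `u64` stands in for Miri's `Align` type).
fn pool_index(align_bytes: u64) -> usize {
    // Alignments are always powers of two, so trailing_zeros == log2.
    debug_assert!(align_bytes.is_power_of_two());
    // A power-of-two u64 has at most 63 trailing zeros, so the
    // index (and hence the number of subpools) stays below 64.
    align_bytes.trailing_zeros() as usize
}

fn main() {
    assert_eq!(pool_index(1), 0); // byte-aligned -> subpool 0
    assert_eq!(pool_index(8), 3); // 8-byte alignment -> subpool 3
    assert_eq!(pool_index(4096), 12); // page alignment -> subpool 12
}
```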
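As a usage note, the new `address_space_shortage` method is a one-way escalation: once a caller detects the address space running low and invokes it, intra-thread reuse happens on every eligible deallocation. A pared-down sketch of that behavior (the struct is reduced to the one relevant field; everything else here is illustrative, not Miri's actual API):

```rust
/// Pared-down sketch of the escalation added in this commit:
/// only the one field the method touches is kept.
struct ReusePool {
    address_reuse_rate: f64,
}

impl ReusePool {
    fn address_space_shortage(&mut self) {
        // Only escalate if reuse is enabled at all; a rate of 0.0
        // means reuse was turned off, and that choice is respected.
        if self.address_reuse_rate > 0.0 {
            self.address_reuse_rate = 1.0;
        }
    }
}

fn main() {
    let mut pool = ReusePool { address_reuse_rate: 0.5 };
    pool.address_space_shortage();
    assert_eq!(pool.address_reuse_rate, 1.0); // bumped to 100%

    let mut disabled = ReusePool { address_reuse_rate: 0.0 };
    disabled.address_space_shortage();
    assert_eq!(disabled.address_reuse_rate, 0.0); // stays off
}
```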