@@ -41,13 +41,12 @@ pub trait CompileTimeMachine<'mir, 'tcx: 'mir, T> = Machine<
 /// allocation is interned immutably; if it is `Mutability::Mut`, then the allocation *must be*
 /// already mutable (as a sanity check).
 ///
-/// `recursive_alloc` is called for all recursively encountered allocations.
+/// Returns an iterator over all relocations referred to by this allocation.
 fn intern_shallow<'rt, 'mir, 'tcx, T, M: CompileTimeMachine<'mir, 'tcx, T>>(
     ecx: &'rt mut InterpCx<'mir, 'tcx, M>,
     alloc_id: AllocId,
     mutability: Mutability,
-    mut recursive_alloc: impl FnMut(&InterpCx<'mir, 'tcx, M>, CtfeProvenance),
-) -> Result<(), ()> {
+) -> Result<impl Iterator<Item = CtfeProvenance> + 'tcx, ()> {
     trace!("intern_shallow {:?}", alloc_id);
     // remove allocation
     let Some((_kind, mut alloc)) = ecx.memory.alloc_map.remove(&alloc_id) else {
@@ -65,14 +64,10 @@ fn intern_shallow<'rt, 'mir, 'tcx, T, M: CompileTimeMachine<'mir, 'tcx, T>>(
             assert_eq!(alloc.mutability, Mutability::Mut);
         }
     }
-    // record child allocations
-    for &(_, prov) in alloc.provenance().ptrs().iter() {
-        recursive_alloc(ecx, prov);
-    }
     // link the alloc id to the actual allocation
     let alloc = ecx.tcx.mk_const_alloc(alloc);
     ecx.tcx.set_alloc_id_memory(alloc_id, alloc);
-    Ok(())
+    Ok(alloc.0.0.provenance().ptrs().iter().map(|&(_, prov)| prov))
 }

 /// How a constant value should be interned.
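
The key signature change above: instead of invoking a `recursive_alloc` callback for every pointer it finds, `intern_shallow` now returns the provenance of the freshly interned allocation as an iterator. Because the allocation has just been moved into the `tcx` arena, that iterator borrows `'tcx` data rather than the interpreter, so callers regain mutable access to the interpreter state while draining it. The sketch below illustrates that pattern with made-up stand-in types (`Arena`, `Prov`, `intern_shallow_sketch`); it is not rustc's real API.

```rust
#[derive(Clone, Copy, Debug, PartialEq)]
struct Prov(u32); // stand-in for `CtfeProvenance`

struct Arena {
    // stand-in for the tcx-interned allocation and its provenance table
    interned: Vec<Vec<Prov>>,
}

// Analogue of the new signature: the returned iterator is tied to the arena lifetime only,
// not to the caller's mutable interpreter state.
fn intern_shallow_sketch<'tcx>(arena: &'tcx Arena, idx: usize) -> impl Iterator<Item = Prov> + 'tcx {
    arena.interned[idx].iter().copied()
}

fn main() {
    let arena = Arena { interned: vec![vec![Prov(1), Prov(2)]] };
    let mut todo = Vec::new(); // caller-owned worklist, like `todo` in the real driver
    for prov in intern_shallow_sketch(&arena, 0) {
        // With the callback API this body ran inside a closure that also captured the
        // interpreter; with a returned iterator the caller owns the loop and its state.
        todo.push(prov);
    }
    assert_eq!(todo, vec![Prov(1), Prov(2)]);
}
```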
@@ -128,7 +123,7 @@ pub fn intern_const_alloc_recursive<
         }
     };

-    // Initialize recursive interning.
+    // Intern the base allocation, and initialize todo list for recursive interning.
     let base_alloc_id = ret.ptr().provenance.unwrap().alloc_id();
     let mut todo = vec![(base_alloc_id, base_mutability)];
     // We need to distinguish "has just been interned" from "was already in `tcx`",
@@ -154,7 +149,10 @@ pub fn intern_const_alloc_recursive<
             continue;
         }
         just_interned.insert(alloc_id);
-        intern_shallow(ecx, alloc_id, mutability, |ecx, prov| {
+        let provs = intern_shallow(ecx, alloc_id, mutability).map_err(|()| {
+            ecx.tcx.dcx().emit_err(DanglingPtrInFinal { span: ecx.tcx.span, kind: intern_kind })
+        })?;
+        for prov in provs {
             let alloc_id = prov.alloc_id();
             if intern_kind != InternKind::Promoted
                 && inner_mutability == Mutability::Not
@@ -169,7 +167,7 @@ pub fn intern_const_alloc_recursive<
                 // during interning is to justify why we intern the *new* allocations immutably,
                 // so we can completely ignore existing allocations. We also don't need to add
                 // this to the todo list, since after all it is already interned.
-                return;
+                continue;
             }
             // Found a mutable pointer inside a const where inner allocations should be
             // immutable. We exclude promoteds from this, since things like `&mut []` and
@@ -189,10 +187,7 @@ pub fn intern_const_alloc_recursive<
             // okay with losing some potential for immutability here. This can anyway only affect
             // `static mut`.
             todo.push((alloc_id, inner_mutability));
-        })
-        .map_err(|()| {
-            ecx.tcx.dcx().emit_err(DanglingPtrInFinal { span: ecx.tcx.span, kind: intern_kind })
-        })?;
+        }
     }
     if found_bad_mutable_pointer {
         return Err(ecx
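
After this hunk, the recursive driver is a plain worklist loop: pop an allocation id, intern it shallowly, then push every nested allocation reported by the returned iterator (subject to the mutability checks above). The following sketch shows just that worklist shape with placeholder types (`Graph`, `intern_all`) and a plain `HashSet`; the real code additionally distinguishes freshly interned allocations from ones already in `tcx` and performs the mutability handling elided here.

```rust
use std::collections::HashSet;

struct Graph {
    // children[i] = allocation ids directly reachable from allocation i
    children: Vec<Vec<usize>>,
}

fn intern_all(graph: &Graph, root: usize) -> Vec<usize> {
    let mut todo = vec![root];
    let mut visited = HashSet::new();
    let mut order = Vec::new();
    while let Some(id) = todo.pop() {
        if !visited.insert(id) {
            continue; // already interned, nothing to do
        }
        order.push(id); // stand-in for `intern_shallow(ecx, id, mutability)?`
        for &child in &graph.children[id] {
            // the real loop inspects mutability here and may `continue` or record an error
            todo.push(child);
        }
    }
    order
}

fn main() {
    let graph = Graph { children: vec![vec![1, 2], vec![2], vec![]] };
    assert_eq!(intern_all(&graph, 0), vec![0, 2, 1]);
}
```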
@@ -220,13 +215,13 @@ pub fn intern_const_alloc_for_constprop<
         return Ok(());
     }
     // Move allocation to `tcx`.
-    intern_shallow(ecx, alloc_id, Mutability::Not, |_ecx, _| {
+    for _ in intern_shallow(ecx, alloc_id, Mutability::Not).map_err(|()| err_ub!(DeadLocal))? {
         // We are not doing recursive interning, so we don't currently support provenance.
         // (If this assertion ever triggers, we should just implement a
         // proper recursive interning loop -- or just call `intern_const_alloc_recursive`.
         panic!("`intern_const_alloc_for_constprop` called on allocation with nested provenance")
-    })
-    .map_err(|()| err_ub!(DeadLocal).into())
+    }
+    Ok(())
 }

 impl<'mir, 'tcx: 'mir, M: super::intern::CompileTimeMachine<'mir, 'tcx, !>>
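
The two non-recursive callers (`intern_const_alloc_for_constprop` here and `intern_with_temp_alloc` below) consume the returned iterator purely as a check: if it yields anything, the allocation had nested provenance, which these paths do not support. A minimal sketch of that idiom, with a hypothetical `assert_no_nested` helper standing in for the real panics and interpreter errors:

```rust
// Hypothetical helper mirroring the shape of the real callers: drain the iterator only
// to reject nested provenance.
fn assert_no_nested(provs: impl Iterator<Item = u32>) {
    for prov in provs {
        panic!("non-recursive interning does not support nested provenance: {prov}");
    }
}

fn main() {
    assert_no_nested(std::iter::empty()); // nothing nested: fine
}
```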
@@ -247,15 +242,14 @@ impl<'mir, 'tcx: 'mir, M: super::intern::CompileTimeMachine<'mir, 'tcx, !>>
         let dest = self.allocate(layout, MemoryKind::Stack)?;
         f(self, &dest.clone().into())?;
         let alloc_id = dest.ptr().provenance.unwrap().alloc_id(); // this was just allocated, it must have provenance
-        intern_shallow(self, alloc_id, Mutability::Not, |ecx, prov| {
+        for prov in intern_shallow(self, alloc_id, Mutability::Not).unwrap() {
             // We are not doing recursive interning, so we don't currently support provenance.
             // (If this assertion ever triggers, we should just implement a
             // proper recursive interning loop -- or just call `intern_const_alloc_recursive`.
-            if !ecx.tcx.try_get_global_alloc(prov.alloc_id()).is_some() {
+            if !self.tcx.try_get_global_alloc(prov.alloc_id()).is_some() {
                 panic!("`intern_with_temp_alloc` with nested allocations");
             }
-        })
-        .unwrap();
+        }
         Ok(alloc_id)
     }
 }