@@ -366,7 +366,15 @@ impl<'body, 'tcx> VnState<'body, 'tcx> {
     #[instrument(level = "trace", skip(self), ret)]
     fn eval_to_const(&mut self, value: VnIndex) -> Option<OpTy<'tcx>> {
         use Value::*;
-        let op = match *self.get(value) {
+        // LLVM optimizes the load of `sizeof(size_t) * 2` bytes into a
+        // single `mov`, which is cheap; bigger values generate more `mov`
+        // instructions. After GVN, such a value instead becomes a single
+        // load (`lea`) of an address in `.rodata`. To avoid blessing
+        // differences between 32-bit and 64-bit targets, fix `size_t = u64`.
+        const STACK_THRESHOLD: u64 = std::mem::size_of::<u64>() as u64 * 2;
+        let vvalue = self.get(value);
+        debug!(?vvalue);
+        let op = match *vvalue {
             Opaque(_) => return None,
             // Do not bother evaluating repeat expressions. This would uselessly consume memory.
             Repeat(..) => return None,
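
On any host, the new constant folds to the same value; a minimal standalone
sketch (outside the compiler, reusing the `STACK_THRESHOLD` name from the
diff) to confirm the arithmetic:

    // size_of::<u64>() is 8 bytes everywhere, so the threshold is a fixed
    // 16 bytes regardless of the 32-/64-bit target pointer width.
    const STACK_THRESHOLD: u64 = std::mem::size_of::<u64>() as u64 * 2;

    fn main() {
        assert_eq!(STACK_THRESHOLD, 16);
    }
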
@@ -381,10 +389,8 @@ impl<'body, 'tcx> VnState<'body, 'tcx> {
                     .collect::<Option<Vec<_>>>()?;
                 let ty = match kind {
                     AggregateTy::Array => {
-                        let [field, ..] = fields.as_slice() else {
-                            bug!("fields.len() == 0");
-                        };
-                        let field_ty = field.layout.ty;
+                        assert!(fields.len() > 0);
+                        let field_ty = fields[0].layout.ty;
                         // Ignore nested array
                         if field_ty.is_array() {
                             trace!(
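
The slice pattern and the assert-plus-index form fail identically on an empty
`fields`; a standalone sketch (with a stub `Field` type and `panic!` standing
in for rustc's internal `bug!` macro, both assumptions) to illustrate:

    // Stub standing in for the evaluated field values in the diff.
    struct Field {
        ty: &'static str,
    }

    fn first_field_ty(fields: &[Field]) -> &'static str {
        // Old form: `let [field, ..] = fields else { panic!(...) };`
        // New form: assert, then index. Indexing would panic on an empty
        // slice anyway; the assert just states the invariant up front.
        assert!(fields.len() > 0);
        fields[0].ty
    }

    fn main() {
        assert_eq!(first_field_ty(&[Field { ty: "u8" }]), "u8");
    }
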
@@ -418,6 +424,9 @@ impl<'body, 'tcx> VnState<'body, 'tcx> {
                     let ptr_imm = Immediate::new_pointer_with_meta(data, meta, &self.ecx);
                     ImmTy::from_immediate(ptr_imm, ty).into()
                 } else if matches!(kind, AggregateTy::Array) {
+                    if ty.layout.size().bytes() <= STACK_THRESHOLD {
+                        return None;
+                    }
                     let mut mplace = None;
                     let alloc_id = self
                         .ecx
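
As a hedged illustration of the intended effect (hypothetical inputs, not test
cases from the PR): with the fixed 16-byte threshold, `eval_to_const` bails
out on a two-element `u64` array but can still promote a larger one to a
constant allocation:

    // 16 bytes: at the threshold, so the new guard returns None and the
    // array is left to ordinary codegen (a couple of cheap `mov`s).
    fn small() -> [u64; 2] {
        [1, 2]
    }

    // 32 bytes: above the threshold, so the aggregate may be promoted to a
    // constant allocation that codegen can place in `.rodata`.
    fn large() -> [u64; 4] {
        [1, 2, 3, 4]
    }

    fn main() {
        assert_eq!(small().len() + large().len(), 6);
    }
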