@@ -7,7 +7,7 @@ use rustc_codegen_ssa::traits::*;
77use rustc_data_structures:: fx:: FxHashMap ;
88use rustc_middle:: ty:: Instance ;
99use rustc_middle:: ty:: layout:: TyAndLayout ;
10- use rustc_middle:: { bug, span_bug} ;
10+ use rustc_middle:: { bug, mir , span_bug} ;
1111use rustc_span:: { Pos , Span , Symbol , sym} ;
1212use rustc_target:: asm:: * ;
1313use smallvec:: SmallVec ;
@@ -396,6 +396,111 @@ impl<'tcx> AsmCodegenMethods<'tcx> for CodegenCx<'_, 'tcx> {
396396 let intel_syntax = matches ! ( asm_arch, InlineAsmArch :: X86 | InlineAsmArch :: X86_64 )
397397 && !options. contains ( InlineAsmOptions :: ATT_SYNTAX ) ;
398398
399+ // Convert all operands to string interpolations
400+ let converted_operands = operands
401+ . iter ( )
402+ . enumerate ( )
403+ . map ( |( operand_idx, operand) | {
404+ match * operand {
405+ GlobalAsmOperandRef :: Interpolate { ref string } => {
406+ // Const operands get injected directly into the
407+ // template. Note that we don't need to escape $
408+ // here unlike normal inline assembly.
409+ string. to_owned ( )
410+ }
411+ GlobalAsmOperandRef :: ConstPointer { value, instance } => {
412+ let ( prov, offset) = value. into_parts ( ) ;
413+ let global_alloc = self . tcx . global_alloc ( prov. alloc_id ( ) ) ;
414+ let llval = ' llval: {
415+ let alloc = match global_alloc {
416+ mir:: interpret:: GlobalAlloc :: Function { instance } => {
417+ break ' llval self . get_fn ( instance) ;
418+ }
419+ mir:: interpret:: GlobalAlloc :: VTable ( ty, dyn_ty) => self
420+ . tcx
421+ . global_alloc ( self . tcx . vtable_allocation ( (
422+ ty,
423+ dyn_ty. principal ( ) . map ( |principal| {
424+ self . tcx
425+ . instantiate_bound_regions_with_erased ( principal)
426+ } ) ,
427+ ) ) )
428+ . unwrap_memory ( ) ,
429+ mir:: interpret:: GlobalAlloc :: Static ( def_id) => {
430+ break ' llval self
431+ . renamed_statics
432+ . borrow ( )
433+ . get ( & def_id)
434+ . copied ( )
435+ . unwrap_or_else ( || self . get_static ( def_id) ) ;
436+ }
437+ mir:: interpret:: GlobalAlloc :: Memory ( alloc) => alloc,
438+ } ;
439+
440+ // For ZSTs directly codegen an aligned pointer.
441+ if alloc. inner ( ) . len ( ) == 0 {
442+ assert_eq ! ( offset. bytes( ) , 0 ) ;
443+ return format ! ( "{}" , alloc. inner( ) . align. bytes( ) ) ;
444+ }
445+
446+ let asm_name = self . tcx . symbol_name ( instance) ;
447+ let sym_name = format ! ( "{asm_name}.{operand_idx}" ) ;
448+
449+ let init = crate :: consts:: const_alloc_to_llvm (
450+ self , alloc, /*static*/ false ,
451+ ) ;
452+ let alloc = alloc. inner ( ) ;
453+ let g = self . static_addr_of_mut ( init, alloc. align , None ) ;
454+ if alloc. mutability . is_not ( ) {
455+ // NB: we can't use `static_addr_of_impl` here to avoid sharing
456+ // the global, as we need to set name and linkage.
457+ unsafe { llvm:: LLVMSetGlobalConstant ( g, llvm:: True ) } ;
458+ }
459+
460+ llvm:: set_value_name ( g, sym_name. as_bytes ( ) ) ;
461+
462+ // `static_addr_of_mut` gives us a private global which can't be
463+ // used by global asm. Update it to a hidden internal global instead.
464+ llvm:: set_linkage ( g, llvm:: Linkage :: InternalLinkage ) ;
465+ llvm:: set_visibility ( g, llvm:: Visibility :: Hidden ) ;
466+ g
467+ } ;
468+ self . add_compiler_used_global ( llval) ;
469+ let symbol = llvm:: build_string ( |s| unsafe {
470+ llvm:: LLVMRustGetMangledName ( llval, s) ;
471+ } )
472+ . expect ( "symbol is not valid UTF-8" ) ;
473+
474+ let offset = offset. bytes ( ) ;
475+ if offset != 0 { format ! ( "{symbol}+{offset}" ) } else { symbol }
476+ }
477+ GlobalAsmOperandRef :: SymFn { instance } => {
478+ let llval = self . get_fn ( instance) ;
479+ self . add_compiler_used_global ( llval) ;
480+ let symbol = llvm:: build_string ( |s| unsafe {
481+ llvm:: LLVMRustGetMangledName ( llval, s) ;
482+ } )
483+ . expect ( "symbol is not valid UTF-8" ) ;
484+ symbol
485+ }
486+ GlobalAsmOperandRef :: SymStatic { def_id } => {
487+ let llval = self
488+ . renamed_statics
489+ . borrow ( )
490+ . get ( & def_id)
491+ . copied ( )
492+ . unwrap_or_else ( || self . get_static ( def_id) ) ;
493+ self . add_compiler_used_global ( llval) ;
494+ let symbol = llvm:: build_string ( |s| unsafe {
495+ llvm:: LLVMRustGetMangledName ( llval, s) ;
496+ } )
497+ . expect ( "symbol is not valid UTF-8" ) ;
498+ symbol
499+ }
500+ }
501+ } )
502+ . collect :: < Vec < _ > > ( ) ;
503+
399504 // Build the template string
400505 let mut template_str = String :: new ( ) ;
401506 if intel_syntax {
@@ -405,37 +510,7 @@ impl<'tcx> AsmCodegenMethods<'tcx> for CodegenCx<'_, 'tcx> {
405510 match * piece {
406511 InlineAsmTemplatePiece :: String ( ref s) => template_str. push_str ( s) ,
407512 InlineAsmTemplatePiece :: Placeholder { operand_idx, modifier : _, span : _ } => {
408- match operands[ operand_idx] {
409- GlobalAsmOperandRef :: Interpolate { ref string } => {
410- // Const operands get injected directly into the
411- // template. Note that we don't need to escape $
412- // here unlike normal inline assembly.
413- template_str. push_str ( string) ;
414- }
415- GlobalAsmOperandRef :: SymFn { instance } => {
416- let llval = self . get_fn ( instance) ;
417- self . add_compiler_used_global ( llval) ;
418- let symbol = llvm:: build_string ( |s| unsafe {
419- llvm:: LLVMRustGetMangledName ( llval, s) ;
420- } )
421- . expect ( "symbol is not valid UTF-8" ) ;
422- template_str. push_str ( & symbol) ;
423- }
424- GlobalAsmOperandRef :: SymStatic { def_id } => {
425- let llval = self
426- . renamed_statics
427- . borrow ( )
428- . get ( & def_id)
429- . copied ( )
430- . unwrap_or_else ( || self . get_static ( def_id) ) ;
431- self . add_compiler_used_global ( llval) ;
432- let symbol = llvm:: build_string ( |s| unsafe {
433- llvm:: LLVMRustGetMangledName ( llval, s) ;
434- } )
435- . expect ( "symbol is not valid UTF-8" ) ;
436- template_str. push_str ( & symbol) ;
437- }
438- }
513+ template_str. push_str ( & converted_operands[ operand_idx] )
439514 }
440515 }
441516 }