From b4962cdd48f205704df096c766019eef61481c7f Mon Sep 17 00:00:00 2001
From: Jeremy Fitzhardinge
Date: Sun, 12 Oct 2025 22:41:03 -0700
Subject: [PATCH 1/2] Add -Zannotate-moves for profiler visibility of move/copy operations

This implements a new unstable compiler flag `-Zannotate-moves` that makes
move and copy operations visible in profilers by creating synthetic debug
information. This is achieved with zero runtime cost by manipulating debug
info scopes to make moves/copies appear as calls to `compiler_move` and
`compiler_copy` marker functions in profiling tools.

A new `AnnotateMoves` MIR transform pass runs after MIR optimization and
modifies source scopes for statements containing `Operand::Move` and
`Operand::Copy` to make them appear as if inlined from profiling marker
functions in `core::profiling`. Two marker functions (`compiler_move` and
`compiler_copy`) are defined in `library/core/src/profiling.rs`. These are
never actually called - they exist solely as debug info anchors. The transform
creates synthetic `SourceScopeData` with the `inlined` field set to point to
the appropriate profiling marker, leveraging the existing inlining
infrastructure.

Operations are only annotated if the type:
- Meets the size threshold (default: 65 bytes, configurable via
  `-Zannotate-moves=SIZE`)
- Has a non-scalar backend representation (scalars use registers, not memcpy)

An early issue was that modifying a statement's SourceInfo to add the
`compiler_move` scope affected the entire statement, including function calls
when the move was a call argument. This made profilers attribute the whole
function call to the move operation, greatly exaggerating its cost.

The solution stores argument move/copy SourceInfo separately in
`Body::call_arg_move_source_info`. During codegen, this SourceInfo is applied
only during argument preparation, then reset to the call site location before
emitting the call instruction itself. This ensures profilers see the argument
copy attributed to `compiler_move` while the function call retains its proper
source attribution.
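For illustration, a minimal sketch of the call-argument case described above.
The `Payload` type and `consume` function are hypothetical names invented for
this example and do not appear in the patch; the attribution behaviour in the
comments is what the paragraph above describes, assuming the flag is used as
`-Zannotate-moves -g` on a nightly compiler.

```rust
#[derive(Clone)]
struct Payload {
    data: [u64; 32], // 256 bytes, above the default 65-byte threshold
}

#[inline(never)]
fn consume(_p: Payload) {}

fn main() {
    let p = Payload { data: [0; 32] };
    // The move of `p` into the argument slot is attributed to
    // `core::profiling::compiler_move` via the stored per-argument
    // SourceInfo, while the call to `consume` itself keeps this source
    // line as its debug location.
    consume(p);
}
```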
--- compiler/rustc_codegen_ssa/src/mir/block.rs | 10 + compiler/rustc_middle/src/mir/mod.rs | 9 + .../rustc_mir_build/src/builder/custom/mod.rs | 1 + .../rustc_mir_transform/src/annotate_moves.rs | 380 ++++++++++++++++++ compiler/rustc_mir_transform/src/lib.rs | 4 + compiler/rustc_session/src/config.rs | 38 +- compiler/rustc_session/src/options.rs | 37 ++ compiler/rustc_span/src/symbol.rs | 2 + library/core/src/lib.rs | 2 + library/core/src/profiling.rs | 29 ++ .../src/compiler-flags/annotate-moves.md | 78 ++++ .../annotate-moves/annotate-moves-disabled.rs | 33 ++ .../annotate-moves-integration.rs | 114 ++++++ .../annotate-moves-size-limit.rs | 47 +++ .../annotate-moves/annotate-moves.rs | 58 +++ .../annotate-moves/call-arg-scope.rs | 33 ++ tests/mir-opt/annotate-moves/aggregate.rs | 19 + ...ate.test_aggregate.AnnotateMoves.after.mir | 17 + tests/mir-opt/annotate-moves/async.rs | 36 ++ .../async.test_async.AnnotateMoves.after.mir | 13 + ...c.test_future_move.AnnotateMoves.after.mir | 10 + tests/mir-opt/annotate-moves/call_arg.rs | 20 + ..._arg.test_call_arg.AnnotateMoves.after.mir | 17 + tests/mir-opt/annotate-moves/copy_field.rs | 17 + ...ld.test_copy_field.AnnotateMoves.after.mir | 13 + tests/mir-opt/annotate-moves/iter.rs | 42 ++ ...est_impl_trait_arg.AnnotateMoves.after.mir | 14 + ...t_impl_trait_chain.AnnotateMoves.after.mir | 178 ++++++++ ..._impl_trait_return.AnnotateMoves.after.mir | 165 ++++++++ tests/mir-opt/annotate-moves/match_move.rs | 21 + ...ch_move.test_match.AnnotateMoves.after.mir | 37 ++ tests/mir-opt/annotate-moves/move_return.rs | 17 + ...e_return.test_move.AnnotateMoves.after.mir | 13 + tests/mir-opt/annotate-moves/small_move.rs | 19 + ...ve.test_small_move.AnnotateMoves.after.mir | 11 + .../ui/annotate-moves/annotate-moves-basic.rs | 15 + .../annotate-moves-invalid-flag.rs | 10 + .../annotate-moves-invalid-flag.stderr | 2 + .../annotate-moves-size-limit-invalid.rs | 10 + .../annotate-moves-size-limit-invalid.stderr | 2 + 40 files changed, 1586 insertions(+), 7 deletions(-) create mode 100644 compiler/rustc_mir_transform/src/annotate_moves.rs create mode 100644 library/core/src/profiling.rs create mode 100644 src/doc/unstable-book/src/compiler-flags/annotate-moves.md create mode 100644 tests/codegen-llvm/annotate-moves/annotate-moves-disabled.rs create mode 100644 tests/codegen-llvm/annotate-moves/annotate-moves-integration.rs create mode 100644 tests/codegen-llvm/annotate-moves/annotate-moves-size-limit.rs create mode 100644 tests/codegen-llvm/annotate-moves/annotate-moves.rs create mode 100644 tests/codegen-llvm/annotate-moves/call-arg-scope.rs create mode 100644 tests/mir-opt/annotate-moves/aggregate.rs create mode 100644 tests/mir-opt/annotate-moves/aggregate.test_aggregate.AnnotateMoves.after.mir create mode 100644 tests/mir-opt/annotate-moves/async.rs create mode 100644 tests/mir-opt/annotate-moves/async.test_async.AnnotateMoves.after.mir create mode 100644 tests/mir-opt/annotate-moves/async.test_future_move.AnnotateMoves.after.mir create mode 100644 tests/mir-opt/annotate-moves/call_arg.rs create mode 100644 tests/mir-opt/annotate-moves/call_arg.test_call_arg.AnnotateMoves.after.mir create mode 100644 tests/mir-opt/annotate-moves/copy_field.rs create mode 100644 tests/mir-opt/annotate-moves/copy_field.test_copy_field.AnnotateMoves.after.mir create mode 100644 tests/mir-opt/annotate-moves/iter.rs create mode 100644 tests/mir-opt/annotate-moves/iter.test_impl_trait_arg.AnnotateMoves.after.mir create mode 100644 
tests/mir-opt/annotate-moves/iter.test_impl_trait_chain.AnnotateMoves.after.mir create mode 100644 tests/mir-opt/annotate-moves/iter.test_impl_trait_return.AnnotateMoves.after.mir create mode 100644 tests/mir-opt/annotate-moves/match_move.rs create mode 100644 tests/mir-opt/annotate-moves/match_move.test_match.AnnotateMoves.after.mir create mode 100644 tests/mir-opt/annotate-moves/move_return.rs create mode 100644 tests/mir-opt/annotate-moves/move_return.test_move.AnnotateMoves.after.mir create mode 100644 tests/mir-opt/annotate-moves/small_move.rs create mode 100644 tests/mir-opt/annotate-moves/small_move.test_small_move.AnnotateMoves.after.mir create mode 100644 tests/ui/annotate-moves/annotate-moves-basic.rs create mode 100644 tests/ui/annotate-moves/annotate-moves-invalid-flag.rs create mode 100644 tests/ui/annotate-moves/annotate-moves-invalid-flag.stderr create mode 100644 tests/ui/annotate-moves/annotate-moves-size-limit-invalid.rs create mode 100644 tests/ui/annotate-moves/annotate-moves-size-limit-invalid.stderr diff --git a/compiler/rustc_codegen_ssa/src/mir/block.rs b/compiler/rustc_codegen_ssa/src/mir/block.rs index 1b218a0d33956..75e7520c33544 100644 --- a/compiler/rustc_codegen_ssa/src/mir/block.rs +++ b/compiler/rustc_codegen_ssa/src/mir/block.rs @@ -1151,6 +1151,16 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { _ => {} } + // Look up stored SourceInfo for this argument if it exists (from annotate_moves pass) + let bb_info = self + .mir + .call_arg_move_source_info + .iter() + .find(|&&((block, idx), _)| block == helper.bb && idx == i); + if let Some((_, arg_source_info)) = bb_info { + self.set_debug_loc(bx, *arg_source_info); + } + self.codegen_argument( bx, op, diff --git a/compiler/rustc_middle/src/mir/mod.rs b/compiler/rustc_middle/src/mir/mod.rs index 28142382b130b..1ff57162f28e8 100644 --- a/compiler/rustc_middle/src/mir/mod.rs +++ b/compiler/rustc_middle/src/mir/mod.rs @@ -331,6 +331,13 @@ pub struct Body<'tcx> { #[type_foldable(identity)] #[type_visitable(ignore)] pub function_coverage_info: Option>, + + /// Debug information for argument moves/copies in Call parameters. Stores pairs of + /// ((BasicBlock, argument_index), SourceInfo) for move/copy operations. Only populated when + /// `-Zannotate-moves` is enabled. 
+ #[type_foldable(identity)] + #[type_visitable(ignore)] + pub call_arg_move_source_info: Vec<((BasicBlock, usize), SourceInfo)>, } impl<'tcx> Body<'tcx> { @@ -374,6 +381,7 @@ impl<'tcx> Body<'tcx> { tainted_by_errors, coverage_info_hi: None, function_coverage_info: None, + call_arg_move_source_info: Vec::new(), }; body.is_polymorphic = body.has_non_region_param(); body @@ -405,6 +413,7 @@ impl<'tcx> Body<'tcx> { tainted_by_errors: None, coverage_info_hi: None, function_coverage_info: None, + call_arg_move_source_info: Vec::new(), }; body.is_polymorphic = body.has_non_region_param(); body diff --git a/compiler/rustc_mir_build/src/builder/custom/mod.rs b/compiler/rustc_mir_build/src/builder/custom/mod.rs index 792ad6d782cf3..1093bda324d17 100644 --- a/compiler/rustc_mir_build/src/builder/custom/mod.rs +++ b/compiler/rustc_mir_build/src/builder/custom/mod.rs @@ -62,6 +62,7 @@ pub(super) fn build_custom_mir<'tcx>( pass_count: 0, coverage_info_hi: None, function_coverage_info: None, + call_arg_move_source_info: Vec::new(), }; body.local_decls.push(LocalDecl::new(return_ty, return_ty_span)); diff --git a/compiler/rustc_mir_transform/src/annotate_moves.rs b/compiler/rustc_mir_transform/src/annotate_moves.rs new file mode 100644 index 0000000000000..8de490c7fdd1a --- /dev/null +++ b/compiler/rustc_mir_transform/src/annotate_moves.rs @@ -0,0 +1,380 @@ +//! Annotation pass for move/copy operations. +//! +//! This pass modifies the source scopes of statements containing `Operand::Move` and `Operand::Copy` +//! to make them appear as if they were inlined from `compiler_move()` and `compiler_copy()` intrinsic +//! functions. This creates the illusion that moves/copies are function calls in debuggers and +//! profilers, making them visible for performance analysis. +//! +//! The pass leverages the existing inlining infrastructure by creating synthetic `SourceScopeData` +//! with the `inlined` field set to point to the appropriate intrinsic function. + +use rustc_hir::def_id::DefId; +use rustc_index::IndexVec; +use rustc_middle::mir::*; +use rustc_middle::ty::{self, Instance, Ty, TyCtxt, TypingEnv}; +use rustc_session::config::DebugInfo; +use rustc_span::sym; + +/// Default minimum size in bytes for move/copy operations to be annotated. Set to 64+1 bytes +/// (typical cache line size) to focus on potentially expensive operations. +const DEFAULT_ANNOTATE_MOVES_SIZE_LIMIT: u64 = 65; + +/// Bundle up parameters into a structure to make repeated calling neater +struct Params<'a, 'tcx> { + tcx: TyCtxt<'tcx>, + source_scopes: &'a mut IndexVec>, + local_decls: &'a IndexVec>, + typing_env: TypingEnv<'tcx>, + size_limit: u64, +} + +/// MIR transform that annotates move/copy operations for profiler visibility. +pub(crate) struct AnnotateMoves { + compiler_copy: Option, + compiler_move: Option, +} + +impl<'tcx> crate::MirPass<'tcx> for AnnotateMoves { + fn is_enabled(&self, sess: &rustc_session::Session) -> bool { + sess.opts.unstable_opts.annotate_moves.is_enabled() + && sess.opts.debuginfo != DebugInfo::None + } + + fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) { + // Skip promoted MIR bodies to avoid recursion + if body.source.promoted.is_some() { + return; + } + + let typing_env = body.typing_env(tcx); + let size_limit = tcx + .sess + .opts + .unstable_opts + .annotate_moves + .size_limit() + .unwrap_or(DEFAULT_ANNOTATE_MOVES_SIZE_LIMIT); + + // Common params, including selectively borrowing the bits of Body we need to avoid + // mut/non-mut aliasing problems. 
+ let mut params = Params { + tcx, + source_scopes: &mut body.source_scopes, + local_decls: &body.local_decls, + typing_env, + size_limit, + }; + + // Storage for Call terminator argument SourceInfo + let mut call_arg_source_info = Vec::new(); + + // Process each basic block + for (block, block_data) in body.basic_blocks.as_mut().iter_enumerated_mut() { + for stmt in &mut block_data.statements { + let source_info = &mut stmt.source_info; + + if let StatementKind::Assign(box (_, rvalue)) = &stmt.kind { + // Save the original scope before processing any operands. This prevents + // chaining when multiple operands are processed. + let original_scope = source_info.scope; + + match rvalue { + Rvalue::Use(op) + | Rvalue::Repeat(op, _) + | Rvalue::Cast(_, op, _) + | Rvalue::UnaryOp(_, op) => { + self.annotate_move(&mut params, source_info, original_scope, op); + } + Rvalue::BinaryOp(_, box (lop, rop)) => { + self.annotate_move(&mut params, source_info, original_scope, lop); + self.annotate_move(&mut params, source_info, original_scope, rop); + } + Rvalue::Aggregate(_, ops) => { + for op in ops { + self.annotate_move(&mut params, source_info, original_scope, op); + } + } + Rvalue::Ref(..) + | Rvalue::ThreadLocalRef(..) + | Rvalue::RawPtr(..) + | Rvalue::NullaryOp(..) + | Rvalue::Discriminant(..) + | Rvalue::CopyForDeref(..) + | Rvalue::ShallowInitBox(..) + | Rvalue::WrapUnsafeBinder(..) => {} // No operands to instrument + } + } + } + + // Process terminator operands + if let Some(terminator) = &mut block_data.terminator { + let source_info = &mut terminator.source_info; + // Save the original scope before processing any operands + let original_scope = source_info.scope; + + match &terminator.kind { + TerminatorKind::Call { func, args, .. } + | TerminatorKind::TailCall { func, args, .. } => { + self.annotate_move(&mut params, source_info, original_scope, func); + + // For Call arguments, store SourceInfo separately instead of modifying the + // terminator's SourceInfo (which would affect the entire Call) + for (index, arg) in args.iter().enumerate() { + if let Some(arg_source_info) = self.get_annotated_source_info( + &mut params, + original_scope, + &arg.node, + ) { + call_arg_source_info.push(((block, index), arg_source_info)); + } + } + } + TerminatorKind::SwitchInt { discr: op, .. } + | TerminatorKind::Assert { cond: op, .. } + | TerminatorKind::Yield { value: op, .. } => { + self.annotate_move(&mut params, source_info, original_scope, op); + } + TerminatorKind::InlineAsm { operands, .. } => { + for op in &**operands { + match op { + InlineAsmOperand::In { value, .. } + | InlineAsmOperand::InOut { in_value: value, .. } => { + self.annotate_move( + &mut params, + source_info, + original_scope, + value, + ); + } + // Const, SymFn, SymStatic, Out, and Label don't have Operands we care about + _ => {} + } + } + } + _ => {} // Other terminators don't have operands + } + } + } + + // Store the Call argument SourceInfo in the body (only if we have any) + body.call_arg_move_source_info = call_arg_source_info; + } + + fn is_required(&self) -> bool { + false // Optional optimization/instrumentation pass + } +} + +impl AnnotateMoves { + pub(crate) fn new<'tcx>(tcx: TyCtxt<'tcx>) -> Self { + let compiler_copy = tcx.get_diagnostic_item(sym::compiler_copy); + let compiler_move = tcx.get_diagnostic_item(sym::compiler_move); + + Self { compiler_copy, compiler_move } + } + + /// Returns annotated SourceInfo for a move/copy operation without modifying anything. 
Used for + /// Call arguments where we need to store SourceInfo separately. Returns None if the operand + /// should not be annotated. + fn get_annotated_source_info<'tcx>( + &self, + params: &mut Params<'_, 'tcx>, + original_scope: SourceScope, + op: &Operand<'tcx>, + ) -> Option { + let (place, profiling_marker) = match op { + Operand::Move(place) => (place, self.compiler_move?), + Operand::Copy(place) => (place, self.compiler_copy?), + _ => return None, + }; + + let Params { tcx, typing_env, local_decls, size_limit, source_scopes } = params; + + let type_size = + self.should_annotate_operation(*tcx, *typing_env, local_decls, place, *size_limit)?; + + let ty = place.ty(*local_decls, *tcx).ty; + let callsite_span = source_scopes[original_scope].span; + let new_scope = self.create_inlined_scope( + *tcx, + *typing_env, + source_scopes, + original_scope, + callsite_span, + profiling_marker, + ty, + type_size, + ); + + Some(SourceInfo { span: callsite_span, scope: new_scope }) + } + + /// If this is a Move or Copy of a concrete type, update its debug info to make it look like it + /// was inlined from `core::profiling::compiler_move`/`compiler_copy`. + /// + /// Takes an explicit `original_scope` to use as the parent scope, which prevents chaining when + /// multiple operands in the same statement are processed. + /// + /// The statement's span is NOT modified, so profilers will show the move at its actual source + /// location rather than at profiling.rs. This provides more useful context about where the move + /// occurs in the user's code. + fn annotate_move<'tcx>( + &self, + params: &mut Params<'_, 'tcx>, + source_info: &mut SourceInfo, + original_scope: SourceScope, + op: &Operand<'tcx>, + ) { + let (place, Some(profiling_marker)) = (match op { + Operand::Move(place) => (place, self.compiler_move), + Operand::Copy(place) => (place, self.compiler_copy), + _ => return, + }) else { + return; + }; + let Params { tcx, typing_env, local_decls, size_limit, source_scopes } = params; + + if let Some(type_size) = + self.should_annotate_operation(*tcx, *typing_env, local_decls, place, *size_limit) + { + let ty = place.ty(*local_decls, *tcx).ty; + let callsite_span = source_info.span; + let new_scope = self.create_inlined_scope( + *tcx, + *typing_env, + source_scopes, + original_scope, + callsite_span, + profiling_marker, + ty, + type_size, + ); + source_info.scope = new_scope; + // Note: We deliberately do NOT modify source_info.span. + // Keeping the original span means profilers show the actual source location + // of the move/copy, which is more useful than showing profiling.rs:13. + // The scope change is sufficient to make the move appear as an inlined call + // to compiler_move/copy in the profiler. + } + } + + /// Determines if an operation should be annotated based on type characteristics. + /// Returns Some(size) if it should be annotated, None otherwise. + fn should_annotate_operation<'tcx>( + &self, + tcx: TyCtxt<'tcx>, + typing_env: ty::TypingEnv<'tcx>, + local_decls: &rustc_index::IndexVec>, + place: &Place<'tcx>, + size_limit: u64, + ) -> Option { + let ty = place.ty(local_decls, tcx).ty; + let layout = match tcx.layout_of(typing_env.as_query_input(ty)) { + Ok(layout) => layout, + Err(err) => { + tracing::info!("Failed to get layout of {ty:?}: {err}"); + return None; + } + }; + + let size = layout.size.bytes(); + + // 1. Skip ZST types (no actual move/copy happens) + if layout.is_zst() { + return None; + } + + // 2. 
Check size threshold (only annotate large moves/copies) + if size < size_limit { + return None; + } + + // 3. Skip scalar/vector types that won't generate memcpy + match layout.layout.backend_repr { + rustc_abi::BackendRepr::Scalar(_) + | rustc_abi::BackendRepr::ScalarPair(_, _) + | rustc_abi::BackendRepr::SimdVector { .. } => None, + _ => Some(size), + } + } + + /// Creates an inlined scope that makes operations appear to come from + /// the specified compiler intrinsic function. + fn create_inlined_scope<'tcx>( + &self, + tcx: TyCtxt<'tcx>, + typing_env: TypingEnv<'tcx>, + source_scopes: &mut IndexVec>, + original_scope: SourceScope, + callsite_span: rustc_span::Span, + profiling_def_id: DefId, + ty: Ty<'tcx>, + type_size: u64, + ) -> SourceScope { + // Monomorphize the profiling marker for the actual type being moved/copied + size const + // parameter compiler_move or compiler_copy + let size_const = ty::Const::from_target_usize(tcx, type_size); + let generic_args = tcx.mk_args(&[ty.into(), size_const.into()]); + let profiling_instance = Instance::expect_resolve( + tcx, + typing_env, + profiling_def_id, + generic_args, + callsite_span, + ); + + // Get the profiling marker's definition span to use as the scope's span + // This ensures the file_start_pos/file_end_pos in the DebugScope match the DIScope's file + let profiling_span = tcx.def_span(profiling_def_id); + + // Create new inlined scope that makes the operation appear to come from the profiling + // marker + let inlined_scope_data = SourceScopeData { + // Use profiling_span so file bounds match the DIScope (profiling.rs) + // This prevents DILexicalBlockFile mismatches that would show profiling.rs + // with incorrect line numbers + span: profiling_span, + parent_scope: Some(original_scope), + + // The inlined field shows: (what was inlined, where it was called from) + // - profiling_instance: the compiler_move/copy function that was "inlined" + // - callsite_span: where the move/copy actually occurs in the user's code + inlined: Some((profiling_instance, callsite_span)), + + // Proper inlined scope chaining to maintain debug info hierarchy + // We need to find the first non-compiler_move inlined scope in the chain + inlined_parent_scope: { + let mut scope = original_scope; + loop { + let scope_data = &source_scopes[scope]; + if let Some((instance, _)) = scope_data.inlined { + // Check if this is a compiler_move/copy scope we created + if let Some(def_id) = instance.def_id().as_local() { + let def_id = Some(def_id.to_def_id()); + if def_id == self.compiler_move || def_id == self.compiler_copy { + // This is one of our scopes, skip it and look at its inlined_parent_scope + if let Some(parent) = scope_data.inlined_parent_scope { + scope = parent; + continue; + } else { + // No more parents, this is fine + break None; + } + } + } + // This is a real inlined scope (not compiler_move/copy), use it + break Some(scope); + } else { + // Not an inlined scope, use its inlined_parent_scope + break scope_data.inlined_parent_scope; + } + } + }, + + local_data: ClearCrossCrate::Clear, + }; + + // Add the new scope and return its index + source_scopes.push(inlined_scope_data) + } +} diff --git a/compiler/rustc_mir_transform/src/lib.rs b/compiler/rustc_mir_transform/src/lib.rs index 9ff7e0b550030..aba6388020e5c 100644 --- a/compiler/rustc_mir_transform/src/lib.rs +++ b/compiler/rustc_mir_transform/src/lib.rs @@ -147,6 +147,7 @@ declare_passes! { // by custom rustc drivers, running all the steps by themselves. See #114628. 
pub mod inline : Inline, ForceInline; mod impossible_predicates : ImpossiblePredicates; + mod annotate_moves : AnnotateMoves; mod instsimplify : InstSimplify { BeforeInline, AfterSimplifyCfg }; mod jump_threading : JumpThreading; mod known_panics_lint : KnownPanicsLint; @@ -730,6 +731,9 @@ pub(crate) fn run_optimization_passes<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<' // Cleanup for human readability, off by default. &prettify::ReorderBasicBlocks, &prettify::ReorderLocals, + // Annotate move/copy operations for profiler visibility. + // Late so we're annotating any Move/Copy that survived all the previous passes. + &annotate_moves::AnnotateMoves::new(tcx), // Dump the end result for testing and debugging purposes. &dump_mir::Marker("PreCodegen"), ], diff --git a/compiler/rustc_session/src/config.rs b/compiler/rustc_session/src/config.rs index ebb6a93b1dd18..45ab87c0a5b20 100644 --- a/compiler/rustc_session/src/config.rs +++ b/compiler/rustc_session/src/config.rs @@ -219,6 +219,29 @@ pub enum CoverageLevel { Condition, } +/// The different settings that the `-Z annotate-moves` flag can have. +#[derive(Clone, Copy, PartialEq, Hash, Debug)] +pub enum AnnotateMoves { + /// `-Z annotate-moves=no` (or `off`, `false` etc.) + Disabled, + /// `-Z annotate-moves` or `-Z annotate-moves=yes` (use default size limit) + /// `-Z annotate-moves=SIZE` (use specified size limit) + Enabled(Option), +} + +impl AnnotateMoves { + pub fn is_enabled(&self) -> bool { + matches!(self, AnnotateMoves::Enabled(_)) + } + + pub fn size_limit(&self) -> Option { + match self { + AnnotateMoves::Disabled => None, + AnnotateMoves::Enabled(limit) => *limit, + } + } +} + // The different settings that the `-Z offload` flag can have. #[derive(Clone, Copy, PartialEq, Hash, Debug)] pub enum Offload { @@ -3227,13 +3250,13 @@ pub(crate) mod dep_tracking { }; use super::{ - AutoDiff, BranchProtection, CFGuard, CFProtection, CollapseMacroDebuginfo, CoverageOptions, - CrateType, DebugInfo, DebugInfoCompression, ErrorOutputType, FmtDebug, FunctionReturn, - InliningThreshold, InstrumentCoverage, InstrumentXRay, LinkerPluginLto, LocationDetail, - LtoCli, MirStripDebugInfo, NextSolverConfig, Offload, OomStrategy, OptLevel, OutFileName, - OutputType, OutputTypes, PatchableFunctionEntry, Polonius, RemapPathScopeComponents, - ResolveDocLinks, SourceFileHashAlgorithm, SplitDwarfKind, SwitchWithOptPath, - SymbolManglingVersion, WasiExecModel, + AnnotateMoves, AutoDiff, BranchProtection, CFGuard, CFProtection, CollapseMacroDebuginfo, + CoverageOptions, CrateType, DebugInfo, DebugInfoCompression, ErrorOutputType, FmtDebug, + FunctionReturn, InliningThreshold, InstrumentCoverage, InstrumentXRay, LinkerPluginLto, + LocationDetail, LtoCli, MirStripDebugInfo, NextSolverConfig, Offload, OomStrategy, + OptLevel, OutFileName, OutputType, OutputTypes, PatchableFunctionEntry, Polonius, + RemapPathScopeComponents, ResolveDocLinks, SourceFileHashAlgorithm, SplitDwarfKind, + SwitchWithOptPath, SymbolManglingVersion, WasiExecModel, }; use crate::lint; use crate::utils::NativeLib; @@ -3276,6 +3299,7 @@ pub(crate) mod dep_tracking { impl_dep_tracking_hash_via_hash!( (), + AnnotateMoves, AutoDiff, Offload, bool, diff --git a/compiler/rustc_session/src/options.rs b/compiler/rustc_session/src/options.rs index b2cc169f12cb7..6a549f9be34f6 100644 --- a/compiler/rustc_session/src/options.rs +++ b/compiler/rustc_session/src/options.rs @@ -790,6 +790,8 @@ mod desc { pub(crate) const parse_opt_langid: &str = "a language identifier"; pub(crate) const parse_opt_pathbuf: 
&str = "a path"; pub(crate) const parse_list: &str = "a space-separated list of strings"; + pub(crate) const parse_annotate_moves: &str = + "either a boolean (`y`, `yes`, `on`, `true`, `n`, `no`, `off` or `false`), or a number"; pub(crate) const parse_list_with_polarity: &str = "a comma-separated list of strings, with elements beginning with + or -"; pub(crate) const parse_autodiff: &str = "a comma separated list of settings: `Enable`, `PrintSteps`, `PrintTA`, `PrintTAFn`, `PrintAA`, `PrintPerf`, `PrintModBefore`, `PrintModAfter`, `PrintModFinal`, `PrintPasses`, `NoPostopt`, `LooseTypes`, `Inline`"; @@ -1633,6 +1635,38 @@ pub mod parse { true } + pub(crate) fn parse_annotate_moves(slot: &mut AnnotateMoves, v: Option<&str>) -> bool { + match v { + // No value provided: -Z annotate-moves (enable with default limit) + None => { + *slot = AnnotateMoves::Enabled(None); + true + } + Some(s) => { + // Try to parse as boolean first + match s { + "y" | "yes" | "on" | "true" => { + *slot = AnnotateMoves::Enabled(None); + return true; + } + "n" | "no" | "off" | "false" => { + *slot = AnnotateMoves::Disabled; + return true; + } + _ => {} + } + + // Try to parse as number (size limit) + if let Ok(size_limit) = s.parse::() { + *slot = AnnotateMoves::Enabled(Some(size_limit)); + true + } else { + false + } + } + } + } + pub(crate) fn parse_lto(slot: &mut LtoCli, v: Option<&str>) -> bool { if v.is_some() { let mut bool_arg = None; @@ -2195,6 +2229,9 @@ options! { "only allow the listed language features to be enabled in code (comma separated)"), always_encode_mir: bool = (false, parse_bool, [TRACKED], "encode MIR of all functions into the crate metadata (default: no)"), + annotate_moves: AnnotateMoves = (AnnotateMoves::Disabled, parse_annotate_moves, [TRACKED], + "emit debug info for compiler-generated move and copy operations \ + to make them visible in profilers. Can be a boolean or a size limit in bytes (default: disabled)"), assert_incr_state: Option = (None, parse_opt_string, [UNTRACKED], "assert that the incremental cache is in given state: \ either `loaded` or `not-loaded`."), diff --git a/compiler/rustc_span/src/symbol.rs b/compiler/rustc_span/src/symbol.rs index cd422da0c1c83..f418656f5aa50 100644 --- a/compiler/rustc_span/src/symbol.rs +++ b/compiler/rustc_span/src/symbol.rs @@ -691,7 +691,9 @@ symbols! { compile_error, compiler, compiler_builtins, + compiler_copy, compiler_fence, + compiler_move, concat, concat_bytes, concat_idents, diff --git a/library/core/src/lib.rs b/library/core/src/lib.rs index 54adf97f10020..d8f3bffb1301d 100644 --- a/library/core/src/lib.rs +++ b/library/core/src/lib.rs @@ -279,6 +279,8 @@ pub mod num; pub mod hint; pub mod intrinsics; pub mod mem; +#[unstable(feature = "profiling_marker_api", issue = "none")] +pub mod profiling; pub mod ptr; #[unstable(feature = "ub_checks", issue = "none")] pub mod ub_checks; diff --git a/library/core/src/profiling.rs b/library/core/src/profiling.rs new file mode 100644 index 0000000000000..57508eebb32cf --- /dev/null +++ b/library/core/src/profiling.rs @@ -0,0 +1,29 @@ +//! Profiling markers for compiler instrumentation. + +/// Profiling marker for move operations. +/// +/// This function is never called at runtime. When `-Z annotate-moves` is enabled, +/// the compiler creates synthetic debug info that makes move operations appear as +/// calls to this function in profilers. +/// +/// The `SIZE` parameter encodes the size of the type being moved. 
+#[unstable(feature = "profiling_marker_api", issue = "none")] +#[rustc_force_inline] +#[rustc_diagnostic_item = "compiler_move"] +pub fn compiler_move(_src: *const T, _dst: *mut T) { + unreachable!("compiler_move should never be called - it's only for debug info") +} + +/// Profiling marker for copy operations. +/// +/// This function is never called at runtime. When `-Z annotate-moves` is enabled, +/// the compiler creates synthetic debug info that makes copy operations appear as +/// calls to this function in profilers. +/// +/// The `SIZE` parameter encodes the size of the type being copied. +#[unstable(feature = "profiling_marker_api", issue = "none")] +#[rustc_force_inline] +#[rustc_diagnostic_item = "compiler_copy"] +pub fn compiler_copy(_src: *const T, _dst: *mut T) { + unreachable!("compiler_copy should never be called - it's only for debug info") +} diff --git a/src/doc/unstable-book/src/compiler-flags/annotate-moves.md b/src/doc/unstable-book/src/compiler-flags/annotate-moves.md new file mode 100644 index 0000000000000..5e5d6685c8474 --- /dev/null +++ b/src/doc/unstable-book/src/compiler-flags/annotate-moves.md @@ -0,0 +1,78 @@ +# `annotate-moves` + +The `-Z annotate-moves` flag enables annotation of compiler-generated +move and copy operations, making them visible in profilers and stack traces +for performance debugging. + +When enabled, the compiler will inject calls to `core::profiling::compiler_move` +and `core::profiling::compiler_copy` functions around large move and copy operations. +These functions are never actually executed (they contain `unreachable!()`), but +their presence in debug info makes expensive memory operations visible in profilers. + +## Syntax + +```bash +rustc -Z annotate-moves[=] +``` + +Where `` can be: +- A boolean: `true`, `false`, `yes`, `no`, `on`, `off` +- A number: size threshold in bytes (e.g., `128`) +- Omitted: enables with default threshold (65 bytes) + +## Options + +- `-Z annotate-moves` or `-Z annotate-moves=true`: Enable with default size limit (65 bytes) +- `-Z annotate-moves=false`: Disable annotation +- `-Z annotate-moves=N`: Enable with custom size limit of N bytes + +## Examples + +```bash +# Enable annotation with default threshold (65 bytes) +rustc -Z annotate-moves main.rs + +# Enable with custom 128-byte threshold +rustc -Z annotate-moves=128 main.rs + +# Only annotate very large moves (1KB+) +rustc -Z annotate-moves=1024 main.rs + +# Explicitly disable +rustc -Z annotate-moves=false main.rs +``` + +## Behavior + +The annotation only applies to: +- Types larger than the specified size threshold +- Non-immediate types (those that would generate `memcpy`) +- Operations that actually move/copy data (not ZST types) + +Stack traces will show the operations: +```text +0: memcpy +1: core::profiling::compiler_move:: +2: my_function +``` + +## Example + +```rust +#[derive(Clone)] +struct LargeData { + buffer: [u8; 1000], +} + +fn example() { + let data = LargeData { buffer: [0; 1000] }; + let copy = data.clone(); // Shows as compiler_copy in profiler + let moved = data; // Shows as compiler_move in profiler +} +``` + +## Overhead + +This has no effect on generated code; it only adds debuginfo. The overhead is +typically very small; on rustc itself, the default limit of 65 bytes adds about +0.055% to the binary size. 
diff --git a/tests/codegen-llvm/annotate-moves/annotate-moves-disabled.rs b/tests/codegen-llvm/annotate-moves/annotate-moves-disabled.rs new file mode 100644 index 0000000000000..2aae41e64db9e --- /dev/null +++ b/tests/codegen-llvm/annotate-moves/annotate-moves-disabled.rs @@ -0,0 +1,33 @@ +// +//@ compile-flags: -Copt-level=0 -g + +// Test that move/copy operations are NOT annotated when the flag is disabled + +#![crate_type = "lib"] + +#[derive(Clone)] +struct LargeStruct { + data: [u64; 20], // 160 bytes - would normally trigger annotation +} + +impl LargeStruct { + fn new() -> Self { + LargeStruct { data: [42; 20] } + } +} + +// Without -Z annotate-moves flag, no annotation should be generated + +// CHECK-LABEL: annotate_moves_disabled::test_large_copy_no_annotation +// CHECK-NOT: !DISubprogram(name: "compiler_copy" +pub fn test_large_copy_no_annotation() { + let large = LargeStruct::new(); + let _copy = large.clone(); +} + +// CHECK-LABEL: annotate_moves_disabled::test_large_move_no_annotation +// CHECK-NOT: !DISubprogram(name: "compiler_move" +pub fn test_large_move_no_annotation() { + let large = LargeStruct::new(); + let _moved = large; +} diff --git a/tests/codegen-llvm/annotate-moves/annotate-moves-integration.rs b/tests/codegen-llvm/annotate-moves/annotate-moves-integration.rs new file mode 100644 index 0000000000000..2d6eea480696e --- /dev/null +++ b/tests/codegen-llvm/annotate-moves/annotate-moves-integration.rs @@ -0,0 +1,114 @@ +//@ compile-flags: -Z annotate-moves=50 -Copt-level=0 -g + +#![crate_type = "lib"] + +// Comprehensive integration test for move/copy annotation + +#[derive(Clone)] +struct VerySmall { + x: u8, // 1 byte - should not be annotated +} + +#[derive(Clone)] +struct Small { + data: [u32; 10], // 40 bytes - below 50-byte threshold +} + +#[derive(Clone)] +struct Medium { + data: [u64; 10], // 80 bytes - above 50-byte threshold +} + +#[derive(Clone)] +struct Large { + data: [u64; 30], // 240 bytes - well above threshold +} + +// Test 1: Very small types should never be annotated +// CHECK-LABEL: annotate_moves_integration::test_very_small_operations +// CHECK-NOT: !DISubprogram(name: "compiler_copy" +// CHECK-NOT: !DISubprogram(name: "compiler_move" +pub fn test_very_small_operations() { + let vs = VerySmall { x: 42 }; + let _copy = vs.clone(); + let _moved = vs; +} + +// Test 2: Small types below threshold should not be annotated +// CHECK-LABEL: annotate_moves_integration::test_small_operations +// CHECK-NOT: !DISubprogram(name: "compiler_copy" +// CHECK-NOT: !DISubprogram(name: "compiler_move" +pub fn test_small_operations() { + let s = Small { data: [42; 10] }; + let _copy = s.clone(); + let _moved = s; +} + +// Test 3: Medium types above threshold should be annotated +// CHECK-LABEL: annotate_moves_integration::test_medium_copy +pub fn test_medium_copy() { + let m = Medium { data: [42; 10] }; + let _copy = m.clone(); // Should be annotated +} + +// CHECK-LABEL: annotate_moves_integration::test_medium_move +pub fn test_medium_move() { + let m = Medium { data: [42; 10] }; + let _moved = m; // Should be annotated +} + +// Test 4: Large types should definitely be annotated +// CHECK-LABEL: annotate_moves_integration::test_large_copy +pub fn test_large_copy() { + let l = Large { data: [42; 30] }; + let _copy = l.clone(); // Should be annotated +} + +// CHECK-LABEL: annotate_moves_integration::test_large_move +pub fn test_large_move() { + let l = Large { data: [42; 30] }; + let _moved = l; // Should be annotated +} + +// Test 5: Multiple operations in same 
function +// CHECK-LABEL: annotate_moves_integration::test_multiple_operations +pub fn test_multiple_operations() { + let l1 = Large { data: [1; 30] }; + let _copy1 = l1.clone(); // Should be annotated + let _moved1 = l1; // Should be annotated + + let l2 = Large { data: [2; 30] }; + let _copy2 = l2.clone(); // Should be annotated + drop(_copy2); +} + +// Test 6: Function parameters and returns +// CHECK-LABEL: annotate_moves_integration::test_function_parameters +pub fn test_function_parameters() { + let l = Large { data: [42; 30] }; + helper_function(l); // Should be annotated for move into function +} + +// CHECK-LABEL: annotate_moves_integration::helper_function +pub fn helper_function(_param: Large) { + // Parameter receipt shouldn't be annotated (it's the caller's move) +} + +// Test 7: Verify ZST types are never annotated +pub struct ZeroSizedType; + +// CHECK-LABEL: annotate_moves_integration::test_zst_operations +// CHECK-NOT: !DISubprogram(name: "compiler_copy" +// CHECK-NOT: !DISubprogram(name: "compiler_move" +pub fn test_zst_operations() { + let zst = ZeroSizedType; + let _copy = zst; + let _moved = ZeroSizedType; +} + +// Check that compiler debug info is generated for medium and large types +// (above 50-byte threshold) with size parameter +// CHECK: !DISubprogram(name: "compiler_copy<[u64; 10], 80>" +// CHECK: !DISubprogram(name: "compiler_move<[u64; 10], 80>" +// CHECK: !DISubprogram(name: "compiler_copy<[u64; 30], 240>" +// CHECK: !DISubprogram(name: "compiler_move<[u64; 30], 240>" diff --git a/tests/codegen-llvm/annotate-moves/annotate-moves-size-limit.rs b/tests/codegen-llvm/annotate-moves/annotate-moves-size-limit.rs new file mode 100644 index 0000000000000..3d7ca62ef2fb1 --- /dev/null +++ b/tests/codegen-llvm/annotate-moves/annotate-moves-size-limit.rs @@ -0,0 +1,47 @@ +// +//@ compile-flags: -Z annotate-moves=100 -Copt-level=0 -g + +#![crate_type = "lib"] + +#[derive(Clone)] +struct MediumStruct { + data: [u64; 10], // 80 bytes - below custom 100-byte threshold +} + +#[derive(Clone)] +struct LargeStruct { + data: [u64; 20], // 160 bytes - above custom 100-byte threshold +} + +impl MediumStruct { + fn new() -> Self { + MediumStruct { data: [42; 10] } + } +} + +impl LargeStruct { + fn new() -> Self { + LargeStruct { data: [42; 20] } + } +} + +// With custom size limit of 100 bytes: +// Medium struct (80 bytes) should NOT be annotated +// Large struct (160 bytes) should be annotated + +// CHECK-LABEL: annotate_moves_size_limit::test_medium_copy +// CHECK-NOT: !DISubprogram(name: "compiler_copy" +pub fn test_medium_copy() { + let medium = MediumStruct::new(); + let _copy = medium.clone(); // Should NOT generate annotation (below threshold) +} + +// CHECK-LABEL: annotate_moves_size_limit::test_large_copy +pub fn test_large_copy() { + let large = LargeStruct::new(); + let _copy = large.clone(); // Should generate annotation (above threshold) +} + +// Check that compiler_copy debug info is generated for large copies +// (above 100-byte threshold) with size parameter +// CHECK: !DISubprogram(name: "compiler_copy<[u64; 20], 160>" diff --git a/tests/codegen-llvm/annotate-moves/annotate-moves.rs b/tests/codegen-llvm/annotate-moves/annotate-moves.rs new file mode 100644 index 0000000000000..86e55edfc0f74 --- /dev/null +++ b/tests/codegen-llvm/annotate-moves/annotate-moves.rs @@ -0,0 +1,58 @@ +// +//@ compile-flags: -Z annotate-moves -Copt-level=0 -g + +#![crate_type = "lib"] + +#[derive(Clone)] +struct LargeStruct { + data: [u64; 20], // 160 bytes - should trigger annotation 
(above 64 bytes) +} + +#[derive(Clone)] +struct SmallStruct { + data: u32, // 4 bytes - should NOT trigger annotation +} + +impl LargeStruct { + fn new() -> Self { + LargeStruct { data: [42; 20] } + } +} + +impl SmallStruct { + fn new() -> Self { + SmallStruct { data: 42 } + } +} + +// CHECK-LABEL: annotate_moves::test_large_copy +pub fn test_large_copy() { + let large = LargeStruct::new(); + let _copy = large.clone(); // Should generate annotation debug info +} + +// CHECK-LABEL: annotate_moves::test_large_move +pub fn test_large_move() { + let large = LargeStruct::new(); + let _moved = large; // Should generate annotation debug info +} + +// CHECK-LABEL: annotate_moves::test_small_copy +// CHECK-NOT: !DISubprogram(name: "compiler_copy" +pub fn test_small_copy() { + let small = SmallStruct::new(); + let _copy = small.clone(); // Should NOT generate annotation debug info +} + +// CHECK-LABEL: annotate_moves::test_small_move +// CHECK-NOT: !DISubprogram(name: "compiler_move" +pub fn test_small_move() { + let small = SmallStruct::new(); + let _moved = small; // Should NOT generate annotation debug info +} + +// Check that compiler_copy debug info is generated for large copies with size parameter +// CHECK: !DISubprogram(name: "compiler_copy<[u64; 20], 160>" + +// Check that compiler_move debug info is generated for large moves with size parameter +// CHECK: !DISubprogram(name: "compiler_move<[u64; 20], 160>" diff --git a/tests/codegen-llvm/annotate-moves/call-arg-scope.rs b/tests/codegen-llvm/annotate-moves/call-arg-scope.rs new file mode 100644 index 0000000000000..1997164d0b28c --- /dev/null +++ b/tests/codegen-llvm/annotate-moves/call-arg-scope.rs @@ -0,0 +1,33 @@ +//@ compile-flags: -Z annotate-moves=8 -Copt-level=0 -g + +#![crate_type = "lib"] + +#[derive(Clone, Copy)] +pub struct LargeStruct { + pub data: [u64; 20], // 160 bytes +} + +// This test verifies that when passing arguments to functions, the actual CALL instruction +// does not have the compiler_move debug scope, even though the argument itself might be +// annotated with compiler_move in MIR. +// +// Note: On most ABIs, large structs are passed by pointer even when written as "by value", +// so there may not be an actual memcpy operation to attach compiler_move to. This test +// mainly verifies that IF debug info is emitted, the call itself uses the source location. + +// CHECK-LABEL: call_arg_scope::test_call_with_move +pub fn test_call_with_move(s: LargeStruct) { + // The key test: the call instruction should reference the source line (line 22), + // NOT a compiler_move scope. 
+ helper(s); +} + +// Find the call instruction and verify its debug location +// CHECK: call {{.*}}@{{.*}}helper{{.*}}({{.*}}), !dbg ![[CALL_LOC:[0-9]+]] + +// Verify that the call's debug location points to line 22 (the actual source line) +// and NOT to a scope with inlinedAt referencing compiler_move +// CHECK: ![[CALL_LOC]] = !DILocation(line: 22, + +#[inline(never)] +fn helper(_s: LargeStruct) {} diff --git a/tests/mir-opt/annotate-moves/aggregate.rs b/tests/mir-opt/annotate-moves/aggregate.rs new file mode 100644 index 0000000000000..2125213e753ed --- /dev/null +++ b/tests/mir-opt/annotate-moves/aggregate.rs @@ -0,0 +1,19 @@ +//@ compile-flags: -Z annotate-moves=8 -C debuginfo=full +//@ ignore-std-debug-assertions +//@ edition: 2021 + +#![crate_type = "lib"] + +#[derive(Clone)] +pub struct LargeStruct { + pub data: [u64; 20], // 160 bytes +} + +// EMIT_MIR aggregate.test_aggregate.AnnotateMoves.after.mir +pub fn test_aggregate() -> LargeStruct { + // CHECK-LABEL: fn test_aggregate( + // Struct initialization with Rvalue::Aggregate + // CHECK: scope {{[0-9]+}} (inlined core::profiling::compiler_copy::<[u64; 20], 160>) + let data = [0u64; 20]; + LargeStruct { data } +} diff --git a/tests/mir-opt/annotate-moves/aggregate.test_aggregate.AnnotateMoves.after.mir b/tests/mir-opt/annotate-moves/aggregate.test_aggregate.AnnotateMoves.after.mir new file mode 100644 index 0000000000000..0f4937a66d404 --- /dev/null +++ b/tests/mir-opt/annotate-moves/aggregate.test_aggregate.AnnotateMoves.after.mir @@ -0,0 +1,17 @@ +// MIR for `test_aggregate` after AnnotateMoves + +fn test_aggregate() -> LargeStruct { + let mut _0: LargeStruct; + let _1: [u64; 20]; + scope 1 { + debug data => _1; + scope 2 (inlined core::profiling::compiler_copy::<[u64; 20], 160>) { + } + } + + bb0: { + _1 = [const 0_u64; 20]; + _0 = LargeStruct { data: copy _1 }; + return; + } +} diff --git a/tests/mir-opt/annotate-moves/async.rs b/tests/mir-opt/annotate-moves/async.rs new file mode 100644 index 0000000000000..0f834e0574155 --- /dev/null +++ b/tests/mir-opt/annotate-moves/async.rs @@ -0,0 +1,36 @@ +//@ compile-flags: -Z annotate-moves=1 -C debuginfo=full +//@ ignore-std-debug-assertions +//@ edition: 2021 + +#![crate_type = "lib"] + +#[derive(Clone)] +pub struct LargeStruct { + pub data: [u64; 20], // 160 bytes +} + +// EMIT_MIR async.test_async.AnnotateMoves.after.mir +pub async fn test_async(s: LargeStruct) -> LargeStruct { + // CHECK-LABEL: fn test_async( + // Async generates a state machine that moves values across await points + // The move may show up when constructing the future state + // CHECK: scope {{[0-9]+}} (inlined core::profiling::compiler_move::) + s +} + +async fn make_future() -> LargeStruct { + LargeStruct { data: [0; 20] } +} + +async fn consume_future(f: F) -> F::Output { + f.await +} + +// EMIT_MIR async.test_future_move.AnnotateMoves.after.mir +pub async fn test_future_move() -> LargeStruct { + // CHECK-LABEL: fn test_future_move( + // Moving the future type itself (the state machine) when passing to consume_future + // CHECK: scope {{[0-9]+}} (inlined core::profiling::compiler_move::<{async fn body of make_future()} + let fut = make_future(); + consume_future(fut).await +} diff --git a/tests/mir-opt/annotate-moves/async.test_async.AnnotateMoves.after.mir b/tests/mir-opt/annotate-moves/async.test_async.AnnotateMoves.after.mir new file mode 100644 index 0000000000000..9227ff8c262ff --- /dev/null +++ b/tests/mir-opt/annotate-moves/async.test_async.AnnotateMoves.after.mir @@ -0,0 +1,13 @@ +// MIR for 
`test_async` after AnnotateMoves + +fn test_async(_1: LargeStruct) -> {async fn body of test_async()} { + debug s => _1; + let mut _0: {async fn body of test_async()}; + scope 1 (inlined core::profiling::compiler_move::) { + } + + bb0: { + _0 = {coroutine@$DIR/async.rs:13:56: 19:2 (#0)} { s: move _1 }; + return; + } +} diff --git a/tests/mir-opt/annotate-moves/async.test_future_move.AnnotateMoves.after.mir b/tests/mir-opt/annotate-moves/async.test_future_move.AnnotateMoves.after.mir new file mode 100644 index 0000000000000..948d6a8b0cde3 --- /dev/null +++ b/tests/mir-opt/annotate-moves/async.test_future_move.AnnotateMoves.after.mir @@ -0,0 +1,10 @@ +// MIR for `test_future_move` after AnnotateMoves + +fn test_future_move() -> {async fn body of test_future_move()} { + let mut _0: {async fn body of test_future_move()}; + + bb0: { + _0 = {coroutine@$DIR/async.rs:30:48: 36:2 (#0)}; + return; + } +} diff --git a/tests/mir-opt/annotate-moves/call_arg.rs b/tests/mir-opt/annotate-moves/call_arg.rs new file mode 100644 index 0000000000000..54004dd651d05 --- /dev/null +++ b/tests/mir-opt/annotate-moves/call_arg.rs @@ -0,0 +1,20 @@ +//@ compile-flags: -Z annotate-moves=8 -C debuginfo=full +//@ ignore-std-debug-assertions +//@ edition: 2021 + +#![crate_type = "lib"] + +#[derive(Clone)] +pub struct LargeStruct { + pub data: [u64; 20], // 160 bytes +} + +// EMIT_MIR call_arg.test_call_arg.AnnotateMoves.after.mir +pub fn test_call_arg(s: LargeStruct) { + // CHECK-LABEL: fn test_call_arg( + // CHECK: scope {{[0-9]+}} (inlined core::profiling::compiler_move::) + helper(s); +} + +#[inline(never)] +fn helper(_s: LargeStruct) {} diff --git a/tests/mir-opt/annotate-moves/call_arg.test_call_arg.AnnotateMoves.after.mir b/tests/mir-opt/annotate-moves/call_arg.test_call_arg.AnnotateMoves.after.mir new file mode 100644 index 0000000000000..91edbcaef3ff0 --- /dev/null +++ b/tests/mir-opt/annotate-moves/call_arg.test_call_arg.AnnotateMoves.after.mir @@ -0,0 +1,17 @@ +// MIR for `test_call_arg` after AnnotateMoves + +fn test_call_arg(_1: LargeStruct) -> () { + debug s => _1; + let mut _0: (); + let _2: (); + scope 1 (inlined core::profiling::compiler_move::) { + } + + bb0: { + _2 = helper(move _1) -> [return: bb1, unwind unreachable]; + } + + bb1: { + return; + } +} diff --git a/tests/mir-opt/annotate-moves/copy_field.rs b/tests/mir-opt/annotate-moves/copy_field.rs new file mode 100644 index 0000000000000..67072f03429f8 --- /dev/null +++ b/tests/mir-opt/annotate-moves/copy_field.rs @@ -0,0 +1,17 @@ +//@ compile-flags: -Z annotate-moves=8 -C debuginfo=full +//@ ignore-std-debug-assertions +//@ edition: 2021 + +#![crate_type = "lib"] + +#[derive(Clone)] +pub struct LargeStruct { + pub data: [u64; 20], // 160 bytes +} + +// EMIT_MIR copy_field.test_copy_field.AnnotateMoves.after.mir +pub fn test_copy_field(s: &LargeStruct) -> [u64; 20] { + // CHECK-LABEL: fn test_copy_field( + // CHECK: scope {{[0-9]+}} (inlined core::profiling::compiler_copy::<[u64; 20], 160>) + s.data +} diff --git a/tests/mir-opt/annotate-moves/copy_field.test_copy_field.AnnotateMoves.after.mir b/tests/mir-opt/annotate-moves/copy_field.test_copy_field.AnnotateMoves.after.mir new file mode 100644 index 0000000000000..b3c83f4c27159 --- /dev/null +++ b/tests/mir-opt/annotate-moves/copy_field.test_copy_field.AnnotateMoves.after.mir @@ -0,0 +1,13 @@ +// MIR for `test_copy_field` after AnnotateMoves + +fn test_copy_field(_1: &LargeStruct) -> [u64; 20] { + debug s => _1; + let mut _0: [u64; 20]; + scope 1 (inlined core::profiling::compiler_copy::<[u64; 20], 
160>) { + } + + bb0: { + _0 = copy ((*_1).0: [u64; 20]); + return; + } +} diff --git a/tests/mir-opt/annotate-moves/iter.rs b/tests/mir-opt/annotate-moves/iter.rs new file mode 100644 index 0000000000000..b87b7b9df58f6 --- /dev/null +++ b/tests/mir-opt/annotate-moves/iter.rs @@ -0,0 +1,42 @@ +//@ compile-flags: -Z annotate-moves=8 -C debuginfo=full +//@ ignore-std-debug-assertions +//@ edition: 2021 + +#![crate_type = "lib"] + +// Test impl Trait with a large iterator type +fn make_large_iter() -> impl Iterator { + fn double(x: u64) -> u64 { + x * 2 + } + + // Chain, fn(u64) -> u64>, Map, fn(u64) -> u64>> + // IntoIter owns the array data, making the iterator larger + [1u64, 2, 3, 4, 5] + .into_iter() + .map(double as fn(u64) -> u64) + .chain([6u64, 7, 8, 9, 10].into_iter().map(double as fn(u64) -> u64)) +} + +// EMIT_MIR iter.test_impl_trait_return.AnnotateMoves.after.mir +pub fn test_impl_trait_return() -> impl Iterator { + // CHECK-LABEL: fn test_impl_trait_return( + // The iterator is returned directly without moving through a local + make_large_iter() +} + +// EMIT_MIR iter.test_impl_trait_arg.AnnotateMoves.after.mir +pub fn test_impl_trait_arg(iter: impl Iterator) -> Vec { + // CHECK-LABEL: fn test_impl_trait_arg( + // Generic impl trait parameter - concrete type determined at call site + iter.collect() +} + +// EMIT_MIR iter.test_impl_trait_chain.AnnotateMoves.after.mir +pub fn test_impl_trait_chain() -> Vec { + // CHECK-LABEL: fn test_impl_trait_chain( + // The iterator move shows up when passing to test_impl_trait_arg + // CHECK: scope {{[0-9]+}} (inlined core::profiling::compiler_move::, fn(u64) -> u64>, Map, fn(u64) -> u64>>, {{[0-9]+}}>) + let iter = make_large_iter(); + test_impl_trait_arg(iter) +} diff --git a/tests/mir-opt/annotate-moves/iter.test_impl_trait_arg.AnnotateMoves.after.mir b/tests/mir-opt/annotate-moves/iter.test_impl_trait_arg.AnnotateMoves.after.mir new file mode 100644 index 0000000000000..081ba937fb2cb --- /dev/null +++ b/tests/mir-opt/annotate-moves/iter.test_impl_trait_arg.AnnotateMoves.after.mir @@ -0,0 +1,14 @@ +// MIR for `test_impl_trait_arg` after AnnotateMoves + +fn test_impl_trait_arg(_1: impl Iterator) -> Vec { + debug iter => _1; + let mut _0: std::vec::Vec; + + bb0: { + _0 = as Iterator>::collect::>(move _1) -> [return: bb1, unwind unreachable]; + } + + bb1: { + return; + } +} diff --git a/tests/mir-opt/annotate-moves/iter.test_impl_trait_chain.AnnotateMoves.after.mir b/tests/mir-opt/annotate-moves/iter.test_impl_trait_chain.AnnotateMoves.after.mir new file mode 100644 index 0000000000000..b1aae6bfd3037 --- /dev/null +++ b/tests/mir-opt/annotate-moves/iter.test_impl_trait_chain.AnnotateMoves.after.mir @@ -0,0 +1,178 @@ +// MIR for `test_impl_trait_chain` after AnnotateMoves + +fn test_impl_trait_chain() -> Vec { + let mut _0: std::vec::Vec; + let _15: std::iter::Chain, fn(u64) -> u64>, std::iter::Map, fn(u64) -> u64>>; + scope 1 { + debug iter => _15; + scope 32 (inlined test_impl_trait_arg::, fn(u64) -> u64>, Map, fn(u64) -> u64>>>) { + debug iter => _15; + scope 45 (inlined core::profiling::compiler_move::, fn(u64) -> u64>, Map, fn(u64) -> u64>>, 104>) { + } + } + } + scope 2 (inlined make_large_iter) { + let mut _1: [u64; 5]; + let mut _4: std::array::IntoIter; + let mut _5: fn(u64) -> u64; + let mut _6: std::iter::Map, fn(u64) -> u64>; + let mut _7: [u64; 5]; + let mut _10: std::array::IntoIter; + let mut _11: fn(u64) -> u64; + let mut _12: std::iter::Map, fn(u64) -> u64>; + scope 3 (inlined array::iter::::into_iter) { + debug self => _1; + 
let _2: [std::mem::MaybeUninit; 5]; + scope 4 { + let _3: std::array::iter::iter_inner::PolymorphicIter<[std::mem::MaybeUninit; 5]>; + scope 5 { + scope 22 (inlined core::profiling::compiler_copy::; 5]>, 48>) { + scope 35 (inlined core::profiling::compiler_copy::; 5]>, 48>) { + } + } + } + scope 6 (inlined ops::index_range::IndexRange::zero_to) { + } + scope 7 (inlined array::iter::iter_inner::PolymorphicIter::<[MaybeUninit; 5]>::new_unchecked) { + scope 21 (inlined core::profiling::compiler_copy::<[MaybeUninit; 5], 40>) { + scope 34 (inlined core::profiling::compiler_copy::<[MaybeUninit; 5], 40>) { + } + } + } + } + scope 20 (inlined core::profiling::compiler_copy::<[u64; 5], 40>) { + scope 33 (inlined core::profiling::compiler_copy::<[u64; 5], 40>) { + } + } + } + scope 8 (inlined as Iterator>::map:: u64>) { + debug self => _4; + debug f => _5; + scope 9 (inlined Map::, fn(u64) -> u64>::new) { + scope 23 (inlined core::profiling::compiler_copy::, 48>) { + scope 36 (inlined core::profiling::compiler_copy::, 48>) { + } + } + } + } + scope 10 (inlined array::iter::::into_iter) { + debug self => _7; + let _8: [std::mem::MaybeUninit; 5]; + scope 11 { + let _9: std::array::iter::iter_inner::PolymorphicIter<[std::mem::MaybeUninit; 5]>; + scope 12 { + scope 26 (inlined core::profiling::compiler_copy::; 5]>, 48>) { + scope 39 (inlined core::profiling::compiler_copy::; 5]>, 48>) { + } + } + } + scope 13 (inlined ops::index_range::IndexRange::zero_to) { + } + scope 14 (inlined array::iter::iter_inner::PolymorphicIter::<[MaybeUninit; 5]>::new_unchecked) { + scope 25 (inlined core::profiling::compiler_copy::<[MaybeUninit; 5], 40>) { + scope 38 (inlined core::profiling::compiler_copy::<[MaybeUninit; 5], 40>) { + } + } + } + } + scope 24 (inlined core::profiling::compiler_copy::<[u64; 5], 40>) { + scope 37 (inlined core::profiling::compiler_copy::<[u64; 5], 40>) { + } + } + } + scope 15 (inlined as Iterator>::map:: u64>) { + debug self => _10; + debug f => _11; + scope 16 (inlined Map::, fn(u64) -> u64>::new) { + scope 27 (inlined core::profiling::compiler_copy::, 48>) { + scope 40 (inlined core::profiling::compiler_copy::, 48>) { + } + } + } + } + scope 17 (inlined , fn(u64) -> u64> as Iterator>::chain::, fn(u64) -> u64>>) { + debug self => _6; + debug other => _12; + scope 18 (inlined std::iter::Chain::, fn(u64) -> u64>, Map, fn(u64) -> u64>>::new) { + debug a => _6; + debug b => _12; + let mut _13: std::option::Option, fn(u64) -> u64>>; + let mut _14: std::option::Option, fn(u64) -> u64>>; + scope 28 (inlined core::profiling::compiler_copy::, fn(u64) -> u64>, 52>) { + scope 41 (inlined core::profiling::compiler_copy::, fn(u64) -> u64>, 52>) { + } + } + scope 29 (inlined core::profiling::compiler_copy::, fn(u64) -> u64>, 52>) { + scope 42 (inlined core::profiling::compiler_copy::, fn(u64) -> u64>, 52>) { + } + } + scope 30 (inlined core::profiling::compiler_move::, fn(u64) -> u64>>, 52>) { + } + scope 31 (inlined core::profiling::compiler_move::, fn(u64) -> u64>>, 52>) { + scope 43 (inlined core::profiling::compiler_move::, fn(u64) -> u64>>, 52>) { + } + scope 44 (inlined core::profiling::compiler_move::, fn(u64) -> u64>>, 52>) { + } + } + } + scope 19 (inlined , fn(u64) -> u64> as IntoIterator>::into_iter) { + debug self => _12; + } + } + } + + bb0: { + StorageLive(_12); + StorageLive(_6); + StorageLive(_4); + StorageLive(_1); + _1 = [const 1_u64, const 2_u64, const 3_u64, const 4_u64, const 5_u64]; + StorageLive(_2); + StorageLive(_3); + _2 = copy _1 as [std::mem::MaybeUninit; 5] (Transmute); + _3 
= array::iter::iter_inner::PolymorphicIter::<[MaybeUninit; 5]> { alive: const ops::index_range::IndexRange {{ start: 0_usize, end: 5_usize }}, data: copy _2 }; + _4 = std::array::IntoIter:: { inner: copy _3 }; + StorageDead(_3); + StorageDead(_2); + StorageDead(_1); + StorageLive(_5); + _5 = make_large_iter::double as fn(u64) -> u64 (PointerCoercion(ReifyFnPointer, AsCast)); + _6 = Map::, fn(u64) -> u64> { iter: copy _4, f: copy _5 }; + StorageDead(_5); + StorageDead(_4); + StorageLive(_10); + StorageLive(_7); + _7 = [const 6_u64, const 7_u64, const 8_u64, const 9_u64, const 10_u64]; + StorageLive(_8); + StorageLive(_9); + _8 = copy _7 as [std::mem::MaybeUninit; 5] (Transmute); + _9 = array::iter::iter_inner::PolymorphicIter::<[MaybeUninit; 5]> { alive: const ops::index_range::IndexRange {{ start: 0_usize, end: 5_usize }}, data: copy _8 }; + _10 = std::array::IntoIter:: { inner: copy _9 }; + StorageDead(_9); + StorageDead(_8); + StorageDead(_7); + StorageLive(_11); + _11 = make_large_iter::double as fn(u64) -> u64 (PointerCoercion(ReifyFnPointer, AsCast)); + _12 = Map::, fn(u64) -> u64> { iter: copy _10, f: copy _11 }; + StorageDead(_11); + StorageDead(_10); + StorageLive(_13); + _13 = Option::, fn(u64) -> u64>>::Some(copy _6); + StorageLive(_14); + _14 = Option::, fn(u64) -> u64>>::Some(copy _12); + _15 = std::iter::Chain::, fn(u64) -> u64>, Map, fn(u64) -> u64>> { a: move _13, b: move _14 }; + StorageDead(_14); + StorageDead(_13); + StorageDead(_6); + StorageDead(_12); + _0 = , fn(u64) -> u64>, Map, fn(u64) -> u64>> as Iterator>::collect::>(move _15) -> [return: bb1, unwind unreachable]; + } + + bb1: { + return; + } +} + +ALLOC0 (size: 8, align: 4) { + 00 00 00 00 05 00 00 00 │ ........ +} diff --git a/tests/mir-opt/annotate-moves/iter.test_impl_trait_return.AnnotateMoves.after.mir b/tests/mir-opt/annotate-moves/iter.test_impl_trait_return.AnnotateMoves.after.mir new file mode 100644 index 0000000000000..fe8429c848397 --- /dev/null +++ b/tests/mir-opt/annotate-moves/iter.test_impl_trait_return.AnnotateMoves.after.mir @@ -0,0 +1,165 @@ +// MIR for `test_impl_trait_return` after AnnotateMoves + +fn test_impl_trait_return() -> std::iter::Chain, fn(u64) -> u64>, Map, fn(u64) -> u64>> { + let mut _0: std::iter::Chain, fn(u64) -> u64>, std::iter::Map, fn(u64) -> u64>>; + scope 1 (inlined make_large_iter) { + let mut _1: [u64; 5]; + let mut _4: std::array::IntoIter; + let mut _5: fn(u64) -> u64; + let mut _6: std::iter::Map, fn(u64) -> u64>; + let mut _7: [u64; 5]; + let mut _10: std::array::IntoIter; + let mut _11: fn(u64) -> u64; + let mut _12: std::iter::Map, fn(u64) -> u64>; + scope 2 (inlined array::iter::::into_iter) { + debug self => _1; + let _2: [std::mem::MaybeUninit; 5]; + scope 3 { + let _3: std::array::iter::iter_inner::PolymorphicIter<[std::mem::MaybeUninit; 5]>; + scope 4 { + scope 21 (inlined core::profiling::compiler_copy::; 5]>, 48>) { + scope 33 (inlined core::profiling::compiler_copy::; 5]>, 48>) { + } + } + } + scope 5 (inlined ops::index_range::IndexRange::zero_to) { + } + scope 6 (inlined array::iter::iter_inner::PolymorphicIter::<[MaybeUninit; 5]>::new_unchecked) { + scope 20 (inlined core::profiling::compiler_copy::<[MaybeUninit; 5], 40>) { + scope 32 (inlined core::profiling::compiler_copy::<[MaybeUninit; 5], 40>) { + } + } + } + } + scope 19 (inlined core::profiling::compiler_copy::<[u64; 5], 40>) { + scope 31 (inlined core::profiling::compiler_copy::<[u64; 5], 40>) { + } + } + } + scope 7 (inlined as Iterator>::map:: u64>) { + debug self => _4; + debug f => _5; + 
scope 8 (inlined Map::, fn(u64) -> u64>::new) { + scope 22 (inlined core::profiling::compiler_copy::, 48>) { + scope 34 (inlined core::profiling::compiler_copy::, 48>) { + } + } + } + } + scope 9 (inlined array::iter::::into_iter) { + debug self => _7; + let _8: [std::mem::MaybeUninit; 5]; + scope 10 { + let _9: std::array::iter::iter_inner::PolymorphicIter<[std::mem::MaybeUninit; 5]>; + scope 11 { + scope 25 (inlined core::profiling::compiler_copy::; 5]>, 48>) { + scope 37 (inlined core::profiling::compiler_copy::; 5]>, 48>) { + } + } + } + scope 12 (inlined ops::index_range::IndexRange::zero_to) { + } + scope 13 (inlined array::iter::iter_inner::PolymorphicIter::<[MaybeUninit; 5]>::new_unchecked) { + scope 24 (inlined core::profiling::compiler_copy::<[MaybeUninit; 5], 40>) { + scope 36 (inlined core::profiling::compiler_copy::<[MaybeUninit; 5], 40>) { + } + } + } + } + scope 23 (inlined core::profiling::compiler_copy::<[u64; 5], 40>) { + scope 35 (inlined core::profiling::compiler_copy::<[u64; 5], 40>) { + } + } + } + scope 14 (inlined as Iterator>::map:: u64>) { + debug self => _10; + debug f => _11; + scope 15 (inlined Map::, fn(u64) -> u64>::new) { + scope 26 (inlined core::profiling::compiler_copy::, 48>) { + scope 38 (inlined core::profiling::compiler_copy::, 48>) { + } + } + } + } + scope 16 (inlined , fn(u64) -> u64> as Iterator>::chain::, fn(u64) -> u64>>) { + debug self => _6; + debug other => _12; + scope 17 (inlined std::iter::Chain::, fn(u64) -> u64>, Map, fn(u64) -> u64>>::new) { + debug a => _6; + debug b => _12; + let mut _13: std::option::Option, fn(u64) -> u64>>; + let mut _14: std::option::Option, fn(u64) -> u64>>; + scope 27 (inlined core::profiling::compiler_copy::, fn(u64) -> u64>, 52>) { + scope 39 (inlined core::profiling::compiler_copy::, fn(u64) -> u64>, 52>) { + } + } + scope 28 (inlined core::profiling::compiler_copy::, fn(u64) -> u64>, 52>) { + scope 40 (inlined core::profiling::compiler_copy::, fn(u64) -> u64>, 52>) { + } + } + scope 29 (inlined core::profiling::compiler_move::, fn(u64) -> u64>>, 52>) { + } + scope 30 (inlined core::profiling::compiler_move::, fn(u64) -> u64>>, 52>) { + scope 41 (inlined core::profiling::compiler_move::, fn(u64) -> u64>>, 52>) { + } + scope 42 (inlined core::profiling::compiler_move::, fn(u64) -> u64>>, 52>) { + } + } + } + scope 18 (inlined , fn(u64) -> u64> as IntoIterator>::into_iter) { + debug self => _12; + } + } + } + + bb0: { + StorageLive(_12); + StorageLive(_6); + StorageLive(_4); + StorageLive(_1); + _1 = [const 1_u64, const 2_u64, const 3_u64, const 4_u64, const 5_u64]; + StorageLive(_2); + StorageLive(_3); + _2 = copy _1 as [std::mem::MaybeUninit; 5] (Transmute); + _3 = array::iter::iter_inner::PolymorphicIter::<[MaybeUninit; 5]> { alive: const ops::index_range::IndexRange {{ start: 0_usize, end: 5_usize }}, data: copy _2 }; + _4 = std::array::IntoIter:: { inner: copy _3 }; + StorageDead(_3); + StorageDead(_2); + StorageDead(_1); + StorageLive(_5); + _5 = make_large_iter::double as fn(u64) -> u64 (PointerCoercion(ReifyFnPointer, AsCast)); + _6 = Map::, fn(u64) -> u64> { iter: copy _4, f: copy _5 }; + StorageDead(_5); + StorageDead(_4); + StorageLive(_10); + StorageLive(_7); + _7 = [const 6_u64, const 7_u64, const 8_u64, const 9_u64, const 10_u64]; + StorageLive(_8); + StorageLive(_9); + _8 = copy _7 as [std::mem::MaybeUninit; 5] (Transmute); + _9 = array::iter::iter_inner::PolymorphicIter::<[MaybeUninit; 5]> { alive: const ops::index_range::IndexRange {{ start: 0_usize, end: 5_usize }}, data: copy _8 }; + _10 = 
std::array::IntoIter:: { inner: copy _9 }; + StorageDead(_9); + StorageDead(_8); + StorageDead(_7); + StorageLive(_11); + _11 = make_large_iter::double as fn(u64) -> u64 (PointerCoercion(ReifyFnPointer, AsCast)); + _12 = Map::, fn(u64) -> u64> { iter: copy _10, f: copy _11 }; + StorageDead(_11); + StorageDead(_10); + StorageLive(_13); + _13 = Option::, fn(u64) -> u64>>::Some(copy _6); + StorageLive(_14); + _14 = Option::, fn(u64) -> u64>>::Some(copy _12); + _0 = std::iter::Chain::, fn(u64) -> u64>, Map, fn(u64) -> u64>> { a: move _13, b: move _14 }; + StorageDead(_14); + StorageDead(_13); + StorageDead(_6); + StorageDead(_12); + return; + } +} + +ALLOC0 (size: 8, align: 4) { + 00 00 00 00 05 00 00 00 │ ........ +} diff --git a/tests/mir-opt/annotate-moves/match_move.rs b/tests/mir-opt/annotate-moves/match_move.rs new file mode 100644 index 0000000000000..5aceb31279053 --- /dev/null +++ b/tests/mir-opt/annotate-moves/match_move.rs @@ -0,0 +1,21 @@ +//@ compile-flags: -Z annotate-moves=8 -C debuginfo=full +//@ ignore-std-debug-assertions +//@ edition: 2021 + +#![crate_type = "lib"] + +#[derive(Clone)] +pub struct LargeStruct { + pub data: [u64; 20], // 160 bytes +} + +// EMIT_MIR match_move.test_match.AnnotateMoves.after.mir +pub fn test_match(opt: Option) -> LargeStruct { + // CHECK-LABEL: fn test_match( + // Move in match expression + // CHECK: scope {{[0-9]+}} (inlined core::profiling::compiler_move::) + match opt { + Some(s) => s, + None => LargeStruct { data: [0; 20] }, + } +} diff --git a/tests/mir-opt/annotate-moves/match_move.test_match.AnnotateMoves.after.mir b/tests/mir-opt/annotate-moves/match_move.test_match.AnnotateMoves.after.mir new file mode 100644 index 0000000000000..553dfa6dbe2a4 --- /dev/null +++ b/tests/mir-opt/annotate-moves/match_move.test_match.AnnotateMoves.after.mir @@ -0,0 +1,37 @@ +// MIR for `test_match` after AnnotateMoves + +fn test_match(_1: Option) -> LargeStruct { + debug opt => _1; + let mut _0: LargeStruct; + let mut _2: isize; + let mut _3: [u64; 20]; + scope 1 { + debug s => _0; + } + scope 2 (inlined core::profiling::compiler_move::<[u64; 20], 160>) { + } + scope 3 (inlined core::profiling::compiler_move::) { + } + + bb0: { + _2 = discriminant(_1); + switchInt(move _2) -> [0: bb1, 1: bb2, otherwise: bb3]; + } + + bb1: { + StorageLive(_3); + _3 = [const 0_u64; 20]; + _0 = LargeStruct { data: move _3 }; + StorageDead(_3); + return; + } + + bb2: { + _0 = move ((_1 as Some).0: LargeStruct); + return; + } + + bb3: { + unreachable; + } +} diff --git a/tests/mir-opt/annotate-moves/move_return.rs b/tests/mir-opt/annotate-moves/move_return.rs new file mode 100644 index 0000000000000..84687b8ab842e --- /dev/null +++ b/tests/mir-opt/annotate-moves/move_return.rs @@ -0,0 +1,17 @@ +//@ compile-flags: -Z annotate-moves=8 -C debuginfo=full +//@ ignore-std-debug-assertions +//@ edition: 2021 + +#![crate_type = "lib"] + +#[derive(Clone)] +pub struct LargeStruct { + pub data: [u64; 20], // 160 bytes +} + +// EMIT_MIR move_return.test_move.AnnotateMoves.after.mir +pub fn test_move(s: LargeStruct) -> LargeStruct { + // CHECK-LABEL: fn test_move( + // CHECK: scope {{[0-9]+}} (inlined core::profiling::compiler_move::) + s +} diff --git a/tests/mir-opt/annotate-moves/move_return.test_move.AnnotateMoves.after.mir b/tests/mir-opt/annotate-moves/move_return.test_move.AnnotateMoves.after.mir new file mode 100644 index 0000000000000..8e39485b5ae53 --- /dev/null +++ b/tests/mir-opt/annotate-moves/move_return.test_move.AnnotateMoves.after.mir @@ -0,0 +1,13 @@ +// MIR for `test_move` 
after AnnotateMoves + +fn test_move(_1: LargeStruct) -> LargeStruct { + debug s => _1; + let mut _0: LargeStruct; + scope 1 (inlined core::profiling::compiler_move::) { + } + + bb0: { + _0 = move _1; + return; + } +} diff --git a/tests/mir-opt/annotate-moves/small_move.rs b/tests/mir-opt/annotate-moves/small_move.rs new file mode 100644 index 0000000000000..64a5763d57ac1 --- /dev/null +++ b/tests/mir-opt/annotate-moves/small_move.rs @@ -0,0 +1,19 @@ +//@ compile-flags: -Z annotate-moves=8 -C debuginfo=full +//@ ignore-std-debug-assertions +//@ edition: 2021 + +#![crate_type = "lib"] + +#[derive(Clone)] +pub struct SmallStruct { + pub data: u32, // 4 bytes +} + +// EMIT_MIR small_move.test_small_move.AnnotateMoves.after.mir +pub fn test_small_move(s: SmallStruct) -> SmallStruct { + // CHECK-LABEL: fn test_small_move( + // Small types should NOT be annotated + // CHECK-NOT: core::profiling::compiler_move + // CHECK-NOT: core::profiling::compiler_copy + s +} diff --git a/tests/mir-opt/annotate-moves/small_move.test_small_move.AnnotateMoves.after.mir b/tests/mir-opt/annotate-moves/small_move.test_small_move.AnnotateMoves.after.mir new file mode 100644 index 0000000000000..5ed79ecb0606c --- /dev/null +++ b/tests/mir-opt/annotate-moves/small_move.test_small_move.AnnotateMoves.after.mir @@ -0,0 +1,11 @@ +// MIR for `test_small_move` after AnnotateMoves + +fn test_small_move(_1: SmallStruct) -> SmallStruct { + debug s => _1; + let mut _0: SmallStruct; + + bb0: { + _0 = move _1; + return; + } +} diff --git a/tests/ui/annotate-moves/annotate-moves-basic.rs b/tests/ui/annotate-moves/annotate-moves-basic.rs new file mode 100644 index 0000000000000..645122113dab2 --- /dev/null +++ b/tests/ui/annotate-moves/annotate-moves-basic.rs @@ -0,0 +1,15 @@ +//@ check-pass +//@ compile-flags: -Z annotate-moves=100 + +// Test that valid annotate-moves flags are accepted + +#[derive(Clone)] +struct TestStruct { + data: [u64; 20], // 160 bytes +} + +fn main() { + let s = TestStruct { data: [42; 20] }; + let _copy = s.clone(); + let _moved = s; +} diff --git a/tests/ui/annotate-moves/annotate-moves-invalid-flag.rs b/tests/ui/annotate-moves/annotate-moves-invalid-flag.rs new file mode 100644 index 0000000000000..621b7861657ef --- /dev/null +++ b/tests/ui/annotate-moves/annotate-moves-invalid-flag.rs @@ -0,0 +1,10 @@ +//@ check-fail +//@ compile-flags: -Z annotate-moves=invalid + +// Test that invalid values for annotate-moves flag are rejected + +fn main() { + // This should fail at compile time due to invalid flag value +} + +//~? 
ERROR incorrect value `invalid` for unstable option `annotate-moves` diff --git a/tests/ui/annotate-moves/annotate-moves-invalid-flag.stderr b/tests/ui/annotate-moves/annotate-moves-invalid-flag.stderr new file mode 100644 index 0000000000000..1af6582ac5ad2 --- /dev/null +++ b/tests/ui/annotate-moves/annotate-moves-invalid-flag.stderr @@ -0,0 +1,2 @@ +error: incorrect value `invalid` for unstable option `annotate-moves` - either a boolean (`y`, `yes`, `on`, `true`, `n`, `no`, `off` or `false`), or a number was expected + diff --git a/tests/ui/annotate-moves/annotate-moves-size-limit-invalid.rs b/tests/ui/annotate-moves/annotate-moves-size-limit-invalid.rs new file mode 100644 index 0000000000000..50a402102184a --- /dev/null +++ b/tests/ui/annotate-moves/annotate-moves-size-limit-invalid.rs @@ -0,0 +1,10 @@ +//@ check-fail +//@ compile-flags: -Z annotate-moves=-5 + +// Test that negative size limits are rejected + +fn main() { + // This should fail at compile time due to invalid negative size limit +} + +//~? ERROR incorrect value `-5` for unstable option `annotate-moves` diff --git a/tests/ui/annotate-moves/annotate-moves-size-limit-invalid.stderr b/tests/ui/annotate-moves/annotate-moves-size-limit-invalid.stderr new file mode 100644 index 0000000000000..742cbeb89b974 --- /dev/null +++ b/tests/ui/annotate-moves/annotate-moves-size-limit-invalid.stderr @@ -0,0 +1,2 @@ +error: incorrect value `-5` for unstable option `annotate-moves` - either a boolean (`y`, `yes`, `on`, `true`, `n`, `no`, `off` or `false`), or a number was expected + From 723ea61d7684f5e4239a7b32231b5715f775dee5 Mon Sep 17 00:00:00 2001 From: Jeremy Fitzhardinge Date: Mon, 13 Oct 2025 00:47:39 -0700 Subject: [PATCH 2/2] Put move source locations for parameters into Call/TailCall Rather than storing parameter moves in a separate side structure, which relies on MIR indexes not changing again, move the parameter-move source_info into TerminatorKind::Call/TailCall itself. This keeps everything together as a parallel array of parameter move information. If we aren't annotating moves, or no parameters need annotation, this field remains `None`; otherwise it is a `Box<[Option<SourceInfo>]>` recording the source location of each annotated parameter. This ends up touching a lot of files with mostly one-line changes, just because of the extra field in the Call/TailCall variants. I changed a lot of the patterns to use `..` wildcard matching since in many cases most fields were being skipped.
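For orientation, here is a minimal, self-contained sketch of the parallel-array shape this commit introduces. It is not rustc code: `SourceInfo` below is a simplified stand-in for rustc's type, and the helper names (`build_arg_move_table`, `debug_loc_for_arg`) are hypothetical, invented only to illustrate how a `Option<Box<[Option<SourceInfo>]>>` table indexed by argument position might be built and consumed.

// Standalone sketch (assumptions: simplified SourceInfo, hypothetical helpers).
#[derive(Clone, Copy, Debug, PartialEq)]
struct SourceInfo {
    span_lo: u32, // stand-in for a real Span
    scope: u32,   // stand-in for a SourceScope index
}

/// Build the per-argument table: `None` overall when no argument needs an
/// annotation, otherwise one `Option<SourceInfo>` slot per argument.
fn build_arg_move_table(
    per_arg: Vec<Option<SourceInfo>>,
) -> Option<Box<[Option<SourceInfo>]>> {
    if per_arg.iter().any(Option::is_some) {
        Some(per_arg.into_boxed_slice())
    } else {
        None
    }
}

/// When emitting argument `i`, use the annotated location if one was recorded,
/// otherwise fall back to the call site's own SourceInfo.
fn debug_loc_for_arg(
    table: &Option<Box<[Option<SourceInfo>]>>,
    i: usize,
    call_site: SourceInfo,
) -> SourceInfo {
    table
        .as_ref()
        .and_then(|infos| infos.get(i).copied().flatten())
        .unwrap_or(call_site)
}

fn main() {
    let call_site = SourceInfo { span_lo: 100, scope: 0 };
    let moved_arg = SourceInfo { span_lo: 120, scope: 7 }; // e.g. a compiler_move scope

    // Two arguments; only the second is a large move worth annotating.
    let table = build_arg_move_table(vec![None, Some(moved_arg)]);
    assert_eq!(debug_loc_for_arg(&table, 0, call_site), call_site);
    assert_eq!(debug_loc_for_arg(&table, 1, call_site), moved_arg);

    // No annotations at all: the whole table stays `None`.
    assert_eq!(build_arg_move_table(vec![None, None]), None);
    println!("parallel-array sketch ok");
}

The `.copied().flatten()` lookup mirrors how the codegen change below reads the table; everything else in the sketch is illustrative only.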
--- compiler/rustc_borrowck/src/lib.rs | 12 +- .../src/polonius/legacy/loan_invalidations.rs | 10 +- compiler/rustc_codegen_cranelift/src/base.rs | 10 +- compiler/rustc_codegen_ssa/src/mir/block.rs | 21 ++-- .../rustc_const_eval/src/interpret/step.rs | 12 +- compiler/rustc_middle/src/mir/mod.rs | 13 +-- compiler/rustc_middle/src/mir/syntax.rs | 22 +++- compiler/rustc_middle/src/mir/terminator.rs | 10 +- compiler/rustc_middle/src/mir/visit.rs | 6 +- .../rustc_mir_build/src/builder/custom/mod.rs | 1 - .../src/builder/custom/parse/instruction.rs | 2 + .../src/builder/expr/as_rvalue.rs | 1 + .../rustc_mir_build/src/builder/expr/into.rs | 2 + .../rustc_mir_build/src/builder/expr/stmt.rs | 7 +- .../src/builder/matches/test.rs | 2 + .../src/move_paths/builder.rs | 4 +- .../rustc_mir_transform/src/annotate_moves.rs | 105 ++++++++++-------- compiler/rustc_mir_transform/src/coroutine.rs | 10 +- .../rustc_mir_transform/src/coroutine/drop.rs | 2 + .../rustc_mir_transform/src/elaborate_drop.rs | 3 + .../src/function_item_references.rs | 1 + compiler/rustc_mir_transform/src/lib.rs | 1 + .../rustc_mir_transform/src/promote_consts.rs | 1 + compiler/rustc_mir_transform/src/shim.rs | 2 + .../src/shim/async_destructor_ctor.rs | 2 + .../src/mono_checks/move_check.rs | 2 +- .../src/unstable/convert/stable/mir.rs | 26 ++--- .../clippy_utils/src/qualify_min_const_fn.rs | 11 +- 28 files changed, 138 insertions(+), 163 deletions(-) diff --git a/compiler/rustc_borrowck/src/lib.rs b/compiler/rustc_borrowck/src/lib.rs index 4c380ddcf7084..b9f1d5d611f04 100644 --- a/compiler/rustc_borrowck/src/lib.rs +++ b/compiler/rustc_borrowck/src/lib.rs @@ -904,22 +904,14 @@ impl<'a, 'tcx> ResultsVisitor<'tcx, Borrowck<'a, 'tcx>> for MirBorrowckCtxt<'a, state, ); } - TerminatorKind::Call { - func, - args, - destination, - target: _, - unwind: _, - call_source: _, - fn_span: _, - } => { + TerminatorKind::Call { func, args, destination, .. } => { self.consume_operand(loc, (func, span), state); for arg in args { self.consume_operand(loc, (&arg.node, arg.span), state); } self.mutate_place(loc, (*destination, span), Deep, state); } - TerminatorKind::TailCall { func, args, fn_span: _ } => { + TerminatorKind::TailCall { func, args, .. } => { self.consume_operand(loc, (func, span), state); for arg in args { self.consume_operand(loc, (&arg.node, arg.span), state); diff --git a/compiler/rustc_borrowck/src/polonius/legacy/loan_invalidations.rs b/compiler/rustc_borrowck/src/polonius/legacy/loan_invalidations.rs index c2ad6fcb4b799..d4401d4398fd0 100644 --- a/compiler/rustc_borrowck/src/polonius/legacy/loan_invalidations.rs +++ b/compiler/rustc_borrowck/src/polonius/legacy/loan_invalidations.rs @@ -118,15 +118,7 @@ impl<'a, 'tcx> Visitor<'tcx> for LoanInvalidationsGenerator<'a, 'tcx> { LocalMutationIsAllowed::Yes, ); } - TerminatorKind::Call { - func, - args, - destination, - target: _, - unwind: _, - call_source: _, - fn_span: _, - } => { + TerminatorKind::Call { func, args, destination, .. 
} => { self.consume_operand(location, func); for arg in args { self.consume_operand(location, &arg.node); diff --git a/compiler/rustc_codegen_cranelift/src/base.rs b/compiler/rustc_codegen_cranelift/src/base.rs index 41e11e1de6163..0f367c23ffd6a 100644 --- a/compiler/rustc_codegen_cranelift/src/base.rs +++ b/compiler/rustc_codegen_cranelift/src/base.rs @@ -478,15 +478,7 @@ fn codegen_fn_body(fx: &mut FunctionCx<'_, '_, '_>, start_block: Block) { switch.emit(&mut fx.bcx, discr, otherwise_block); } } - TerminatorKind::Call { - func, - args, - destination, - target, - fn_span, - unwind, - call_source: _, - } => { + TerminatorKind::Call { func, args, destination, target, fn_span, unwind, .. } => { fx.tcx.prof.generic_activity("codegen call").run(|| { crate::abi::codegen_terminator_call( fx, diff --git a/compiler/rustc_codegen_ssa/src/mir/block.rs b/compiler/rustc_codegen_ssa/src/mir/block.rs index 75e7520c33544..874a1d1f3adee 100644 --- a/compiler/rustc_codegen_ssa/src/mir/block.rs +++ b/compiler/rustc_codegen_ssa/src/mir/block.rs @@ -1152,13 +1152,16 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { } // Look up stored SourceInfo for this argument if it exists (from annotate_moves pass) - let bb_info = self - .mir - .call_arg_move_source_info - .iter() - .find(|&&((block, idx), _)| block == helper.bb && idx == i); - if let Some((_, arg_source_info)) = bb_info { - self.set_debug_loc(bx, *arg_source_info); + let arg_source_info = match &terminator.kind { + mir::TerminatorKind::Call { arg_move_source_info, .. } + | mir::TerminatorKind::TailCall { arg_move_source_info, .. } => { + arg_move_source_info.as_ref().and_then(|infos| infos.get(i).copied().flatten()) + } + _ => None, + }; + + if let Some(arg_source_info) = arg_source_info { + self.set_debug_loc(bx, arg_source_info); } self.codegen_argument( @@ -1454,8 +1457,8 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { destination, target, unwind, - call_source: _, fn_span, + .. } => self.codegen_call_terminator( helper, bx, @@ -1469,7 +1472,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { CallKind::Normal, mergeable_succ(), ), - mir::TerminatorKind::TailCall { ref func, ref args, fn_span } => self + mir::TerminatorKind::TailCall { ref func, ref args, fn_span, .. } => self .codegen_call_terminator( helper, bx, diff --git a/compiler/rustc_const_eval/src/interpret/step.rs b/compiler/rustc_const_eval/src/interpret/step.rs index 923e00ad4cf1a..2718bfe73f1eb 100644 --- a/compiler/rustc_const_eval/src/interpret/step.rs +++ b/compiler/rustc_const_eval/src/interpret/step.rs @@ -544,15 +544,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { self.go_to_block(target_block); } - Call { - ref func, - ref args, - destination, - target, - unwind, - call_source: _, - fn_span: _, - } => { + Call { ref func, ref args, destination, target, unwind, .. } => { let old_stack = self.frame_idx(); let old_loc = self.frame().loc; @@ -576,7 +568,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { } } - TailCall { ref func, ref args, fn_span: _ } => { + TailCall { ref func, ref args, .. 
} => { let old_frame_idx = self.frame_idx(); let EvaluatedCalleeAndArgs { callee, args, fn_sig, fn_abi, with_caller_location } = diff --git a/compiler/rustc_middle/src/mir/mod.rs b/compiler/rustc_middle/src/mir/mod.rs index 1ff57162f28e8..1f632eede7480 100644 --- a/compiler/rustc_middle/src/mir/mod.rs +++ b/compiler/rustc_middle/src/mir/mod.rs @@ -331,13 +331,6 @@ pub struct Body<'tcx> { #[type_foldable(identity)] #[type_visitable(ignore)] pub function_coverage_info: Option>, - - /// Debug information for argument moves/copies in Call parameters. Stores pairs of - /// ((BasicBlock, argument_index), SourceInfo) for move/copy operations. Only populated when - /// `-Zannotate-moves` is enabled. - #[type_foldable(identity)] - #[type_visitable(ignore)] - pub call_arg_move_source_info: Vec<((BasicBlock, usize), SourceInfo)>, } impl<'tcx> Body<'tcx> { @@ -381,7 +374,6 @@ impl<'tcx> Body<'tcx> { tainted_by_errors, coverage_info_hi: None, function_coverage_info: None, - call_arg_move_source_info: Vec::new(), }; body.is_polymorphic = body.has_non_region_param(); body @@ -413,7 +405,6 @@ impl<'tcx> Body<'tcx> { tainted_by_errors: None, coverage_info_hi: None, function_coverage_info: None, - call_arg_move_source_info: Vec::new(), }; body.is_polymorphic = body.has_non_region_param(); body @@ -1673,11 +1664,11 @@ mod size_asserts { use super::*; // tidy-alphabetical-start - static_assert_size!(BasicBlockData<'_>, 128); + static_assert_size!(BasicBlockData<'_>, 144); static_assert_size!(LocalDecl<'_>, 40); static_assert_size!(SourceScopeData<'_>, 64); static_assert_size!(Statement<'_>, 32); - static_assert_size!(Terminator<'_>, 96); + static_assert_size!(Terminator<'_>, 112); static_assert_size!(VarDebugInfo<'_>, 88); // tidy-alphabetical-end } diff --git a/compiler/rustc_middle/src/mir/syntax.rs b/compiler/rustc_middle/src/mir/syntax.rs index e6c8512564edc..5f843ab7c9461 100644 --- a/compiler/rustc_middle/src/mir/syntax.rs +++ b/compiler/rustc_middle/src/mir/syntax.rs @@ -16,7 +16,7 @@ use rustc_span::{Span, Symbol}; use rustc_target::asm::InlineAsmRegOrRegClass; use smallvec::SmallVec; -use super::{BasicBlock, Const, Local, UserTypeProjection}; +use super::{BasicBlock, Const, Local, SourceInfo, UserTypeProjection}; use crate::mir::coverage::CoverageKind; use crate::ty::adjustment::PointerCoercion; use crate::ty::{self, GenericArgsRef, List, Region, Ty, UserTypeAnnotationIndex}; @@ -812,7 +812,7 @@ pub enum TerminatorKind<'tcx> { /// /// [#71117]: https://github.com/rust-lang/rust/issues/71117 Call { - /// The function that’s being called. + /// The function that's being called. func: Operand<'tcx>, /// Arguments the function is called with. /// These are owned by the callee, which is free to modify them. @@ -832,6 +832,13 @@ pub enum TerminatorKind<'tcx> { /// This `Span` is the span of the function, without the dot and receiver /// e.g. `foo(a, b)` in `x.foo(a, b)` fn_span: Span, + /// Optional array of source info for move operations in the arguments. + /// This is used to make the move operations appear in profilers as if they + /// were inlined from compiler_move intrinsics. + /// If None, no special source info is used. + /// If Some, the array has the same length as args, with None for arguments + /// that don't need special source info. + arg_move_source_info: Option]>>, }, /// Tail call. @@ -851,7 +858,7 @@ pub enum TerminatorKind<'tcx> { /// [`Call`]: TerminatorKind::Call /// [`Return`]: TerminatorKind::Return TailCall { - /// The function that’s being called. 
+ /// The function that's being called. func: Operand<'tcx>, /// Arguments the function is called with. /// These are owned by the callee, which is free to modify them. @@ -862,6 +869,13 @@ pub enum TerminatorKind<'tcx> { /// This `Span` is the span of the function, without the dot and receiver /// (e.g. `foo(a, b)` in `x.foo(a, b)` fn_span: Span, + /// Optional array of source info for move operations in the arguments. + /// This is used to make the move operations appear in profilers as if they + /// were inlined from compiler_move intrinsics. + /// If None, no special source info is used. + /// If Some, the array has the same length as args, with None for arguments + /// that don't need special source info. + arg_move_source_info: Option]>>, }, /// Evaluates the operand, which must have type `bool`. If it is not equal to `expected`, @@ -1734,6 +1748,6 @@ mod size_asserts { static_assert_size!(PlaceElem<'_>, 24); static_assert_size!(Rvalue<'_>, 40); static_assert_size!(StatementKind<'_>, 16); - static_assert_size!(TerminatorKind<'_>, 80); + static_assert_size!(TerminatorKind<'_>, 96); // tidy-alphabetical-end } diff --git a/compiler/rustc_middle/src/mir/terminator.rs b/compiler/rustc_middle/src/mir/terminator.rs index 4034a3a06e943..8bb99b568dbc2 100644 --- a/compiler/rustc_middle/src/mir/terminator.rs +++ b/compiler/rustc_middle/src/mir/terminator.rs @@ -779,15 +779,7 @@ impl<'tcx> TerminatorKind<'tcx> { } } - Call { - unwind, - destination, - ref target, - func: _, - args: _, - fn_span: _, - call_source: _, - } => TerminatorEdges::AssignOnReturn { + Call { unwind, destination, ref target, .. } => TerminatorEdges::AssignOnReturn { return_: target.as_ref().map(slice::from_ref).unwrap_or_default(), cleanup: unwind.cleanup_block(), place: CallReturnPlaces::Call(destination), diff --git a/compiler/rustc_middle/src/mir/visit.rs b/compiler/rustc_middle/src/mir/visit.rs index 81df239dee42d..8f259cfaa27b9 100644 --- a/compiler/rustc_middle/src/mir/visit.rs +++ b/compiler/rustc_middle/src/mir/visit.rs @@ -551,10 +551,8 @@ macro_rules! make_mir_visitor { func, args, destination, - target: _, - unwind: _, - call_source: _, fn_span, + .. } => { self.visit_span($(& $mutability)? *fn_span); self.visit_operand(func, location); @@ -568,7 +566,7 @@ macro_rules! make_mir_visitor { ); } - TerminatorKind::TailCall { func, args, fn_span } => { + TerminatorKind::TailCall { func, args, fn_span, .. } => { self.visit_span($(& $mutability)? 
*fn_span); self.visit_operand(func, location); for arg in args { diff --git a/compiler/rustc_mir_build/src/builder/custom/mod.rs b/compiler/rustc_mir_build/src/builder/custom/mod.rs index 1093bda324d17..792ad6d782cf3 100644 --- a/compiler/rustc_mir_build/src/builder/custom/mod.rs +++ b/compiler/rustc_mir_build/src/builder/custom/mod.rs @@ -62,7 +62,6 @@ pub(super) fn build_custom_mir<'tcx>( pass_count: 0, coverage_info_hi: None, function_coverage_info: None, - call_arg_move_source_info: Vec::new(), }; body.local_decls.push(LocalDecl::new(return_ty, return_ty_span)); diff --git a/compiler/rustc_mir_build/src/builder/custom/parse/instruction.rs b/compiler/rustc_mir_build/src/builder/custom/parse/instruction.rs index 54490e0050902..ad692b5a8e196 100644 --- a/compiler/rustc_mir_build/src/builder/custom/parse/instruction.rs +++ b/compiler/rustc_mir_build/src/builder/custom/parse/instruction.rs @@ -194,6 +194,7 @@ impl<'a, 'tcx> ParseCtxt<'a, 'tcx> { CallSource::OverloadedOperator }, fn_span: *fn_span, + arg_move_source_info: None, }) }, ) @@ -213,6 +214,7 @@ impl<'a, 'tcx> ParseCtxt<'a, 'tcx> { func: fun, args, fn_span: *fn_span, + arg_move_source_info: None, }) }, ) diff --git a/compiler/rustc_mir_build/src/builder/expr/as_rvalue.rs b/compiler/rustc_mir_build/src/builder/expr/as_rvalue.rs index 3a5839f2d404d..85accdab94011 100644 --- a/compiler/rustc_mir_build/src/builder/expr/as_rvalue.rs +++ b/compiler/rustc_mir_build/src/builder/expr/as_rvalue.rs @@ -166,6 +166,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { unwind: UnwindAction::Continue, call_source: CallSource::Misc, fn_span: expr_span, + arg_move_source_info: None, }, ); this.diverge_from(block); diff --git a/compiler/rustc_mir_build/src/builder/expr/into.rs b/compiler/rustc_mir_build/src/builder/expr/into.rs index 7676b720e3579..48fc638ad7d0a 100644 --- a/compiler/rustc_mir_build/src/builder/expr/into.rs +++ b/compiler/rustc_mir_build/src/builder/expr/into.rs @@ -408,6 +408,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { CallSource::OverloadedOperator }, fn_span, + arg_move_source_info: None, }, ); this.diverge_from(block); @@ -451,6 +452,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { unwind: UnwindAction::Unreachable, call_source: CallSource::Use, fn_span: expr_span, + arg_move_source_info: None, }, ); success.unit() diff --git a/compiler/rustc_mir_build/src/builder/expr/stmt.rs b/compiler/rustc_mir_build/src/builder/expr/stmt.rs index 675beceea14a9..11c4295d43dec 100644 --- a/compiler/rustc_mir_build/src/builder/expr/stmt.rs +++ b/compiler/rustc_mir_build/src/builder/expr/stmt.rs @@ -135,7 +135,12 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { this.cfg.terminate( block, source_info, - TerminatorKind::TailCall { func: fun, args, fn_span }, + TerminatorKind::TailCall { + func: fun, + args, + fn_span, + arg_move_source_info: None, + }, ); this.cfg.start_new_block().unit() diff --git a/compiler/rustc_mir_build/src/builder/matches/test.rs b/compiler/rustc_mir_build/src/builder/matches/test.rs index 1b6d96e49f0c1..3520341144709 100644 --- a/compiler/rustc_mir_build/src/builder/matches/test.rs +++ b/compiler/rustc_mir_build/src/builder/matches/test.rs @@ -399,6 +399,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { unwind: UnwindAction::Continue, call_source: CallSource::Misc, fn_span: source_info.span, + arg_move_source_info: None, }, ); } @@ -475,6 +476,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { unwind: UnwindAction::Continue, call_source: CallSource::MatchCmp, fn_span: source_info.span, + arg_move_source_info: None, }, ); self.diverge_from(block); diff --git 
a/compiler/rustc_mir_dataflow/src/move_paths/builder.rs b/compiler/rustc_mir_dataflow/src/move_paths/builder.rs index 72d4cd72c2bcf..8b8d656c8813e 100644 --- a/compiler/rustc_mir_dataflow/src/move_paths/builder.rs +++ b/compiler/rustc_mir_dataflow/src/move_paths/builder.rs @@ -459,9 +459,7 @@ impl<'a, 'tcx, F: Fn(Ty<'tcx>) -> bool> MoveDataBuilder<'a, 'tcx, F> { ref args, destination, target, - unwind: _, - call_source: _, - fn_span: _, + .. } => { self.gather_operand(func); for arg in args { diff --git a/compiler/rustc_mir_transform/src/annotate_moves.rs b/compiler/rustc_mir_transform/src/annotate_moves.rs index 8de490c7fdd1a..cb44e9ec84ee5 100644 --- a/compiler/rustc_mir_transform/src/annotate_moves.rs +++ b/compiler/rustc_mir_transform/src/annotate_moves.rs @@ -65,11 +65,8 @@ impl<'tcx> crate::MirPass<'tcx> for AnnotateMoves { size_limit, }; - // Storage for Call terminator argument SourceInfo - let mut call_arg_source_info = Vec::new(); - // Process each basic block - for (block, block_data) in body.basic_blocks.as_mut().iter_enumerated_mut() { + for block_data in body.basic_blocks.as_mut().iter_mut() { for stmt in &mut block_data.statements { let source_info = &mut stmt.source_info; @@ -112,21 +109,35 @@ impl<'tcx> crate::MirPass<'tcx> for AnnotateMoves { // Save the original scope before processing any operands let original_scope = source_info.scope; - match &terminator.kind { - TerminatorKind::Call { func, args, .. } - | TerminatorKind::TailCall { func, args, .. } => { + match &mut terminator.kind { + TerminatorKind::Call { func, args, arg_move_source_info, .. } + | TerminatorKind::TailCall { func, args, arg_move_source_info, .. } + if arg_move_source_info.is_none() => + { self.annotate_move(&mut params, source_info, original_scope, func); - // For Call arguments, store SourceInfo separately instead of modifying the - // terminator's SourceInfo (which would affect the entire Call) - for (index, arg) in args.iter().enumerate() { - if let Some(arg_source_info) = self.get_annotated_source_info( + // For Call arguments, collect SourceInfo for the arguments + let mut arg_move_infos = Vec::with_capacity(args.len()); + let mut has_source_info = false; + + // First collect the source info for each argument + for arg in args.iter() { + let arg_source_info = self.get_annotated_source_info( &mut params, original_scope, &arg.node, - ) { - call_arg_source_info.push(((block, index), arg_source_info)); + ); + + if arg_source_info.is_some() { + has_source_info = true; } + + arg_move_infos.push(arg_source_info); + } + + // If we have any source info, update the terminator + if has_source_info { + *arg_move_source_info = Some(arg_move_infos.into_boxed_slice()); } } TerminatorKind::SwitchInt { discr: op, .. } @@ -155,9 +166,6 @@ impl<'tcx> crate::MirPass<'tcx> for AnnotateMoves { } } } - - // Store the Call argument SourceInfo in the body (only if we have any) - body.call_arg_move_source_info = call_arg_source_info; } fn is_required(&self) -> bool { @@ -234,6 +242,14 @@ impl AnnotateMoves { }; let Params { tcx, typing_env, local_decls, size_limit, source_scopes } = params; + // Check if source_info.scope is already a compiler_move/copy annotation. + // If so, skip it - this statement has already been annotated. 
+ if let Some((instance, _)) = source_scopes[source_info.scope].inlined + && self.is_marker_def_id(instance.def_id()) + { + return; + } + if let Some(type_size) = self.should_annotate_operation(*tcx, *typing_env, local_decls, place, *size_limit) { @@ -250,11 +266,10 @@ impl AnnotateMoves { type_size, ); source_info.scope = new_scope; - // Note: We deliberately do NOT modify source_info.span. - // Keeping the original span means profilers show the actual source location - // of the move/copy, which is more useful than showing profiling.rs:13. - // The scope change is sufficient to make the move appear as an inlined call - // to compiler_move/copy in the profiler. + // Note: We deliberately do NOT modify source_info.span. Keeping the original span means + // profilers show the actual source location of the move/copy, which is more useful than + // showing profiling.rs:XX. The scope change is sufficient to make the move appear as an + // inlined call to compiler_move/copy in the profiler. } } @@ -279,17 +294,12 @@ impl AnnotateMoves { let size = layout.size.bytes(); - // 1. Skip ZST types (no actual move/copy happens) - if layout.is_zst() { - return None; - } - - // 2. Check size threshold (only annotate large moves/copies) - if size < size_limit { + // Skip zst and check size threshold. + if layout.is_zst() || size < size_limit { return None; } - // 3. Skip scalar/vector types that won't generate memcpy + // Skip scalar/vector types that won't generate memcpy match layout.layout.backend_repr { rustc_abi::BackendRepr::Scalar(_) | rustc_abi::BackendRepr::ScalarPair(_, _) @@ -298,6 +308,11 @@ impl AnnotateMoves { } } + fn is_marker_def_id(&self, def_id: DefId) -> bool { + let def_id = Some(def_id); + def_id == self.compiler_move || def_id == self.compiler_copy + } + /// Creates an inlined scope that makes operations appear to come from /// the specified compiler intrinsic function. fn create_inlined_scope<'tcx>( @@ -323,12 +338,11 @@ impl AnnotateMoves { callsite_span, ); - // Get the profiling marker's definition span to use as the scope's span - // This ensures the file_start_pos/file_end_pos in the DebugScope match the DIScope's file + // Get the profiling marker's definition span to use as the scope's span. let profiling_span = tcx.def_span(profiling_def_id); // Create new inlined scope that makes the operation appear to come from the profiling - // marker + // marker. 
let inlined_scope_data = SourceScopeData { // Use profiling_span so file bounds match the DIScope (profiling.rs) // This prevents DILexicalBlockFile mismatches that would show profiling.rs @@ -349,24 +363,21 @@ impl AnnotateMoves { let scope_data = &source_scopes[scope]; if let Some((instance, _)) = scope_data.inlined { // Check if this is a compiler_move/copy scope we created - if let Some(def_id) = instance.def_id().as_local() { - let def_id = Some(def_id.to_def_id()); - if def_id == self.compiler_move || def_id == self.compiler_copy { - // This is one of our scopes, skip it and look at its inlined_parent_scope - if let Some(parent) = scope_data.inlined_parent_scope { - scope = parent; - continue; - } else { - // No more parents, this is fine - break None; - } + if self.is_marker_def_id(instance.def_id()) { + // This is one of our scopes, skip it and look at its inlined_parent_scope + if let Some(parent) = scope_data.inlined_parent_scope { + scope = parent; + } else { + // No more parents, this is fine + break None; } + } else { + // This is a real inlined scope (not compiler_move/copy), use it + break Some(scope); } - // This is a real inlined scope (not compiler_move/copy), use it - break Some(scope); } else { - // Not an inlined scope, use its inlined_parent_scope - break scope_data.inlined_parent_scope; + // Not an inlined scope, so no inlined parent scope + break None; } } }, diff --git a/compiler/rustc_mir_transform/src/coroutine.rs b/compiler/rustc_mir_transform/src/coroutine.rs index c5cd06f170c47..ef1a1774570ab 100644 --- a/compiler/rustc_mir_transform/src/coroutine.rs +++ b/compiler/rustc_mir_transform/src/coroutine.rs @@ -1741,15 +1741,7 @@ impl<'tcx> Visitor<'tcx> for EnsureCoroutineFieldAssignmentsNeverAlias<'_> { // Checking for aliasing in terminators is probably overkill, but until we have actual // semantics, we should be conservative here. match &terminator.kind { - TerminatorKind::Call { - func, - args, - destination, - target: Some(_), - unwind: _, - call_source: _, - fn_span: _, - } => { + TerminatorKind::Call { func, args, destination, target: Some(_), .. 
} => { self.check_assigned_place(*destination, |this| { this.visit_operand(func, location); for arg in args { diff --git a/compiler/rustc_mir_transform/src/coroutine/drop.rs b/compiler/rustc_mir_transform/src/coroutine/drop.rs index fd2d8b2b0563e..fab8945d5a99c 100644 --- a/compiler/rustc_mir_transform/src/coroutine/drop.rs +++ b/compiler/rustc_mir_transform/src/coroutine/drop.rs @@ -61,6 +61,7 @@ fn build_poll_call<'tcx>( unwind, call_source: CallSource::Misc, fn_span: DUMMY_SP, + arg_move_source_info: None, }; insert_term_block(body, call) } @@ -114,6 +115,7 @@ fn build_pin_fut<'tcx>( unwind, call_source: CallSource::Misc, fn_span: span, + arg_move_source_info: None, }, }), false, diff --git a/compiler/rustc_mir_transform/src/elaborate_drop.rs b/compiler/rustc_mir_transform/src/elaborate_drop.rs index 4f3c53d761f10..f8e87d5616ecc 100644 --- a/compiler/rustc_mir_transform/src/elaborate_drop.rs +++ b/compiler/rustc_mir_transform/src/elaborate_drop.rs @@ -379,6 +379,7 @@ where unwind: unwind.into_action(), call_source: CallSource::Misc, fn_span: self.source_info.span, + arg_move_source_info: None, }, ); @@ -413,6 +414,7 @@ where unwind: unwind.into_action(), call_source: CallSource::Misc, fn_span: span, + arg_move_source_info: None, }, ); pin_obj_bb @@ -993,6 +995,7 @@ where unwind: unwind.into_action(), call_source: CallSource::Misc, fn_span: self.source_info.span, + arg_move_source_info: None, }, source_info: self.source_info, }), diff --git a/compiler/rustc_mir_transform/src/function_item_references.rs b/compiler/rustc_mir_transform/src/function_item_references.rs index 38b5ccdb32e77..2364ac17a2f8b 100644 --- a/compiler/rustc_mir_transform/src/function_item_references.rs +++ b/compiler/rustc_mir_transform/src/function_item_references.rs @@ -37,6 +37,7 @@ impl<'tcx> Visitor<'tcx> for FunctionItemRefChecker<'_, 'tcx> { unwind: _, call_source: _, fn_span: _, + arg_move_source_info: _, } = &terminator.kind { let source_info = *self.body.source_info(location); diff --git a/compiler/rustc_mir_transform/src/lib.rs b/compiler/rustc_mir_transform/src/lib.rs index aba6388020e5c..252d1cb6431a8 100644 --- a/compiler/rustc_mir_transform/src/lib.rs +++ b/compiler/rustc_mir_transform/src/lib.rs @@ -291,6 +291,7 @@ fn remap_mir_for_const_eval_select<'tcx>( unwind, call_source: CallSource::Misc, fn_span, + arg_move_source_info: None, }; } _ => {} diff --git a/compiler/rustc_mir_transform/src/promote_consts.rs b/compiler/rustc_mir_transform/src/promote_consts.rs index a0b0c8c990f33..06bf0271a873e 100644 --- a/compiler/rustc_mir_transform/src/promote_consts.rs +++ b/compiler/rustc_mir_transform/src/promote_consts.rs @@ -846,6 +846,7 @@ impl<'a, 'tcx> Promoter<'a, 'tcx> { target: Some(new_target), call_source: desugar, fn_span, + arg_move_source_info: None, }, source_info: SourceInfo::outermost(terminator.source_info.span), ..terminator diff --git a/compiler/rustc_mir_transform/src/shim.rs b/compiler/rustc_mir_transform/src/shim.rs index bca8ffb693b90..84187acb77915 100644 --- a/compiler/rustc_mir_transform/src/shim.rs +++ b/compiler/rustc_mir_transform/src/shim.rs @@ -678,6 +678,7 @@ impl<'tcx> CloneShimBuilder<'tcx> { unwind: UnwindAction::Cleanup(cleanup), call_source: CallSource::Normal, fn_span: self.span, + arg_move_source_info: None, }, false, ); @@ -974,6 +975,7 @@ fn build_call_shim<'tcx>( }, call_source: CallSource::Misc, fn_span: span, + arg_move_source_info: None, }, false, ); diff --git a/compiler/rustc_mir_transform/src/shim/async_destructor_ctor.rs 
b/compiler/rustc_mir_transform/src/shim/async_destructor_ctor.rs index 18d09473c191e..5b3f977ac9dda 100644 --- a/compiler/rustc_mir_transform/src/shim/async_destructor_ctor.rs +++ b/compiler/rustc_mir_transform/src/shim/async_destructor_ctor.rs @@ -386,6 +386,7 @@ fn build_adrop_for_adrop_shim<'tcx>( unwind: UnwindAction::Continue, call_source: CallSource::Misc, fn_span: span, + arg_move_source_info: None, }, }), false, @@ -409,6 +410,7 @@ fn build_adrop_for_adrop_shim<'tcx>( unwind: UnwindAction::Continue, call_source: CallSource::Misc, fn_span: span, + arg_move_source_info: None, }, }), false, diff --git a/compiler/rustc_monomorphize/src/mono_checks/move_check.rs b/compiler/rustc_monomorphize/src/mono_checks/move_check.rs index 0adf1b089b53e..9f6a440978488 100644 --- a/compiler/rustc_monomorphize/src/mono_checks/move_check.rs +++ b/compiler/rustc_monomorphize/src/mono_checks/move_check.rs @@ -35,7 +35,7 @@ impl<'tcx> MirVisitor<'tcx> for MoveCheckVisitor<'tcx> { fn visit_terminator(&mut self, terminator: &mir::Terminator<'tcx>, location: Location) { match terminator.kind { mir::TerminatorKind::Call { ref func, ref args, ref fn_span, .. } - | mir::TerminatorKind::TailCall { ref func, ref args, ref fn_span } => { + | mir::TerminatorKind::TailCall { ref func, ref args, ref fn_span, .. } => { let callee_ty = func.ty(self.body, self.tcx); let callee_ty = self.monomorphize(callee_ty); self.check_fn_args_move_size(callee_ty, args, *fn_span, location); diff --git a/compiler/rustc_public/src/unstable/convert/stable/mir.rs b/compiler/rustc_public/src/unstable/convert/stable/mir.rs index b10af6526ead5..4a23d0bf04737 100644 --- a/compiler/rustc_public/src/unstable/convert/stable/mir.rs +++ b/compiler/rustc_public/src/unstable/convert/stable/mir.rs @@ -748,22 +748,16 @@ impl<'tcx> Stable<'tcx> for mir::TerminatorKind<'tcx> { target: target.as_usize(), unwind: unwind.stable(tables, cx), }, - mir::TerminatorKind::Call { - func, - args, - destination, - target, - unwind, - call_source: _, - fn_span: _, - } => TerminatorKind::Call { - func: func.stable(tables, cx), - args: args.iter().map(|arg| arg.node.stable(tables, cx)).collect(), - destination: destination.stable(tables, cx), - target: target.map(|t| t.as_usize()), - unwind: unwind.stable(tables, cx), - }, - mir::TerminatorKind::TailCall { func: _, args: _, fn_span: _ } => todo!(), + mir::TerminatorKind::Call { func, args, destination, target, unwind, .. } => { + TerminatorKind::Call { + func: func.stable(tables, cx), + args: args.iter().map(|arg| arg.node.stable(tables, cx)).collect(), + destination: destination.stable(tables, cx), + target: target.map(|t| t.as_usize()), + unwind: unwind.stable(tables, cx), + } + } + mir::TerminatorKind::TailCall { .. } => todo!(), mir::TerminatorKind::Assert { cond, expected, msg, target, unwind } => { TerminatorKind::Assert { cond: cond.stable(tables, cx), diff --git a/src/tools/clippy/clippy_utils/src/qualify_min_const_fn.rs b/src/tools/clippy/clippy_utils/src/qualify_min_const_fn.rs index 2bda6d50373cd..21cb2570397e6 100644 --- a/src/tools/clippy/clippy_utils/src/qualify_min_const_fn.rs +++ b/src/tools/clippy/clippy_utils/src/qualify_min_const_fn.rs @@ -349,16 +349,7 @@ fn check_terminator<'tcx>( TerminatorKind::CoroutineDrop | TerminatorKind::Yield { .. 
} => { Err((span, "const fn coroutines are unstable".into())) }, - TerminatorKind::Call { - func, - args, - call_source: _, - destination: _, - target: _, - unwind: _, - fn_span: _, - } - | TerminatorKind::TailCall { func, args, fn_span: _ } => { + TerminatorKind::Call { func, args, .. } | TerminatorKind::TailCall { func, args, .. } => { let fn_ty = func.ty(body, cx.tcx); if let ty::FnDef(fn_def_id, fn_substs) = fn_ty.kind() { // FIXME: when analyzing a function with generic parameters, we may not have enough information to