@@ -97,6 +97,9 @@ pub struct FunctionCx<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> {
     /// A cold block is a block that is unlikely to be executed at runtime.
     cold_blocks: IndexVec<mir::BasicBlock, bool>,
 
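+    /// Cleanup blocks that do nothing but resume unwinding; codegen can skip them entirely.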
+    nop_landing_pads: DenseBitSet<mir::BasicBlock>,
+
     /// The location where each MIR arg/var/tmp/ret is stored. This is
     /// usually an `PlaceRef` representing an alloca, but not always:
     /// sometimes we can skip the alloca and just store the value
@@ -181,8 +184,15 @@ pub fn codegen_mir<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
 
     let mut mir = tcx.instance_mir(instance.def);
 
-    let fn_abi = cx.fn_abi_of_instance(instance, ty::List::empty());
-    debug!("fn_abi: {:?}", fn_abi);
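+    // Find the landing pads that do nothing but resume unwinding, so codegen can skip them.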
+    let nop_landing_pads = rustc_mir_transform::remove_noop_landing_pads::find_noop_landing_pads(
+        mir,
+        Some(rustc_mir_transform::remove_noop_landing_pads::ExtraInfo {
+            tcx,
+            instance,
+            typing_env: cx.typing_env(),
+        }),
+    );
 
     if tcx.features().ergonomic_clones() {
         let monomorphized_mir = instance.instantiate_mir_and_normalize_erasing_regions(
@@ -193,19 +203,24 @@ pub fn codegen_mir<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
         mir = tcx.arena.alloc(optimize_use_clone::<Bx>(cx, monomorphized_mir));
     }
 
+    let fn_abi = cx.fn_abi_of_instance(instance, ty::List::empty());
+    debug!("fn_abi: {:?}", fn_abi);
+
     let debug_context = cx.create_function_debug_context(instance, fn_abi, llfn, &mir);
 
     let start_llbb = Bx::append_block(cx, llfn, "start");
     let mut start_bx = Bx::build(cx, start_llbb);
 
-    if mir.basic_blocks.iter().any(|bb| {
-        bb.is_cleanup || matches!(bb.terminator().unwind(), Some(mir::UnwindAction::Terminate(_)))
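+    // A personality function is only needed for genuine (non-no-op) landing pads or terminating unwinds.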
+    if mir::traversal::mono_reachable(&mir, tcx, instance).any(|(bb, block)| {
+        (block.is_cleanup && !nop_landing_pads.contains(bb))
+            || matches!(block.terminator().unwind(), Some(mir::UnwindAction::Terminate(_)))
     }) {
         start_bx.set_personality_fn(cx.eh_personality());
     }
 
-    let cleanup_kinds =
-        base::wants_new_eh_instructions(tcx.sess).then(|| analyze::cleanup_kinds(&mir));
+    let cleanup_kinds = base::wants_new_eh_instructions(tcx.sess)
+        .then(|| analyze::cleanup_kinds(&mir, &nop_landing_pads));
 
     let cached_llbbs: IndexVec<mir::BasicBlock, CachedLlbb<Bx::BasicBlock>> =
         mir.basic_blocks
@@ -233,6 +248,7 @@ pub fn codegen_mir<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
         debug_context,
         per_local_var_debug_info: None,
         caller_location: None,
+        nop_landing_pads,
     };
 
     // It may seem like we should iterate over `required_consts` to ensure they all successfully
@@ -244,7 +260,36 @@ pub fn codegen_mir<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
         fx.compute_per_local_var_debug_info(&mut start_bx).unzip();
     fx.per_local_var_debug_info = per_local_var_debug_info;
 
-    let traversal_order = traversal::mono_reachable_reverse_postorder(mir, tcx, instance);
+    let mut traversal_order = traversal::mono_reachable_reverse_postorder(mir, tcx, instance);
+
+    // Filter out blocks that won't be codegen'd because of the `nop_landing_pads` optimization.
+    // FIXME: We might want to integrate the `nop_landing_pads` analysis into mono reachability.
+    {
+        let mut reachable = DenseBitSet::new_empty(mir.basic_blocks.len());
+        let mut to_visit = vec![mir::START_BLOCK];
+        while let Some(next) = to_visit.pop() {
+            if !reachable.insert(next) {
+                continue;
+            }
+
+            let block = &mir.basic_blocks[next];
+            if let Some(mir::UnwindAction::Cleanup(target)) = block.terminator().unwind()
+                && fx.nop_landing_pads.contains(*target)
+            {
+                // This edge will not be followed when we actually codegen, so skip generating it here.
+                //
+                // It's guaranteed that the cleanup block (`target`) can only ever be entered
+                // through an `UnwindAction::Cleanup(..)` edge, so skipping this edge cannot
+                // incorrectly filter out a block that is reachable some other way.
+                to_visit.extend(block.terminator().successors().filter(|s| s != target));
+            } else {
+                to_visit.extend(block.terminator().successors());
+            }
+        }
+
+        traversal_order.retain(|bb| reachable.contains(*bb));
+    }
+
     let memory_locals = analyze::non_ssa_locals(&fx, &traversal_order);
 
     // Allocate variable and temp allocas