@@ -97,6 +97,8 @@ pub struct FunctionCx<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> {
     /// A cold block is a block that is unlikely to be executed at runtime.
     cold_blocks: IndexVec<mir::BasicBlock, bool>,
 
+    nop_landing_pads: DenseBitSet<mir::BasicBlock>,
+
     /// The location where each MIR arg/var/tmp/ret is stored. This is
     /// usually an `PlaceRef` representing an alloca, but not always:
     /// sometimes we can skip the alloca and just store the value
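For context, and not part of the diff itself: `DenseBitSet` is the dense bit-set type from `rustc_index`, indexed here by `mir::BasicBlock`, and the new field records which blocks were classified as no-op landing pads by the analysis added below. A minimal sketch of the two operations the rest of the patch relies on, using a plain `Vec<bool>` stand-in rather than the real `rustc_index` type:

    // Sketch only: a Vec<bool>-backed stand-in for a dense bit set, showing the
    // two operations this patch uses: `insert` (which reports whether the
    // element was newly added) and `contains`.
    struct ToyBitSet {
        bits: Vec<bool>,
    }

    impl ToyBitSet {
        fn new_empty(domain_size: usize) -> Self {
            ToyBitSet { bits: vec![false; domain_size] }
        }

        /// Returns true iff the element was not already present.
        fn insert(&mut self, idx: usize) -> bool {
            let was_set = self.bits[idx];
            self.bits[idx] = true;
            !was_set
        }

        fn contains(&self, idx: usize) -> bool {
            self.bits[idx]
        }
    }

    fn main() {
        let mut set = ToyBitSet::new_empty(4);
        assert!(set.insert(2)); // newly inserted
        assert!(!set.insert(2)); // already present
        assert!(set.contains(2) && !set.contains(0));
    }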
@@ -176,8 +178,14 @@ pub fn codegen_mir<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
 
     let mut mir = tcx.instance_mir(instance.def);
 
-    let fn_abi = cx.fn_abi_of_instance(instance, ty::List::empty());
-    debug!("fn_abi: {:?}", fn_abi);
+    let nop_landing_pads = rustc_mir_transform::remove_noop_landing_pads::find_noop_landing_pads(
+        mir,
+        Some(rustc_mir_transform::remove_noop_landing_pads::ExtraInfo {
+            tcx,
+            instance,
+            typing_env: cx.typing_env(),
+        }),
+    );
 
     if tcx.features().ergonomic_clones() {
         let monomorphized_mir = instance.instantiate_mir_and_normalize_erasing_regions(
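For readers unfamiliar with the pass: a no-op landing pad is a cleanup block that performs no observable work before unwinding continues (for example, only `StorageDead`/`Nop` statements), so codegen can skip emitting it and the unwind edges into it. The sketch below is a simplified model of the kind of fixpoint such an analysis computes; it is not the real `find_noop_landing_pads`, which operates on MIR and, per this diff, optionally takes an `ExtraInfo` so it can answer monomorphization-dependent questions such as whether a drop is a no-op for the concrete type:

    // Toy model: each cleanup block either performs real work or not, and may
    // forward to another cleanup block on the unwind path. A block is a no-op
    // landing pad iff it does nothing itself AND everything it forwards to is
    // also a no-op.
    struct CleanupBlock {
        has_side_effects: bool, // e.g. runs non-trivial drop glue
        next: Option<usize>,    // next cleanup block on the unwind path, if any
    }

    fn find_noop_landing_pads(blocks: &[CleanupBlock]) -> Vec<bool> {
        let mut is_nop = vec![false; blocks.len()];
        // Iterate to a fixpoint; unwind paths are acyclic, so this terminates.
        let mut changed = true;
        while changed {
            changed = false;
            for (i, b) in blocks.iter().enumerate() {
                let nop = !b.has_side_effects && b.next.map_or(true, |n| is_nop[n]);
                if nop && !is_nop[i] {
                    is_nop[i] = true;
                    changed = true;
                }
            }
        }
        is_nop
    }

    fn main() {
        let blocks = [
            CleanupBlock { has_side_effects: false, next: None },   // bare resume: no-op
            CleanupBlock { has_side_effects: true, next: Some(0) }, // real drop glue: kept
        ];
        assert_eq!(find_noop_landing_pads(&blocks), vec![true, false]);
    }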
@@ -188,19 +196,23 @@ pub fn codegen_mir<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
         mir = tcx.arena.alloc(optimize_use_clone::<Bx>(cx, monomorphized_mir));
     }
 
+    let fn_abi = cx.fn_abi_of_instance(instance, ty::List::empty());
+    debug!("fn_abi: {:?}", fn_abi);
+
     let debug_context = cx.create_function_debug_context(instance, fn_abi, llfn, &mir);
 
     let start_llbb = Bx::append_block(cx, llfn, "start");
     let mut start_bx = Bx::build(cx, start_llbb);
 
-    if mir.basic_blocks.iter().any(|bb| {
-        bb.is_cleanup || matches!(bb.terminator().unwind(), Some(mir::UnwindAction::Terminate(_)))
+    if mir::traversal::mono_reachable(&mir, tcx, instance).any(|(bb, block)| {
+        (block.is_cleanup && !nop_landing_pads.contains(bb))
+            || matches!(block.terminator().unwind(), Some(mir::UnwindAction::Terminate(_)))
     }) {
         start_bx.set_personality_fn(cx.eh_personality());
     }
 
-    let cleanup_kinds =
-        base::wants_new_eh_instructions(tcx.sess).then(|| analyze::cleanup_kinds(&mir));
+    let cleanup_kinds = base::wants_new_eh_instructions(tcx.sess)
+        .then(|| analyze::cleanup_kinds(&mir, &nop_landing_pads));
 
     let cached_llbbs: IndexVec<mir::BasicBlock, CachedLlbb<Bx::BasicBlock>> =
         mir.basic_blocks
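The personality-function change above is the monomorphization-aware part: whether any landing pad does real work can depend on the concrete type arguments, so the check now walks only blocks reachable in this mono item (`mir::traversal::mono_reachable`) and ignores cleanup blocks already known to be no-ops. A hypothetical illustration, with names not taken from the diff:

    fn may_unwind() {
        // stand-in for any call that might panic at runtime
    }

    // For T = i32 the unwind-path drops are no-ops, so every landing pad in
    // that instantiation is a no-op and, with this patch, no personality
    // function needs to be attached. For T = String the cleanup is real drop
    // glue, so the personality function is still set.
    fn take_first<T>(a: T, b: T) -> T {
        may_unwind(); // if this unwound, `a` and `b` would be dropped on the unwind path
        drop(b);
        a
    }

    fn main() {
        let _ = take_first(1i32, 2i32);
        let _ = take_first(String::from("x"), String::from("y"));
    }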
@@ -228,6 +240,7 @@ pub fn codegen_mir<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
         debug_context,
         per_local_var_debug_info: None,
         caller_location: None,
+        nop_landing_pads,
     };
 
     // It may seem like we should iterate over `required_consts` to ensure they all successfully
@@ -239,7 +252,36 @@ pub fn codegen_mir<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
         fx.compute_per_local_var_debug_info(&mut start_bx).unzip();
     fx.per_local_var_debug_info = per_local_var_debug_info;
 
-    let traversal_order = traversal::mono_reachable_reverse_postorder(mir, tcx, instance);
+    let mut traversal_order = traversal::mono_reachable_reverse_postorder(mir, tcx, instance);
+
+    // Filter out blocks that won't be codegen'd because of the nop_landing_pads optimization.
+    // FIXME: We might want to integrate the nop_landing_pads analysis into mono reachability.
+    {
+        let mut reachable = DenseBitSet::new_empty(mir.basic_blocks.len());
+        let mut to_visit = vec![mir::START_BLOCK];
+        while let Some(next) = to_visit.pop() {
+            if !reachable.insert(next) {
+                continue;
+            }
+
+            let block = &mir.basic_blocks[next];
+            if let Some(mir::UnwindAction::Cleanup(target)) = block.terminator().unwind()
+                && fx.nop_landing_pads.contains(*target)
+            {
+                // This edge will not be followed when we actually codegen, so skip generating it here.
+                //
+                // It's guaranteed that the cleanup block (`target`) occurs only in
+                // UnwindAction::Cleanup(...) -- i.e., we can't incorrectly filter too much here --
+                // because cleanup transitions must happen via UnwindAction::Cleanup.
+                to_visit.extend(block.terminator().successors().filter(|s| s != target));
+            } else {
+                to_visit.extend(block.terminator().successors());
+            }
+        }
+
+        traversal_order.retain(|bb| reachable.contains(*bb));
+    }
+
     let memory_locals = analyze::non_ssa_locals(&fx, &traversal_order);
 
     // Allocate variable and temp allocas
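A stand-alone sketch of the filtering step above, under the same assumptions: start from the entry block, follow every successor edge except unwind edges that lead into a known no-op landing pad, and keep only the blocks reached. The array-based CFG and the function name are illustrative, not compiler APIs:

    // Toy version of the block-filtering loop: which blocks remain reachable
    // from the start block once unwind edges into known no-op landing pads are
    // ignored?
    fn reachable_skipping_nop_pads(
        successors: &[Vec<usize>],      // per-block successor lists (incl. the cleanup edge)
        cleanup_edge: &[Option<usize>], // per-block UnwindAction::Cleanup target, if any
        nop_landing_pads: &[bool],      // analysis result, indexed by block
    ) -> Vec<bool> {
        let mut reachable = vec![false; successors.len()];
        let mut to_visit = vec![0]; // block 0 plays the role of START_BLOCK
        while let Some(next) = to_visit.pop() {
            if reachable[next] {
                continue;
            }
            reachable[next] = true;
            match cleanup_edge[next] {
                // Skip the unwind edge when it only leads to a no-op landing
                // pad; codegen will not emit that edge either.
                Some(pad) if nop_landing_pads[pad] => {
                    to_visit.extend(successors[next].iter().copied().filter(|&s| s != pad))
                }
                _ => to_visit.extend(successors[next].iter().copied()),
            }
        }
        reachable
    }

    fn main() {
        // Block 0 branches to block 1 and has a cleanup edge to block 2,
        // which is a no-op landing pad, so block 2 drops out entirely.
        let successors = vec![vec![1, 2], vec![], vec![]];
        let cleanup_edge = vec![Some(2), None, None];
        let nop_pads = vec![false, false, true];
        assert_eq!(
            reachable_skipping_nop_pads(&successors, &cleanup_edge, &nop_pads),
            vec![true, true, false]
        );
    }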