@@ -116,7 +116,10 @@ pub struct Scope<'tcx> {
116116 /// The cache for drop chain on "generator drop" exit.
117117 cached_generator_drop : Option < BasicBlock > ,
118118
119- /// The cache for drop chain on "unwind" exit.
119+ /// The cache for drop chain on "unwind" exit. This block
120+ /// contains code to run the current drop and all the preceding
121+ /// drops (i.e., those having lower index in Drop’s Scope drop
122+ /// array)
120123 cached_unwind : CachedBlock ,
121124}
122125
@@ -133,21 +136,7 @@ struct DropData<'tcx> {
133136}
134137
135138#[ derive( Debug , Default , Clone , Copy ) ]
136- pub ( crate ) struct CachedBlock {
137- /// The cached block for the cleanups-on-diverge path. This block
138- /// contains code to run the current drop and all the preceding
139- /// drops (i.e., those having lower index in Drop’s Scope drop
140- /// array)
141- unwind : Option < BasicBlock > ,
142-
143- /// The cached block for unwinds during cleanups-on-generator-drop path
144- ///
145- /// This is split from the standard unwind path here to prevent drop
146- /// elaboration from creating drop flags that would have to be captured
147- /// by the generator. I'm not sure how important this optimization is,
148- /// but it is here.
149- generator_drop : Option < BasicBlock > ,
150- }
139+ pub ( crate ) struct CachedBlock ( Option < BasicBlock > ) ;
151140
152141#[ derive( Debug ) ]
153142pub ( crate ) enum DropKind {
@@ -173,24 +162,15 @@ pub struct BreakableScope<'tcx> {
173162
174163impl CachedBlock {
175164 fn invalidate ( & mut self ) {
176- self . generator_drop = None ;
177- self . unwind = None ;
165+ self . 0 = None ;
178166 }
179167
180- fn get ( & self , generator_drop : bool ) -> Option < BasicBlock > {
181- if generator_drop {
182- self . generator_drop
183- } else {
184- self . unwind
185- }
168+ fn get ( & self ) -> Option < BasicBlock > {
169+ self . 0
186170 }
187171
188- fn ref_mut ( & mut self , generator_drop : bool ) -> & mut Option < BasicBlock > {
189- if generator_drop {
190- & mut self . generator_drop
191- } else {
192- & mut self . unwind
193- }
172+ fn ref_mut ( & mut self ) -> & mut Option < BasicBlock > {
173+ & mut self . 0
194174 }
195175}
196176
@@ -370,7 +350,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
370350 assert_eq ! ( scope. region_scope, region_scope. 0 ) ;
371351
372352 let unwind_to = self . scopes . last ( ) . and_then ( |next_scope| {
373- next_scope. cached_unwind . get ( false )
353+ next_scope. cached_unwind . get ( )
374354 } ) . unwrap_or_else ( || self . resume_block ( ) ) ;
375355
376356 unpack ! ( block = build_scope_drops(
@@ -379,7 +359,6 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
379359 block,
380360 unwind_to,
381361 self . arg_count,
382- false ,
383362 ) ) ;
384363
385364 block. unit ( )
@@ -434,7 +413,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
434413 }
435414 } ;
436415
437- let unwind_to = next_scope. cached_unwind . get ( false ) . unwrap_or_else ( || {
416+ let unwind_to = next_scope. cached_unwind . get ( ) . unwrap_or_else ( || {
438417 debug_assert ! ( !may_panic, "cached block not present?" ) ;
439418 START_BLOCK
440419 } ) ;
@@ -445,7 +424,6 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
445424 block,
446425 unwind_to,
447426 self . arg_count,
448- false ,
449427 ) ) ;
450428
451429 scope = next_scope;
@@ -462,7 +440,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
462440 /// None indicates there’s no cleanup to do at this point.
463441 pub fn generator_drop_cleanup ( & mut self ) -> Option < BasicBlock > {
464442 // Fill in the cache for unwinds
465- self . diverge_cleanup_gen ( true ) ;
443+ self . diverge_cleanup_gen ( ) ;
466444
467445 let src_info = self . scopes [ 0 ] . source_info ( self . fn_span ) ;
468446 let resume_block = self . resume_block ( ) ;
@@ -484,7 +462,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
484462 } ;
485463
486464 let unwind_to = scopes. peek ( ) . as_ref ( ) . map ( |scope| {
487- scope. cached_unwind . get ( true ) . unwrap_or_else ( || {
465+ scope. cached_unwind . get ( ) . unwrap_or_else ( || {
488466 span_bug ! ( src_info. span, "cached block not present?" )
489467 } )
490468 } ) . unwrap_or ( resume_block) ;
@@ -495,7 +473,6 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
495473 block,
496474 unwind_to,
497475 self . arg_count,
498- true ,
499476 ) ) ;
500477 }
501478
@@ -748,7 +725,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
748725 /// This path terminates in Resume. Returns the start of the path.
749726 /// See module comment for more details.
750727 pub fn diverge_cleanup ( & mut self ) -> BasicBlock {
751- self . diverge_cleanup_gen ( false )
728+ self . diverge_cleanup_gen ( )
752729 }
753730
754731 fn resume_block ( & mut self ) -> BasicBlock {
@@ -767,7 +744,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
767744 }
768745 }
769746
770- fn diverge_cleanup_gen ( & mut self , generator_drop : bool ) -> BasicBlock {
747+ fn diverge_cleanup_gen ( & mut self ) -> BasicBlock {
771748 // Build up the drops in **reverse** order. The end result will
772749 // look like:
773750 //
@@ -781,15 +758,15 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
781758
782759 // Find the last cached block
783760 let ( mut target, first_uncached) = if let Some ( cached_index) = self . scopes . iter ( )
784- . rposition ( |scope| scope. cached_unwind . get ( generator_drop ) . is_some ( ) ) {
785- ( self . scopes [ cached_index] . cached_unwind . get ( generator_drop ) . unwrap ( ) , cached_index + 1 )
761+ . rposition ( |scope| scope. cached_unwind . get ( ) . is_some ( ) ) {
762+ ( self . scopes [ cached_index] . cached_unwind . get ( ) . unwrap ( ) , cached_index + 1 )
786763 } else {
787764 ( self . resume_block ( ) , 0 )
788765 } ;
789766
790767 for scope in self . scopes [ first_uncached..] . iter_mut ( ) {
791768 target = build_diverge_scope ( & mut self . cfg , scope. region_scope_span ,
792- scope, target, generator_drop ) ;
769+ scope, target) ;
793770 }
794771
795772 target
@@ -869,7 +846,6 @@ fn build_scope_drops<'tcx>(
869846 mut block : BasicBlock ,
870847 last_unwind_to : BasicBlock ,
871848 arg_count : usize ,
872- generator_drop : bool ,
873849) -> BlockAnd < ( ) > {
874850 debug ! ( "build_scope_drops({:?} -> {:?}" , block, scope) ;
875851
@@ -890,7 +866,7 @@ fn build_scope_drops<'tcx>(
890866
891867 let mut unwind_blocks = scope. drops . iter ( ) . rev ( ) . filter_map ( |drop_data| {
892868 if let DropKind :: Value { cached_block } = drop_data. kind {
893- Some ( cached_block. get ( generator_drop ) . unwrap_or_else ( || {
869+ Some ( cached_block. get ( ) . unwrap_or_else ( || {
894870 span_bug ! ( drop_data. span, "cached block not present?" )
895871 } ) )
896872 } else {
@@ -937,8 +913,7 @@ fn build_scope_drops<'tcx>(
937913fn build_diverge_scope ( cfg : & mut CFG < ' tcx > ,
938914 span : Span ,
939915 scope : & mut Scope < ' tcx > ,
940- mut target : BasicBlock ,
941- generator_drop : bool )
916+ mut target : BasicBlock )
942917 -> BasicBlock
943918{
944919 // Build up the drops in **reverse** order. The end result will
@@ -990,7 +965,7 @@ fn build_diverge_scope(cfg: &mut CFG<'tcx>,
990965 } ;
991966 }
992967 DropKind :: Value { ref mut cached_block } => {
993- let cached_block = cached_block. ref_mut ( generator_drop ) ;
968+ let cached_block = cached_block. ref_mut ( ) ;
994969 target = if let Some ( cached_block) = * cached_block {
995970 storage_deads. clear ( ) ;
996971 target_built_by_us = false ;
@@ -1013,7 +988,7 @@ fn build_diverge_scope(cfg: &mut CFG<'tcx>,
1013988 } ;
1014989 }
1015990 push_storage_deads ( cfg, & mut target, & mut storage_deads, target_built_by_us, source_scope) ;
1016- * scope. cached_unwind . ref_mut ( generator_drop ) = Some ( target) ;
991+ * scope. cached_unwind . ref_mut ( ) = Some ( target) ;
1017992
1018993 assert ! ( storage_deads. is_empty( ) ) ;
1019994 debug ! ( "build_diverge_scope({:?}, {:?}) = {:?}" , scope, span, target) ;