@@ -206,7 +206,6 @@ struct IfThenScope {
 pub(crate) enum BreakableTarget {
     Continue(region::Scope),
     Break(region::Scope),
-    ConstContinue(region::Scope),
     Return,
 }
 
@@ -750,168 +749,6 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
                 let break_index = get_scope_index(scope);
                 (break_index, None)
             }
-            BreakableTarget::ConstContinue(scope) => {
-                let Some(value) = value else {
-                    span_bug!(span, "#[const_continue] must break with a value")
-                };
-
-                // A break can only break out of a scope, so the value should be a scope
-                let rustc_middle::thir::ExprKind::Scope { value, .. } = self.thir[value].kind
-                else {
-                    span_bug!(span, "break value must be a scope")
-                };
-
-                let break_index = self
-                    .scopes
-                    .const_continuable_scopes
-                    .iter()
-                    .rposition(|const_continuable_scope| {
-                        const_continuable_scope.region_scope == scope
-                    })
-                    .unwrap_or_else(|| {
-                        span_bug!(span, "no enclosing const-continuable scope found")
-                    });
-
-                let scope = &self.scopes.const_continuable_scopes[break_index];
-
-                let state_ty = self.local_decls[scope.state_place.as_local().unwrap()].ty;
-                let discriminant_ty = match state_ty {
-                    ty if ty.is_enum() => ty.discriminant_ty(self.tcx),
-                    ty if ty.is_integral() => ty,
-                    _ => todo!(),
-                };
-
-                let rvalue = match state_ty {
-                    ty if ty.is_enum() => Rvalue::Discriminant(scope.state_place),
-                    ty if ty.is_integral() => Rvalue::Use(Operand::Copy(scope.state_place)),
-                    _ => todo!(),
-                };
-
-                // the PatCtxt is normally used in pattern exhaustiveness checking, but reused here
-                // because it performs normalization and const evaluation.
-                let dropless_arena = rustc_arena::DroplessArena::default();
-                let typeck_results = self.tcx.typeck(self.def_id);
-                let cx = RustcPatCtxt {
-                    tcx: self.tcx,
-                    typeck_results,
-                    module: self.tcx.parent_module(self.hir_id).to_def_id(),
-                    // FIXME(#132279): We're in a body, should handle opaques.
-                    typing_env: rustc_middle::ty::TypingEnv::non_body_analysis(
-                        self.tcx,
-                        self.def_id,
-                    ),
-                    dropless_arena: &dropless_arena,
-                    match_lint_level: self.hir_id,
-                    whole_match_span: Some(rustc_span::Span::default()),
-                    scrut_span: rustc_span::Span::default(),
-                    refutable: true,
-                    known_valid_scrutinee: true,
-                };
-
-                let Some(real_target) =
-                    self.static_pattern_match(&cx, value, &*scope.arms, &scope.built_match_tree)
-                else {
-                    self.tcx.dcx().emit_fatal(ConstContinueUnknownJumpTarget { span })
-                };
-
-                self.block_context.push(BlockFrame::SubExpr);
-                let state_place = scope.state_place;
-                block = self.expr_into_dest(state_place, block, value).into_block();
-                self.block_context.pop();
-
-                let discr = self.temp(discriminant_ty, source_info.span);
-                let scope_index = self.scopes.scope_index(
-                    self.scopes.const_continuable_scopes[break_index].region_scope,
-                    span,
-                );
-                let scope = &mut self.scopes.const_continuable_scopes[break_index];
-                self.cfg.push_assign(block, source_info, discr, rvalue);
-                let drop_and_continue_block = self.cfg.start_new_block();
-                let imaginary_target = self.cfg.start_new_block();
-                self.cfg.terminate(
-                    block,
-                    source_info,
-                    TerminatorKind::FalseEdge {
-                        real_target: drop_and_continue_block,
-                        imaginary_target,
-                    },
-                );
-
-                let drops = &mut scope.break_drops;
-
-                let drop_idx = self.scopes.scopes[scope_index + 1..]
-                    .iter()
-                    .flat_map(|scope| &scope.drops)
-                    .fold(ROOT_NODE, |drop_idx, &drop| drops.add_drop(drop, drop_idx));
-
-                drops.add_entry_point(imaginary_target, drop_idx);
-
-                self.cfg.terminate(imaginary_target, source_info, TerminatorKind::UnwindResume);
-
-                // FIXME add to drop tree for loop_head
-
-                let region_scope = scope.region_scope;
-                let scope_index = self.scopes.scope_index(region_scope, span);
-                let mut drops = DropTree::new();
-
-                let drop_idx = self.scopes.scopes[scope_index + 1..]
-                    .iter()
-                    .flat_map(|scope| &scope.drops)
-                    .fold(ROOT_NODE, |drop_idx, &drop| drops.add_drop(drop, drop_idx));
-
-                drops.add_entry_point(drop_and_continue_block, drop_idx);
-
-                // `build_drop_trees` doesn't have access to our source_info, so we
-                // create a dummy terminator now. `TerminatorKind::UnwindResume` is used
-                // because MIR type checking will panic if it hasn't been overwritten.
-                // (See `<ExitScopes as DropTreeBuilder>::link_entry_point`.)
-                self.cfg.terminate(
-                    drop_and_continue_block,
-                    source_info,
-                    TerminatorKind::UnwindResume,
-                );
-
-                {
-                    let this = &mut *self;
-                    let blocks = drops.build_mir::<ExitScopes>(&mut this.cfg, Some(real_target));
-                    //let is_coroutine = this.coroutine.is_some();
-
-                    /*// Link the exit drop tree to unwind drop tree.
-                    if drops.drops.iter().any(|drop_node| drop_node.data.kind == DropKind::Value) {
-                        let unwind_target = this.diverge_cleanup_target(region_scope, span);
-                        let mut unwind_indices = IndexVec::from_elem_n(unwind_target, 1);
-                        for (drop_idx, drop_node) in drops.drops.iter_enumerated().skip(1) {
-                            match drop_node.data.kind {
-                                DropKind::Storage | DropKind::ForLint => {
-                                    if is_coroutine {
-                                        let unwind_drop = this.scopes.unwind_drops.add_drop(
-                                            drop_node.data,
-                                            unwind_indices[drop_node.next],
-                                        );
-                                        unwind_indices.push(unwind_drop);
-                                    } else {
-                                        unwind_indices.push(unwind_indices[drop_node.next]);
-                                    }
-                                }
-                                DropKind::Value => {
-                                    let unwind_drop = this
-                                        .scopes
-                                        .unwind_drops
-                                        .add_drop(drop_node.data, unwind_indices[drop_node.next]);
-                                    this.scopes.unwind_drops.add_entry_point(
-                                        blocks[drop_idx].unwrap(),
-                                        unwind_indices[drop_node.next],
-                                    );
-                                    unwind_indices.push(unwind_drop);
-                                }
-                            }
-                        }
-                    }*/
-                    blocks[ROOT_NODE].map(BasicBlock::unit)
-                };
-
-                return self.cfg.start_new_block().unit();
-            }
         };
 
         match (destination, value) {
@@ -972,6 +809,158 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
         self.cfg.start_new_block().unit()
     }
 
+    /// Sets up the drops for jumping from `block` to `scope`.
+    pub(crate) fn break_const_continuable_scope(
+        &mut self,
+        mut block: BasicBlock,
+        value: ExprId,
+        scope: region::Scope,
+        source_info: SourceInfo,
+    ) -> BlockAnd<()> {
+        let span = source_info.span;
+
+        // A break can only break out of a scope, so the value should be a scope.
+        let rustc_middle::thir::ExprKind::Scope { value, .. } = self.thir[value].kind else {
+            span_bug!(span, "break value must be a scope")
+        };
+
+        let break_index = self
+            .scopes
+            .const_continuable_scopes
+            .iter()
+            .rposition(|const_continuable_scope| const_continuable_scope.region_scope == scope)
+            .unwrap_or_else(|| span_bug!(span, "no enclosing const-continuable scope found"));
+
+        let scope = &self.scopes.const_continuable_scopes[break_index];
+
+        let state_ty = self.local_decls[scope.state_place.as_local().unwrap()].ty;
+        let discriminant_ty = match state_ty {
+            ty if ty.is_enum() => ty.discriminant_ty(self.tcx),
+            ty if ty.is_integral() => ty,
+            _ => todo!(),
+        };
+
+        let rvalue = match state_ty {
+            ty if ty.is_enum() => Rvalue::Discriminant(scope.state_place),
+            ty if ty.is_integral() => Rvalue::Use(Operand::Copy(scope.state_place)),
+            _ => todo!(),
+        };
+
+        // the PatCtxt is normally used in pattern exhaustiveness checking, but reused here
+        // because it performs normalization and const evaluation.
+        let dropless_arena = rustc_arena::DroplessArena::default();
+        let typeck_results = self.tcx.typeck(self.def_id);
+        let cx = RustcPatCtxt {
+            tcx: self.tcx,
+            typeck_results,
+            module: self.tcx.parent_module(self.hir_id).to_def_id(),
+            // FIXME(#132279): We're in a body, should handle opaques.
+            typing_env: rustc_middle::ty::TypingEnv::non_body_analysis(self.tcx, self.def_id),
+            dropless_arena: &dropless_arena,
+            match_lint_level: self.hir_id,
+            whole_match_span: Some(rustc_span::Span::default()),
+            scrut_span: rustc_span::Span::default(),
+            refutable: true,
+            known_valid_scrutinee: true,
+        };
+
+        let Some(real_target) =
+            self.static_pattern_match(&cx, value, &*scope.arms, &scope.built_match_tree)
+        else {
+            self.tcx.dcx().emit_fatal(ConstContinueUnknownJumpTarget { span })
+        };
+
+        self.block_context.push(BlockFrame::SubExpr);
+        let state_place = scope.state_place;
+        block = self.expr_into_dest(state_place, block, value).into_block();
+        self.block_context.pop();
+
+        let discr = self.temp(discriminant_ty, source_info.span);
+        let scope_index = self
+            .scopes
+            .scope_index(self.scopes.const_continuable_scopes[break_index].region_scope, span);
+        let scope = &mut self.scopes.const_continuable_scopes[break_index];
+        self.cfg.push_assign(block, source_info, discr, rvalue);
+        let drop_and_continue_block = self.cfg.start_new_block();
+        let imaginary_target = self.cfg.start_new_block();
+        self.cfg.terminate(
+            block,
+            source_info,
+            TerminatorKind::FalseEdge { real_target: drop_and_continue_block, imaginary_target },
+        );
+
+        let drops = &mut scope.break_drops;
+
+        let drop_idx = self.scopes.scopes[scope_index + 1..]
+            .iter()
+            .flat_map(|scope| &scope.drops)
+            .fold(ROOT_NODE, |drop_idx, &drop| drops.add_drop(drop, drop_idx));
+
+        drops.add_entry_point(imaginary_target, drop_idx);
+
+        self.cfg.terminate(imaginary_target, source_info, TerminatorKind::UnwindResume);
+
+        // FIXME add to drop tree for loop_head
+
+        let region_scope = scope.region_scope;
+        let scope_index = self.scopes.scope_index(region_scope, span);
+        let mut drops = DropTree::new();
+
+        let drop_idx = self.scopes.scopes[scope_index + 1..]
+            .iter()
+            .flat_map(|scope| &scope.drops)
+            .fold(ROOT_NODE, |drop_idx, &drop| drops.add_drop(drop, drop_idx));
+
+        drops.add_entry_point(drop_and_continue_block, drop_idx);
+
+        // `build_drop_trees` doesn't have access to our source_info, so we
+        // create a dummy terminator now. `TerminatorKind::UnwindResume` is used
+        // because MIR type checking will panic if it hasn't been overwritten.
+        // (See `<ExitScopes as DropTreeBuilder>::link_entry_point`.)
+        self.cfg.terminate(drop_and_continue_block, source_info, TerminatorKind::UnwindResume);
+
+        {
+            let this = &mut *self;
+            let blocks = drops.build_mir::<ExitScopes>(&mut this.cfg, Some(real_target));
+            //let is_coroutine = this.coroutine.is_some();
+
+            /*// Link the exit drop tree to unwind drop tree.
+            if drops.drops.iter().any(|drop_node| drop_node.data.kind == DropKind::Value) {
+                let unwind_target = this.diverge_cleanup_target(region_scope, span);
+                let mut unwind_indices = IndexVec::from_elem_n(unwind_target, 1);
+                for (drop_idx, drop_node) in drops.drops.iter_enumerated().skip(1) {
+                    match drop_node.data.kind {
+                        DropKind::Storage | DropKind::ForLint => {
+                            if is_coroutine {
+                                let unwind_drop = this.scopes.unwind_drops.add_drop(
+                                    drop_node.data,
+                                    unwind_indices[drop_node.next],
+                                );
+                                unwind_indices.push(unwind_drop);
+                            } else {
+                                unwind_indices.push(unwind_indices[drop_node.next]);
+                            }
+                        }
+                        DropKind::Value => {
+                            let unwind_drop = this
+                                .scopes
+                                .unwind_drops
+                                .add_drop(drop_node.data, unwind_indices[drop_node.next]);
+                            this.scopes.unwind_drops.add_entry_point(
+                                blocks[drop_idx].unwrap(),
+                                unwind_indices[drop_node.next],
+                            );
+                            unwind_indices.push(unwind_drop);
+                        }
+                    }
+                }
+            }*/
+            blocks[ROOT_NODE].map(BasicBlock::unit)
+        };
+
+        return self.cfg.start_new_block().unit();
+    }
+
     /// Sets up the drops for breaking from `block` due to an `if` condition
     /// that turned out to be false.
     ///