@@ -19,13 +19,14 @@ use std::hash::Hash;
 use std::marker::PhantomData;
 
 use derive_where::derive_where;
-use rustc_index::{Idx, IndexVec};
 #[cfg(feature = "nightly")]
 use rustc_macros::{Decodable_NoContext, Encodable_NoContext, HashStable_NoContext};
 use tracing::debug;
 
 use crate::data_structures::HashMap;
 
+mod stack;
+use stack::{Stack, StackDepth, StackEntry};
 mod global_cache;
 use global_cache::CacheData;
 pub use global_cache::GlobalCache;
@@ -225,9 +226,9 @@ impl AvailableDepth {
     /// in case there is exponential blowup.
     fn allowed_depth_for_nested<D: Delegate>(
         root_depth: AvailableDepth,
-        stack: &IndexVec<StackDepth, StackEntry<D::Cx>>,
+        stack: &Stack<D::Cx>,
     ) -> Option<AvailableDepth> {
-        if let Some(last) = stack.raw.last() {
+        if let Some(last) = stack.last() {
             if last.available_depth.0 == 0 {
                 return None;
             }
@@ -433,50 +434,6 @@ impl<X: Cx> NestedGoals<X> {
     }
 }
 
-rustc_index::newtype_index! {
-    #[orderable]
-    #[gate_rustc_only]
-    pub struct StackDepth {}
-}
-
-/// Stack entries of the evaluation stack. Its fields tend to be lazily updated
-/// when popping a child goal or completely immutable.
-#[derive_where(Debug; X: Cx)]
-struct StackEntry<X: Cx> {
-    input: X::Input,
-
-    /// Whether proving this goal is a coinductive step.
-    ///
-    /// This is used when encountering a trait solver cycle to
-    /// decide the initial provisional result of the cycle.
-    step_kind_from_parent: PathKind,
-
-    /// The available depth of a given goal, immutable.
-    available_depth: AvailableDepth,
-
-    /// The maximum depth reached by this stack entry, only up-to-date
-    /// for the top of the stack and lazily updated for the rest.
-    reached_depth: StackDepth,
-
-    /// All cycle heads this goal depends on. Lazily updated and only
-    /// up-to-date for the top of the stack.
-    heads: CycleHeads,
-
-    /// Whether evaluating this goal encountered overflow. Lazily updated.
-    encountered_overflow: bool,
-
-    /// Whether this goal has been used as the root of a cycle. This gets
-    /// eagerly updated when encountering a cycle.
-    has_been_used: Option<UsageKind>,
-
-    /// The nested goals of this goal, see the doc comment of the type.
-    nested_goals: NestedGoals<X>,
-
-    /// Starts out as `None` and gets set when rerunning this
-    /// goal in case we encounter a cycle.
-    provisional_result: Option<X::Result>,
-}
-
 /// A provisional result of an already computed goal which depends on other
 /// goals still on the stack.
 #[derive_where(Debug; X: Cx)]
@@ -498,7 +455,7 @@ pub struct SearchGraph<D: Delegate<Cx = X>, X: Cx = <D as Delegate>::Cx> {
     /// The stack of goals currently being computed.
     ///
     /// An element is *deeper* in the stack if its index is *lower*.
-    stack: IndexVec<StackDepth, StackEntry<X>>,
+    stack: Stack<X>,
     /// The provisional cache contains entries for already computed goals which
     /// still depend on goals higher-up in the stack. We don't move them to the
     /// global cache and track them locally instead. A provisional cache entry
@@ -537,16 +494,16 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> {
     /// and using existing global cache entries to make sure they
     /// have the same impact on the remaining evaluation.
     fn update_parent_goal(
-        stack: &mut IndexVec<StackDepth, StackEntry<X>>,
+        stack: &mut Stack<X>,
         step_kind_from_parent: PathKind,
-        reached_depth: StackDepth,
+        required_depth_for_nested: usize,
         heads: &CycleHeads,
         encountered_overflow: bool,
         context: UpdateParentGoalCtxt<'_, X>,
     ) {
         if let Some(parent_index) = stack.last_index() {
             let parent = &mut stack[parent_index];
-            parent.reached_depth = parent.reached_depth.max(reached_depth);
+            parent.required_depth = parent.required_depth.max(required_depth_for_nested + 1);
             parent.encountered_overflow |= encountered_overflow;
 
             parent.heads.extend_from_child(parent_index, step_kind_from_parent, heads);
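
// The hunk above changes the depth bookkeeping from absolute to relative:
// instead of each `StackEntry` carrying an absolute `reached_depth: StackDepth`,
// it now carries `required_depth: usize`, i.e. how much depth its nested goals
// needed below it, and a parent is bumped via `required_depth_for_nested + 1`.
// The standalone snippet below works through the arithmetic for a made-up
// three-goal stack, under the assumption that the refactor is meant to be
// behavior-preserving; it is illustration only, not rustc code.
fn main() {
    // Stack A -> B -> C, with C at StackDepth 2 and no nested goals of its own.
    //
    // Old scheme: C's absolute `reached_depth` is its own index. After popping C,
    // two entries (A and B) remain, and the global cache stored
    // `additional_depth = reached_depth - stack.len()`.
    let c_reached_depth: usize = 2;
    let stack_len_after_popping_c: usize = 2;
    let additional_depth = c_reached_depth - stack_len_after_popping_c;

    // New scheme: C simply records `required_depth = 0` relative to itself,
    // and that value is what later ends up in the global cache.
    let c_required_depth: usize = 0;
    assert_eq!(additional_depth, c_required_depth);

    // When C is popped, its parent B is updated with `required_depth_for_nested + 1`:
    // B needs at least one level of available depth below itself to re-run C.
    let b_required_depth = 0usize.max(c_required_depth + 1);
    assert_eq!(b_required_depth, 1);
    println!("relative and absolute depth bookkeeping agree");
}
// This is also why `insert_global_cache` further down can pass
// `final_entry.required_depth` directly instead of recomputing
// `final_entry.reached_depth.as_usize() - self.stack.len()`.
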
@@ -588,13 +545,11 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> {
     /// the stack which completes the cycle. Thus, given an inductive step AB which then cycles
     /// coinductively with A, we need to treat this cycle as coinductive.
     fn cycle_path_kind(
-        stack: &IndexVec<StackDepth, StackEntry<X>>,
+        stack: &Stack<X>,
         step_kind_to_head: PathKind,
         head: StackDepth,
     ) -> PathKind {
-        stack.raw[head.index() + 1..]
-            .iter()
-            .fold(step_kind_to_head, |curr, entry| curr.extend(entry.step_kind_from_parent))
+        stack.cycle_step_kinds(head).fold(step_kind_to_head, |curr, step| curr.extend(step))
     }
 
     /// Probably the most involved method of the whole solver.
@@ -656,20 +611,18 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> {
             return result;
         }
 
-        // Unfortunate, it looks like we actually have to compute this goalrar.
-        let depth = self.stack.next_index();
-        let entry = StackEntry {
+        // Unfortunate, it looks like we actually have to compute this goal.
+        self.stack.push(StackEntry {
             input,
             step_kind_from_parent,
             available_depth,
-            reached_depth: depth,
+            required_depth: 0,
             heads: Default::default(),
             encountered_overflow: false,
             has_been_used: None,
             nested_goals: Default::default(),
             provisional_result: None,
-        };
-        assert_eq!(self.stack.push(entry), depth);
+        });
 
         // This is for global caching, so we properly track query dependencies.
         // Everything that affects the `result` should be performed within this
@@ -686,7 +639,7 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> {
             Self::update_parent_goal(
                 &mut self.stack,
                 final_entry.step_kind_from_parent,
-                final_entry.reached_depth,
+                final_entry.required_depth,
                 &final_entry.heads,
                 final_entry.encountered_overflow,
                 UpdateParentGoalCtxt::Ordinary(&final_entry.nested_goals),
@@ -700,7 +653,7 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> {
                 // the global cache.
                 assert_eq!(result, expected, "input={input:?}");
             } else if D::inspect_is_noop(inspect) {
-                self.insert_global_cache(cx, input, final_entry, result, dep_node)
+                self.insert_global_cache(cx, final_entry, result, dep_node)
             }
         } else if D::ENABLE_PROVISIONAL_CACHE {
             debug_assert!(validate_cache.is_none(), "unexpected non-root: {input:?}");
@@ -728,7 +681,7 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> {
         input: X::Input,
         inspect: &mut D::ProofTreeBuilder,
     ) -> X::Result {
-        if let Some(last) = self.stack.raw.last_mut() {
+        if let Some(last) = self.stack.last_mut() {
             last.encountered_overflow = true;
             // If computing a goal `B` depends on another goal `A` and
             // `A` has a nested goal which overflows, then computing `B`
@@ -859,7 +812,7 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> {
                 // apply provisional cache entries which encountered overflow once the
                 // current goal is already part of the same cycle. This check could be
                 // improved but seems to be good enough for now.
-                let last = self.stack.raw.last().unwrap();
+                let last = self.stack.last().unwrap();
                 if last.heads.opt_lowest_cycle_head().is_none_or(|lowest| lowest > head) {
                     continue;
                 }
@@ -868,14 +821,10 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> {
             // A provisional cache entry is only valid if the current path from its
             // highest cycle head to the goal is the same.
             if path_from_head == Self::cycle_path_kind(&self.stack, step_kind_from_parent, head) {
-                // While we don't have to track the full depth of the provisional cache entry,
-                // we do have to increment the required depth by one as we'd have already failed
-                // with overflow otherwise
-                let next_index = self.stack.next_index();
                 Self::update_parent_goal(
                     &mut self.stack,
                     step_kind_from_parent,
-                    next_index,
+                    0,
                     heads,
                     encountered_overflow,
                     UpdateParentGoalCtxt::ProvisionalCacheHit,
@@ -893,7 +842,7 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> {
     /// evaluating this entry would not have ended up depending on either a goal
     /// already on the stack or a provisional cache entry.
     fn candidate_is_applicable(
-        stack: &IndexVec<StackDepth, StackEntry<X>>,
+        stack: &Stack<X>,
         step_kind_from_parent: PathKind,
         provisional_cache: &HashMap<X::Input, Vec<ProvisionalCacheEntry<X>>>,
         nested_goals: &NestedGoals<X>,
@@ -991,7 +940,7 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> {
         available_depth: AvailableDepth,
     ) -> Option<X::Result> {
         cx.with_global_cache(|cache| {
-            let CacheData { result, additional_depth, encountered_overflow, nested_goals } = cache
+            let CacheData { result, required_depth, encountered_overflow, nested_goals } = cache
                 .get(cx, input, available_depth, |nested_goals| {
                     Self::candidate_is_applicable(
                         &self.stack,
@@ -1001,23 +950,19 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> {
                     )
                 })?;
 
-            // Update the reached depth of the current goal to make sure
-            // its state is the same regardless of whether we've used the
-            // global cache or not.
-            let reached_depth = self.stack.next_index().plus(additional_depth);
             // We don't move cycle participants to the global cache, so the
             // cycle heads are always empty.
             let heads = Default::default();
             Self::update_parent_goal(
                 &mut self.stack,
                 step_kind_from_parent,
-                reached_depth,
+                required_depth,
                 &heads,
                 encountered_overflow,
                 UpdateParentGoalCtxt::Ordinary(nested_goals),
             );
 
-            debug!(?additional_depth, "global cache hit");
+            debug!(?required_depth, "global cache hit");
             Some(result)
         })
     }
@@ -1028,7 +973,7 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> {
         input: X::Input,
         step_kind_from_parent: PathKind,
     ) -> Option<X::Result> {
-        let (head, _stack_entry) = self.stack.iter_enumerated().find(|(_, e)| e.input == input)?;
+        let head = self.stack.find(input)?;
         // We have a nested goal which directly relies on a goal deeper in the stack.
         //
         // We start by tagging all cycle participants, as that's necessary for caching.
@@ -1043,10 +988,9 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> {
 
         // Subtle: when encountering a cyclic goal, we still first checked for overflow,
         // so we have to update the reached depth.
-        let next_index = self.stack.next_index();
         let last_index = self.stack.last_index().unwrap();
         let last = &mut self.stack[last_index];
-        last.reached_depth = last.reached_depth.max(next_index);
+        last.required_depth = last.required_depth.max(1);
 
         last.nested_goals.insert(input, step_kind_from_parent.into());
         last.nested_goals.insert(last.input, PathsToNested::EMPTY);
@@ -1095,7 +1039,7 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> {
         let mut i = 0;
         loop {
             let result = evaluate_goal(self, inspect);
-            let stack_entry = self.stack.pop().unwrap();
+            let stack_entry = self.stack.pop();
             debug_assert_eq!(stack_entry.input, input);
 
             // If the current goal is not the root of a cycle, we are done.
@@ -1176,20 +1120,18 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> {
     fn insert_global_cache(
         &mut self,
         cx: X,
-        input: X::Input,
         final_entry: StackEntry<X>,
         result: X::Result,
         dep_node: X::DepNodeIndex,
     ) {
-        let additional_depth = final_entry.reached_depth.as_usize() - self.stack.len();
         debug!(?final_entry, ?result, "insert global cache");
         cx.with_global_cache(|cache| {
             cache.insert(
                 cx,
-                input,
+                final_entry.input,
                 result,
                 dep_node,
-                additional_depth,
+                final_entry.required_depth,
                 final_entry.encountered_overflow,
                 final_entry.nested_goals,
             )
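
// The new `stack` submodule added by this commit is not part of the hunks shown
// above. Going only by the call sites in this diff (`push`, `pop`, `last`,
// `last_mut`, `last_index`, `find`, `cycle_step_kinds`, and indexing by
// `StackDepth`), a rough, self-contained sketch of the surface it has to expose
// could look like the following. This is an inferred approximation using a plain
// `Vec` and placeholder types, not the actual `rustc_type_ir` module, which is
// built on `rustc_index::IndexVec` and the solver's own `PathKind`, `CycleHeads`,
// and `NestedGoals` types.

#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub struct StackDepth(usize);

/// Placeholder for the solver's `PathKind`.
#[derive(Clone, Copy, Debug)]
pub enum PathKind {
    Inductive,
    Coinductive,
}

/// Trimmed-down stand-in for the real `StackEntry<X>`; only the fields needed
/// by this sketch are present.
#[derive(Debug)]
pub struct StackEntry<I> {
    pub input: I,
    pub step_kind_from_parent: PathKind,
    /// Depth required by this entry's nested goals, relative to the entry
    /// itself (replaces the old absolute `reached_depth`).
    pub required_depth: usize,
}

#[derive(Debug)]
pub struct Stack<I> {
    entries: Vec<StackEntry<I>>,
}

impl<I: PartialEq> Stack<I> {
    pub fn new() -> Self {
        Stack { entries: Vec::new() }
    }

    pub fn push(&mut self, entry: StackEntry<I>) {
        self.entries.push(entry);
    }

    pub fn pop(&mut self) -> StackEntry<I> {
        self.entries.pop().expect("pop on an empty stack")
    }

    pub fn last(&self) -> Option<&StackEntry<I>> {
        self.entries.last()
    }

    pub fn last_mut(&mut self) -> Option<&mut StackEntry<I>> {
        self.entries.last_mut()
    }

    pub fn last_index(&self) -> Option<StackDepth> {
        self.entries.len().checked_sub(1).map(StackDepth)
    }

    /// Returns the depth of the entry with the given input, if it is on the stack.
    pub fn find(&self, input: &I) -> Option<StackDepth> {
        self.entries.iter().position(|e| &e.input == input).map(StackDepth)
    }

    /// The `step_kind_from_parent` of every entry strictly above `head`, i.e.
    /// the steps which make up a cycle starting at `head`.
    pub fn cycle_step_kinds(&self, head: StackDepth) -> impl Iterator<Item = PathKind> + '_ {
        self.entries[head.0 + 1..].iter().map(|e| e.step_kind_from_parent)
    }
}

impl<I> std::ops::Index<StackDepth> for Stack<I> {
    type Output = StackEntry<I>;
    fn index(&self, depth: StackDepth) -> &StackEntry<I> {
        &self.entries[depth.0]
    }
}

impl<I> std::ops::IndexMut<StackDepth> for Stack<I> {
    fn index_mut(&mut self, depth: StackDepth) -> &mut StackEntry<I> {
        &mut self.entries[depth.0]
    }
}

// Hiding the former `IndexVec<StackDepth, StackEntry<X>>` behind a wrapper like
// this forces all mutation through a small API, which is presumably why the
// `.raw.last()` / `.raw.last_mut()` call sites above become `last()` / `last_mut()`.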