@@ -21,10 +21,9 @@ use std::marker::PhantomData;
 use derive_where::derive_where;
 #[cfg(feature = "nightly")]
 use rustc_macros::{Decodable_NoContext, Encodable_NoContext, HashStable_NoContext};
+use rustc_type_ir::data_structures::HashMap;
 use tracing::{debug, instrument};
 
-use crate::data_structures::HashMap;
-
 mod stack;
 use stack::{Stack, StackDepth, StackEntry};
 mod global_cache;
@@ -137,6 +136,12 @@ pub enum PathKind {
     Unknown,
     /// A path with at least one coinductive step. Such cycles hold.
     Coinductive,
+    /// A path which is treated as ambiguous. Once a path has this kind,
+    /// any additional segment does not change it.
+    ///
+    /// This is currently only used when fuzzing to support negative reasoning.
+    /// For more details, see #143054.
+    ForcedAmbiguity,
 }
 
 impl PathKind {
@@ -149,6 +154,9 @@ impl PathKind {
     /// to `max(self, rest)`.
     fn extend(self, rest: PathKind) -> PathKind {
         match (self, rest) {
+            (PathKind::ForcedAmbiguity, _) | (_, PathKind::ForcedAmbiguity) => {
+                PathKind::ForcedAmbiguity
+            }
             (PathKind::Coinductive, _) | (_, PathKind::Coinductive) => PathKind::Coinductive,
             (PathKind::Unknown, _) | (_, PathKind::Unknown) => PathKind::Unknown,
             (PathKind::Inductive, PathKind::Inductive) => PathKind::Inductive,
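
With the new arm, `extend` behaves like `max` over the ordering `Inductive < Unknown < Coinductive < ForcedAmbiguity`. A minimal sketch of the resulting semantics, using the `PathKind` from this module (not part of the patch):

```rust
// Combining two path segments picks the "strongest" kind:
assert_eq!(PathKind::Inductive.extend(PathKind::Unknown), PathKind::Unknown);
assert_eq!(PathKind::Unknown.extend(PathKind::Coinductive), PathKind::Coinductive);
// `ForcedAmbiguity` is absorbing: later segments no longer change the kind.
assert_eq!(PathKind::ForcedAmbiguity.extend(PathKind::Inductive), PathKind::ForcedAmbiguity);
```
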
@@ -187,41 +195,6 @@ impl UsageKind {
     }
 }
 
-/// For each goal we track whether the paths from this goal
-/// to its cycle heads are coinductive.
-///
-/// This is a necessary condition to rebase provisional cache
-/// entries.
-#[derive(Debug, Clone, Copy, PartialEq, Eq)]
-pub enum AllPathsToHeadCoinductive {
-    Yes,
-    No,
-}
-impl From<PathKind> for AllPathsToHeadCoinductive {
-    fn from(path: PathKind) -> AllPathsToHeadCoinductive {
-        match path {
-            PathKind::Coinductive => AllPathsToHeadCoinductive::Yes,
-            _ => AllPathsToHeadCoinductive::No,
-        }
-    }
-}
-impl AllPathsToHeadCoinductive {
-    #[must_use]
-    fn merge(self, other: impl Into<Self>) -> Self {
-        match (self, other.into()) {
-            (AllPathsToHeadCoinductive::Yes, AllPathsToHeadCoinductive::Yes) => {
-                AllPathsToHeadCoinductive::Yes
-            }
-            (AllPathsToHeadCoinductive::No, _) | (_, AllPathsToHeadCoinductive::No) => {
-                AllPathsToHeadCoinductive::No
-            }
-        }
-    }
-    fn and_merge(&mut self, other: impl Into<Self>) {
-        *self = self.merge(other);
-    }
-}
-
 #[derive(Debug, Clone, Copy)]
 struct AvailableDepth(usize);
 impl AvailableDepth {
@@ -261,9 +234,9 @@ impl AvailableDepth {
 ///
 /// We also track all paths from this goal to that head. This is necessary
 /// when rebasing provisional cache results.
-#[derive(Clone, Debug, PartialEq, Eq, Default)]
+#[derive(Clone, Debug, Default)]
 struct CycleHeads {
-    heads: BTreeMap<StackDepth, AllPathsToHeadCoinductive>,
+    heads: BTreeMap<StackDepth, PathsToNested>,
 }
 
 impl CycleHeads {
@@ -283,27 +256,16 @@ impl CycleHeads {
         self.heads.first_key_value().map(|(k, _)| *k)
     }
 
-    fn remove_highest_cycle_head(&mut self) {
+    fn remove_highest_cycle_head(&mut self) -> PathsToNested {
         let last = self.heads.pop_last();
-        debug_assert_ne!(last, None);
-    }
-
-    fn insert(
-        &mut self,
-        head: StackDepth,
-        path_from_entry: impl Into<AllPathsToHeadCoinductive> + Copy,
-    ) {
-        self.heads.entry(head).or_insert(path_from_entry.into()).and_merge(path_from_entry);
+        last.unwrap().1
     }
 
-    fn merge(&mut self, heads: &CycleHeads) {
-        for (&head, &path_from_entry) in heads.heads.iter() {
-            self.insert(head, path_from_entry);
-            debug_assert!(matches!(self.heads[&head], AllPathsToHeadCoinductive::Yes));
-        }
+    fn insert(&mut self, head: StackDepth, path_from_entry: impl Into<PathsToNested> + Copy) {
+        *self.heads.entry(head).or_insert(path_from_entry.into()) |= path_from_entry.into();
     }
 
-    fn iter(&self) -> impl Iterator<Item = (StackDepth, AllPathsToHeadCoinductive)> + '_ {
+    fn iter(&self) -> impl Iterator<Item = (StackDepth, PathsToNested)> + '_ {
         self.heads.iter().map(|(k, v)| (*k, *v))
     }
 
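
Merging path information for a head is now a plain bitwise or on `PathsToNested`, replacing the old `and_merge` on the binary lattice. A hedged sketch, assuming some `head: StackDepth` obtained elsewhere:

```rust
let mut heads = CycleHeads::default();
heads.insert(head, PathKind::Coinductive); // records the COINDUCTIVE bit
heads.insert(head, PathKind::Inductive);   // or-ed in, nothing is overwritten
// Both kinds of paths from this goal to `head` are now tracked:
assert!(heads.iter().any(|(h, p)| {
    h == head && p.contains(PathsToNested::INDUCTIVE | PathsToNested::COINDUCTIVE)
}));
```
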
@@ -317,13 +279,7 @@ impl CycleHeads {
                 Ordering::Equal => continue,
                 Ordering::Greater => unreachable!(),
             }
-
-            let path_from_entry = match step_kind {
-                PathKind::Coinductive => AllPathsToHeadCoinductive::Yes,
-                PathKind::Unknown | PathKind::Inductive => path_from_entry,
-            };
-
-            self.insert(head, path_from_entry);
+            self.insert(head, path_from_entry.extend_with(step_kind));
         }
     }
 }
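
The removed match could only distinguish "all steps coinductive" from everything else; `extend_with` keeps the exact combination of path kinds per head. A sketch of the new behavior under that reading:

```rust
// An unknown step upgrades inductive paths but leaves coinductive ones intact:
let path_from_entry = PathsToNested::INDUCTIVE | PathsToNested::COINDUCTIVE;
assert_eq!(
    path_from_entry.extend_with(PathKind::Unknown),
    PathsToNested::UNKNOWN | PathsToNested::COINDUCTIVE,
);
```
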
@@ -332,13 +288,14 @@ bitflags::bitflags! {
     /// Tracks how nested goals have been accessed. This is necessary to disable
     /// global cache entries if computing them would otherwise result in a cycle or
     /// access a provisional cache entry.
-    #[derive(Debug, Clone, Copy)]
+    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
     pub struct PathsToNested: u8 {
         /// The initial value when adding a goal to its own nested goals.
         const EMPTY = 1 << 0;
         const INDUCTIVE = 1 << 1;
         const UNKNOWN = 1 << 2;
         const COINDUCTIVE = 1 << 3;
+        const FORCED_AMBIGUITY = 1 << 4;
     }
 }
 impl From<PathKind> for PathsToNested {
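
Deriving `PartialEq`/`Eq` makes path sets directly comparable, which the rebasing check further down relies on. As a quick illustration of the flags (a sketch; `EMPTY` marks the zero-length path from a goal to itself):

```rust
let mut paths = PathsToNested::EMPTY;           // goal just added to its own nested goals
paths = paths.extend_with(PathKind::Inductive); // the empty path gains an inductive step
assert_eq!(paths, PathsToNested::INDUCTIVE);    // comparison via the new `PartialEq`
```
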
@@ -347,6 +304,7 @@ impl From<PathKind> for PathsToNested {
             PathKind::Inductive => PathsToNested::INDUCTIVE,
             PathKind::Unknown => PathsToNested::UNKNOWN,
             PathKind::Coinductive => PathsToNested::COINDUCTIVE,
+            PathKind::ForcedAmbiguity => PathsToNested::FORCED_AMBIGUITY,
         }
     }
 }
@@ -379,10 +337,46 @@ impl PathsToNested {
                     self.insert(PathsToNested::COINDUCTIVE);
                 }
             }
+            PathKind::ForcedAmbiguity => {
+                if self.intersects(
+                    PathsToNested::EMPTY
+                        | PathsToNested::INDUCTIVE
+                        | PathsToNested::UNKNOWN
+                        | PathsToNested::COINDUCTIVE,
+                ) {
+                    self.remove(
+                        PathsToNested::EMPTY
+                            | PathsToNested::INDUCTIVE
+                            | PathsToNested::UNKNOWN
+                            | PathsToNested::COINDUCTIVE,
+                    );
+                    self.insert(PathsToNested::FORCED_AMBIGUITY);
+                }
+            }
         }
 
         self
     }
+
+    #[must_use]
+    fn extend_with_paths(self, path: PathsToNested) -> Self {
+        let mut new = PathsToNested::empty();
+        let (PathKind::Inductive
+        | PathKind::Unknown
+        | PathKind::Coinductive
+        | PathKind::ForcedAmbiguity);
+        for n in [
+            PathKind::Inductive,
+            PathKind::Unknown,
+            PathKind::Coinductive,
+            PathKind::ForcedAmbiguity,
+        ] {
+            if path.contains(n.into()) {
+                new |= self.extend_with(n);
+            }
+        }
+        new
+    }
 }
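
Two notes on `extend_with_paths`: the binding-less `let` listing every `PathKind` variant is a compile-time reminder, since it stops compiling once a new variant makes the or-pattern refutable, and the loop distributes `extend_with` over each kind contained in `path`. A sketch of the intended algebra:

```rust
// Paths from `a` to `b` (self), extended with all paths from `b` to `c` (path),
// yield every combined `a -> b -> c` kind:
let ab = PathsToNested::INDUCTIVE | PathsToNested::UNKNOWN;
let bc = PathsToNested::COINDUCTIVE;
// Inductive or unknown paths followed by a coinductive one become coinductive:
assert_eq!(ab.extend_with_paths(bc), PathsToNested::COINDUCTIVE);
```
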
387381
388382/// The nested goals of each stack entry and the path from the
@@ -693,7 +687,7 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> {
         if let Some((_scope, expected)) = validate_cache {
             // Do not try to move a goal into the cache again if we're testing
             // the global cache.
-            assert_eq!(evaluation_result.result, expected, "input={input:?}");
+            assert_eq!(expected, evaluation_result.result, "input={input:?}");
         } else if D::inspect_is_noop(inspect) {
             self.insert_global_cache(cx, input, evaluation_result, dep_node)
         }
@@ -782,7 +776,7 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> {
         stack_entry: &StackEntry<X>,
         mut mutate_result: impl FnMut(X::Input, X::Result) -> X::Result,
     ) {
-        let head = self.stack.next_index();
+        let popped_head = self.stack.next_index();
         #[allow(rustc::potential_query_instability)]
         self.provisional_cache.retain(|&input, entries| {
             entries.retain_mut(|entry| {
@@ -792,30 +786,32 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> {
                     path_from_head,
                     result,
                 } = entry;
-                if heads.highest_cycle_head() == head {
+                let ep = if heads.highest_cycle_head() == popped_head {
                     heads.remove_highest_cycle_head()
                 } else {
                     return true;
-                }
-
-                // We only try to rebase if all paths from the cache entry
-                // to its heads are coinductive. In this case these cycle
-                // kinds won't change, no matter the goals between these
-                // heads and the provisional cache entry.
-                if heads.iter().any(|(_, p)| matches!(p, AllPathsToHeadCoinductive::No)) {
-                    return false;
-                }
+                };
 
-                // The same for nested goals of the cycle head.
-                if stack_entry.heads.iter().any(|(_, p)| matches!(p, AllPathsToHeadCoinductive::No))
-                {
-                    return false;
+                // We're rebasing an entry `e` over a head `p`. This head
+                // has a number of its own heads `h` it depends on. We need to
+                // make sure that the kind of each cycle `hph` cannot change after rebasing.
+                //
+                // After rebasing, the cycles `hph` will go through the path `heph` instead.
+                for (h, ph) in stack_entry.heads.iter() {
+                    let hp =
+                        Self::cycle_path_kind(&self.stack, stack_entry.step_kind_from_parent, h);
+                    let hph = ph.extend_with(hp);
+                    let he = hp.extend(*path_from_head);
+                    let hep = ep.extend_with(he);
+                    let heph = hep.extend_with_paths(ph);
+                    if hph == heph {
+                        let eph = ep.extend_with_paths(ph);
+                        heads.insert(h, eph);
+                    } else {
+                        return false;
+                    }
                 }
 
-                // Merge the cycle heads of the provisional cache entry and the
-                // popped head. If the popped cycle head was a root, discard all
-                // provisional cache entries which depend on it.
-                heads.merge(&stack_entry.heads);
                 let Some(head) = heads.opt_highest_cycle_head() else {
                     return false;
                 };
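
Spelled out, the per-head check above is path algebra over `PathKind` and `PathsToNested` (variable names as in the hunk; this restates the code rather than extending the patch):

```rust
// For each remaining head `h` of the popped head `p`, with cache entry `e`:
//   hp   — kind of the stack path from `h` to `p`
//   hph  = ph.extend_with(hp)            // cycles through `p` before rebasing
//   he   = hp.extend(*path_from_head)    // path from `h` via `p` to `e`
//   hep  = ep.extend_with(he)            // paths reaching `p` through `e`
//   heph = hep.extend_with_paths(ph)     // cycles through `e` after rebasing
// Rebasing is sound only if `hph == heph`; the rebased entry then records
// `eph = ep.extend_with_paths(ph)` as its paths to the remaining head `h`.
```
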