/// behavior as long as the resulting behavior is still correct.
use std::cmp::Ordering;
use std::collections::BTreeMap;
+use std::collections::hash_map::Entry;
use std::fmt::Debug;
use std::hash::Hash;
use std::marker::PhantomData;
@@ -111,18 +112,26 @@ pub trait Delegate {
/// on the kind of cycle.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum PathKind {
-    Coinductive,
+    /// A path consisting of only inductive/unproductive steps.
    Inductive,
+    /// A path which is not coinductive right now but which we may want
+    /// to change to be coinductive in the future. We return an ambiguous
+    /// result in this case to prevent people from relying on this.
+    Unknown,
+    /// A path with at least one coinductive step. Such cycles hold.
+    Coinductive,
}
+
impl PathKind {
    /// Returns the path kind when merging `self` with `rest`.
    ///
    /// Given an inductive path `self` and a coinductive path `rest`,
    /// the path `self -> rest` would be coinductive.
    fn extend(self, rest: PathKind) -> PathKind {
-        match self {
-            PathKind::Coinductive => PathKind::Coinductive,
-            PathKind::Inductive => rest,
+        match (self, rest) {
+            (PathKind::Coinductive, _) | (_, PathKind::Coinductive) => PathKind::Coinductive,
+            (PathKind::Unknown, _) | (_, PathKind::Unknown) => PathKind::Unknown,
+            (PathKind::Inductive, PathKind::Inductive) => PathKind::Inductive,
        }
    }
}
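For intuition, the new `extend` behaves like taking the maximum over the ordering `Inductive < Unknown < Coinductive`. A rough sketch of the resulting behavior inside this module (illustrative assertions, not taken from the patch):

    // Coinductive steps dominate, Unknown beats Inductive, and a purely
    // inductive path stays inductive.
    assert_eq!(PathKind::Inductive.extend(PathKind::Inductive), PathKind::Inductive);
    assert_eq!(PathKind::Inductive.extend(PathKind::Unknown), PathKind::Unknown);
    assert_eq!(PathKind::Unknown.extend(PathKind::Coinductive), PathKind::Coinductive);
    assert_eq!(PathKind::Coinductive.extend(PathKind::Unknown), PathKind::Coinductive);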
@@ -156,9 +165,6 @@ impl UsageKind {
            }
        }
    }
-    fn and_merge(&mut self, other: impl Into<Self>) {
-        *self = self.merge(other);
-    }
}

/// For each goal we track whether the paths from this goal
@@ -294,14 +300,68 @@ impl CycleHeads {

            let path_from_entry = match step_kind {
                PathKind::Coinductive => AllPathsToHeadCoinductive::Yes,
-                PathKind::Inductive => path_from_entry,
+                PathKind::Unknown | PathKind::Inductive => path_from_entry,
            };

            self.insert(head, path_from_entry);
        }
    }
}

+bitflags::bitflags! {
+    /// Tracks how nested goals have been accessed. This is necessary to disable
+    /// global cache entries if computing them would otherwise result in a cycle or
+    /// access a provisional cache entry.
+    #[derive(Debug, Clone, Copy)]
+    pub struct PathsToNested: u8 {
+        /// The initial value when adding a goal to its own nested goals.
+        const EMPTY = 1 << 0;
+        const INDUCTIVE = 1 << 1;
+        const UNKNOWN = 1 << 2;
+        const COINDUCTIVE = 1 << 3;
+    }
+}
+impl From<PathKind> for PathsToNested {
+    fn from(path: PathKind) -> PathsToNested {
+        match path {
+            PathKind::Inductive => PathsToNested::INDUCTIVE,
+            PathKind::Unknown => PathsToNested::UNKNOWN,
+            PathKind::Coinductive => PathsToNested::COINDUCTIVE,
+        }
+    }
+}
+impl PathsToNested {
+    #[must_use]
+    fn extend_with(mut self, path: PathKind) -> Self {
+        match path {
+            PathKind::Inductive => {
+                if self.intersects(PathsToNested::EMPTY) {
+                    self.remove(PathsToNested::EMPTY);
+                    self.insert(PathsToNested::INDUCTIVE);
+                }
+            }
+            PathKind::Unknown => {
+                if self.intersects(PathsToNested::EMPTY | PathsToNested::INDUCTIVE) {
+                    self.remove(PathsToNested::EMPTY | PathsToNested::INDUCTIVE);
+                    self.insert(PathsToNested::UNKNOWN);
+                }
+            }
+            PathKind::Coinductive => {
+                if self.intersects(
+                    PathsToNested::EMPTY | PathsToNested::INDUCTIVE | PathsToNested::UNKNOWN,
+                ) {
+                    self.remove(
+                        PathsToNested::EMPTY | PathsToNested::INDUCTIVE | PathsToNested::UNKNOWN,
+                    );
+                    self.insert(PathsToNested::COINDUCTIVE);
+                }
+            }
+        }
+
+        self
+    }
+}
+
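A rough sketch of how these flags compose: `extend_with` upgrades every weaker path in the set to the kind of the new step, while stronger paths are left alone. The assertions below are only illustrative (the flags and `extend_with` are private to this module) and compare raw bits because the struct does not derive `PartialEq`:

    // The initial EMPTY path upgrades to the kind of the first real step.
    let paths = PathsToNested::EMPTY.extend_with(PathKind::Inductive);
    assert_eq!(paths.bits(), PathsToNested::INDUCTIVE.bits());

    // An already coinductive path is never downgraded by a later inductive step.
    let paths = PathsToNested::COINDUCTIVE.extend_with(PathKind::Inductive);
    assert_eq!(paths.bits(), PathsToNested::COINDUCTIVE.bits());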
/// The nested goals of each stack entry and the path from the
/// stack entry to that nested goal.
///
@@ -319,15 +379,18 @@ impl CycleHeads {
/// results from the cycle BAB depending on the cycle root.
#[derive_where(Debug, Default, Clone; X: Cx)]
struct NestedGoals<X: Cx> {
-    nested_goals: HashMap<X::Input, UsageKind>,
+    nested_goals: HashMap<X::Input, PathsToNested>,
}
impl<X: Cx> NestedGoals<X> {
    fn is_empty(&self) -> bool {
        self.nested_goals.is_empty()
    }

-    fn insert(&mut self, input: X::Input, path_from_entry: UsageKind) {
-        self.nested_goals.entry(input).or_insert(path_from_entry).and_merge(path_from_entry);
+    fn insert(&mut self, input: X::Input, paths_to_nested: PathsToNested) {
+        match self.nested_goals.entry(input) {
+            Entry::Occupied(mut entry) => *entry.get_mut() |= paths_to_nested,
+            Entry::Vacant(entry) => drop(entry.insert(paths_to_nested)),
+        }
    }
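Compared to the old `or_insert(..).and_merge(..)` on `UsageKind`, the bitset union keeps every path kind separately instead of collapsing differing kinds into `Mixed`. A hedged sketch, assuming some `nested: NestedGoals<X>` and a goal `input` in scope (both names are made up for illustration):

    // Recording the same nested goal along two different paths unions the bitsets.
    nested.insert(input, PathsToNested::INDUCTIVE);
    nested.insert(input, PathsToNested::COINDUCTIVE);
    let paths = nested.nested_goals.get(&input).copied().unwrap();
    assert!(paths.contains(PathsToNested::INDUCTIVE | PathsToNested::COINDUCTIVE));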

    /// Adds the nested goals of a nested goal, given that the path `step_kind` from this goal
@@ -338,18 +401,15 @@ impl<X: Cx> NestedGoals<X> {
    /// the same as for the child.
    fn extend_from_child(&mut self, step_kind: PathKind, nested_goals: &NestedGoals<X>) {
        #[allow(rustc::potential_query_instability)]
-        for (input, path_from_entry) in nested_goals.iter() {
-            let path_from_entry = match step_kind {
-                PathKind::Coinductive => UsageKind::Single(PathKind::Coinductive),
-                PathKind::Inductive => path_from_entry,
-            };
-            self.insert(input, path_from_entry);
+        for (input, paths_to_nested) in nested_goals.iter() {
+            let paths_to_nested = paths_to_nested.extend_with(step_kind);
+            self.insert(input, paths_to_nested);
        }
    }

    #[cfg_attr(feature = "nightly", rustc_lint_query_instability)]
    #[allow(rustc::potential_query_instability)]
-    fn iter(&self) -> impl Iterator<Item = (X::Input, UsageKind)> + '_ {
+    fn iter(&self) -> impl Iterator<Item = (X::Input, PathsToNested)> + '_ {
        self.nested_goals.iter().map(|(i, p)| (*i, *p))
    }

@@ -487,7 +547,7 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> {
            // goals as this change may cause them to now depend on additional
            // goals, resulting in new cycles. See the dev-guide for examples.
            if parent_depends_on_cycle {
-                parent.nested_goals.insert(parent.input, UsageKind::Single(PathKind::Inductive))
+                parent.nested_goals.insert(parent.input, PathsToNested::EMPTY);
            }
        }
    }
@@ -663,7 +723,7 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> {
            //
            // We must therefore not use the global cache entry for `B` in that case.
            // See tests/ui/traits/next-solver/cycles/hidden-by-overflow.rs
-            last.nested_goals.insert(last.input, UsageKind::Single(PathKind::Inductive));
+            last.nested_goals.insert(last.input, PathsToNested::EMPTY);
        }

        debug!("encountered stack overflow");
@@ -745,16 +805,11 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> {

                // We now care about the path from the next highest cycle head to the
                // provisional cache entry.
-                match path_from_head {
-                    PathKind::Coinductive => {}
-                    PathKind::Inductive => {
-                        *path_from_head = Self::cycle_path_kind(
-                            &self.stack,
-                            stack_entry.step_kind_from_parent,
-                            head,
-                        )
-                    }
-                }
+                *path_from_head = path_from_head.extend(Self::cycle_path_kind(
+                    &self.stack,
+                    stack_entry.step_kind_from_parent,
+                    head,
+                ));
                // Mutate the result of the provisional cache entry in case we did
                // not reach a fixpoint.
                *result = mutate_result(input, *result);
@@ -854,7 +909,7 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> {
            for &ProvisionalCacheEntry {
                encountered_overflow,
                ref heads,
-                path_from_head,
+                path_from_head: head_to_provisional,
                result: _,
            } in entries.iter()
            {
@@ -866,24 +921,19 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> {

                // A provisional cache entry only applies if the path from its highest head
                // matches the path when encountering the goal.
+                //
+                // We check if any of the paths taken while computing the global goal
+                // would end up with an applicable provisional cache entry.
                let head = heads.highest_cycle_head();
-                let full_path = match Self::cycle_path_kind(stack, step_kind_from_parent, head) {
-                    PathKind::Coinductive => UsageKind::Single(PathKind::Coinductive),
-                    PathKind::Inductive => path_from_global_entry,
-                };
-
-                match (full_path, path_from_head) {
-                    (UsageKind::Mixed, _)
-                    | (UsageKind::Single(PathKind::Coinductive), PathKind::Coinductive)
-                    | (UsageKind::Single(PathKind::Inductive), PathKind::Inductive) => {
-                        debug!(
-                            ?full_path,
-                            ?path_from_head,
-                            "cache entry not applicable due to matching paths"
-                        );
-                        return false;
-                    }
-                    _ => debug!(?full_path, ?path_from_head, "paths don't match"),
+                let head_to_curr = Self::cycle_path_kind(stack, step_kind_from_parent, head);
+                let full_paths = path_from_global_entry.extend_with(head_to_curr);
+                if full_paths.contains(head_to_provisional.into()) {
+                    debug!(
+                        ?full_paths,
+                        ?head_to_provisional,
+                        "cache entry not applicable due to matching paths"
+                    );
+                    return false;
                }
            }
        }
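Concretely, the global cache entry is now rejected whenever any of the bit-tracked paths to the nested goal, extended by the path from the head to the current goal, matches the path kind the provisional cache entry was computed under. A hedged sketch with invented values:

    // The global entry reached the nested goal only along inductive paths,
    // but the path from the cycle head to the current goal is coinductive.
    let path_from_global_entry = PathsToNested::INDUCTIVE;
    let head_to_curr = PathKind::Coinductive;
    let full_paths = path_from_global_entry.extend_with(head_to_curr);
    // Every inductive path gets upgraded to a coinductive one ...
    assert_eq!(full_paths.bits(), PathsToNested::COINDUCTIVE.bits());
    // ... so a provisional entry computed under a coinductive path applies
    // and the global cache entry must not be used.
    let head_to_provisional = PathKind::Coinductive;
    assert!(full_paths.contains(head_to_provisional.into()));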
@@ -982,8 +1032,8 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> {
        let last = &mut self.stack[last_index];
        last.reached_depth = last.reached_depth.max(next_index);

-        last.nested_goals.insert(input, UsageKind::Single(step_kind_from_parent));
-        last.nested_goals.insert(last.input, UsageKind::Single(PathKind::Inductive));
+        last.nested_goals.insert(input, step_kind_from_parent.into());
+        last.nested_goals.insert(last.input, PathsToNested::EMPTY);
        if last_index != head {
            last.heads.insert(head, step_kind_from_parent);
        }
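When a cycle is encountered, the head now records the cycling goal under the concrete step kind via `From<PathKind>`, and records itself under `EMPTY`; the empty path is only upgraded to a real path kind once `extend_with` rebases these nested goals onto the head's own parents. A small illustrative sketch (values invented):

    // The cycling goal is tracked under the concrete step kind ...
    let to_cycle: PathsToNested = PathKind::Coinductive.into();
    assert_eq!(to_cycle.bits(), PathsToNested::COINDUCTIVE.bits());
    // ... while the head tracks itself with the empty path, which only turns
    // into the actual step kind once a parent extends it.
    let own = PathsToNested::EMPTY.extend_with(PathKind::Unknown);
    assert_eq!(own.bits(), PathsToNested::UNKNOWN.bits());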