@@ -19,6 +19,9 @@ rustc_index::newtype_index! {
 }
 
 bitflags::bitflags! {
+    /// Whether and how this goal has been used as the root of a
+    /// cycle. We track the kind of cycle as we're otherwise forced
+    /// to always rerun at least once.
     #[derive(Debug, Clone, Copy, PartialEq, Eq)]
     struct HasBeenUsed: u8 {
         const INDUCTIVE_CYCLE = 1 << 0;
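To make the new flag tracking concrete, here is a standalone sketch (assuming the `bitflags` 2.x crate; the `COINDUCTIVE_CYCLE` bit lies outside this hunk and is reproduced here as an assumption) of how both cycle kinds can accumulate on a single goal:

```rust
use bitflags::bitflags;

bitflags! {
    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    struct HasBeenUsed: u8 {
        const INDUCTIVE_CYCLE = 1 << 0;
        // Assumed second bit; its definition is outside the hunk above.
        const COINDUCTIVE_CYCLE = 1 << 1;
    }
}

fn main() {
    let mut used = HasBeenUsed::empty();
    // A coinductive cycle reaches this goal first ...
    used |= HasBeenUsed::COINDUCTIVE_CYCLE;
    // ... and an inductive one later. Both kinds are now recorded; per the
    // doc comment above, tracking the kind lets the solver avoid an
    // unconditional rerun.
    used |= HasBeenUsed::INDUCTIVE_CYCLE;
    assert!(used.contains(HasBeenUsed::INDUCTIVE_CYCLE | HasBeenUsed::COINDUCTIVE_CYCLE));
}
```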
@@ -29,23 +32,30 @@ bitflags::bitflags! {
 #[derive(Debug)]
 struct StackEntry<'tcx> {
     input: CanonicalInput<'tcx>,
+
     available_depth: Limit,
+
     /// The maximum depth reached by this stack entry, only up-to date
     /// for the top of the stack and lazily updated for the rest.
     reached_depth: StackDepth,
-    /// Whether this entry is a cycle participant which is not a root.
+
+    /// Whether this entry is a non-root cycle participant.
     ///
-    /// If so, it must not be moved to the global cache. See
-    /// [SearchGraph::cycle_participants] for more details.
+    /// We must not move the result of non-root cycle participants to the
+    /// global cache. See [SearchGraph::cycle_participants] for more details.
+    /// We store the highest stack depth of a head of a cycle this goal is involved
+    /// in. This is necessary to soundly cache its provisional result.
     non_root_cycle_participant: Option<StackDepth>,
 
     encountered_overflow: bool,
+
     has_been_used: HasBeenUsed,
     /// Starts out as `None` and gets set when rerunning this
     /// goal in case we encounter a cycle.
     provisional_result: Option<QueryResult<'tcx>>,
 }
 
+/// The provisional result for a goal which is not on the stack.
 struct DetachedEntry<'tcx> {
     /// The head of the smallest non-trivial cycle involving this entry.
     ///
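As a reading aid for the `non_root_cycle_participant` comment, a tiny hypothetical helper (plain `usize` depths instead of `StackDepth`; not code from this change) that captures where a finished goal's result is allowed to go:

```rust
/// Where a finished goal's result may be cached (illustrative only).
#[derive(Debug, PartialEq)]
enum CacheTarget {
    /// Only a cycle root, or a goal not involved in any cycle, may be
    /// moved to the global cache.
    Global,
    /// Non-root cycle participants keep their result in the provisional
    /// cache, associated with the head of the cycle they belong to.
    Provisional { head: usize },
}

fn cache_target(non_root_cycle_participant: Option<usize>) -> CacheTarget {
    match non_root_cycle_participant {
        Some(head) => CacheTarget::Provisional { head },
        None => CacheTarget::Global,
    }
}

fn main() {
    assert_eq!(cache_target(None), CacheTarget::Global);
    assert_eq!(cache_target(Some(2)), CacheTarget::Provisional { head: 2 });
}
```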
@@ -59,6 +69,18 @@ struct DetachedEntry<'tcx> {
     result: QueryResult<'tcx>,
 }
 
+/// Stores the stack depth of a currently evaluated goal *and* already
+/// computed results for goals which depend on other goals still on the stack.
+///
+/// The provisional result may depend on whether the stack above it is inductive
+/// or coinductive. Because of this, we store separate provisional results for
+/// each case. If a provisional entry is not applicable, it may be the case
+/// that we already have a provisional result while computing a goal. In this case
+/// we prefer the provisional result to potentially avoid fixpoint iterations.
+/// See tests/ui/traits/next-solver/cycles/mixed-cycles-2.rs for an example.
+///
+/// The provisional cache can theoretically result in changes to the observable behavior,
+/// see tests/ui/traits/next-solver/cycles/provisional-cache-impacts-behavior.rs.
 #[derive(Default)]
 struct ProvisionalCacheEntry<'tcx> {
     stack_depth: Option<StackDepth>,
@@ -200,6 +222,16 @@ impl<'tcx> SearchGraph<'tcx> {
             .all(|entry| entry.input.value.goal.predicate.is_coinductive(tcx))
     }
 
+    // When encountering a solver cycle, the result of the current goal
+    // depends on goals lower on the stack.
+    //
+    // We therefore have to be careful when caching goals. Only the final result
+    // of the cycle root, i.e. the lowest goal on the stack involved in this cycle,
+    // is moved to the global cache while all others are stored in a provisional cache.
+    //
+    // We update both the head of this cycle to rerun its evaluation until
+    // we reach a fixpoint and all other cycle participants to make sure that
+    // their result does not get moved to the global cache.
     fn tag_cycle_participants(
         stack: &mut IndexVec<StackDepth, StackEntry<'tcx>>,
         cycle_participants: &mut FxHashSet<CanonicalInput<'tcx>>,
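The body of `tag_cycle_participants` is outside this hunk; the following self-contained sketch approximates the behaviour the comments describe, using `Vec`, `HashSet`, `u32`, and a `bool` in place of `IndexVec`, `FxHashSet`, `CanonicalInput`, and `HasBeenUsed`:

```rust
use std::collections::HashSet;

struct Entry {
    input: u32,
    has_been_used: bool,
    non_root_cycle_participant: Option<usize>,
}

fn tag_cycle_participants(
    stack: &mut [Entry],
    cycle_participants: &mut HashSet<u32>,
    head: usize,
) {
    // The head is the cycle root: mark it so it gets rerun until fixpoint.
    stack[head].has_been_used = true;
    // Every goal above the head depends on it. Keep the head with the highest
    // stack depth (as the `non_root_cycle_participant` comment describes) and
    // record the goal as a participant so its result is never moved to the
    // global cache on its own.
    for entry in &mut stack[head + 1..] {
        entry.non_root_cycle_participant = entry.non_root_cycle_participant.max(Some(head));
        cycle_participants.insert(entry.input);
    }
}

fn main() {
    let mut stack = vec![
        Entry { input: 10, has_been_used: false, non_root_cycle_participant: None },
        Entry { input: 11, has_been_used: false, non_root_cycle_participant: None },
        Entry { input: 12, has_been_used: false, non_root_cycle_participant: None },
    ];
    let mut participants = HashSet::new();
    // The goal at depth 2 encountered a cycle with the goal at depth 0.
    tag_cycle_participants(&mut stack, &mut participants, 0);
    assert!(stack[0].has_been_used);
    assert_eq!(stack[2].non_root_cycle_participant, Some(0));
    assert_eq!(participants, HashSet::from([11, 12]));
}
```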
@@ -281,24 +313,20 @@ impl<'tcx> SearchGraph<'tcx> {
         }
 
         // Check whether the goal is in the provisional cache.
+        // The provisional result may rely on the path to its cycle roots,
+        // so we have to check that the path of the current goal matches that of
+        // the cache entry.
         let cache_entry = self.provisional_cache.entry(input).or_default();
-        if let Some(with_coinductive_stack) = &cache_entry.with_coinductive_stack
-            && Self::stack_coinductive_from(tcx, &self.stack, with_coinductive_stack.head)
-        {
-            // We have a nested goal which is already in the provisional cache, use
-            // its result. We do not provide any usage kind as that should have been
-            // already set correctly while computing the cache entry.
-            inspect
-                .goal_evaluation_kind(inspect::WipCanonicalGoalEvaluationKind::ProvisionalCacheHit);
-            Self::tag_cycle_participants(
-                &mut self.stack,
-                &mut self.cycle_participants,
-                HasBeenUsed::empty(),
-                with_coinductive_stack.head,
-            );
-            return with_coinductive_stack.result;
-        } else if let Some(with_inductive_stack) = &cache_entry.with_inductive_stack
-            && !Self::stack_coinductive_from(tcx, &self.stack, with_inductive_stack.head)
+        if let Some(entry) = cache_entry
+            .with_coinductive_stack
+            .as_ref()
+            .filter(|p| Self::stack_coinductive_from(tcx, &self.stack, p.head))
+            .or_else(|| {
+                cache_entry
+                    .with_inductive_stack
+                    .as_ref()
+                    .filter(|p| !Self::stack_coinductive_from(tcx, &self.stack, p.head))
+            })
         {
             // We have a nested goal which is already in the provisional cache, use
             // its result. We do not provide any usage kind as that should have been
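The folded lookup can be modelled in isolation; the sketch below uses stand-in types (`usize` heads, `&str` results, and a closure in place of `Self::stack_coinductive_from`) to show which detached entry gets picked for a given path kind:

```rust
struct Detached {
    head: usize,
    result: &'static str,
}

struct CacheEntry {
    with_coinductive_stack: Option<Detached>,
    with_inductive_stack: Option<Detached>,
}

/// Pick whichever detached entry matches the kind of path from its head to
/// the current goal: the coinductive entry only applies on a coinductive
/// path, the inductive entry only on an inductive one.
fn lookup(
    entry: &CacheEntry,
    stack_coinductive_from: impl Fn(usize) -> bool,
) -> Option<&Detached> {
    entry
        .with_coinductive_stack
        .as_ref()
        .filter(|p| stack_coinductive_from(p.head))
        .or_else(|| {
            entry
                .with_inductive_stack
                .as_ref()
                .filter(|p| !stack_coinductive_from(p.head))
        })
}

fn main() {
    let entry = CacheEntry {
        with_coinductive_stack: Some(Detached { head: 0, result: "coinductive result" }),
        with_inductive_stack: Some(Detached { head: 0, result: "inductive result" }),
    };
    // The current path from depth 0 to the goal is inductive, so only the
    // inductive entry is applicable.
    let hit = lookup(&entry, |_head| false).unwrap();
    assert_eq!(hit.result, "inductive result");
}
```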
@@ -309,20 +337,17 @@ impl<'tcx> SearchGraph<'tcx> {
                 &mut self.stack,
                 &mut self.cycle_participants,
                 HasBeenUsed::empty(),
-                with_inductive_stack.head,
+                entry.head,
             );
-            return with_inductive_stack.result;
+            return entry.result;
         } else if let Some(stack_depth) = cache_entry.stack_depth {
             debug!("encountered cycle with depth {stack_depth:?}");
-            // We have a nested goal which relies on a goal `root` deeper in the stack.
+            // We have a nested goal which directly relies on a goal deeper in the stack.
             //
-            // We first store that we may have to reprove `root` in case the provisional
-            // response is not equal to the final response. We also update the depth of all
-            // goals which recursively depend on our current goal to depend on `root`
-            // instead.
+            // We start by tagging all cycle participants, as that's necessary for caching.
             //
-            // Finally we can return either the provisional response for that goal if we have a
-            // coinductive cycle or an ambiguous result if the cycle is inductive.
+            // Finally we can return either the provisional response or the initial response
+            // in case we're in the first fixpoint iteration for this goal.
             inspect.goal_evaluation_kind(inspect::WipCanonicalGoalEvaluationKind::CycleInStack);
             let is_coinductive_cycle = Self::stack_coinductive_from(tcx, &self.stack, stack_depth);
             let usage_kind = if is_coinductive_cycle {
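The "provisional response or initial response" choice reads roughly as follows in a toy model (invented `Response` type; `Trivial` and `Ambiguous` stand in for the trivially-true and ambiguous responses mentioned in the removed comment):

```rust
#[derive(Debug, Clone, PartialEq)]
enum Response {
    Trivial,   // stand-in for the trivially-true response used for coinductive cycles
    Ambiguous, // stand-in for the ambiguous result used for inductive cycles
    Computed(&'static str),
}

fn cycle_result(provisional_result: Option<Response>, is_coinductive_cycle: bool) -> Response {
    match provisional_result {
        // A previous fixpoint iteration already produced a provisional result: reuse it.
        Some(result) => result,
        // First iteration: fall back to the initial response for the cycle kind.
        None if is_coinductive_cycle => Response::Trivial,
        None => Response::Ambiguous,
    }
}

fn main() {
    assert_eq!(cycle_result(None, true), Response::Trivial);
    assert_eq!(cycle_result(None, false), Response::Ambiguous);
    assert_eq!(
        cycle_result(Some(Response::Computed("prev")), false),
        Response::Computed("prev")
    );
}
```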
@@ -410,10 +435,10 @@ impl<'tcx> SearchGraph<'tcx> {
                 false
             };
 
+            // If we did not reach a fixpoint, update the provisional result and reevaluate.
             if reached_fixpoint {
                 return (stack_entry, result);
             } else {
-                // Did not reach a fixpoint, update the provisional result and reevaluate.
                 let depth = self.stack.push(StackEntry {
                     has_been_used: HasBeenUsed::empty(),
                     provisional_result: Some(result),
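For intuition, a self-contained toy version of the fixpoint iteration this branch drives (integer results and a made-up `evaluate` function; the real loop additionally consults the usage-kind flags before declaring a fixpoint):

```rust
fn evaluate(provisional_result: Option<u32>) -> u32 {
    // Toy "solver": the result depends on the provisional result of the
    // cycle head and converges after a few iterations.
    match provisional_result {
        None => 0,
        Some(prev) => (prev + 1).min(3),
    }
}

fn solve_to_fixpoint() -> u32 {
    let mut provisional_result = None;
    loop {
        let result = evaluate(provisional_result);
        // Fixpoint reached: the new result equals the provisional result it
        // was computed from, so rerunning would not change anything.
        if provisional_result == Some(result) {
            return result;
        }
        // Otherwise update the provisional result and reevaluate.
        provisional_result = Some(result);
    }
}

fn main() {
    assert_eq!(solve_to_fixpoint(), 3);
}
```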
@@ -435,9 +460,6 @@ impl<'tcx> SearchGraph<'tcx> {
         // We're now done with this goal. In case this goal is involved in a larger cycle
         // do not remove it from the provisional cache and update its provisional result.
         // We only add the root of cycles to the global cache.
-        //
-        // It is not possible for any nested goal to depend on something deeper on the
-        // stack, as this would have also updated the depth of the current goal.
         if let Some(head) = final_entry.non_root_cycle_participant {
             let coinductive_stack = Self::stack_coinductive_from(tcx, &self.stack, head);
 
@@ -449,6 +471,9 @@ impl<'tcx> SearchGraph<'tcx> {
                 entry.with_inductive_stack = Some(DetachedEntry { head, result });
             }
         } else {
+            self.provisional_cache.remove(&input);
+            let reached_depth = final_entry.reached_depth.as_usize() - self.stack.len();
+            let cycle_participants = mem::take(&mut self.cycle_participants);
             // When encountering a cycle, both inductive and coinductive, we only
             // move the root into the global cache. We also store all other cycle
             // participants involved.
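The three hoisted lines amount to "close out the root's bookkeeping before inserting into the global cache". A standalone sketch with ordinary `HashMap`/`HashSet` standing in for the solver's caches (hypothetical helper, simplified types):

```rust
use std::collections::{HashMap, HashSet};
use std::mem;

fn finish_root(
    input: u32,
    provisional_cache: &mut HashMap<u32, &'static str>,
    cycle_participants: &mut HashSet<u32>,
) -> HashSet<u32> {
    // The root's own provisional entry is no longer needed.
    provisional_cache.remove(&input);
    // Hand the accumulated participant set to the global-cache insertion
    // and reset it for the next root.
    mem::take(cycle_participants)
}

fn main() {
    let mut provisional_cache = HashMap::from([(1, "root"), (2, "participant")]);
    let mut cycle_participants = HashSet::from([2]);
    let participants = finish_root(1, &mut provisional_cache, &mut cycle_participants);
    assert!(cycle_participants.is_empty());
    assert_eq!(participants, HashSet::from([2]));
    assert!(!provisional_cache.contains_key(&1));
}
```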
@@ -457,9 +482,6 @@ impl<'tcx> SearchGraph<'tcx> {
             // participant is on the stack. This is necessary to prevent unstable
             // results. See the comment of `SearchGraph::cycle_participants` for
             // more details.
-            self.provisional_cache.remove(&input);
-            let reached_depth = final_entry.reached_depth.as_usize() - self.stack.len();
-            let cycle_participants = mem::take(&mut self.cycle_participants);
             self.global_cache(tcx).insert(
                 tcx,
                 input,