diff --git a/compiler/rustc_middle/src/arena.rs b/compiler/rustc_middle/src/arena.rs
index e3d7dff3c66bb..37c10b14054c5 100644
--- a/compiler/rustc_middle/src/arena.rs
+++ b/compiler/rustc_middle/src/arena.rs
@@ -61,10 +61,6 @@ macro_rules! arena_types {
             [] dtorck_constraint: rustc_middle::traits::query::DropckConstraint<'tcx>,
             [] candidate_step: rustc_middle::traits::query::CandidateStep<'tcx>,
             [] autoderef_bad_ty: rustc_middle::traits::query::MethodAutoderefBadTy<'tcx>,
-            [] canonical_goal_evaluation:
-                rustc_type_ir::solve::inspect::CanonicalGoalEvaluationStep<
-                    rustc_middle::ty::TyCtxt<'tcx>
-                >,
             [] query_region_constraints: rustc_middle::infer::canonical::QueryRegionConstraints<'tcx>,
             [] type_op_subtype:
                 rustc_middle::infer::canonical::Canonical<'tcx,
diff --git a/compiler/rustc_middle/src/ty/context.rs b/compiler/rustc_middle/src/ty/context.rs
index fd41668ae44c8..7509a7584f6db 100644
--- a/compiler/rustc_middle/src/ty/context.rs
+++ b/compiler/rustc_middle/src/ty/context.rs
@@ -112,8 +112,6 @@ impl<'tcx> Interner for TyCtxt<'tcx> {
         self.mk_predefined_opaques_in_body(data)
     }
     type DefiningOpaqueTypes = &'tcx ty::List<LocalDefId>;
-    type CanonicalGoalEvaluationStepRef =
-        &'tcx solve::inspect::CanonicalGoalEvaluationStep<TyCtxt<'tcx>>;
     type CanonicalVars = CanonicalVarInfos<'tcx>;
     fn mk_canonical_var_infos(self, infos: &[ty::CanonicalVarInfo<'tcx>]) -> Self::CanonicalVars {
         self.mk_canonical_var_infos(infos)
     }
@@ -282,13 +280,6 @@ impl<'tcx> Interner for TyCtxt<'tcx> {
         self.debug_assert_args_compatible(def_id, args);
     }
 
-    fn intern_canonical_goal_evaluation_step(
-        self,
-        step: solve::inspect::CanonicalGoalEvaluationStep<TyCtxt<'tcx>>,
-    ) -> &'tcx solve::inspect::CanonicalGoalEvaluationStep<TyCtxt<'tcx>> {
-        self.arena.alloc(step)
-    }
-
     fn mk_type_list_from_iter<I, T>(self, args: I) -> T::Output
     where
         I: Iterator<Item = T>,
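The two hunks above remove the arena-interning hook: `CanonicalGoalEvaluationStep` is no longer allocated in the `'tcx` arena and handed out as a cheap `Copy` reference, because the proof tree now owns its final revision by value (see the `rustc_type_ir` changes below). A minimal sketch of that trade-off, using invented stand-in types (`Arena`, `EvaluationStep`, and `EvaluationKind` are illustrative, not rustc's):

```rust
#[derive(Debug, Clone)]
struct EvaluationStep {
    revisions: Vec<String>, // stand-in for the recorded solver data
}

// Before this change: steps were interned, and caches stored a small handle.
#[derive(Default)]
struct Arena {
    steps: Vec<EvaluationStep>,
}

impl Arena {
    // Loosely analogous to the removed `intern_canonical_goal_evaluation_step`.
    fn intern(&mut self, step: EvaluationStep) -> usize {
        self.steps.push(step);
        self.steps.len() - 1
    }
}

// After this change: the final revision is owned by the proof tree itself,
// so no interner hook is needed and the global cache stores only results.
enum EvaluationKind {
    Overflow,
    Evaluation { final_revision: EvaluationStep },
}

fn main() {
    let mut arena = Arena::default();
    let handle = arena.intern(EvaluationStep { revisions: vec!["rev0".into()] });
    println!("interned handle: {handle}");

    let owned = EvaluationKind::Evaluation {
        final_revision: EvaluationStep { revisions: vec!["rev0".into()] },
    };
    if let EvaluationKind::Evaluation { final_revision } = owned {
        println!("owned revisions: {}", final_revision.revisions.len());
    }
}
```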
diff --git a/compiler/rustc_next_trait_solver/src/solve/inspect/build.rs b/compiler/rustc_next_trait_solver/src/solve/inspect/build.rs
index 3e266ddac71fd..28953528f6654 100644
--- a/compiler/rustc_next_trait_solver/src/solve/inspect/build.rs
+++ b/compiler/rustc_next_trait_solver/src/solve/inspect/build.rs
@@ -5,10 +5,9 @@
 //! see the comment on [ProofTreeBuilder].
 
 use std::marker::PhantomData;
-use std::mem;
 
 use rustc_type_ir::inherent::*;
-use rustc_type_ir::{self as ty, search_graph, Interner};
+use rustc_type_ir::{self as ty, Interner};
 
 use crate::delegate::SolverDelegate;
 use crate::solve::eval_ctxt::canonical;
@@ -96,33 +95,11 @@ impl<I: Interner> WipGoalEvaluation<I> {
     }
 }
 
-#[derive(derivative::Derivative)]
-#[derivative(PartialEq(bound = ""), Eq(bound = ""))]
-pub(in crate::solve) enum WipCanonicalGoalEvaluationKind<I: Interner> {
-    Overflow,
-    CycleInStack,
-    ProvisionalCacheHit,
-    Interned { final_revision: I::CanonicalGoalEvaluationStepRef },
-}
-
-impl<I: Interner> std::fmt::Debug for WipCanonicalGoalEvaluationKind<I> {
-    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        match self {
-            Self::Overflow => write!(f, "Overflow"),
-            Self::CycleInStack => write!(f, "CycleInStack"),
-            Self::ProvisionalCacheHit => write!(f, "ProvisionalCacheHit"),
-            Self::Interned { final_revision: _ } => {
-                f.debug_struct("Interned").finish_non_exhaustive()
-            }
-        }
-    }
-}
-
 #[derive(derivative::Derivative)]
 #[derivative(PartialEq(bound = ""), Eq(bound = ""), Debug(bound = ""))]
 struct WipCanonicalGoalEvaluation<I: Interner> {
     goal: CanonicalInput<I>,
-    kind: Option<WipCanonicalGoalEvaluationKind<I>>,
+    encountered_overflow: bool,
     /// Only used for uncached goals. After we finished evaluating
     /// the goal, this is interned and moved into `kind`.
     final_revision: Option<WipCanonicalGoalEvaluationStep<I>>,
@@ -131,25 +108,17 @@
 
 impl<I: Interner> WipCanonicalGoalEvaluation<I> {
     fn finalize(self) -> inspect::CanonicalGoalEvaluation<I> {
-        // We've already interned the final revision in
-        // `fn finalize_canonical_goal_evaluation`.
-        assert!(self.final_revision.is_none());
-        let kind = match self.kind.unwrap() {
-            WipCanonicalGoalEvaluationKind::Overflow => {
+        inspect::CanonicalGoalEvaluation {
+            goal: self.goal,
+            kind: if self.encountered_overflow {
+                assert!(self.final_revision.is_none());
                 inspect::CanonicalGoalEvaluationKind::Overflow
-            }
-            WipCanonicalGoalEvaluationKind::CycleInStack => {
-                inspect::CanonicalGoalEvaluationKind::CycleInStack
-            }
-            WipCanonicalGoalEvaluationKind::ProvisionalCacheHit => {
-                inspect::CanonicalGoalEvaluationKind::ProvisionalCacheHit
-            }
-            WipCanonicalGoalEvaluationKind::Interned { final_revision } => {
+            } else {
+                let final_revision = self.final_revision.unwrap().finalize();
                 inspect::CanonicalGoalEvaluationKind::Evaluation { final_revision }
-            }
-        };
-
-        inspect::CanonicalGoalEvaluation { goal: self.goal, kind, result: self.result.unwrap() }
+            },
+            result: self.result.unwrap(),
+        }
     }
 }
 
@@ -315,7 +284,7 @@ impl<D: SolverDelegate<Interner = I>, I: Interner> ProofTreeBuilder<D> {
     ) -> ProofTreeBuilder<D> {
         self.nested(|| WipCanonicalGoalEvaluation {
             goal,
-            kind: None,
+            encountered_overflow: false,
             final_revision: None,
             result: None,
         })
@@ -336,11 +305,11 @@ impl<D: SolverDelegate<Interner = I>, I: Interner> ProofTreeBuilder<D> {
         }
     }
 
-    pub fn canonical_goal_evaluation_kind(&mut self, kind: WipCanonicalGoalEvaluationKind<I>) {
+    pub fn canonical_goal_evaluation_overflow(&mut self) {
         if let Some(this) = self.as_mut() {
             match this {
                 DebugSolver::CanonicalGoalEvaluation(canonical_goal_evaluation) => {
-                    assert_eq!(canonical_goal_evaluation.kind.replace(kind), None);
+                    canonical_goal_evaluation.encountered_overflow = true;
                 }
                 _ => unreachable!(),
             };
         }
     }
@@ -554,51 +523,3 @@ impl<D: SolverDelegate<Interner = I>, I: Interner> ProofTreeBuilder<D> {
         }
     }
 }
-
-impl<D, I> search_graph::ProofTreeBuilder<I> for ProofTreeBuilder<D>
-where
-    D: SolverDelegate<Interner = I>,
-    I: Interner,
-{
-    fn try_apply_proof_tree(
-        &mut self,
-        proof_tree: Option<I::CanonicalGoalEvaluationStepRef>,
-    ) -> bool {
-        if !self.is_noop() {
-            if let Some(final_revision) = proof_tree {
-                let kind = WipCanonicalGoalEvaluationKind::Interned { final_revision };
-                self.canonical_goal_evaluation_kind(kind);
-                true
-            } else {
-                false
-            }
-        } else {
-            true
-        }
-    }
-
-    fn on_provisional_cache_hit(&mut self) {
-        self.canonical_goal_evaluation_kind(WipCanonicalGoalEvaluationKind::ProvisionalCacheHit);
-    }
-
-    fn on_cycle_in_stack(&mut self) {
-        self.canonical_goal_evaluation_kind(WipCanonicalGoalEvaluationKind::CycleInStack);
-    }
-
-    fn finalize_canonical_goal_evaluation(
-        &mut self,
-        tcx: I,
-    ) -> Option<I::CanonicalGoalEvaluationStepRef> {
-        self.as_mut().map(|this| match this {
-            DebugSolver::CanonicalGoalEvaluation(evaluation) => {
-                let final_revision = mem::take(&mut evaluation.final_revision).unwrap();
-                let final_revision =
-                    tcx.intern_canonical_goal_evaluation_step(final_revision.finalize());
-                let kind = WipCanonicalGoalEvaluationKind::Interned { final_revision };
-                assert_eq!(evaluation.kind.replace(kind), None);
-                final_revision
-            }
-            _ => unreachable!(),
-        })
-    }
-}
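After this change the proof tree builder tracks only an `encountered_overflow` flag plus the optional final revision, and `finalize` relies on the invariant that exactly one of the two is meaningful. A compact, self-contained model of that invariant (hypothetical names, with strings standing in for the real revision data):

```rust
#[derive(Debug)]
enum Kind {
    Overflow,
    Evaluation { final_revision: String },
}

#[derive(Default)]
struct WipEvaluation {
    encountered_overflow: bool,
    final_revision: Option<String>,
    result: Option<&'static str>,
}

impl WipEvaluation {
    fn finalize(self) -> (Kind, &'static str) {
        let kind = if self.encountered_overflow {
            // Overflow aborts evaluation, so no revision may have been recorded.
            assert!(self.final_revision.is_none());
            Kind::Overflow
        } else {
            Kind::Evaluation { final_revision: self.final_revision.unwrap() }
        };
        (kind, self.result.unwrap())
    }
}

fn main() {
    let wip = WipEvaluation {
        encountered_overflow: false,
        final_revision: Some("rev3".to_string()),
        result: Some("yes"),
    };
    let (kind, result) = wip.finalize();
    println!("{kind:?} => {result}");
}
```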
diff --git a/compiler/rustc_next_trait_solver/src/solve/search_graph.rs b/compiler/rustc_next_trait_solver/src/solve/search_graph.rs
index fe053a506e712..0994d0e3b3d88 100644
--- a/compiler/rustc_next_trait_solver/src/solve/search_graph.rs
+++ b/compiler/rustc_next_trait_solver/src/solve/search_graph.rs
@@ -1,3 +1,4 @@
+use std::convert::Infallible;
 use std::marker::PhantomData;
 
 use rustc_type_ir::inherent::*;
@@ -5,7 +6,7 @@ use rustc_type_ir::search_graph::{self, CycleKind, UsageKind};
 use rustc_type_ir::solve::{CanonicalInput, Certainty, QueryResult};
 use rustc_type_ir::Interner;
 
-use super::inspect::{self, ProofTreeBuilder};
+use super::inspect::ProofTreeBuilder;
 use super::FIXPOINT_STEP_LIMIT;
 use crate::delegate::SolverDelegate;
@@ -22,9 +23,20 @@ where
 {
     type Cx = D::Interner;
 
+    type ValidationScope = Infallible;
+    fn enter_validation_scope(
+        _cx: Self::Cx,
+        _input: <Self::Cx as search_graph::Cx>::Input,
+    ) -> Option<Self::ValidationScope> {
+        None
+    }
+
     const FIXPOINT_STEP_LIMIT: usize = FIXPOINT_STEP_LIMIT;
 
     type ProofTreeBuilder = ProofTreeBuilder<D>;
+    fn inspect_is_noop(inspect: &mut Self::ProofTreeBuilder) -> bool {
+        inspect.is_noop()
+    }
 
     fn recursion_limit(cx: I) -> usize {
         cx.recursion_limit()
@@ -68,7 +80,7 @@ where
         inspect: &mut ProofTreeBuilder<D>,
         input: CanonicalInput<I>,
     ) -> QueryResult<I> {
-        inspect.canonical_goal_evaluation_kind(inspect::WipCanonicalGoalEvaluationKind::Overflow);
+        inspect.canonical_goal_evaluation_overflow();
         response_no_constraints(cx, input, Certainty::overflow(true))
     }
diff --git a/compiler/rustc_trait_selection/src/solve/inspect/analyse.rs b/compiler/rustc_trait_selection/src/solve/inspect/analyse.rs
index e8de8457440ff..4e4022830d46e 100644
--- a/compiler/rustc_trait_selection/src/solve/inspect/analyse.rs
+++ b/compiler/rustc_trait_selection/src/solve/inspect/analyse.rs
@@ -332,13 +332,9 @@ impl<'a, 'tcx> InspectGoal<'a, 'tcx> {
     pub fn candidates(&'a self) -> Vec<InspectCandidate<'a, 'tcx>> {
         let mut candidates = vec![];
-        let last_eval_step = match self.evaluation_kind {
-            inspect::CanonicalGoalEvaluationKind::Overflow
-            | inspect::CanonicalGoalEvaluationKind::CycleInStack
-            | inspect::CanonicalGoalEvaluationKind::ProvisionalCacheHit => {
-                warn!("unexpected root evaluation: {:?}", self.evaluation_kind);
-                return vec![];
-            }
+        let last_eval_step = match &self.evaluation_kind {
+            // An annoying edge case in case the recursion limit is 0.
+            inspect::CanonicalGoalEvaluationKind::Overflow => return vec![],
             inspect::CanonicalGoalEvaluationKind::Evaluation { final_revision } => final_revision,
         };
diff --git a/compiler/rustc_type_ir/src/binder.rs b/compiler/rustc_type_ir/src/binder.rs
index 17b35a2807a2d..a76c138393545 100644
--- a/compiler/rustc_type_ir/src/binder.rs
+++ b/compiler/rustc_type_ir/src/binder.rs
@@ -7,7 +7,7 @@ use std::ops::{ControlFlow, Deref};
 use rustc_macros::{HashStable_NoContext, TyDecodable, TyEncodable};
 #[cfg(feature = "nightly")]
 use rustc_serialize::Decodable;
-use tracing::debug;
+use tracing::instrument;
 
 use crate::data_structures::SsoHashSet;
 use crate::fold::{FallibleTypeFolder, TypeFoldable, TypeFolder, TypeSuperFoldable};
@@ -836,28 +836,20 @@ impl<'a, I: Interner> ArgFolder<'a, I> {
     /// As indicated in the diagram, here the same type `&'a i32` is instantiated once, but in the
     /// first case we do not increase the De Bruijn index and in the second case we do. The reason
     /// is that only in the second case have we passed through a fn binder.
+    #[instrument(level = "trace", skip(self), fields(binders_passed = self.binders_passed), ret)]
     fn shift_vars_through_binders<T: TypeFoldable<I>>(&self, val: T) -> T {
-        debug!(
-            "shift_vars(val={:?}, binders_passed={:?}, has_escaping_bound_vars={:?})",
-            val,
-            self.binders_passed,
-            val.has_escaping_bound_vars()
-        );
-
         if self.binders_passed == 0 || !val.has_escaping_bound_vars() {
-            return val;
+            val
+        } else {
+            ty::fold::shift_vars(self.cx, val, self.binders_passed)
         }
-
-        let result = ty::fold::shift_vars(TypeFolder::cx(self), val, self.binders_passed);
-        debug!("shift_vars: shifted result = {:?}", result);
-
-        result
     }
 
     fn shift_region_through_binders(&self, region: I::Region) -> I::Region {
         if self.binders_passed == 0 || !region.has_escaping_bound_vars() {
-            return region;
+            region
+        } else {
+            ty::fold::shift_region(self.cx, region, self.binders_passed)
         }
-
-        ty::fold::shift_region(self.cx, region, self.binders_passed)
     }
 }
diff --git a/compiler/rustc_type_ir/src/fold.rs b/compiler/rustc_type_ir/src/fold.rs
index a4d8dafb246e5..d9aa678bb7471 100644
--- a/compiler/rustc_type_ir/src/fold.rs
+++ b/compiler/rustc_type_ir/src/fold.rs
@@ -47,7 +47,7 @@ use rustc_index::{Idx, IndexVec};
 use std::mem;
 
-use tracing::debug;
+use tracing::instrument;
 
 use crate::data_structures::Lrc;
 use crate::inherent::*;
@@ -414,15 +414,14 @@ pub fn shift_region<I: Interner>(cx: I, region: I::Region, amount: u32) -> I::Re
     }
 }
 
+#[instrument(level = "trace", skip(cx), ret)]
pub fn shift_vars<I: Interner, T>(cx: I, value: T, amount: u32) -> T
 where
     T: TypeFoldable<I>,
 {
-    debug!("shift_vars(value={:?}, amount={})", value, amount);
-
     if amount == 0 || !value.has_escaping_bound_vars() {
-        return value;
+        value
+    } else {
+        value.fold_with(&mut Shifter::new(cx, amount))
     }
-
-    value.fold_with(&mut Shifter::new(cx, amount))
 }
diff --git a/compiler/rustc_type_ir/src/interner.rs b/compiler/rustc_type_ir/src/interner.rs
index 14ebbb12fe2f0..d8336e3cd6d64 100644
--- a/compiler/rustc_type_ir/src/interner.rs
+++ b/compiler/rustc_type_ir/src/interner.rs
@@ -11,7 +11,6 @@ use crate::ir_print::IrPrint;
 use crate::lang_items::TraitSolverLangItem;
 use crate::relate::Relate;
 use crate::search_graph;
-use crate::solve::inspect::CanonicalGoalEvaluationStep;
 use crate::solve::{
     CanonicalInput, ExternalConstraintsData, PredefinedOpaquesData, QueryResult, SolverMode,
 };
@@ -63,11 +62,6 @@ pub trait Interner:
         + Eq
         + TypeVisitable<Self>
         + SliceLike<Item = Self::LocalDefId>;
-    type CanonicalGoalEvaluationStepRef: Copy
-        + Debug
-        + Hash
-        + Eq
-        + Deref<Target = CanonicalGoalEvaluationStep<Self>>;
 
     type CanonicalVars: Copy
         + Debug
@@ -175,11 +169,6 @@ pub trait Interner:
 
     fn debug_assert_args_compatible(self, def_id: Self::DefId, args: Self::GenericArgs);
 
-    fn intern_canonical_goal_evaluation_step(
-        self,
-        step: CanonicalGoalEvaluationStep<Self>,
-    ) -> Self::CanonicalGoalEvaluationStepRef;
-
     fn mk_type_list_from_iter<I, T>(self, args: I) -> T::Output
     where
         I: Iterator<Item = T>,
@@ -388,7 +377,6 @@ impl<T, R, E> CollectAndApply<T, R> for Result<T, E> {
 }
 
 impl<I: Interner> search_graph::Cx for I {
-    type ProofTree = Option<I::CanonicalGoalEvaluationStepRef>;
     type Input = CanonicalInput<I>;
     type Result = QueryResult<I>;
diff --git a/compiler/rustc_type_ir/src/search_graph/global_cache.rs b/compiler/rustc_type_ir/src/search_graph/global_cache.rs
index 5ccda931f9c5f..796a0ab807646 100644
--- a/compiler/rustc_type_ir/src/search_graph/global_cache.rs
+++ b/compiler/rustc_type_ir/src/search_graph/global_cache.rs
@@ -3,15 +3,8 @@ use rustc_index::IndexVec;
 use super::{AvailableDepth, Cx, StackDepth, StackEntry};
 use crate::data_structures::{HashMap, HashSet};
 
-#[derive(derivative::Derivative)]
-#[derivative(Debug(bound = ""), Clone(bound = ""), Copy(bound = ""))]
-struct QueryData<X: Cx> {
-    result: X::Result,
-    proof_tree: X::ProofTree,
-}
-
 struct Success<X: Cx> {
-    data: X::Tracked<QueryData<X>>,
+    result: X::Tracked<X::Result>,
     additional_depth: usize,
 }
 
@@ -29,14 +22,13 @@ struct CacheEntry<X: Cx> {
     /// See the doc comment of `StackEntry::cycle_participants` for more
     /// details.
     nested_goals: HashSet<X::Input>,
-    with_overflow: HashMap<usize, X::Tracked<QueryData<X>>>,
+    with_overflow: HashMap<usize, X::Tracked<X::Result>>,
 }
 
 #[derive(derivative::Derivative)]
 #[derivative(Debug(bound = ""))]
 pub(super) struct CacheData<'a, X: Cx> {
     pub(super) result: X::Result,
-    pub(super) proof_tree: X::ProofTree,
     pub(super) additional_depth: usize,
     pub(super) encountered_overflow: bool,
     // FIXME: This is currently unused, but impacts the design
@@ -58,20 +50,19 @@ impl<X: Cx> GlobalCache<X> {
         input: X::Input,
 
         result: X::Result,
-        proof_tree: X::ProofTree,
         dep_node: X::DepNodeIndex,
 
         additional_depth: usize,
         encountered_overflow: bool,
         nested_goals: &HashSet<X::Input>,
     ) {
-        let data = cx.mk_tracked(QueryData { result, proof_tree }, dep_node);
+        let result = cx.mk_tracked(result, dep_node);
         let entry = self.map.entry(input).or_default();
         entry.nested_goals.extend(nested_goals);
         if encountered_overflow {
-            entry.with_overflow.insert(additional_depth, data);
+            entry.with_overflow.insert(additional_depth, result);
         } else {
-            entry.success = Some(Success { data, additional_depth });
+            entry.success = Some(Success { result, additional_depth });
         }
     }
 
@@ -93,10 +84,8 @@ impl<X: Cx> GlobalCache<X> {
 
         if let Some(ref success) = entry.success {
             if available_depth.cache_entry_is_applicable(success.additional_depth) {
-                let QueryData { result, proof_tree } = cx.get_tracked(&success.data);
                 return Some(CacheData {
-                    result,
-                    proof_tree,
+                    result: cx.get_tracked(&success.result),
                     additional_depth: success.additional_depth,
                     encountered_overflow: false,
                     nested_goals: &entry.nested_goals,
@@ -104,15 +93,11 @@
                 });
             }
         }
 
-        entry.with_overflow.get(&available_depth.0).map(|e| {
-            let QueryData { result, proof_tree } = cx.get_tracked(e);
-            CacheData {
-                result,
-                proof_tree,
-                additional_depth: available_depth.0,
-                encountered_overflow: true,
-                nested_goals: &entry.nested_goals,
-            }
+        entry.with_overflow.get(&available_depth.0).map(|e| CacheData {
+            result: cx.get_tracked(e),
+            additional_depth: available_depth.0,
+            encountered_overflow: true,
+            nested_goals: &entry.nested_goals,
         })
     }
 }
diff --git a/compiler/rustc_type_ir/src/search_graph/mod.rs b/compiler/rustc_type_ir/src/search_graph/mod.rs
index c2204becdfd71..171a50bb3b1fe 100644
--- a/compiler/rustc_type_ir/src/search_graph/mod.rs
+++ b/compiler/rustc_type_ir/src/search_graph/mod.rs
@@ -21,7 +21,6 @@ mod validate;
 /// about `Input` and `Result` as they are implementation details
 /// of the search graph.
 pub trait Cx: Copy {
-    type ProofTree: Debug + Copy;
     type Input: Debug + Eq + Hash + Copy;
     type Result: Debug + Eq + Hash + Copy;
 
@@ -42,17 +41,25 @@ pub trait Cx: Copy {
     ) -> R;
 }
 
-pub trait ProofTreeBuilder<X: Cx> {
-    fn try_apply_proof_tree(&mut self, proof_tree: X::ProofTree) -> bool;
-    fn on_provisional_cache_hit(&mut self);
-    fn on_cycle_in_stack(&mut self);
-    fn finalize_canonical_goal_evaluation(&mut self, cx: X) -> X::ProofTree;
-}
-
 pub trait Delegate {
     type Cx: Cx;
+    type ValidationScope;
+    /// Returning `Some` disables the global cache for the current goal.
+    ///
+    /// The `ValidationScope` is used when fuzzing the search graph to track
+    /// for which goals the global cache has been disabled. This is necessary
+    /// as we may otherwise ignore the global cache entry for some goal `G`
+    /// only to later use it, failing to detect a cycle goal and potentially
+    /// changing the result.
+    fn enter_validation_scope(
+        cx: Self::Cx,
+        input: <Self::Cx as Cx>::Input,
+    ) -> Option<Self::ValidationScope>;
+
     const FIXPOINT_STEP_LIMIT: usize;
 
-    type ProofTreeBuilder: ProofTreeBuilder<Self::Cx>;
+
+    type ProofTreeBuilder;
+    fn inspect_is_noop(inspect: &mut Self::ProofTreeBuilder) -> bool;
 
     fn recursion_limit(cx: Self::Cx) -> usize;
@@ -98,6 +105,7 @@ pub enum UsageKind {
 impl UsageKind {
     fn merge(self, other: Self) -> Self {
         match (self, other) {
+            (UsageKind::Mixed, _) | (_, UsageKind::Mixed) => UsageKind::Mixed,
             (UsageKind::Single(lhs), UsageKind::Single(rhs)) => {
                 if lhs == rhs {
                     UsageKind::Single(lhs)
@@ -105,13 +113,15 @@ impl UsageKind {
                     UsageKind::Mixed
                 }
             }
-            (UsageKind::Mixed, UsageKind::Mixed)
-            | (UsageKind::Mixed, UsageKind::Single(_))
-            | (UsageKind::Single(_), UsageKind::Mixed) => UsageKind::Mixed,
         }
     }
 }
 
+enum StepResult<X: Cx> {
+    Done(StackEntry<X>, X::Result),
+    HasChanged,
+}
+
 #[derive(Debug, Clone, Copy)]
 struct AvailableDepth(usize);
 impl AvailableDepth {
@@ -214,8 +224,8 @@ struct DetachedEntry<X: Cx> {
     result: X::Result,
 }
 
-/// Stores the stack depth of a currently evaluated goal *and* already
-/// computed results for goals which depend on other goals still on the stack.
+/// Stores the provisional result of already computed results for goals which
+/// depend on other goals still on the stack.
 ///
 /// The provisional result may depend on whether the stack above it is inductive
 /// or coinductive. Because of this, we store separate provisional results for
@@ -229,16 +239,13 @@ struct DetachedEntry<X: Cx> {
 #[derive(derivative::Derivative)]
 #[derivative(Default(bound = ""))]
 struct ProvisionalCacheEntry<X: Cx> {
-    stack_depth: Option<StackDepth>,
     with_inductive_stack: Option<DetachedEntry<X>>,
     with_coinductive_stack: Option<DetachedEntry<X>>,
 }
 
 impl<X: Cx> ProvisionalCacheEntry<X> {
     fn is_empty(&self) -> bool {
-        self.stack_depth.is_none()
-            && self.with_inductive_stack.is_none()
-            && self.with_coinductive_stack.is_none()
+        self.with_inductive_stack.is_none() && self.with_coinductive_stack.is_none()
     }
 }
@@ -296,17 +303,7 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> {
     // We update both the head of this cycle to rerun its evaluation until
     // we reach a fixpoint and all other cycle participants to make sure that
     // their result does not get moved to the global cache.
-    fn tag_cycle_participants(
-        stack: &mut IndexVec<StackDepth, StackEntry<X>>,
-        usage_kind: Option<UsageKind>,
-        head: StackDepth,
-    ) {
-        if let Some(usage_kind) = usage_kind {
-            stack[head].has_been_used =
-                Some(stack[head].has_been_used.map_or(usage_kind, |prev| prev.merge(usage_kind)));
-        }
-        debug_assert!(stack[head].has_been_used.is_some());
-
+    fn tag_cycle_participants(stack: &mut IndexVec<StackDepth, StackEntry<X>>, head: StackDepth) {
         // The current root of these cycles. Note that this may not be the final
         // root in case a later goal depends on a goal higher up the stack.
         let mut current_root = head;
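For reference, here is the reordered `UsageKind::merge` from the hunk above, extracted into a runnable form (with `CycleKind` inlined as a two-variant enum): because the match stays exhaustive and the `Mixed` arms are listed first, the remaining arm can bind two `Single` payloads without a catch-all, and the observable behavior is unchanged.

```rust
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum CycleKind {
    Coinductive,
    Inductive,
}

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum UsageKind {
    Single(CycleKind),
    Mixed,
}

impl UsageKind {
    fn merge(self, other: Self) -> Self {
        match (self, other) {
            // Anything merged with `Mixed` stays `Mixed`.
            (UsageKind::Mixed, _) | (_, UsageKind::Mixed) => UsageKind::Mixed,
            // Two `Single` usages only stay `Single` if they agree.
            (UsageKind::Single(lhs), UsageKind::Single(rhs)) => {
                if lhs == rhs { UsageKind::Single(lhs) } else { UsageKind::Mixed }
            }
        }
    }
}

fn main() {
    let a = UsageKind::Single(CycleKind::Coinductive);
    let b = UsageKind::Single(CycleKind::Inductive);
    assert_eq!(a.merge(a), a);
    assert_eq!(a.merge(b), UsageKind::Mixed);
    assert_eq!(UsageKind::Mixed.merge(a), UsageKind::Mixed);
    println!("merge behaves as before");
}
```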
@@ -363,75 +360,42 @@
             return D::on_stack_overflow(cx, inspect, input);
         };
 
-        if let Some(result) = self.lookup_global_cache(cx, input, available_depth, inspect) {
+        let validate_cache = if !D::inspect_is_noop(inspect) {
+            None
+        } else if let Some(scope) = D::enter_validation_scope(cx, input) {
+            // When validating the global cache we need to track the goals for which the
+            // global cache has been disabled as it may otherwise change the result for
+            // cyclic goals. We don't care about goals which are not on the current stack
+            // so it's fine to drop their scope eagerly.
+            self.lookup_global_cache_untracked(cx, input, available_depth)
+                .inspect(|expected| debug!(?expected, "validate cache entry"))
+                .map(|r| (scope, r))
+        } else if let Some(result) = self.lookup_global_cache(cx, input, available_depth) {
+            return result;
+        } else {
+            None
+        };
+
+        if let Some(result) = self.lookup_provisional_cache(cx, input) {
             return result;
         }
 
-        // Check whether the goal is in the provisional cache.
-        // The provisional result may rely on the path to its cycle roots,
-        // so we have to check the path of the current goal matches that of
-        // the cache entry.
-        let cache_entry = self.provisional_cache.entry(input).or_default();
-        if let Some(entry) = cache_entry
-            .with_coinductive_stack
-            .as_ref()
-            .filter(|p| Self::stack_coinductive_from(cx, &self.stack, p.head))
-            .or_else(|| {
-                cache_entry
-                    .with_inductive_stack
-                    .as_ref()
-                    .filter(|p| !Self::stack_coinductive_from(cx, &self.stack, p.head))
-            })
-        {
-            debug!("provisional cache hit");
-            // We have a nested goal which is already in the provisional cache, use
-            // its result. We do not provide any usage kind as that should have been
-            // already set correctly while computing the cache entry.
-            inspect.on_provisional_cache_hit();
-            Self::tag_cycle_participants(&mut self.stack, None, entry.head);
-            return entry.result;
-        } else if let Some(stack_depth) = cache_entry.stack_depth {
-            debug!("encountered cycle with depth {stack_depth:?}");
-            // We have a nested goal which directly relies on a goal deeper in the stack.
-            //
-            // We start by tagging all cycle participants, as that's necessary for caching.
-            //
-            // Finally we can return either the provisional response or the initial response
-            // in case we're in the first fixpoint iteration for this goal.
-            inspect.on_cycle_in_stack();
-
-            let is_coinductive_cycle = Self::stack_coinductive_from(cx, &self.stack, stack_depth);
-            let cycle_kind =
-                if is_coinductive_cycle { CycleKind::Coinductive } else { CycleKind::Inductive };
-            Self::tag_cycle_participants(
-                &mut self.stack,
-                Some(UsageKind::Single(cycle_kind)),
-                stack_depth,
-            );
-
-            // Return the provisional result or, if we're in the first iteration,
-            // start with no constraints.
-            return if let Some(result) = self.stack[stack_depth].provisional_result {
-                result
-            } else {
-                D::initial_provisional_result(cx, cycle_kind, input)
-            };
-        } else {
-            // No entry, we push this goal on the stack and try to prove it.
-            let depth = self.stack.next_index();
-            let entry = StackEntry {
-                input,
-                available_depth,
-                reached_depth: depth,
-                non_root_cycle_participant: None,
-                encountered_overflow: false,
-                has_been_used: None,
-                nested_goals: Default::default(),
-                provisional_result: None,
-            };
-            assert_eq!(self.stack.push(entry), depth);
-            cache_entry.stack_depth = Some(depth);
+        if let Some(result) = self.check_cycle_on_stack(cx, input) {
+            return result;
+        }
+
+        let depth = self.stack.next_index();
+        let entry = StackEntry {
+            input,
+            available_depth,
+            reached_depth: depth,
+            non_root_cycle_participant: None,
+            encountered_overflow: false,
+            has_been_used: None,
+            nested_goals: Default::default(),
+            provisional_result: None,
         };
+        assert_eq!(self.stack.push(entry), depth);
 
         // This is for global caching, so we properly track query dependencies.
         // Everything that affects the `result` should be performed within this
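The `validate_cache` branch above only runs when the delegate opts in via `enter_validation_scope`. The production delegate sets `ValidationScope = Infallible`, so the `Option` it returns is statically always `None` and the validation path is dead code. A sketch of that pattern in isolation (trait and names simplified for illustration, not the solver's real API):

```rust
use std::convert::Infallible;

trait Delegate {
    type ValidationScope;
    fn enter_validation_scope(input: u32) -> Option<Self::ValidationScope>;
}

struct ProductionDelegate;
impl Delegate for ProductionDelegate {
    type ValidationScope = Infallible;
    fn enter_validation_scope(_input: u32) -> Option<Infallible> {
        None // an `Infallible` value can never be constructed
    }
}

fn evaluate<D: Delegate>(input: u32) -> &'static str {
    if let Some(_scope) = D::enter_validation_scope(input) {
        // Keep the scope alive while re-running the goal, then compare
        // the recomputed result against the cached one.
        "validated against global cache"
    } else {
        "used global cache directly"
    }
}

fn main() {
    println!("{}", evaluate::<ProductionDelegate>(42));
}
```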
@@ -443,7 +407,7 @@
             for _ in 0..D::FIXPOINT_STEP_LIMIT {
                 match self.fixpoint_step_in_task(cx, input, inspect, &mut prove_goal) {
                     StepResult::Done(final_entry, result) => return (final_entry, result),
-                    StepResult::HasChanged => debug!("fixpoint changed provisional results"),
+                    StepResult::HasChanged => {}
                 }
             }
 
@@ -454,53 +418,46 @@
             (current_entry, result)
         });
 
-        let proof_tree = inspect.finalize_canonical_goal_evaluation(cx);
-
         self.update_parent_goal(final_entry.reached_depth, final_entry.encountered_overflow);
 
         // We're now done with this goal. In case this goal is involved in a larger cycle
         // do not remove it from the provisional cache and update its provisional result.
         // We only add the root of cycles to the global cache.
         if let Some(head) = final_entry.non_root_cycle_participant {
+            debug_assert!(validate_cache.is_none());
             let coinductive_stack = Self::stack_coinductive_from(cx, &self.stack, head);
 
-            let entry = self.provisional_cache.get_mut(&input).unwrap();
-            entry.stack_depth = None;
+            let entry = self.provisional_cache.entry(input).or_default();
             if coinductive_stack {
                 entry.with_coinductive_stack = Some(DetachedEntry { head, result });
             } else {
                 entry.with_inductive_stack = Some(DetachedEntry { head, result });
             }
         } else {
-            // When encountering a cycle, both inductive and coinductive, we only
-            // move the root into the global cache. We also store all other cycle
-            // participants involved.
-            //
-            // We must not use the global cache entry of a root goal if a cycle
-            // participant is on the stack. This is necessary to prevent unstable
-            // results. See the comment of `StackEntry::nested_goals` for
-            // more details.
             self.provisional_cache.remove(&input);
-            let additional_depth = final_entry.reached_depth.as_usize() - self.stack.len();
-            cx.with_global_cache(self.mode, |cache| {
-                cache.insert(
-                    cx,
-                    input,
-                    result,
-                    proof_tree,
-                    dep_node,
-                    additional_depth,
-                    final_entry.encountered_overflow,
-                    &final_entry.nested_goals,
-                )
-            })
+            if let Some((_scope, expected)) = validate_cache {
+                // Do not try to move a goal into the cache again if we're testing
+                // the global cache.
+                assert_eq!(result, expected, "input={input:?}");
+            } else if D::inspect_is_noop(inspect) {
+                self.insert_global_cache(cx, input, final_entry, result, dep_node)
+            }
         }
 
-        self.check_invariants();
-
         result
     }
 
+    fn lookup_global_cache_untracked(
+        &self,
+        cx: X,
+        input: X::Input,
+        available_depth: AvailableDepth,
+    ) -> Option<X::Result> {
+        cx.with_global_cache(self.mode, |cache| {
+            cache.get(cx, input, &self.stack, available_depth).map(|c| c.result)
+        })
+    }
+
     /// Try to fetch a previously computed result from the global cache,
     /// making sure to only do so if it would match the result of reevaluating
     /// this goal.
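`lookup_global_cache`, `lookup_global_cache_untracked`, and `insert_global_cache` all work against the entry layout from `global_cache.rs`: one reusable success entry plus overflow results keyed by the exact depth that was available. A rough model of that lookup (field types are stand-ins; the depth check only roughly mirrors `AvailableDepth::cache_entry_is_applicable`):

```rust
use std::collections::HashMap;

#[derive(Default)]
struct CacheEntry {
    // corresponds to `Success { result, additional_depth }`
    success: Option<(String, usize)>,
    // results computed while hitting the step limit, keyed by available depth
    with_overflow: HashMap<usize, String>,
}

impl CacheEntry {
    // Returns the cached result plus whether it encountered overflow.
    fn get(&self, available_depth: usize) -> Option<(&String, bool)> {
        if let Some((result, additional_depth)) = &self.success {
            // A success entry is reusable whenever enough depth is available.
            if available_depth >= *additional_depth {
                return Some((result, false));
            }
        }
        // Overflow entries are only valid for the exact same available depth.
        self.with_overflow.get(&available_depth).map(|r| (r, true))
    }
}

fn main() {
    let mut entry = CacheEntry::default();
    entry.success = Some(("ok".to_string(), 5));
    entry.with_overflow.insert(2, "overflow result".to_string());

    assert_eq!(entry.get(7), Some((&"ok".to_string(), false)));
    assert_eq!(entry.get(2), Some((&"overflow result".to_string(), true)));
    assert_eq!(entry.get(3), None);
    println!("depth-keyed lookup works");
}
```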
@@ -509,43 +466,74 @@
         cx: X,
         input: X::Input,
         available_depth: AvailableDepth,
-        inspect: &mut D::ProofTreeBuilder,
     ) -> Option<X::Result> {
         cx.with_global_cache(self.mode, |cache| {
             let CacheData {
                 result,
-                proof_tree,
                 additional_depth,
                 encountered_overflow,
                 nested_goals: _, // FIXME: consider nested goals here.
             } = cache.get(cx, input, &self.stack, available_depth)?;
 
-            // If we're building a proof tree and the current cache entry does not
-            // contain a proof tree, we do not use the entry but instead recompute
-            // the goal. We simply overwrite the existing entry once we're done,
-            // caching the proof tree.
-            if !inspect.try_apply_proof_tree(proof_tree) {
-                return None;
-            }
-
             // Update the reached depth of the current goal to make sure
             // its state is the same regardless of whether we've used the
             // global cache or not.
             let reached_depth = self.stack.next_index().plus(additional_depth);
             self.update_parent_goal(reached_depth, encountered_overflow);
 
-            debug!("global cache hit");
+            debug!(?additional_depth, "global cache hit");
             Some(result)
         })
     }
-}
 
-enum StepResult<X: Cx> {
-    Done(StackEntry<X>, X::Result),
-    HasChanged,
-}
+    fn lookup_provisional_cache(&mut self, cx: X, input: X::Input) -> Option<X::Result> {
+        let cache_entry = self.provisional_cache.get(&input)?;
+        let &DetachedEntry { head, result } = cache_entry
+            .with_coinductive_stack
+            .as_ref()
+            .filter(|p| Self::stack_coinductive_from(cx, &self.stack, p.head))
+            .or_else(|| {
+                cache_entry
+                    .with_inductive_stack
+                    .as_ref()
+                    .filter(|p| !Self::stack_coinductive_from(cx, &self.stack, p.head))
+            })?;
+
+        debug!("provisional cache hit");
+        // We have a nested goal which is already in the provisional cache, use
+        // its result. We do not provide any usage kind as that should have been
+        // already set correctly while computing the cache entry.
+        Self::tag_cycle_participants(&mut self.stack, head);
+        debug_assert!(self.stack[head].has_been_used.is_some());
+        Some(result)
+    }
+
+    fn check_cycle_on_stack(&mut self, cx: X, input: X::Input) -> Option<X::Result> {
+        let (head, _stack_entry) = self.stack.iter_enumerated().find(|(_, e)| e.input == input)?;
+        debug!("encountered cycle with depth {head:?}");
+        // We have a nested goal which directly relies on a goal deeper in the stack.
+        //
+        // We start by tagging all cycle participants, as that's necessary for caching.
+        //
+        // Finally we can return either the provisional response or the initial response
+        // in case we're in the first fixpoint iteration for this goal.
+        let is_coinductive_cycle = Self::stack_coinductive_from(cx, &self.stack, head);
+        let cycle_kind =
+            if is_coinductive_cycle { CycleKind::Coinductive } else { CycleKind::Inductive };
+        let usage_kind = UsageKind::Single(cycle_kind);
+        self.stack[head].has_been_used =
+            Some(self.stack[head].has_been_used.map_or(usage_kind, |prev| prev.merge(usage_kind)));
+        Self::tag_cycle_participants(&mut self.stack, head);
+
+        // Return the provisional result or, if we're in the first iteration,
+        // start with no constraints.
+        if let Some(result) = self.stack[head].provisional_result {
+            Some(result)
+        } else {
+            Some(D::initial_provisional_result(cx, cycle_kind, input))
+        }
+    }
 
-impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> {
     /// When we encounter a coinductive cycle, we have to fetch the
     /// result of that cycle while we are still computing it. Because
     /// of this we continuously recompute the cycle until the result
@@ -593,13 +581,44 @@
         if D::reached_fixpoint(cx, usage_kind, input, stack_entry.provisional_result, result) {
             StepResult::Done(stack_entry, result)
         } else {
-            let depth = self.stack.push(StackEntry {
+            debug!(?result, "fixpoint changed provisional results");
+            self.stack.push(StackEntry {
                 has_been_used: None,
                 provisional_result: Some(result),
                 ..stack_entry
             });
-            debug_assert_eq!(self.provisional_cache[&input].stack_depth, Some(depth));
             StepResult::HasChanged
         }
     }
+
+    /// When encountering a cycle, both inductive and coinductive, we only
+    /// move the root into the global cache. We also store all other cycle
+    /// participants involved.
+    ///
+    /// We must not use the global cache entry of a root goal if a cycle
+    /// participant is on the stack. This is necessary to prevent unstable
+    /// results. See the comment of `StackEntry::nested_goals` for
+    /// more details.
+    fn insert_global_cache(
+        &mut self,
+        cx: X,
+        input: X::Input,
+        final_entry: StackEntry<X>,
+        result: X::Result,
+        dep_node: X::DepNodeIndex,
+    ) {
+        let additional_depth = final_entry.reached_depth.as_usize() - self.stack.len();
+        debug!(?final_entry, ?result, "insert global cache");
+        cx.with_global_cache(self.mode, |cache| {
+            cache.insert(
+                cx,
+                input,
+                result,
+                dep_node,
+                additional_depth,
+                final_entry.encountered_overflow,
+                &final_entry.nested_goals,
+            )
+        })
+    }
 }
diff --git a/compiler/rustc_type_ir/src/search_graph/validate.rs b/compiler/rustc_type_ir/src/search_graph/validate.rs
index 1ae806834ba7d..b4802811b0f57 100644
--- a/compiler/rustc_type_ir/src/search_graph/validate.rs
+++ b/compiler/rustc_type_ir/src/search_graph/validate.rs
@@ -23,8 +23,6 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> {
             ref nested_goals,
             provisional_result,
         } = *entry;
-        let cache_entry = provisional_cache.get(&entry.input).unwrap();
-        assert_eq!(cache_entry.stack_depth, Some(depth));
         if let Some(head) = non_root_cycle_participant {
             assert!(head < depth);
             assert!(nested_goals.is_empty());
@@ -45,19 +43,9 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> {
             }
         }
 
-        for (&input, entry) in &self.provisional_cache {
-            let ProvisionalCacheEntry { stack_depth, with_coinductive_stack, with_inductive_stack } =
-                entry;
-            assert!(
-                stack_depth.is_some()
-                    || with_coinductive_stack.is_some()
-                    || with_inductive_stack.is_some()
-            );
-
-            if let &Some(stack_depth) = stack_depth {
-                assert_eq!(stack[stack_depth].input, input);
-            }
-
+        for (&_input, entry) in &self.provisional_cache {
+            let ProvisionalCacheEntry { with_coinductive_stack, with_inductive_stack } = entry;
+            assert!(with_coinductive_stack.is_some() || with_inductive_stack.is_some());
             let check_detached = |detached_entry: &DetachedEntry<X>| {
                 let DetachedEntry { head, result: _ } = *detached_entry;
                 assert_ne!(stack[head].has_been_used, None);
diff --git a/compiler/rustc_type_ir/src/solve/inspect.rs b/compiler/rustc_type_ir/src/solve/inspect.rs
index 0733c730064b0..8c88b7cda6eb5 100644
--- a/compiler/rustc_type_ir/src/solve/inspect.rs
+++ b/compiler/rustc_type_ir/src/solve/inspect.rs
@@ -72,9 +72,7 @@ pub struct CanonicalGoalEvaluation<I: Interner> {
 #[derivative(PartialEq(bound = ""), Eq(bound = ""), Hash(bound = ""), Debug(bound = ""))]
 pub enum CanonicalGoalEvaluationKind<I: Interner> {
     Overflow,
-    CycleInStack,
-    ProvisionalCacheHit,
-    Evaluation { final_revision: I::CanonicalGoalEvaluationStepRef },
+    Evaluation { final_revision: CanonicalGoalEvaluationStep<I> },
 }
 
 #[derive(derivative::Derivative)]
diff --git a/compiler/rustc_type_ir/src/solve/mod.rs b/compiler/rustc_type_ir/src/solve/mod.rs
index 2449ac47db6fa..f91ffd53c2b6e 100644
--- a/compiler/rustc_type_ir/src/solve/mod.rs
+++ b/compiler/rustc_type_ir/src/solve/mod.rs
@@ -385,12 +385,3 @@ impl MaybeCause {
         }
     }
 }
-
-#[derive(derivative::Derivative)]
-#[derivative(PartialEq(bound = ""), Eq(bound = ""), Debug(bound = ""))]
-pub struct CacheData<I: Interner> {
-    pub result: QueryResult<I>,
-    pub proof_tree: Option<I::CanonicalGoalEvaluationStepRef>,
-    pub additional_depth: usize,
-    pub encountered_overflow: bool,
-}
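Finally, for the `binder.rs` and `fold.rs` hunks: `shift_vars` adjusts the De Bruijn indices of escaping bound variables when a value is moved under additional binders, and both rewritten functions keep the `binders_passed == 0 || !has_escaping_bound_vars()` fast path. A toy illustration on lambda-calculus terms (not rustc's actual representation):

```rust
#[derive(Debug, Clone, PartialEq)]
enum Term {
    Var(u32),       // De Bruijn index
    Lam(Box<Term>), // a binder
    App(Box<Term>, Box<Term>),
}

// Shift every variable that escapes `depth` binders upward by `amount`.
// Like `shift_vars`, this is a no-op when `amount == 0`.
fn shift(term: &Term, amount: u32, depth: u32) -> Term {
    match term {
        Term::Var(i) if *i >= depth => Term::Var(i + amount),
        Term::Var(i) => Term::Var(*i),
        Term::Lam(body) => Term::Lam(Box::new(shift(body, amount, depth + 1))),
        Term::App(f, a) => Term::App(
            Box::new(shift(f, amount, depth)),
            Box::new(shift(a, amount, depth)),
        ),
    }
}

fn main() {
    // \x. (x y) where y is free: Lam(App(Var(0), Var(1)))
    let t = Term::Lam(Box::new(Term::App(
        Box::new(Term::Var(0)),
        Box::new(Term::Var(1)),
    )));
    // Moving `t` under one extra binder shifts only the free variable.
    let shifted = shift(&t, 1, 0);
    assert_eq!(
        shifted,
        Term::Lam(Box::new(Term::App(
            Box::new(Term::Var(0)),
            Box::new(Term::Var(2)),
        )))
    );
    println!("free variable shifted: {shifted:?}");
}
```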