diff --git a/compiler/rustc_next_trait_solver/src/solve/assembly/mod.rs b/compiler/rustc_next_trait_solver/src/solve/assembly/mod.rs index 5e08c3a03d8ac..a4a8317912a81 100644 --- a/compiler/rustc_next_trait_solver/src/solve/assembly/mod.rs +++ b/compiler/rustc_next_trait_solver/src/solve/assembly/mod.rs @@ -8,6 +8,7 @@ use std::ops::ControlFlow; use derive_where::derive_where; use rustc_type_ir::inherent::*; use rustc_type_ir::lang_items::TraitSolverLangItem; +use rustc_type_ir::search_graph::CandidateHeadUsages; use rustc_type_ir::solve::SizedTraitKind; use rustc_type_ir::{ self as ty, Interner, TypeFlags, TypeFoldable, TypeSuperVisitable, TypeVisitable, @@ -33,10 +34,11 @@ enum AliasBoundKind { /// /// It consists of both the `source`, which describes how that goal would be proven, /// and the `result` when using the given `source`. -#[derive_where(Clone, Debug; I: Interner)] +#[derive_where(Debug; I: Interner)] pub(super) struct Candidate { pub(super) source: CandidateSource, pub(super) result: CanonicalResponse, + pub(super) head_usages: CandidateHeadUsages, } /// Methods used to assemble candidates for either trait or projection goals. @@ -116,8 +118,11 @@ where ecx: &mut EvalCtxt<'_, D>, goal: Goal, assumption: I::Clause, - ) -> Result, NoSolution> { - Self::fast_reject_assumption(ecx, goal, assumption)?; + ) -> Result, CandidateHeadUsages> { + match Self::fast_reject_assumption(ecx, goal, assumption) { + Ok(()) => {} + Err(NoSolution) => return Err(CandidateHeadUsages::default()), + } // Dealing with `ParamEnv` candidates is a bit of a mess as we need to lazily // check whether the candidate is global while considering normalization. @@ -126,18 +131,23 @@ where // in `probe` even if the candidate does not apply before we get there. We handle this // by using a `Cell` here. We only ever write into it inside of `match_assumption`. let source = Cell::new(CandidateSource::ParamEnv(ParamEnvSource::Global)); - ecx.probe(|result: &QueryResult| inspect::ProbeKind::TraitCandidate { - source: source.get(), - result: *result, - }) - .enter(|ecx| { - Self::match_assumption(ecx, goal, assumption, |ecx| { - ecx.try_evaluate_added_goals()?; - source.set(ecx.characterize_param_env_assumption(goal.param_env, assumption)?); - ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) + let (result, head_usages) = ecx + .probe(|result: &QueryResult| inspect::ProbeKind::TraitCandidate { + source: source.get(), + result: *result, }) - }) - .map(|result| Candidate { source: source.get(), result }) + .enter_single_candidate(|ecx| { + Self::match_assumption(ecx, goal, assumption, |ecx| { + ecx.try_evaluate_added_goals()?; + source.set(ecx.characterize_param_env_assumption(goal.param_env, assumption)?); + ecx.evaluate_added_goals_and_make_canonical_response(Certainty::Yes) + }) + }); + + match result { + Ok(result) => Ok(Candidate { source: source.get(), result, head_usages }), + Err(NoSolution) => Err(head_usages), + } } /// Try equating an assumption predicate against a goal's predicate. If it @@ -355,6 +365,19 @@ pub(super) enum AssembleCandidatesFrom { EnvAndBounds, } +/// This is currently used to track the [CandidateHeadUsages] of all failed `ParamEnv` +/// candidates. This is then used to ignore their head usages in case there's another +/// always applicable `ParamEnv` candidate. Look at how `param_env_head_usages` is +/// used in the code for more details. +/// +/// We could easily extend this to also ignore head usages of other ignored candidates. 
+/// However, we currently don't have any tests where this matters and the complexity of +/// doing so does not feel worth it for now. +#[derive(Debug)] +pub(super) struct FailedCandidateInfo { + pub param_env_head_usages: CandidateHeadUsages, +} + impl EvalCtxt<'_, D> where D: SolverDelegate, @@ -364,16 +387,20 @@ where &mut self, goal: Goal, assemble_from: AssembleCandidatesFrom, - ) -> Vec> { + ) -> (Vec>, FailedCandidateInfo) { + let mut candidates = vec![]; + let mut failed_candidate_info = + FailedCandidateInfo { param_env_head_usages: CandidateHeadUsages::default() }; let Ok(normalized_self_ty) = self.structurally_normalize_ty(goal.param_env, goal.predicate.self_ty()) else { - return vec![]; + return (candidates, failed_candidate_info); }; if normalized_self_ty.is_ty_var() { debug!("self type has been normalized to infer"); - return self.forced_ambiguity(MaybeCause::Ambiguity).into_iter().collect(); + candidates.extend(self.forced_ambiguity(MaybeCause::Ambiguity)); + return (candidates, failed_candidate_info); } let goal: Goal = goal @@ -382,16 +409,15 @@ where // normalizing the self type as well, since type variables are not uniquified. let goal = self.resolve_vars_if_possible(goal); - let mut candidates = vec![]; - if let TypingMode::Coherence = self.typing_mode() && let Ok(candidate) = self.consider_coherence_unknowable_candidate(goal) { - return vec![candidate]; + candidates.push(candidate); + return (candidates, failed_candidate_info); } self.assemble_alias_bound_candidates(goal, &mut candidates); - self.assemble_param_env_candidates(goal, &mut candidates); + self.assemble_param_env_candidates(goal, &mut candidates, &mut failed_candidate_info); match assemble_from { AssembleCandidatesFrom::All => { @@ -423,7 +449,7 @@ where AssembleCandidatesFrom::EnvAndBounds => {} } - candidates + (candidates, failed_candidate_info) } pub(super) fn forced_ambiguity( @@ -584,9 +610,15 @@ where &mut self, goal: Goal, candidates: &mut Vec>, + failed_candidate_info: &mut FailedCandidateInfo, ) { for assumption in goal.param_env.caller_bounds().iter() { - candidates.extend(G::probe_and_consider_param_env_candidate(self, goal, assumption)); + match G::probe_and_consider_param_env_candidate(self, goal, assumption) { + Ok(candidate) => candidates.push(candidate), + Err(head_usages) => { + failed_candidate_info.param_env_head_usages.merge_usages(head_usages) + } + } } } @@ -661,7 +693,11 @@ where if let Ok(result) = self.evaluate_added_goals_and_make_canonical_response(Certainty::AMBIGUOUS) { - candidates.push(Candidate { source: CandidateSource::AliasBound, result }); + candidates.push(Candidate { + source: CandidateSource::AliasBound, + result, + head_usages: CandidateHeadUsages::default(), + }); } return; } @@ -959,7 +995,7 @@ where // Even when a trait bound has been proven using a where-bound, we // still need to consider alias-bounds for normalization, see // `tests/ui/next-solver/alias-bound-shadowed-by-env.rs`. - let mut candidates: Vec<_> = self + let (mut candidates, _) = self .assemble_and_evaluate_candidates(goal, AssembleCandidatesFrom::EnvAndBounds); // We still need to prefer where-bounds over alias-bounds however. 
@@ -972,23 +1008,20 @@ where return inject_normalize_to_rigid_candidate(self); } - if let Some(response) = self.try_merge_candidates(&candidates) { + if let Some((response, _)) = self.try_merge_candidates(&candidates) { Ok(response) } else { self.flounder(&candidates) } } TraitGoalProvenVia::Misc => { - let mut candidates = + let (mut candidates, _) = self.assemble_and_evaluate_candidates(goal, AssembleCandidatesFrom::All); // Prefer "orphaned" param-env normalization predicates, which are used // (for example, and ideally only) when proving item bounds for an impl. - let candidates_from_env: Vec<_> = candidates - .extract_if(.., |c| matches!(c.source, CandidateSource::ParamEnv(_))) - .collect(); - if let Some(response) = self.try_merge_candidates(&candidates_from_env) { - return Ok(response); + if candidates.iter().any(|c| matches!(c.source, CandidateSource::ParamEnv(_))) { + candidates.retain(|c| matches!(c.source, CandidateSource::ParamEnv(_))); } // We drop specialized impls to allow normalization via a final impl here. In case @@ -997,7 +1030,7 @@ where // means we can just ignore inference constraints and don't have to special-case // constraining the normalized-to `term`. self.filter_specialized_impls(AllowInferenceConstraints::Yes, &mut candidates); - if let Some(response) = self.try_merge_candidates(&candidates) { + if let Some((response, _)) = self.try_merge_candidates(&candidates) { Ok(response) } else { self.flounder(&candidates) diff --git a/compiler/rustc_next_trait_solver/src/solve/eval_ctxt/mod.rs b/compiler/rustc_next_trait_solver/src/solve/eval_ctxt/mod.rs index 8671cc7c3d309..a4738306181cc 100644 --- a/compiler/rustc_next_trait_solver/src/solve/eval_ctxt/mod.rs +++ b/compiler/rustc_next_trait_solver/src/solve/eval_ctxt/mod.rs @@ -8,7 +8,7 @@ use rustc_type_ir::fast_reject::DeepRejectCtxt; use rustc_type_ir::inherent::*; use rustc_type_ir::relate::Relate; use rustc_type_ir::relate::solver_relating::RelateExt; -use rustc_type_ir::search_graph::PathKind; +use rustc_type_ir::search_graph::{CandidateHeadUsages, PathKind}; use rustc_type_ir::{ self as ty, CanonicalVarValues, InferCtxtLike, Interner, TypeFoldable, TypeFolder, TypeSuperFoldable, TypeSuperVisitable, TypeVisitable, TypeVisitableExt, TypeVisitor, @@ -399,6 +399,10 @@ where result } + pub(super) fn ignore_candidate_head_usages(&mut self, usages: CandidateHeadUsages) { + self.search_graph.ignore_candidate_head_usages(usages); + } + /// Recursively evaluates `goal`, returning whether any inference vars have /// been constrained and the certainty of the result. 
fn evaluate_goal( diff --git a/compiler/rustc_next_trait_solver/src/solve/eval_ctxt/probe.rs b/compiler/rustc_next_trait_solver/src/solve/eval_ctxt/probe.rs index ed0cedc407746..e5658ba32ff67 100644 --- a/compiler/rustc_next_trait_solver/src/solve/eval_ctxt/probe.rs +++ b/compiler/rustc_next_trait_solver/src/solve/eval_ctxt/probe.rs @@ -1,5 +1,6 @@ use std::marker::PhantomData; +use rustc_type_ir::search_graph::CandidateHeadUsages; use rustc_type_ir::{InferCtxtLike, Interner}; use tracing::instrument; @@ -25,6 +26,20 @@ where D: SolverDelegate, I: Interner, { + pub(in crate::solve) fn enter_single_candidate( + self, + f: impl FnOnce(&mut EvalCtxt<'_, D>) -> T, + ) -> (T, CandidateHeadUsages) { + self.ecx.search_graph.enter_single_candidate(); + let mut candidate_usages = CandidateHeadUsages::default(); + let result = self.enter(|ecx| { + let result = f(ecx); + candidate_usages = ecx.search_graph.finish_single_candidate(); + result + }); + (result, candidate_usages) + } + pub(in crate::solve) fn enter(self, f: impl FnOnce(&mut EvalCtxt<'_, D>) -> T) -> T { let ProbeCtxt { ecx: outer, probe_kind, _result } = self; @@ -78,7 +93,8 @@ where self, f: impl FnOnce(&mut EvalCtxt<'_, D>) -> QueryResult, ) -> Result, NoSolution> { - self.cx.enter(|ecx| f(ecx)).map(|result| Candidate { source: self.source, result }) + let (result, head_usages) = self.cx.enter_single_candidate(f); + result.map(|result| Candidate { source: self.source, result, head_usages }) } } diff --git a/compiler/rustc_next_trait_solver/src/solve/mod.rs b/compiler/rustc_next_trait_solver/src/solve/mod.rs index 2feebe270a6bb..c2745c878dc12 100644 --- a/compiler/rustc_next_trait_solver/src/solve/mod.rs +++ b/compiler/rustc_next_trait_solver/src/solve/mod.rs @@ -236,6 +236,12 @@ where } } +#[derive(Debug)] +enum MergeCandidateInfo { + AlwaysApplicable(usize), + EqualResponse, +} + impl EvalCtxt<'_, D> where D: SolverDelegate, @@ -248,23 +254,25 @@ where fn try_merge_candidates( &mut self, candidates: &[Candidate], - ) -> Option> { + ) -> Option<(CanonicalResponse, MergeCandidateInfo)> { if candidates.is_empty() { return None; } + let always_applicable = candidates.iter().enumerate().find(|(_, candidate)| { + candidate.result.value.certainty == Certainty::Yes + && has_no_inference_or_external_constraints(candidate.result) + }); + if let Some((i, c)) = always_applicable { + return Some((c.result, MergeCandidateInfo::AlwaysApplicable(i))); + } + let one: CanonicalResponse = candidates[0].result; if candidates[1..].iter().all(|candidate| candidate.result == one) { - return Some(one); + return Some((one, MergeCandidateInfo::EqualResponse)); } - candidates - .iter() - .find(|candidate| { - candidate.result.value.certainty == Certainty::Yes - && has_no_inference_or_external_constraints(candidate.result) - }) - .map(|candidate| candidate.result) + None } fn bail_with_ambiguity(&mut self, candidates: &[Candidate]) -> CanonicalResponse { diff --git a/compiler/rustc_next_trait_solver/src/solve/trait_goals.rs b/compiler/rustc_next_trait_solver/src/solve/trait_goals.rs index 60bae738e61b8..03f7bc1f49e9c 100644 --- a/compiler/rustc_next_trait_solver/src/solve/trait_goals.rs +++ b/compiler/rustc_next_trait_solver/src/solve/trait_goals.rs @@ -13,11 +13,13 @@ use tracing::{debug, instrument, trace}; use crate::delegate::SolverDelegate; use crate::solve::assembly::structural_traits::{self, AsyncCallableRelevantTypes}; -use crate::solve::assembly::{self, AllowInferenceConstraints, AssembleCandidatesFrom, Candidate}; +use crate::solve::assembly::{ + self, 
AllowInferenceConstraints, AssembleCandidatesFrom, Candidate, FailedCandidateInfo, +}; use crate::solve::inspect::ProbeKind; use crate::solve::{ BuiltinImplSource, CandidateSource, Certainty, EvalCtxt, Goal, GoalSource, MaybeCause, - NoSolution, ParamEnvSource, QueryResult, has_only_region_constraints, + MergeCandidateInfo, NoSolution, ParamEnvSource, QueryResult, has_only_region_constraints, }; impl assembly::GoalKind for TraitPredicate @@ -1344,9 +1346,10 @@ where pub(super) fn merge_trait_candidates( &mut self, mut candidates: Vec>, + failed_candidate_info: FailedCandidateInfo, ) -> Result<(CanonicalResponse, Option), NoSolution> { if let TypingMode::Coherence = self.typing_mode() { - return if let Some(response) = self.try_merge_candidates(&candidates) { + return if let Some((response, _)) = self.try_merge_candidates(&candidates) { Ok((response, Some(TraitGoalProvenVia::Misc))) } else { self.flounder(&candidates).map(|r| (r, None)) @@ -1376,10 +1379,29 @@ where let where_bounds: Vec<_> = candidates .extract_if(.., |c| matches!(c.source, CandidateSource::ParamEnv(_))) .collect(); - return if let Some(response) = self.try_merge_candidates(&where_bounds) { - Ok((response, Some(TraitGoalProvenVia::ParamEnv))) + if let Some((response, info)) = self.try_merge_candidates(&where_bounds) { + match info { + // If there's an always applicable candidate, the result of all + // other candidates does not matter. This means we can ignore + // them when checking whether we've reached a fixpoint. + MergeCandidateInfo::AlwaysApplicable(i) => { + for (j, c) in where_bounds.into_iter().enumerate() { + if i != j { + self.ignore_candidate_head_usages(c.head_usages) + } + } + // If a where-bound does not apply, we don't actually get a + // candidate for it. We manually track the head usages + // of all failed `ParamEnv` candidates instead. 
+ self.ignore_candidate_head_usages( + failed_candidate_info.param_env_head_usages, + ); + } + MergeCandidateInfo::EqualResponse => {} + } + return Ok((response, Some(TraitGoalProvenVia::ParamEnv))); } else { - Ok((self.bail_with_ambiguity(&where_bounds), None)) + return Ok((self.bail_with_ambiguity(&where_bounds), None)); }; } @@ -1387,7 +1409,7 @@ where let alias_bounds: Vec<_> = candidates .extract_if(.., |c| matches!(c.source, CandidateSource::AliasBound)) .collect(); - return if let Some(response) = self.try_merge_candidates(&alias_bounds) { + return if let Some((response, _)) = self.try_merge_candidates(&alias_bounds) { Ok((response, Some(TraitGoalProvenVia::AliasBound))) } else { Ok((self.bail_with_ambiguity(&alias_bounds), None)) @@ -1412,7 +1434,7 @@ where TraitGoalProvenVia::Misc }; - if let Some(response) = self.try_merge_candidates(&candidates) { + if let Some((response, _)) = self.try_merge_candidates(&candidates) { Ok((response, Some(proven_via))) } else { self.flounder(&candidates).map(|r| (r, None)) @@ -1424,8 +1446,9 @@ where &mut self, goal: Goal>, ) -> Result<(CanonicalResponse, Option), NoSolution> { - let candidates = self.assemble_and_evaluate_candidates(goal, AssembleCandidatesFrom::All); - self.merge_trait_candidates(candidates) + let (candidates, failed_candidate_info) = + self.assemble_and_evaluate_candidates(goal, AssembleCandidatesFrom::All); + self.merge_trait_candidates(candidates, failed_candidate_info) } fn try_stall_coroutine(&mut self, self_ty: I::Ty) -> Option, NoSolution>> { diff --git a/compiler/rustc_type_ir/src/search_graph/mod.rs b/compiler/rustc_type_ir/src/search_graph/mod.rs index daacb4a3e446e..348178a527fbe 100644 --- a/compiler/rustc_type_ir/src/search_graph/mod.rs +++ b/compiler/rustc_type_ir/src/search_graph/mod.rs @@ -12,10 +12,11 @@ //! The global cache has to be completely unobservable, while the per-cycle cache may impact //! behavior as long as the resulting behavior is still correct. use std::cmp::Ordering; -use std::collections::BTreeMap; use std::collections::hash_map::Entry; +use std::collections::{BTreeMap, btree_map}; use std::fmt::Debug; use std::hash::Hash; +use std::iter; use std::marker::PhantomData; use derive_where::derive_where; @@ -169,27 +170,76 @@ impl PathKind { /// This is used to avoid rerunning a cycle if there's /// just a single usage kind and the final result matches /// its provisional result. -#[derive(Debug, Clone, Copy, PartialEq, Eq)] -pub enum UsageKind { - Single(PathKind), - Mixed, +/// +/// While it tracks the amount of usages using `u32`, we only ever +/// care whether there are any. We only count them to be able to ignore +/// usages from irrelevant candidates while evaluating a goal. +/// +/// This cares about how nested goals relied on a cycle head. It does +/// not care about how frequently the nested goal relied on it. +#[derive(Default, Debug, Clone, Copy, PartialEq, Eq)] +struct HeadUsages { + inductive: u32, + unknown: u32, + coinductive: u32, + forced_ambiguity: u32, } -impl From for UsageKind { - fn from(path: PathKind) -> UsageKind { - UsageKind::Single(path) + +impl HeadUsages { + fn add_usage(&mut self, path: PathKind) { + match path { + PathKind::Inductive => self.inductive += 1, + PathKind::Unknown => self.unknown += 1, + PathKind::Coinductive => self.coinductive += 1, + PathKind::ForcedAmbiguity => self.forced_ambiguity += 1, + } + } + + /// This adds the usages which occurred while computing a nested goal. 
+ /// + /// We don't actually care about how frequently the nested goal relied + /// on its cycle heads, only whether it did. + fn add_usages_from_nested(&mut self, usages: HeadUsages) { + let HeadUsages { inductive, unknown, coinductive, forced_ambiguity } = usages; + self.inductive += if inductive == 0 { 0 } else { 1 }; + self.unknown += if unknown == 0 { 0 } else { 1 }; + self.coinductive += if coinductive == 0 { 0 } else { 1 }; + self.forced_ambiguity += if forced_ambiguity == 0 { 0 } else { 1 }; + } + + fn ignore_usages(&mut self, usages: HeadUsages) { + let HeadUsages { inductive, unknown, coinductive, forced_ambiguity } = usages; + self.inductive = self.inductive.checked_sub(inductive).unwrap(); + self.unknown = self.unknown.checked_sub(unknown).unwrap(); + self.coinductive = self.coinductive.checked_sub(coinductive).unwrap(); + self.forced_ambiguity = self.forced_ambiguity.checked_sub(forced_ambiguity).unwrap(); + } + + fn is_empty(self) -> bool { + let HeadUsages { inductive, unknown, coinductive, forced_ambiguity } = self; + inductive == 0 && unknown == 0 && coinductive == 0 && forced_ambiguity == 0 } } -impl UsageKind { - #[must_use] - fn merge(self, other: impl Into) -> Self { - match (self, other.into()) { - (UsageKind::Mixed, _) | (_, UsageKind::Mixed) => UsageKind::Mixed, - (UsageKind::Single(lhs), UsageKind::Single(rhs)) => { - if lhs == rhs { - UsageKind::Single(lhs) - } else { - UsageKind::Mixed + +#[derive(Debug, Default)] +pub struct CandidateHeadUsages { + usages: Option>>, +} +impl CandidateHeadUsages { + pub fn merge_usages(&mut self, other: CandidateHeadUsages) { + if let Some(other_usages) = other.usages { + if let Some(ref mut self_usages) = self.usages { + #[allow(rustc::potential_query_instability)] + for (head_index, head) in other_usages.into_iter() { + let HeadUsages { inductive, unknown, coinductive, forced_ambiguity } = head; + let self_usages = self_usages.entry(head_index).or_default(); + self_usages.inductive += inductive; + self_usages.unknown += unknown; + self_usages.coinductive += coinductive; + self_usages.forced_ambiguity += forced_ambiguity; } + } else { + self.usages = Some(other_usages); } } } @@ -230,13 +280,24 @@ impl AvailableDepth { } } +#[derive(Clone, Copy, Debug)] +struct CycleHead { + paths_to_head: PathsToNested, + /// If the `usages` are empty, the result of that head does not matter + /// for the current goal. However, we still don't completely drop this + /// cycle head as whether or not it exists impacts which queries we + /// access, so ignoring it would cause incremental compilation verification + /// failures or hide query cycles. + usages: HeadUsages, +} + /// All cycle heads a given goal depends on, ordered by their stack depth. /// /// We also track all paths from this goal to that head. This is necessary /// when rebasing provisional cache results. 
#[derive(Clone, Debug, Default)] struct CycleHeads { - heads: BTreeMap, + heads: BTreeMap, } impl CycleHeads { @@ -244,43 +305,51 @@ impl CycleHeads { self.heads.is_empty() } - fn highest_cycle_head(&self) -> StackDepth { - self.opt_highest_cycle_head().unwrap() + fn highest_cycle_head(&self) -> (StackDepth, CycleHead) { + self.heads.last_key_value().map(|(k, v)| (*k, *v)).unwrap() } - fn opt_highest_cycle_head(&self) -> Option { + fn highest_cycle_head_index(&self) -> StackDepth { + self.opt_highest_cycle_head_index().unwrap() + } + + fn opt_highest_cycle_head_index(&self) -> Option { self.heads.last_key_value().map(|(k, _)| *k) } - fn opt_lowest_cycle_head(&self) -> Option { + fn opt_lowest_cycle_head_index(&self) -> Option { self.heads.first_key_value().map(|(k, _)| *k) } - fn remove_highest_cycle_head(&mut self) -> PathsToNested { + fn remove_highest_cycle_head(&mut self) -> CycleHead { let last = self.heads.pop_last(); last.unwrap().1 } - fn insert(&mut self, head: StackDepth, path_from_entry: impl Into + Copy) { - *self.heads.entry(head).or_insert(path_from_entry.into()) |= path_from_entry.into(); + fn insert( + &mut self, + head_index: StackDepth, + path_from_entry: impl Into + Copy, + usages: HeadUsages, + ) { + match self.heads.entry(head_index) { + btree_map::Entry::Vacant(entry) => { + entry.insert(CycleHead { paths_to_head: path_from_entry.into(), usages }); + } + btree_map::Entry::Occupied(entry) => { + let head = entry.into_mut(); + head.paths_to_head |= path_from_entry.into(); + head.usages.add_usages_from_nested(usages); + } + } } - fn iter(&self) -> impl Iterator + '_ { - self.heads.iter().map(|(k, v)| (*k, *v)) + fn ignore_usages(&mut self, head_index: StackDepth, usages: HeadUsages) { + self.heads.get_mut(&head_index).unwrap().usages.ignore_usages(usages) } - /// Update the cycle heads of a goal at depth `this` given the cycle heads - /// of a nested goal. This merges the heads after filtering the parent goal - /// itself. - fn extend_from_child(&mut self, this: StackDepth, step_kind: PathKind, child: &CycleHeads) { - for (&head, &path_from_entry) in child.heads.iter() { - match head.cmp(&this) { - Ordering::Less => {} - Ordering::Equal => continue, - Ordering::Greater => unreachable!(), - } - self.insert(head, path_from_entry.extend_with(step_kind)); - } + fn iter(&self) -> impl Iterator + '_ { + self.heads.iter().map(|(k, v)| (*k, *v)) } } @@ -487,9 +556,6 @@ impl EvaluationResult { pub struct SearchGraph, X: Cx = ::Cx> { root_depth: AvailableDepth, - /// The stack of goals currently being computed. - /// - /// An element is *deeper* in the stack if its index is *lower*. stack: Stack, /// The provisional cache contains entries for already computed goals which /// still depend on goals higher-up in the stack. We don't move them to the @@ -511,6 +577,7 @@ pub struct SearchGraph, X: Cx = ::Cx> { /// cache entry. 
enum UpdateParentGoalCtxt<'a, X: Cx> { Ordinary(&'a NestedGoals), + CycleOnStack(X::Input), ProvisionalCacheHit, } @@ -532,21 +599,47 @@ impl, X: Cx> SearchGraph { stack: &mut Stack, step_kind_from_parent: PathKind, required_depth_for_nested: usize, - heads: &CycleHeads, + heads: impl Iterator, encountered_overflow: bool, context: UpdateParentGoalCtxt<'_, X>, ) { - if let Some(parent_index) = stack.last_index() { - let parent = &mut stack[parent_index]; + if let Some((parent_index, parent)) = stack.last_mut_with_index() { parent.required_depth = parent.required_depth.max(required_depth_for_nested + 1); parent.encountered_overflow |= encountered_overflow; - parent.heads.extend_from_child(parent_index, step_kind_from_parent, heads); + for (head_index, head) in heads { + if let Some(candidate_usages) = &mut parent.candidate_usages { + candidate_usages + .usages + .get_or_insert_default() + .entry(head_index) + .or_default() + .add_usages_from_nested(head.usages); + } + match head_index.cmp(&parent_index) { + Ordering::Less => parent.heads.insert( + head_index, + head.paths_to_head.extend_with(step_kind_from_parent), + head.usages, + ), + Ordering::Equal => { + parent.usages.get_or_insert_default().add_usages_from_nested(head.usages); + } + Ordering::Greater => unreachable!(), + } + } let parent_depends_on_cycle = match context { UpdateParentGoalCtxt::Ordinary(nested_goals) => { parent.nested_goals.extend_from_child(step_kind_from_parent, nested_goals); !nested_goals.is_empty() } + UpdateParentGoalCtxt::CycleOnStack(head) => { + // We lookup provisional cache entries before detecting cycles. + // We therefore can't use a global cache entry if it contains a cycle + // whose head is in the provisional cache. + parent.nested_goals.insert(head, step_kind_from_parent.into()); + true + } UpdateParentGoalCtxt::ProvisionalCacheHit => true, }; // Once we've got goals which encountered overflow or a cycle, @@ -587,6 +680,29 @@ impl, X: Cx> SearchGraph { stack.cycle_step_kinds(head).fold(step_kind_to_head, |curr, step| curr.extend(step)) } + pub fn enter_single_candidate(&mut self) { + let prev = self.stack.last_mut().unwrap().candidate_usages.replace(Default::default()); + debug_assert!(prev.is_none(), "existing candidate_usages: {prev:?}"); + } + + pub fn finish_single_candidate(&mut self) -> CandidateHeadUsages { + self.stack.last_mut().unwrap().candidate_usages.take().unwrap() + } + + pub fn ignore_candidate_head_usages(&mut self, usages: CandidateHeadUsages) { + if let Some(usages) = usages.usages { + let (entry_index, entry) = self.stack.last_mut_with_index().unwrap(); + #[allow(rustc::potential_query_instability)] + for (head_index, usages) in usages.into_iter() { + if head_index == entry_index { + entry.usages.unwrap().ignore_usages(usages); + } else { + entry.heads.ignore_usages(head_index, usages); + } + } + } + } + /// Probably the most involved method of the whole solver. 
/// /// While goals get computed via `D::compute_goal`, this function handles @@ -655,7 +771,8 @@ impl, X: Cx> SearchGraph { required_depth: 0, heads: Default::default(), encountered_overflow: false, - has_been_used: None, + usages: None, + candidate_usages: None, nested_goals: Default::default(), }); @@ -674,7 +791,7 @@ impl, X: Cx> SearchGraph { &mut self.stack, step_kind_from_parent, evaluation_result.required_depth, - &evaluation_result.heads, + evaluation_result.heads.iter(), evaluation_result.encountered_overflow, UpdateParentGoalCtxt::Ordinary(&evaluation_result.nested_goals), ); @@ -703,7 +820,7 @@ impl, X: Cx> SearchGraph { let path_from_head = Self::cycle_path_kind( &self.stack, step_kind_from_parent, - heads.highest_cycle_head(), + heads.highest_cycle_head_index(), ); let provisional_cache_entry = ProvisionalCacheEntry { encountered_overflow, heads, path_from_head, result }; @@ -741,11 +858,17 @@ impl, X: Cx> SearchGraph { /// When reevaluating a goal with a changed provisional result, all provisional cache entry /// which depend on this goal get invalidated. - fn clear_dependent_provisional_results(&mut self) { - let head = self.stack.next_index(); + /// + /// Note that we keep provisional cache entries which accessed this goal as a cycle head, but + /// don't depend on its value. + fn clear_dependent_provisional_results_for_rerun(&mut self) { + let rerun_index = self.stack.next_index(); #[allow(rustc::potential_query_instability)] self.provisional_cache.retain(|_, entries| { - entries.retain(|entry| entry.heads.highest_cycle_head() != head); + entries.retain(|entry| { + let (head_index, head) = entry.heads.highest_cycle_head(); + head_index != rerun_index || head.usages.is_empty() + }); !entries.is_empty() }); } @@ -772,7 +895,7 @@ impl, X: Cx> SearchGraph { stack_entry: &StackEntry, mut mutate_result: impl FnMut(X::Input, X::Result) -> X::Result, ) { - let popped_head = self.stack.next_index(); + let popped_head_index = self.stack.next_index(); #[allow(rustc::potential_query_instability)] self.provisional_cache.retain(|&input, entries| { entries.retain_mut(|entry| { @@ -782,45 +905,65 @@ impl, X: Cx> SearchGraph { path_from_head, result, } = entry; - let ep = if heads.highest_cycle_head() == popped_head { + let popped_head = if heads.highest_cycle_head_index() == popped_head_index { heads.remove_highest_cycle_head() } else { return true; }; // We're rebasing an entry `e` over a head `p`. This head - // has a number of own heads `h` it depends on. We need to - // make sure that the path kind of all paths `hph` remain the - // same after rebasing. + // has a number of own heads `h` it depends on. // - // After rebasing the cycles `hph` will go through `e`. We need to make - // sure that forall possible paths `hep`, `heph` is equal to `hph.` - for (h, ph) in stack_entry.heads.iter() { - let hp = - Self::cycle_path_kind(&self.stack, stack_entry.step_kind_from_parent, h); - - // We first validate that all cycles while computing `p` would stay - // the same if we were to recompute it as a nested goal of `e`. - let he = hp.extend(*path_from_head); - for ph in ph.iter_paths() { - let hph = hp.extend(ph); - for ep in ep.iter_paths() { - let hep = ep.extend(he); - let heph = hep.extend(ph); - if hph != heph { - return false; + // This causes our provisional result to depend on the heads + // of `p` to avoid moving any goal which uses this cache entry to + // the global cache. + if popped_head.usages.is_empty() { + // The result of `e` does not depend on the value of `p`. 
This we can + // keep using the result of this provisional cache entry even if evaluating + // `p` as a nested goal of `e` would have a different result. + for (head_index, _) in stack_entry.heads.iter() { + heads.insert(head_index, PathsToNested::EMPTY, HeadUsages::default()); + } + } else { + // The entry `e` actually depends on the value of `p`. We need + // to make sure that the value of `p` wouldn't change even if we + // were to reevaluate it as a nested goal of `e` instead. For this + // we check that the path kind of all paths `hph` remain the + // same after rebasing. + // + // After rebasing the cycles `hph` will go through `e`. We need to make + // sure that forall possible paths `hep`, `heph` is equal to `hph.` + let ep = popped_head.paths_to_head; + for (head_index, head) in stack_entry.heads.iter() { + let ph = head.paths_to_head; + let hp = Self::cycle_path_kind( + &self.stack, + stack_entry.step_kind_from_parent, + head_index, + ); + // We first validate that all cycles while computing `p` would stay + // the same if we were to recompute it as a nested goal of `e`. + let he = hp.extend(*path_from_head); + for ph in ph.iter_paths() { + let hph = hp.extend(ph); + for ep in ep.iter_paths() { + let hep = ep.extend(he); + let heph = hep.extend(ph); + if hph != heph { + return false; + } } } - } - // If so, all paths reached while computing `p` have to get added - // the heads of `e` to make sure that rebasing `e` again also considers - // them. - let eph = ep.extend_with_paths(ph); - heads.insert(h, eph); + // If so, all paths reached while computing `p` have to get added + // the heads of `e` to make sure that rebasing `e` again also considers + // them. + let eph = ep.extend_with_paths(ph); + heads.insert(head_index, eph, head.usages); + } } - let Some(head) = heads.opt_highest_cycle_head() else { + let Some(head_index) = heads.opt_highest_cycle_head_index() else { return false; }; @@ -829,7 +972,7 @@ impl, X: Cx> SearchGraph { *path_from_head = path_from_head.extend(Self::cycle_path_kind( &self.stack, stack_entry.step_kind_from_parent, - head, + head_index, )); // Mutate the result of the provisional cache entry in case we did // not reach a fixpoint. @@ -853,7 +996,7 @@ impl, X: Cx> SearchGraph { for &ProvisionalCacheEntry { encountered_overflow, ref heads, path_from_head, result } in entries { - let head = heads.highest_cycle_head(); + let head_index = heads.highest_cycle_head_index(); if encountered_overflow { // This check is overly strict and very subtle. We need to make sure that if // a global cache entry depends on some goal without adding it to its @@ -865,24 +1008,26 @@ impl, X: Cx> SearchGraph { // current goal is already part of the same cycle. This check could be // improved but seems to be good enough for now. let last = self.stack.last().unwrap(); - if last.heads.opt_lowest_cycle_head().is_none_or(|lowest| lowest > head) { + if last.heads.opt_lowest_cycle_head_index().is_none_or(|lowest| lowest > head_index) + { continue; } } // A provisional cache entry is only valid if the current path from its // highest cycle head to the goal is the same. 
- if path_from_head == Self::cycle_path_kind(&self.stack, step_kind_from_parent, head) { + if path_from_head + == Self::cycle_path_kind(&self.stack, step_kind_from_parent, head_index) + { Self::update_parent_goal( &mut self.stack, step_kind_from_parent, 0, - heads, + heads.iter(), encountered_overflow, UpdateParentGoalCtxt::ProvisionalCacheHit, ); - debug_assert!(self.stack[head].has_been_used.is_some()); - debug!(?head, ?path_from_head, "provisional cache hit"); + debug!(?head_index, ?path_from_head, "provisional cache hit"); return Some(result); } } @@ -940,8 +1085,9 @@ impl, X: Cx> SearchGraph { // // We check if any of the paths taken while computing the global goal // would end up with an applicable provisional cache entry. - let head = heads.highest_cycle_head(); - let head_to_curr = Self::cycle_path_kind(&self.stack, step_kind_from_parent, head); + let head_index = heads.highest_cycle_head_index(); + let head_to_curr = + Self::cycle_path_kind(&self.stack, step_kind_from_parent, head_index); let full_paths = path_from_global_entry.extend_with(head_to_curr); if full_paths.contains(head_to_provisional.into()) { debug!( @@ -993,12 +1139,12 @@ impl, X: Cx> SearchGraph { // We don't move cycle participants to the global cache, so the // cycle heads are always empty. - let heads = Default::default(); + let heads = iter::empty(); Self::update_parent_goal( &mut self.stack, step_kind_from_parent, required_depth, - &heads, + heads, encountered_overflow, UpdateParentGoalCtxt::Ordinary(nested_goals), ); @@ -1014,34 +1160,30 @@ impl, X: Cx> SearchGraph { input: X::Input, step_kind_from_parent: PathKind, ) -> Option { - let head = self.stack.find(input)?; + let head_index = self.stack.find(input)?; // We have a nested goal which directly relies on a goal deeper in the stack. // // We start by tagging all cycle participants, as that's necessary for caching. // // Finally we can return either the provisional response or the initial response // in case we're in the first fixpoint iteration for this goal. - let path_kind = Self::cycle_path_kind(&self.stack, step_kind_from_parent, head); - debug!(?path_kind, "encountered cycle with depth {head:?}"); - let usage_kind = UsageKind::Single(path_kind); - self.stack[head].has_been_used = - Some(self.stack[head].has_been_used.map_or(usage_kind, |prev| prev.merge(usage_kind))); - - // Subtle: when encountering a cyclic goal, we still first checked for overflow, - // so we have to update the reached depth. - let last_index = self.stack.last_index().unwrap(); - let last = &mut self.stack[last_index]; - last.required_depth = last.required_depth.max(1); - - last.nested_goals.insert(input, step_kind_from_parent.into()); - last.nested_goals.insert(last.input, PathsToNested::EMPTY); - if last_index != head { - last.heads.insert(head, step_kind_from_parent); - } + let path_kind = Self::cycle_path_kind(&self.stack, step_kind_from_parent, head_index); + debug!(?path_kind, "encountered cycle with depth {head_index:?}"); + let mut usages = HeadUsages::default(); + usages.add_usage(path_kind); + let head = CycleHead { paths_to_head: step_kind_from_parent.into(), usages }; + Self::update_parent_goal( + &mut self.stack, + step_kind_from_parent, + 0, + iter::once((head_index, head)), + false, + UpdateParentGoalCtxt::CycleOnStack(input), + ); // Return the provisional result or, if we're in the first iteration, // start with no constraints. 
- if let Some(result) = self.stack[head].provisional_result { + if let Some(result) = self.stack[head_index].provisional_result { Some(result) } else { Some(D::initial_provisional_result(cx, path_kind, input)) @@ -1053,15 +1195,31 @@ impl, X: Cx> SearchGraph { &mut self, cx: X, stack_entry: &StackEntry, - usage_kind: UsageKind, + usages: HeadUsages, result: X::Result, ) -> bool { - if let Some(prev) = stack_entry.provisional_result { - prev == result - } else if let UsageKind::Single(kind) = usage_kind { - D::is_initial_provisional_result(cx, kind, stack_entry.input, result) + let provisional_result = stack_entry.provisional_result; + if usages.is_empty() { + true + } else if let Some(provisional_result) = provisional_result { + provisional_result == result } else { - false + let check = |k| D::is_initial_provisional_result(cx, k, stack_entry.input, result); + match usages { + HeadUsages { inductive: _, unknown: 0, coinductive: 0, forced_ambiguity: 0 } => { + check(PathKind::Inductive) + } + HeadUsages { inductive: 0, unknown: _, coinductive: 0, forced_ambiguity: 0 } => { + check(PathKind::Unknown) + } + HeadUsages { inductive: 0, unknown: 0, coinductive: _, forced_ambiguity: 0 } => { + check(PathKind::Coinductive) + } + HeadUsages { inductive: 0, unknown: 0, coinductive: 0, forced_ambiguity: _ } => { + check(PathKind::ForcedAmbiguity) + } + _ => false, + } } } @@ -1091,7 +1249,7 @@ impl, X: Cx> SearchGraph { // If the current goal is not the root of a cycle, we are done. // // There are no provisional cache entries which depend on this goal. - let Some(usage_kind) = stack_entry.has_been_used else { + let Some(usages) = stack_entry.usages else { return EvaluationResult::finalize(stack_entry, encountered_overflow, result); }; @@ -1106,7 +1264,7 @@ impl, X: Cx> SearchGraph { // is equal to the provisional result of the previous iteration, or because // this was only the root of either coinductive or inductive cycles, and the // final result is equal to the initial response for that case. - if self.reached_fixpoint(cx, &stack_entry, usage_kind, result) { + if self.reached_fixpoint(cx, &stack_entry, usages, result) { self.rebase_provisional_cache_entries(&stack_entry, |_, result| result); return EvaluationResult::finalize(stack_entry, encountered_overflow, result); } @@ -1144,7 +1302,7 @@ impl, X: Cx> SearchGraph { // Clear all provisional cache entries which depend on a previous provisional // result of this goal and rerun. - self.clear_dependent_provisional_results(); + self.clear_dependent_provisional_results_for_rerun(); debug!(?result, "fixpoint changed provisional results"); self.stack.push(StackEntry { @@ -1163,7 +1321,8 @@ impl, X: Cx> SearchGraph { // similar to the previous iterations when reevaluating, it's better // for caching if the reevaluation also starts out with `false`. 
encountered_overflow: false, - has_been_used: None, + usages: None, + candidate_usages: None, }); } } diff --git a/compiler/rustc_type_ir/src/search_graph/stack.rs b/compiler/rustc_type_ir/src/search_graph/stack.rs index a58cd82b02303..3fd8e2bd16e4e 100644 --- a/compiler/rustc_type_ir/src/search_graph/stack.rs +++ b/compiler/rustc_type_ir/src/search_graph/stack.rs @@ -1,9 +1,11 @@ -use std::ops::{Index, IndexMut}; +use std::ops::Index; use derive_where::derive_where; use rustc_index::IndexVec; -use crate::search_graph::{AvailableDepth, Cx, CycleHeads, NestedGoals, PathKind, UsageKind}; +use crate::search_graph::{ + AvailableDepth, CandidateHeadUsages, Cx, CycleHeads, HeadUsages, NestedGoals, PathKind, +}; rustc_index::newtype_index! { #[orderable] @@ -26,13 +28,13 @@ pub(super) struct StackEntry { /// The available depth of a given goal, immutable. pub available_depth: AvailableDepth, + /// The maximum depth required while evaluating this goal. + pub required_depth: usize, + /// Starts out as `None` and gets set when rerunning this /// goal in case we encounter a cycle. pub provisional_result: Option, - /// The maximum depth required while evaluating this goal. - pub required_depth: usize, - /// All cycle heads this goal depends on. Lazily updated and only /// up-to date for the top of the stack. pub heads: CycleHeads, @@ -40,14 +42,27 @@ pub(super) struct StackEntry { /// Whether evaluating this goal encountered overflow. Lazily updated. pub encountered_overflow: bool, - /// Whether this goal has been used as the root of a cycle. This gets - /// eagerly updated when encountering a cycle. - pub has_been_used: Option, + /// Whether and how this goal has been used as the root of a cycle. Lazily updated. + pub usages: Option, + + /// We want to be able to ignore head usages if they happen inside of candidates + /// which don't impact the result of a goal. This enables us to avoid rerunning goals + /// and is also used when rebasing provisional cache entries. + /// + /// To implement this, we track all usages while evaluating a candidate. If this candidate + /// then ends up ignored, we manually remove its usages from `usages` and `heads`. + pub candidate_usages: Option, /// The nested goals of this goal, see the doc comment of the type. pub nested_goals: NestedGoals, } +/// The stack of goals currently being computed. +/// +/// An element is *deeper* in the stack if its index is *lower*. +/// +/// Only the last entry of the stack is mutable. All other entries get +/// lazily updated in `update_parent_goal`. #[derive_where(Default; X: Cx)] pub(super) struct Stack { entries: IndexVec>, @@ -62,10 +77,6 @@ impl Stack { self.entries.len() } - pub(super) fn last_index(&self) -> Option { - self.entries.last_index() - } - pub(super) fn last(&self) -> Option<&StackEntry> { self.entries.raw.last() } @@ -74,6 +85,10 @@ impl Stack { self.entries.raw.last_mut() } + pub(super) fn last_mut_with_index(&mut self) -> Option<(StackDepth, &mut StackEntry)> { + self.entries.last_index().map(|idx| (idx, &mut self.entries[idx])) + } + pub(super) fn next_index(&self) -> StackDepth { self.entries.next_index() } @@ -108,9 +123,3 @@ impl Index for Stack { &self.entries[index] } } - -impl IndexMut for Stack { - fn index_mut(&mut self, index: StackDepth) -> &mut Self::Output { - &mut self.entries[index] - } -}
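
The core bookkeeping added by this patch is the per-path-kind usage counting in `HeadUsages`: usages are counted (rather than stored as booleans) so that the usages recorded by a candidate which turns out to be irrelevant can later be subtracted again without losing the usages recorded by the remaining candidates. The following standalone sketch is not part of the patch; it uses simplified stand-in types instead of the real `rustc_type_ir::search_graph` ones and only illustrates that counting scheme.

// Standalone sketch of the usage-counting scheme with simplified stand-in
// types. In the patch, `HeadUsages` is tracked per cycle head and candidate
// usages are collected via `CandidateHeadUsages`.
#[allow(dead_code)]
#[derive(Clone, Copy, PartialEq, Eq)]
enum PathKind {
    Inductive,
    Unknown,
    Coinductive,
    ForcedAmbiguity,
}

// We only ever care whether a count is non-zero, but counting lets us
// subtract the usages of an ignored candidate while keeping the usages
// recorded by other candidates.
#[derive(Default, Clone, Copy)]
struct HeadUsages {
    inductive: u32,
    unknown: u32,
    coinductive: u32,
    forced_ambiguity: u32,
}

impl HeadUsages {
    fn add_usage(&mut self, path: PathKind) {
        match path {
            PathKind::Inductive => self.inductive += 1,
            PathKind::Unknown => self.unknown += 1,
            PathKind::Coinductive => self.coinductive += 1,
            PathKind::ForcedAmbiguity => self.forced_ambiguity += 1,
        }
    }

    fn merge(&mut self, other: HeadUsages) {
        self.inductive += other.inductive;
        self.unknown += other.unknown;
        self.coinductive += other.coinductive;
        self.forced_ambiguity += other.forced_ambiguity;
    }

    // Subtract the usages of a candidate whose result does not matter,
    // mirroring `ignore_usages` in the patch. `checked_sub` guards against
    // ignoring usages which were never recorded.
    fn ignore_usages(&mut self, ignored: HeadUsages) {
        self.inductive = self.inductive.checked_sub(ignored.inductive).unwrap();
        self.unknown = self.unknown.checked_sub(ignored.unknown).unwrap();
        self.coinductive = self.coinductive.checked_sub(ignored.coinductive).unwrap();
        self.forced_ambiguity =
            self.forced_ambiguity.checked_sub(ignored.forced_ambiguity).unwrap();
    }

    fn is_empty(self) -> bool {
        self.inductive == 0
            && self.unknown == 0
            && self.coinductive == 0
            && self.forced_ambiguity == 0
    }
}

fn main() {
    // Two candidates each rely on the same cycle head.
    let mut applicable = HeadUsages::default();
    applicable.add_usage(PathKind::Inductive);
    let mut shadowed = HeadUsages::default();
    shadowed.add_usage(PathKind::Coinductive);

    let mut head_usages = HeadUsages::default();
    head_usages.merge(applicable);
    head_usages.merge(shadowed);

    // If the second candidate's result ends up not mattering, e.g. a
    // where-bound shadowed by an always applicable one, its usages are
    // subtracted again. The head still counts as used because the first
    // candidate's usage remains.
    head_usages.ignore_usages(shadowed);
    assert!(!head_usages.is_empty());
}

Using counters instead of a `Mixed`/`Single` flag is what allows `ignore_candidate_head_usages` to undo exactly the usages of the ignored candidates before `reached_fixpoint` inspects which path kinds were actually relied upon.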