Skip to content

Commit 85fe615

Browse files
committed
move stack into submodule
1 parent 40daf23 commit 85fe615

File tree

2 files changed

+127
-57
lines changed

2 files changed

+127
-57
lines changed

compiler/rustc_type_ir/src/search_graph/mod.rs

Lines changed: 13 additions & 57 deletions
Original file line numberDiff line numberDiff line change
@@ -26,6 +26,8 @@ use tracing::debug;
2626

2727
use crate::data_structures::HashMap;
2828

29+
mod stack;
30+
use stack::{Stack, StackDepth, StackEntry};
2931
mod global_cache;
3032
use global_cache::CacheData;
3133
pub use global_cache::GlobalCache;
@@ -225,9 +227,9 @@ impl AvailableDepth {
225227
/// in case there is exponential blowup.
226228
fn allowed_depth_for_nested<D: Delegate>(
227229
root_depth: AvailableDepth,
228-
stack: &IndexVec<StackDepth, StackEntry<D::Cx>>,
230+
stack: &Stack<D::Cx>,
229231
) -> Option<AvailableDepth> {
230-
if let Some(last) = stack.raw.last() {
232+
if let Some(last) = stack.last() {
231233
if last.available_depth.0 == 0 {
232234
return None;
233235
}
@@ -433,50 +435,6 @@ impl<X: Cx> NestedGoals<X> {
433435
}
434436
}
435437

436-
rustc_index::newtype_index! {
437-
#[orderable]
438-
#[gate_rustc_only]
439-
pub struct StackDepth {}
440-
}
441-
442-
/// Stack entries of the evaluation stack. Its fields tend to be lazily updated
443-
/// when popping a child goal or completely immutable.
444-
#[derive_where(Debug; X: Cx)]
445-
struct StackEntry<X: Cx> {
446-
input: X::Input,
447-
448-
/// Whether proving this goal is a coinductive step.
449-
///
450-
/// This is used when encountering a trait solver cycle to
451-
/// decide the initial provisional result of the cycle.
452-
step_kind_from_parent: PathKind,
453-
454-
/// The available depth of a given goal, immutable.
455-
available_depth: AvailableDepth,
456-
457-
/// The maximum depth reached by this stack entry, only up-to date
458-
/// for the top of the stack and lazily updated for the rest.
459-
reached_depth: StackDepth,
460-
461-
/// All cycle heads this goal depends on. Lazily updated and only
462-
/// up-to date for the top of the stack.
463-
heads: CycleHeads,
464-
465-
/// Whether evaluating this goal encountered overflow. Lazily updated.
466-
encountered_overflow: bool,
467-
468-
/// Whether this goal has been used as the root of a cycle. This gets
469-
/// eagerly updated when encountering a cycle.
470-
has_been_used: Option<UsageKind>,
471-
472-
/// The nested goals of this goal, see the doc comment of the type.
473-
nested_goals: NestedGoals<X>,
474-
475-
/// Starts out as `None` and gets set when rerunning this
476-
/// goal in case we encounter a cycle.
477-
provisional_result: Option<X::Result>,
478-
}
479-
480438
/// A provisional result of an already computed goals which depends on other
481439
/// goals still on the stack.
482440
#[derive_where(Debug; X: Cx)]
@@ -498,7 +456,7 @@ pub struct SearchGraph<D: Delegate<Cx = X>, X: Cx = <D as Delegate>::Cx> {
498456
/// The stack of goals currently being computed.
499457
///
500458
/// An element is *deeper* in the stack if its index is *lower*.
501-
stack: IndexVec<StackDepth, StackEntry<X>>,
459+
stack: Stack<X>,
502460
/// The provisional cache contains entries for already computed goals which
503461
/// still depend on goals higher-up in the stack. We don't move them to the
504462
/// global cache and track them locally instead. A provisional cache entry
@@ -537,7 +495,7 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> {
537495
/// and using existing global cache entries to make sure they
538496
/// have the same impact on the remaining evaluation.
539497
fn update_parent_goal(
540-
stack: &mut IndexVec<StackDepth, StackEntry<X>>,
498+
stack: &mut Stack<X>,
541499
step_kind_from_parent: PathKind,
542500
reached_depth: StackDepth,
543501
heads: &CycleHeads,
@@ -588,13 +546,11 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> {
588546
/// the stack which completes the cycle. This given an inductive step AB which then cycles
589547
/// coinductively with A, we need to treat this cycle as coinductive.
590548
fn cycle_path_kind(
591-
stack: &IndexVec<StackDepth, StackEntry<X>>,
549+
stack: &Stack<X>,
592550
step_kind_to_head: PathKind,
593551
head: StackDepth,
594552
) -> PathKind {
595-
stack.raw[head.index() + 1..]
596-
.iter()
597-
.fold(step_kind_to_head, |curr, entry| curr.extend(entry.step_kind_from_parent))
553+
stack.cycle_step_kinds(head).fold(step_kind_to_head, |curr, step| curr.extend(step))
598554
}
599555

600556
/// Probably the most involved method of the whole solver.
@@ -728,7 +684,7 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> {
728684
input: X::Input,
729685
inspect: &mut D::ProofTreeBuilder,
730686
) -> X::Result {
731-
if let Some(last) = self.stack.raw.last_mut() {
687+
if let Some(last) = self.stack.last_mut() {
732688
last.encountered_overflow = true;
733689
// If computing a goal `B` depends on another goal `A` and
734690
// `A` has a nested goal which overflows, then computing `B`
@@ -859,7 +815,7 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> {
859815
// apply provisional cache entries which encountered overflow once the
860816
// current goal is already part of the same cycle. This check could be
861817
// improved but seems to be good enough for now.
862-
let last = self.stack.raw.last().unwrap();
818+
let last = self.stack.last().unwrap();
863819
if last.heads.opt_lowest_cycle_head().is_none_or(|lowest| lowest > head) {
864820
continue;
865821
}
@@ -893,7 +849,7 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> {
893849
/// evaluating this entry would not have ended up depending on either a goal
894850
/// already on the stack or a provisional cache entry.
895851
fn candidate_is_applicable(
896-
stack: &IndexVec<StackDepth, StackEntry<X>>,
852+
stack: &Stack<X>,
897853
step_kind_from_parent: PathKind,
898854
provisional_cache: &HashMap<X::Input, Vec<ProvisionalCacheEntry<X>>>,
899855
nested_goals: &NestedGoals<X>,
@@ -1028,7 +984,7 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> {
1028984
input: X::Input,
1029985
step_kind_from_parent: PathKind,
1030986
) -> Option<X::Result> {
1031-
let (head, _stack_entry) = self.stack.iter_enumerated().find(|(_, e)| e.input == input)?;
987+
let head = self.stack.find(input)?;
1032988
// We have a nested goal which directly relies on a goal deeper in the stack.
1033989
//
1034990
// We start by tagging all cycle participants, as that's necessary for caching.
@@ -1095,7 +1051,7 @@ impl<D: Delegate<Cx = X>, X: Cx> SearchGraph<D> {
10951051
let mut i = 0;
10961052
loop {
10971053
let result = evaluate_goal(self, inspect);
1098-
let stack_entry = self.stack.pop().unwrap();
1054+
let stack_entry = self.stack.pop();
10991055
debug_assert_eq!(stack_entry.input, input);
11001056

11011057
// If the current goal is not the root of a cycle, we are done.
Lines changed: 114 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,114 @@
1+
use std::ops::{Index, IndexMut};
2+
3+
use derive_where::derive_where;
4+
use rustc_index::IndexVec;
5+
6+
use super::{AvailableDepth, Cx, CycleHeads, NestedGoals, PathKind, UsageKind};
7+
8+
rustc_index::newtype_index! {
    /// The depth of a goal on the evaluation stack, used to index into it.
    /// An entry is *deeper* in the stack if its `StackDepth` is *lower*.
    #[orderable]
    #[gate_rustc_only]
    pub(super) struct StackDepth {}
}
13+
14+
/// Stack entries of the evaluation stack. Its fields tend to be lazily
15+
/// when popping a child goal or completely immutable.
16+
#[derive_where(Debug; X: Cx)]
17+
pub(super) struct StackEntry<X: Cx> {
18+
pub input: X::Input,
19+
20+
/// Whether proving this goal is a coinductive step.
21+
///
22+
/// This is used when encountering a trait solver cycle to
23+
/// decide whether the initial provisional result of the cycle.
24+
pub step_kind_from_parent: PathKind,
25+
26+
/// The available depth of a given goal, immutable.
27+
pub available_depth: AvailableDepth,
28+
29+
/// The maximum depth reached by this stack entry, only up-to date
30+
/// for the top of the stack and lazily updated for the rest.
31+
pub reached_depth: StackDepth,
32+
33+
/// All cycle heads this goal depends on. Lazily updated and only
34+
/// up-to date for the top of the stack.
35+
pub heads: CycleHeads,
36+
37+
/// Whether evaluating this goal encountered overflow. Lazily updated.
38+
pub encountered_overflow: bool,
39+
40+
/// Whether this goal has been used as the root of a cycle. This gets
41+
/// eagerly updated when encountering a cycle.
42+
pub has_been_used: Option<UsageKind>,
43+
44+
/// The nested goals of this goal, see the doc comment of the type.
45+
pub nested_goals: NestedGoals<X>,
46+
47+
/// Starts out as `None` and gets set when rerunning this
48+
/// goal in case we encounter a cycle.
49+
pub provisional_result: Option<X::Result>,
50+
}
51+
52+
/// The stack of goals currently being computed.
///
/// A thin wrapper around an `IndexVec` that keeps the underlying storage
/// private and only exposes the operations the search graph needs.
#[derive_where(Default; X: Cx)]
pub(super) struct Stack<X: Cx> {
    entries: IndexVec<StackDepth, StackEntry<X>>,
}
56+
57+
impl<X: Cx> Stack<X> {
58+
pub(super) fn is_empty(&self) -> bool {
59+
self.entries.is_empty()
60+
}
61+
62+
pub(super) fn len(&self) -> usize {
63+
self.entries.len()
64+
}
65+
66+
pub(super) fn last_index(&self) -> Option<StackDepth> {
67+
self.entries.last_index()
68+
}
69+
70+
pub(super) fn last(&self) -> Option<&StackEntry<X>> {
71+
self.entries.raw.last()
72+
}
73+
74+
pub(super) fn last_mut(&mut self) -> Option<&mut StackEntry<X>> {
75+
self.entries.raw.last_mut()
76+
}
77+
78+
pub(super) fn next_index(&self) -> StackDepth {
79+
self.entries.next_index()
80+
}
81+
82+
pub(super) fn push(&mut self, entry: StackEntry<X>) -> StackDepth {
83+
self.entries.push(entry)
84+
}
85+
86+
pub(super) fn pop(&mut self) -> StackEntry<X> {
87+
self.entries.pop().unwrap()
88+
}
89+
90+
pub(super) fn cycle_step_kinds(&self, head: StackDepth) -> impl Iterator<Item = PathKind> {
91+
self.entries.raw[head.index() + 1..].iter().map(|entry| entry.step_kind_from_parent)
92+
}
93+
94+
pub(super) fn iter(&self) -> impl Iterator<Item = &StackEntry<X>> {
95+
self.entries.iter()
96+
}
97+
98+
pub(super) fn find(&self, input: X::Input) -> Option<StackDepth> {
99+
self.entries.iter_enumerated().find(|(_, e)| e.input == input).map(|(idx, _)| idx)
100+
}
101+
}
102+
103+
impl<X: Cx> Index<StackDepth> for Stack<X> {
104+
type Output = StackEntry<X>;
105+
fn index(&self, index: StackDepth) -> &StackEntry<X> {
106+
&self.entries[index]
107+
}
108+
}
109+
110+
impl<X: Cx> IndexMut<StackDepth> for Stack<X> {
111+
fn index_mut(&mut self, index: StackDepth) -> &mut Self::Output {
112+
&mut self.entries[index]
113+
}
114+
}

0 commit comments

Comments
 (0)