Commit e980c62

Auto merge of rust-lang#95524 - oli-obk:cached_stable_hash_cleanups, r=nnethercote

Cached stable hash cleanups

r? `@nnethercote`

Add a sanity assertion in debug mode to check that the cached hashes are actually the ones we get if we compute the hash each time.

Add a new data structure that bundles all the hash-caching work to make it easier to re-use it for different interned data structures.

2 parents 340f649 + 25d6f8e · commit e980c62
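The idea, in isolation: compute the stable hash once when the value is created, reuse it afterwards, and in debug builds recompute it and assert that the cached value still matches. Below is a minimal, self-contained sketch of that pattern, using std's DefaultHasher and a hypothetical CachedHash wrapper in place of rustc's Fingerprint/StableHasher/WithStableHash:

use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

// Hypothetical stand-in for the `WithStableHash`/`Fingerprint` pair: a value
// plus a hash computed once at creation (0 = "no cached hash", playing the
// role of `Fingerprint::ZERO`).
struct CachedHash<T> {
    value: T,
    cached: u64,
}

impl<T: Hash> CachedHash<T> {
    fn new(value: T, cache: bool) -> Self {
        let cached = if cache { Self::compute(&value) } else { 0 };
        CachedHash { value, cached }
    }

    fn compute(value: &T) -> u64 {
        let mut hasher = DefaultHasher::new();
        value.hash(&mut hasher);
        hasher.finish()
    }

    // Mirrors the shape of the new `HashStable` impl: recompute when there is
    // no cache (or in debug builds), and assert that a freshly computed hash
    // agrees with the cached one.
    fn stable_hash(&self) -> u64 {
        if self.cached == 0 || cfg!(debug_assertions) {
            let fresh = Self::compute(&self.value);
            if cfg!(debug_assertions) && self.cached != 0 {
                assert_eq!(fresh, self.cached, "cached hash does not match freshly computed hash");
            }
            fresh
        } else {
            self.cached
        }
    }
}

fn main() {
    let cached = CachedHash::new("some interned value", true);
    let uncached = CachedHash::new("some interned value", false);
    // Cached and uncached paths must agree, otherwise builds with and without
    // caching would disagree on the hashes they produce.
    assert_eq!(cached.stable_hash(), uncached.stable_hash());
}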

File tree

13 files changed: +275 -222 lines


compiler/rustc_data_structures/src/intern.rs (+84)

@@ -4,6 +4,8 @@ use std::hash::{Hash, Hasher};
 use std::ops::Deref;
 use std::ptr;

+use crate::fingerprint::Fingerprint;
+
 mod private {
     #[derive(Clone, Copy, Debug)]
     pub struct PrivateZst;
@@ -108,5 +110,87 @@ where
     }
 }

+/// A helper trait so that `Interned` things can cache stable hashes reproducibly.
+pub trait InternedHashingContext {
+    fn with_def_path_and_no_spans(&mut self, f: impl FnOnce(&mut Self));
+}
+
+/// A helper type that you can wrap round your own type in order to automatically
+/// cache the stable hash on creation and not recompute it whenever the stable hash
+/// of the type is computed.
+/// This is only done in incremental mode. You can also opt out of caching by using
+/// StableHash::ZERO for the hash, in which case the hash gets computed each time.
+/// This is useful if you have values that you intern but never (can?) use for stable
+/// hashing.
+#[derive(Copy, Clone)]
+pub struct WithStableHash<T> {
+    pub internee: T,
+    pub stable_hash: Fingerprint,
+}
+
+impl<T: PartialEq> PartialEq for WithStableHash<T> {
+    #[inline]
+    fn eq(&self, other: &Self) -> bool {
+        self.internee.eq(&other.internee)
+    }
+}
+
+impl<T: Eq> Eq for WithStableHash<T> {}
+
+impl<T: Ord> PartialOrd for WithStableHash<T> {
+    fn partial_cmp(&self, other: &WithStableHash<T>) -> Option<Ordering> {
+        Some(self.internee.cmp(&other.internee))
+    }
+}
+
+impl<T: Ord> Ord for WithStableHash<T> {
+    fn cmp(&self, other: &WithStableHash<T>) -> Ordering {
+        self.internee.cmp(&other.internee)
+    }
+}
+
+impl<T> Deref for WithStableHash<T> {
+    type Target = T;
+
+    #[inline]
+    fn deref(&self) -> &T {
+        &self.internee
+    }
+}
+
+impl<T: Hash> Hash for WithStableHash<T> {
+    #[inline]
+    fn hash<H: Hasher>(&self, s: &mut H) {
+        self.internee.hash(s)
+    }
+}
+
+impl<T: HashStable<CTX>, CTX: InternedHashingContext> HashStable<CTX> for WithStableHash<T> {
+    fn hash_stable(&self, hcx: &mut CTX, hasher: &mut StableHasher) {
+        if self.stable_hash == Fingerprint::ZERO || cfg!(debug_assertions) {
+            // No cached hash available. This can only mean that incremental is disabled.
+            // We don't cache stable hashes in non-incremental mode, because they are used
+            // so rarely that the performance actually suffers.
+
+            // We need to build the hash as if we cached it and then hash that hash, as
+            // otherwise the hashes will differ between cached and non-cached mode.
+            let stable_hash: Fingerprint = {
+                let mut hasher = StableHasher::new();
+                hcx.with_def_path_and_no_spans(|hcx| self.internee.hash_stable(hcx, &mut hasher));
+                hasher.finish()
+            };
+            if cfg!(debug_assertions) && self.stable_hash != Fingerprint::ZERO {
+                assert_eq!(
+                    stable_hash, self.stable_hash,
+                    "cached stable hash does not match freshly computed stable hash"
+                );
+            }
+            stable_hash.hash_stable(hcx, hasher);
+        } else {
+            self.stable_hash.hash_stable(hcx, hasher);
+        }
+    }
+}
+
 #[cfg(test)]
 mod tests;
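Note that every impl above except `HashStable` forwards to `internee` and ignores `stable_hash`, so interning still deduplicates purely by value. A standalone sketch of that property, using a hypothetical `WithCachedHash` wrapper and a plain `u64` in place of `Fingerprint`:

use std::collections::HashSet;
use std::hash::{Hash, Hasher};

struct WithCachedHash<T> {
    internee: T,
    stable_hash: u64,
}

// Equality and hashing go through the wrapped value only; the cached hash is
// deliberately left out, just like in `WithStableHash` above.
impl<T: PartialEq> PartialEq for WithCachedHash<T> {
    fn eq(&self, other: &Self) -> bool {
        self.internee == other.internee
    }
}

impl<T: Eq> Eq for WithCachedHash<T> {}

impl<T: Hash> Hash for WithCachedHash<T> {
    fn hash<H: Hasher>(&self, s: &mut H) {
        self.internee.hash(s)
    }
}

fn main() {
    let mut interner = HashSet::new();
    assert!(interner.insert(WithCachedHash { internee: "u32", stable_hash: 0xdead_beef }));
    // Same internee with a different (here: absent) cached hash is a duplicate.
    assert!(!interner.insert(WithCachedHash { internee: "u32", stable_hash: 0 }));
    assert_eq!(interner.len(), 1);
    // The entry that was kept is the first one, cached hash and all.
    assert_eq!(interner.iter().next().unwrap().stable_hash, 0xdead_beef);
}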

compiler/rustc_infer/src/infer/outlives/env.rs (+17 -18)

@@ -2,7 +2,6 @@ use crate::infer::free_regions::FreeRegionMap;
 use crate::infer::{GenericKind, InferCtxt};
 use crate::traits::query::OutlivesBound;
 use rustc_data_structures::fx::FxHashMap;
-use rustc_data_structures::intern::Interned;
 use rustc_hir as hir;
 use rustc_middle::ty::{self, ReEarlyBound, ReFree, ReVar, Region};

@@ -164,12 +163,6 @@ impl<'a, 'tcx> OutlivesEnvironment<'tcx> {
         for outlives_bound in outlives_bounds {
             debug!("add_outlives_bounds: outlives_bound={:?}", outlives_bound);
             match outlives_bound {
-                OutlivesBound::RegionSubRegion(
-                    r_a @ (Region(Interned(ReEarlyBound(_), _)) | Region(Interned(ReFree(_), _))),
-                    Region(Interned(ReVar(vid_b), _)),
-                ) => {
-                    infcx.expect("no infcx provided but region vars found").add_given(r_a, *vid_b);
-                }
                 OutlivesBound::RegionSubParam(r_a, param_b) => {
                     self.region_bound_pairs_accum.push((r_a, GenericKind::Param(param_b)));
                 }
@@ -178,17 +171,23 @@ impl<'a, 'tcx> OutlivesEnvironment<'tcx> {
                         .push((r_a, GenericKind::Projection(projection_b)));
                 }
                 OutlivesBound::RegionSubRegion(r_a, r_b) => {
-                    // In principle, we could record (and take
-                    // advantage of) every relationship here, but
-                    // we are also free not to -- it simply means
-                    // strictly less that we can successfully type
-                    // check. Right now we only look for things
-                    // relationships between free regions. (It may
-                    // also be that we should revise our inference
-                    // system to be more general and to make use
-                    // of *every* relationship that arises here,
-                    // but presently we do not.)
-                    self.free_region_map.relate_regions(r_a, r_b);
+                    if let (ReEarlyBound(_) | ReFree(_), ReVar(vid_b)) = (r_a.kind(), r_b.kind()) {
+                        infcx
+                            .expect("no infcx provided but region vars found")
+                            .add_given(r_a, vid_b);
+                    } else {
+                        // In principle, we could record (and take
+                        // advantage of) every relationship here, but
+                        // we are also free not to -- it simply means
+                        // strictly less that we can successfully type
+                        // check. Right now we only look for things
+                        // relationships between free regions. (It may
+                        // also be that we should revise our inference
+                        // system to be more general and to make use
+                        // of *every* relationship that arises here,
+                        // but presently we do not.)
+                        self.free_region_map.relate_regions(r_a, r_b);
+                    }
                 }
             }
         }
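The rewritten arm matches on `r_a.kind()`/`r_b.kind()` rather than destructuring the interned `Region` representation, so the caller no longer depends on how regions are wrapped. A small sketch of that accessor-based style, with hypothetical `Region`/`RegionKind` stand-ins rather than rustc's types:

#[derive(Clone, Copy)]
enum RegionKind {
    ReEarlyBound(u32),
    ReFree(u32),
    ReVar(u32),
}

// Stands in for `Region(Interned(..))`: the wrapper hides the interned storage.
#[derive(Clone, Copy)]
struct Region<'a>(&'a RegionKind);

impl<'a> Region<'a> {
    // Accessor in the spirit of `ty::Region::kind()`.
    fn kind(self) -> RegionKind {
        *self.0
    }
}

fn relate(r_a: Region<'_>, r_b: Region<'_>) -> String {
    use RegionKind::*;
    // Same shape as the new code in `add_outlives_bounds`.
    if let (ReEarlyBound(_) | ReFree(_), ReVar(vid_b)) = (r_a.kind(), r_b.kind()) {
        format!("give inference variable '{} a known lower bound", vid_b)
    } else {
        "record the relation in the free-region map".to_string()
    }
}

fn main() {
    let (early, free, var) = (RegionKind::ReEarlyBound(0), RegionKind::ReFree(1), RegionKind::ReVar(3));
    println!("{}", relate(Region(&early), Region(&var))); // hits the inference-variable branch
    println!("{}", relate(Region(&var), Region(&free))); // falls through to the free-region map
}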

compiler/rustc_middle/src/arena.rs (+1 -1)

@@ -87,7 +87,7 @@ macro_rules! arena_types {
             [] hir_id_set: rustc_hir::HirIdSet,

             // Interned types
-            [] tys: rustc_middle::ty::TyS<'tcx>,
+            [] tys: rustc_data_structures::intern::WithStableHash<rustc_middle::ty::TyS<'tcx>>,
             [] predicates: rustc_middle::ty::PredicateS<'tcx>,
             [] consts: rustc_middle::ty::ConstS<'tcx>,

compiler/rustc_middle/src/ty/context.rs (+10 -9)

@@ -26,7 +26,7 @@ use crate::ty::{
 use rustc_ast as ast;
 use rustc_data_structures::fingerprint::Fingerprint;
 use rustc_data_structures::fx::{FxHashMap, FxHashSet};
-use rustc_data_structures::intern::Interned;
+use rustc_data_structures::intern::{Interned, WithStableHash};
 use rustc_data_structures::memmap::Mmap;
 use rustc_data_structures::profiling::SelfProfilerRef;
 use rustc_data_structures::sharded::{IntoPointer, ShardedHashMap};
@@ -105,7 +105,7 @@ pub struct CtxtInterners<'tcx> {

     // Specifically use a speedy hash algorithm for these hash sets, since
     // they're accessed quite often.
-    type_: InternedSet<'tcx, TyS<'tcx>>,
+    type_: InternedSet<'tcx, WithStableHash<TyS<'tcx>>>,
     substs: InternedSet<'tcx, InternalSubsts<'tcx>>,
     canonical_var_infos: InternedSet<'tcx, List<CanonicalVarInfo<'tcx>>>,
     region: InternedSet<'tcx, RegionKind>,
@@ -178,10 +178,11 @@ impl<'tcx> CtxtInterners<'tcx> {
                     kind,
                     flags: flags.flags,
                     outer_exclusive_binder: flags.outer_exclusive_binder,
-                    stable_hash,
                 };

-                InternedInSet(self.arena.alloc(ty_struct))
+                InternedInSet(
+                    self.arena.alloc(WithStableHash { internee: ty_struct, stable_hash }),
+                )
             })
             .0,
         ))
@@ -2048,23 +2049,23 @@ impl<'tcx, T: 'tcx + ?Sized> IntoPointer for InternedInSet<'tcx, T> {
 }

 #[allow(rustc::usage_of_ty_tykind)]
-impl<'tcx> Borrow<TyKind<'tcx>> for InternedInSet<'tcx, TyS<'tcx>> {
+impl<'tcx> Borrow<TyKind<'tcx>> for InternedInSet<'tcx, WithStableHash<TyS<'tcx>>> {
     fn borrow<'a>(&'a self) -> &'a TyKind<'tcx> {
         &self.0.kind
     }
 }

-impl<'tcx> PartialEq for InternedInSet<'tcx, TyS<'tcx>> {
-    fn eq(&self, other: &InternedInSet<'tcx, TyS<'tcx>>) -> bool {
+impl<'tcx> PartialEq for InternedInSet<'tcx, WithStableHash<TyS<'tcx>>> {
+    fn eq(&self, other: &InternedInSet<'tcx, WithStableHash<TyS<'tcx>>>) -> bool {
         // The `Borrow` trait requires that `x.borrow() == y.borrow()` equals
         // `x == y`.
         self.0.kind == other.0.kind
     }
 }

-impl<'tcx> Eq for InternedInSet<'tcx, TyS<'tcx>> {}
+impl<'tcx> Eq for InternedInSet<'tcx, WithStableHash<TyS<'tcx>>> {}

-impl<'tcx> Hash for InternedInSet<'tcx, TyS<'tcx>> {
+impl<'tcx> Hash for InternedInSet<'tcx, WithStableHash<TyS<'tcx>>> {
     fn hash<H: Hasher>(&self, s: &mut H) {
         // The `Borrow` trait requires that `x.borrow().hash(s) == x.hash(s)`.
         self.0.kind.hash(s)
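The `Borrow`/`PartialEq`/`Hash` trio above all key off `kind`, which is what allows the interner to look things up by `TyKind` alone before a full `WithStableHash<TyS>` is allocated. A much-simplified, hypothetical sketch of that `Borrow`-keyed lookup using a plain `HashSet`:

use std::borrow::Borrow;
use std::collections::HashSet;
use std::hash::{Hash, Hasher};

// Hypothetical, much-simplified analogue of `InternedInSet`: the set is keyed
// by the inner `kind`, so a lookup only needs the kind, not a fully built
// (and hashed) entry.
struct Entry {
    kind: String,
    stable_hash: u64,
}

struct ByKind(Entry);

impl Borrow<str> for ByKind {
    fn borrow(&self) -> &str {
        &self.0.kind
    }
}

// The `Borrow` contract: `x.borrow() == y.borrow()` must agree with `x == y`,
// and hashing must agree too, hence both go through `kind` alone.
impl PartialEq for ByKind {
    fn eq(&self, other: &Self) -> bool {
        self.0.kind == other.0.kind
    }
}

impl Eq for ByKind {}

impl Hash for ByKind {
    fn hash<H: Hasher>(&self, s: &mut H) {
        self.0.kind.hash(s)
    }
}

fn main() {
    let mut interner = HashSet::new();
    interner.insert(ByKind(Entry { kind: "bool".to_string(), stable_hash: 0xcafe }));

    // Look up by kind alone, relying on `Borrow<str>`.
    assert_eq!(interner.get("bool").unwrap().0.stable_hash, 0xcafe);
    assert!(interner.get("i32").is_none());
}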

compiler/rustc_middle/src/ty/layout.rs (+28 -40)

@@ -5,7 +5,6 @@ use crate::ty::subst::Subst;
 use crate::ty::{self, subst::SubstsRef, ReprOptions, Ty, TyCtxt, TypeFoldable};
 use rustc_ast as ast;
 use rustc_attr as attr;
-use rustc_data_structures::intern::Interned;
 use rustc_hir as hir;
 use rustc_hir::lang_items::LangItem;
 use rustc_index::bit_set::BitSet;
@@ -503,42 +502,34 @@ impl<'tcx> LayoutCx<'tcx, TyCtxt<'tcx>> {
                 }

                 // Two non-ZST fields, and they're both scalars.
-                (
-                    Some((
-                        i,
-                        &TyAndLayout {
-                            layout: Layout(Interned(&LayoutS { abi: Abi::Scalar(a), .. }, _)),
-                            ..
-                        },
-                    )),
-                    Some((
-                        j,
-                        &TyAndLayout {
-                            layout: Layout(Interned(&LayoutS { abi: Abi::Scalar(b), .. }, _)),
-                            ..
-                        },
-                    )),
-                    None,
-                ) => {
-                    // Order by the memory placement, not source order.
-                    let ((i, a), (j, b)) =
-                        if offsets[i] < offsets[j] { ((i, a), (j, b)) } else { ((j, b), (i, a)) };
-                    let pair = self.scalar_pair(a, b);
-                    let pair_offsets = match pair.fields {
-                        FieldsShape::Arbitrary { ref offsets, ref memory_index } => {
-                            assert_eq!(memory_index, &[0, 1]);
-                            offsets
+                (Some((i, a)), Some((j, b)), None) => {
+                    match (a.abi, b.abi) {
+                        (Abi::Scalar(a), Abi::Scalar(b)) => {
+                            // Order by the memory placement, not source order.
+                            let ((i, a), (j, b)) = if offsets[i] < offsets[j] {
+                                ((i, a), (j, b))
+                            } else {
+                                ((j, b), (i, a))
+                            };
+                            let pair = self.scalar_pair(a, b);
+                            let pair_offsets = match pair.fields {
+                                FieldsShape::Arbitrary { ref offsets, ref memory_index } => {
+                                    assert_eq!(memory_index, &[0, 1]);
+                                    offsets
+                                }
+                                _ => bug!(),
+                            };
+                            if offsets[i] == pair_offsets[0]
+                                && offsets[j] == pair_offsets[1]
+                                && align == pair.align
+                                && size == pair.size
+                            {
+                                // We can use `ScalarPair` only when it matches our
+                                // already computed layout (including `#[repr(C)]`).
+                                abi = pair.abi;
+                            }
                         }
-                        _ => bug!(),
-                    };
-                    if offsets[i] == pair_offsets[0]
-                        && offsets[j] == pair_offsets[1]
-                        && align == pair.align
-                        && size == pair.size
-                    {
-                        // We can use `ScalarPair` only when it matches our
-                        // already computed layout (including `#[repr(C)]`).
-                        abi = pair.abi;
+                        _ => {}
                     }
                 }

@@ -791,10 +782,7 @@ impl<'tcx> LayoutCx<'tcx, TyCtxt<'tcx>> {
         }

         // Extract the number of elements from the layout of the array field:
-        let Ok(TyAndLayout {
-            layout: Layout(Interned(LayoutS { fields: FieldsShape::Array { count, .. }, .. }, _)),
-            ..
-        }) = self.layout_of(f0_ty) else {
+        let FieldsShape::Array { count, .. } = self.layout_of(f0_ty)?.layout.fields() else {
            return Err(LayoutError::Unknown(ty));
         };
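The last hunk replaces deep destructuring through `Layout(Interned(LayoutS { .. }))` with an accessor plus `let ... else`. A tiny sketch of that shape, with hypothetical `Layout`/`FieldsShape`/`LayoutError` stand-ins rather than rustc's types:

// Hypothetical stand-ins; the point is the accessor + `let ... else` pattern.
enum FieldsShape {
    Primitive,
    Array { count: u64 },
}

struct Layout {
    fields: FieldsShape,
}

impl Layout {
    // Accessor in the spirit of `Layout::fields()`: hides how the layout is
    // stored or interned.
    fn fields(&self) -> &FieldsShape {
        &self.fields
    }
}

#[derive(Debug)]
enum LayoutError {
    Unknown,
}

fn array_len(layout: &Layout) -> Result<u64, LayoutError> {
    // Extract the number of elements, failing for non-array layouts.
    let FieldsShape::Array { count } = layout.fields() else {
        return Err(LayoutError::Unknown);
    };
    Ok(*count)
}

fn main() {
    let arr = Layout { fields: FieldsShape::Array { count: 8 } };
    let scalar = Layout { fields: FieldsShape::Primitive };
    assert_eq!(array_len(&arr).unwrap(), 8);
    assert!(array_len(&scalar).is_err());
}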