88
99use std:: assert_matches:: assert_matches;
1010use std:: borrow:: { Borrow , Cow } ;
11+ use std:: cell:: Cell ;
1112use std:: collections:: VecDeque ;
12- use std:: { fmt, mem , ptr} ;
13+ use std:: { fmt, ptr} ;
1314
1415use rustc_abi:: { Align , HasDataLayout , Size } ;
1516use rustc_ast:: Mutability ;
@@ -131,7 +132,7 @@ pub struct Memory<'tcx, M: Machine<'tcx>> {
131132 /// This stores whether we are currently doing reads purely for the purpose of validation.
132133 /// Those reads do not trigger the machine's hooks for memory reads.
133134 /// Needless to say, this must only be set with great care!
134- validation_in_progress : bool ,
135+ validation_in_progress : Cell < bool > ,
135136}
136137
137138/// A reference to some allocation that was already bounds-checked for the given region
@@ -158,7 +159,7 @@ impl<'tcx, M: Machine<'tcx>> Memory<'tcx, M> {
158159 alloc_map : M :: MemoryMap :: default ( ) ,
159160 extra_fn_ptr_map : FxIndexMap :: default ( ) ,
160161 dead_alloc_map : FxIndexMap :: default ( ) ,
161- validation_in_progress : false ,
162+ validation_in_progress : Cell :: new ( false ) ,
162163 }
163164 }
164165
@@ -715,15 +716,15 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
715716 // We want to call the hook on *all* accesses that involve an AllocId, including zero-sized
716717 // accesses. That means we cannot rely on the closure above or the `Some` branch below. We
717718 // do this after `check_and_deref_ptr` to ensure some basic sanity has already been checked.
718- if !self . memory . validation_in_progress {
719+ if !self . memory . validation_in_progress . get ( ) {
719720 if let Ok ( ( alloc_id, ..) ) = self . ptr_try_get_alloc_id ( ptr, size_i64) {
720721 M :: before_alloc_read ( self , alloc_id) ?;
721722 }
722723 }
723724
724725 if let Some ( ( alloc_id, offset, prov, alloc) ) = ptr_and_alloc {
725726 let range = alloc_range ( offset, size) ;
726- if !self . memory . validation_in_progress {
727+ if !self . memory . validation_in_progress . get ( ) {
727728 M :: before_memory_read (
728729 self . tcx ,
729730 & self . machine ,
@@ -801,7 +802,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
801802 ) -> InterpResult < ' tcx , Option < AllocRefMut < ' a , ' tcx , M :: Provenance , M :: AllocExtra , M :: Bytes > > >
802803 {
803804 let tcx = self . tcx ;
804- let validation_in_progress = self . memory . validation_in_progress ;
805+ let validation_in_progress = self . memory . validation_in_progress . get ( ) ;
805806
806807 let size_i64 = i64:: try_from ( size. bytes ( ) ) . unwrap ( ) ; // it would be an error to even ask for more than isize::MAX bytes
807808 let ptr_and_alloc = Self :: check_and_deref_ptr (
@@ -1087,23 +1088,43 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
10871088 ///
10881089 /// We do this so Miri's allocation access tracking does not show the validation
10891090 /// reads as spurious accesses.
1090- pub fn run_for_validation < R > ( & mut self , f : impl FnOnce ( & mut Self ) -> R ) -> R {
1091+ pub fn run_for_validation_mut < R > ( & mut self , f : impl FnOnce ( & mut Self ) -> R ) -> R {
10911092 // This deliberately uses `==` on `bool` to follow the pattern
10921093 // `assert!(val.replace(new) == old)`.
10931094 assert ! (
1094- mem :: replace ( & mut self . memory. validation_in_progress, true ) == false ,
1095+ self . memory. validation_in_progress. replace ( true ) == false ,
10951096 "`validation_in_progress` was already set"
10961097 ) ;
10971098 let res = f ( self ) ;
10981099 assert ! (
1099- mem:: replace( & mut self . memory. validation_in_progress, false ) == true ,
1100+ self . memory. validation_in_progress. replace( false ) == true ,
1101+ "`validation_in_progress` was unset by someone else"
1102+ ) ;
1103+ res
1104+ }
1105+
1106+ /// Runs the closure in "validation" mode, which means the machine's memory read hooks will be
1107+ /// suppressed. Needless to say, this must only be set with great care! Cannot be nested.
1108+ ///
1109+ /// We do this so Miri's allocation access tracking does not show the validation
1110+ /// reads as spurious accesses.
1111+ pub fn run_for_validation_ref < R > ( & self , f : impl FnOnce ( & Self ) -> R ) -> R {
1112+ // This deliberately uses `==` on `bool` to follow the pattern
1113+ // `assert!(val.replace(new) == old)`.
1114+ assert ! (
1115+ self . memory. validation_in_progress. replace( true ) == false ,
1116+ "`validation_in_progress` was already set"
1117+ ) ;
1118+ let res = f ( self ) ;
1119+ assert ! (
1120+ self . memory. validation_in_progress. replace( false ) == true ,
11001121 "`validation_in_progress` was unset by someone else"
11011122 ) ;
11021123 res
11031124 }
11041125
    /// Whether we are currently inside a `run_for_validation_*` closure, i.e. whether
    /// machine memory-read hooks are being suppressed for validation-only reads.
    pub(super) fn validation_in_progress(&self) -> bool {
        self.memory.validation_in_progress.get()
    }
11081129}
11091130
@@ -1375,7 +1396,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
13751396 } ;
13761397 let src_alloc = self . get_alloc_raw ( src_alloc_id) ?;
13771398 let src_range = alloc_range ( src_offset, size) ;
1378- assert ! ( !self . memory. validation_in_progress, "we can't be copying during validation" ) ;
1399+ assert ! ( !self . memory. validation_in_progress. get ( ) , "we can't be copying during validation" ) ;
13791400 // For the overlapping case, it is crucial that we trigger the read hook
13801401 // before the write hook -- the aliasing model cares about the order.
13811402 M :: before_memory_read (
0 commit comments