
Commit d198400

Merge pull request #451 from TimDiekmann/master
Remove unchecked_shr/shl from intrinsics
2 parents: 5a8b46d + b488b51

File tree

4 files changed: +47 -175 lines
    rust-toolchain   (+1 -1)
    src/intrinsic.rs (-102)
    src/lib.rs       (+5 -71)
    src/memory.rs    (+41 -1)


rust-toolchain (+1 -1)

@@ -1 +1 @@
-nightly-2018-09-01
+nightly-2018-09-15

src/intrinsic.rs (-102)
@@ -33,39 +33,6 @@ impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for EvalContext<'a, 'mir, 'tcx, super:
 
         let intrinsic_name = &self.tcx.item_name(instance.def_id()).as_str()[..];
         match intrinsic_name {
-            "add_with_overflow" => {
-                let l = self.read_value(args[0])?;
-                let r = self.read_value(args[1])?;
-                self.binop_with_overflow(
-                    mir::BinOp::Add,
-                    l,
-                    r,
-                    dest,
-                )?
-            }
-
-            "sub_with_overflow" => {
-                let l = self.read_value(args[0])?;
-                let r = self.read_value(args[1])?;
-                self.binop_with_overflow(
-                    mir::BinOp::Sub,
-                    l,
-                    r,
-                    dest,
-                )?
-            }
-
-            "mul_with_overflow" => {
-                let l = self.read_value(args[0])?;
-                let r = self.read_value(args[1])?;
-                self.binop_with_overflow(
-                    mir::BinOp::Mul,
-                    l,
-                    r,
-                    dest,
-                )?
-            }
-
             "arith_offset" => {
                 let offset = self.read_scalar(args[1])?.to_isize(&self)?;
                 let ptr = self.read_scalar(args[0])?.not_undef()?;
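
For reference, the removed "*_with_overflow" arms computed a wrapped result plus an overflow flag, which matches the stable overflowing_* integer methods. A standalone sketch of that behaviour (plain Rust, not Miri code):

fn main() {
    // Wrapped result plus overflow flag, as the removed arms computed via
    // binop_with_overflow (sketch using the stable library equivalents).
    let (wrapped, overflowed) = 200u8.overflowing_add(100);
    assert_eq!((wrapped, overflowed), (44, true)); // 300 wraps to 44 in u8

    let (wrapped, overflowed) = 200u8.overflowing_mul(2);
    assert_eq!((wrapped, overflowed), (144, true)); // 400 wraps to 144 in u8
}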
@@ -326,39 +293,6 @@ impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for EvalContext<'a, 'mir, 'tcx, super:
                 self.write_scalar(result_ptr, dest)?;
             }
 
-            "overflowing_sub" => {
-                let l = self.read_value(args[0])?;
-                let r = self.read_value(args[1])?;
-                self.binop_ignore_overflow(
-                    mir::BinOp::Sub,
-                    l,
-                    r,
-                    dest,
-                )?;
-            }
-
-            "overflowing_mul" => {
-                let l = self.read_value(args[0])?;
-                let r = self.read_value(args[1])?;
-                self.binop_ignore_overflow(
-                    mir::BinOp::Mul,
-                    r,
-                    l,
-                    dest,
-                )?;
-            }
-
-            "overflowing_add" => {
-                let l = self.read_value(args[0])?;
-                let r = self.read_value(args[1])?;
-                self.binop_ignore_overflow(
-                    mir::BinOp::Add,
-                    r,
-                    l,
-                    dest,
-                )?;
-            }
-
             "powf32" => {
                 let f = self.read_scalar(args[0])?.to_f32()?;
                 let f2 = self.read_scalar(args[1])?.to_f32()?;
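
Likewise, the removed "overflowing_*" arms ignored the overflow flag, i.e. they performed wrapping arithmetic; the stable wrapping_* methods give the same results. A standalone sketch (not Miri code):

fn main() {
    // Wrapping arithmetic, as the removed arms computed via binop_ignore_overflow.
    assert_eq!(250u8.wrapping_add(10), 4);  // 260 wraps to 4
    assert_eq!(3u8.wrapping_sub(5), 254);   // -2 wraps to 254
    assert_eq!(16u8.wrapping_mul(17), 16);  // 272 wraps to 16
}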
@@ -443,42 +377,6 @@ impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for EvalContext<'a, 'mir, 'tcx, super:
                 self.write_value(value, dest)?;
             }
 
-            "unchecked_shl" => {
-                let bits = dest.layout.size.bytes() as u128 * 8;
-                let l = self.read_value(args[0])?;
-                let r = self.read_value(args[1])?;
-                let rval = r.to_scalar()?.to_bytes()?;
-                if rval >= bits {
-                    return err!(Intrinsic(
-                        format!("Overflowing shift by {} in unchecked_shl", rval),
-                    ));
-                }
-                self.binop_ignore_overflow(
-                    mir::BinOp::Shl,
-                    l,
-                    r,
-                    dest,
-                )?;
-            }
-
-            "unchecked_shr" => {
-                let bits = dest.layout.size.bytes() as u128 * 8;
-                let l = self.read_value(args[0])?;
-                let r = self.read_value(args[1])?;
-                let rval = r.to_scalar()?.to_bytes()?;
-                if rval >= bits {
-                    return err!(Intrinsic(
-                        format!("Overflowing shift by {} in unchecked_shr", rval),
-                    ));
-                }
-                self.binop_ignore_overflow(
-                    mir::BinOp::Shr,
-                    l,
-                    r,
-                    dest,
-                )?;
-            }
-
             "unchecked_div" => {
                 let l = self.read_value(args[0])?;
                 let r = self.read_value(args[1])?;
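
The removed unchecked_shl/unchecked_shr arms rejected shift amounts greater than or equal to the destination's bit width before delegating to the shift binop. A standalone plain-Rust sketch of that bounds check (function name illustrative, not Miri's API):

// Reject shift amounts >= the bit width of the destination type, mirroring
// the check the removed arms performed.
fn check_shift(dest_size_bytes: u64, shift_amount: u128, intrinsic: &str) -> Result<(), String> {
    let bits = dest_size_bytes as u128 * 8;
    if shift_amount >= bits {
        return Err(format!("Overflowing shift by {} in {}", shift_amount, intrinsic));
    }
    Ok(())
}

fn main() {
    assert!(check_shift(1, 7, "unchecked_shl").is_ok());  // shifting a u8 by 7 is fine
    assert!(check_shift(1, 8, "unchecked_shr").is_err()); // shifting a u8 by 8 is rejected
}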

src/lib.rs (+5 -71)
@@ -18,14 +18,11 @@ use rustc::ty::layout::{TyLayout, LayoutOf, Size};
 use rustc::hir::def_id::DefId;
 use rustc::mir;
 
-use rustc_data_structures::fx::FxHasher;
-
 use syntax::ast::Mutability;
 use syntax::attr;
 
 use std::marker::PhantomData;
-use std::collections::{HashMap, BTreeMap};
-use std::hash::{Hash, Hasher};
+use std::collections::HashMap;
 
 pub use rustc::mir::interpret::*;
 pub use rustc_mir::interpret::*;
@@ -43,7 +40,7 @@ use fn_call::EvalContextExt as MissingFnsEvalContextExt;
 use operator::EvalContextExt as OperatorEvalContextExt;
 use intrinsic::EvalContextExt as IntrinsicEvalContextExt;
 use tls::EvalContextExt as TlsEvalContextExt;
-use memory::MemoryKind as MiriMemoryKind;
+use memory::{MemoryKind as MiriMemoryKind, TlsKey, TlsEntry, MemoryData};
 use locks::LockInfo;
 use range_map::RangeMap;
 use helpers::FalibleScalarExt;
@@ -63,7 +60,7 @@ pub fn create_ecx<'a, 'mir: 'a, 'tcx: 'mir>(
     let main_instance = ty::Instance::mono(ecx.tcx.tcx, main_id);
     let main_mir = ecx.load_mir(main_instance.def)?;
 
-    if !main_mir.return_ty().is_nil() || main_mir.arg_count != 0 {
+    if !main_mir.return_ty().is_unit() || main_mir.arg_count != 0 {
         return err!(Unimplemented(
             "miri does not support main functions without `fn()` type signatures"
                 .to_owned(),
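
For context, the only entry-point shape this check accepts is a main function with zero arguments and a unit return type (minimal example):

fn main() {} // zero arguments, `()` return type: passes the check above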
@@ -214,75 +211,12 @@ pub struct Evaluator<'tcx> {
     _dummy : PhantomData<&'tcx ()>,
 }
 
-impl<'tcx> Hash for Evaluator<'tcx> {
-    fn hash<H: Hasher>(&self, state: &mut H) {
-        let Evaluator {
-            env_vars,
-            _dummy: _,
-        } = self;
-
-        env_vars.iter()
-            .map(|(env, ptr)| {
-                let mut h = FxHasher::default();
-                env.hash(&mut h);
-                ptr.hash(&mut h);
-                h.finish()
-            })
-            .fold(0u64, |acc, hash| acc.wrapping_add(hash))
-            .hash(state);
-    }
-}
-
-pub type TlsKey = u128;
-
-#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
-pub struct TlsEntry<'tcx> {
-    data: Scalar, // Will eventually become a map from thread IDs to `Scalar`s, if we ever support more than one thread.
-    dtor: Option<ty::Instance<'tcx>>,
-}
-
-#[derive(Clone, PartialEq, Eq)]
-pub struct MemoryData<'tcx> {
-    /// The Key to use for the next thread-local allocation.
-    next_thread_local: TlsKey,
-
-    /// pthreads-style thread-local storage.
-    thread_local: BTreeMap<TlsKey, TlsEntry<'tcx>>,
-
-    /// Memory regions that are locked by some function
-    ///
-    /// Only mutable (static mut, heap, stack) allocations have an entry in this map.
-    /// The entry is created when allocating the memory and deleted after deallocation.
-    locks: HashMap<AllocId, RangeMap<LockInfo<'tcx>>>,
-}
-
-impl<'tcx> MemoryData<'tcx> {
-    fn new() -> Self {
-        MemoryData {
-            next_thread_local: 1, // start with 1 as we must not use 0 on Windows
-            thread_local: BTreeMap::new(),
-            locks: HashMap::new(),
-        }
-    }
-}
-
-impl<'tcx> Hash for MemoryData<'tcx> {
-    fn hash<H: Hasher>(&self, state: &mut H) {
-        let MemoryData {
-            next_thread_local: _,
-            thread_local,
-            locks: _,
-        } = self;
-
-        thread_local.hash(state);
-    }
-}
-
 impl<'mir, 'tcx> Machine<'mir, 'tcx> for Evaluator<'tcx> {
-    type MemoryData = MemoryData<'tcx>;
+    type MemoryData = memory::MemoryData<'tcx>;
     type MemoryKinds = memory::MemoryKind;
 
     const MUT_STATIC_KIND: Option<memory::MemoryKind> = Some(memory::MemoryKind::MutStatic);
+    const DETECT_LOOPS: bool = false;
 
     /// Returns Ok() when the function was handled, fail otherwise
     fn find_fn<'a>(
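
The removed Hash impls hashed each entry on its own and folded the per-entry hashes with wrapping_add, making the result independent of iteration order. A standalone sketch of that pattern, using the standard-library DefaultHasher in place of FxHasher:

use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

// Hash each item separately and combine with wrapping_add, so iteration
// order does not change the final hash (the removed impls used FxHasher).
fn order_independent_hash<T: Hash>(items: &[T]) -> u64 {
    items
        .iter()
        .map(|item| {
            let mut h = DefaultHasher::new();
            item.hash(&mut h);
            h.finish()
        })
        .fold(0u64, |acc, x| acc.wrapping_add(x))
}

fn main() {
    let a = order_independent_hash(&["env_a", "env_b", "env_c"]);
    let b = order_independent_hash(&["env_c", "env_a", "env_b"]);
    assert_eq!(a, b); // same entries, different order, same hash
}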

src/memory.rs (+41 -1)
@@ -1,4 +1,44 @@
-#[derive(Debug, PartialEq, Copy, Clone, Hash, Eq)]
+use std::collections::{HashMap, BTreeMap};
+
+use rustc::ty;
+
+use super::{AllocId, Scalar, LockInfo, RangeMap};
+
+pub type TlsKey = u128;
+
+#[derive(Copy, Clone, Debug, PartialEq, Eq)]
+pub struct TlsEntry<'tcx> {
+    pub(crate) data: Scalar, // Will eventually become a map from thread IDs to `Scalar`s, if we ever support more than one thread.
+    pub(crate) dtor: Option<ty::Instance<'tcx>>,
+}
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct MemoryData<'tcx> {
+    /// The Key to use for the next thread-local allocation.
+    pub(crate) next_thread_local: TlsKey,
+
+    /// pthreads-style thread-local storage.
+    pub(crate) thread_local: BTreeMap<TlsKey, TlsEntry<'tcx>>,
+
+    /// Memory regions that are locked by some function
+    ///
+    /// Only mutable (static mut, heap, stack) allocations have an entry in this map.
+    /// The entry is created when allocating the memory and deleted after deallocation.
+    pub(crate) locks: HashMap<AllocId, RangeMap<LockInfo<'tcx>>>,
+}
+
+impl<'tcx> MemoryData<'tcx> {
+    pub(crate) fn new() -> Self {
+        MemoryData {
+            next_thread_local: 1, // start with 1 as we must not use 0 on Windows
+            thread_local: BTreeMap::new(),
+            locks: HashMap::new(),
+        }
+    }
+}
+
+
+#[derive(Debug, Copy, Clone, PartialEq, Eq)]
 pub enum MemoryKind {
     /// `__rust_alloc` memory
     Rust,
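
A standalone sketch of the key-allocation behaviour these fields support: keys are handed out from next_thread_local, which starts at 1 because key 0 must not be used on Windows. The struct and create_key helper below are simplified stand-ins, not Miri's actual types (the real map stores TlsEntry values):

use std::collections::BTreeMap;

type TlsKey = u128;

// Simplified stand-in for MemoryData's TLS bookkeeping.
struct TlsDemo {
    next_thread_local: TlsKey,
    thread_local: BTreeMap<TlsKey, u64>, // value type simplified from TlsEntry
}

impl TlsDemo {
    fn new() -> Self {
        // start with 1 as we must not use 0 on Windows
        TlsDemo { next_thread_local: 1, thread_local: BTreeMap::new() }
    }

    fn create_key(&mut self) -> TlsKey {
        let key = self.next_thread_local;
        self.next_thread_local += 1;
        self.thread_local.insert(key, 0);
        key
    }
}

fn main() {
    let mut tls = TlsDemo::new();
    assert_eq!(tls.create_key(), 1); // first key is 1, never 0
    assert_eq!(tls.create_key(), 2);
}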
