
Miri engine refactoring #55674

Merged
merged 10 commits on Nov 11, 2018
233 changes: 233 additions & 0 deletions src/librustc/mir/interpret/allocation.rs
@@ -0,0 +1,233 @@
// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! The virtual memory representation of the MIR interpreter

use super::{Pointer, EvalResult, AllocId};

use ty::layout::{Size, Align};
use syntax::ast::Mutability;
use std::iter;
use mir;
use std::ops::{Deref, DerefMut};
use rustc_data_structures::sorted_map::SortedMap;

#[derive(Clone, Debug, Eq, PartialEq, PartialOrd, Ord, Hash, RustcEncodable, RustcDecodable)]
pub struct Allocation<Tag=(),Extra=()> {
/// The actual bytes of the allocation.
/// Note that the bytes of a pointer represent the offset of the pointer
pub bytes: Vec<u8>,
/// Maps from byte addresses to extra data for each pointer.
/// Only the first byte of a pointer is inserted into the map; i.e.,
/// every entry in this map applies to `pointer_size` consecutive bytes starting
/// at the given offset.
pub relocations: Relocations<Tag>,
/// Denotes undefined memory. Reading from undefined memory is forbidden in miri
pub undef_mask: UndefMask,
/// The alignment of the allocation to detect unaligned reads.
pub align: Align,
/// Whether the allocation is mutable.
/// Also used by codegen to determine if a static should be put into mutable memory,
/// which happens for `static mut` and `static` with interior mutability.
pub mutability: Mutability,
/// Extra state for the machine.
pub extra: Extra,
}

pub trait AllocationExtra<Tag>: ::std::fmt::Debug + Default + Clone {
/// Hook for performing extra checks on a memory read access.
///
/// Takes read-only access to the allocation so that all memory read
/// operations can take `&self`. Use a `RefCell` in `AllocExtra` if you
/// need to mutate.
#[inline]
fn memory_read<'tcx>(
_alloc: &Allocation<Tag, Self>,
_ptr: Pointer<Tag>,
_size: Size,
) -> EvalResult<'tcx> {
Ok(())
}

/// Hook for performing extra checks on a memory write access.
#[inline]
fn memory_written<'tcx>(
_alloc: &mut Allocation<Tag, Self>,
_ptr: Pointer<Tag>,
_size: Size,
) -> EvalResult<'tcx> {
Ok(())
}
}

impl AllocationExtra<()> for () {}
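
Since the hook only gets a shared reference to the allocation, machine-specific state that must change on reads needs interior mutability, as the doc comment suggests. Below is a minimal, self-contained sketch of that pattern; `Allocation`, `EvalResult`, and `ReadCounter` here are simplified stand-ins for illustration, not the rustc types:

```rust
use std::cell::Cell;

// Stand-ins for the rustc-internal types the real trait is written against.
#[derive(Debug, Default, Clone)]
struct Allocation<Extra> {
    bytes: Vec<u8>,
    extra: Extra,
}
type EvalResult = Result<(), String>;

trait AllocationExtra: Sized {
    // The read hook only gets `&Allocation`, so any mutation has to go
    // through interior mutability (`Cell`/`RefCell`).
    fn memory_read(_alloc: &Allocation<Self>, _offset: u64, _len: u64) -> EvalResult {
        Ok(())
    }
}

// Hypothetical machine state: count reads through a `Cell`.
#[derive(Debug, Default, Clone)]
struct ReadCounter {
    reads: Cell<u64>,
}

impl AllocationExtra for ReadCounter {
    fn memory_read(alloc: &Allocation<Self>, _offset: u64, _len: u64) -> EvalResult {
        alloc.extra.reads.set(alloc.extra.reads.get() + 1);
        Ok(())
    }
}

fn main() {
    let alloc = Allocation { bytes: vec![0u8; 8], extra: ReadCounter::default() };
    ReadCounter::memory_read(&alloc, 0, 4).unwrap();
    assert_eq!(alloc.extra.reads.get(), 1);
}
```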

impl<Tag, Extra: Default> Allocation<Tag, Extra> {
/// Creates a read-only allocation initialized by the given bytes
pub fn from_bytes(slice: &[u8], align: Align) -> Self {
let mut undef_mask = UndefMask::new(Size::ZERO);
undef_mask.grow(Size::from_bytes(slice.len() as u64), true);
Self {
bytes: slice.to_owned(),
relocations: Relocations::new(),
undef_mask,
align,
mutability: Mutability::Immutable,
extra: Extra::default(),
}
}

pub fn from_byte_aligned_bytes(slice: &[u8]) -> Self {
Allocation::from_bytes(slice, Align::from_bytes(1, 1).unwrap())
}

pub fn undef(size: Size, align: Align) -> Self {
assert_eq!(size.bytes() as usize as u64, size.bytes());
Allocation {
bytes: vec![0; size.bytes() as usize],
relocations: Relocations::new(),
undef_mask: UndefMask::new(size),
align,
mutability: Mutability::Mutable,
extra: Extra::default(),
}
}
}

impl<'tcx> ::serialize::UseSpecializedDecodable for &'tcx Allocation {}

#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, RustcEncodable, RustcDecodable)]
pub struct Relocations<Tag=(), Id=AllocId>(SortedMap<Size, (Tag, Id)>);

impl<Tag, Id> Relocations<Tag, Id> {
pub fn new() -> Self {
Relocations(SortedMap::new())
}

// The caller must guarantee that the given relocations are already sorted
// by address and contain no duplicates.
pub fn from_presorted(r: Vec<(Size, (Tag, Id))>) -> Self {
Relocations(SortedMap::from_presorted_elements(r))
}
}

impl<Tag> Deref for Relocations<Tag> {
type Target = SortedMap<Size, (Tag, AllocId)>;

fn deref(&self) -> &Self::Target {
&self.0
}
}

impl<Tag> DerefMut for Relocations<Tag> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}

////////////////////////////////////////////////////////////////////////////////
// Undefined byte tracking
////////////////////////////////////////////////////////////////////////////////

type Block = u64;
const BLOCK_SIZE: u64 = 64;

#[derive(Clone, Debug, Eq, PartialEq, PartialOrd, Ord, Hash, RustcEncodable, RustcDecodable)]
pub struct UndefMask {
blocks: Vec<Block>,
len: Size,
}

impl_stable_hash_for!(struct mir::interpret::UndefMask{blocks, len});

impl UndefMask {
pub fn new(size: Size) -> Self {
let mut m = UndefMask {
blocks: vec![],
len: Size::ZERO,
};
m.grow(size, false);
m
}

/// Check whether the range `start..end` (end-exclusive) is entirely defined.
///
/// Returns `Ok(())` if it's defined. Otherwise returns the index of the byte
/// at which the first undefined access begins.
#[inline]
pub fn is_range_defined(&self, start: Size, end: Size) -> Result<(), Size> {
if end > self.len {
return Err(self.len);
}

let idx = (start.bytes()..end.bytes())
.map(|i| Size::from_bytes(i))
.find(|&i| !self.get(i));

match idx {
Some(idx) => Err(idx),
None => Ok(())
}
}

pub fn set_range(&mut self, start: Size, end: Size, new_state: bool) {
let len = self.len;
if end > len {
self.grow(end - len, new_state);
}
self.set_range_inbounds(start, end, new_state);
}

pub fn set_range_inbounds(&mut self, start: Size, end: Size, new_state: bool) {
for i in start.bytes()..end.bytes() {
self.set(Size::from_bytes(i), new_state);
}
}

#[inline]
pub fn get(&self, i: Size) -> bool {
let (block, bit) = bit_index(i);
(self.blocks[block] & 1 << bit) != 0
}

#[inline]
pub fn set(&mut self, i: Size, new_state: bool) {
let (block, bit) = bit_index(i);
if new_state {
self.blocks[block] |= 1 << bit;
} else {
self.blocks[block] &= !(1 << bit);
}
}

pub fn grow(&mut self, amount: Size, new_state: bool) {
let unused_trailing_bits = self.blocks.len() as u64 * BLOCK_SIZE - self.len.bytes();
if amount.bytes() > unused_trailing_bits {
let additional_blocks = amount.bytes() / BLOCK_SIZE + 1;
assert_eq!(additional_blocks as usize as u64, additional_blocks);
self.blocks.extend(
iter::repeat(0).take(additional_blocks as usize),
);
}
let start = self.len;
self.len += amount;
self.set_range_inbounds(start, start + amount, new_state);
}
}

#[inline]
fn bit_index(bits: Size) -> (usize, usize) {
let bits = bits.bytes();
let a = bits / BLOCK_SIZE;
let b = bits % BLOCK_SIZE;
assert_eq!(a as usize as u64, a);
assert_eq!(b as usize as u64, b);
(a as usize, b as usize)
}
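
The mask stores one definedness bit per byte, packed into 64-bit blocks, and `bit_index` splits a byte offset into a (block, bit) pair. A compact sketch of the same scheme, with plain `u64` offsets standing in for `Size` (toy code, not the rustc API):

```rust
const BLOCK_SIZE: u64 = 64;

// Same scheme as `bit_index`: byte offset -> (block index, bit within block).
fn bit_index(byte: u64) -> (usize, usize) {
    ((byte / BLOCK_SIZE) as usize, (byte % BLOCK_SIZE) as usize)
}

struct Mask {
    blocks: Vec<u64>,
    len: u64,
}

impl Mask {
    fn new(len: u64) -> Self {
        // One block per 64 bytes, rounded up; all bytes start undefined.
        Mask { blocks: vec![0; ((len + BLOCK_SIZE - 1) / BLOCK_SIZE) as usize], len }
    }
    fn set(&mut self, i: u64, defined: bool) {
        let (block, bit) = bit_index(i);
        if defined {
            self.blocks[block] |= 1 << bit;
        } else {
            self.blocks[block] &= !(1 << bit);
        }
    }
    fn get(&self, i: u64) -> bool {
        let (block, bit) = bit_index(i);
        self.blocks[block] & (1 << bit) != 0
    }
    // Mirrors `is_range_defined`: Err carries the first undefined index.
    fn is_range_defined(&self, start: u64, end: u64) -> Result<(), u64> {
        if end > self.len {
            return Err(self.len);
        }
        (start..end).find(|&i| !self.get(i)).map_or(Ok(()), Err)
    }
}

fn main() {
    let mut m = Mask::new(100);
    for i in 0..10 {
        m.set(i, true); // define bytes 0..10
    }
    assert_eq!(m.is_range_defined(0, 10), Ok(()));
    assert_eq!(m.is_range_defined(0, 11), Err(10)); // byte 10 is still undefined
}
```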
348 changes: 11 additions & 337 deletions src/librustc/mir/interpret/mod.rs
@@ -17,27 +17,32 @@ macro_rules! err {

mod error;
mod value;
mod allocation;
mod pointer;

pub use self::error::{
EvalError, EvalResult, EvalErrorKind, AssertMessage, ConstEvalErr, struct_error,
FrameInfo, ConstEvalResult, ErrorHandled,
};

pub use self::value::{Scalar, ConstValue};
pub use self::value::{Scalar, ConstValue, ScalarMaybeUndef};

pub use self::allocation::{
Allocation, AllocationExtra,
Relocations, UndefMask,
};

pub use self::pointer::{Pointer, PointerArithmetic};

use std::fmt;
use mir;
use hir::def_id::DefId;
use ty::{self, TyCtxt, Instance};
use ty::layout::{self, Align, HasDataLayout, Size};
use ty::layout::{self, Size};
use middle::region;
use std::iter;
use std::io;
use std::ops::{Deref, DerefMut};
use std::hash::Hash;
use syntax::ast::Mutability;
use rustc_serialize::{Encoder, Decodable, Encodable};
use rustc_data_structures::sorted_map::SortedMap;
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::sync::{Lock as Mutex, HashMapExt};
use rustc_data_structures::tiny_list::TinyList;
@@ -78,152 +83,6 @@ pub struct GlobalId<'tcx> {
pub promoted: Option<mir::Promoted>,
}

////////////////////////////////////////////////////////////////////////////////
// Pointer arithmetic
////////////////////////////////////////////////////////////////////////////////

pub trait PointerArithmetic: layout::HasDataLayout {
// These are not supposed to be overridden.

#[inline(always)]
fn pointer_size(&self) -> Size {
self.data_layout().pointer_size
}

//// Trunace the given value to the pointer size; also return whether there was an overflow
#[inline]
fn truncate_to_ptr(&self, val: u128) -> (u64, bool) {
let max_ptr_plus_1 = 1u128 << self.pointer_size().bits();
((val % max_ptr_plus_1) as u64, val >= max_ptr_plus_1)
}

#[inline]
fn offset<'tcx>(&self, val: u64, i: u64) -> EvalResult<'tcx, u64> {
let (res, over) = self.overflowing_offset(val, i);
if over { err!(Overflow(mir::BinOp::Add)) } else { Ok(res) }
}

#[inline]
fn overflowing_offset(&self, val: u64, i: u64) -> (u64, bool) {
let (res, over1) = val.overflowing_add(i);
let (res, over2) = self.truncate_to_ptr(u128::from(res));
(res, over1 || over2)
}

#[inline]
fn signed_offset<'tcx>(&self, val: u64, i: i64) -> EvalResult<'tcx, u64> {
let (res, over) = self.overflowing_signed_offset(val, i128::from(i));
if over { err!(Overflow(mir::BinOp::Add)) } else { Ok(res) }
}

// Overflow checking only works properly on the range from -u64 to +u64.
#[inline]
fn overflowing_signed_offset(&self, val: u64, i: i128) -> (u64, bool) {
// FIXME: is it possible to over/underflow here?
if i < 0 {
// trickery to ensure that i64::min_value() works fine
// this formula only works for true negative values, it panics for zero!
let n = u64::max_value() - (i as u64) + 1;
val.overflowing_sub(n)
} else {
self.overflowing_offset(val, i as u64)
}
}
}

impl<T: layout::HasDataLayout> PointerArithmetic for T {}


/// Pointer is generic over the type that represents a reference to Allocations,
/// thus making it possible for the most convenient representation to be used in
/// each context.
///
/// Defaults to the index based and loosely coupled AllocId.
///
/// Pointer is also generic over the `Tag` associated with each pointer,
/// which is used to do provenance tracking during execution.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Ord, PartialOrd, RustcEncodable, RustcDecodable, Hash)]
pub struct Pointer<Tag=(),Id=AllocId> {
pub alloc_id: Id,
pub offset: Size,
pub tag: Tag,
}

/// Produces a `Pointer` which points to the beginning of the Allocation
impl From<AllocId> for Pointer {
#[inline(always)]
fn from(alloc_id: AllocId) -> Self {
Pointer::new(alloc_id, Size::ZERO)
}
}

impl<'tcx> Pointer<()> {
#[inline(always)]
pub fn new(alloc_id: AllocId, offset: Size) -> Self {
Pointer { alloc_id, offset, tag: () }
}

#[inline(always)]
pub fn with_default_tag<Tag>(self) -> Pointer<Tag>
where Tag: Default
{
Pointer::new_with_tag(self.alloc_id, self.offset, Default::default())
}
}

impl<'tcx, Tag> Pointer<Tag> {
#[inline(always)]
pub fn new_with_tag(alloc_id: AllocId, offset: Size, tag: Tag) -> Self {
Pointer { alloc_id, offset, tag }
}

#[inline]
pub fn offset(self, i: Size, cx: &impl HasDataLayout) -> EvalResult<'tcx, Self> {
Ok(Pointer::new_with_tag(
self.alloc_id,
Size::from_bytes(cx.data_layout().offset(self.offset.bytes(), i.bytes())?),
self.tag
))
}

#[inline]
pub fn overflowing_offset(self, i: Size, cx: &impl HasDataLayout) -> (Self, bool) {
let (res, over) = cx.data_layout().overflowing_offset(self.offset.bytes(), i.bytes());
(Pointer::new_with_tag(self.alloc_id, Size::from_bytes(res), self.tag), over)
}

#[inline(always)]
pub fn wrapping_offset(self, i: Size, cx: &impl HasDataLayout) -> Self {
self.overflowing_offset(i, cx).0
}

#[inline]
pub fn signed_offset(self, i: i64, cx: &impl HasDataLayout) -> EvalResult<'tcx, Self> {
Ok(Pointer::new_with_tag(
self.alloc_id,
Size::from_bytes(cx.data_layout().signed_offset(self.offset.bytes(), i)?),
self.tag,
))
}

#[inline]
pub fn overflowing_signed_offset(self, i: i128, cx: &impl HasDataLayout) -> (Self, bool) {
let (res, over) = cx.data_layout().overflowing_signed_offset(self.offset.bytes(), i);
(Pointer::new_with_tag(self.alloc_id, Size::from_bytes(res), self.tag), over)
}

#[inline(always)]
pub fn wrapping_signed_offset(self, i: i64, cx: &impl HasDataLayout) -> Self {
self.overflowing_signed_offset(i128::from(i), cx).0
}

#[inline(always)]
pub fn erase_tag(self) -> Pointer {
Pointer { alloc_id: self.alloc_id, offset: self.offset, tag: () }
}
}


#[derive(Copy, Clone, Eq, Hash, Ord, PartialEq, PartialOrd, Debug)]
pub struct AllocId(pub u64);

@@ -528,91 +387,6 @@ impl<'tcx, M: fmt::Debug + Eq + Hash + Clone> AllocMap<'tcx, M> {
}
}

#[derive(Clone, Debug, Eq, PartialEq, PartialOrd, Ord, Hash, RustcEncodable, RustcDecodable)]
pub struct Allocation<Tag=(),Extra=()> {
/// The actual bytes of the allocation.
/// Note that the bytes of a pointer represent the offset of the pointer
pub bytes: Vec<u8>,
/// Maps from byte addresses to extra data for each pointer.
/// Only the first byte of a pointer is inserted into the map; i.e.,
/// every entry in this map applies to `pointer_size` consecutive bytes starting
/// at the given offset.
pub relocations: Relocations<Tag>,
/// Denotes undefined memory. Reading from undefined memory is forbidden in miri
pub undef_mask: UndefMask,
/// The alignment of the allocation to detect unaligned reads.
pub align: Align,
/// Whether the allocation is mutable.
/// Also used by codegen to determine if a static should be put into mutable memory,
/// which happens for `static mut` and `static` with interior mutability.
pub mutability: Mutability,
/// Extra state for the machine.
pub extra: Extra,
}

impl<Tag, Extra: Default> Allocation<Tag, Extra> {
/// Creates a read-only allocation initialized by the given bytes
pub fn from_bytes(slice: &[u8], align: Align) -> Self {
let mut undef_mask = UndefMask::new(Size::ZERO);
undef_mask.grow(Size::from_bytes(slice.len() as u64), true);
Self {
bytes: slice.to_owned(),
relocations: Relocations::new(),
undef_mask,
align,
mutability: Mutability::Immutable,
extra: Extra::default(),
}
}

pub fn from_byte_aligned_bytes(slice: &[u8]) -> Self {
Allocation::from_bytes(slice, Align::from_bytes(1, 1).unwrap())
}

pub fn undef(size: Size, align: Align) -> Self {
assert_eq!(size.bytes() as usize as u64, size.bytes());
Allocation {
bytes: vec![0; size.bytes() as usize],
relocations: Relocations::new(),
undef_mask: UndefMask::new(size),
align,
mutability: Mutability::Mutable,
extra: Extra::default(),
}
}
}

impl<'tcx> ::serialize::UseSpecializedDecodable for &'tcx Allocation {}

#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, RustcEncodable, RustcDecodable)]
pub struct Relocations<Tag=(), Id=AllocId>(SortedMap<Size, (Tag, Id)>);

impl<Tag, Id> Relocations<Tag, Id> {
pub fn new() -> Self {
Relocations(SortedMap::new())
}

// The caller must guarantee that the given relocations are already sorted
// by address and contain no duplicates.
pub fn from_presorted(r: Vec<(Size, (Tag, Id))>) -> Self {
Relocations(SortedMap::from_presorted_elements(r))
}
}

impl<Tag> Deref for Relocations<Tag> {
type Target = SortedMap<Size, (Tag, AllocId)>;

fn deref(&self) -> &Self::Target {
&self.0
}
}

impl<Tag> DerefMut for Relocations<Tag> {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}

////////////////////////////////////////////////////////////////////////////////
// Methods to access integers in the target endianness
////////////////////////////////////////////////////////////////////////////////
@@ -655,103 +429,3 @@ pub fn truncate(value: u128, size: Size) -> u128 {
// truncate (shift left to drop out leftover values, shift right to fill with zeroes)
(value << shift) >> shift
}
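
The comment describes the whole trick: shifting left pushes the unwanted high bits out, and the logical right shift refills with zeroes. A quick standalone check of the identity (the real function derives the shift from a `Size`; this hypothetical version takes the bit width directly):

```rust
// Standalone version of the shift trick, for illustration only.
fn truncate(value: u128, size_bits: u32) -> u128 {
    if size_bits == 0 {
        return 0;
    }
    let shift = 128 - size_bits;
    // shift left to drop the high bits, then right to refill with zeroes
    (value << shift) >> shift
}

fn main() {
    assert_eq!(truncate(0x1ff, 8), 0xff); // keep only the low 8 bits
    assert_eq!(truncate(u128::max_value(), 16), 0xffff);
}
```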

////////////////////////////////////////////////////////////////////////////////
// Undefined byte tracking
////////////////////////////////////////////////////////////////////////////////

type Block = u64;
const BLOCK_SIZE: u64 = 64;

#[derive(Clone, Debug, Eq, PartialEq, PartialOrd, Ord, Hash, RustcEncodable, RustcDecodable)]
pub struct UndefMask {
blocks: Vec<Block>,
len: Size,
}

impl_stable_hash_for!(struct mir::interpret::UndefMask{blocks, len});

impl UndefMask {
pub fn new(size: Size) -> Self {
let mut m = UndefMask {
blocks: vec![],
len: Size::ZERO,
};
m.grow(size, false);
m
}

/// Check whether the range `start..end` (end-exclusive) is entirely defined.
///
/// Returns `Ok(())` if it's defined. Otherwise returns the index of the byte
/// at which the first undefined access begins.
#[inline]
pub fn is_range_defined(&self, start: Size, end: Size) -> Result<(), Size> {
if end > self.len {
return Err(self.len);
}

let idx = (start.bytes()..end.bytes())
.map(|i| Size::from_bytes(i))
.find(|&i| !self.get(i));

match idx {
Some(idx) => Err(idx),
None => Ok(())
}
}

pub fn set_range(&mut self, start: Size, end: Size, new_state: bool) {
let len = self.len;
if end > len {
self.grow(end - len, new_state);
}
self.set_range_inbounds(start, end, new_state);
}

pub fn set_range_inbounds(&mut self, start: Size, end: Size, new_state: bool) {
for i in start.bytes()..end.bytes() {
self.set(Size::from_bytes(i), new_state);
}
}

#[inline]
pub fn get(&self, i: Size) -> bool {
let (block, bit) = bit_index(i);
(self.blocks[block] & 1 << bit) != 0
}

#[inline]
pub fn set(&mut self, i: Size, new_state: bool) {
let (block, bit) = bit_index(i);
if new_state {
self.blocks[block] |= 1 << bit;
} else {
self.blocks[block] &= !(1 << bit);
}
}

pub fn grow(&mut self, amount: Size, new_state: bool) {
let unused_trailing_bits = self.blocks.len() as u64 * BLOCK_SIZE - self.len.bytes();
if amount.bytes() > unused_trailing_bits {
let additional_blocks = amount.bytes() / BLOCK_SIZE + 1;
assert_eq!(additional_blocks as usize as u64, additional_blocks);
self.blocks.extend(
iter::repeat(0).take(additional_blocks as usize),
);
}
let start = self.len;
self.len += amount;
self.set_range_inbounds(start, start + amount, new_state);
}
}

#[inline]
fn bit_index(bits: Size) -> (usize, usize) {
let bits = bits.bytes();
let a = bits / BLOCK_SIZE;
let b = bits % BLOCK_SIZE;
assert_eq!(a as usize as u64, a);
assert_eq!(b as usize as u64, b);
(a as usize, b as usize)
}
151 changes: 151 additions & 0 deletions src/librustc/mir/interpret/pointer.rs
@@ -0,0 +1,151 @@
use mir;
use ty::layout::{self, HasDataLayout, Size};

use super::{
AllocId, EvalResult,
};

////////////////////////////////////////////////////////////////////////////////
// Pointer arithmetic
////////////////////////////////////////////////////////////////////////////////

pub trait PointerArithmetic: layout::HasDataLayout {
// These are not supposed to be overridden.

#[inline(always)]
fn pointer_size(&self) -> Size {
self.data_layout().pointer_size
}

/// Truncate the given value to the pointer size; also return whether there was an overflow
#[inline]
fn truncate_to_ptr(&self, val: u128) -> (u64, bool) {
let max_ptr_plus_1 = 1u128 << self.pointer_size().bits();
((val % max_ptr_plus_1) as u64, val >= max_ptr_plus_1)
}

#[inline]
fn offset<'tcx>(&self, val: u64, i: u64) -> EvalResult<'tcx, u64> {
let (res, over) = self.overflowing_offset(val, i);
if over { err!(Overflow(mir::BinOp::Add)) } else { Ok(res) }
}

#[inline]
fn overflowing_offset(&self, val: u64, i: u64) -> (u64, bool) {
let (res, over1) = val.overflowing_add(i);
let (res, over2) = self.truncate_to_ptr(u128::from(res));
(res, over1 || over2)
}

#[inline]
fn signed_offset<'tcx>(&self, val: u64, i: i64) -> EvalResult<'tcx, u64> {
let (res, over) = self.overflowing_signed_offset(val, i128::from(i));
if over { err!(Overflow(mir::BinOp::Add)) } else { Ok(res) }
}

// Overflow checking only works properly on the range from -u64 to +u64.
#[inline]
fn overflowing_signed_offset(&self, val: u64, i: i128) -> (u64, bool) {
// FIXME: is it possible to over/underflow here?
if i < 0 {
// trickery to ensure that i64::min_value() works fine
// this formula only works for true negative values, it panics for zero!
let n = u64::max_value() - (i as u64) + 1;
val.overflowing_sub(n)
} else {
self.overflowing_offset(val, i as u64)
}
}
}

impl<T: layout::HasDataLayout> PointerArithmetic for T {}


/// Pointer is generic over the type that represents a reference to Allocations,
/// thus making it possible for the most convenient representation to be used in
/// each context.
///
/// Defaults to the index-based and loosely coupled AllocId.
///
/// Pointer is also generic over the `Tag` associated with each pointer,
/// which is used to do provenance tracking during execution.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Ord, PartialOrd, RustcEncodable, RustcDecodable, Hash)]
pub struct Pointer<Tag=(),Id=AllocId> {
pub alloc_id: Id,
pub offset: Size,
pub tag: Tag,
}

/// Produces a `Pointer` which points to the beginning of the Allocation
impl From<AllocId> for Pointer {
#[inline(always)]
fn from(alloc_id: AllocId) -> Self {
Pointer::new(alloc_id, Size::ZERO)
}
}

impl<'tcx> Pointer<()> {
#[inline(always)]
pub fn new(alloc_id: AllocId, offset: Size) -> Self {
Pointer { alloc_id, offset, tag: () }
}

#[inline(always)]
pub fn with_default_tag<Tag>(self) -> Pointer<Tag>
where Tag: Default
{
Pointer::new_with_tag(self.alloc_id, self.offset, Default::default())
}
}

impl<'tcx, Tag> Pointer<Tag> {
#[inline(always)]
pub fn new_with_tag(alloc_id: AllocId, offset: Size, tag: Tag) -> Self {
Pointer { alloc_id, offset, tag }
}

#[inline]
pub fn offset(self, i: Size, cx: &impl HasDataLayout) -> EvalResult<'tcx, Self> {
Ok(Pointer::new_with_tag(
self.alloc_id,
Size::from_bytes(cx.data_layout().offset(self.offset.bytes(), i.bytes())?),
self.tag
))
}

#[inline]
pub fn overflowing_offset(self, i: Size, cx: &impl HasDataLayout) -> (Self, bool) {
let (res, over) = cx.data_layout().overflowing_offset(self.offset.bytes(), i.bytes());
(Pointer::new_with_tag(self.alloc_id, Size::from_bytes(res), self.tag), over)
}

#[inline(always)]
pub fn wrapping_offset(self, i: Size, cx: &impl HasDataLayout) -> Self {
self.overflowing_offset(i, cx).0
}

#[inline]
pub fn signed_offset(self, i: i64, cx: &impl HasDataLayout) -> EvalResult<'tcx, Self> {
Ok(Pointer::new_with_tag(
self.alloc_id,
Size::from_bytes(cx.data_layout().signed_offset(self.offset.bytes(), i)?),
self.tag,
))
}

#[inline]
pub fn overflowing_signed_offset(self, i: i128, cx: &impl HasDataLayout) -> (Self, bool) {
let (res, over) = cx.data_layout().overflowing_signed_offset(self.offset.bytes(), i);
(Pointer::new_with_tag(self.alloc_id, Size::from_bytes(res), self.tag), over)
}

#[inline(always)]
pub fn wrapping_signed_offset(self, i: i64, cx: &impl HasDataLayout) -> Self {
self.overflowing_signed_offset(i128::from(i), cx).0
}

#[inline(always)]
pub fn erase_tag(self) -> Pointer {
Pointer { alloc_id: self.alloc_id, offset: self.offset, tag: () }
}
}
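
The negative branch of `overflowing_signed_offset` is the subtle part: it reconstructs the magnitude of `i` with wrapping arithmetic so that even `i64::min_value()` survives the round trip. A standalone sketch of just that computation (the pointer-size truncation is omitted, since it needs the target data layout):

```rust
// Mirrors the sign-split in `overflowing_signed_offset`, minus the final
// truncation to the target's pointer size.
fn overflowing_signed_offset(val: u64, i: i128) -> (u64, bool) {
    if i < 0 {
        // Magnitude of the (negative) offset: for i = -1 this yields n = 1,
        // and for i = i64::min_value() it yields n = 2^63, with no
        // intermediate panic. For i = 0 it would overflow, hence the branch.
        let n = u64::max_value() - (i as u64) + 1;
        val.overflowing_sub(n)
    } else {
        val.overflowing_add(i as u64)
    }
}

fn main() {
    assert_eq!(overflowing_signed_offset(100, -3), (97, false));
    // Underflow wraps and is reported in the bool.
    assert_eq!(overflowing_signed_offset(2, -3), (u64::max_value(), true));
}
```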
128 changes: 128 additions & 0 deletions src/librustc/mir/interpret/value.rs
@@ -392,3 +392,131 @@ impl<Tag> From<Pointer<Tag>> for Scalar<Tag> {
Scalar::Ptr(ptr)
}
}

#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, RustcEncodable, RustcDecodable, Hash)]
pub enum ScalarMaybeUndef<Tag=(), Id=AllocId> {
Scalar(Scalar<Tag, Id>),
Undef,
}

impl<Tag> From<Scalar<Tag>> for ScalarMaybeUndef<Tag> {
#[inline(always)]
fn from(s: Scalar<Tag>) -> Self {
ScalarMaybeUndef::Scalar(s)
}
}

impl<Tag> fmt::Display for ScalarMaybeUndef<Tag> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
ScalarMaybeUndef::Undef => write!(f, "uninitialized bytes"),
ScalarMaybeUndef::Scalar(s) => write!(f, "{}", s),
}
}
}

impl<'tcx> ScalarMaybeUndef<()> {
#[inline]
pub fn with_default_tag<Tag>(self) -> ScalarMaybeUndef<Tag>
where Tag: Default
{
match self {
ScalarMaybeUndef::Scalar(s) => ScalarMaybeUndef::Scalar(s.with_default_tag()),
ScalarMaybeUndef::Undef => ScalarMaybeUndef::Undef,
}
}
}

impl<'tcx, Tag> ScalarMaybeUndef<Tag> {
#[inline]
pub fn erase_tag(self) -> ScalarMaybeUndef
{
match self {
ScalarMaybeUndef::Scalar(s) => ScalarMaybeUndef::Scalar(s.erase_tag()),
ScalarMaybeUndef::Undef => ScalarMaybeUndef::Undef,
}
}

#[inline]
pub fn not_undef(self) -> EvalResult<'static, Scalar<Tag>> {
match self {
ScalarMaybeUndef::Scalar(scalar) => Ok(scalar),
ScalarMaybeUndef::Undef => err!(ReadUndefBytes(Size::from_bytes(0))),
}
}

#[inline(always)]
pub fn to_ptr(self) -> EvalResult<'tcx, Pointer<Tag>> {
self.not_undef()?.to_ptr()
}

#[inline(always)]
pub fn to_bits(self, target_size: Size) -> EvalResult<'tcx, u128> {
self.not_undef()?.to_bits(target_size)
}

#[inline(always)]
pub fn to_bool(self) -> EvalResult<'tcx, bool> {
self.not_undef()?.to_bool()
}

#[inline(always)]
pub fn to_char(self) -> EvalResult<'tcx, char> {
self.not_undef()?.to_char()
}

#[inline(always)]
pub fn to_f32(self) -> EvalResult<'tcx, f32> {
self.not_undef()?.to_f32()
}

#[inline(always)]
pub fn to_f64(self) -> EvalResult<'tcx, f64> {
self.not_undef()?.to_f64()
}

#[inline(always)]
pub fn to_u8(self) -> EvalResult<'tcx, u8> {
self.not_undef()?.to_u8()
}

#[inline(always)]
pub fn to_u32(self) -> EvalResult<'tcx, u32> {
self.not_undef()?.to_u32()
}

#[inline(always)]
pub fn to_u64(self) -> EvalResult<'tcx, u64> {
self.not_undef()?.to_u64()
}

#[inline(always)]
pub fn to_usize(self, cx: &impl HasDataLayout) -> EvalResult<'tcx, u64> {
self.not_undef()?.to_usize(cx)
}

#[inline(always)]
pub fn to_i8(self) -> EvalResult<'tcx, i8> {
self.not_undef()?.to_i8()
}

#[inline(always)]
pub fn to_i32(self) -> EvalResult<'tcx, i32> {
self.not_undef()?.to_i32()
}

#[inline(always)]
pub fn to_i64(self) -> EvalResult<'tcx, i64> {
self.not_undef()?.to_i64()
}

#[inline(always)]
pub fn to_isize(self, cx: &impl HasDataLayout) -> EvalResult<'tcx, i64> {
self.not_undef()?.to_isize(cx)
}
}

impl_stable_hash_for!(enum ::mir::interpret::ScalarMaybeUndef {
Scalar(v),
Undef
});
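
All of the `to_*` conversions share one shape: `not_undef` filters out uninitialized values, and the rest is delegated to the underlying `Scalar`. A self-contained sketch of the pattern, with a bare `u128` standing in for the real `Scalar` and `String` for the error type:

```rust
use std::convert::TryFrom;

// Toy stand-in: the real `Scalar` carries bits-plus-size or a pointer;
// a bare `u128` is enough to show the shape of the API.
#[derive(Clone, Copy, Debug, PartialEq)]
enum ScalarMaybeUndef {
    Scalar(u128),
    Undef,
}

impl ScalarMaybeUndef {
    fn not_undef(self) -> Result<u128, String> {
        match self {
            ScalarMaybeUndef::Scalar(bits) => Ok(bits),
            ScalarMaybeUndef::Undef => Err("read of uninitialized bytes".into()),
        }
    }

    // Every `to_*` helper is the same two steps: reject `Undef`, then
    // defer to the underlying scalar conversion.
    fn to_u8(self) -> Result<u8, String> {
        let bits = self.not_undef()?;
        u8::try_from(bits).map_err(|_| "scalar out of range for u8".to_string())
    }
}

fn main() {
    assert_eq!(ScalarMaybeUndef::Scalar(42).to_u8(), Ok(42));
    assert!(ScalarMaybeUndef::Undef.to_u8().is_err());
}
```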
24 changes: 2 additions & 22 deletions src/librustc_mir/interpret/machine.rs
@@ -20,7 +20,7 @@ use rustc::mir;
use rustc::ty::{self, layout::{Size, TyLayout}, query::TyCtxtAt};

use super::{
Allocation, AllocId, EvalResult, Scalar,
Allocation, AllocId, EvalResult, Scalar, AllocationExtra,
EvalContext, PlaceTy, MPlaceTy, OpTy, Pointer, MemoryKind,
};

@@ -78,7 +78,7 @@ pub trait Machine<'a, 'mir, 'tcx>: Sized {
type PointerTag: ::std::fmt::Debug + Default + Copy + Eq + Hash + 'static;

/// Extra data stored in every allocation.
type AllocExtra: ::std::fmt::Debug + Default + Clone;
type AllocExtra: AllocationExtra<Self::PointerTag>;

/// Memory's allocation map
type MemoryMap:
@@ -174,26 +174,6 @@ pub trait Machine<'a, 'mir, 'tcx>: Sized {
dest: PlaceTy<'tcx, Self::PointerTag>,
) -> EvalResult<'tcx>;

/// Hook for performing extra checks on a memory read access.
#[inline]
fn memory_read(
_alloc: &Allocation<Self::PointerTag, Self::AllocExtra>,
_ptr: Pointer<Self::PointerTag>,
_size: Size,
) -> EvalResult<'tcx> {
Ok(())
}

/// Hook for performing extra checks on a memory write access.
#[inline]
fn memory_written(
_alloc: &mut Allocation<Self::PointerTag, Self::AllocExtra>,
_ptr: Pointer<Self::PointerTag>,
_size: Size,
) -> EvalResult<'tcx> {
Ok(())
}

/// Hook for performing extra checks when memory gets deallocated.
#[inline]
fn memory_deallocated(
6 changes: 3 additions & 3 deletions src/librustc_mir/interpret/memory.rs
@@ -28,7 +28,7 @@ use rustc_data_structures::fx::{FxHashSet, FxHashMap};
use syntax::ast::Mutability;

use super::{
Pointer, AllocId, Allocation, ConstValue, GlobalId,
Pointer, AllocId, Allocation, ConstValue, GlobalId, AllocationExtra,
EvalResult, Scalar, EvalErrorKind, AllocType, PointerArithmetic,
Machine, AllocMap, MayLeak, ScalarMaybeUndef, ErrorHandled,
};
@@ -637,7 +637,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
}

let alloc = self.get(ptr.alloc_id)?;
M::memory_read(alloc, ptr, size)?;
AllocationExtra::memory_read(alloc, ptr, size)?;

assert_eq!(ptr.offset.bytes() as usize as u64, ptr.offset.bytes());
assert_eq!(size.bytes() as usize as u64, size.bytes());
@@ -683,7 +683,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
self.clear_relocations(ptr, size)?;

let alloc = self.get_mut(ptr.alloc_id)?;
M::memory_written(alloc, ptr, size)?;
AllocationExtra::memory_written(alloc, ptr, size)?;

assert_eq!(ptr.offset.bytes() as usize as u64, ptr.offset.bytes());
assert_eq!(size.bytes() as usize as u64, size.bytes());
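
The change in these call sites is the heart of the refactoring: `Memory` now reaches the hooks through the `AllocationExtra` bound on `M::AllocExtra` instead of through the `Machine` trait itself. A toy sketch of that dispatch shape (all names are stand-ins for the rustc types):

```rust
// Stand-ins for the rustc types; `Extra` is the machine-chosen payload.
struct Allocation<Extra> {
    bytes: Vec<u8>,
    extra: Extra,
}

trait AllocationExtra: Sized {
    fn memory_read(alloc: &Allocation<Self>, offset: u64, len: u64) -> Result<(), String>;
}

// No-op hook, like `impl AllocationExtra<()> for ()` in allocation.rs.
impl AllocationExtra for () {
    fn memory_read(_: &Allocation<()>, _: u64, _: u64) -> Result<(), String> {
        Ok(())
    }
}

// Analogue of the `Memory` read path: generic over `Extra` alone, so the
// hook is reachable as `AllocationExtra::memory_read` with no Machine in sight.
fn read_bytes<Extra: AllocationExtra>(
    alloc: &Allocation<Extra>,
    offset: u64,
    len: u64,
) -> Result<&[u8], String> {
    AllocationExtra::memory_read(alloc, offset, len)?;
    alloc
        .bytes
        .get(offset as usize..(offset + len) as usize)
        .ok_or_else(|| "out-of-bounds read".to_string())
}

fn main() {
    let alloc = Allocation { bytes: vec![1, 2, 3, 4], extra: () };
    assert_eq!(read_bytes(&alloc, 1, 2).unwrap(), &[2, 3][..]);
}
```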
126 changes: 1 addition & 125 deletions src/librustc_mir/interpret/operand.rs
@@ -12,7 +12,6 @@
//! All high-level functions to read from memory work on operands as sources.
use std::convert::TryInto;
use std::fmt;

use rustc::{mir, ty};
use rustc::ty::layout::{self, Size, LayoutOf, TyLayout, HasDataLayout, IntegerExt};
@@ -23,130 +22,7 @@ use rustc::mir::interpret::{
EvalResult, EvalErrorKind
};
use super::{EvalContext, Machine, MemPlace, MPlaceTy, MemoryKind};

#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, RustcEncodable, RustcDecodable, Hash)]
pub enum ScalarMaybeUndef<Tag=(), Id=AllocId> {
Scalar(Scalar<Tag, Id>),
Undef,
}

impl<Tag> From<Scalar<Tag>> for ScalarMaybeUndef<Tag> {
#[inline(always)]
fn from(s: Scalar<Tag>) -> Self {
ScalarMaybeUndef::Scalar(s)
}
}

impl<Tag> fmt::Display for ScalarMaybeUndef<Tag> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
ScalarMaybeUndef::Undef => write!(f, "uninitialized bytes"),
ScalarMaybeUndef::Scalar(s) => write!(f, "{}", s),
}
}
}

impl<'tcx> ScalarMaybeUndef<()> {
#[inline]
pub fn with_default_tag<Tag>(self) -> ScalarMaybeUndef<Tag>
where Tag: Default
{
match self {
ScalarMaybeUndef::Scalar(s) => ScalarMaybeUndef::Scalar(s.with_default_tag()),
ScalarMaybeUndef::Undef => ScalarMaybeUndef::Undef,
}
}
}

impl<'tcx, Tag> ScalarMaybeUndef<Tag> {
#[inline]
pub fn erase_tag(self) -> ScalarMaybeUndef
{
match self {
ScalarMaybeUndef::Scalar(s) => ScalarMaybeUndef::Scalar(s.erase_tag()),
ScalarMaybeUndef::Undef => ScalarMaybeUndef::Undef,
}
}

#[inline]
pub fn not_undef(self) -> EvalResult<'static, Scalar<Tag>> {
match self {
ScalarMaybeUndef::Scalar(scalar) => Ok(scalar),
ScalarMaybeUndef::Undef => err!(ReadUndefBytes(Size::from_bytes(0))),
}
}

#[inline(always)]
pub fn to_ptr(self) -> EvalResult<'tcx, Pointer<Tag>> {
self.not_undef()?.to_ptr()
}

#[inline(always)]
pub fn to_bits(self, target_size: Size) -> EvalResult<'tcx, u128> {
self.not_undef()?.to_bits(target_size)
}

#[inline(always)]
pub fn to_bool(self) -> EvalResult<'tcx, bool> {
self.not_undef()?.to_bool()
}

#[inline(always)]
pub fn to_char(self) -> EvalResult<'tcx, char> {
self.not_undef()?.to_char()
}

#[inline(always)]
pub fn to_f32(self) -> EvalResult<'tcx, f32> {
self.not_undef()?.to_f32()
}

#[inline(always)]
pub fn to_f64(self) -> EvalResult<'tcx, f64> {
self.not_undef()?.to_f64()
}

#[inline(always)]
pub fn to_u8(self) -> EvalResult<'tcx, u8> {
self.not_undef()?.to_u8()
}

#[inline(always)]
pub fn to_u32(self) -> EvalResult<'tcx, u32> {
self.not_undef()?.to_u32()
}

#[inline(always)]
pub fn to_u64(self) -> EvalResult<'tcx, u64> {
self.not_undef()?.to_u64()
}

#[inline(always)]
pub fn to_usize(self, cx: &impl HasDataLayout) -> EvalResult<'tcx, u64> {
self.not_undef()?.to_usize(cx)
}

#[inline(always)]
pub fn to_i8(self) -> EvalResult<'tcx, i8> {
self.not_undef()?.to_i8()
}

#[inline(always)]
pub fn to_i32(self) -> EvalResult<'tcx, i32> {
self.not_undef()?.to_i32()
}

#[inline(always)]
pub fn to_i64(self) -> EvalResult<'tcx, i64> {
self.not_undef()?.to_i64()
}

#[inline(always)]
pub fn to_isize(self, cx: &impl HasDataLayout) -> EvalResult<'tcx, i64> {
self.not_undef()?.to_isize(cx)
}
}

pub use rustc::mir::interpret::ScalarMaybeUndef;

/// A `Value` represents a single immediate self-contained Rust value.
///
3 changes: 2 additions & 1 deletion src/librustc_mir/interpret/place.rs
Original file line number Diff line number Diff line change
@@ -24,7 +24,7 @@ use rustc::mir::interpret::{
GlobalId, AllocId, Allocation, Scalar, EvalResult, Pointer, PointerArithmetic
};
use super::{
EvalContext, Machine, AllocMap,
EvalContext, Machine, AllocMap, AllocationExtra,
Immediate, ImmTy, ScalarMaybeUndef, Operand, OpTy, MemoryKind
};

@@ -264,6 +264,7 @@ where
Tag: ::std::fmt::Debug+Default+Copy+Eq+Hash+'static,
M: Machine<'a, 'mir, 'tcx, PointerTag=Tag>,
M::MemoryMap: AllocMap<AllocId, (MemoryKind<M::MemoryKinds>, Allocation<Tag, M::AllocExtra>)>,
M::AllocExtra: AllocationExtra<Tag>,
{
/// Take a value, which represents a (thin or fat) reference, and make it a place.
/// Alignment is just based on the type. This is the inverse of `create_ref`.
5 changes: 0 additions & 5 deletions src/librustc_mir/interpret/snapshot.rs
@@ -195,11 +195,6 @@ impl<'a, Ctx> Snapshot<'a, Ctx> for Scalar
}
}

impl_stable_hash_for!(enum ::interpret::ScalarMaybeUndef {
Scalar(v),
Undef
});

impl_snapshot_for!(enum ScalarMaybeUndef {
Scalar(s),
Undef,
2 changes: 1 addition & 1 deletion src/librustc_mir/interpret/validity.rs
@@ -17,7 +17,7 @@ use rustc::ty::layout::{self, Size, Align, TyLayout, LayoutOf};
use rustc::ty;
use rustc_data_structures::fx::FxHashSet;
use rustc::mir::interpret::{
Scalar, AllocType, EvalResult, EvalErrorKind
Scalar, AllocType, EvalResult, EvalErrorKind,
};

use super::{