Skip to content

Commit

Permalink
miri/CTFE refactor
Browse files Browse the repository at this point in the history
* Value gets renamed to Operand, so that now interpret::{Place, Operand} are the
  "dynamic" versions of mir::{Place, Operand}.
* Operand and Place share the data for their "stuff is in memory" case in a new
  type, MemPlace.  This also makes it possible to give some more precise types
  in other areas.  Both Operand and MemPlace have methods available to project
  into fields (and other kinds of projections) without causing further
  allocations.
* The type for "a Scalar or a ScalarPair" is called Value, and again used to
  give some more precise types.
* All of these have versions with an attached layout, so that we can more often
  drag the layout along instead of recomputing it.  This lets us get rid of
  `PlaceExtra::Downcast`.  MPlaceTy and PlaceTy can only be constructed
  in place.rs, making sure the layout is handled properly.
  (The same should eventually be done for ValTy and OpTy.)
* All the high-level functions to write typed memory take a Place and live in
  place.rs.  All the high-level functions to read typed memory take an
  Operand and live in operands.rs.
  • Loading branch information
RalfJung committed Aug 22, 2018
1 parent 7d4f5f7 commit ad2de8b
Show file tree
Hide file tree
Showing 31 changed files with 1,997 additions and 1,935 deletions.
2 changes: 1 addition & 1 deletion src/librustc/dep_graph/dep_node.rs
Original file line number Diff line number Diff line change
Expand Up @@ -632,7 +632,7 @@ define_dep_nodes!( <'tcx>
// queries). Making them anonymous avoids hashing the result, which
// may save a bit of time.
[anon] EraseRegionsTy { ty: Ty<'tcx> },
[anon] ConstValueToAllocation { val: &'tcx ty::Const<'tcx> },
[anon] ConstToAllocation { val: &'tcx ty::Const<'tcx> },

[input] Freevars(DefId),
[input] MaybeUnusedTraitImport(DefId),
Expand Down
6 changes: 0 additions & 6 deletions src/librustc/ich/impls_ty.rs
Original file line number Diff line number Diff line change
Expand Up @@ -397,12 +397,6 @@ impl_stable_hash_for!(enum mir::interpret::ScalarMaybeUndef {
Undef
});

impl_stable_hash_for!(enum mir::interpret::Value {
Scalar(v),
ScalarPair(a, b),
ByRef(ptr, align)
});

impl_stable_hash_for!(struct mir::interpret::Pointer {
alloc_id,
offset
Expand Down
6 changes: 3 additions & 3 deletions src/librustc/mir/interpret/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ pub use self::error::{
FrameInfo, ConstEvalResult,
};

pub use self::value::{Scalar, Value, ConstValue, ScalarMaybeUndef};
pub use self::value::{Scalar, ConstValue, ScalarMaybeUndef};

use std::fmt;
use mir;
Expand Down Expand Up @@ -135,7 +135,7 @@ impl<'tcx> Pointer {
Pointer { alloc_id, offset }
}

pub(crate) fn wrapping_signed_offset<C: HasDataLayout>(self, i: i64, cx: C) -> Self {
pub fn wrapping_signed_offset<C: HasDataLayout>(self, i: i64, cx: C) -> Self {
Pointer::new(
self.alloc_id,
Size::from_bytes(cx.data_layout().wrapping_signed_offset(self.offset.bytes(), i)),
Expand All @@ -147,7 +147,7 @@ impl<'tcx> Pointer {
(Pointer::new(self.alloc_id, Size::from_bytes(res)), over)
}

pub(crate) fn signed_offset<C: HasDataLayout>(self, i: i64, cx: C) -> EvalResult<'tcx, Self> {
pub fn signed_offset<C: HasDataLayout>(self, i: i64, cx: C) -> EvalResult<'tcx, Self> {
Ok(Pointer::new(
self.alloc_id,
Size::from_bytes(cx.data_layout().signed_offset(self.offset.bytes(), i)?),
Expand Down
200 changes: 77 additions & 123 deletions src/librustc/mir/interpret/value.rs
Original file line number Diff line number Diff line change
@@ -1,21 +1,22 @@
#![allow(unknown_lints)]

use ty::layout::{Align, HasDataLayout, Size};
use ty;
use ty::layout::{HasDataLayout, Size};
use ty::subst::Substs;
use hir::def_id::DefId;

use super::{EvalResult, Pointer, PointerArithmetic, Allocation};

/// Represents a constant value in Rust. Scalar and ScalarPair are optimizations which
/// matches Value's optimizations for easy conversions between these two types
/// matches the LocalValue optimizations for easy conversions between Value and ConstValue.
#[derive(Copy, Clone, Debug, Eq, PartialEq, PartialOrd, Ord, RustcEncodable, RustcDecodable, Hash)]
pub enum ConstValue<'tcx> {
/// Never returned from the `const_eval` query, but the HIR contains these frequently in order
/// to allow HIR creation to happen for everything before needing to be able to run constant
/// evaluation
Unevaluated(DefId, &'tcx Substs<'tcx>),
/// Used only for types with layout::abi::Scalar ABI and ZSTs
///
/// Not using the enum `Value` to encode that this must not be `Undef`
Scalar(Scalar),
/// Used only for types with layout::abi::ScalarPair
///
Expand All @@ -26,25 +27,6 @@ pub enum ConstValue<'tcx> {
}

impl<'tcx> ConstValue<'tcx> {
#[inline]
pub fn from_byval_value(val: Value) -> EvalResult<'static, Self> {
Ok(match val {
Value::ByRef(..) => bug!(),
Value::ScalarPair(a, b) => ConstValue::ScalarPair(a.unwrap_or_err()?, b),
Value::Scalar(val) => ConstValue::Scalar(val.unwrap_or_err()?),
})
}

#[inline]
pub fn to_byval_value(&self) -> Option<Value> {
match *self {
ConstValue::Unevaluated(..) |
ConstValue::ByRef(..) => None,
ConstValue::ScalarPair(a, b) => Some(Value::ScalarPair(a.into(), b)),
ConstValue::Scalar(val) => Some(Value::Scalar(val.into())),
}
}

#[inline]
pub fn try_to_scalar(&self) -> Option<Scalar> {
match *self {
Expand All @@ -56,58 +38,44 @@ impl<'tcx> ConstValue<'tcx> {
}

#[inline]
pub fn to_bits(&self, size: Size) -> Option<u128> {
pub fn try_to_bits(&self, size: Size) -> Option<u128> {
self.try_to_scalar()?.to_bits(size).ok()
}

#[inline]
pub fn to_ptr(&self) -> Option<Pointer> {
pub fn try_to_ptr(&self) -> Option<Pointer> {
self.try_to_scalar()?.to_ptr().ok()
}
}

/// A `Value` represents a single self-contained Rust value.
///
/// A `Value` can either refer to a block of memory inside an allocation (`ByRef`) or to a primitive
/// value held directly, outside of any allocation (`Scalar`). For `ByRef`-values, we remember
/// whether the pointer is supposed to be aligned or not (also see Place).
///
/// For optimization of a few very common cases, there is also a representation for a pair of
/// primitive values (`ScalarPair`). It allows Miri to avoid making allocations for checked binary
/// operations and fat pointers. This idea was taken from rustc's codegen.
#[derive(Clone, Copy, Debug, Eq, PartialEq, Ord, PartialOrd, RustcEncodable, RustcDecodable, Hash)]
pub enum Value {
ByRef(Scalar, Align),
Scalar(ScalarMaybeUndef),
ScalarPair(ScalarMaybeUndef, ScalarMaybeUndef),
}

impl<'tcx> ty::TypeFoldable<'tcx> for Value {
fn super_fold_with<'gcx: 'tcx, F: ty::fold::TypeFolder<'gcx, 'tcx>>(&self, _: &mut F) -> Self {
*self
pub fn new_slice(
val: Scalar,
len: u64,
cx: impl HasDataLayout
) -> Self {
ConstValue::ScalarPair(val, Scalar::Bits {
bits: len as u128,
size: cx.data_layout().pointer_size.bytes() as u8,
}.into())
}
fn super_visit_with<V: ty::fold::TypeVisitor<'tcx>>(&self, _: &mut V) -> bool {
false

pub fn new_dyn_trait(val: Scalar, vtable: Pointer) -> Self {
ConstValue::ScalarPair(val, Scalar::Ptr(vtable).into())
}
}

impl<'tcx> Scalar {
pub fn ptr_null<C: HasDataLayout>(cx: C) -> Self {
pub fn ptr_null(cx: impl HasDataLayout) -> Self {
Scalar::Bits {
bits: 0,
size: cx.data_layout().pointer_size.bytes() as u8,
}
}

pub fn to_value_with_len<C: HasDataLayout>(self, len: u64, cx: C) -> Value {
ScalarMaybeUndef::Scalar(self).to_value_with_len(len, cx)
}

pub fn to_value_with_vtable(self, vtable: Pointer) -> Value {
ScalarMaybeUndef::Scalar(self).to_value_with_vtable(vtable)
pub fn zst() -> Self {
Scalar::Bits { bits: 0, size: 0 }
}

pub fn ptr_signed_offset<C: HasDataLayout>(self, i: i64, cx: C) -> EvalResult<'tcx, Self> {
pub fn ptr_signed_offset(self, i: i64, cx: impl HasDataLayout) -> EvalResult<'tcx, Self> {
let layout = cx.data_layout();
match self {
Scalar::Bits { bits, size } => {
Expand All @@ -121,7 +89,7 @@ impl<'tcx> Scalar {
}
}

pub fn ptr_offset<C: HasDataLayout>(self, i: Size, cx: C) -> EvalResult<'tcx, Self> {
pub fn ptr_offset(self, i: Size, cx: impl HasDataLayout) -> EvalResult<'tcx, Self> {
let layout = cx.data_layout();
match self {
Scalar::Bits { bits, size } => {
Expand All @@ -135,7 +103,7 @@ impl<'tcx> Scalar {
}
}

pub fn ptr_wrapping_signed_offset<C: HasDataLayout>(self, i: i64, cx: C) -> Self {
pub fn ptr_wrapping_signed_offset(self, i: i64, cx: impl HasDataLayout) -> Self {
let layout = cx.data_layout();
match self {
Scalar::Bits { bits, size } => {
Expand All @@ -149,7 +117,7 @@ impl<'tcx> Scalar {
}
}

pub fn is_null_ptr<C: HasDataLayout>(self, cx: C) -> bool {
pub fn is_null_ptr(self, cx: impl HasDataLayout) -> bool {
match self {
Scalar::Bits { bits, size } => {
assert_eq!(size as u64, cx.data_layout().pointer_size.bytes());
Expand All @@ -159,8 +127,52 @@ impl<'tcx> Scalar {
}
}

pub fn to_value(self) -> Value {
Value::Scalar(ScalarMaybeUndef::Scalar(self))
pub fn from_bool(b: bool) -> Self {
Scalar::Bits { bits: b as u128, size: 1 }
}

pub fn from_char(c: char) -> Self {
Scalar::Bits { bits: c as u128, size: 4 }
}

pub fn to_bits(self, target_size: Size) -> EvalResult<'tcx, u128> {
match self {
Scalar::Bits { bits, size } => {
assert_eq!(target_size.bytes(), size as u64);
assert_ne!(size, 0, "to_bits cannot be used with zsts");
Ok(bits)
}
Scalar::Ptr(_) => err!(ReadPointerAsBytes),
}
}

pub fn to_ptr(self) -> EvalResult<'tcx, Pointer> {
match self {
Scalar::Bits {..} => err!(ReadBytesAsPointer),
Scalar::Ptr(p) => Ok(p),
}
}

pub fn is_bits(self) -> bool {
match self {
Scalar::Bits { .. } => true,
_ => false,
}
}

pub fn is_ptr(self) -> bool {
match self {
Scalar::Ptr(_) => true,
_ => false,
}
}

pub fn to_bool(self) -> EvalResult<'tcx, bool> {
match self {
Scalar::Bits { bits: 0, size: 1 } => Ok(false),
Scalar::Bits { bits: 1, size: 1 } => Ok(true),
_ => err!(InvalidBool),
}
}
}

Expand Down Expand Up @@ -202,81 +214,23 @@ impl From<Scalar> for ScalarMaybeUndef {
}
}

impl ScalarMaybeUndef {
pub fn unwrap_or_err(self) -> EvalResult<'static, Scalar> {
impl<'tcx> ScalarMaybeUndef {
pub fn not_undef(self) -> EvalResult<'static, Scalar> {
match self {
ScalarMaybeUndef::Scalar(scalar) => Ok(scalar),
ScalarMaybeUndef::Undef => err!(ReadUndefBytes),
}
}

pub fn to_value_with_len<C: HasDataLayout>(self, len: u64, cx: C) -> Value {
Value::ScalarPair(self, Scalar::Bits {
bits: len as u128,
size: cx.data_layout().pointer_size.bytes() as u8,
}.into())
}

pub fn to_value_with_vtable(self, vtable: Pointer) -> Value {
Value::ScalarPair(self, Scalar::Ptr(vtable).into())
}

pub fn ptr_offset<C: HasDataLayout>(self, i: Size, cx: C) -> EvalResult<'tcx, Self> {
match self {
ScalarMaybeUndef::Scalar(scalar) => {
scalar.ptr_offset(i, cx).map(ScalarMaybeUndef::Scalar)
},
ScalarMaybeUndef::Undef => Ok(ScalarMaybeUndef::Undef)
}
}
}

impl<'tcx> Scalar {
pub fn from_bool(b: bool) -> Self {
Scalar::Bits { bits: b as u128, size: 1 }
}

pub fn from_char(c: char) -> Self {
Scalar::Bits { bits: c as u128, size: 4 }
}

pub fn to_bits(self, target_size: Size) -> EvalResult<'tcx, u128> {
match self {
Scalar::Bits { bits, size } => {
assert_eq!(target_size.bytes(), size as u64);
assert_ne!(size, 0, "to_bits cannot be used with zsts");
Ok(bits)
}
Scalar::Ptr(_) => err!(ReadPointerAsBytes),
}
}

pub fn to_ptr(self) -> EvalResult<'tcx, Pointer> {
match self {
Scalar::Bits {..} => err!(ReadBytesAsPointer),
Scalar::Ptr(p) => Ok(p),
}
}

pub fn is_bits(self) -> bool {
match self {
Scalar::Bits { .. } => true,
_ => false,
}
self.not_undef()?.to_ptr()
}

pub fn is_ptr(self) -> bool {
match self {
Scalar::Ptr(_) => true,
_ => false,
}
pub fn to_bits(self, target_size: Size) -> EvalResult<'tcx, u128> {
self.not_undef()?.to_bits(target_size)
}

pub fn to_bool(self) -> EvalResult<'tcx, bool> {
match self {
Scalar::Bits { bits: 0, size: 1 } => Ok(false),
Scalar::Bits { bits: 1, size: 1 } => Ok(true),
_ => err!(InvalidBool),
}
self.not_undef()?.to_bool()
}
}
Loading

0 comments on commit ad2de8b

Please sign in to comment.