Auto merge of #856 - RalfJung:type_dispatch_first, r=RalfJung
Adjust for ptr_op changes

This is the Miri side of rust-lang/rust#62946.
bors committed Aug 3, 2019
2 parents 247786d + 5efacf6 commit 9d3fdee
Showing 6 changed files with 29 additions and 224 deletions.
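
Per the branch name (type_dispatch_first) and the deleted integer fast path below, the rustc change makes the core interpreter dispatch on the operand type first: integer-typed binary operations are evaluated by the engine itself, and the machine hook (renamed here from ptr_op to binary_ptr_op) is only called for pointer-typed operands. A stand-alone toy model of that split; all names and types here are illustrative, not rustc_mir or Miri code:

// Toy model only: the enum variant stands in for the operand's MIR-level type.
#[derive(Clone, Copy, PartialEq)]
enum Value {
    Int(u128),
    Ptr { alloc_id: u64, offset: u64 },
}

// "Machine" hook: after the change it only ever sees pointer-typed operands.
fn machine_ptr_eq(l: Value, r: Value) -> bool {
    l == r
}

// "Engine": handles integer-typed operations itself, defers the rest.
fn engine_eq(l: Value, r: Value) -> bool {
    match (l, r) {
        (Value::Int(a), Value::Int(b)) => a == b, // stays in the engine
        _ => machine_ptr_eq(l, r),                // goes to the machine
    }
}

fn main() {
    assert!(engine_eq(Value::Int(7), Value::Int(7)));
    assert!(!engine_eq(
        Value::Ptr { alloc_id: 1, offset: 0 },
        Value::Ptr { alloc_id: 2, offset: 0 },
    ));
}

This is why the diff below deletes Miri's own integer fast path: that case no longer reaches the machine.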
2 changes: 1 addition & 1 deletion rust-version
@@ -1 +1 @@
d7270712cb446aad0817040bbca73a8d024f67b0
8e917f48382c6afaf50568263b89d35fba5d98e4
4 changes: 2 additions & 2 deletions src/machine.rs
@@ -179,13 +179,13 @@ impl<'mir, 'tcx> Machine<'mir, 'tcx> for Evaluator<'tcx> {
}

#[inline(always)]
fn ptr_op(
fn binary_ptr_op(
ecx: &rustc_mir::interpret::InterpCx<'mir, 'tcx, Self>,
bin_op: mir::BinOp,
left: ImmTy<'tcx, Tag>,
right: ImmTy<'tcx, Tag>,
) -> InterpResult<'tcx, (Scalar<Tag>, bool)> {
ecx.ptr_op(bin_op, left, right)
ecx.binary_ptr_op(bin_op, left, right)
}

fn box_alloc(
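
Miri's hook is a one-line delegation to the EvalContextExt::binary_ptr_op method in src/operator.rs, shown next. For a sense of when it fires, here is a small illustrative program (not a test from this commit) whose raw-pointer comparisons are routed through that method when run under Miri:

// Illustrative only. Thin-pointer comparisons hit the Scalar paths of
// binary_ptr_op; the slice-pointer comparison hits the ScalarPair (fat
// pointer) path. The relational comparison relies on force_bits, i.e. on
// Miri assigning concrete addresses (intptrcast).
fn main() {
    let arr = [1u8, 2, 3, 4];
    let p = arr.as_ptr();
    let q = unsafe { p.add(2) };
    assert!(p != q); // Ne on thin pointers
    assert!(p < q);  // Lt, compared via concrete addresses
    let s: *const [u8] = &arr[..];
    let t: *const [u8] = &arr[..2];
    assert!(s != t); // Ne on fat (slice) pointers: data pointer and length both compared
}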
231 changes: 25 additions & 206 deletions src/operator.rs
@@ -1,4 +1,4 @@
use rustc::ty::{Ty, layout::{Size, LayoutOf}};
use rustc::ty::{Ty, layout::LayoutOf};
use rustc::mir;

use crate::*;
@@ -9,21 +9,13 @@ pub trait EvalContextExt<'tcx> {
ptr: Pointer<Tag>
) -> InterpResult<'tcx>;

fn ptr_op(
fn binary_ptr_op(
&self,
bin_op: mir::BinOp,
left: ImmTy<'tcx, Tag>,
right: ImmTy<'tcx, Tag>,
) -> InterpResult<'tcx, (Scalar<Tag>, bool)>;

fn ptr_int_arithmetic(
&self,
bin_op: mir::BinOp,
left: Pointer<Tag>,
right: u128,
signed: bool,
) -> InterpResult<'tcx, (Scalar<Tag>, bool)>;

fn ptr_eq(
&self,
left: Scalar<Tag>,
@@ -46,7 +38,7 @@ impl<'mir, 'tcx> EvalContextExt<'tcx> for super::MiriEvalContext<'mir, 'tcx> {
ptr.check_in_alloc(size, CheckInAllocMsg::InboundsTest)
}

fn ptr_op(
fn binary_ptr_op(
&self,
bin_op: mir::BinOp,
left: ImmTy<'tcx, Tag>,
@@ -56,24 +48,9 @@ impl<'mir, 'tcx> EvalContextExt<'tcx> for super::MiriEvalContext<'mir, 'tcx> {

trace!("ptr_op: {:?} {:?} {:?}", *left, bin_op, *right);

// Treat everything of integer *type* as integer *value*.
if left.layout.ty.is_integral() {
// This is actually an integer operation, so dispatch back to the core engine.
// TODO: Once intptrcast is the default, librustc_mir should never even call us
// for integer types.
assert!(right.layout.ty.is_integral());
let l_bits = self.force_bits(left.imm.to_scalar()?, left.layout.size)?;
let r_bits = self.force_bits(right.imm.to_scalar()?, right.layout.size)?;

let left = ImmTy::from_scalar(Scalar::from_uint(l_bits, left.layout.size), left.layout);
let right = ImmTy::from_scalar(Scalar::from_uint(r_bits, left.layout.size), right.layout);

return self.binary_op(bin_op, left, right);
}

// Operations that support fat pointers
match bin_op {
Ok(match bin_op {
Eq | Ne => {
// This supports fat pointers.
let eq = match (*left, *right) {
(Immediate::Scalar(left), Immediate::Scalar(right)) =>
self.ptr_eq(left.not_undef()?, right.not_undef()?)?,
@@ -82,103 +59,38 @@ impl<'mir, 'tcx> EvalContextExt<'tcx> for super::MiriEvalContext<'mir, 'tcx> {
self.ptr_eq(left2.not_undef()?, right2.not_undef()?)?,
_ => bug!("Type system should not allow comparing Scalar with ScalarPair"),
};
return Ok((Scalar::from_bool(if bin_op == Eq { eq } else { !eq }), false));
(Scalar::from_bool(if bin_op == Eq { eq } else { !eq }), false)
}
_ => {},
}

// Now we expect no more fat pointers.
let left_layout = left.layout;
let left = left.to_scalar()?;
let right_layout = right.layout;
let right = right.to_scalar()?;
debug_assert!(left.is_ptr() || right.is_ptr() || bin_op == Offset);
Lt | Le | Gt | Ge => {
// Just compare the integers.
// TODO: Do we really want to *always* do that, even when comparing two live in-bounds pointers?
let left = self.force_bits(left.to_scalar()?, left.layout.size)?;
let right = self.force_bits(right.to_scalar()?, right.layout.size)?;
let res = match bin_op {
Lt => left < right,
Le => left <= right,
Gt => left > right,
Ge => left >= right,
_ => bug!("We already established it has to be one of these operators."),
};
(Scalar::from_bool(res), false)
}

Ok(match bin_op {
Offset => {
let pointee_ty = left_layout.ty
let pointee_ty = left.layout.ty
.builtin_deref(true)
.expect("Offset called on non-ptr type")
.ty;
let ptr = self.pointer_offset_inbounds(
left,
left.to_scalar()?,
pointee_ty,
right.to_isize(self)?,
right.to_scalar()?.to_isize(self)?,
)?;
(ptr, false)
}
// These need both to be pointer, and fail if they are not in the same location
Lt | Le | Gt | Ge | Sub if left.is_ptr() && right.is_ptr() => {
let left = left.to_ptr().expect("we checked is_ptr");
let right = right.to_ptr().expect("we checked is_ptr");
if left.alloc_id == right.alloc_id {
let res = match bin_op {
Lt => left.offset < right.offset,
Le => left.offset <= right.offset,
Gt => left.offset > right.offset,
Ge => left.offset >= right.offset,
Sub => {
// subtract the offsets
let left_offset = Scalar::from_uint(left.offset.bytes(), self.memory().pointer_size());
let right_offset = Scalar::from_uint(right.offset.bytes(), self.memory().pointer_size());
let layout = self.layout_of(self.tcx.types.usize)?;
return self.binary_op(
Sub,
ImmTy::from_scalar(left_offset, layout),
ImmTy::from_scalar(right_offset, layout),
)
}
_ => bug!("We already established it has to be one of these operators."),
};
(Scalar::from_bool(res), false)
} else {
// Both are pointers, but from different allocations.
throw_unsup!(InvalidPointerMath)
}
}
Gt | Ge if left.is_ptr() && right.is_bits() => {
// "ptr >[=] integer" can be tested if the integer is small enough.
let left = left.to_ptr().expect("we checked is_ptr");
let right = right.to_bits(self.memory().pointer_size()).expect("we checked is_bits");
let (_alloc_size, alloc_align) = self.memory()
.get_size_and_align(left.alloc_id, AllocCheck::MaybeDead)
.expect("alloc info with MaybeDead cannot fail");
let min_ptr_val = u128::from(alloc_align.bytes()) + u128::from(left.offset.bytes());
let result = match bin_op {
Gt => min_ptr_val > right,
Ge => min_ptr_val >= right,
_ => bug!(),
};
if result {
// Definitely true!
(Scalar::from_bool(true), false)
} else {
// Sorry, can't tell.
throw_unsup!(InvalidPointerMath)
}
}
// These work if the left operand is a pointer, and the right an integer
Add | BitAnd | Sub | Rem if left.is_ptr() && right.is_bits() => {
// Cast to i128 is fine as we checked the kind to be ptr-sized
self.ptr_int_arithmetic(
bin_op,
left.to_ptr().expect("we checked is_ptr"),
right.to_bits(self.memory().pointer_size()).expect("we checked is_bits"),
right_layout.abi.is_signed(),
)?
}
// Commutative operators also work if the integer is on the left
Add | BitAnd if left.is_bits() && right.is_ptr() => {
// This is a commutative operation, just swap the operands
self.ptr_int_arithmetic(
bin_op,
right.to_ptr().expect("we checked is_ptr"),
left.to_bits(self.memory().pointer_size()).expect("we checked is_bits"),
left_layout.abi.is_signed(),
)?
}
// Nothing else works
_ => throw_unsup!(InvalidPointerMath),

_ => bug!("Invalid operator on pointers: {:?}", bin_op)
})
}
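
One behavioral consequence of the new Lt | Le | Gt | Ge arm, hinted at by its TODO: the old code rejected ordering comparisons between pointers into different allocations (InvalidPointerMath), while the new code answers them using the concrete addresses that force_bits assigns. An illustrative example follows (not a test from this commit; it assumes intptrcast-backed force_bits, without which the comparison would still be rejected):

// Illustrative: previously an unsupported-operation error, now answered by
// comparing the allocations' assigned base addresses plus offsets.
fn main() {
    let a = 1u8;
    let b = 2u8;
    let ordered = (&a as *const u8) < (&b as *const u8); // pointers into different allocations
    println!("ordered = {}", ordered);
}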

@@ -195,99 +107,6 @@ impl<'mir, 'tcx> EvalContextExt<'tcx> for super::MiriEvalContext<'mir, 'tcx> {
Ok(left == right)
}

fn ptr_int_arithmetic(
&self,
bin_op: mir::BinOp,
left: Pointer<Tag>,
right: u128,
signed: bool,
) -> InterpResult<'tcx, (Scalar<Tag>, bool)> {
use rustc::mir::BinOp::*;

fn map_to_primval((res, over): (Pointer<Tag>, bool)) -> (Scalar<Tag>, bool) {
(Scalar::Ptr(res), over)
}

Ok(match bin_op {
Sub =>
// The only way this can overflow is by underflowing, so signedness of the right
// operand does not matter.
map_to_primval(left.overflowing_signed_offset(-(right as i128), self)),
Add if signed =>
map_to_primval(left.overflowing_signed_offset(right as i128, self)),
Add if !signed =>
map_to_primval(left.overflowing_offset(Size::from_bytes(right as u64), self)),

BitAnd if !signed => {
let ptr_base_align = self.memory().get_size_and_align(left.alloc_id, AllocCheck::MaybeDead)
.expect("alloc info with MaybeDead cannot fail")
.1.bytes();
let base_mask = {
// FIXME: use `interpret::truncate`, once that takes a `Size` instead of a `Layout`.
let shift = 128 - self.memory().pointer_size().bits();
let value = !(ptr_base_align as u128 - 1);
// Truncate (shift left to drop out leftover values, shift right to fill with zeroes).
(value << shift) >> shift
};
let ptr_size = self.memory().pointer_size();
trace!("ptr BitAnd, align {}, operand {:#010x}, base_mask {:#010x}",
ptr_base_align, right, base_mask);
if right & base_mask == base_mask {
// Case 1: the base address bits are all preserved, i.e., right is all-1 there.
let offset = (left.offset.bytes() as u128 & right) as u64;
(
Scalar::Ptr(Pointer::new_with_tag(
left.alloc_id,
Size::from_bytes(offset),
left.tag,
)),
false,
)
} else if right & base_mask == 0 {
// Case 2: the base address bits are all taken away, i.e., right is all-0 there.
let v = Scalar::from_uint((left.offset.bytes() as u128) & right, ptr_size);
(v, false)
} else {
throw_unsup!(ReadPointerAsBytes);
}
}

Rem if !signed => {
// Doing modulo a divisor of the alignment is allowed.
// (Intuition: modulo a divisor leaks less information.)
let ptr_base_align = self.memory().get_size_and_align(left.alloc_id, AllocCheck::MaybeDead)
.expect("alloc info with MaybeDead cannot fail")
.1.bytes();
let right = right as u64;
let ptr_size = self.memory().pointer_size();
if right == 1 {
// Modulo 1 is always 0.
(Scalar::from_uint(0u32, ptr_size), false)
} else if ptr_base_align % right == 0 {
// The base address would be cancelled out by the modulo operation, so we can
// just take the modulo of the offset.
(
Scalar::from_uint((left.offset.bytes() % right) as u128, ptr_size),
false,
)
} else {
throw_unsup!(ReadPointerAsBytes);
}
}

_ => {
let msg = format!(
"unimplemented binary op on pointer {:?}: {:?}, {:?} ({})",
bin_op,
left,
right,
if signed { "signed" } else { "unsigned" }
);
throw_unsup!(Unimplemented(msg));
}
})
}

/// Raises an error if the offset moves the pointer outside of its allocation.
/// We consider ZSTs their own huge allocation that doesn't overlap with anything (and nothing
/// moves in there because the size is 0). We also consider the NULL pointer its own separate
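
The deleted ptr_int_arithmetic implemented Miri's old alignment-based reasoning for arithmetic on values that are symbolically pointers, most notably masking an address down to an alignment boundary (the BitAnd case) and checking alignment via modulo (the Rem case), without ever materializing a concrete address. With the new type-based dispatch, operations like these are integer-typed and are therefore evaluated in the core engine on the concrete address that force_bits (intptrcast) provides, so the helper and its special cases can go; the deleted test files at the end of the diff (including tests/compile-fail/ptr_ge_integer.rs) presumably covered the old behavior. Illustrative user code of the kind this reasoning was designed for (not a test from this commit):

// Illustrative: every operation on `addr` is integer-typed, so under the new
// scheme the core engine evaluates it on a concrete address instead of
// deferring to the deleted ptr_int_arithmetic.
fn main() {
    let x = 0u64;
    let addr = &x as *const u64 as usize;
    let aligned_down = addr & !(8 - 1); // old BitAnd case: mask to 8-byte alignment
    let misalignment = addr % 8;        // old Rem case: alignment check
    assert!(aligned_down <= addr);
    assert!(misalignment < 8);
    println!("aligned_down = {:#x}, misalignment = {}", aligned_down, misalignment);
}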
2 changes: 1 addition & 1 deletion src/shims/intrinsics.rs
@@ -306,7 +306,7 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx
match dest.layout.abi {
layout::Abi::Scalar(ref s) => {
let x = Scalar::from_int(0, s.value.size(this));
this.write_immediate(Immediate::Scalar(x.into()), dest)?;
this.write_scalar(x, dest)?;
}
layout::Abi::ScalarPair(ref s1, ref s2) => {
let x = Scalar::from_int(0, s1.value.size(this));

This file was deleted.

7 changes: 0 additions & 7 deletions tests/compile-fail/ptr_ge_integer.rs

This file was deleted.
