rust/src/librustc_mir/interpret/terminator/mod.rs
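//! Evaluation of MIR terminators: returns, gotos, switch/assert branches,
//! function calls (including intrinsics, shims and virtual calls) and drops,
//! plus the argument-passing logic for the supported call ABIs.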

use rustc::mir;
use rustc::ty::{self, Ty};
use rustc::ty::layout::LayoutOf;
use syntax::codemap::Span;
use rustc_target::spec::abi::Abi;
use rustc::mir::interpret::{EvalResult, PrimVal, Value};
use super::{EvalContext, Place, Machine, ValTy};
use rustc_data_structures::indexed_vec::Idx;
use interpret::memory::HasMemory;
mod drop;
impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
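/// Transfer control to `target` within the current stack frame and reset the
/// statement counter to the start of that block.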
pub fn goto_block(&mut self, target: mir::BasicBlock) {
self.frame_mut().block = target;
self.frame_mut().stmt = 0;
}
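/// Evaluate a single MIR terminator of the current frame, transferring
/// control (goto/switch/call/drop/assert) or popping the frame on `Return`.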
pub(super) fn eval_terminator(
&mut self,
terminator: &mir::Terminator<'tcx>,
) -> EvalResult<'tcx> {
use rustc::mir::TerminatorKind::*;
match terminator.kind {
Return => {
self.dump_local(self.frame().return_place);
self.pop_stack_frame()?
}
Goto { target } => self.goto_block(target),
SwitchInt {
ref discr,
ref values,
ref targets,
..
} => {
let discr_val = self.eval_operand(discr)?;
let discr_prim = self.value_to_primval(discr_val)?;
// Branch to the `otherwise` target by default; it is always the last entry in `targets`.
let mut target_block = targets[targets.len() - 1];
for (index, &const_int) in values.iter().enumerate() {
if discr_prim.to_bytes()? == const_int {
target_block = targets[index];
break;
}
}
self.goto_block(target_block);
}
Call {
ref func,
ref args,
ref destination,
..
} => {
let destination = match *destination {
Some((ref lv, target)) => Some((self.eval_place(lv)?, target)),
None => None,
};
let func = self.eval_operand(func)?;
let (fn_def, sig) = match func.ty.sty {
ty::TyFnPtr(sig) => {
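// Call through a function pointer: resolve the pointer to a concrete
// instance and check that the pointer's signature is compatible with
// the instance's actual signature.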
let fn_ptr = self.value_to_primval(func)?.to_ptr()?;
let instance = self.memory.get_fn(fn_ptr)?;
let instance_ty = instance.ty(*self.tcx);
match instance_ty.sty {
ty::TyFnDef(..) => {
let real_sig = instance_ty.fn_sig(*self.tcx);
let sig = self.tcx.normalize_erasing_late_bound_regions(
ty::ParamEnv::reveal_all(),
&sig,
);
let real_sig = self.tcx.normalize_erasing_late_bound_regions(
ty::ParamEnv::reveal_all(),
&real_sig,
);
if !self.check_sig_compat(sig, real_sig)? {
return err!(FunctionPointerTyMismatch(real_sig, sig));
}
}
ref other => bug!("instance def ty: {:?}", other),
}
(instance, sig)
}
ty::TyFnDef(def_id, substs) => (
self.resolve(def_id, substs)?,
func.ty.fn_sig(*self.tcx),
),
_ => {
let msg = format!("can't handle callee of type {:?}", func.ty);
return err!(Unimplemented(msg));
}
};
let args = self.operands_to_args(args)?;
let sig = self.tcx.normalize_erasing_late_bound_regions(
ty::ParamEnv::reveal_all(),
&sig,
);
self.eval_fn_call(
fn_def,
destination,
&args,
terminator.source_info.span,
sig,
)?;
}
Drop {
ref location,
target,
..
} => {
// FIXME(CTFE): forbid drop in const eval
let place = self.eval_place(location)?;
let ty = self.place_ty(location);
let ty = self.tcx.subst_and_normalize_erasing_regions(
self.substs(),
ty::ParamEnv::reveal_all(),
&ty,
);
trace!("TerminatorKind::drop: {:?}, type {}", location, ty);
let instance = ::monomorphize::resolve_drop_in_place(*self.tcx, ty);
self.drop_place(
place,
instance,
ty,
terminator.source_info.span,
target,
)?;
}
Assert {
ref cond,
expected,
ref msg,
target,
..
} => {
let cond_val = self.eval_operand_to_primval(cond)?.to_bool()?;
if expected == cond_val {
self.goto_block(target);
} else {
use rustc::mir::interpret::EvalErrorKind::*;
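// The assertion failed: translate the MIR assert message into the
// corresponding error, evaluating the operands of a bounds check first.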
return match *msg {
BoundsCheck { ref len, ref index } => {
let len = self.eval_operand_to_primval(len)
.expect("can't eval len")
.to_u64()?;
let index = self.eval_operand_to_primval(index)
.expect("can't eval index")
.to_u64()?;
err!(BoundsCheck { len, index })
}
Overflow(op) => Err(Overflow(op).into()),
OverflowNeg => Err(OverflowNeg.into()),
GeneratorResumedAfterReturn |
GeneratorResumedAfterPanic => unimplemented!(),
_ => bug!(),
};
}
}
Yield { .. } => unimplemented!("{:#?}", terminator.kind),
GeneratorDrop => unimplemented!(),
DropAndReplace { .. } => unimplemented!(),
Resume => unimplemented!(),
Abort => unimplemented!(),
FalseEdges { .. } => bug!("should have been eliminated by `simplify_branches` mir pass"),
FalseUnwind { .. } => bug!("should have been eliminated by `simplify_branches` mir pass"),
Unreachable => return err!(Unreachable),
}
Ok(())
}
/// Decides whether it is okay to call a function whose actual signature is `real_sig`
/// through a pointer or value typed with signature `sig`.
/// FIXME: This should take into account the platform-dependent ABI description.
fn check_sig_compat(
&mut self,
sig: ty::FnSig<'tcx>,
real_sig: ty::FnSig<'tcx>,
) -> EvalResult<'tcx, bool> {
fn check_ty_compat<'tcx>(ty: Ty<'tcx>, real_ty: Ty<'tcx>) -> bool {
if ty == real_ty {
return true;
} // Types are interned, so this is just a fast pointer comparison.
return match (&ty.sty, &real_ty.sty) {
// Permit changing the pointer type of raw pointers and references as well as
// mutability of raw pointers.
// TODO: This should not be allowed when fat pointers are involved.
(&ty::TyRawPtr(_), &ty::TyRawPtr(_)) => true,
(&ty::TyRef(_, _), &ty::TyRef(_, _)) => {
ty.is_mutable_pointer() == real_ty.is_mutable_pointer()
}
// rule out everything else
_ => false,
};
}
if sig.abi == real_sig.abi && sig.variadic == real_sig.variadic &&
sig.inputs_and_output.len() == real_sig.inputs_and_output.len() &&
sig.inputs_and_output
.iter()
.zip(real_sig.inputs_and_output)
.all(|(ty, real_ty)| check_ty_compat(ty, real_ty))
{
// Definitely good.
return Ok(true);
}
if sig.variadic || real_sig.variadic {
// Don't attempt any further compatibility reasoning for variadic signatures.
return Ok(false);
}
// We need to allow what comes up when a non-capturing closure is cast to a fn().
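// The actual instance then still uses the `RustCall` ABI and takes
// `(closure ZST, (args...))`, so the check below accepts exactly that shape.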
match (sig.abi, real_sig.abi) {
(Abi::Rust, Abi::RustCall) // check the ABIs. This makes the test here non-symmetric.
if check_ty_compat(sig.output(), real_sig.output()) && real_sig.inputs_and_output.len() == 3 => {
// The first argument of `real_sig` must be a ZST (the non-capturing closure itself).
let fst_ty = real_sig.inputs_and_output[0];
if self.layout_of(fst_ty)?.is_zst() {
// The second argument must be a tuple whose element types match the argument list of `sig`.
let snd_ty = real_sig.inputs_and_output[1];
match snd_ty.sty {
ty::TyTuple(tys) if sig.inputs().len() == tys.len() =>
if sig.inputs().iter().zip(tys).all(|(ty, real_ty)| check_ty_compat(ty, real_ty)) {
return Ok(true)
},
_ => {}
}
}
}
_ => {}
};
// Nope, this doesn't work.
return Ok(false);
}
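/// Evaluate a call to `instance`: intrinsics and machine hooks are dispatched
/// directly, otherwise a new stack frame is pushed and the arguments are
/// copied into its locals according to the call ABI (including the
/// `rust-call` tuple-spreading ABI and virtual calls through a vtable).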
fn eval_fn_call(
&mut self,
instance: ty::Instance<'tcx>,
destination: Option<(Place, mir::BasicBlock)>,
args: &[ValTy<'tcx>],
span: Span,
sig: ty::FnSig<'tcx>,
) -> EvalResult<'tcx> {
trace!("eval_fn_call: {:#?}", instance);
match instance.def {
ty::InstanceDef::Intrinsic(..) => {
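// Intrinsics are handled by the machine; they need a place to return to,
// so a call without a destination is reported as unreachable.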
let (ret, target) = match destination {
Some(dest) => dest,
_ => return err!(Unreachable),
};
let ty = sig.output();
let layout = self.layout_of(ty)?;
M::call_intrinsic(self, instance, args, ret, layout, target)?;
self.dump_local(ret);
Ok(())
}
// FIXME: figure out why we can't just go through the shim
ty::InstanceDef::ClosureOnceShim { .. } => {
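// `M::eval_fn_call` either handles the call entirely (then we are done) or
// pushes the callee frame; in the latter case the arguments still have to
// be copied into that frame's argument locals, depending on the ABI.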
if M::eval_fn_call(self, instance, destination, args, span, sig)? {
return Ok(());
}
let mut arg_locals = self.frame().mir.args_iter();
match sig.abi {
// Closure called through its own `rust-call` ABI: the arguments line up
// one-to-one with the argument locals.
Abi::RustCall => {
for (arg_local, &valty) in arg_locals.zip(args) {
let dest = self.eval_place(&mir::Place::Local(arg_local))?;
self.write_value(valty, dest)?;
}
}
// A non-capturing closure called as a plain `fn` pointer: the closure
// object is a ZST, so the first argument local needs no data, and the
// caller's arguments are packed into the fields of the tuple argument local.
Abi::Rust => {
trace!(
"arg_locals: {:?}",
self.frame().mir.args_iter().collect::<Vec<_>>()
);
trace!("args: {:?}", args);
let local = arg_locals.nth(1).unwrap();
for (i, &valty) in args.into_iter().enumerate() {
let dest = self.eval_place(&mir::Place::Local(local).field(
mir::Field::new(i),
valty.ty,
))?;
self.write_value(valty, dest)?;
}
}
_ => bug!("bad ABI for ClosureOnceShim: {:?}", sig.abi),
}
Ok(())
}
ty::InstanceDef::FnPtrShim(..) |
ty::InstanceDef::DropGlue(..) |
ty::InstanceDef::CloneShim(..) |
ty::InstanceDef::Item(_) => {
// Push the stack frame; if the machine hook handled the call entirely, we are done.
if M::eval_fn_call(self, instance, destination, args, span, sig)? {
return Ok(());
}
// Pass the arguments
let mut arg_locals = self.frame().mir.args_iter();
trace!("ABI: {:?}", sig.abi);
trace!(
"arg_locals: {:?}",
self.frame().mir.args_iter().collect::<Vec<_>>()
);
trace!("args: {:?}", args);
match sig.abi {
Abi::RustCall => {
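// With the `rust-call` ABI we get exactly two values: the callee object
// itself and a tuple holding the remaining arguments.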
assert_eq!(args.len(), 2);
{
// write first argument
let first_local = arg_locals.next().unwrap();
let dest = self.eval_place(&mir::Place::Local(first_local))?;
self.write_value(args[0], dest)?;
}
// Unpack the tuple argument and write its fields into the remaining argument locals.
let layout = self.layout_of(args[1].ty)?;
if let ty::TyTuple(..) = args[1].ty.sty {
if self.frame().mir.args_iter().count() == layout.fields.count() + 1 {
match args[1].value {
Value::ByRef(ptr, align) => {
for (i, arg_local) in arg_locals.enumerate() {
let field = layout.field(&self, i)?;
let offset = layout.fields.offset(i).bytes();
let arg = Value::ByRef(ptr.offset(offset, &self)?,
align.min(field.align));
let dest =
self.eval_place(&mir::Place::Local(arg_local))?;
trace!(
"writing arg {:?} to {:?} (type: {})",
arg,
dest,
field.ty
);
let valty = ValTy {
value: arg,
ty: field.ty,
};
self.write_value(valty, dest)?;
}
}
Value::ByVal(PrimVal::Undef) => {}
other => {
trace!("{:#?}, {:#?}", other, layout);
let mut layout = layout;
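// Descend through newtype-like wrappers: while some field sits at offset 0
// and covers the whole layout, replace the layout by that field's, until
// the layout matches the single value we were handed.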
'outer: loop {
for i in 0..layout.fields.count() {
let field = layout.field(&self, i)?;
if layout.fields.offset(i).bytes() == 0 && layout.size == field.size {
layout = field;
continue 'outer;
}
}
break;
}
let dest = self.eval_place(&mir::Place::Local(
arg_locals.next().unwrap(),
))?;
let valty = ValTy {
value: other,
ty: layout.ty,
};
self.write_value(valty, dest)?;
}
}
} else {
trace!("manual impl of rust-call ABI");
// The callee is a hand-written `rust-call` function, so it expects the
// argument tuple as a single value.
let dest = self.eval_place(
&mir::Place::Local(arg_locals.next().unwrap()),
)?;
self.write_value(args[1], dest)?;
}
} else {
bug!(
"rust-call ABI tuple argument was {:#?}, {:#?}",
args[1].ty,
layout
);
}
}
_ => {
for (arg_local, &valty) in arg_locals.zip(args) {
let dest = self.eval_place(&mir::Place::Local(arg_local))?;
self.write_value(valty, dest)?;
}
}
}
Ok(())
}
// Cannot use the shim here, because that would only result in infinite recursion.
ty::InstanceDef::Virtual(_, idx) => {
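// The first three pointer-sized vtable slots hold the drop glue, size and
// alignment, so the pointer for method `idx` lives at slot `idx + 3`.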
let ptr_size = self.memory.pointer_size();
let ptr_align = self.tcx.data_layout.pointer_align;
let (ptr, vtable) = self.into_ptr_vtable_pair(args[0].value)?;
let fn_ptr = self.memory.read_ptr_sized(
vtable.offset(ptr_size * (idx as u64 + 3), &self)?,
ptr_align
)?.to_ptr()?;
let instance = self.memory.get_fn(fn_ptr)?;
let mut args = args.to_vec();
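// Adjust the receiver: keep only the data pointer and give it the type of
// the fat pointer's first (data) field.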
let ty = self.layout_of(args[0].ty)?.field(&self, 0)?.ty;
args[0].ty = ty;
args[0].value = ptr.to_value();
// recurse with concrete function
self.eval_fn_call(instance, destination, &args, span, sig)
}
}
}
}