Auto merge of #21744 - eddyb:rvalue-promotion, r=nikomatsakis

This includes everything necessary for promoting borrows of constant rvalues to `'static`.
That is, `&expr` will have the type `&'static T` whenever a declaration like `const FOO: &'static T = &expr;` would be valid.
There is one small exception: dereferences of raw pointers, which are not promoted because they misbehave.
They still "work" in constants, as I didn't want to break legitimate uses (are there any?).

The qualification done here can be expanded to allow simple CTFE via `const fn`.
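
As a rough illustration (the `FIVE` and `promoted` names below are just for the example), this is the kind of borrow the promotion machinery is aimed at; note that in this commit only zero-length array rvalues are actually given the `'static` region (see the `mem_categorization` change further down), with the general case left to a future RFC:

```rust
// `&expr` behind a reference type in a const is already accepted today:
const FIVE: &'static i32 = &5;

fn promoted() -> &'static i32 {
    // With full rvalue promotion, the same borrow written inline also has
    // type `&'static i32`, so it may escape the enclosing function.
    &5
}

fn main() {
    assert_eq!(*FIVE, 5);
    assert_eq!(*promoted(), 5);
}
```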
bors 2015-02-16 16:38:51 +00:00
commit e4e7aa2856
56 changed files with 1907 additions and 1657 deletions


@ -134,7 +134,11 @@ pub trait Reseeder<R> {
/// Reseed an RNG using a `Default` instance. This reseeds by
/// replacing the RNG with the result of a `Default::default` call.
#[derive(Copy)]
pub struct ReseedWithDefault;
pub struct ReseedWithDefault { __hack: [u8; 0] }
// FIXME(#21721) used to be a unit struct but that can cause
// certain LLVM versions to abort during optimizations.
#[allow(non_upper_case_globals)]
pub const ReseedWithDefault: ReseedWithDefault = ReseedWithDefault { __hack: [] };
impl<R: Rng + Default> Reseeder<R> for ReseedWithDefault {
fn reseed(&mut self, rng: &mut R) {
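
A minimal sketch of the workaround pattern above, with illustrative names (`Marker` and `takes_marker` are not from the diff): the unit struct is replaced by a zero-sized struct plus a const of the same name, so existing call sites can keep using the name in value position.

```rust
// Zero-sized struct standing in for the former unit struct.
pub struct Marker { __hack: [u8; 0] }

// A const with the same name keeps `Marker` usable as a value.
#[allow(non_upper_case_globals)]
pub const Marker: Marker = Marker { __hack: [] };

fn takes_marker(_m: Marker) {}

fn main() {
    takes_marker(Marker);
}
```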


@ -59,6 +59,7 @@ register_diagnostics! {
E0010,
E0011,
E0012,
E0013,
E0014,
E0015,
E0016,


@ -90,7 +90,6 @@ pub mod middle {
pub mod check_loop;
pub mod check_match;
pub mod check_rvalues;
pub mod check_static;
pub mod const_eval;
pub mod dataflow;
pub mod dead;


@ -199,7 +199,7 @@ impl LintPass for TypeLimits {
if let ast::LitInt(shift, _) = lit.node { shift >= bits }
else { false }
} else {
match eval_const_expr_partial(cx.tcx, &**r) {
match eval_const_expr_partial(cx.tcx, &**r, Some(cx.tcx.types.uint)) {
Ok(const_int(shift)) => { shift as u64 >= bits },
Ok(const_uint(shift)) => { shift >= bits },
_ => { false }


@ -12,7 +12,6 @@
pub use self::astencode_tag::*;
use std::mem;
use back::svh::Svh;
// EBML enum definitions and utils shared by the encoder and decoder
@ -113,7 +112,7 @@ pub const tag_items_data_item_reexport_def_id: uint = 0x39;
pub const tag_items_data_item_reexport_name: uint = 0x3a;
// used to encode crate_ctxt side tables
#[derive(Copy, PartialEq)]
#[derive(Copy, PartialEq, FromPrimitive)]
#[repr(uint)]
pub enum astencode_tag { // Reserves 0x40 -- 0x5f
tag_ast = 0x40,
@ -144,17 +143,7 @@ pub enum astencode_tag { // Reserves 0x40 -- 0x5f
tag_table_upvar_capture_map = 0x56,
tag_table_capture_modes = 0x57,
tag_table_object_cast_map = 0x58,
}
static first_astencode_tag: uint = tag_ast as uint;
static last_astencode_tag: uint = tag_table_object_cast_map as uint;
impl astencode_tag {
pub fn from_uint(value : uint) -> Option<astencode_tag> {
let is_a_tag = first_astencode_tag <= value && value <= last_astencode_tag;
if !is_a_tag { None } else {
Some(unsafe { mem::transmute::<uint, astencode_tag>(value) })
}
}
tag_table_const_qualif = 0x59,
}
pub const tag_item_trait_item_sort: uint = 0x60;


@ -23,6 +23,7 @@ use metadata::tydecode;
use metadata::tydecode::{DefIdSource, NominalType, TypeWithId, TypeParameter};
use metadata::tydecode::{RegionParameter, ClosureSource};
use metadata::tyencode;
use middle::check_const::ConstQualif;
use middle::mem_categorization::Typer;
use middle::subst;
use middle::subst::VecPerParamSpace;
@ -38,6 +39,7 @@ use syntax::ptr::P;
use syntax;
use std::old_io::Seek;
use std::num::FromPrimitive;
use std::rc::Rc;
use rbml::io::SeekableMemWriter;
@ -1305,6 +1307,15 @@ fn encode_side_tables_for_id(ecx: &e::EncodeContext,
})
})
}
for &qualif in tcx.const_qualif_map.borrow().get(&id).iter() {
rbml_w.tag(c::tag_table_const_qualif, |rbml_w| {
rbml_w.id(id);
rbml_w.tag(c::tag_table_val, |rbml_w| {
qualif.encode(rbml_w).unwrap()
})
})
}
}
trait doc_decoder_helpers {
@ -1836,8 +1847,8 @@ fn decode_side_tables(dcx: &DecodeContext,
debug!(">> Side table document with tag 0x{:x} \
found for id {} (orig {})",
tag, id, id0);
match c::astencode_tag::from_uint(tag) {
let decoded_tag: Option<c::astencode_tag> = FromPrimitive::from_uint(tag);
match decoded_tag {
None => {
dcx.tcx.sess.bug(
&format!("unknown tag found in side tables: {:x}",
@ -1919,6 +1930,10 @@ fn decode_side_tables(dcx: &DecodeContext,
dcx.tcx.closure_kinds.borrow_mut().insert(ast_util::local_def(id),
closure_kind);
}
c::tag_table_const_qualif => {
let qualif: ConstQualif = Decodable::decode(val_dsr).unwrap();
dcx.tcx.const_qualif_map.borrow_mut().insert(id, qualif);
}
_ => {
dcx.tcx.sess.bug(
&format!("unknown tag found in side tables: {:x}",


@ -1,4 +1,4 @@
// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
@ -8,91 +8,414 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Verifies that the types and values of const and static items
// are safe. The rules enforced by this module are:
//
// - For each *mutable* static item, it checks that its **type**:
// - doesn't have a destructor
// - doesn't own an owned pointer
//
// - For each *immutable* static item, it checks that its **value**:
// - doesn't own owned, managed pointers
// - doesn't contain a struct literal or a call to an enum variant / struct constructor where
// - the type of the struct/enum has a dtor
//
// Rules Enforced Elsewhere:
// - It's not possible to take the address of a static item with unsafe interior. This is enforced
// by borrowck::gather_loans
use middle::def::*;
use middle::ty;
use middle::const_eval;
use middle::def;
use middle::expr_use_visitor as euv;
use middle::infer;
use middle::mem_categorization as mc;
use middle::traits;
use middle::ty::{self, Ty};
use util::nodemap::NodeMap;
use util::ppaux;
use syntax::ast;
use syntax::codemap::Span;
use syntax::print::pprust;
use syntax::visit::{self, Visitor};
use std::collections::hash_map::Entry;
// Const qualification, from partial to completely promotable.
bitflags! {
#[derive(RustcEncodable, RustcDecodable)]
flags ConstQualif: u8 {
// Const rvalue which can be placed behind a reference.
const PURE_CONST = 0b000000,
// Inner mutability (can not be placed behind a reference) or behind
// &mut in a non-global expression. Can be copied from static memory.
const MUTABLE_MEM = 0b000001,
// Constant value with a type that implements Drop. Can be copied
// from static memory, similar to MUTABLE_MEM.
const NEEDS_DROP = 0b000010,
// Even if the value can be placed in static memory, copying it from
// there is more expensive than in-place instantiation, and/or it may
// be too large. This applies to [T; N] and everything containing it.
// N.B.: references need to clear this flag to not end up on the stack.
const PREFER_IN_PLACE = 0b000100,
// May use more than 0 bytes of memory, doesn't impact the constness
// directly, but is not allowed to be borrowed mutably in a constant.
const NON_ZERO_SIZED = 0b001000,
// Actually borrowed, has to always be in static memory. Does not
// propagate, and requires the expression to behave like a 'static
// lvalue. The set of expressions with this flag is the minimum
// that have to be promoted.
const HAS_STATIC_BORROWS = 0b010000,
// Invalid const for miscellaneous reasons (e.g. not implemented).
const NOT_CONST = 0b100000,
// Borrowing the expression won't produce &'static T if any of these
// bits are set, though the value could be copied from static memory
// if `NOT_CONST` isn't set.
const NON_STATIC_BORROWS = MUTABLE_MEM.bits | NEEDS_DROP.bits | NOT_CONST.bits
}
}
#[derive(Copy, Eq, PartialEq)]
enum Mode {
Const,
Static,
StaticMut,
// An expression that occurs outside of any constant context
// (i.e. `const`, `static`, array lengths, etc.). The value
// can be variable at runtime, but will be promotable to
// static memory if we can prove it is actually constant.
Var,
}
struct CheckCrateVisitor<'a, 'tcx: 'a> {
tcx: &'a ty::ctxt<'tcx>,
in_const: bool
mode: Mode,
qualif: ConstQualif,
rvalue_borrows: NodeMap<ast::Mutability>
}
impl<'a, 'tcx> CheckCrateVisitor<'a, 'tcx> {
fn with_const<F>(&mut self, in_const: bool, f: F) where
F: FnOnce(&mut CheckCrateVisitor<'a, 'tcx>),
fn with_mode<F, R>(&mut self, mode: Mode, f: F) -> R where
F: FnOnce(&mut CheckCrateVisitor<'a, 'tcx>) -> R,
{
let was_const = self.in_const;
self.in_const = in_const;
f(self);
self.in_const = was_const;
let (old_mode, old_qualif) = (self.mode, self.qualif);
self.mode = mode;
self.qualif = PURE_CONST;
let r = f(self);
self.mode = old_mode;
self.qualif = old_qualif;
r
}
fn inside_const<F>(&mut self, f: F) where
F: FnOnce(&mut CheckCrateVisitor<'a, 'tcx>),
fn with_euv<'b, F, R>(&'b mut self, item_id: Option<ast::NodeId>, f: F) -> R where
F: for<'t> FnOnce(&mut euv::ExprUseVisitor<'b, 't, 'tcx,
ty::ParameterEnvironment<'a, 'tcx>>) -> R,
{
self.with_const(true, f);
let param_env = match item_id {
Some(item_id) => ty::ParameterEnvironment::for_item(self.tcx, item_id),
None => ty::empty_parameter_environment(self.tcx)
};
f(&mut euv::ExprUseVisitor::new(self, &param_env))
}
fn global_expr(&mut self, mode: Mode, expr: &ast::Expr) -> ConstQualif {
assert!(mode != Mode::Var);
match self.tcx.const_qualif_map.borrow_mut().entry(expr.id) {
Entry::Occupied(entry) => return *entry.get(),
Entry::Vacant(entry) => {
// Prevent infinite recursion on re-entry.
entry.insert(PURE_CONST);
}
}
self.with_mode(mode, |this| {
this.with_euv(None, |euv| euv.consume_expr(expr));
this.visit_expr(expr);
this.qualif
})
}
fn add_qualif(&mut self, qualif: ConstQualif) {
self.qualif = self.qualif | qualif;
}
fn record_borrow(&mut self, id: ast::NodeId, mutbl: ast::Mutability) {
match self.rvalue_borrows.entry(id) {
Entry::Occupied(mut entry) => {
// Merge the two borrows, taking the most demanding
// one, mutability-wise.
if mutbl == ast::MutMutable {
entry.insert(mutbl);
}
}
Entry::Vacant(entry) => {
entry.insert(mutbl);
}
}
}
fn msg(&self) -> &'static str {
match self.mode {
Mode::Const => "constant",
Mode::StaticMut | Mode::Static => "static",
Mode::Var => unreachable!(),
}
}
fn check_static_mut_type(&self, e: &ast::Expr) {
let node_ty = ty::node_id_to_type(self.tcx, e.id);
let tcontents = ty::type_contents(self.tcx, node_ty);
let suffix = if tcontents.has_dtor() {
"destructors"
} else if tcontents.owns_owned() {
"owned pointers"
} else {
return
};
self.tcx.sess.span_err(e.span, &format!("mutable statics are not allowed \
to have {}", suffix)[]);
}
fn check_static_type(&self, e: &ast::Expr) {
let ty = ty::node_id_to_type(self.tcx, e.id);
let infcx = infer::new_infer_ctxt(self.tcx);
let mut fulfill_cx = traits::FulfillmentContext::new();
let cause = traits::ObligationCause::new(e.span, e.id, traits::SharedStatic);
fulfill_cx.register_builtin_bound(&infcx, ty, ty::BoundSync, cause);
let env = ty::empty_parameter_environment(self.tcx);
match fulfill_cx.select_all_or_error(&infcx, &env) {
Ok(()) => { },
Err(ref errors) => {
traits::report_fulfillment_errors(&infcx, errors);
}
}
}
}
impl<'a, 'tcx, 'v> Visitor<'v> for CheckCrateVisitor<'a, 'tcx> {
fn visit_item(&mut self, i: &ast::Item) {
debug!("visit_item(item={})", pprust::item_to_string(i));
match i.node {
ast::ItemStatic(_, _, ref ex) |
ast::ItemConst(_, ref ex) => {
self.inside_const(|v| v.visit_expr(&**ex));
ast::ItemStatic(_, ast::MutImmutable, ref expr) => {
self.check_static_type(&**expr);
self.global_expr(Mode::Static, &**expr);
}
ast::ItemStatic(_, ast::MutMutable, ref expr) => {
self.check_static_mut_type(&**expr);
self.global_expr(Mode::StaticMut, &**expr);
}
ast::ItemConst(_, ref expr) => {
self.global_expr(Mode::Const, &**expr);
}
ast::ItemEnum(ref enum_definition, _) => {
self.inside_const(|v| {
for var in &enum_definition.variants {
if let Some(ref ex) = var.node.disr_expr {
v.visit_expr(&**ex);
for var in &enum_definition.variants {
if let Some(ref ex) = var.node.disr_expr {
self.global_expr(Mode::Const, &**ex);
}
}
}
_ => {
self.with_mode(Mode::Var, |v| visit::walk_item(v, i));
}
}
}
fn visit_fn(&mut self,
fk: visit::FnKind<'v>,
fd: &'v ast::FnDecl,
b: &'v ast::Block,
s: Span,
fn_id: ast::NodeId) {
assert!(self.mode == Mode::Var);
self.with_euv(Some(fn_id), |euv| euv.walk_fn(fd, b));
visit::walk_fn(self, fk, fd, b, s);
}
fn visit_pat(&mut self, p: &ast::Pat) {
match p.node {
ast::PatLit(ref lit) => {
self.global_expr(Mode::Const, &**lit);
}
ast::PatRange(ref start, ref end) => {
self.global_expr(Mode::Const, &**start);
self.global_expr(Mode::Const, &**end);
}
_ => visit::walk_pat(self, p)
}
}
fn visit_expr(&mut self, ex: &ast::Expr) {
let mut outer = self.qualif;
self.qualif = PURE_CONST;
let node_ty = ty::node_id_to_type(self.tcx, ex.id);
check_expr(self, ex, node_ty);
// Special-case some expressions to avoid certain flags bubbling up.
match ex.node {
ast::ExprCall(ref callee, ref args) => {
for arg in args.iter() {
self.visit_expr(&**arg)
}
let inner = self.qualif;
self.visit_expr(&**callee);
// The callee's size doesn't count in the call.
let added = self.qualif - inner;
self.qualif = inner | (added - NON_ZERO_SIZED);
}
ast::ExprRepeat(ref element, _) => {
self.visit_expr(&**element);
// The count is checked elsewhere (typeck).
let count = match node_ty.sty {
ty::ty_vec(_, Some(n)) => n,
_ => unreachable!()
};
// [element; 0] is always zero-sized.
if count == 0 {
self.qualif = self.qualif - (NON_ZERO_SIZED | PREFER_IN_PLACE);
}
}
ast::ExprMatch(ref discr, ref arms, _) => {
// Compute the most demanding borrow from all the arms'
// patterns and set that on the discriminator.
let mut borrow = None;
for pat in arms.iter().flat_map(|arm| arm.pats.iter()) {
let pat_borrow = self.rvalue_borrows.remove(&pat.id);
match (borrow, pat_borrow) {
(None, _) | (_, Some(ast::MutMutable)) => {
borrow = pat_borrow;
}
_ => {}
}
}
if let Some(mutbl) = borrow {
self.record_borrow(discr.id, mutbl);
}
visit::walk_expr(self, ex);
}
// Division by zero and overflow checking.
ast::ExprBinary(op, _, _) => {
visit::walk_expr(self, ex);
let div_or_rem = op.node == ast::BiDiv || op.node == ast::BiRem;
match node_ty.sty {
ty::ty_uint(_) | ty::ty_int(_) if div_or_rem => {
if !self.qualif.intersects(NOT_CONST) {
match const_eval::eval_const_expr_partial(self.tcx, ex, None) {
Ok(_) => {}
Err(msg) => {
span_err!(self.tcx.sess, ex.span, E0020,
"{} in a constant expression", msg)
}
}
}
}
});
_ => {}
}
}
_ => self.with_const(false, |v| visit::walk_item(v, i))
_ => visit::walk_expr(self, ex)
}
}
fn visit_pat(&mut self, p: &ast::Pat) {
let is_const = match p.node {
ast::PatLit(_) | ast::PatRange(..) => true,
_ => false
};
self.with_const(is_const, |v| visit::walk_pat(v, p))
}
fn visit_expr(&mut self, ex: &ast::Expr) {
if self.in_const {
check_expr(self, ex);
// Handle borrows on (or inside the autorefs of) this expression.
match self.rvalue_borrows.remove(&ex.id) {
Some(ast::MutImmutable) => {
// Constants cannot be borrowed if they contain interior mutability as
// it means that our "silent insertion of statics" could change
// initializer values (very bad).
// If the type doesn't have interior mutability, then `MUTABLE_MEM` has
// propagated from another error, so erroring again would be just noise.
let tc = ty::type_contents(self.tcx, node_ty);
if self.qualif.intersects(MUTABLE_MEM) && tc.interior_unsafe() {
outer = outer | NOT_CONST;
if self.mode != Mode::Var {
self.tcx.sess.span_err(ex.span,
"cannot borrow a constant which contains \
interior mutability, create a static instead");
}
}
// If the reference has to be 'static, avoid in-place initialization
// as that will end up pointing to the stack instead.
if !self.qualif.intersects(NON_STATIC_BORROWS) {
self.qualif = self.qualif - PREFER_IN_PLACE;
self.add_qualif(HAS_STATIC_BORROWS);
}
}
Some(ast::MutMutable) => {
// `&mut expr` means expr could be mutated, unless it's zero-sized.
if self.qualif.intersects(NON_ZERO_SIZED) {
if self.mode == Mode::Var {
outer = outer | NOT_CONST;
self.add_qualif(MUTABLE_MEM);
} else {
span_err!(self.tcx.sess, ex.span, E0017,
"references in {}s may only refer \
to immutable values", self.msg())
}
}
if !self.qualif.intersects(NON_STATIC_BORROWS) {
self.add_qualif(HAS_STATIC_BORROWS);
}
}
None => {}
}
visit::walk_expr(self, ex);
self.tcx.const_qualif_map.borrow_mut().insert(ex.id, self.qualif);
// Don't propagate certain flags.
self.qualif = outer | (self.qualif - HAS_STATIC_BORROWS);
}
}
pub fn check_crate(tcx: &ty::ctxt) {
visit::walk_crate(&mut CheckCrateVisitor { tcx: tcx, in_const: false },
tcx.map.krate());
tcx.sess.abort_if_errors();
}
/// This function is used to enforce the constraints on
/// const/static items. It walks through the *value*
/// of the item walking down the expression and evaluating
/// every nested expression. If the expression is not part
/// of a const/static item, it is qualified for promotion
/// instead of producing errors.
fn check_expr<'a, 'tcx>(v: &mut CheckCrateVisitor<'a, 'tcx>,
e: &ast::Expr, node_ty: Ty<'tcx>) {
match node_ty.sty {
ty::ty_struct(did, _) |
ty::ty_enum(did, _) if ty::has_dtor(v.tcx, did) => {
v.add_qualif(NEEDS_DROP);
if v.mode != Mode::Var {
v.tcx.sess.span_err(e.span,
&format!("{}s are not allowed to have destructors",
v.msg())[]);
}
}
_ => {}
}
fn check_expr(v: &mut CheckCrateVisitor, e: &ast::Expr) {
let method_call = ty::MethodCall::expr(e.id);
match e.node {
ast::ExprUnary(ast::UnDeref, _) => {}
ast::ExprUnary(ast::UnUniq, _) => {
span_err!(v.tcx.sess, e.span, E0010,
"cannot do allocations in constant expressions");
}
ast::ExprBinary(..) | ast::ExprUnary(..) => {
let method_call = ty::MethodCall::expr(e.id);
if v.tcx.method_map.borrow().contains_key(&method_call) {
ast::ExprUnary(..) |
ast::ExprBinary(..) |
ast::ExprIndex(..) if v.tcx.method_map.borrow().contains_key(&method_call) => {
v.add_qualif(NOT_CONST);
if v.mode != Mode::Var {
span_err!(v.tcx.sess, e.span, E0011,
"user-defined operators are not allowed in constant \
expressions");
"user-defined operators are not allowed in {}s", v.msg());
}
}
ast::ExprBox(..) |
ast::ExprUnary(ast::UnUniq, _) => {
v.add_qualif(NOT_CONST);
if v.mode != Mode::Var {
span_err!(v.tcx.sess, e.span, E0010,
"allocations are not allowed in {}s", v.msg());
}
}
ast::ExprUnary(ast::UnDeref, ref ptr) => {
match ty::node_id_to_type(v.tcx, ptr.id).sty {
ty::ty_ptr(_) => {
// This shouldn't be allowed in constants at all.
v.add_qualif(NOT_CONST);
}
_ => {}
}
}
ast::ExprLit(_) => {}
ast::ExprCast(ref from, _) => {
let toty = ty::expr_ty(v.tcx, e);
let fromty = ty::expr_ty(v.tcx, &**from);
@ -101,47 +424,110 @@ fn check_expr(v: &mut CheckCrateVisitor, e: &ast::Expr) {
ty::type_is_unsafe_ptr(toty) ||
(ty::type_is_bare_fn(toty) && ty::type_is_bare_fn_item(fromty));
if !is_legal_cast {
span_err!(v.tcx.sess, e.span, E0012,
"can not cast to `{}` in a constant expression",
ppaux::ty_to_string(v.tcx, toty));
v.add_qualif(NOT_CONST);
if v.mode != Mode::Var {
span_err!(v.tcx.sess, e.span, E0012,
"can not cast to `{}` in {}s",
ppaux::ty_to_string(v.tcx, toty), v.msg());
}
}
if ty::type_is_unsafe_ptr(fromty) && ty::type_is_numeric(toty) {
span_err!(v.tcx.sess, e.span, E0018,
"can not cast a pointer to an integer in a constant \
expression");
v.add_qualif(NOT_CONST);
if v.mode != Mode::Var {
span_err!(v.tcx.sess, e.span, E0018,
"can not cast a pointer to an integer in {}s", v.msg());
}
}
}
ast::ExprPath(_) | ast::ExprQPath(_) => {
match v.tcx.def_map.borrow()[e.id] {
DefStatic(..) | DefConst(..) |
DefFn(..) | DefStaticMethod(..) | DefMethod(..) |
DefStruct(_) | DefVariant(_, _, _) => {}
let def = v.tcx.def_map.borrow().get(&e.id).cloned();
match def {
Some(def::DefVariant(_, _, _)) => {
// Count the discriminator or function pointer.
v.add_qualif(NON_ZERO_SIZED);
}
Some(def::DefStruct(_)) => {
if let ty::ty_bare_fn(..) = node_ty.sty {
// Count the function pointer.
v.add_qualif(NON_ZERO_SIZED);
}
}
Some(def::DefFn(..)) |
Some(def::DefStaticMethod(..)) | Some(def::DefMethod(..)) => {
// Count the function pointer.
v.add_qualif(NON_ZERO_SIZED);
}
Some(def::DefStatic(..)) => {
match v.mode {
Mode::Static | Mode::StaticMut => {}
Mode::Const => {
span_err!(v.tcx.sess, e.span, E0013,
"constants cannot refer to other statics, \
insert an intermediate constant instead");
}
Mode::Var => v.add_qualif(NOT_CONST)
}
}
Some(def::DefConst(did)) => {
if let Some(expr) = const_eval::lookup_const_by_id(v.tcx, did) {
let inner = v.global_expr(Mode::Const, expr);
v.add_qualif(inner);
} else {
v.tcx.sess.span_bug(e.span, "DefConst doesn't point \
to an ItemConst");
}
}
def => {
debug!("(checking const) found bad def: {:?}", def);
span_err!(v.tcx.sess, e.span, E0014,
"paths in constants may only refer to constants \
or functions");
v.add_qualif(NOT_CONST);
if v.mode != Mode::Var {
debug!("(checking const) found bad def: {:?}", def);
span_err!(v.tcx.sess, e.span, E0014,
"paths in {}s may only refer to constants \
or functions", v.msg());
}
}
}
}
ast::ExprCall(ref callee, _) => {
match v.tcx.def_map.borrow()[callee.id] {
DefStruct(..) | DefVariant(..) => {} // OK.
let mut callee = &**callee;
loop {
callee = match callee.node {
ast::ExprParen(ref inner) => &**inner,
ast::ExprBlock(ref block) => match block.expr {
Some(ref tail) => &**tail,
None => break
},
_ => break
};
}
let def = v.tcx.def_map.borrow().get(&callee.id).cloned();
match def {
Some(def::DefStruct(..)) => {}
Some(def::DefVariant(..)) => {
// Count the discriminator.
v.add_qualif(NON_ZERO_SIZED);
}
_ => {
span_err!(v.tcx.sess, e.span, E0015,
"function calls in constants are limited to \
struct and enum constructors");
v.add_qualif(NOT_CONST);
if v.mode != Mode::Var {
span_err!(v.tcx.sess, e.span, E0015,
"function calls in {}s are limited to \
struct and enum constructors", v.msg());
}
}
}
}
ast::ExprBlock(ref block) => {
// Check all statements in the block
for stmt in &block.stmts {
let block_span_err = |span|
let mut block_span_err = |span| {
v.add_qualif(NOT_CONST);
if v.mode != Mode::Var {
span_err!(v.tcx.sess, span, E0016,
"blocks in constants are limited to items and \
tail expressions");
"blocks in {}s are limited to items and \
tail expressions", v.msg());
}
};
for stmt in &block.stmts {
match stmt.node {
ast::StmtDecl(ref decl, _) => {
match decl.node {
@ -160,28 +546,184 @@ fn check_expr(v: &mut CheckCrateVisitor, e: &ast::Expr) {
}
}
}
ast::ExprVec(_) |
ast::ExprAddrOf(ast::MutImmutable, _) |
ast::ExprParen(..) |
ast::ExprField(..) |
ast::ExprTupField(..) |
ast::ExprIndex(..) |
ast::ExprTup(..) |
ast::ExprRepeat(..) |
ast::ExprStruct(..) => {}
ast::ExprAddrOf(_, ref inner) => {
match inner.node {
// Mutable slices are allowed.
ast::ExprVec(_) => {}
_ => span_err!(v.tcx.sess, e.span, E0017,
"references in constants may only refer \
to immutable values")
ast::ExprStruct(..) => {
let did = v.tcx.def_map.borrow().get(&e.id).map(|def| def.def_id());
if did == v.tcx.lang_items.unsafe_cell_type() {
v.add_qualif(MUTABLE_MEM);
}
}
_ => span_err!(v.tcx.sess, e.span, E0019,
"constant contains unimplemented expression type")
ast::ExprLit(_) |
ast::ExprAddrOf(..) => {
v.add_qualif(NON_ZERO_SIZED);
}
ast::ExprRepeat(..) => {
v.add_qualif(PREFER_IN_PLACE);
}
ast::ExprClosure(..) => {
// Paths in constant contexts cannot refer to local variables,
// as there are none, and thus closures can't have upvars there.
if ty::with_freevars(v.tcx, e.id, |fv| !fv.is_empty()) {
assert!(v.mode == Mode::Var,
"global closures can't capture anything");
v.add_qualif(NOT_CONST);
}
}
ast::ExprUnary(..) |
ast::ExprBinary(..) |
ast::ExprIndex(..) |
ast::ExprField(..) |
ast::ExprTupField(..) |
ast::ExprVec(_) |
ast::ExprParen(..) |
ast::ExprTup(..) => {}
// Conditional control flow (possible to implement).
ast::ExprMatch(..) |
ast::ExprIf(..) |
ast::ExprIfLet(..) |
// Loops (not very meaningful in constants).
ast::ExprWhile(..) |
ast::ExprWhileLet(..) |
ast::ExprForLoop(..) |
ast::ExprLoop(..) |
// More control flow (also not very meaningful).
ast::ExprBreak(_) |
ast::ExprAgain(_) |
ast::ExprRet(_) |
// Miscellaneous expressions that could be implemented.
ast::ExprRange(..) |
// Various other expressions.
ast::ExprMethodCall(..) |
ast::ExprAssign(..) |
ast::ExprAssignOp(..) |
ast::ExprInlineAsm(_) |
ast::ExprMac(_) => {
v.add_qualif(NOT_CONST);
if v.mode != Mode::Var {
span_err!(v.tcx.sess, e.span, E0019,
"{} contains unimplemented expression type", v.msg());
}
}
}
}
pub fn check_crate(tcx: &ty::ctxt) {
visit::walk_crate(&mut CheckCrateVisitor {
tcx: tcx,
mode: Mode::Var,
qualif: NOT_CONST,
rvalue_borrows: NodeMap()
}, tcx.map.krate());
tcx.sess.abort_if_errors();
}
impl<'a, 'tcx> euv::Delegate<'tcx> for CheckCrateVisitor<'a, 'tcx> {
fn consume(&mut self,
_consume_id: ast::NodeId,
consume_span: Span,
cmt: mc::cmt,
_mode: euv::ConsumeMode) {
let mut cur = &cmt;
loop {
match cur.cat {
mc::cat_static_item => {
if self.mode != Mode::Var {
// statics cannot be consumed by value at any time, that would imply
// that they're an initializer (what a const is for) or kept in sync
// over time (not feasible), so deny it outright.
self.tcx.sess.span_err(consume_span,
"cannot refer to other statics by value, use the \
address-of operator or a constant instead");
}
break;
}
mc::cat_deref(ref cmt, _, _) |
mc::cat_downcast(ref cmt, _) |
mc::cat_interior(ref cmt, _) => cur = cmt,
mc::cat_rvalue(..) |
mc::cat_upvar(..) |
mc::cat_local(..) => break
}
}
}
fn borrow(&mut self,
borrow_id: ast::NodeId,
borrow_span: Span,
cmt: mc::cmt<'tcx>,
_loan_region: ty::Region,
bk: ty::BorrowKind,
loan_cause: euv::LoanCause) {
let mut cur = &cmt;
let mut is_interior = false;
loop {
match cur.cat {
mc::cat_rvalue(..) => {
if loan_cause == euv::MatchDiscriminant {
// Ignore the dummy immutable borrow created by EUV.
break;
}
let mutbl = bk.to_mutbl_lossy();
if mutbl == ast::MutMutable && self.mode == Mode::StaticMut {
// Mutable slices are the only `&mut` allowed in globals,
// but only in `static mut`, nowhere else.
match cmt.ty.sty {
ty::ty_vec(_, _) => break,
_ => {}
}
}
self.record_borrow(borrow_id, mutbl);
break;
}
mc::cat_static_item => {
if is_interior && self.mode != Mode::Var {
// Borrowed statics can specifically *only* have their address taken,
// not any number of other borrows such as borrowing fields, reading
// elements of an array, etc.
self.tcx.sess.span_err(borrow_span,
"cannot refer to the interior of another \
static, use a constant instead");
}
break;
}
mc::cat_deref(ref cmt, _, _) |
mc::cat_downcast(ref cmt, _) |
mc::cat_interior(ref cmt, _) => {
is_interior = true;
cur = cmt;
}
mc::cat_upvar(..) |
mc::cat_local(..) => break
}
}
}
fn decl_without_init(&mut self,
_id: ast::NodeId,
_span: Span) {}
fn mutate(&mut self,
_assignment_id: ast::NodeId,
_assignment_span: Span,
_assignee_cmt: mc::cmt,
_mode: euv::MutateMode) {}
fn matched_pat(&mut self,
_: &ast::Pat,
_: mc::cmt,
_: euv::MatchMode) {}
fn consume_pat(&mut self,
_consume_pat: &ast::Pat,
_cmt: mc::cmt,
_mode: euv::ConsumeMode) {}
}
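
A minimal sketch of two of the rules the rewritten pass enforces (`GLOBAL`, `OK`, and `ALSO_OK` are illustrative names): constants may not refer to statics (the newly registered E0013), while statics may still borrow other statics.

```rust
static GLOBAL: i32 = 10;

const OK: i32 = 2 + 2;        // an ordinary constant expression
// const BAD: i32 = GLOBAL;   // rejected with E0013: "constants cannot refer
//                            // to other statics, insert an intermediate
//                            // constant instead"

// Taking the address of another static is still allowed in a static.
static ALSO_OK: &'static i32 = &GLOBAL;

fn main() {
    assert_eq!(OK, 4);
    assert_eq!(*ALSO_OK, 10);
}
```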


@ -23,6 +23,7 @@ use middle::mem_categorization::cmt;
use middle::pat_util::*;
use middle::ty::*;
use middle::ty;
use std::cmp::Ordering;
use std::fmt;
use std::iter::{range_inclusive, AdditiveIterator, FromIterator, repeat};
use std::num::Float;
@ -821,7 +822,9 @@ fn range_covered_by_constructor(ctor: &Constructor,
let cmp_from = compare_const_vals(c_from, from);
let cmp_to = compare_const_vals(c_to, to);
match (cmp_from, cmp_to) {
(Some(val1), Some(val2)) => Some(val1 >= 0 && val2 <= 0),
(Some(cmp_from), Some(cmp_to)) => {
Some(cmp_from != Ordering::Less && cmp_to != Ordering::Greater)
}
_ => None
}
}
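
A minimal sketch of the `Ordering`-based range check used above, using a hypothetical helper rather than the compiler's own API: a constant is covered by an inclusive range when it is neither less than `from` nor greater than `to`.

```rust
use std::cmp::Ordering;

fn covered_by_range<T: Ord>(value: &T, from: &T, to: &T) -> bool {
    value.cmp(from) != Ordering::Less && value.cmp(to) != Ordering::Greater
}

fn main() {
    assert!(covered_by_range(&5, &1, &10));
    assert!(!covered_by_range(&11, &1, &10));
}
```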


@ -1,341 +0,0 @@
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Verifies that the types and values of static items
// are safe. The rules enforced by this module are:
//
// - For each *mutable* static item, it checks that its **type**:
// - doesn't have a destructor
// - doesn't own an owned pointer
//
// - For each *immutable* static item, it checks that its **value**:
// - doesn't own owned, managed pointers
// - doesn't contain a struct literal or a call to an enum variant / struct constructor where
// - the type of the struct/enum has a dtor
//
// Rules Enforced Elsewhere:
// - It's not possible to take the address of a static item with unsafe interior. This is enforced
// by borrowck::gather_loans
use self::Mode::*;
use middle::ty;
use middle::def;
use middle::infer;
use middle::traits;
use middle::mem_categorization as mc;
use middle::expr_use_visitor as euv;
use util::nodemap::NodeSet;
use syntax::ast;
use syntax::print::pprust;
use syntax::visit::Visitor;
use syntax::codemap::Span;
use syntax::visit;
#[derive(Copy, Eq, PartialEq)]
enum Mode {
InConstant,
InStatic,
InStaticMut,
InNothing,
}
struct CheckStaticVisitor<'a, 'tcx: 'a> {
tcx: &'a ty::ctxt<'tcx>,
mode: Mode,
checker: &'a mut GlobalChecker,
}
struct GlobalVisitor<'a,'b,'tcx:'a+'b>(
euv::ExprUseVisitor<'a,'b,'tcx,ty::ParameterEnvironment<'b,'tcx>>);
struct GlobalChecker {
static_consumptions: NodeSet,
const_borrows: NodeSet,
static_interior_borrows: NodeSet,
static_local_borrows: NodeSet,
}
pub fn check_crate(tcx: &ty::ctxt) {
let mut checker = GlobalChecker {
static_consumptions: NodeSet(),
const_borrows: NodeSet(),
static_interior_borrows: NodeSet(),
static_local_borrows: NodeSet(),
};
{
let param_env = ty::empty_parameter_environment(tcx);
let visitor = euv::ExprUseVisitor::new(&mut checker, &param_env);
visit::walk_crate(&mut GlobalVisitor(visitor), tcx.map.krate());
}
visit::walk_crate(&mut CheckStaticVisitor {
tcx: tcx,
mode: InNothing,
checker: &mut checker,
}, tcx.map.krate());
}
impl<'a, 'tcx> CheckStaticVisitor<'a, 'tcx> {
fn with_mode<F>(&mut self, mode: Mode, f: F) where
F: FnOnce(&mut CheckStaticVisitor<'a, 'tcx>),
{
let old = self.mode;
self.mode = mode;
f(self);
self.mode = old;
}
fn msg(&self) -> &'static str {
match self.mode {
InConstant => "constants",
InStaticMut | InStatic => "statics",
InNothing => unreachable!(),
}
}
fn check_static_mut_type(&self, e: &ast::Expr) {
let node_ty = ty::node_id_to_type(self.tcx, e.id);
let tcontents = ty::type_contents(self.tcx, node_ty);
let suffix = if tcontents.has_dtor() {
"destructors"
} else if tcontents.owns_owned() {
"owned pointers"
} else {
return
};
self.tcx.sess.span_err(e.span, &format!("mutable statics are not allowed \
to have {}", suffix)[]);
}
fn check_static_type(&self, e: &ast::Expr) {
let ty = ty::node_id_to_type(self.tcx, e.id);
let infcx = infer::new_infer_ctxt(self.tcx);
let mut fulfill_cx = traits::FulfillmentContext::new();
let cause = traits::ObligationCause::new(e.span, e.id, traits::SharedStatic);
fulfill_cx.register_builtin_bound(&infcx, ty, ty::BoundSync, cause);
let env = ty::empty_parameter_environment(self.tcx);
match fulfill_cx.select_all_or_error(&infcx, &env) {
Ok(()) => { },
Err(ref errors) => {
traits::report_fulfillment_errors(&infcx, errors);
}
}
}
}
impl<'a, 'tcx, 'v> Visitor<'v> for CheckStaticVisitor<'a, 'tcx> {
fn visit_item(&mut self, i: &ast::Item) {
debug!("visit_item(item={})", pprust::item_to_string(i));
match i.node {
ast::ItemStatic(_, ast::MutImmutable, ref expr) => {
self.check_static_type(&**expr);
self.with_mode(InStatic, |v| v.visit_expr(&**expr));
}
ast::ItemStatic(_, ast::MutMutable, ref expr) => {
self.check_static_mut_type(&**expr);
self.with_mode(InStaticMut, |v| v.visit_expr(&**expr));
}
ast::ItemConst(_, ref expr) => {
self.with_mode(InConstant, |v| v.visit_expr(&**expr));
}
_ => {
self.with_mode(InNothing, |v| visit::walk_item(v, i));
}
}
}
/// This method is used to enforce the constraints on
/// immutable static items. It walks through the *value*
/// of the item walking down the expression and evaluating
/// every nested expression. if the expression is not part
/// of a static item, this method does nothing but walking
/// down through it.
fn visit_expr(&mut self, e: &ast::Expr) {
if self.mode == InNothing {
return visit::walk_expr(self, e);
}
let node_ty = ty::node_id_to_type(self.tcx, e.id);
match node_ty.sty {
ty::ty_struct(did, _) |
ty::ty_enum(did, _) if ty::has_dtor(self.tcx, did) => {
self.tcx.sess.span_err(e.span,
&format!("{} are not allowed to have \
destructors", self.msg())[])
}
_ => {}
}
// statics cannot be consumed by value at any time, that would imply
// that they're an initializer (what a const is for) or kept in sync
// over time (not feasible), so deny it outright.
if self.checker.static_consumptions.remove(&e.id) {
self.tcx.sess.span_err(e.span, "cannot refer to other statics by \
value, use the address-of operator \
or a constant instead");
}
// Borrowed statics can specifically *only* have their address taken,
// not any number of other borrows such as borrowing fields, reading
// elements of an array, etc.
if self.checker.static_interior_borrows.remove(&e.id) {
self.tcx.sess.span_err(e.span, "cannot refer to the interior of \
another static, use a constant \
instead");
}
// constants cannot be borrowed if they contain interior mutability as
// it means that our "silent insertion of statics" could change
// initializer values (very bad).
if self.checker.const_borrows.remove(&e.id) {
let node_ty = ty::node_id_to_type(self.tcx, e.id);
let tcontents = ty::type_contents(self.tcx, node_ty);
if tcontents.interior_unsafe() {
self.tcx.sess.span_err(e.span, "cannot borrow a constant which \
contains interior mutability, \
create a static instead");
}
}
// local variables in a block expression in a static context (i.e. being
// assigned to a static variable) cannot be borrowed.
if self.checker.static_local_borrows.remove(&e.id) {
self.tcx.sess.span_err(e.span, "cannot borrow a local variable inside \
a static block, define a separate static \
instead");
}
match e.node {
ast::ExprAddrOf(ast::MutMutable, _) => {
if self.mode != InStaticMut {
span_err!(self.tcx.sess, e.span, E0020,
"{} are not allowed to have mutable references",
self.msg());
}
},
ast::ExprBox(..) |
ast::ExprUnary(ast::UnUniq, _) => {
span_err!(self.tcx.sess, e.span, E0022,
"{} are not allowed to have custom pointers",
self.msg());
}
ast::ExprPath(_) | ast::ExprQPath(_) => {
match ty::resolve_expr(self.tcx, e) {
def::DefStatic(..) if self.mode == InConstant => {
let msg = "constants cannot refer to other statics, \
insert an intermediate constant \
instead";
self.tcx.sess.span_err(e.span, &msg[]);
}
_ => {}
}
}
_ => {}
}
visit::walk_expr(self, e);
}
}
impl<'a,'b,'t,'v> Visitor<'v> for GlobalVisitor<'a,'b,'t> {
fn visit_item(&mut self, item: &ast::Item) {
match item.node {
ast::ItemConst(_, ref e) |
ast::ItemStatic(_, _, ref e) => {
let GlobalVisitor(ref mut v) = *self;
v.consume_expr(&**e);
}
_ => {}
}
visit::walk_item(self, item);
}
}
impl<'tcx> euv::Delegate<'tcx> for GlobalChecker {
fn consume(&mut self,
consume_id: ast::NodeId,
_consume_span: Span,
cmt: mc::cmt,
_mode: euv::ConsumeMode) {
let mut cur = &cmt;
loop {
match cur.cat {
mc::cat_static_item => {
self.static_consumptions.insert(consume_id);
break
}
mc::cat_deref(ref cmt, _, _) |
mc::cat_downcast(ref cmt, _) |
mc::cat_interior(ref cmt, _) => cur = cmt,
mc::cat_rvalue(..) |
mc::cat_upvar(..) |
mc::cat_local(..) => break,
}
}
}
fn borrow(&mut self,
borrow_id: ast::NodeId,
_borrow_span: Span,
cmt: mc::cmt,
_loan_region: ty::Region,
_bk: ty::BorrowKind,
_loan_cause: euv::LoanCause) {
let mut cur = &cmt;
let mut is_interior = false;
loop {
match cur.cat {
mc::cat_rvalue(..) => {
self.const_borrows.insert(borrow_id);
break
}
mc::cat_static_item => {
if is_interior {
self.static_interior_borrows.insert(borrow_id);
}
break
}
mc::cat_deref(ref cmt, _, _) |
mc::cat_interior(ref cmt, _) => {
is_interior = true;
cur = cmt;
}
mc::cat_downcast(..) |
mc::cat_upvar(..) => unreachable!(),
mc::cat_local(..) => {
self.static_local_borrows.insert(borrow_id);
break
}
}
}
}
fn decl_without_init(&mut self,
_id: ast::NodeId,
_span: Span) {}
fn mutate(&mut self,
_assignment_id: ast::NodeId,
_assignment_span: Span,
_assignee_cmt: mc::cmt,
_mode: euv::MutateMode) {}
fn matched_pat(&mut self,
_: &ast::Pat,
_: mc::cmt,
_: euv::MatchMode) {}
fn consume_pat(&mut self,
_consume_pat: &ast::Pat,
_cmt: mc::cmt,
_mode: euv::ConsumeMode) {}
}


@ -12,80 +12,24 @@
#![allow(unsigned_negation)]
pub use self::const_val::*;
pub use self::constness::*;
use metadata::csearch;
use middle::{astencode, def};
use middle::pat_util::def_to_path;
use middle::ty::{self};
use middle::ty::{self, Ty};
use middle::astconv_util::{ast_ty_to_prim_ty};
use util::nodemap::DefIdMap;
use syntax::ast::{self, Expr};
use syntax::codemap::Span;
use syntax::parse::token::InternedString;
use syntax::ptr::P;
use syntax::visit::{self, Visitor};
use syntax::{ast_map, ast_util, codemap};
use std::cmp::Ordering;
use std::collections::hash_map::Entry::Vacant;
use std::{i8, i16, i32, i64};
use std::rc::Rc;
//
// This pass classifies expressions by their constant-ness.
//
// Constant-ness comes in 3 flavours:
//
// - Integer-constants: can be evaluated by the frontend all the way down
// to their actual value. They are used in a few places (enum
// discriminants, switch arms) and are a subset of
// general-constants. They cover all the integer and integer-ish
// literals (nil, bool, int, uint, char, iNN, uNN) and all integer
// operators and copies applied to them.
//
// - General-constants: can be evaluated by LLVM but not necessarily by
// the frontend; usually due to reliance on target-specific stuff such
// as "where in memory the value goes" or "what floating point mode the
// target uses". This _includes_ integer-constants, plus the following
// constructors:
//
// fixed-size vectors and strings: [] and ""/_
// vector and string slices: &[] and &""
// tuples: (,)
// enums: foo(...)
// floating point literals and operators
// & and * pointers
// copies of general constants
//
// (in theory, probably not at first: if/match on integer-const
// conditions / discriminants)
//
// - Non-constants: everything else.
//
#[derive(Copy)]
pub enum constness {
integral_const,
general_const,
non_const
}
type constness_cache = DefIdMap<constness>;
pub fn join(a: constness, b: constness) -> constness {
match (a, b) {
(integral_const, integral_const) => integral_const,
(integral_const, general_const)
| (general_const, integral_const)
| (general_const, general_const) => general_const,
_ => non_const
}
}
pub fn join_all<It: Iterator<Item=constness>>(cs: It) -> constness {
cs.fold(integral_const, |a, b| join(a, b))
}
fn lookup_const<'a>(tcx: &'a ty::ctxt, e: &Expr) -> Option<&'a Expr> {
let opt_def = tcx.def_map.borrow().get(&e.id).cloned();
match opt_def {
@ -186,113 +130,6 @@ pub fn lookup_const_by_id<'a>(tcx: &'a ty::ctxt, def_id: ast::DefId)
}
}
struct ConstEvalVisitor<'a, 'tcx: 'a> {
tcx: &'a ty::ctxt<'tcx>,
ccache: constness_cache,
}
impl<'a, 'tcx> ConstEvalVisitor<'a, 'tcx> {
fn classify(&mut self, e: &Expr) -> constness {
let did = ast_util::local_def(e.id);
match self.ccache.get(&did) {
Some(&x) => return x,
None => {}
}
let cn = match e.node {
ast::ExprLit(ref lit) => {
match lit.node {
ast::LitStr(..) | ast::LitFloat(..) => general_const,
_ => integral_const
}
}
ast::ExprUnary(_, ref inner) | ast::ExprParen(ref inner) =>
self.classify(&**inner),
ast::ExprBinary(_, ref a, ref b) =>
join(self.classify(&**a), self.classify(&**b)),
ast::ExprTup(ref es) |
ast::ExprVec(ref es) =>
join_all(es.iter().map(|e| self.classify(&**e))),
ast::ExprStruct(_, ref fs, None) => {
let cs = fs.iter().map(|f| self.classify(&*f.expr));
join_all(cs)
}
ast::ExprCast(ref base, _) => {
let ty = ty::expr_ty(self.tcx, e);
let base = self.classify(&**base);
if ty::type_is_integral(ty) {
join(integral_const, base)
} else if ty::type_is_fp(ty) {
join(general_const, base)
} else {
non_const
}
}
ast::ExprField(ref base, _) => self.classify(&**base),
ast::ExprTupField(ref base, _) => self.classify(&**base),
ast::ExprIndex(ref base, ref idx) =>
join(self.classify(&**base), self.classify(&**idx)),
ast::ExprAddrOf(ast::MutImmutable, ref base) =>
self.classify(&**base),
// FIXME: (#3728) we can probably do something CCI-ish
// surrounding nonlocal constants. But we don't yet.
ast::ExprPath(_) | ast::ExprQPath(_) => self.lookup_constness(e),
ast::ExprRepeat(..) => general_const,
ast::ExprBlock(ref block) => {
match block.expr {
Some(ref e) => self.classify(&**e),
None => integral_const
}
}
_ => non_const
};
self.ccache.insert(did, cn);
cn
}
fn lookup_constness(&self, e: &Expr) -> constness {
match lookup_const(self.tcx, e) {
Some(rhs) => {
let ty = ty::expr_ty(self.tcx, &*rhs);
if ty::type_is_integral(ty) {
integral_const
} else {
general_const
}
}
None => non_const
}
}
}
impl<'a, 'tcx, 'v> Visitor<'v> for ConstEvalVisitor<'a, 'tcx> {
fn visit_expr_post(&mut self, e: &Expr) {
self.classify(e);
}
}
pub fn process_crate(tcx: &ty::ctxt) {
visit::walk_crate(&mut ConstEvalVisitor {
tcx: tcx,
ccache: DefIdMap(),
}, tcx.map.krate());
tcx.sess.abort_if_errors();
}
// FIXME (#33): this doesn't handle big integer/float literals correctly
// (nor does the rest of our literal handling).
#[derive(Clone, PartialEq)]
@ -370,17 +207,23 @@ pub fn const_expr_to_pat(tcx: &ty::ctxt, expr: &Expr, span: Span) -> P<ast::Pat>
}
pub fn eval_const_expr(tcx: &ty::ctxt, e: &Expr) -> const_val {
match eval_const_expr_partial(tcx, e) {
match eval_const_expr_partial(tcx, e, None) {
Ok(r) => r,
Err(s) => tcx.sess.span_fatal(e.span, &s[])
}
}
pub fn eval_const_expr_partial(tcx: &ty::ctxt, e: &Expr) -> Result<const_val, String> {
pub fn eval_const_expr_partial<'tcx>(tcx: &ty::ctxt<'tcx>,
e: &Expr,
ty_hint: Option<Ty<'tcx>>)
-> Result<const_val, String> {
fn fromb(b: bool) -> Result<const_val, String> { Ok(const_int(b as i64)) }
let ety = ty_hint.or_else(|| ty::expr_ty_opt(tcx, e));
match e.node {
ast::ExprUnary(ast::UnNeg, ref inner) => {
match eval_const_expr_partial(tcx, &**inner) {
match eval_const_expr_partial(tcx, &**inner, ety) {
Ok(const_float(f)) => Ok(const_float(-f)),
Ok(const_int(i)) => Ok(const_int(-i)),
Ok(const_uint(i)) => Ok(const_uint(-i)),
@ -390,7 +233,7 @@ pub fn eval_const_expr_partial(tcx: &ty::ctxt, e: &Expr) -> Result<const_val, St
}
}
ast::ExprUnary(ast::UnNot, ref inner) => {
match eval_const_expr_partial(tcx, &**inner) {
match eval_const_expr_partial(tcx, &**inner, ety) {
Ok(const_int(i)) => Ok(const_int(!i)),
Ok(const_uint(i)) => Ok(const_uint(!i)),
Ok(const_bool(b)) => Ok(const_bool(!b)),
@ -398,8 +241,12 @@ pub fn eval_const_expr_partial(tcx: &ty::ctxt, e: &Expr) -> Result<const_val, St
}
}
ast::ExprBinary(op, ref a, ref b) => {
match (eval_const_expr_partial(tcx, &**a),
eval_const_expr_partial(tcx, &**b)) {
let b_ty = match op.node {
ast::BiShl | ast::BiShr => Some(tcx.types.uint),
_ => ety
};
match (eval_const_expr_partial(tcx, &**a, ety),
eval_const_expr_partial(tcx, &**b, b_ty)) {
(Ok(const_float(a)), Ok(const_float(b))) => {
match op.node {
ast::BiAdd => Ok(const_float(a + b)),
@ -417,19 +264,46 @@ pub fn eval_const_expr_partial(tcx: &ty::ctxt, e: &Expr) -> Result<const_val, St
}
}
(Ok(const_int(a)), Ok(const_int(b))) => {
let is_a_min_value = |&:| {
let int_ty = match ty::expr_ty_opt(tcx, e).map(|ty| &ty.sty) {
Some(&ty::ty_int(int_ty)) => int_ty,
_ => return false
};
let int_ty = if let ast::TyIs(_) = int_ty {
tcx.sess.target.int_type
} else {
int_ty
};
match int_ty {
ast::TyI8 => (a as i8) == i8::MIN,
ast::TyI16 => (a as i16) == i16::MIN,
ast::TyI32 => (a as i32) == i32::MIN,
ast::TyI64 => (a as i64) == i64::MIN,
ast::TyIs(_) => unreachable!()
}
};
match op.node {
ast::BiAdd => Ok(const_int(a + b)),
ast::BiSub => Ok(const_int(a - b)),
ast::BiMul => Ok(const_int(a * b)),
ast::BiDiv if b == 0 => {
Err("attempted to divide by zero".to_string())
ast::BiDiv => {
if b == 0 {
Err("attempted to divide by zero".to_string())
} else if b == -1 && is_a_min_value() {
Err("attempted to divide with overflow".to_string())
} else {
Ok(const_int(a / b))
}
}
ast::BiDiv => Ok(const_int(a / b)),
ast::BiRem if b == 0 => {
Err("attempted remainder with a divisor of \
zero".to_string())
ast::BiRem => {
if b == 0 {
Err("attempted remainder with a divisor of zero".to_string())
} else if b == -1 && is_a_min_value() {
Err("attempted remainder with overflow".to_string())
} else {
Ok(const_int(a % b))
}
}
ast::BiRem => Ok(const_int(a % b)),
ast::BiAnd | ast::BiBitAnd => Ok(const_int(a & b)),
ast::BiOr | ast::BiBitOr => Ok(const_int(a | b)),
ast::BiBitXor => Ok(const_int(a ^ b)),
@ -504,63 +378,53 @@ pub fn eval_const_expr_partial(tcx: &ty::ctxt, e: &Expr) -> Result<const_val, St
// This tends to get called w/o the type actually having been
// populated in the ctxt, which was causing things to blow up
// (#5900). Fall back to doing a limited lookup to get past it.
let ety = ty::expr_ty_opt(tcx, e)
.or_else(|| ast_ty_to_prim_ty(tcx, &**target_ty))
let ety = ety.or_else(|| ast_ty_to_prim_ty(tcx, &**target_ty))
.unwrap_or_else(|| {
tcx.sess.span_fatal(target_ty.span,
"target type not found for const cast")
});
macro_rules! define_casts {
($val:ident, {
$($ty_pat:pat => (
$intermediate_ty:ty,
$const_type:ident,
$target_ty:ty
)),*
}) => (match ety.sty {
$($ty_pat => {
match $val {
const_bool(b) => Ok($const_type(b as $intermediate_ty as $target_ty)),
const_uint(u) => Ok($const_type(u as $intermediate_ty as $target_ty)),
const_int(i) => Ok($const_type(i as $intermediate_ty as $target_ty)),
const_float(f) => Ok($const_type(f as $intermediate_ty as $target_ty)),
_ => Err(concat!(
"can't cast this type to ", stringify!($const_type)
).to_string())
}
},)*
_ => Err("can't cast this type".to_string())
})
}
eval_const_expr_partial(tcx, &**base)
.and_then(|val| define_casts!(val, {
ty::ty_int(ast::TyIs(_)) => (int, const_int, i64),
ty::ty_int(ast::TyI8) => (i8, const_int, i64),
ty::ty_int(ast::TyI16) => (i16, const_int, i64),
ty::ty_int(ast::TyI32) => (i32, const_int, i64),
ty::ty_int(ast::TyI64) => (i64, const_int, i64),
ty::ty_uint(ast::TyUs(_)) => (uint, const_uint, u64),
ty::ty_uint(ast::TyU8) => (u8, const_uint, u64),
ty::ty_uint(ast::TyU16) => (u16, const_uint, u64),
ty::ty_uint(ast::TyU32) => (u32, const_uint, u64),
ty::ty_uint(ast::TyU64) => (u64, const_uint, u64),
ty::ty_float(ast::TyF32) => (f32, const_float, f64),
ty::ty_float(ast::TyF64) => (f64, const_float, f64)
}))
// Prefer known type to noop, but always have a type hint.
let base_hint = ty::expr_ty_opt(tcx, &**base).unwrap_or(ety);
let val = try!(eval_const_expr_partial(tcx, &**base, Some(base_hint)));
cast_const(val, ety)
}
ast::ExprPath(_) | ast::ExprQPath(_) => {
match lookup_const(tcx, e) {
Some(actual_e) => eval_const_expr_partial(tcx, &*actual_e),
None => Err("non-constant path in constant expr".to_string())
}
let opt_def = tcx.def_map.borrow().get(&e.id).cloned();
let (const_expr, const_ty) = match opt_def {
Some(def::DefConst(def_id)) => {
if ast_util::is_local(def_id) {
match tcx.map.find(def_id.node) {
Some(ast_map::NodeItem(it)) => match it.node {
ast::ItemConst(ref ty, ref expr) => {
(Some(&**expr), Some(&**ty))
}
_ => (None, None)
},
_ => (None, None)
}
} else {
(lookup_const_by_id(tcx, def_id), None)
}
}
Some(def::DefVariant(enum_def, variant_def, _)) => {
(lookup_variant_by_id(tcx, enum_def, variant_def), None)
}
_ => (None, None)
};
let const_expr = match const_expr {
Some(actual_e) => actual_e,
None => return Err("non-constant path in constant expr".to_string())
};
let ety = ety.or_else(|| const_ty.and_then(|ty| ast_ty_to_prim_ty(tcx, ty)));
eval_const_expr_partial(tcx, const_expr, ety)
}
ast::ExprLit(ref lit) => Ok(lit_to_const(&**lit)),
ast::ExprParen(ref e) => eval_const_expr_partial(tcx, &**e),
ast::ExprLit(ref lit) => {
Ok(lit_to_const(&**lit, ety))
}
ast::ExprParen(ref e) => eval_const_expr_partial(tcx, &**e, ety),
ast::ExprBlock(ref block) => {
match block.expr {
Some(ref expr) => eval_const_expr_partial(tcx, &**expr),
Some(ref expr) => eval_const_expr_partial(tcx, &**expr, ety),
None => Ok(const_int(0i64))
}
}
@ -569,7 +433,7 @@ pub fn eval_const_expr_partial(tcx: &ty::ctxt, e: &Expr) -> Result<const_val, St
if let Some(&ast::ExprTup(ref fields)) = lookup_const(tcx, &**base).map(|s| &s.node) {
// Check that the given index is within bounds and evaluate its value
if fields.len() > index.node {
return eval_const_expr_partial(tcx, &*fields[index.node])
return eval_const_expr_partial(tcx, &*fields[index.node], None)
} else {
return Err("tuple index out of bounds".to_string())
}
@ -584,7 +448,7 @@ pub fn eval_const_expr_partial(tcx: &ty::ctxt, e: &Expr) -> Result<const_val, St
// Check that the given field exists and evaluate it
if let Some(f) = fields.iter().find(|f|
f.ident.node.as_str() == field_name.node.as_str()) {
return eval_const_expr_partial(tcx, &*f.expr)
return eval_const_expr_partial(tcx, &*f.expr, None)
} else {
return Err("nonexistent struct field".to_string())
}
@ -596,7 +460,44 @@ pub fn eval_const_expr_partial(tcx: &ty::ctxt, e: &Expr) -> Result<const_val, St
}
}
pub fn lit_to_const(lit: &ast::Lit) -> const_val {
fn cast_const(val: const_val, ty: Ty) -> Result<const_val, String> {
macro_rules! define_casts {
($($ty_pat:pat => (
$intermediate_ty:ty,
$const_type:ident,
$target_ty:ty
)),*) => (match ty.sty {
$($ty_pat => {
match val {
const_bool(b) => Ok($const_type(b as $intermediate_ty as $target_ty)),
const_uint(u) => Ok($const_type(u as $intermediate_ty as $target_ty)),
const_int(i) => Ok($const_type(i as $intermediate_ty as $target_ty)),
const_float(f) => Ok($const_type(f as $intermediate_ty as $target_ty)),
_ => Err(concat!("can't cast this type to ",
stringify!($const_type)).to_string())
}
},)*
_ => Err("can't cast this type".to_string())
})
}
define_casts!{
ty::ty_int(ast::TyIs(_)) => (int, const_int, i64),
ty::ty_int(ast::TyI8) => (i8, const_int, i64),
ty::ty_int(ast::TyI16) => (i16, const_int, i64),
ty::ty_int(ast::TyI32) => (i32, const_int, i64),
ty::ty_int(ast::TyI64) => (i64, const_int, i64),
ty::ty_uint(ast::TyUs(_)) => (uint, const_uint, u64),
ty::ty_uint(ast::TyU8) => (u8, const_uint, u64),
ty::ty_uint(ast::TyU16) => (u16, const_uint, u64),
ty::ty_uint(ast::TyU32) => (u32, const_uint, u64),
ty::ty_uint(ast::TyU64) => (u64, const_uint, u64),
ty::ty_float(ast::TyF32) => (f32, const_float, f64),
ty::ty_float(ast::TyF64) => (f64, const_float, f64)
}
}
fn lit_to_const(lit: &ast::Lit, ty_hint: Option<Ty>) -> const_val {
match lit.node {
ast::LitStr(ref s, _) => const_str((*s).clone()),
ast::LitBinary(ref data) => {
@ -604,8 +505,13 @@ pub fn lit_to_const(lit: &ast::Lit) -> const_val {
}
ast::LitByte(n) => const_uint(n as u64),
ast::LitChar(n) => const_uint(n as u64),
ast::LitInt(n, ast::SignedIntLit(_, ast::Plus)) |
ast::LitInt(n, ast::UnsuffixedIntLit(ast::Plus)) => const_int(n as i64),
ast::LitInt(n, ast::SignedIntLit(_, ast::Plus)) => const_int(n as i64),
ast::LitInt(n, ast::UnsuffixedIntLit(ast::Plus)) => {
match ty_hint.map(|ty| &ty.sty) {
Some(&ty::ty_uint(_)) => const_uint(n),
_ => const_int(n as i64)
}
}
ast::LitInt(n, ast::SignedIntLit(_, ast::Minus)) |
ast::LitInt(n, ast::UnsuffixedIntLit(ast::Minus)) => const_int(-(n as i64)),
ast::LitInt(n, ast::UnsignedIntLit(_)) => const_uint(n),
@ -617,21 +523,45 @@ pub fn lit_to_const(lit: &ast::Lit) -> const_val {
}
}
fn compare_vals<T: PartialOrd>(a: T, b: T) -> Option<int> {
Some(if a == b { 0 } else if a < b { -1 } else { 1 })
}
pub fn compare_const_vals(a: &const_val, b: &const_val) -> Option<int> {
match (a, b) {
(&const_int(a), &const_int(b)) => compare_vals(a, b),
(&const_uint(a), &const_uint(b)) => compare_vals(a, b),
(&const_float(a), &const_float(b)) => compare_vals(a, b),
(&const_str(ref a), &const_str(ref b)) => compare_vals(a, b),
(&const_bool(a), &const_bool(b)) => compare_vals(a, b),
(&const_binary(ref a), &const_binary(ref b)) => compare_vals(a, b),
_ => None
}
pub fn compare_const_vals(a: &const_val, b: &const_val) -> Option<Ordering> {
Some(match (a, b) {
(&const_int(a), &const_int(b)) => a.cmp(&b),
(&const_uint(a), &const_uint(b)) => a.cmp(&b),
(&const_float(a), &const_float(b)) => {
// This is pretty bad but it is the existing behavior.
if a == b {
Ordering::Equal
} else if a < b {
Ordering::Less
} else {
Ordering::Greater
}
}
(&const_str(ref a), &const_str(ref b)) => a.cmp(b),
(&const_bool(a), &const_bool(b)) => a.cmp(&b),
(&const_binary(ref a), &const_binary(ref b)) => a.cmp(b),
_ => return None
})
}
pub fn compare_lit_exprs(tcx: &ty::ctxt, a: &Expr, b: &Expr) -> Option<int> {
compare_const_vals(&eval_const_expr(tcx, a), &eval_const_expr(tcx, b))
pub fn compare_lit_exprs<'tcx>(tcx: &ty::ctxt<'tcx>,
a: &Expr,
b: &Expr,
ty_hint: Option<Ty<'tcx>>)
-> Option<Ordering> {
let a = match eval_const_expr_partial(tcx, a, ty_hint) {
Ok(a) => a,
Err(s) => {
tcx.sess.span_err(a.span, &s[]);
return None;
}
};
let b = match eval_const_expr_partial(tcx, b, ty_hint) {
Ok(b) => b,
Err(s) => {
tcx.sess.span_err(b.span, &s[]);
return None;
}
};
compare_const_vals(&a, &b)
}
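
A minimal sketch of the overflow case the new `is_a_min_value` check guards against: dividing the minimum value of a signed type by `-1` is not representable, so the constant evaluator now reports an overflow error instead of crashing.

```rust
fn main() {
    let a: i8 = i8::MIN; // -128
    let b: i8 = -1;
    // -128 / -1 would be 128, which does not fit in i8; the checked
    // operations model the same rule the const evaluator applies.
    assert!(a.checked_div(b).is_none());
    assert!(a.checked_rem(b).is_none());
}
```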


@ -1435,7 +1435,6 @@ impl<'a, 'tcx> ErrorReportingHelpers<'tcx> for InferCtxt<'a, 'tcx> {
infer::MiscVariable(_) => "".to_string(),
infer::PatternRegion(_) => " for pattern".to_string(),
infer::AddrOfRegion(_) => " for borrow expression".to_string(),
infer::AddrOfSlice(_) => " for slice expression".to_string(),
infer::Autoref(_) => " for autoref".to_string(),
infer::Coercion(_) => " for automatic coercion".to_string(),
infer::LateBoundRegion(_, br, infer::FnCall) => {


@ -247,9 +247,6 @@ pub enum RegionVariableOrigin<'tcx> {
// Regions created by `&` operator
AddrOfRegion(Span),
// Regions created by `&[...]` literal
AddrOfSlice(Span),
// Regions created as part of an autoref of a method receiver
Autoref(Span),
@ -1273,7 +1270,6 @@ impl<'tcx> RegionVariableOrigin<'tcx> {
MiscVariable(a) => a,
PatternRegion(a) => a,
AddrOfRegion(a) => a,
AddrOfSlice(a) => a,
Autoref(a) => a,
Coercion(ref a) => a.span(),
EarlyBoundRegion(a, _) => a,
@ -1296,7 +1292,6 @@ impl<'tcx> Repr<'tcx> for RegionVariableOrigin<'tcx> {
AddrOfRegion(a) => {
format!("AddrOfRegion({})", a.repr(tcx))
}
AddrOfSlice(a) => format!("AddrOfSlice({})", a.repr(tcx)),
Autoref(a) => format!("Autoref({})", a.repr(tcx)),
Coercion(ref a) => format!("Coercion({})", a.repr(tcx)),
EarlyBoundRegion(a, b) => {


@ -71,6 +71,7 @@ pub use self::Note::*;
pub use self::deref_kind::*;
pub use self::categorization::*;
use middle::check_const;
use middle::def;
use middle::region;
use middle::ty::{self, Ty};
@ -808,17 +809,29 @@ impl<'t,'tcx,TYPER:Typer<'tcx>> MemCategorizationContext<'t,TYPER> {
span: Span,
expr_ty: Ty<'tcx>)
-> cmt<'tcx> {
match self.typer.temporary_scope(id) {
Some(scope) => {
match expr_ty.sty {
ty::ty_vec(_, Some(0)) => self.cat_rvalue(id, span, ty::ReStatic, expr_ty),
_ => self.cat_rvalue(id, span, ty::ReScope(scope), expr_ty)
let qualif = self.tcx().const_qualif_map.borrow().get(&id).cloned()
.unwrap_or(check_const::NOT_CONST);
// Only promote `[T; 0]` before an RFC for rvalue promotions
// is accepted.
let qualif = match expr_ty.sty {
ty::ty_vec(_, Some(0)) => qualif,
_ => check_const::NOT_CONST
};
let re = match qualif & check_const::NON_STATIC_BORROWS {
check_const::PURE_CONST => {
// Constant rvalues get promoted to 'static.
ty::ReStatic
}
_ => {
match self.typer.temporary_scope(id) {
Some(scope) => ty::ReScope(scope),
None => ty::ReStatic
}
}
None => {
self.cat_rvalue(id, span, ty::ReStatic, expr_ty)
}
}
};
self.cat_rvalue(id, span, re, expr_ty)
}
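
A minimal sketch of the only promotion `cat_rvalue_node` performs for now: zero-length array rvalues get the `'static` region, so a borrow of `[]` can escape the function (`empty` is an illustrative name).

```rust
fn empty() -> &'static [i32; 0] {
    // `[]` is a constant, zero-length array rvalue; it is placed in static
    // memory rather than the function's stack frame.
    &[]
}

fn main() {
    assert_eq!(empty().len(), 0);
}
```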
pub fn cat_rvalue(&self,


@ -44,6 +44,7 @@ use session::Session;
use lint;
use metadata::csearch;
use middle;
use middle::check_const;
use middle::const_eval;
use middle::def::{self, DefMap, ExportMap};
use middle::dependency_format;
@ -838,6 +839,9 @@ pub struct ctxt<'tcx> {
/// Caches whether traits are object safe
pub object_safety_cache: RefCell<DefIdMap<bool>>,
/// Maps Expr NodeId's to their constant qualification.
pub const_qualif_map: RefCell<NodeMap<check_const::ConstQualif>>,
}
// Flags that we track on types. These flags are propagated upwards
@ -2472,6 +2476,7 @@ pub fn mk_ctxt<'tcx>(s: Session,
type_impls_copy_cache: RefCell::new(HashMap::new()),
type_impls_sized_cache: RefCell::new(HashMap::new()),
object_safety_cache: RefCell::new(DefIdMap()),
const_qualif_map: RefCell::new(NodeMap()),
}
}
@ -5350,26 +5355,25 @@ pub fn enum_variants<'tcx>(cx: &ctxt<'tcx>, id: ast::DefId)
None => INITIAL_DISCRIMINANT_VALUE
};
match variant.node.disr_expr {
Some(ref e) =>
match const_eval::eval_const_expr_partial(cx, &**e) {
Ok(const_eval::const_int(val)) => {
discriminant = val as Disr
}
Ok(const_eval::const_uint(val)) => {
discriminant = val as Disr
}
Ok(_) => {
span_err!(cx.sess, e.span, E0304,
"expected signed integer constant");
}
Err(ref err) => {
span_err!(cx.sess, e.span, E0305,
"expected constant: {}",
*err);
}
},
None => {}
if let Some(ref e) = variant.node.disr_expr {
// Preserve all values, and prefer signed.
let ty = Some(cx.types.i64);
match const_eval::eval_const_expr_partial(cx, &**e, ty) {
Ok(const_eval::const_int(val)) => {
discriminant = val as Disr;
}
Ok(const_eval::const_uint(val)) => {
discriminant = val as Disr;
}
Ok(_) => {
span_err!(cx.sess, e.span, E0304,
"expected signed integer constant");
}
Err(err) => {
span_err!(cx.sess, e.span, E0305,
"expected constant: {}", err);
}
}
};
last_discriminant = Some(discriminant);
@ -5822,7 +5826,7 @@ pub fn is_binopable<'tcx>(cx: &ctxt<'tcx>, ty: Ty<'tcx>, op: ast::BinOp) -> bool
// Returns the repeat count for a repeating vector expression.
pub fn eval_repeat_count(tcx: &ctxt, count_expr: &ast::Expr) -> uint {
match const_eval::eval_const_expr_partial(tcx, count_expr) {
match const_eval::eval_const_expr_partial(tcx, count_expr, Some(tcx.types.uint)) {
Ok(val) => {
let found = match val {
const_eval::const_uint(count) => return count as uint,

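An illustrative sketch (not from the diff) of the discriminant expressions the i64 hint is meant to preserve: signed and unsigned constant values both evaluate without losing their value.

    // Both forms evaluate with an i64 hint and keep their value.
    enum Tag { Neg = -1, Big = 250 }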
View File

@ -611,13 +611,6 @@ pub fn phase_3_run_analysis_passes<'tcx>(sess: Session,
// passes are timed inside typeck
typeck::check_crate(&ty_cx, trait_map);
time(time_passes, "check static items", (), |_|
middle::check_static::check_crate(&ty_cx));
// These next two const passes can probably be merged
time(time_passes, "const marking", (), |_|
middle::const_eval::process_crate(&ty_cx));
time(time_passes, "const checking", (), |_|
middle::check_const::check_crate(&ty_cx));

View File

@ -881,6 +881,7 @@ extern {
/* Operations on global variables */
pub fn LLVMIsAGlobalVariable(GlobalVar: ValueRef) -> ValueRef;
pub fn LLVMAddGlobal(M: ModuleRef, Ty: TypeRef, Name: *const c_char)
-> ValueRef;
pub fn LLVMAddGlobalInAddressSpace(M: ModuleRef,

View File

@ -219,6 +219,7 @@ use util::nodemap::FnvHashMap;
use util::ppaux::{Repr, vec_map_to_string};
use std;
use std::cmp::Ordering;
use std::iter::AdditiveIterator;
use std::rc::Rc;
use syntax::ast;
@ -232,10 +233,8 @@ struct ConstantExpr<'a>(&'a ast::Expr);
impl<'a> ConstantExpr<'a> {
fn eq(self, other: ConstantExpr<'a>, tcx: &ty::ctxt) -> bool {
let ConstantExpr(expr) = self;
let ConstantExpr(other_expr) = other;
match const_eval::compare_lit_exprs(tcx, expr, other_expr) {
Some(val1) => val1 == 0,
match const_eval::compare_lit_exprs(tcx, self.0, other.0, None) {
Some(result) => result == Ordering::Equal,
None => panic!("compare_list_exprs: type mismatch"),
}
}
@ -279,14 +278,14 @@ impl<'a, 'tcx> Opt<'a, 'tcx> {
match *self {
ConstantValue(ConstantExpr(lit_expr), _) => {
let lit_ty = ty::node_id_to_type(bcx.tcx(), lit_expr.id);
let (llval, _) = consts::const_expr(ccx, &*lit_expr);
let (llval, _) = consts::const_expr(ccx, &*lit_expr, bcx.fcx.param_substs);
let lit_datum = immediate_rvalue(llval, lit_ty);
let lit_datum = unpack_datum!(bcx, lit_datum.to_appropriate_datum(bcx));
SingleResult(Result::new(bcx, lit_datum.val))
}
ConstantRange(ConstantExpr(ref l1), ConstantExpr(ref l2), _) => {
let (l1, _) = consts::const_expr(ccx, &**l1);
let (l2, _) = consts::const_expr(ccx, &**l2);
let (l1, _) = consts::const_expr(ccx, &**l1, bcx.fcx.param_substs);
let (l2, _) = consts::const_expr(ccx, &**l2, bcx.fcx.param_substs);
RangeResult(Result::new(bcx, l1), Result::new(bcx, l2))
}
Variant(disr_val, ref repr, _, _) => {
@ -833,8 +832,8 @@ fn compare_values<'blk, 'tcx>(cx: Block<'blk, 'tcx>,
let _icx = push_ctxt("compare_values");
if ty::type_is_scalar(rhs_t) {
let rs = compare_scalar_types(cx, lhs, rhs, rhs_t, ast::BiEq, debug_loc);
return Result::new(rs.bcx, rs.val);
let cmp = compare_scalar_types(cx, lhs, rhs, rhs_t, ast::BiEq, debug_loc);
return Result::new(cx, cmp);
}
match rhs_t.sty {
@ -1164,29 +1163,16 @@ fn compile_submatch_continue<'a, 'p, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
}
RangeResult(Result { val: vbegin, .. },
Result { bcx, val: vend }) => {
let Result { bcx, val: llge } =
compare_scalar_types(bcx,
test_val,
vbegin,
t,
ast::BiGe,
debug_loc);
let Result { bcx, val: llle } =
compare_scalar_types(bcx,
test_val,
vend,
t,
ast::BiLe,
debug_loc);
Result::new(bcx, And(bcx, llge, llle, debug_loc))
let llge = compare_scalar_types(bcx, test_val, vbegin,
t, ast::BiGe, debug_loc);
let llle = compare_scalar_types(bcx, test_val, vend,
t, ast::BiLe, debug_loc);
Result::new(bcx, And(bcx, llge, llle, DebugLoc::None))
}
LowerBound(Result { bcx, val }) => {
compare_scalar_types(bcx,
test_val,
val,
t,
ast::BiGe,
debug_loc)
Result::new(bcx, compare_scalar_types(bcx, test_val,
val, t, ast::BiGe,
debug_loc))
}
}
};

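A small user-level sketch (names hypothetical, not from the diff) of the patterns this comparison path serves; constant and range patterns are checked by evaluating the literal expressions and comparing their Ordering.

    const LIMIT: u32 = 10;

    fn classify(n: u32) -> &'static str {
        match n {
            0 => "zero",
            1...9 => "small",
            LIMIT => "limit",
            _ => "big",
        }
    }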
View File

@ -323,7 +323,7 @@ fn represent_type_uncached<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
mk_struct(cx, &ftys[], false, t)
}).collect();
ensure_enum_fits_in_address_space(cx, ity, &fields[], t);
ensure_enum_fits_in_address_space(cx, &fields[], t);
General(ity, fields, dtor)
}
@ -582,20 +582,14 @@ fn ensure_struct_fits_in_address_space<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
fn union_size_and_align(sts: &[Struct]) -> (machine::llsize, machine::llalign) {
let size = sts.iter().map(|st| st.size).max().unwrap();
let most_aligned = sts.iter().max_by(|st| st.align).unwrap();
(size, most_aligned.align)
let align = sts.iter().map(|st| st.align).max().unwrap();
(roundup(size, align), align)
}
fn ensure_enum_fits_in_address_space<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
discr: IntType,
fields: &[Struct],
scapegoat: Ty<'tcx>) {
let discr_size = machine::llsize_of_alloc(ccx, ll_inttype(ccx, discr));
let (field_size, field_align) = union_size_and_align(fields);
// field_align < 1<<32, discr_size <= 8, field_size < OBJ_SIZE_BOUND <= 1<<61
// so the sum is less than 1<<62 (and can't overflow).
let total_size = roundup(discr_size, field_align) + field_size;
let (total_size, _) = union_size_and_align(fields);
if total_size >= ccx.obj_size_bound() {
ccx.report_overbig_object(scapegoat);
@ -667,9 +661,11 @@ fn generic_type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
// FIXME #10604: this breaks when vector types are present.
let (size, align) = union_size_and_align(&sts[]);
let align_s = align as u64;
assert_eq!(size % align_s, 0);
let align_units = size / align_s - 1;
let discr_ty = ll_inttype(cx, ity);
let discr_size = machine::llsize_of_alloc(cx, discr_ty);
let align_units = (size + align_s - 1) / align_s - 1;
let fill_ty = match align_s {
1 => Type::array(&Type::i8(cx), align_units),
2 => Type::array(&Type::i16(cx), align_units),
@ -1049,7 +1045,7 @@ pub fn trans_const<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, r: &Repr<'tcx>, discr
}
General(ity, ref cases, _) => {
let case = &cases[discr as uint];
let max_sz = cases.iter().map(|x| x.size).max().unwrap();
let (max_sz, _) = union_size_and_align(&cases[]);
let lldiscr = C_integral(ll_inttype(ccx, ity), discr as u64, true);
let mut f = vec![lldiscr];
f.push_all(vals);

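A worked example of the revised union_size_and_align, assuming two variant structs with (size, align) of (5, 4) and (3, 1): size = max(5, 3) = 5, align = max(4, 1) = 4, and the returned size is roundup(5, 4) = 8, so callers such as trans_const above see a size that already includes trailing padding.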
View File

@ -26,7 +26,6 @@
#![allow(non_camel_case_types)]
pub use self::ValueOrigin::*;
pub use self::scalar_type::*;
use super::CrateTranslation;
use super::ModuleTranslation;
@ -40,7 +39,6 @@ use metadata::{csearch, encoder, loader};
use middle::astencode;
use middle::cfg;
use middle::lang_items::{LangItem, ExchangeMallocFnLangItem, StartFnLangItem};
use middle::subst;
use middle::weak_lang_items;
use middle::subst::{Subst, Substs};
use middle::ty::{self, Ty, ClosureTyper};
@ -498,7 +496,7 @@ pub fn get_res_dtor<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
did: ast::DefId,
t: Ty<'tcx>,
parent_id: ast::DefId,
substs: &subst::Substs<'tcx>)
substs: &Substs<'tcx>)
-> ValueRef {
let _icx = push_ctxt("trans_res_dtor");
let did = inline::maybe_instantiate_inline(ccx, did);
@ -507,9 +505,9 @@ pub fn get_res_dtor<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
assert_eq!(did.krate, ast::LOCAL_CRATE);
// Since we're in trans we don't care for any region parameters
let substs = subst::Substs::erased(substs.types.clone());
let substs = ccx.tcx().mk_substs(Substs::erased(substs.types.clone()));
let (val, _, _) = monomorphize::monomorphic_fn(ccx, did, &substs, None);
let (val, _, _) = monomorphize::monomorphic_fn(ccx, did, substs, None);
val
} else if did.krate == ast::LOCAL_CRATE {
@ -532,137 +530,100 @@ pub fn get_res_dtor<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
}
}
// Used only for creating scalar comparison glue.
#[derive(Copy)]
pub enum scalar_type { nil_type, signed_int, unsigned_int, floating_point, }
pub fn bin_op_to_icmp_predicate(ccx: &CrateContext, op: ast::BinOp_, signed: bool)
-> llvm::IntPredicate {
match op {
ast::BiEq => llvm::IntEQ,
ast::BiNe => llvm::IntNE,
ast::BiLt => if signed { llvm::IntSLT } else { llvm::IntULT },
ast::BiLe => if signed { llvm::IntSLE } else { llvm::IntULE },
ast::BiGt => if signed { llvm::IntSGT } else { llvm::IntUGT },
ast::BiGe => if signed { llvm::IntSGE } else { llvm::IntUGE },
op => {
ccx.sess().bug(&format!("comparison_op_to_icmp_predicate: expected \
comparison operator, found {:?}", op)[]);
}
}
}
pub fn compare_scalar_types<'blk, 'tcx>(cx: Block<'blk, 'tcx>,
pub fn bin_op_to_fcmp_predicate(ccx: &CrateContext, op: ast::BinOp_)
-> llvm::RealPredicate {
match op {
ast::BiEq => llvm::RealOEQ,
ast::BiNe => llvm::RealUNE,
ast::BiLt => llvm::RealOLT,
ast::BiLe => llvm::RealOLE,
ast::BiGt => llvm::RealOGT,
ast::BiGe => llvm::RealOGE,
op => {
ccx.sess().bug(&format!("comparison_op_to_fcmp_predicate: expected \
comparison operator, found {:?}", op)[]);
}
}
}
pub fn compare_scalar_types<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
lhs: ValueRef,
rhs: ValueRef,
t: Ty<'tcx>,
op: ast::BinOp_,
debug_loc: DebugLoc)
-> Result<'blk, 'tcx> {
let f = |a| Result::new(cx, compare_scalar_values(cx, lhs, rhs, a, op, debug_loc));
-> ValueRef {
match t.sty {
ty::ty_tup(ref tys) if tys.is_empty() => f(nil_type),
ty::ty_bool | ty::ty_uint(_) | ty::ty_char => f(unsigned_int),
ty::ty_ptr(mt) if common::type_is_sized(cx.tcx(), mt.ty) => f(unsigned_int),
ty::ty_int(_) => f(signed_int),
ty::ty_float(_) => f(floating_point),
// Should never get here, because t is scalar.
_ => cx.sess().bug("non-scalar type passed to compare_scalar_types")
}
}
// A helper function to do the actual comparison of scalar values.
pub fn compare_scalar_values<'blk, 'tcx>(cx: Block<'blk, 'tcx>,
lhs: ValueRef,
rhs: ValueRef,
nt: scalar_type,
op: ast::BinOp_,
debug_loc: DebugLoc)
-> ValueRef {
let _icx = push_ctxt("compare_scalar_values");
fn die(cx: Block) -> ! {
cx.sess().bug("compare_scalar_values: must be a comparison operator");
}
match nt {
nil_type => {
// We don't need to do actual comparisons for nil.
// () == () holds but () < () does not.
match op {
ast::BiEq | ast::BiLe | ast::BiGe => return C_bool(cx.ccx(), true),
ast::BiNe | ast::BiLt | ast::BiGt => return C_bool(cx.ccx(), false),
// refinements would be nice
_ => die(cx)
ty::ty_tup(ref tys) if tys.is_empty() => {
// We don't need to do actual comparisons for nil.
// () == () holds but () < () does not.
match op {
ast::BiEq | ast::BiLe | ast::BiGe => return C_bool(bcx.ccx(), true),
ast::BiNe | ast::BiLt | ast::BiGt => return C_bool(bcx.ccx(), false),
// refinements would be nice
_ => bcx.sess().bug("compare_scalar_types: must be a comparison operator")
}
}
}
floating_point => {
let cmp = match op {
ast::BiEq => llvm::RealOEQ,
ast::BiNe => llvm::RealUNE,
ast::BiLt => llvm::RealOLT,
ast::BiLe => llvm::RealOLE,
ast::BiGt => llvm::RealOGT,
ast::BiGe => llvm::RealOGE,
_ => die(cx)
};
return FCmp(cx, cmp, lhs, rhs, debug_loc);
}
signed_int => {
let cmp = match op {
ast::BiEq => llvm::IntEQ,
ast::BiNe => llvm::IntNE,
ast::BiLt => llvm::IntSLT,
ast::BiLe => llvm::IntSLE,
ast::BiGt => llvm::IntSGT,
ast::BiGe => llvm::IntSGE,
_ => die(cx)
};
return ICmp(cx, cmp, lhs, rhs, debug_loc);
}
unsigned_int => {
let cmp = match op {
ast::BiEq => llvm::IntEQ,
ast::BiNe => llvm::IntNE,
ast::BiLt => llvm::IntULT,
ast::BiLe => llvm::IntULE,
ast::BiGt => llvm::IntUGT,
ast::BiGe => llvm::IntUGE,
_ => die(cx)
};
return ICmp(cx, cmp, lhs, rhs, debug_loc);
}
ty::ty_bool | ty::ty_uint(_) | ty::ty_char => {
ICmp(bcx, bin_op_to_icmp_predicate(bcx.ccx(), op, false), lhs, rhs, debug_loc)
}
ty::ty_ptr(mt) if common::type_is_sized(bcx.tcx(), mt.ty) => {
ICmp(bcx, bin_op_to_icmp_predicate(bcx.ccx(), op, false), lhs, rhs, debug_loc)
}
ty::ty_int(_) => {
ICmp(bcx, bin_op_to_icmp_predicate(bcx.ccx(), op, true), lhs, rhs, debug_loc)
}
ty::ty_float(_) => {
FCmp(bcx, bin_op_to_fcmp_predicate(bcx.ccx(), op), lhs, rhs, debug_loc)
}
// Should never get here, because t is scalar.
_ => bcx.sess().bug("non-scalar type passed to compare_scalar_types")
}
}
pub fn compare_simd_types<'blk, 'tcx>(
cx: Block<'blk, 'tcx>,
lhs: ValueRef,
rhs: ValueRef,
t: Ty<'tcx>,
size: uint,
op: ast::BinOp_,
debug_loc: DebugLoc)
-> ValueRef {
let cmp = match t.sty {
pub fn compare_simd_types<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
lhs: ValueRef,
rhs: ValueRef,
t: Ty<'tcx>,
op: ast::BinOp_,
debug_loc: DebugLoc)
-> ValueRef {
let signed = match t.sty {
ty::ty_float(_) => {
// The comparison operators for floating point vectors are challenging.
// LLVM outputs a `< size x i1 >`, but if we perform a sign extension
// then bitcast to a floating point vector, the result will be `-NaN`
// for each truth value. Because of this they are unsupported.
cx.sess().bug("compare_simd_types: comparison operators \
not supported for floating point SIMD types")
bcx.sess().bug("compare_simd_types: comparison operators \
not supported for floating point SIMD types")
},
ty::ty_uint(_) => match op {
ast::BiEq => llvm::IntEQ,
ast::BiNe => llvm::IntNE,
ast::BiLt => llvm::IntULT,
ast::BiLe => llvm::IntULE,
ast::BiGt => llvm::IntUGT,
ast::BiGe => llvm::IntUGE,
_ => cx.sess().bug("compare_simd_types: must be a comparison operator"),
},
ty::ty_int(_) => match op {
ast::BiEq => llvm::IntEQ,
ast::BiNe => llvm::IntNE,
ast::BiLt => llvm::IntSLT,
ast::BiLe => llvm::IntSLE,
ast::BiGt => llvm::IntSGT,
ast::BiGe => llvm::IntSGE,
_ => cx.sess().bug("compare_simd_types: must be a comparison operator"),
},
_ => cx.sess().bug("compare_simd_types: invalid SIMD type"),
ty::ty_uint(_) => false,
ty::ty_int(_) => true,
_ => bcx.sess().bug("compare_simd_types: invalid SIMD type"),
};
let return_ty = Type::vector(&type_of(cx.ccx(), t), size as u64);
let cmp = bin_op_to_icmp_predicate(bcx.ccx(), op, signed);
// LLVM outputs an `< size x i1 >`, so we need to perform a sign extension
// to get the correctly sized type. This will compile to a single instruction
// once the IR is converted to assembly if the SIMD instruction is supported
// by the target architecture.
SExt(cx, ICmp(cx, cmp, lhs, rhs, debug_loc), return_ty)
SExt(bcx, ICmp(bcx, cmp, lhs, rhs, debug_loc), val_ty(lhs))
}
// Iterates through the elements of a structural type.
@ -679,7 +640,7 @@ pub fn iter_structural_ty<'blk, 'tcx, F>(cx: Block<'blk, 'tcx>,
repr: &adt::Repr<'tcx>,
av: ValueRef,
variant: &ty::VariantInfo<'tcx>,
substs: &subst::Substs<'tcx>,
substs: &Substs<'tcx>,
f: &mut F)
-> Block<'blk, 'tcx> where
F: FnMut(Block<'blk, 'tcx>, ValueRef, Ty<'tcx>) -> Block<'blk, 'tcx>,
@ -1034,21 +995,39 @@ pub fn load_ty<'blk, 'tcx>(cx: Block<'blk, 'tcx>,
ptr: ValueRef, t: Ty<'tcx>) -> ValueRef {
if type_is_zero_size(cx.ccx(), t) {
C_undef(type_of::type_of(cx.ccx(), t))
} else if ty::type_is_bool(t) {
Trunc(cx, LoadRangeAssert(cx, ptr, 0, 2, llvm::False), Type::i1(cx.ccx()))
} else if type_is_immediate(cx.ccx(), t) && type_of::type_of(cx.ccx(), t).is_aggregate() {
// We want to pass small aggregates as immediate values, but using an aggregate LLVM type
// for this leads to bad optimizations, so its arg type is an appropriately sized integer
// and we have to convert it
Load(cx, BitCast(cx, ptr, type_of::arg_type_of(cx.ccx(), t).ptr_to()))
} else if ty::type_is_region_ptr(t) || ty::type_is_unique(t) {
LoadNonNull(cx, ptr)
} else if ty::type_is_char(t) {
// a char is a Unicode codepoint, and so takes values from 0
// to 0x10FFFF inclusive only.
LoadRangeAssert(cx, ptr, 0, 0x10FFFF + 1, llvm::False)
} else {
Load(cx, ptr)
unsafe {
let global = llvm::LLVMIsAGlobalVariable(ptr);
if !global.is_null() && llvm::LLVMIsGlobalConstant(global) == llvm::True {
let val = llvm::LLVMGetInitializer(global);
if !val.is_null() {
// This could go into its own function, for DRY.
// (something like "pre-store packing/post-load unpacking")
if ty::type_is_bool(t) {
return Trunc(cx, val, Type::i1(cx.ccx()));
} else {
return val;
}
}
}
}
if ty::type_is_bool(t) {
Trunc(cx, LoadRangeAssert(cx, ptr, 0, 2, llvm::False), Type::i1(cx.ccx()))
} else if ty::type_is_char(t) {
// a char is a Unicode codepoint, and so takes values from 0
// to 0x10FFFF inclusive only.
LoadRangeAssert(cx, ptr, 0, 0x10FFFF + 1, llvm::False)
} else if (ty::type_is_region_ptr(t) || ty::type_is_unique(t))
&& !common::type_is_fat_ptr(cx.tcx(), t) {
LoadNonNull(cx, ptr)
} else {
Load(cx, ptr)
}
}
}
@ -1064,7 +1043,7 @@ pub fn store_ty<'blk, 'tcx>(cx: Block<'blk, 'tcx>, v: ValueRef, dst: ValueRef, t
Store(cx, v, BitCast(cx, dst, type_of::arg_type_of(cx.ccx(), t).ptr_to()));
} else {
Store(cx, v, dst);
};
}
}
pub fn init_local<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, local: &ast::Local)
@ -1162,7 +1141,7 @@ pub fn memcpy_ty<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
let llalign = type_of::align_of(ccx, t);
call_memcpy(bcx, dst, src, llsz, llalign as u32);
} else {
store_ty(bcx, Load(bcx, src), dst, t);
store_ty(bcx, load_ty(bcx, src, t), dst, t);
}
}
@ -1425,7 +1404,7 @@ pub fn new_fn_ctxt<'a, 'tcx>(ccx: &'a CrateContext<'a, 'tcx>,
id: ast::NodeId,
has_env: bool,
output_type: ty::FnOutput<'tcx>,
param_substs: &'a Substs<'tcx>,
param_substs: &'tcx Substs<'tcx>,
sp: Option<Span>,
block_arena: &'a TypedArena<common::BlockS<'a, 'tcx>>)
-> FunctionContext<'a, 'tcx> {
@ -1793,7 +1772,7 @@ pub fn trans_closure<'a, 'b, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
decl: &ast::FnDecl,
body: &ast::Block,
llfndecl: ValueRef,
param_substs: &Substs<'tcx>,
param_substs: &'tcx Substs<'tcx>,
fn_ast_id: ast::NodeId,
_attributes: &[ast::Attribute],
output_type: ty::FnOutput<'tcx>,
@ -1942,7 +1921,7 @@ pub fn trans_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
decl: &ast::FnDecl,
body: &ast::Block,
llfndecl: ValueRef,
param_substs: &Substs<'tcx>,
param_substs: &'tcx Substs<'tcx>,
id: ast::NodeId,
attrs: &[ast::Attribute]) {
let _s = StatRecorder::new(ccx, ccx.tcx().map.path_to_string(id).to_string());
@ -1968,7 +1947,7 @@ pub fn trans_enum_variant<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
variant: &ast::Variant,
_args: &[ast::VariantArg],
disr: ty::Disr,
param_substs: &Substs<'tcx>,
param_substs: &'tcx Substs<'tcx>,
llfndecl: ValueRef) {
let _icx = push_ctxt("trans_enum_variant");
@ -2049,7 +2028,7 @@ pub fn trans_named_tuple_constructor<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
pub fn trans_tuple_struct<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
_fields: &[ast::StructField],
ctor_id: ast::NodeId,
param_substs: &Substs<'tcx>,
param_substs: &'tcx Substs<'tcx>,
llfndecl: ValueRef) {
let _icx = push_ctxt("trans_tuple_struct");
@ -2064,7 +2043,7 @@ pub fn trans_tuple_struct<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
fn trans_enum_variant_or_tuple_like_struct<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
ctor_id: ast::NodeId,
disr: ty::Disr,
param_substs: &Substs<'tcx>,
param_substs: &'tcx Substs<'tcx>,
llfndecl: ValueRef) {
let ctor_ty = ty::node_id_to_type(ccx.tcx(), ctor_id);
let ctor_ty = monomorphize::apply_param_substs(ccx.tcx(), param_substs, &ctor_ty);
@ -2302,13 +2281,14 @@ pub fn trans_item(ccx: &CrateContext, item: &ast::Item) {
// translated everywhere it's needed.
for (ref ccx, is_origin) in ccx.maybe_iter(!from_external && trans_everywhere) {
let llfn = get_item_val(ccx, item.id);
let empty_substs = ccx.tcx().mk_substs(Substs::trans_empty());
if abi != Rust {
foreign::trans_rust_fn_with_foreign_abi(ccx,
&**decl,
&**body,
&item.attrs[],
llfn,
&Substs::trans_empty(),
empty_substs,
item.id,
None);
} else {
@ -2316,7 +2296,7 @@ pub fn trans_item(ccx: &CrateContext, item: &ast::Item) {
&**decl,
&**body,
llfn,
&Substs::trans_empty(),
empty_substs,
item.id,
&item.attrs[]);
}
@ -2792,7 +2772,8 @@ pub fn get_item_val(ccx: &CrateContext, id: ast::NodeId) -> ValueRef {
// We need the translated value here, because for enums the
// LLVM type is not fully determined by the Rust type.
let (v, ty) = consts::const_expr(ccx, &**expr);
let empty_substs = ccx.tcx().mk_substs(Substs::trans_empty());
let (v, ty) = consts::const_expr(ccx, &**expr, empty_substs);
ccx.static_values().borrow_mut().insert(id, v);
unsafe {
// boolean SSA values are i1, but they have to be stored in i8 slots,
@ -2820,12 +2801,6 @@ pub fn get_item_val(ccx: &CrateContext, id: ast::NodeId) -> ValueRef {
}
}
ast::ItemConst(_, ref expr) => {
let (v, _) = consts::const_expr(ccx, &**expr);
ccx.const_values().borrow_mut().insert(id, v);
v
}
ast::ItemFn(_, _, abi, _, _) => {
let sym = sym();
let llfn = if abi == Rust {

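A short sketch of the new calling convention (values and block assumed to be in scope, as in the _match.rs hunk earlier): compare_scalar_types now returns a bare ValueRef, and callers that still need a Result wrap it themselves.

    // Pick the signed/unsigned LLVM predicate and emit the comparison.
    let val = compare_scalar_types(bcx, lhs, rhs, t, ast::BiEq, debug_loc);
    let res = Result::new(bcx, val);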
View File

@ -226,7 +226,7 @@ fn trans<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, expr: &ast::Expr)
pub fn trans_fn_ref<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
def_id: ast::DefId,
node: ExprOrMethodCall,
param_substs: &subst::Substs<'tcx>)
param_substs: &'tcx subst::Substs<'tcx>)
-> Datum<'tcx, Rvalue> {
let _icx = push_ctxt("trans_fn_ref");
@ -326,7 +326,7 @@ pub fn trans_fn_pointer_shim<'a, 'tcx>(
&function_name[]);
//
let empty_substs = Substs::trans_empty();
let empty_substs = tcx.mk_substs(Substs::trans_empty());
let (block_arena, fcx): (TypedArena<_>, FunctionContext);
block_arena = TypedArena::new();
fcx = new_fn_ctxt(ccx,
@ -334,7 +334,7 @@ pub fn trans_fn_pointer_shim<'a, 'tcx>(
ast::DUMMY_NODE_ID,
false,
sig.output,
&empty_substs,
empty_substs,
None,
&block_arena);
let mut bcx = init_function(&fcx, false, sig.output);
@ -386,7 +386,7 @@ pub fn trans_fn_ref_with_substs<'a, 'tcx>(
ccx: &CrateContext<'a, 'tcx>,
def_id: ast::DefId,
node: ExprOrMethodCall,
param_substs: &subst::Substs<'tcx>,
param_substs: &'tcx subst::Substs<'tcx>,
substs: subst::Substs<'tcx>)
-> Datum<'tcx, Rvalue>
{
@ -416,7 +416,9 @@ pub fn trans_fn_ref_with_substs<'a, 'tcx>(
// We need to modify the def_id and our substs in order to monomorphize
// the function.
let (is_default, def_id, substs) = match ty::provided_source(tcx, def_id) {
None => (false, def_id, substs),
None => {
(false, def_id, tcx.mk_substs(substs))
}
Some(source_id) => {
// There are two relevant substitutions when compiling
// default methods. First, there is the substitution for
@ -444,7 +446,7 @@ pub fn trans_fn_ref_with_substs<'a, 'tcx>(
.erase_regions();
// And compose them
let new_substs = first_subst.subst(tcx, &substs);
let new_substs = tcx.mk_substs(first_subst.subst(tcx, &substs));
debug!("trans_fn_with_vtables - default method: \
substs = {}, trait_subst = {}, \
@ -463,7 +465,7 @@ pub fn trans_fn_ref_with_substs<'a, 'tcx>(
};
// If this is a closure, redirect to it.
match closure::get_or_create_declaration_if_closure(ccx, def_id, &substs) {
match closure::get_or_create_declaration_if_closure(ccx, def_id, substs) {
None => {}
Some(llfn) => return llfn,
}
@ -505,7 +507,7 @@ pub fn trans_fn_ref_with_substs<'a, 'tcx>(
};
let (val, fn_ty, must_cast) =
monomorphize::monomorphic_fn(ccx, def_id, &substs, opt_ref_id);
monomorphize::monomorphic_fn(ccx, def_id, substs, opt_ref_id);
if must_cast && node != ExprId(0) {
// Monotype of the REFERENCE to the function (type params
// are subst'd)

View File

@ -9,6 +9,7 @@
// except according to those terms.
use back::link::mangle_internal_name_by_path_and_seq;
use llvm::ValueRef;
use middle::mem_categorization::Typer;
use trans::adt;
use trans::base::*;
@ -137,7 +138,7 @@ pub fn get_or_create_declaration_if_closure<'a, 'tcx>(ccx: &CrateContext<'a, 'tc
// duplicate declarations
let function_type = erase_regions(ccx.tcx(), &function_type);
let params = match function_type.sty {
ty::ty_closure(_, _, ref substs) => substs.types.clone(),
ty::ty_closure(_, _, substs) => &substs.types,
_ => unreachable!()
};
let mono_id = MonoId {
@ -171,41 +172,51 @@ pub fn get_or_create_declaration_if_closure<'a, 'tcx>(ccx: &CrateContext<'a, 'tc
Some(Datum::new(llfn, function_type, Rvalue::new(ByValue)))
}
pub fn trans_closure_expr<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
decl: &ast::FnDecl,
body: &ast::Block,
id: ast::NodeId,
dest: expr::Dest)
-> Block<'blk, 'tcx>
pub enum Dest<'a, 'tcx: 'a> {
SaveIn(Block<'a, 'tcx>, ValueRef),
Ignore(&'a CrateContext<'a, 'tcx>)
}
pub fn trans_closure_expr<'a, 'tcx>(dest: Dest<'a, 'tcx>,
decl: &ast::FnDecl,
body: &ast::Block,
id: ast::NodeId,
param_substs: &'tcx Substs<'tcx>)
-> Option<Block<'a, 'tcx>>
{
let ccx = match dest {
Dest::SaveIn(bcx, _) => bcx.ccx(),
Dest::Ignore(ccx) => ccx
};
let tcx = ccx.tcx();
let _icx = push_ctxt("closure::trans_closure");
debug!("trans_closure()");
let closure_id = ast_util::local_def(id);
let llfn = get_or_create_declaration_if_closure(
bcx.ccx(),
ccx,
closure_id,
bcx.fcx.param_substs).unwrap();
param_substs).unwrap();
// Get the type of this closure. Use the current `param_substs` as
// the closure substitutions. This makes sense because the closure
// takes the same set of type arguments as the enclosing fn, and
// this function (`trans_closure`) is invoked at the point
// of the closure expression.
let typer = NormalizingClosureTyper::new(bcx.tcx());
let function_type = typer.closure_type(closure_id, bcx.fcx.param_substs);
let typer = NormalizingClosureTyper::new(tcx);
let function_type = typer.closure_type(closure_id, param_substs);
let freevars: Vec<ty::Freevar> =
ty::with_freevars(bcx.tcx(), id, |fv| fv.iter().map(|&fv| fv).collect());
ty::with_freevars(tcx, id, |fv| fv.iter().map(|&fv| fv).collect());
let sig = ty::erase_late_bound_regions(bcx.tcx(), &function_type.sig);
let sig = ty::erase_late_bound_regions(tcx, &function_type.sig);
trans_closure(bcx.ccx(),
trans_closure(ccx,
decl,
body,
llfn.val,
bcx.fcx.param_substs,
param_substs,
id,
&[],
sig.output,
@ -215,15 +226,15 @@ pub fn trans_closure_expr<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
// Don't hoist this to the top of the function. It's perfectly legitimate
// to have a zero-size closure (in which case dest will be `Ignore`) and
// we must still generate the closure body.
let dest_addr = match dest {
expr::SaveIn(p) => p,
expr::Ignore => {
let (mut bcx, dest_addr) = match dest {
Dest::SaveIn(bcx, p) => (bcx, p),
Dest::Ignore(_) => {
debug!("trans_closure() ignoring result");
return bcx
return None;
}
};
let repr = adt::represent_type(bcx.ccx(), node_id_type(bcx, id));
let repr = adt::represent_type(ccx, node_id_type(bcx, id));
// Create the closure.
for (i, freevar) in freevars.iter().enumerate() {
@ -235,8 +246,7 @@ pub fn trans_closure_expr<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
i);
let upvar_id = ty::UpvarId { var_id: freevar.def.local_node_id(),
closure_expr_id: id };
let upvar_capture = bcx.tcx().upvar_capture(upvar_id).unwrap();
match upvar_capture {
match tcx.upvar_capture(upvar_id).unwrap() {
ty::UpvarCapture::ByValue => {
bcx = datum.store_to(bcx, upvar_slot_dest);
}
@ -247,6 +257,6 @@ pub fn trans_closure_expr<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
}
adt::trans_set_discr(bcx, &*repr, dest_addr, 0);
bcx
Some(bcx)
}

View File

@ -410,7 +410,7 @@ pub struct FunctionContext<'a, 'tcx: 'a> {
// If this function is being monomorphized, this contains the type
// substitutions used.
pub param_substs: &'a Substs<'tcx>,
pub param_substs: &'tcx Substs<'tcx>,
// The source span and nesting context where this function comes from, for
// error reporting and symbol generation.
@ -858,25 +858,6 @@ pub fn C_str_slice(cx: &CrateContext, s: InternedString) -> ValueRef {
C_named_struct(cx.tn().find_type("str_slice").unwrap(), &[cs, C_uint(cx, len)])
}
pub fn C_binary_slice(cx: &CrateContext, data: &[u8]) -> ValueRef {
unsafe {
let len = data.len();
let lldata = C_bytes(cx, data);
let gsym = token::gensym("binary");
let name = format!("binary{}", gsym.usize());
let name = CString::from_vec(name.into_bytes());
let g = llvm::LLVMAddGlobal(cx.llmod(), val_ty(lldata).to_ref(),
name.as_ptr());
llvm::LLVMSetInitializer(g, lldata);
llvm::LLVMSetGlobalConstant(g, True);
llvm::SetLinkage(g, llvm::InternalLinkage);
let cs = consts::ptrcast(g, Type::i8p(cx));
C_struct(cx, &[cs, C_uint(cx, len)], false)
}
}
pub fn C_struct(cx: &CrateContext, elts: &[ValueRef], packed: bool) -> ValueRef {
C_struct_in_context(cx.llcx(), elts, packed)
}
@ -901,6 +882,12 @@ pub fn C_array(ty: Type, elts: &[ValueRef]) -> ValueRef {
}
}
pub fn C_vector(elts: &[ValueRef]) -> ValueRef {
unsafe {
return llvm::LLVMConstVector(elts.as_ptr(), elts.len() as c_uint);
}
}
pub fn C_bytes(cx: &CrateContext, bytes: &[u8]) -> ValueRef {
C_bytes_in_context(cx.llcx(), bytes)
}

View File

@ -11,13 +11,13 @@
use back::abi;
use llvm;
use llvm::{ConstFCmp, ConstICmp, SetLinkage, PrivateLinkage, ValueRef, Bool, True, False};
use llvm::{IntEQ, IntNE, IntUGT, IntUGE, IntULT, IntULE, IntSGT, IntSGE, IntSLT, IntSLE,
RealOEQ, RealOGT, RealOGE, RealOLT, RealOLE, RealONE};
use middle::{const_eval, def};
use trans::{adt, consts, debuginfo, expr, inline, machine};
use llvm::{ConstFCmp, ConstICmp, SetLinkage, SetUnnamedAddr};
use llvm::{InternalLinkage, ValueRef, Bool, True};
use middle::{check_const, const_eval, def};
use trans::{adt, closure, debuginfo, expr, inline, machine};
use trans::base::{self, push_ctxt};
use trans::common::*;
use trans::monomorphize;
use trans::type_::Type;
use trans::type_of;
use middle::subst::Substs;
@ -74,7 +74,16 @@ pub fn const_lit(cx: &CrateContext, e: &ast::Expr, lit: &ast::Lit)
}
ast::LitBool(b) => C_bool(cx, b),
ast::LitStr(ref s, _) => C_str_slice(cx, (*s).clone()),
ast::LitBinary(ref data) => C_binary_slice(cx, &data[]),
ast::LitBinary(ref data) => {
let g = addr_of(cx, C_bytes(cx, &data[]), "binary", e.id);
let base = ptrcast(g, Type::i8p(cx));
let prev_const = cx.const_unsized().borrow_mut()
.insert(base, g);
assert!(prev_const.is_none() || prev_const == Some(g));
assert_eq!(abi::FAT_PTR_ADDR, 0);
assert_eq!(abi::FAT_PTR_EXTRA, 1);
C_struct(cx, &[base, C_uint(cx, data.len())], false)
}
}
}
@ -84,36 +93,41 @@ pub fn ptrcast(val: ValueRef, ty: Type) -> ValueRef {
}
}
fn const_vec(cx: &CrateContext, e: &ast::Expr,
es: &[P<ast::Expr>]) -> (ValueRef, Type) {
let vec_ty = ty::expr_ty(cx.tcx(), e);
let unit_ty = ty::sequence_element_type(cx.tcx(), vec_ty);
let llunitty = type_of::type_of(cx, unit_ty);
let vs = es.iter().map(|e| const_expr(cx, &**e).0)
.collect::<Vec<_>>();
// If the vector contains enums, an LLVM array won't work.
let v = if vs.iter().any(|vi| val_ty(*vi) != llunitty) {
C_struct(cx, &vs[], false)
} else {
C_array(llunitty, &vs[])
};
(v, llunitty)
}
pub fn const_addr_of(cx: &CrateContext, cv: ValueRef, mutbl: ast::Mutability) -> ValueRef {
fn addr_of_mut(ccx: &CrateContext,
cv: ValueRef,
kind: &str,
id: ast::NodeId)
-> ValueRef {
unsafe {
let gv = llvm::LLVMAddGlobal(cx.llmod(), val_ty(cv).to_ref(),
"const\0".as_ptr() as *const _);
let name = format!("{}{}\0", kind, id);
let gv = llvm::LLVMAddGlobal(ccx.llmod(), val_ty(cv).to_ref(),
name.as_ptr() as *const _);
llvm::LLVMSetInitializer(gv, cv);
llvm::LLVMSetGlobalConstant(gv,
if mutbl == ast::MutImmutable {True} else {False});
SetLinkage(gv, PrivateLinkage);
SetLinkage(gv, InternalLinkage);
SetUnnamedAddr(gv, true);
gv
}
}
pub fn addr_of(ccx: &CrateContext,
cv: ValueRef,
kind: &str,
id: ast::NodeId)
-> ValueRef {
match ccx.const_globals().borrow().get(&cv) {
Some(&gv) => return gv,
None => {}
}
let gv = addr_of_mut(ccx, cv, kind, id);
unsafe {
llvm::LLVMSetGlobalConstant(gv, True);
}
ccx.const_globals().borrow_mut().insert(cv, gv);
gv
}
fn const_deref_ptr(cx: &CrateContext, v: ValueRef) -> ValueRef {
let v = match cx.const_globals().borrow().get(&(v as int)) {
let v = match cx.const_unsized().borrow().get(&v) {
Some(&v) => v,
None => v
};
@ -122,155 +136,209 @@ fn const_deref_ptr(cx: &CrateContext, v: ValueRef) -> ValueRef {
}
}
fn const_deref_newtype<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, v: ValueRef, t: Ty<'tcx>)
-> ValueRef {
let repr = adt::represent_type(cx, t);
adt::const_get_field(cx, &*repr, v, 0, 0)
}
fn const_deref<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, v: ValueRef,
t: Ty<'tcx>, explicit: bool)
fn const_deref<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
v: ValueRef,
ty: Ty<'tcx>)
-> (ValueRef, Ty<'tcx>) {
match ty::deref(t, explicit) {
Some(ref mt) => {
match t.sty {
ty::ty_ptr(mt) | ty::ty_rptr(_, mt) => {
if type_is_sized(cx.tcx(), mt.ty) {
(const_deref_ptr(cx, v), mt.ty)
} else {
// Derefing a fat pointer does not change the representation,
// just the type to ty_open.
(v, ty::mk_open(cx.tcx(), mt.ty))
}
}
ty::ty_enum(..) | ty::ty_struct(..) => {
assert!(mt.mutbl != ast::MutMutable);
(const_deref_newtype(cx, v, t), mt.ty)
}
_ => {
cx.sess().bug(&format!("unexpected dereferenceable type {}",
ty_to_string(cx.tcx(), t))[])
}
match ty::deref(ty, true) {
Some(mt) => {
if type_is_sized(cx.tcx(), mt.ty) {
(const_deref_ptr(cx, v), mt.ty)
} else {
// Derefing a fat pointer does not change the representation,
// just the type to ty_open.
(v, ty::mk_open(cx.tcx(), mt.ty))
}
}
None => {
cx.sess().bug(&format!("cannot dereference const of type {}",
ty_to_string(cx.tcx(), t))[])
cx.sess().bug(&format!("unexpected dereferenceable type {}",
ty_to_string(cx.tcx(), ty))[])
}
}
}
pub fn get_const_val(cx: &CrateContext,
mut def_id: ast::DefId) -> ValueRef {
let contains_key = cx.const_values().borrow().contains_key(&def_id.node);
if !ast_util::is_local(def_id) || !contains_key {
if !ast_util::is_local(def_id) {
def_id = inline::maybe_instantiate_inline(cx, def_id);
}
pub fn get_const_expr<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
def_id: ast::DefId,
ref_expr: &ast::Expr)
-> &'tcx ast::Expr {
let def_id = inline::maybe_instantiate_inline(ccx, def_id);
if let ast::ItemConst(..) = cx.tcx().map.expect_item(def_id.node).node {
base::get_item_val(cx, def_id.node);
}
if def_id.krate != ast::LOCAL_CRATE {
ccx.sess().span_bug(ref_expr.span,
"cross crate constant could not be inlined");
}
cx.const_values().borrow()[def_id.node].clone()
let item = ccx.tcx().map.expect_item(def_id.node);
if let ast::ItemConst(_, ref expr) = item.node {
&**expr
} else {
ccx.sess().span_bug(ref_expr.span,
&format!("get_const_val given non-constant item {}",
item.repr(ccx.tcx()))[]);
}
}
pub fn const_expr<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, e: &ast::Expr)
fn get_const_val(ccx: &CrateContext,
def_id: ast::DefId,
ref_expr: &ast::Expr) -> ValueRef {
let expr = get_const_expr(ccx, def_id, ref_expr);
let empty_substs = ccx.tcx().mk_substs(Substs::trans_empty());
get_const_expr_as_global(ccx, expr, check_const::PURE_CONST, empty_substs)
}
pub fn get_const_expr_as_global<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
expr: &ast::Expr,
qualif: check_const::ConstQualif,
param_substs: &'tcx Substs<'tcx>)
-> ValueRef {
// Special-case constants to cache a common global for all uses.
match expr.node {
ast::ExprPath(_) => {
let def = ccx.tcx().def_map.borrow()[expr.id];
match def {
def::DefConst(def_id) => {
if !ccx.tcx().adjustments.borrow().contains_key(&expr.id) {
return get_const_val(ccx, def_id, expr);
}
}
_ => {}
}
}
_ => {}
}
let key = (expr.id, param_substs);
match ccx.const_values().borrow().get(&key) {
Some(&val) => return val,
None => {}
}
let val = if qualif.intersects(check_const::NON_STATIC_BORROWS) {
// Avoid autorefs as they would create global instead of stack
// references, even when only the latter are correct.
let ty = monomorphize::apply_param_substs(ccx.tcx(), param_substs,
&ty::expr_ty(ccx.tcx(), expr));
const_expr_unadjusted(ccx, expr, ty, param_substs)
} else {
const_expr(ccx, expr, param_substs).0
};
// boolean SSA values are i1, but they have to be stored in i8 slots,
// otherwise some LLVM optimization passes don't work as expected
let val = unsafe {
if llvm::LLVMTypeOf(val) == Type::i1(ccx).to_ref() {
llvm::LLVMConstZExt(val, Type::i8(ccx).to_ref())
} else {
val
}
};
let lvalue = addr_of(ccx, val, "const", expr.id);
ccx.const_values().borrow_mut().insert(key, lvalue);
lvalue
}
pub fn const_expr<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
e: &ast::Expr,
param_substs: &'tcx Substs<'tcx>)
-> (ValueRef, Ty<'tcx>) {
let llconst = const_expr_unadjusted(cx, e);
let ety = monomorphize::apply_param_substs(cx.tcx(), param_substs,
&ty::expr_ty(cx.tcx(), e));
let llconst = const_expr_unadjusted(cx, e, ety, param_substs);
let mut llconst = llconst;
let ety = ty::expr_ty(cx.tcx(), e);
let mut ety_adjusted = ty::expr_ty_adjusted(cx.tcx(), e);
let mut ety_adjusted = monomorphize::apply_param_substs(cx.tcx(), param_substs,
&ty::expr_ty_adjusted(cx.tcx(), e));
let opt_adj = cx.tcx().adjustments.borrow().get(&e.id).cloned();
match opt_adj {
None => { }
Some(adj) => {
match adj {
ty::AdjustReifyFnPointer(_def_id) => {
// FIXME(#19925) once fn item types are
// zero-sized, we'll need to do something here
Some(ty::AdjustReifyFnPointer(_def_id)) => {
// FIXME(#19925) once fn item types are
// zero-sized, we'll need to do something here
}
Some(ty::AdjustDerefRef(adj)) => {
let mut ty = ety;
// Save the last autoderef in case we can avoid it.
if adj.autoderefs > 0 {
for _ in 0..adj.autoderefs-1 {
let (dv, dt) = const_deref(cx, llconst, ty);
llconst = dv;
ty = dt;
}
ty::AdjustDerefRef(ref adj) => {
let mut ty = ety;
// Save the last autoderef in case we can avoid it.
if adj.autoderefs > 0 {
for _ in 0..adj.autoderefs-1 {
let (dv, dt) = const_deref(cx, llconst, ty, false);
llconst = dv;
ty = dt;
}
}
}
match adj.autoref {
None => {
let (dv, dt) = const_deref(cx, llconst, ty, false);
llconst = dv;
let second_autoref = match adj.autoref {
None => {
let (dv, dt) = const_deref(cx, llconst, ty);
llconst = dv;
// If we derefed a fat pointer then we will have an
// open type here. So we need to update the type with
// the one returned from const_deref.
ety_adjusted = dt;
}
Some(ref autoref) => {
match *autoref {
ty::AutoUnsafe(_, None) |
ty::AutoPtr(ty::ReStatic, _, None) => {
// Don't copy data to do a deref+ref
// (i.e., skip the last auto-deref).
if adj.autoderefs == 0 {
llconst = const_addr_of(cx, llconst, ast::MutImmutable);
}
}
ty::AutoPtr(ty::ReStatic, _, Some(box ty::AutoUnsize(..))) => {
if adj.autoderefs > 0 {
// Seeing as we are deref'ing here and take a reference
// again to make the pointer part of the far pointer below,
// we just skip the whole thing. We still need the type
// though. This works even if we don't need to deref
// because of byref semantics. Note that this is not just
// an optimisation, it is necessary for mutable vectors to
// work properly.
let (_, dt) = const_deref(cx, llconst, ty, false);
ty = dt;
} else {
llconst = const_addr_of(cx, llconst, ast::MutImmutable)
}
match ty.sty {
ty::ty_vec(unit_ty, Some(len)) => {
let llunitty = type_of::type_of(cx, unit_ty);
let llptr = ptrcast(llconst, llunitty.ptr_to());
let prev_const = cx.const_globals().borrow_mut()
.insert(llptr as int, llconst);
assert!(prev_const.is_none() ||
prev_const == Some(llconst));
assert_eq!(abi::FAT_PTR_ADDR, 0);
assert_eq!(abi::FAT_PTR_EXTRA, 1);
llconst = C_struct(cx, &[
llptr,
C_uint(cx, len)
], false);
}
_ => cx.sess().span_bug(e.span,
&format!("unimplemented type in const unsize: {}",
ty_to_string(cx.tcx(), ty))[])
}
}
_ => {
cx.sess()
.span_bug(e.span,
&format!("unimplemented const \
autoref {:?}",
autoref)[])
// If we derefed a fat pointer then we will have an
// open type here. So we need to update the type with
// the one returned from const_deref.
ety_adjusted = dt;
None
}
Some(ty::AutoUnsafe(_, opt_autoref)) |
Some(ty::AutoPtr(_, _, opt_autoref)) => {
if adj.autoderefs == 0 {
// Don't copy data to do a deref+ref
// (i.e., skip the last auto-deref).
llconst = addr_of(cx, llconst, "autoref", e.id);
} else {
// Seeing as we are deref'ing here and taking a reference
// again to make the pointer part of the fat pointer below,
// we just skip the whole thing. We still need the type
// though. This works even if we don't need to deref
// because of byref semantics. Note that this is not just
// an optimisation, it is necessary for mutable vectors to
// work properly.
ty = match ty::deref(ty, true) {
Some(mt) => {
if type_is_sized(cx.tcx(), mt.ty) {
mt.ty
} else {
// Derefing a fat pointer does not change the representation,
// just the type to ty_open.
ty::mk_open(cx.tcx(), mt.ty)
}
}
None => {
cx.sess().bug(&format!("unexpected dereferenceable type {}",
ty_to_string(cx.tcx(), ty))[])
}
}
}
opt_autoref
}
Some(autoref) => {
cx.sess().span_bug(e.span,
&format!("unimplemented const first autoref {:?}", autoref)[])
}
};
match second_autoref {
None => {}
Some(box ty::AutoUnsafe(_, None)) |
Some(box ty::AutoPtr(_, _, None)) => {
llconst = addr_of(cx, llconst, "autoref", e.id);
}
Some(box ty::AutoUnsize(ref k)) => {
let unsized_ty = ty::unsize_ty(cx.tcx(), ty, k, e.span);
let info = expr::unsized_info(cx, k, e.id, ty, param_substs,
|t| ty::mk_imm_rptr(cx.tcx(), cx.tcx().mk_region(ty::ReStatic), t));
let base = ptrcast(llconst, type_of::type_of(cx, unsized_ty).ptr_to());
let prev_const = cx.const_unsized().borrow_mut()
.insert(base, llconst);
assert!(prev_const.is_none() || prev_const == Some(llconst));
assert_eq!(abi::FAT_PTR_ADDR, 0);
assert_eq!(abi::FAT_PTR_EXTRA, 1);
llconst = C_struct(cx, &[base, info], false);
}
Some(autoref) => {
cx.sess().span_bug(e.span,
&format!("unimplemented const second autoref {:?}", autoref)[])
}
}
}
}
None => {}
};
let llty = type_of::sizing_type_of(cx, ety_adjusted);
let csize = machine::llsize_of_alloc(cx, val_ty(llconst));
@ -282,36 +350,42 @@ pub fn const_expr<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, e: &ast::Expr)
llvm::LLVMDumpValue(C_undef(llty));
}
cx.sess().bug(&format!("const {} of type {} has size {} instead of {}",
e.repr(cx.tcx()), ty_to_string(cx.tcx(), ety),
e.repr(cx.tcx()), ty_to_string(cx.tcx(), ety_adjusted),
csize, tsize)[]);
}
(llconst, ety_adjusted)
}
// the bool returned is whether this expression can be inlined into other crates
// if it's assigned to a static.
fn const_expr_unadjusted(cx: &CrateContext, e: &ast::Expr) -> ValueRef {
fn const_expr_unadjusted<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
e: &ast::Expr,
ety: Ty<'tcx>,
param_substs: &'tcx Substs<'tcx>) -> ValueRef {
let map_list = |exprs: &[P<ast::Expr>]| {
exprs.iter().map(|e| const_expr(cx, &**e).0)
exprs.iter().map(|e| const_expr(cx, &**e, param_substs).0)
.fold(Vec::new(), |mut l, val| { l.push(val); l })
};
unsafe {
let _icx = push_ctxt("const_expr");
return match e.node {
ast::ExprLit(ref lit) => {
consts::const_lit(cx, e, &**lit)
const_lit(cx, e, &**lit)
}
ast::ExprBinary(b, ref e1, ref e2) => {
let (te1, _) = const_expr(cx, &**e1);
let (te2, _) = const_expr(cx, &**e2);
let te2 = base::cast_shift_const_rhs(b, te1, te2);
/* Neither type is bottom, and we expect them to be unified
* already, so the following is safe. */
let ty = ty::expr_ty(cx.tcx(), &**e1);
let is_float = ty::type_is_fp(ty);
let signed = ty::type_is_signed(ty);
let (te1, ty) = const_expr(cx, &**e1, param_substs);
let is_simd = ty::type_is_simd(cx.tcx(), ty);
let intype = if is_simd {
ty::simd_type(cx.tcx(), ty)
} else {
ty
};
let is_float = ty::type_is_fp(intype);
let signed = ty::type_is_signed(intype);
let (te2, _) = const_expr(cx, &**e2, param_substs);
let te2 = base::cast_shift_const_rhs(b, te1, te2);
return match b.node {
ast::BiAdd => {
if is_float { llvm::LLVMConstFAdd(te1, te2) }
@ -345,52 +419,30 @@ fn const_expr_unadjusted(cx: &CrateContext, e: &ast::Expr) -> ValueRef {
if signed { llvm::LLVMConstAShr(te1, te2) }
else { llvm::LLVMConstLShr(te1, te2) }
}
ast::BiEq => {
if is_float { ConstFCmp(RealOEQ, te1, te2) }
else { ConstICmp(IntEQ, te1, te2) }
},
ast::BiLt => {
if is_float { ConstFCmp(RealOLT, te1, te2) }
else {
if signed { ConstICmp(IntSLT, te1, te2) }
else { ConstICmp(IntULT, te1, te2) }
ast::BiEq | ast::BiNe | ast::BiLt | ast::BiLe | ast::BiGt | ast::BiGe => {
if is_float {
let cmp = base::bin_op_to_fcmp_predicate(cx, b.node);
ConstFCmp(cmp, te1, te2)
} else {
let cmp = base::bin_op_to_icmp_predicate(cx, b.node, signed);
let bool_val = ConstICmp(cmp, te1, te2);
if is_simd {
// LLVM outputs an `< size x i1 >`, so we need to perform
// a sign extension to get the correctly sized type.
llvm::LLVMConstIntCast(bool_val, val_ty(te1).to_ref(), True)
} else {
bool_val
}
}
},
ast::BiLe => {
if is_float { ConstFCmp(RealOLE, te1, te2) }
else {
if signed { ConstICmp(IntSLE, te1, te2) }
else { ConstICmp(IntULE, te1, te2) }
}
},
ast::BiNe => {
if is_float { ConstFCmp(RealONE, te1, te2) }
else { ConstICmp(IntNE, te1, te2) }
},
ast::BiGe => {
if is_float { ConstFCmp(RealOGE, te1, te2) }
else {
if signed { ConstICmp(IntSGE, te1, te2) }
else { ConstICmp(IntUGE, te1, te2) }
}
},
ast::BiGt => {
if is_float { ConstFCmp(RealOGT, te1, te2) }
else {
if signed { ConstICmp(IntSGT, te1, te2) }
else { ConstICmp(IntUGT, te1, te2) }
}
},
}
}
},
ast::ExprUnary(u, ref e) => {
let (te, _) = const_expr(cx, &**e);
let ty = ty::expr_ty(cx.tcx(), &**e);
let (te, ty) = const_expr(cx, &**e, param_substs);
let is_float = ty::type_is_fp(ty);
return match u {
ast::UnUniq | ast::UnDeref => {
let (dv, _dt) = const_deref(cx, te, ty, true);
dv
const_deref(cx, te, ty).0
}
ast::UnNot => llvm::LLVMConstNot(te),
ast::UnNeg => {
@ -400,7 +452,7 @@ fn const_expr_unadjusted(cx: &CrateContext, e: &ast::Expr) -> ValueRef {
}
}
ast::ExprField(ref base, field) => {
let (bv, bt) = const_expr(cx, &**base);
let (bv, bt) = const_expr(cx, &**base, param_substs);
let brepr = adt::represent_type(cx, bt);
expr::with_field_tys(cx.tcx(), bt, None, |discr, field_tys| {
let ix = ty::field_idx_strict(cx.tcx(), field.node.name, field_tys);
@ -408,7 +460,7 @@ fn const_expr_unadjusted(cx: &CrateContext, e: &ast::Expr) -> ValueRef {
})
}
ast::ExprTupField(ref base, idx) => {
let (bv, bt) = const_expr(cx, &**base);
let (bv, bt) = const_expr(cx, &**base, param_substs);
let brepr = adt::represent_type(cx, bt);
expr::with_field_tys(cx.tcx(), bt, None, |discr, _| {
adt::const_get_field(cx, &*brepr, bv, discr, idx.node)
@ -416,7 +468,7 @@ fn const_expr_unadjusted(cx: &CrateContext, e: &ast::Expr) -> ValueRef {
}
ast::ExprIndex(ref base, ref index) => {
let (bv, bt) = const_expr(cx, &**base);
let (bv, bt) = const_expr(cx, &**base, param_substs);
let iv = match const_eval::eval_const_expr(cx.tcx(), &**index) {
const_eval::const_int(i) => i as u64,
const_eval::const_uint(u) => u,
@ -470,9 +522,11 @@ fn const_expr_unadjusted(cx: &CrateContext, e: &ast::Expr) -> ValueRef {
const_get_elt(cx, arr, &[iv as c_uint])
}
ast::ExprCast(ref base, _) => {
let ety = ty::expr_ty(cx.tcx(), e);
let llty = type_of::type_of(cx, ety);
let (v, basety) = const_expr(cx, &**base);
let (v, basety) = const_expr(cx, &**base, param_substs);
if expr::cast_is_noop(basety, ety) {
return v;
}
return match (expr::cast_type_kind(cx.tcx(), basety),
expr::cast_type_kind(cx.tcx(), ety)) {
@ -523,7 +577,7 @@ fn const_expr_unadjusted(cx: &CrateContext, e: &ast::Expr) -> ValueRef {
}
}
}
ast::ExprAddrOf(mutbl, ref sub) => {
ast::ExprAddrOf(ast::MutImmutable, ref sub) => {
// If this is the address of some static, then we need to return
// the actual address of the static itself (short circuit the rest
// of const eval).
@ -531,41 +585,48 @@ fn const_expr_unadjusted(cx: &CrateContext, e: &ast::Expr) -> ValueRef {
loop {
match cur.node {
ast::ExprParen(ref sub) => cur = sub,
ast::ExprBlock(ref blk) => {
if let Some(ref sub) = blk.expr {
cur = sub;
} else {
break;
}
}
_ => break,
}
}
let opt_def = cx.tcx().def_map.borrow().get(&cur.id).cloned();
if let Some(def::DefStatic(def_id, _)) = opt_def {
let ty = ty::expr_ty(cx.tcx(), e);
return get_static_val(cx, def_id, ty);
return get_static_val(cx, def_id, ety);
}
// If this isn't the address of a static, then keep going through
// normal constant evaluation.
let (e, _) = const_expr(cx, &**sub);
const_addr_of(cx, e, mutbl)
let (v, _) = const_expr(cx, &**sub, param_substs);
addr_of(cx, v, "ref", e.id)
}
ast::ExprAddrOf(ast::MutMutable, ref sub) => {
let (v, _) = const_expr(cx, &**sub, param_substs);
addr_of_mut(cx, v, "ref_mut_slice", e.id)
}
ast::ExprTup(ref es) => {
let ety = ty::expr_ty(cx.tcx(), e);
let repr = adt::represent_type(cx, ety);
let vals = map_list(&es[]);
adt::trans_const(cx, &*repr, 0, &vals[])
}
ast::ExprStruct(_, ref fs, ref base_opt) => {
let ety = ty::expr_ty(cx.tcx(), e);
let repr = adt::represent_type(cx, ety);
let tcx = cx.tcx();
let base_val = match *base_opt {
Some(ref base) => Some(const_expr(cx, &**base)),
Some(ref base) => Some(const_expr(cx, &**base, param_substs)),
None => None
};
expr::with_field_tys(tcx, ety, Some(e.id), |discr, field_tys| {
expr::with_field_tys(cx.tcx(), ety, Some(e.id), |discr, field_tys| {
let cs = field_tys.iter().enumerate()
.map(|(ix, &field_ty)| {
match fs.iter().find(|f| field_ty.name == f.ident.node.name) {
Some(ref f) => const_expr(cx, &*f.expr).0,
Some(ref f) => const_expr(cx, &*f.expr, param_substs).0,
None => {
match base_val {
Some((bv, _)) => {
@ -580,23 +641,36 @@ fn const_expr_unadjusted(cx: &CrateContext, e: &ast::Expr) -> ValueRef {
}
}
}).collect::<Vec<_>>();
adt::trans_const(cx, &*repr, discr, &cs[])
if ty::type_is_simd(cx.tcx(), ety) {
C_vector(&cs[])
} else {
adt::trans_const(cx, &*repr, discr, &cs[])
}
})
}
ast::ExprVec(ref es) => {
const_vec(cx, e, es).0
let unit_ty = ty::sequence_element_type(cx.tcx(), ety);
let llunitty = type_of::type_of(cx, unit_ty);
let vs = es.iter().map(|e| const_expr(cx, &**e, param_substs).0)
.collect::<Vec<_>>();
// If the vector contains enums, an LLVM array won't work.
if vs.iter().any(|vi| val_ty(*vi) != llunitty) {
C_struct(cx, &vs[], false)
} else {
C_array(llunitty, &vs[])
}
}
ast::ExprRepeat(ref elem, ref count) => {
let vec_ty = ty::expr_ty(cx.tcx(), e);
let unit_ty = ty::sequence_element_type(cx.tcx(), vec_ty);
let unit_ty = ty::sequence_element_type(cx.tcx(), ety);
let llunitty = type_of::type_of(cx, unit_ty);
let n = match const_eval::eval_const_expr(cx.tcx(), &**count) {
const_eval::const_int(i) => i as uint,
const_eval::const_uint(i) => i as uint,
_ => cx.sess().span_bug(count.span, "count must be integral const expression.")
};
let vs: Vec<_> = repeat(const_expr(cx, &**elem).0).take(n).collect();
if vs.iter().any(|vi| val_ty(*vi) != llunitty) {
let unit_val = const_expr(cx, &**elem, param_substs).0;
let vs: Vec<_> = repeat(unit_val).take(n).collect();
if val_ty(unit_val) != llunitty {
C_struct(cx, &vs[], false)
} else {
C_array(llunitty, &vs[])
@ -606,10 +680,10 @@ fn const_expr_unadjusted(cx: &CrateContext, e: &ast::Expr) -> ValueRef {
let def = cx.tcx().def_map.borrow()[e.id];
match def {
def::DefFn(..) | def::DefStaticMethod(..) | def::DefMethod(..) => {
expr::trans_def_fn_unadjusted(cx, e, def, &Substs::trans_empty()).val
expr::trans_def_fn_unadjusted(cx, e, def, param_substs).val
}
def::DefConst(def_id) => {
get_const_val(cx, def_id)
const_deref_ptr(cx, get_const_val(cx, def_id, e))
}
def::DefVariant(enum_did, variant_did, _) => {
let vinfo = ty::enum_variant_with_id(cx.tcx(),
@ -617,19 +691,17 @@ fn const_expr_unadjusted(cx: &CrateContext, e: &ast::Expr) -> ValueRef {
variant_did);
if vinfo.args.len() > 0 {
// N-ary variant.
expr::trans_def_fn_unadjusted(cx, e, def, &Substs::trans_empty()).val
expr::trans_def_fn_unadjusted(cx, e, def, param_substs).val
} else {
// Nullary variant.
let ety = ty::expr_ty(cx.tcx(), e);
let repr = adt::represent_type(cx, ety);
adt::trans_const(cx, &*repr, vinfo.disr_val, &[])
}
}
def::DefStruct(_) => {
let ety = ty::expr_ty(cx.tcx(), e);
if let ty::ty_bare_fn(..) = ety.sty {
// Tuple struct.
expr::trans_def_fn_unadjusted(cx, e, def, &Substs::trans_empty()).val
expr::trans_def_fn_unadjusted(cx, e, def, param_substs).val
} else {
// Unit struct.
C_null(type_of::type_of(cx, ety))
@ -643,20 +715,21 @@ fn const_expr_unadjusted(cx: &CrateContext, e: &ast::Expr) -> ValueRef {
}
ast::ExprCall(ref callee, ref args) => {
let opt_def = cx.tcx().def_map.borrow().get(&callee.id).cloned();
let arg_vals = map_list(&args[]);
match opt_def {
Some(def::DefStruct(_)) => {
let ety = ty::expr_ty(cx.tcx(), e);
let repr = adt::represent_type(cx, ety);
let arg_vals = map_list(&args[]);
adt::trans_const(cx, &*repr, 0, &arg_vals[])
if ty::type_is_simd(cx.tcx(), ety) {
C_vector(&arg_vals[])
} else {
let repr = adt::represent_type(cx, ety);
adt::trans_const(cx, &*repr, 0, &arg_vals[])
}
}
Some(def::DefVariant(enum_did, variant_did, _)) => {
let ety = ty::expr_ty(cx.tcx(), e);
let repr = adt::represent_type(cx, ety);
let vinfo = ty::enum_variant_with_id(cx.tcx(),
enum_did,
variant_did);
let arg_vals = map_list(&args[]);
adt::trans_const(cx,
&*repr,
vinfo.disr_val,
@ -665,13 +738,19 @@ fn const_expr_unadjusted(cx: &CrateContext, e: &ast::Expr) -> ValueRef {
_ => cx.sess().span_bug(e.span, "expected a struct or variant def")
}
}
ast::ExprParen(ref e) => const_expr(cx, &**e).0,
ast::ExprParen(ref e) => const_expr(cx, &**e, param_substs).0,
ast::ExprBlock(ref block) => {
match block.expr {
Some(ref expr) => const_expr(cx, &**expr).0,
Some(ref expr) => const_expr(cx, &**expr, param_substs).0,
None => C_nil(cx)
}
}
ast::ExprClosure(_, ref decl, ref body) => {
closure::trans_closure_expr(closure::Dest::Ignore(cx),
&**decl, &**body, e.id,
param_substs);
C_null(type_of::type_of(cx, ety))
}
_ => cx.sess().span_bug(e.span,
"bad constant expression type in consts::const_expr")
};

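A minimal user-level sketch (illustrative, assuming the behavior described in this file) of the constants this machinery emits as cached internal globals; borrows inside a const, including unsizing to a slice, reuse one global per (expression, substitutions) pair.

    const GREETING: &'static str = "hello";
    const PRIMES: &'static [u32] = &[2, 3, 5, 7]; // `&[u32; 4]` unsized to `&[u32]`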
View File

@ -22,6 +22,7 @@ use trans::common::{ExternMap,tydesc_info,BuilderRef_res};
use trans::debuginfo;
use trans::monomorphize::MonoId;
use trans::type_::{Type, TypeNames};
use middle::subst::Substs;
use middle::ty::{self, Ty};
use session::config::NoDebugInfo;
use session::Session;
@ -105,17 +106,20 @@ pub struct LocalCrateContext<'tcx> {
const_cstr_cache: RefCell<FnvHashMap<InternedString, ValueRef>>,
/// Reverse-direction for const ptrs cast from globals.
/// Key is an int, cast from a ValueRef holding a *T,
/// Key is a ValueRef holding a *T,
/// Val is a ValueRef holding a *[T].
///
/// Needed because LLVM loses pointer->pointee association
/// when we ptrcast, and we have to ptrcast during translation
/// of a [T] const because we form a slice, a [*T,int] pair, not
/// a pointer to an LLVM array type.
const_globals: RefCell<FnvHashMap<int, ValueRef>>,
/// of a [T] const because we form a slice, a (*T,usize) pair, not
/// a pointer to an LLVM array type. Similar for trait objects.
const_unsized: RefCell<FnvHashMap<ValueRef, ValueRef>>,
/// Cache of emitted const globals (value -> global)
const_globals: RefCell<FnvHashMap<ValueRef, ValueRef>>,
/// Cache of emitted const values
const_values: RefCell<NodeMap<ValueRef>>,
const_values: RefCell<FnvHashMap<(ast::NodeId, &'tcx Substs<'tcx>), ValueRef>>,
/// Cache of emitted static values
static_values: RefCell<NodeMap<ValueRef>>,
@ -400,8 +404,9 @@ impl<'tcx> LocalCrateContext<'tcx> {
monomorphizing: RefCell::new(DefIdMap()),
vtables: RefCell::new(FnvHashMap()),
const_cstr_cache: RefCell::new(FnvHashMap()),
const_unsized: RefCell::new(FnvHashMap()),
const_globals: RefCell::new(FnvHashMap()),
const_values: RefCell::new(NodeMap()),
const_values: RefCell::new(FnvHashMap()),
static_values: RefCell::new(NodeMap()),
extern_const_values: RefCell::new(DefIdMap()),
impl_method_cache: RefCell::new(FnvHashMap()),
@ -615,11 +620,16 @@ impl<'b, 'tcx> CrateContext<'b, 'tcx> {
&self.local.const_cstr_cache
}
pub fn const_globals<'a>(&'a self) -> &'a RefCell<FnvHashMap<int, ValueRef>> {
pub fn const_unsized<'a>(&'a self) -> &'a RefCell<FnvHashMap<ValueRef, ValueRef>> {
&self.local.const_unsized
}
pub fn const_globals<'a>(&'a self) -> &'a RefCell<FnvHashMap<ValueRef, ValueRef>> {
&self.local.const_globals
}
pub fn const_values<'a>(&'a self) -> &'a RefCell<NodeMap<ValueRef>> {
pub fn const_values<'a>(&'a self) -> &'a RefCell<FnvHashMap<(ast::NodeId, &'tcx Substs<'tcx>),
ValueRef>> {
&self.local.const_values
}

View File

@ -372,7 +372,8 @@ pub fn trans_fail<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
let filename = C_str_slice(ccx, filename);
let line = C_uint(ccx, loc.line);
let expr_file_line_const = C_struct(ccx, &[v_str, filename, line], false);
let expr_file_line = consts::const_addr_of(ccx, expr_file_line_const, ast::MutImmutable);
let expr_file_line = consts::addr_of(ccx, expr_file_line_const,
"panic_loc", call_info.id);
let args = vec!(expr_file_line);
let did = langcall(bcx, Some(call_info.span), "", PanicFnLangItem);
let bcx = callee::trans_lang_call(bcx,
@ -400,7 +401,8 @@ pub fn trans_fail_bounds_check<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
let filename = C_str_slice(ccx, filename);
let line = C_uint(ccx, loc.line);
let file_line_const = C_struct(ccx, &[filename, line], false);
let file_line = consts::const_addr_of(ccx, file_line_const, ast::MutImmutable);
let file_line = consts::addr_of(ccx, file_line_const,
"panic_bounds_check_loc", call_info.id);
let args = vec!(file_line, index, len);
let did = langcall(bcx, Some(call_info.span), "", PanicBoundsCheckFnLangItem);
let bcx = callee::trans_lang_call(bcx,

View File

@ -54,6 +54,7 @@ use self::lazy_binop_ty::*;
use back::abi;
use llvm::{self, ValueRef};
use middle::check_const;
use middle::def;
use middle::mem_categorization::Typer;
use middle::subst::{self, Substs};
@ -68,7 +69,6 @@ use trans::glue;
use trans::machine;
use trans::meth;
use trans::monomorphize;
use trans::inline;
use trans::tvec;
use trans::type_of;
use middle::ty::{struct_fields, tup_fields};
@ -84,8 +84,9 @@ use trans::type_::Type;
use syntax::{ast, ast_util, codemap};
use syntax::ptr::P;
use syntax::parse::token;
use std::rc::Rc;
use std::iter::repeat;
use std::mem;
use std::rc::Rc;
// Destinations
@ -115,11 +116,56 @@ pub fn trans_into<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
-> Block<'blk, 'tcx> {
let mut bcx = bcx;
debuginfo::set_source_location(bcx.fcx, expr.id, expr.span);
if bcx.tcx().adjustments.borrow().contains_key(&expr.id) {
// use trans, which may be less efficient but
// which will perform the adjustments:
let datum = unpack_datum!(bcx, trans(bcx, expr));
return datum.store_to_dest(bcx, dest, expr.id)
return datum.store_to_dest(bcx, dest, expr.id);
}
let qualif = bcx.tcx().const_qualif_map.borrow()[expr.id];
if !qualif.intersects(check_const::NOT_CONST | check_const::NEEDS_DROP) {
if !qualif.intersects(check_const::PREFER_IN_PLACE) {
if let SaveIn(lldest) = dest {
let global = consts::get_const_expr_as_global(bcx.ccx(), expr, qualif,
bcx.fcx.param_substs);
// Cast pointer to destination, because constants
// have different types.
let lldest = PointerCast(bcx, lldest, val_ty(global));
memcpy_ty(bcx, lldest, global, expr_ty_adjusted(bcx, expr));
}
// Don't do anything in the Ignore case; consts don't need drop.
return bcx;
} else {
// The only way we're going to see a `const` at this point is if
// it prefers in-place instantiation, likely because it contains
// `[x; N]` somewhere within.
match expr.node {
ast::ExprPath(_) | ast::ExprQPath(_) => {
match bcx.def(expr.id) {
def::DefConst(did) => {
let expr = consts::get_const_expr(bcx.ccx(), did, expr);
// Temporarily get cleanup scopes out of the way,
// as they require sub-expressions to be contained
// inside the current AST scope.
// These should record no cleanups anyway; `const`
// can't have destructors.
let scopes = mem::replace(&mut *bcx.fcx.scopes.borrow_mut(),
vec![]);
bcx = trans_into(bcx, expr, dest);
let scopes = mem::replace(&mut *bcx.fcx.scopes.borrow_mut(),
scopes);
assert!(scopes.is_empty());
return bcx;
}
_ => {}
}
}
_ => {}
}
}
}
debug!("trans_into() expr={}", expr.repr(bcx.tcx()));
@ -130,7 +176,6 @@ pub fn trans_into<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
false);
bcx.fcx.push_ast_cleanup_scope(cleanup_debug_loc);
debuginfo::set_source_location(bcx.fcx, expr.id, expr.span);
let kind = ty::expr_kind(bcx.tcx(), expr);
bcx = match kind {
ty::LvalueExpr | ty::RvalueDatumExpr => {
@ -157,14 +202,70 @@ pub fn trans<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
let mut bcx = bcx;
let fcx = bcx.fcx;
let qualif = bcx.tcx().const_qualif_map.borrow()[expr.id];
let adjusted_global = !qualif.intersects(check_const::NON_STATIC_BORROWS);
let global = if !qualif.intersects(check_const::NOT_CONST | check_const::NEEDS_DROP) {
let global = consts::get_const_expr_as_global(bcx.ccx(), expr, qualif,
bcx.fcx.param_substs);
if qualif.intersects(check_const::HAS_STATIC_BORROWS) {
// Is borrowed as 'static, must return lvalue.
// Cast pointer to global, because constants have different types.
let const_ty = expr_ty_adjusted(bcx, expr);
let llty = type_of::type_of(bcx.ccx(), const_ty);
let global = PointerCast(bcx, global, llty.ptr_to());
let datum = Datum::new(global, const_ty, Lvalue);
return DatumBlock::new(bcx, datum.to_expr_datum());
}
// Otherwise, keep around and perform adjustments, if needed.
let const_ty = if adjusted_global {
expr_ty_adjusted(bcx, expr)
} else {
expr_ty(bcx, expr)
};
// This could use a better heuristic.
Some(if type_is_immediate(bcx.ccx(), const_ty) {
// Cast pointer to global, because constants have different types.
let llty = type_of::type_of(bcx.ccx(), const_ty);
let global = PointerCast(bcx, global, llty.ptr_to());
// Maybe just get the value directly, instead of loading it?
immediate_rvalue(load_ty(bcx, global, const_ty), const_ty)
} else {
let llty = type_of::type_of(bcx.ccx(), const_ty);
// HACK(eddyb) get around issues with lifetime intrinsics.
let scratch = alloca_no_lifetime(bcx, llty, "const");
let lldest = if !ty::type_is_structural(const_ty) {
// Cast pointer to slot, because constants have different types.
PointerCast(bcx, scratch, val_ty(global))
} else {
// In this case, memcpy_ty calls llvm.memcpy after casting both
// source and destination to i8*, so we don't need any casts.
scratch
};
memcpy_ty(bcx, lldest, global, const_ty);
Datum::new(scratch, const_ty, Rvalue::new(ByRef))
})
} else {
None
};
let cleanup_debug_loc = debuginfo::get_cleanup_debug_loc_for_ast_node(bcx.ccx(),
expr.id,
expr.span,
false);
fcx.push_ast_cleanup_scope(cleanup_debug_loc);
let datum = unpack_datum!(bcx, trans_unadjusted(bcx, expr));
let datum = unpack_datum!(bcx, apply_adjustments(bcx, expr, datum));
let datum = match global {
Some(rvalue) => rvalue.to_expr_datum(),
None => unpack_datum!(bcx, trans_unadjusted(bcx, expr))
};
let datum = if adjusted_global {
datum // trans::consts already performed adjustments.
} else {
unpack_datum!(bcx, apply_adjustments(bcx, expr, datum))
};
bcx = fcx.pop_and_trans_ast_cleanup_scope(bcx, expr.id);
return DatumBlock::new(bcx, datum);
}
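For the `HAS_STATIC_BORROWS` branch above, a small assumed sketch of what promotion looks like at the source level: a borrow of a constant rvalue is lifted into a global and handed back as an lvalue with the `'static` lifetime.

fn main() {
    // `&4` borrows a constant rvalue, so the reference can be promoted
    // to 'static instead of pointing at a stack temporary.
    let x: &'static i32 = &4;
    assert_eq!(*x, 4);
}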
@ -177,6 +278,54 @@ pub fn get_dataptr(bcx: Block, fat_ptr: ValueRef) -> ValueRef {
GEPi(bcx, fat_ptr, &[0, abi::FAT_PTR_ADDR])
}
// Retrieve the information we are losing (making dynamic) in an unsizing
// adjustment.
// When making a dtor, we need to do different things depending on the
// ownership of the object. mk_ty is a function for turning `unadjusted_ty`
// into a type to be destructed. If we want to end up with a Box pointer,
// then mk_ty should make a Box pointer (T -> Box<T>); if we want a
// borrowed reference, then it should be T -> &T.
pub fn unsized_info<'a, 'tcx, F>(ccx: &CrateContext<'a, 'tcx>,
kind: &ty::UnsizeKind<'tcx>,
id: ast::NodeId,
unadjusted_ty: Ty<'tcx>,
param_substs: &'tcx subst::Substs<'tcx>,
mk_ty: F) -> ValueRef where
F: FnOnce(Ty<'tcx>) -> Ty<'tcx>,
{
// FIXME(#19596) workaround: `|t| t` causes monomorphization recursion
fn identity<T>(t: T) -> T { t }
debug!("unsized_info(kind={:?}, id={}, unadjusted_ty={})",
kind, id, unadjusted_ty.repr(ccx.tcx()));
match kind {
&ty::UnsizeLength(len) => C_uint(ccx, len),
&ty::UnsizeStruct(box ref k, tp_index) => match unadjusted_ty.sty {
ty::ty_struct(_, ref substs) => {
let ty_substs = substs.types.get_slice(subst::TypeSpace);
// The dtor for a field treats it like a value, so mk_ty
// should just be the identity function.
unsized_info(ccx, k, id, ty_substs[tp_index], param_substs, identity)
}
_ => ccx.sess().bug(&format!("UnsizeStruct with bad sty: {}",
unadjusted_ty.repr(ccx.tcx()))[])
},
&ty::UnsizeVtable(ty::TyTrait { ref principal, .. }, _) => {
// Note that we preserve binding levels here:
let substs = principal.0.substs.with_self_ty(unadjusted_ty).erase_regions();
let substs = ccx.tcx().mk_substs(substs);
let trait_ref = ty::Binder(Rc::new(ty::TraitRef { def_id: principal.def_id(),
substs: substs }));
let trait_ref = monomorphize::apply_param_substs(ccx.tcx(),
param_substs,
&trait_ref);
let box_ty = mk_ty(unadjusted_ty);
consts::ptrcast(meth::get_vtable(ccx, box_ty, trait_ref, param_substs),
Type::vtable_ptr(ccx))
}
}
}
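An assumed illustration of the "lost" information that `unsized_info` recomputes: unsizing an array reference needs the element count, and unsizing to a trait object needs a vtable pointer; either way it becomes the extra word of the resulting fat pointer.

trait Draw { fn draw(&self) {} }
impl Draw for i32 {}

fn main() {
    let array: &[i32; 3] = &[1, 2, 3];
    let slice: &[i32] = array; // UnsizeLength: the extra word is the length, 3
    let object: &Draw = &5i32; // UnsizeVtable: the extra word is i32's Draw vtable
    assert_eq!(slice.len(), 3);
    object.draw();
}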
/// Helper for trans that applies adjustments from `expr` to `datum`, which should be the unadjusted
/// translation of `expr`.
fn apply_adjustments<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
@ -262,13 +411,17 @@ fn apply_adjustments<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
let datum = match autoref {
&AutoPtr(_, _, ref a) | &AutoUnsafe(_, ref a) => {
debug!(" AutoPtr");
match a {
&Some(box ref a) => {
datum = unpack_datum!(bcx, apply_autoref(a, bcx, expr, datum));
}
&None => {}
if let &Some(box ref a) = a {
datum = unpack_datum!(bcx, apply_autoref(a, bcx, expr, datum));
}
if !type_is_sized(bcx.tcx(), datum.ty) {
// Arrange cleanup
let lval = unpack_datum!(bcx,
datum.to_lvalue_datum(bcx, "ref_fat_ptr", expr.id));
unpack_datum!(bcx, ref_fat_ptr(bcx, lval))
} else {
unpack_datum!(bcx, auto_ref(bcx, datum, expr))
}
unpack_datum!(bcx, ref_ptr(bcx, expr, datum))
}
&ty::AutoUnsize(ref k) => {
debug!(" AutoUnsize");
@ -288,139 +441,35 @@ fn apply_adjustments<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
DatumBlock::new(bcx, datum)
}
fn ref_ptr<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
expr: &ast::Expr,
datum: Datum<'tcx, Expr>)
-> DatumBlock<'blk, 'tcx, Expr> {
debug!("ref_ptr(expr={}, datum={})",
expr.repr(bcx.tcx()),
datum.to_string(bcx.ccx()));
if !type_is_sized(bcx.tcx(), datum.ty) {
debug!("Taking address of unsized type {}",
bcx.ty_to_string(datum.ty));
ref_fat_ptr(bcx, expr, datum)
} else {
debug!("Taking address of sized type {}",
bcx.ty_to_string(datum.ty));
auto_ref(bcx, datum, expr)
}
}
// Retrieve the information we are losing (making dynamic) in an unsizing
// adjustment.
// When making a dtor, we need to do different things depending on the
// ownership of the object. mk_ty is a function for turning `unadjusted_ty`
// into a type to be destructed. If we want to end up with a Box pointer,
// then mk_ty should make a Box pointer (T -> Box<T>); if we want a
// borrowed reference, then it should be T -> &T.
fn unsized_info<'blk, 'tcx, F>(bcx: Block<'blk, 'tcx>,
kind: &ty::UnsizeKind<'tcx>,
id: ast::NodeId,
unadjusted_ty: Ty<'tcx>,
mk_ty: F) -> ValueRef where
F: FnOnce(Ty<'tcx>) -> Ty<'tcx>,
{
// FIXME(#19596) workaround: `|t| t` causes monomorphization recursion
fn identity<T>(t: T) -> T { t }
debug!("unsized_info(kind={:?}, id={}, unadjusted_ty={})",
kind, id, unadjusted_ty.repr(bcx.tcx()));
match kind {
&ty::UnsizeLength(len) => C_uint(bcx.ccx(), len),
&ty::UnsizeStruct(box ref k, tp_index) => match unadjusted_ty.sty {
ty::ty_struct(_, ref substs) => {
let ty_substs = substs.types.get_slice(subst::TypeSpace);
// The dtor for a field treats it like a value, so mk_ty
// should just be the identity function.
unsized_info(bcx, k, id, ty_substs[tp_index], identity)
}
_ => bcx.sess().bug(&format!("UnsizeStruct with bad sty: {}",
bcx.ty_to_string(unadjusted_ty))[])
},
&ty::UnsizeVtable(ty::TyTrait { ref principal, .. }, _) => {
// Note that we preserve binding levels here:
let substs = principal.0.substs.with_self_ty(unadjusted_ty).erase_regions();
let substs = bcx.tcx().mk_substs(substs);
let trait_ref =
ty::Binder(Rc::new(ty::TraitRef { def_id: principal.def_id(),
substs: substs }));
let trait_ref = bcx.monomorphize(&trait_ref);
let box_ty = mk_ty(unadjusted_ty);
PointerCast(bcx,
meth::get_vtable(bcx, box_ty, trait_ref),
Type::vtable_ptr(bcx.ccx()))
}
}
}
fn unsize_expr<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
expr: &ast::Expr,
datum: Datum<'tcx, Expr>,
k: &ty::UnsizeKind<'tcx>)
-> DatumBlock<'blk, 'tcx, Expr> {
let mut bcx = bcx;
let tcx = bcx.tcx();
let datum_ty = datum.ty;
let unsized_ty = ty::unsize_ty(tcx, datum_ty, k, expr.span);
debug!("unsized_ty={}", unsized_ty.repr(bcx.tcx()));
let dest_ty = ty::mk_open(tcx, unsized_ty);
debug!("dest_ty={}", unsized_ty.repr(bcx.tcx()));
// Closures for extracting and manipulating the data and payload parts of
// the fat pointer.
let info = |bcx, _val| unsized_info(bcx,
k,
expr.id,
datum_ty,
|t| ty::mk_rptr(tcx,
tcx.mk_region(ty::ReStatic),
ty::mt{
ty: t,
mutbl: ast::MutImmutable
}));
match *k {
ty::UnsizeStruct(..) =>
into_fat_ptr(bcx, expr, datum, dest_ty, |bcx, val| {
PointerCast(bcx, val, type_of::type_of(bcx.ccx(), unsized_ty).ptr_to())
}, info),
ty::UnsizeLength(..) =>
into_fat_ptr(bcx, expr, datum, dest_ty, |bcx, val| {
GEPi(bcx, val, &[0, 0])
}, info),
ty::UnsizeVtable(..) =>
into_fat_ptr(bcx, expr, datum, dest_ty, |_bcx, val| {
PointerCast(bcx, val, Type::i8p(bcx.ccx()))
}, info),
}
}
fn ref_fat_ptr<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
expr: &ast::Expr,
datum: Datum<'tcx, Expr>)
-> DatumBlock<'blk, 'tcx, Expr> {
let tcx = bcx.tcx();
let dest_ty = ty::close_type(tcx, datum.ty);
let base = |bcx, val| Load(bcx, get_dataptr(bcx, val));
let len = |bcx, val| Load(bcx, get_len(bcx, val));
into_fat_ptr(bcx, expr, datum, dest_ty, base, len)
}
fn into_fat_ptr<'blk, 'tcx, F, G>(bcx: Block<'blk, 'tcx>,
expr: &ast::Expr,
datum: Datum<'tcx, Expr>,
dest_ty: Ty<'tcx>,
base: F,
info: G)
-> DatumBlock<'blk, 'tcx, Expr> where
F: FnOnce(Block<'blk, 'tcx>, ValueRef) -> ValueRef,
G: FnOnce(Block<'blk, 'tcx>, ValueRef) -> ValueRef,
{
let mut bcx = bcx;
let info = unsized_info(bcx.ccx(), k, expr.id, datum_ty, bcx.fcx.param_substs,
|t| ty::mk_imm_rptr(tcx, tcx.mk_region(ty::ReStatic), t));
// Arrange cleanup
let lval = unpack_datum!(bcx,
datum.to_lvalue_datum(bcx, "into_fat_ptr", expr.id));
let base = base(bcx, lval.val);
let info = info(bcx, lval.val);
// Compute the base pointer. This doesn't change the pointer value,
// but merely its type.
let base = match *k {
ty::UnsizeStruct(..) | ty::UnsizeVtable(..) => {
PointerCast(bcx, lval.val, type_of::type_of(bcx.ccx(), unsized_ty).ptr_to())
}
ty::UnsizeLength(..) => {
GEPi(bcx, lval.val, &[0u, 0u])
}
};
let scratch = rvalue_scratch_datum(bcx, dest_ty, "__fat_ptr");
Store(bcx, base, get_dataptr(bcx, scratch.val));
@ -490,7 +539,8 @@ fn apply_adjustments<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
let base = PointerCast(bcx, get_dataptr(bcx, scratch.val), llbox_ty.ptr_to());
bcx = datum.store_to(bcx, base);
let info = unsized_info(bcx, k, expr.id, unboxed_ty, |t| ty::mk_uniq(tcx, t));
let info = unsized_info(bcx.ccx(), k, expr.id, unboxed_ty, bcx.fcx.param_substs,
|t| ty::mk_uniq(tcx, t));
Store(bcx, info, get_len(bcx, scratch.val));
DatumBlock::new(bcx, scratch.to_expr_datum())
@ -847,53 +897,25 @@ fn trans_def<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
// pointer to that.
let const_ty = expr_ty(bcx, ref_expr);
fn get_val<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, did: ast::DefId,
const_ty: Ty<'tcx>) -> ValueRef {
// For external constants, we don't inline.
if did.krate == ast::LOCAL_CRATE {
// Case 1.
// For external constants, we don't inline.
let val = if did.krate == ast::LOCAL_CRATE {
// Case 1.
// The LLVM global has the type of its initializer,
// which may not be equal to the enum's type for
// non-C-like enums.
let val = base::get_item_val(bcx.ccx(), did.node);
let pty = type_of::type_of(bcx.ccx(), const_ty).ptr_to();
PointerCast(bcx, val, pty)
} else {
// Case 2.
base::get_extern_const(bcx.ccx(), did, const_ty)
}
}
let val = get_val(bcx, did, const_ty);
// The LLVM global has the type of its initializer,
// which may not be equal to the enum's type for
// non-C-like enums.
let val = base::get_item_val(bcx.ccx(), did.node);
let pty = type_of::type_of(bcx.ccx(), const_ty).ptr_to();
PointerCast(bcx, val, pty)
} else {
// Case 2.
base::get_extern_const(bcx.ccx(), did, const_ty)
};
DatumBlock::new(bcx, Datum::new(val, const_ty, LvalueExpr))
}
def::DefConst(did) => {
// First, inline any external constants into the local crate so we
// can be sure to get the LLVM value corresponding to it.
let did = inline::maybe_instantiate_inline(bcx.ccx(), did);
if did.krate != ast::LOCAL_CRATE {
bcx.tcx().sess.span_bug(ref_expr.span,
"cross crate constant could not \
be inlined");
}
let val = base::get_item_val(bcx.ccx(), did.node);
// Next, we need to create a ByRef rvalue datum to return. We can't
// use the normal .to_ref_datum() function because the type of
// `val` is not actually the same as `const_ty`.
//
// To get around this, we make a custom alloca slot with the
// appropriate type (const_ty), and then we cast it to a pointer of
// typeof(val), store the value, and then hand this slot over to
// the datum infrastructure.
let const_ty = expr_ty(bcx, ref_expr);
let llty = type_of::type_of(bcx.ccx(), const_ty);
let slot = alloca(bcx, llty, "const");
let pty = Type::from_ref(unsafe { llvm::LLVMTypeOf(val) }).ptr_to();
Store(bcx, val, PointerCast(bcx, slot, pty));
let datum = Datum::new(slot, const_ty, Rvalue::new(ByRef));
DatumBlock::new(bcx, datum.to_expr_datum())
def::DefConst(_) => {
bcx.sess().span_bug(ref_expr.span,
"constant expression should not reach expr::trans_def")
}
_ => {
DatumBlock::new(bcx, trans_local_var(bcx, def).to_expr_datum())
@ -1119,7 +1141,12 @@ fn trans_rvalue_dps_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
tvec::trans_fixed_vstore(bcx, expr, dest)
}
ast::ExprClosure(_, ref decl, ref body) => {
closure::trans_closure_expr(bcx, &**decl, &**body, expr.id, dest)
let dest = match dest {
SaveIn(lldest) => closure::Dest::SaveIn(bcx, lldest),
Ignore => closure::Dest::Ignore(bcx.ccx())
};
closure::trans_closure_expr(dest, &**decl, &**body, expr.id, bcx.fcx.param_substs)
.unwrap_or(bcx)
}
ast::ExprCall(ref f, ref args) => {
if bcx.tcx().is_method_call(expr.id) {
@ -1247,7 +1274,7 @@ fn trans_def_dps_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
pub fn trans_def_fn_unadjusted<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
ref_expr: &ast::Expr,
def: def::Def,
param_substs: &subst::Substs<'tcx>)
param_substs: &'tcx subst::Substs<'tcx>)
-> Datum<'tcx, Rvalue> {
let _icx = push_ctxt("trans_def_datum_unadjusted");
@ -1641,6 +1668,16 @@ fn trans_uniq_expr<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
immediate_rvalue_bcx(bcx, val, box_ty).to_expr_datumblock()
}
fn ref_fat_ptr<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
lval: Datum<'tcx, Lvalue>)
-> DatumBlock<'blk, 'tcx, Expr> {
let dest_ty = ty::close_type(bcx.tcx(), lval.ty);
let scratch = rvalue_scratch_datum(bcx, dest_ty, "__fat_ptr");
memcpy_ty(bcx, scratch.val, lval.val, scratch.ty);
DatumBlock::new(bcx, scratch.to_expr_datum())
}
fn trans_addr_of<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
expr: &ast::Expr,
subexpr: &ast::Expr)
@ -1651,18 +1688,7 @@ fn trans_addr_of<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
match sub_datum.ty.sty {
ty::ty_open(_) => {
// Opened DST value, close to a fat pointer
debug!("Closing fat pointer {}", bcx.ty_to_string(sub_datum.ty));
let scratch = rvalue_scratch_datum(bcx,
ty::close_type(bcx.tcx(), sub_datum.ty),
"fat_addr_of");
let base = Load(bcx, get_dataptr(bcx, sub_datum.val));
Store(bcx, base, get_dataptr(bcx, scratch.val));
let len = Load(bcx, get_len(bcx, sub_datum.val));
Store(bcx, len, get_len(bcx, scratch.val));
DatumBlock::new(bcx, scratch.to_expr_datum())
ref_fat_ptr(bcx, sub_datum)
}
_ => {
// Sized value, ref to a thin pointer
@ -1687,9 +1713,10 @@ fn trans_eager_binop<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
let tcx = bcx.tcx();
let is_simd = ty::type_is_simd(tcx, lhs_t);
let intype = {
if is_simd { ty::simd_type(tcx, lhs_t) }
else { lhs_t }
let intype = if is_simd {
ty::simd_type(tcx, lhs_t)
} else {
lhs_t
};
let is_float = ty::type_is_fp(intype);
let is_signed = ty::type_is_signed(intype);
@ -1766,24 +1793,10 @@ fn trans_eager_binop<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
}
}
ast::BiEq | ast::BiNe | ast::BiLt | ast::BiGe | ast::BiLe | ast::BiGt => {
if ty::type_is_scalar(rhs_t) {
unpack_result!(bcx,
base::compare_scalar_types(bcx,
lhs,
rhs,
rhs_t,
op.node,
binop_debug_loc))
} else if is_simd {
base::compare_simd_types(bcx,
lhs,
rhs,
intype,
ty::simd_size(tcx, lhs_t),
op.node,
binop_debug_loc)
if is_simd {
base::compare_simd_types(bcx, lhs, rhs, intype, op.node, binop_debug_loc)
} else {
bcx.tcx().sess.span_bug(binop_expr.span, "comparison operator unsupported for type")
base::compare_scalar_types(bcx, lhs, rhs, intype, op.node, binop_debug_loc)
}
}
_ => {
@ -1997,7 +2010,7 @@ pub fn cast_type_kind<'tcx>(tcx: &ty::ctxt<'tcx>, t: Ty<'tcx>) -> cast_kind {
}
}
fn cast_is_noop<'tcx>(t_in: Ty<'tcx>, t_out: Ty<'tcx>) -> bool {
pub fn cast_is_noop<'tcx>(t_in: Ty<'tcx>, t_out: Ty<'tcx>) -> bool {
match (ty::deref(t_in, true), ty::deref(t_out, true)) {
(Some(ty::mt{ ty: t_in, .. }), Some(ty::mt{ ty: t_out, .. })) => {
t_in == t_out

View File

@ -557,7 +557,7 @@ pub fn trans_rust_fn_with_foreign_abi<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
body: &ast::Block,
attrs: &[ast::Attribute],
llwrapfn: ValueRef,
param_substs: &Substs<'tcx>,
param_substs: &'tcx Substs<'tcx>,
id: ast::NodeId,
hash: Option<&str>) {
let _icx = push_ctxt("foreign::build_foreign_fn");
@ -577,7 +577,7 @@ pub fn trans_rust_fn_with_foreign_abi<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
fn build_rust_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
decl: &ast::FnDecl,
body: &ast::Block,
param_substs: &Substs<'tcx>,
param_substs: &'tcx Substs<'tcx>,
attrs: &[ast::Attribute],
id: ast::NodeId,
hash: Option<&str>)

View File

@ -558,12 +558,12 @@ fn make_generic_glue<'a, 'tcx, F>(ccx: &CrateContext<'a, 'tcx>,
let glue_name = format!("glue {} {}", name, ty_to_short_str(ccx.tcx(), t));
let _s = StatRecorder::new(ccx, glue_name);
let empty_param_substs = Substs::trans_empty();
let empty_substs = ccx.tcx().mk_substs(Substs::trans_empty());
let (arena, fcx): (TypedArena<_>, FunctionContext);
arena = TypedArena::new();
fcx = new_fn_ctxt(ccx, llfn, ast::DUMMY_NODE_ID, false,
ty::FnConverging(ty::mk_nil(ccx.tcx())),
&empty_param_substs, None, &arena);
empty_substs, None, &arena);
let bcx = init_function(&fcx, false, ty::FnConverging(ty::mk_nil(ccx.tcx())));

View File

@ -159,13 +159,14 @@ fn instantiate_inline(ccx: &CrateContext, fn_id: ast::DefId)
let unparameterized = impl_tpt.generics.types.is_empty() &&
mth.pe_generics().ty_params.is_empty();
let empty_substs = ccx.tcx().mk_substs(Substs::trans_empty());
if unparameterized {
let llfn = get_item_val(ccx, mth.id);
trans_fn(ccx,
&*mth.pe_fn_decl(),
&*mth.pe_body(),
llfn,
&Substs::trans_empty(),
empty_substs,
mth.id,
&[]);
// Use InternalLinkage so LLVM can optimize more

View File

@ -11,7 +11,7 @@
use arena::TypedArena;
use back::abi;
use back::link;
use llvm::{self, ValueRef, get_param};
use llvm::{ValueRef, get_param};
use metadata::csearch;
use middle::subst::Substs;
use middle::subst::VecPerParamSpace;
@ -23,6 +23,7 @@ use trans::callee::*;
use trans::callee;
use trans::cleanup;
use trans::common::*;
use trans::consts;
use trans::datum::*;
use trans::debuginfo::DebugLoc;
use trans::expr::{SaveIn, Ignore};
@ -36,7 +37,6 @@ use middle::ty::{self, Ty};
use middle::ty::MethodCall;
use util::ppaux::Repr;
use std::ffi::CString;
use std::rc::Rc;
use syntax::abi::{Rust, RustCall};
use syntax::parse::token;
@ -82,11 +82,12 @@ pub fn trans_impl(ccx: &CrateContext,
let trans_everywhere = attr::requests_inline(&method.attrs[]);
for (ref ccx, is_origin) in ccx.maybe_iter(trans_everywhere) {
let llfn = get_item_val(ccx, method.id);
let empty_substs = tcx.mk_substs(Substs::trans_empty());
trans_fn(ccx,
method.pe_fn_decl(),
method.pe_body(),
llfn,
&Substs::trans_empty(),
empty_substs,
method.id,
&[]);
update_linkage(ccx,
@ -174,7 +175,7 @@ pub fn trans_static_method_callee<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
method_id: ast::DefId,
trait_id: ast::DefId,
expr_id: ast::NodeId,
param_substs: &subst::Substs<'tcx>)
param_substs: &'tcx subst::Substs<'tcx>)
-> Datum<'tcx, Rvalue>
{
let _icx = push_ctxt("meth::trans_static_method_callee");
@ -599,7 +600,7 @@ pub fn trans_object_shim<'a, 'tcx>(
let sig = ty::erase_late_bound_regions(ccx.tcx(), &fty.sig);
let empty_substs = Substs::trans_empty();
let empty_substs = tcx.mk_substs(Substs::trans_empty());
let (block_arena, fcx): (TypedArena<_>, FunctionContext);
block_arena = TypedArena::new();
fcx = new_fn_ctxt(ccx,
@ -607,7 +608,7 @@ pub fn trans_object_shim<'a, 'tcx>(
ast::DUMMY_NODE_ID,
false,
sig.output,
&empty_substs,
empty_substs,
None,
&block_arena);
let mut bcx = init_function(&fcx, false, sig.output);
@ -689,19 +690,19 @@ pub fn trans_object_shim<'a, 'tcx>(
/// `trait_ref` would map `T:Trait`, but `box_ty` would be
/// `Foo<T>`. This `box_ty` is primarily used to encode the destructor.
/// This will hopefully change now that DST is underway.
pub fn get_vtable<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
box_ty: Ty<'tcx>,
trait_ref: ty::PolyTraitRef<'tcx>)
-> ValueRef
pub fn get_vtable<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
box_ty: Ty<'tcx>,
trait_ref: ty::PolyTraitRef<'tcx>,
param_substs: &'tcx subst::Substs<'tcx>)
-> ValueRef
{
debug!("get_vtable(box_ty={}, trait_ref={})",
box_ty.repr(bcx.tcx()),
trait_ref.repr(bcx.tcx()));
let tcx = bcx.tcx();
let ccx = bcx.ccx();
let tcx = ccx.tcx();
let _icx = push_ctxt("meth::get_vtable");
debug!("get_vtable(box_ty={}, trait_ref={})",
box_ty.repr(tcx),
trait_ref.repr(tcx));
// Check the cache.
let cache_key = (box_ty, trait_ref.clone());
match ccx.vtables().borrow().get(&cache_key) {
@ -711,9 +712,7 @@ pub fn get_vtable<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
// Not in the cache. Build it.
let methods = traits::supertraits(tcx, trait_ref.clone()).flat_map(|trait_ref| {
let vtable = fulfill_obligation(bcx.ccx(),
DUMMY_SP,
trait_ref.clone());
let vtable = fulfill_obligation(ccx, DUMMY_SP, trait_ref.clone());
match vtable {
traits::VtableBuiltin(_) => {
Vec::new().into_iter()
@ -723,83 +722,61 @@ pub fn get_vtable<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
impl_def_id: id,
substs,
nested: _ }) => {
emit_vtable_methods(bcx, id, substs).into_iter()
emit_vtable_methods(ccx, id, substs, param_substs).into_iter()
}
traits::VtableClosure(closure_def_id, substs) => {
let llfn = trans_fn_ref_with_substs(
bcx.ccx(),
ccx,
closure_def_id,
ExprId(0),
bcx.fcx.param_substs,
substs.clone()).val;
param_substs,
substs).val;
(vec!(llfn)).into_iter()
vec![llfn].into_iter()
}
traits::VtableFnPointer(bare_fn_ty) => {
let llfn = vec![trans_fn_pointer_shim(bcx.ccx(), bare_fn_ty)];
llfn.into_iter()
vec![trans_fn_pointer_shim(ccx, bare_fn_ty)].into_iter()
}
traits::VtableObject(ref data) => {
// this would imply that the Self type being erased is
// an object type; this cannot happen because we
// cannot cast an unsized type into a trait object
bcx.sess().bug(
tcx.sess.bug(
&format!("cannot get vtable for an object type: {}",
data.repr(bcx.tcx())));
data.repr(tcx)));
}
traits::VtableParam(..) => {
bcx.sess().bug(
tcx.sess.bug(
&format!("resolved vtable for {} to bad vtable {} in trans",
trait_ref.repr(bcx.tcx()),
vtable.repr(bcx.tcx()))[]);
trait_ref.repr(tcx),
vtable.repr(tcx))[]);
}
}
});
let size_ty = sizing_type_of(ccx, trait_ref.self_ty());
let size = machine::llsize_of_alloc(ccx, size_ty);
let ll_size = C_uint(ccx, size);
let align = align_of(ccx, trait_ref.self_ty());
let ll_align = C_uint(ccx, align);
// Generate a destructor for the vtable.
let drop_glue = glue::get_drop_glue(ccx, box_ty);
let vtable = make_vtable(ccx, drop_glue, ll_size, ll_align, methods);
let components: Vec<_> = vec![
// Generate a destructor for the vtable.
glue::get_drop_glue(ccx, box_ty),
C_uint(ccx, size),
C_uint(ccx, align)
].into_iter().chain(methods).collect();
let vtable = consts::addr_of(ccx, C_struct(ccx, &components, false),
"vtable", trait_ref.def_id().node);
ccx.vtables().borrow_mut().insert(cache_key, vtable);
vtable
}
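A rough sketch, in ordinary Rust, of the layout that the `components` vector above lays down (the struct and field names are illustrative; the real value is an anonymous LLVM constant struct):

#[repr(C)]
struct VtableHeader {
    drop_glue: fn(*mut ()), // glue::get_drop_glue(ccx, box_ty)
    size: usize,            // size of the erased self type
    align: usize,           // alignment of the erased self type
    // ...followed inline by one function pointer per method, gathered
    // from the trait and all of its supertraits.
}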
/// Helper function to declare and initialize the vtable.
pub fn make_vtable<I: Iterator<Item=ValueRef>>(ccx: &CrateContext,
drop_glue: ValueRef,
size: ValueRef,
align: ValueRef,
ptrs: I)
-> ValueRef {
let _icx = push_ctxt("meth::make_vtable");
let head = vec![drop_glue, size, align];
let components: Vec<_> = head.into_iter().chain(ptrs).collect();
unsafe {
let tbl = C_struct(ccx, &components[], false);
let sym = token::gensym("vtable");
let buf = CString::from_vec(format!("vtable{}", sym.usize()).into_bytes());
let vt_gvar = llvm::LLVMAddGlobal(ccx.llmod(), val_ty(tbl).to_ref(),
buf.as_ptr());
llvm::LLVMSetInitializer(vt_gvar, tbl);
llvm::LLVMSetGlobalConstant(vt_gvar, llvm::True);
llvm::SetLinkage(vt_gvar, llvm::InternalLinkage);
vt_gvar
}
}
fn emit_vtable_methods<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
impl_id: ast::DefId,
substs: subst::Substs<'tcx>)
-> Vec<ValueRef> {
let ccx = bcx.ccx();
fn emit_vtable_methods<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
impl_id: ast::DefId,
substs: subst::Substs<'tcx>,
param_substs: &'tcx subst::Substs<'tcx>)
-> Vec<ValueRef> {
let tcx = ccx.tcx();
let trt_id = match ty::impl_trait_ref(tcx, impl_id) {
@ -808,7 +785,7 @@ fn emit_vtable_methods<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
make a vtable for a type impl!")
};
ty::populate_implementations_for_trait_if_necessary(bcx.tcx(), trt_id);
ty::populate_implementations_for_trait_if_necessary(tcx, trt_id);
let trait_item_def_ids = ty::trait_item_def_ids(tcx, trt_id);
trait_item_def_ids.iter().flat_map(|method_def_id| {
@ -835,7 +812,7 @@ fn emit_vtable_methods<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
ccx,
m_id,
ExprId(0),
bcx.fcx.param_substs,
param_substs,
substs.clone()).val;
// currently, at least, by-value self is not object safe
@ -882,7 +859,7 @@ pub fn trans_trait_cast<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
bcx = datum.store_to(bcx, llboxdest);
// Store the vtable into the second half of the pair.
let vtable = get_vtable(bcx, datum_ty, trait_ref);
let vtable = get_vtable(bcx.ccx(), datum_ty, trait_ref, bcx.fcx.param_substs);
let llvtabledest = GEPi(bcx, lldest, &[0, abi::FAT_PTR_EXTRA]);
let llvtabledest = PointerCast(bcx, llvtabledest, val_ty(vtable).ptr_to());
Store(bcx, vtable, llvtabledest);

View File

@ -36,7 +36,7 @@ use std::hash::{Hasher, Hash, SipHasher};
pub fn monomorphic_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
fn_id: ast::DefId,
psubsts: &subst::Substs<'tcx>,
psubsts: &'tcx subst::Substs<'tcx>,
ref_id: Option<ast::NodeId>)
-> (ValueRef, Ty<'tcx>, bool) {
debug!("monomorphic_fn(\
@ -55,7 +55,7 @@ pub fn monomorphic_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
let hash_id = MonoId {
def: fn_id,
params: psubsts.types.clone()
params: &psubsts.types
};
let item_ty = ty::lookup_item_type(ccx.tcx(), fn_id).ty;
@ -289,7 +289,7 @@ pub fn monomorphic_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>,
#[derive(PartialEq, Eq, Hash, Debug)]
pub struct MonoId<'tcx> {
pub def: ast::DefId,
pub params: subst::VecPerParamSpace<Ty<'tcx>>
pub params: &'tcx subst::VecPerParamSpace<Ty<'tcx>>
}
/// Monomorphizes a type from the AST by first applying the in-scope

View File

@ -309,8 +309,6 @@ pub fn write_content<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
|set_bcx, lleltptr, _| {
elem.shallow_copy(set_bcx, lleltptr)
});
elem.add_clean_if_rvalue(bcx, element.id);
bcx
}
}

View File

@ -1183,7 +1183,7 @@ pub fn ast_ty_to_ty<'tcx>(
qpath_to_ty(this, rscope, ast_ty, &**qpath)
}
ast::TyFixedLengthVec(ref ty, ref e) => {
match const_eval::eval_const_expr_partial(tcx, &**e) {
match const_eval::eval_const_expr_partial(tcx, &**e, Some(tcx.types.uint)) {
Ok(ref r) => {
match *r {
const_eval::const_int(i) =>

View File

@ -8,6 +8,7 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use middle::const_eval;
use middle::def;
use middle::infer;
use middle::pat_util::{PatIdMap, pat_id_map, pat_is_binding, pat_is_const};
@ -15,12 +16,12 @@ use middle::subst::{Substs};
use middle::ty::{self, Ty};
use check::{check_expr, check_expr_has_type, check_expr_with_expectation};
use check::{check_expr_coercable_to_type, demand, FnCtxt, Expectation};
use check::{instantiate_path, structurally_resolved_type, valid_range_bounds};
use check::{instantiate_path, structurally_resolved_type};
use require_same_types;
use util::nodemap::FnvHashMap;
use util::ppaux::Repr;
use std::cmp;
use std::cmp::{self, Ordering};
use std::collections::hash_map::Entry::{Occupied, Vacant};
use syntax::ast;
use syntax::ast_util;
@ -79,16 +80,17 @@ pub fn check_pat<'a, 'tcx>(pcx: &pat_ctxt<'a, 'tcx>,
lhs_eq_rhs && (ty::type_is_numeric(lhs_ty) || ty::type_is_char(lhs_ty));
if numeric_or_char {
match valid_range_bounds(fcx.ccx, &**begin, &**end) {
Some(false) => {
match const_eval::compare_lit_exprs(tcx, &**begin, &**end, Some(lhs_ty)) {
Some(Ordering::Less) |
Some(Ordering::Equal) => {}
Some(Ordering::Greater) => {
span_err!(tcx.sess, begin.span, E0030,
"lower range bound must be less than upper");
},
}
None => {
span_err!(tcx.sess, begin.span, E0031,
"mismatched types in range");
},
Some(true) => {}
}
}
} else {
span_err!(tcx.sess, begin.span, E0029,

View File

@ -2499,16 +2499,6 @@ fn check_lit<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
}
}
pub fn valid_range_bounds(ccx: &CrateCtxt,
from: &ast::Expr,
to: &ast::Expr)
-> Option<bool> {
match const_eval::compare_lit_exprs(ccx.tcx, from, to) {
Some(val) => Some(val <= 0),
None => None
}
}
pub fn check_expr_has_type<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>,
expr: &'tcx ast::Expr,
expected: Ty<'tcx>) {
@ -3596,24 +3586,8 @@ fn check_expr_with_unifier<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>,
// Finally, borrowck is charged with guaranteeing that the
// value whose address was taken can actually be made to live
// as long as it needs to live.
match oprnd.node {
// String literals are already, implicitly converted to slices.
//ast::ExprLit(lit) if ast_util::lit_is_str(lit) => fcx.expr_ty(oprnd),
// Empty slices live in static memory.
ast::ExprVec(ref elements) if elements.len() == 0 => {
// Note: we do not assign a lifetime of
// static. This is because the resulting type
// `&'static [T]` would require that T outlives
// `'static`!
let region = fcx.infcx().next_region_var(
infer::AddrOfSlice(expr.span));
ty::mk_rptr(tcx, tcx.mk_region(region), tm)
}
_ => {
let region = fcx.infcx().next_region_var(infer::AddrOfRegion(expr.span));
ty::mk_rptr(tcx, tcx.mk_region(region), tm)
}
}
let region = fcx.infcx().next_region_var(infer::AddrOfRegion(expr.span));
ty::mk_rptr(tcx, tcx.mk_region(region), tm)
};
fcx.write_ty(id, oprnd_t);
}
@ -4550,7 +4524,7 @@ pub fn check_enum_variants<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>,
// that the expression is in a form that eval_const_expr can
// handle, so we may still get an internal compiler error
match const_eval::eval_const_expr_partial(ccx.tcx, &**e) {
match const_eval::eval_const_expr_partial(ccx.tcx, &**e, Some(declty)) {
Ok(const_eval::const_int(val)) => current_disr_val = val as Disr,
Ok(const_eval::const_uint(val)) => current_disr_val = val as Disr,
Ok(_) => {

View File

@ -40,7 +40,9 @@ fn run(repeat: int, depth: int) {
}
}
type nillist = List<()>;
// FIXME(#21721) used to be `List<()>` but that can cause
// certain LLVM versions to abort during optimizations.
type nillist = List<[u8; 0]>;
// Filled with things that have to be unwound
@ -81,11 +83,11 @@ fn recurse_or_panic(depth: int, st: Option<State>) {
}
Some(st) => {
let mut v = st.vec.clone();
v.push_all(&[box List::Cons((), st.vec.last().unwrap().clone())]);
v.push_all(&[box List::Cons([], st.vec.last().unwrap().clone())]);
State {
unique: box List::Cons((), box *st.unique),
unique: box List::Cons([], box *st.unique),
vec: v,
res: r(box List::Cons((), st.res._l.clone())),
res: r(box List::Cons([], st.res._l.clone())),
}
}
};

View File

@ -11,6 +11,6 @@
// Checks that immutable static items can't have mutable slices
static TEST: &'static mut [isize] = &mut [];
//~^ ERROR statics are not allowed to have mutable references
//~^ ERROR references in statics may only refer to immutable values
pub fn main() { }

View File

@ -99,7 +99,7 @@ static STATIC10: UnsafeStruct = UnsafeStruct;
struct MyOwned;
static STATIC11: Box<MyOwned> = box MyOwned;
//~^ ERROR statics are not allowed to have custom pointers
//~^ ERROR allocations are not allowed in statics
// The following examples test that mutable structs are just forbidden
// to have types with destructors
@ -117,16 +117,17 @@ static mut STATIC14: SafeStruct = SafeStruct {
//~^ ERROR mutable statics are not allowed to have destructors
field1: SafeEnum::Variant1,
field2: SafeEnum::Variant4("str".to_string())
//~^ ERROR static contains unimplemented expression type
};
static STATIC15: &'static [Box<MyOwned>] = &[
box MyOwned, //~ ERROR statics are not allowed to have custom pointers
box MyOwned, //~ ERROR statics are not allowed to have custom pointers
box MyOwned, //~ ERROR allocations are not allowed in statics
box MyOwned, //~ ERROR allocations are not allowed in statics
];
static STATIC16: (&'static Box<MyOwned>, &'static Box<MyOwned>) = (
&box MyOwned, //~ ERROR statics are not allowed to have custom pointers
&box MyOwned, //~ ERROR statics are not allowed to have custom pointers
&box MyOwned, //~ ERROR allocations are not allowed in statics
&box MyOwned, //~ ERROR allocations are not allowed in statics
);
static mut STATIC17: SafeEnum = SafeEnum::Variant1;
@ -134,9 +135,9 @@ static mut STATIC17: SafeEnum = SafeEnum::Variant1;
static STATIC19: Box<isize> =
box 3;
//~^ ERROR statics are not allowed to have custom pointers
//~^ ERROR allocations are not allowed in statics
pub fn main() {
let y = { static x: Box<isize> = box 3; x };
//~^ ERROR statics are not allowed to have custom pointers
//~^ ERROR allocations are not allowed in statics
}

View File

@ -8,18 +8,18 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
static A: usize = { 1us; 2 };
const A: usize = { 1us; 2 };
//~^ ERROR: blocks in constants are limited to items and tail expressions
static B: usize = { { } 2 };
const B: usize = { { } 2 };
//~^ ERROR: blocks in constants are limited to items and tail expressions
macro_rules! foo {
() => (()) //~ ERROR: blocks in constants are limited to items and tail expressions
}
static C: usize = { foo!(); 2 };
const C: usize = { foo!(); 2 };
static D: usize = { let x = 4us; 2 };
const D: usize = { let x = 4us; 2 };
//~^ ERROR: blocks in constants are limited to items and tail expressions
pub fn main() {

View File

@ -20,6 +20,7 @@ mod Y {
static foo: *const Y::X = Y::foo(Y::x as *const Y::X);
//~^ ERROR cannot refer to other statics by value
//~| ERROR: the trait `core::marker::Sync` is not implemented for the type
//~| ERROR the trait `core::marker::Sync` is not implemented for the type
//~| ERROR function calls in statics are limited to struct and enum constructors
fn main() {}

View File

@ -9,7 +9,7 @@
// except according to those terms.
static X: usize = 0 as *const usize as usize;
//~^ ERROR: can not cast a pointer to an integer in a constant expression
//~^ ERROR: can not cast a pointer to an integer in statics
fn main() {
assert_eq!(X, 0);

View File

@ -9,12 +9,12 @@
// except according to those terms.
const C1: &'static mut [usize] = &mut [];
//~^ ERROR: constants are not allowed to have mutable references
//~^ ERROR: references in constants may only refer to immutable values
static mut S: usize = 3;
const C2: &'static mut usize = &mut S;
//~^ ERROR: constants cannot refer to other statics
//~^^ ERROR: are not allowed to have mutable references
//~^^ ERROR: references in constants may only refer to immutable values
fn main() {}

View File

@ -9,9 +9,10 @@
// except according to those terms.
pub fn main() {
static z: &'static isize = {
const z: &'static isize = {
let p = 3;
//~^ ERROR blocks in constants are limited to items and tail expressions
&p
//~^ ERROR cannot borrow a local variable inside a static block, define a separate static instead
//~^ ERROR paths in constants may only refer to constants or functions
};
}
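By contrast, a hedged sketch of what the new checks do accept (my own example, not one of the test files in this diff): borrowing a constant rvalue, rather than a `let`-bound local, can be promoted and satisfy the `'static` requirement.

pub fn main() {
    // `&3` borrows a constant rvalue, so no separate static item is needed.
    const Z: &'static isize = &3;
    assert_eq!(*Z, 3);
}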

View File

@ -1,15 +0,0 @@
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Trying to create a fixed-length vector with a negative size
fn main() {
let _x = [0; -1]; //~ ERROR found negative integer
}

View File

@ -14,8 +14,9 @@ use std::cell::RefCell;
// Regression test for issue 7364
static boxed: Box<RefCell<isize>> = box RefCell::new(0);
//~^ ERROR statics are not allowed to have custom pointers
//~| ERROR: the trait `core::marker::Sync` is not implemented for the type
//~| ERROR: the trait `core::marker::Sync` is not implemented for the type
//~^ ERROR allocations are not allowed in statics
//~| ERROR the trait `core::marker::Sync` is not implemented for the type
//~| ERROR the trait `core::marker::Sync` is not implemented for the type
//~| ERROR function calls in statics are limited to struct and enum constructors
fn main() { }

View File

@ -0,0 +1,55 @@
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::{int, i8, i16, i32, i64};
use std::thread::Thread;
fn main() {
assert!(Thread::scoped(move|| int::MIN / -1).join().is_err());
//~^ ERROR attempted to divide with overflow in a constant expression
assert!(Thread::scoped(move|| i8::MIN / -1).join().is_err());
//~^ ERROR attempted to divide with overflow in a constant expression
assert!(Thread::scoped(move|| i16::MIN / -1).join().is_err());
//~^ ERROR attempted to divide with overflow in a constant expression
assert!(Thread::scoped(move|| i32::MIN / -1).join().is_err());
//~^ ERROR attempted to divide with overflow in a constant expression
assert!(Thread::scoped(move|| i64::MIN / -1).join().is_err());
//~^ ERROR attempted to divide with overflow in a constant expression
assert!(Thread::scoped(move|| 1is / 0).join().is_err());
//~^ ERROR attempted to divide by zero in a constant expression
assert!(Thread::scoped(move|| 1i8 / 0).join().is_err());
//~^ ERROR attempted to divide by zero in a constant expression
assert!(Thread::scoped(move|| 1i16 / 0).join().is_err());
//~^ ERROR attempted to divide by zero in a constant expression
assert!(Thread::scoped(move|| 1i32 / 0).join().is_err());
//~^ ERROR attempted to divide by zero in a constant expression
assert!(Thread::scoped(move|| 1i64 / 0).join().is_err());
//~^ ERROR attempted to divide by zero in a constant expression
assert!(Thread::scoped(move|| int::MIN % -1).join().is_err());
//~^ ERROR attempted remainder with overflow in a constant expression
assert!(Thread::scoped(move|| i8::MIN % -1).join().is_err());
//~^ ERROR attempted remainder with overflow in a constant expression
assert!(Thread::scoped(move|| i16::MIN % -1).join().is_err());
//~^ ERROR attempted remainder with overflow in a constant expression
assert!(Thread::scoped(move|| i32::MIN % -1).join().is_err());
//~^ ERROR attempted remainder with overflow in a constant expression
assert!(Thread::scoped(move|| i64::MIN % -1).join().is_err());
//~^ ERROR attempted remainder with overflow in a constant expression
assert!(Thread::scoped(move|| 1is % 0).join().is_err());
//~^ ERROR attempted remainder with a divisor of zero in a constant expression
assert!(Thread::scoped(move|| 1i8 % 0).join().is_err());
//~^ ERROR attempted remainder with a divisor of zero in a constant expression
assert!(Thread::scoped(move|| 1i16 % 0).join().is_err());
//~^ ERROR attempted remainder with a divisor of zero in a constant expression
assert!(Thread::scoped(move|| 1i32 % 0).join().is_err());
//~^ ERROR attempted remainder with a divisor of zero in a constant expression
assert!(Thread::scoped(move|| 1i64 % 0).join().is_err());
//~^ ERROR attempted remainder with a divisor of zero in a constant expression
}

View File

@ -41,8 +41,18 @@ fn main() {
//~| expected usize
//~| found &-ptr
//~| ERROR expected positive integer for repeat count, found string
let f = [0; -4];
//~^ ERROR expected positive integer for repeat count, found negative integer
let f = [0us; -1];
//~^ ERROR expected positive integer for repeat count, found negative integer
let f = [0; -4is];
//~^ ERROR mismatched types
//~| expected `usize`
//~| found `isize`
//~| expected usize
//~| found isize
//~| ERROR expected positive integer for repeat count, found negative integer
let f = [0us; -1is];
//~^ ERROR mismatched types
//~| expected `usize`
//~| found `isize`
//~| expected usize
//~| found isize
//~| ERROR expected positive integer for repeat count, found negative integer
}

View File

@ -11,7 +11,7 @@
#![feature(box_syntax)]
static mut a: Box<isize> = box 3;
//~^ ERROR statics are not allowed to have custom pointers
//~^ ERROR allocations are not allowed in statics
//~^^ ERROR mutable statics are not allowed to have owned pointers
fn main() {}

View File

@ -11,6 +11,6 @@
fn foo() -> isize { 23 }
static a: [isize; 2] = [foo(); 2];
//~^ ERROR: function calls in constants are limited to struct and enum constructors
//~^ ERROR: function calls in statics are limited to struct and enum constructors
fn main() {}

View File

@ -47,26 +47,26 @@
#![allow(dead_code)]
#![omit_gdb_pretty_printer_section]
static B: bool = false;
static I: int = -1;
static C: char = 'a';
static I8: i8 = 68;
static I16: i16 = -16;
static I32: i32 = -32;
static I64: i64 = -64;
static U: uint = 1;
static U8: u8 = 100;
static U16: u16 = 16;
static U32: u32 = 32;
static U64: u64 = 64;
static F32: f32 = 2.5;
static F64: f64 = 3.5;
// N.B. These are `mut` only so they don't constant fold away.
static mut B: bool = false;
static mut I: int = -1;
static mut C: char = 'a';
static mut I8: i8 = 68;
static mut I16: i16 = -16;
static mut I32: i32 = -32;
static mut I64: i64 = -64;
static mut U: uint = 1;
static mut U8: u8 = 100;
static mut U16: u16 = 16;
static mut U32: u32 = 32;
static mut U64: u64 = 64;
static mut F32: f32 = 2.5;
static mut F64: f64 = 3.5;
fn main() {
_zzz(); // #break
let a = (B, I, C, I8, I16, I32, I64, U, U8, U16, U32, U64, F32, F64);
let a = unsafe { (B, I, C, I8, I16, I32, I64, U, U8, U16, U32, U64, F32, F64) };
}
fn _zzz() {()}

View File

@ -52,25 +52,26 @@
#![allow(unused_variables)]
#![omit_gdb_pretty_printer_section]
static B: bool = false;
static I: int = -1;
static C: char = 'a';
static I8: i8 = 68;
static I16: i16 = -16;
static I32: i32 = -32;
static I64: i64 = -64;
static U: uint = 1;
static U8: u8 = 100;
static U16: u16 = 16;
static U32: u32 = 32;
static U64: u64 = 64;
static F32: f32 = 2.5;
static F64: f64 = 3.5;
// N.B. These are `mut` only so they don't constant fold away.
static mut B: bool = false;
static mut I: int = -1;
static mut C: char = 'a';
static mut I8: i8 = 68;
static mut I16: i16 = -16;
static mut I32: i32 = -32;
static mut I64: i64 = -64;
static mut U: uint = 1;
static mut U8: u8 = 100;
static mut U16: u16 = 16;
static mut U32: u32 = 32;
static mut U64: u64 = 64;
static mut F32: f32 = 2.5;
static mut F64: f64 = 3.5;
fn main() {
_zzz(); // #break
let a = (B, I, C, I8, I16, I32, I64, U, U8, U16, U32, U64, F32, F64);
let a = unsafe { (B, I, C, I8, I16, I32, I64, U, U8, U16, U32, U64, F32, F64) };
}
fn _zzz() {()}

View File

@ -7,5 +7,5 @@
all:
$(RUSTC) foo.rs --emit=llvm-ir
[ "$$(grep -c 8675309 "$(TMPDIR)/foo.ll")" -eq "1" ]
[ "$$(grep -c 11235813 "$(TMPDIR)/foo.ll")" -eq "1" ]
[ "$$(grep -c 'ret i32 8675309' "$(TMPDIR)/foo.ll")" -eq "1" ]
[ "$$(grep -c 'ret i32 11235813' "$(TMPDIR)/foo.ll")" -eq "1" ]

View File

@ -10,14 +10,14 @@
fn outer<T>() {
#[allow(dead_code)]
fn inner() -> uint {
fn inner() -> u32 {
8675309
}
}
extern "C" fn outer_foreign<T>() {
#[allow(dead_code)]
fn inner() -> uint {
fn inner() -> u32 {
11235813
}
}

View File

@ -0,0 +1,28 @@
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::mem;
#[derive(PartialEq, Show)]
enum Foo {
A(u32),
Bar([u16; 4]),
C
}
// NOTE(eddyb) Don't make this a const; it needs to be a static
// so it is always instantiated as an LLVM constant value.
static FOO: Foo = Foo::C;
fn main() {
assert_eq!(FOO, Foo::C);
assert_eq!(mem::size_of::<Foo>(), 12);
assert_eq!(mem::min_align_of::<Foo>(), 4);
}
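A short worked account of the numbers asserted above (assuming the straightforward tagged layout; this decomposition is an inference, not something stated in the diff):

// discriminant: 4 bytes (u32-sized, align 4, forced by the A(u32) variant)
// largest payload: [u16; 4] = 8 bytes
// size = 4 + 8 = 12 bytes, align = max(4, 2) = 4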

View File

@ -25,7 +25,9 @@ fn main() {
let mut dropped = false;
{
let leak = Leak { dropped: &mut dropped };
for ((), leaked) in Some(((),leak)).into_iter() {}
// FIXME(#21721) "hack" used to be () but that can cause
// certain LLVM versions to abort during optimizations.
for (_, leaked) in Some(("hack", leak)).into_iter() {}
}
assert!(dropped);

View File

@ -8,28 +8,32 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::{int, i8, i16, i32, i64};
use std::num::Int;
use std::thread::Thread;
// Avoid using constants, which would trigger compile-time errors.
fn min_val<T: Int>() -> T { Int::min_value() }
fn zero<T: Int>() -> T { Int::zero() }
fn main() {
assert!(Thread::scoped(move|| int::MIN / -1).join().is_err());
assert!(Thread::scoped(move|| i8::MIN / -1).join().is_err());
assert!(Thread::scoped(move|| i16::MIN / -1).join().is_err());
assert!(Thread::scoped(move|| i32::MIN / -1).join().is_err());
assert!(Thread::scoped(move|| i64::MIN / -1).join().is_err());
assert!(Thread::scoped(move|| 1 / 0).join().is_err());
assert!(Thread::scoped(move|| 1i8 / 0).join().is_err());
assert!(Thread::scoped(move|| 1i16 / 0).join().is_err());
assert!(Thread::scoped(move|| 1i32 / 0).join().is_err());
assert!(Thread::scoped(move|| 1i64 / 0).join().is_err());
assert!(Thread::scoped(move|| int::MIN % -1).join().is_err());
assert!(Thread::scoped(move|| i8::MIN % -1).join().is_err());
assert!(Thread::scoped(move|| i16::MIN % -1).join().is_err());
assert!(Thread::scoped(move|| i32::MIN % -1).join().is_err());
assert!(Thread::scoped(move|| i64::MIN % -1).join().is_err());
assert!(Thread::scoped(move|| 1 % 0).join().is_err());
assert!(Thread::scoped(move|| 1i8 % 0).join().is_err());
assert!(Thread::scoped(move|| 1i16 % 0).join().is_err());
assert!(Thread::scoped(move|| 1i32 % 0).join().is_err());
assert!(Thread::scoped(move|| 1i64 % 0).join().is_err());
assert!(Thread::scoped(move|| min_val::<isize>() / -1).join().is_err());
assert!(Thread::scoped(move|| min_val::<i8>() / -1).join().is_err());
assert!(Thread::scoped(move|| min_val::<i16>() / -1).join().is_err());
assert!(Thread::scoped(move|| min_val::<i32>() / -1).join().is_err());
assert!(Thread::scoped(move|| min_val::<i64>() / -1).join().is_err());
assert!(Thread::scoped(move|| 1is / zero()).join().is_err());
assert!(Thread::scoped(move|| 1i8 / zero()).join().is_err());
assert!(Thread::scoped(move|| 1i16 / zero()).join().is_err());
assert!(Thread::scoped(move|| 1i32 / zero()).join().is_err());
assert!(Thread::scoped(move|| 1i64 / zero()).join().is_err());
assert!(Thread::scoped(move|| min_val::<isize>() % -1).join().is_err());
assert!(Thread::scoped(move|| min_val::<i8>() % -1).join().is_err());
assert!(Thread::scoped(move|| min_val::<i16>() % -1).join().is_err());
assert!(Thread::scoped(move|| min_val::<i32>() % -1).join().is_err());
assert!(Thread::scoped(move|| min_val::<i64>() % -1).join().is_err());
assert!(Thread::scoped(move|| 1is % zero()).join().is_err());
assert!(Thread::scoped(move|| 1i8 % zero()).join().is_err());
assert!(Thread::scoped(move|| 1i16 % zero()).join().is_err());
assert!(Thread::scoped(move|| 1i32 % zero()).join().is_err());
assert!(Thread::scoped(move|| 1i64 % zero()).join().is_err());
}
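The `min_val`/`zero` helpers above exist because the same expressions written with literal operands are now folded and rejected at compile time, as the companion compile-fail test earlier in this commit shows. A hedged sketch of why the operands must stay opaque:

#[allow(dead_code)]
fn why_helpers_are_needed() {
    // With literal operands the compiler folds the expression and rejects it:
    // let _x = 1i32 / 0; // error: attempted to divide by zero in a constant expression
    // Routing the operands through `zero()` hides them from constant folding,
    // so the failure happens at run time and the panic can be observed by the test.
}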