Auto merge of #55665 - eddyb:by-ref-layout-of, r=oli-obk

rustc_target: pass contexts by reference, not value.

`LayoutOf` now takes `&self` instead of `self`, and so does every function that is generic over a context type implementing `LayoutOf` and/or related traits such as `HasDataLayout` and `HasTyCtxt`.
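As a rough sketch of what the signature change looks like (a simplified, hypothetical version of the trait, not the exact `rustc_target::abi::LayoutOf` definition):

```rust
// Simplified illustration only; the real trait carries more machinery.
mod before {
    pub trait LayoutOf {
        type Ty;
        type TyLayout;
        // The context is consumed by value, so implementers had to be
        // cheap `Copy` reference-like types, e.g. `&'a CodegenCx<'ll, 'tcx>`.
        fn layout_of(self, ty: Self::Ty) -> Self::TyLayout;
    }
}

mod after {
    pub trait LayoutOf {
        type Ty;
        type TyLayout;
        // The context is only borrowed, so the impl can live on the
        // owned context type itself (`CodegenCx`, `EvalContext`, ...).
        fn layout_of(&self, ty: Self::Ty) -> Self::TyLayout;
    }
}
```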

Originally, taking contexts by value was relevant because `TyCtxt`, a `Copy` type, was one of them; but `TyCtxt::layout_of` is now separate from `LayoutOf`, and `TyCtxt` is rarely used as a layout context anymore.

Passing these contexts by reference is a lot nicer for miri, which has `self: &mut EvalContext` and previously needed `f(&self)` (that is, creating `&&mut EvalContext` references) for layout purposes.
Now a `&mut EvalContext` can be passed directly to a function expecting `&C`.
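A hedged sketch of that call-site difference (`EvalContext`, `HasLayout`, and `f` are stand-ins here, not the real miri/rustc items):

```rust
// Stand-in types, for illustration only.
trait HasLayout {}

struct EvalContext;
impl HasLayout for EvalContext {}

// A helper that only needs to read from the context.
fn f<C: HasLayout>(_cx: &C) {}

impl EvalContext {
    fn step(&mut self) {
        // Before: only `&EvalContext` implemented the traits, so this call
        // needed `f(&self)` and produced a `&&mut EvalContext`.
        // After: `&mut EvalContext` reborrows to `&EvalContext` implicitly.
        f(self);
    }
}

fn main() {
    EvalContext.step();
}
```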

This should help with #54012 / #55627 (by removing the need for `where &'a T::Cx: LayoutOf` bounds).
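For downstream code generic over a context, the bound simplification looks roughly like this (illustrative names only; the trait below is a made-up stand-in, not the real `LayoutOf`):

```rust
// Illustrative only: `size_in_ctx` is a hypothetical helper.
pub trait LayoutOf {
    fn layout_size(&self) -> u64;
}

// Before: only `&C` implemented the trait, so generic users had to write
//     fn size_in_ctx<'a, C>(cx: &'a C) -> u64 where &'a C: LayoutOf { ... }
// After: the bound can be stated directly on the context type.
fn size_in_ctx<C: LayoutOf>(cx: &C) -> u64 {
    cx.layout_size()
}
```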

r? @nikomatsakis or @oli-obk or @nagisa cc @sunfishcode
bors 2018-11-04 18:56:43 +00:00
commit e6c5cf9234
38 changed files with 175 additions and 214 deletions


@@ -783,11 +783,11 @@ impl<'a, 'tcx> LateContext<'a, 'tcx> {
     }
 }
-impl<'a, 'tcx> LayoutOf for &'a LateContext<'a, 'tcx> {
+impl<'a, 'tcx> LayoutOf for LateContext<'a, 'tcx> {
     type Ty = Ty<'tcx>;
     type TyLayout = Result<TyLayout<'tcx>, LayoutError<'tcx>>;
-    fn layout_of(self, ty: Ty<'tcx>) -> Self::TyLayout {
+    fn layout_of(&self, ty: Ty<'tcx>) -> Self::TyLayout {
         self.tcx.layout_of(self.param_env.and(ty))
     }
 }


@@ -84,7 +84,7 @@ impl<'a, 'tcx> ExprVisitor<'a, 'tcx> {
         // `Option<typeof(function)>` to present a clearer error.
         let from = unpack_option_like(self.tcx.global_tcx(), from);
         if let (&ty::FnDef(..), SizeSkeleton::Known(size_to)) = (&from.sty, sk_to) {
-            if size_to == Pointer.size(self.tcx) {
+            if size_to == Pointer.size(&self.tcx) {
                 struct_span_err!(self.tcx.sess, span, E0591,
                                  "can't transmute zero-sized type")
                     .note(&format!("source type: {}", from))


@@ -86,18 +86,18 @@ pub trait PointerArithmetic: layout::HasDataLayout {
     // These are not supposed to be overridden.
     #[inline(always)]
-    fn pointer_size(self) -> Size {
+    fn pointer_size(&self) -> Size {
         self.data_layout().pointer_size
     }
     //// Trunace the given value to the pointer size; also return whether there was an overflow
-    fn truncate_to_ptr(self, val: u128) -> (u64, bool) {
+    fn truncate_to_ptr(&self, val: u128) -> (u64, bool) {
         let max_ptr_plus_1 = 1u128 << self.pointer_size().bits();
         ((val % max_ptr_plus_1) as u64, val >= max_ptr_plus_1)
     }
     // Overflow checking only works properly on the range from -u64 to +u64.
-    fn overflowing_signed_offset(self, val: u64, i: i128) -> (u64, bool) {
+    fn overflowing_signed_offset(&self, val: u64, i: i128) -> (u64, bool) {
         // FIXME: is it possible to over/underflow here?
         if i < 0 {
             // trickery to ensure that i64::min_value() works fine
@@ -109,23 +109,23 @@ pub trait PointerArithmetic: layout::HasDataLayout {
         }
     }
-    fn overflowing_offset(self, val: u64, i: u64) -> (u64, bool) {
+    fn overflowing_offset(&self, val: u64, i: u64) -> (u64, bool) {
         let (res, over1) = val.overflowing_add(i);
         let (res, over2) = self.truncate_to_ptr(res as u128);
         (res, over1 || over2)
     }
-    fn signed_offset<'tcx>(self, val: u64, i: i64) -> EvalResult<'tcx, u64> {
+    fn signed_offset<'tcx>(&self, val: u64, i: i64) -> EvalResult<'tcx, u64> {
         let (res, over) = self.overflowing_signed_offset(val, i as i128);
         if over { err!(Overflow(mir::BinOp::Add)) } else { Ok(res) }
     }
-    fn offset<'tcx>(self, val: u64, i: u64) -> EvalResult<'tcx, u64> {
+    fn offset<'tcx>(&self, val: u64, i: u64) -> EvalResult<'tcx, u64> {
         let (res, over) = self.overflowing_offset(val, i);
         if over { err!(Overflow(mir::BinOp::Add)) } else { Ok(res) }
     }
-    fn wrapping_signed_offset(self, val: u64, i: i64) -> u64 {
+    fn wrapping_signed_offset(&self, val: u64, i: i64) -> u64 {
         self.overflowing_signed_offset(val, i as i128).0
     }
 }
@@ -176,7 +176,7 @@ impl<'tcx, Tag> Pointer<Tag> {
         Pointer { alloc_id, offset, tag }
     }
-    pub fn wrapping_signed_offset(self, i: i64, cx: impl HasDataLayout) -> Self {
+    pub fn wrapping_signed_offset(self, i: i64, cx: &impl HasDataLayout) -> Self {
         Pointer::new_with_tag(
             self.alloc_id,
             Size::from_bytes(cx.data_layout().wrapping_signed_offset(self.offset.bytes(), i)),
@@ -184,12 +184,12 @@ impl<'tcx, Tag> Pointer<Tag> {
         )
     }
-    pub fn overflowing_signed_offset(self, i: i128, cx: impl HasDataLayout) -> (Self, bool) {
+    pub fn overflowing_signed_offset(self, i: i128, cx: &impl HasDataLayout) -> (Self, bool) {
         let (res, over) = cx.data_layout().overflowing_signed_offset(self.offset.bytes(), i);
         (Pointer::new_with_tag(self.alloc_id, Size::from_bytes(res), self.tag), over)
     }
-    pub fn signed_offset(self, i: i64, cx: impl HasDataLayout) -> EvalResult<'tcx, Self> {
+    pub fn signed_offset(self, i: i64, cx: &impl HasDataLayout) -> EvalResult<'tcx, Self> {
         Ok(Pointer::new_with_tag(
             self.alloc_id,
             Size::from_bytes(cx.data_layout().signed_offset(self.offset.bytes(), i)?),
@@ -197,12 +197,12 @@ impl<'tcx, Tag> Pointer<Tag> {
         ))
     }
-    pub fn overflowing_offset(self, i: Size, cx: impl HasDataLayout) -> (Self, bool) {
+    pub fn overflowing_offset(self, i: Size, cx: &impl HasDataLayout) -> (Self, bool) {
         let (res, over) = cx.data_layout().overflowing_offset(self.offset.bytes(), i.bytes());
         (Pointer::new_with_tag(self.alloc_id, Size::from_bytes(res), self.tag), over)
     }
-    pub fn offset(self, i: Size, cx: impl HasDataLayout) -> EvalResult<'tcx, Self> {
+    pub fn offset(self, i: Size, cx: &impl HasDataLayout) -> EvalResult<'tcx, Self> {
         Ok(Pointer::new_with_tag(
             self.alloc_id,
             Size::from_bytes(cx.data_layout().offset(self.offset.bytes(), i.bytes())?),


@@ -65,7 +65,7 @@ impl<'tcx> ConstValue<'tcx> {
     pub fn new_slice(
         val: Scalar,
         len: u64,
-        cx: impl HasDataLayout
+        cx: &impl HasDataLayout
     ) -> Self {
         ConstValue::ScalarPair(val, Scalar::Bits {
             bits: len as u128,
@@ -121,7 +121,7 @@ impl<'tcx, Tag> Scalar<Tag> {
     }
     #[inline]
-    pub fn ptr_null(cx: impl HasDataLayout) -> Self {
+    pub fn ptr_null(cx: &impl HasDataLayout) -> Self {
         Scalar::Bits {
             bits: 0,
             size: cx.data_layout().pointer_size.bytes() as u8,
@@ -134,52 +134,52 @@ impl<'tcx, Tag> Scalar<Tag> {
     }
     #[inline]
-    pub fn ptr_signed_offset(self, i: i64, cx: impl HasDataLayout) -> EvalResult<'tcx, Self> {
-        let layout = cx.data_layout();
+    pub fn ptr_signed_offset(self, i: i64, cx: &impl HasDataLayout) -> EvalResult<'tcx, Self> {
+        let dl = cx.data_layout();
         match self {
             Scalar::Bits { bits, size } => {
-                assert_eq!(size as u64, layout.pointer_size.bytes());
+                assert_eq!(size as u64, dl.pointer_size.bytes());
                 Ok(Scalar::Bits {
-                    bits: layout.signed_offset(bits as u64, i)? as u128,
+                    bits: dl.signed_offset(bits as u64, i)? as u128,
                     size,
                 })
             }
-            Scalar::Ptr(ptr) => ptr.signed_offset(i, layout).map(Scalar::Ptr),
+            Scalar::Ptr(ptr) => ptr.signed_offset(i, dl).map(Scalar::Ptr),
         }
     }
     #[inline]
-    pub fn ptr_offset(self, i: Size, cx: impl HasDataLayout) -> EvalResult<'tcx, Self> {
-        let layout = cx.data_layout();
+    pub fn ptr_offset(self, i: Size, cx: &impl HasDataLayout) -> EvalResult<'tcx, Self> {
+        let dl = cx.data_layout();
         match self {
             Scalar::Bits { bits, size } => {
-                assert_eq!(size as u64, layout.pointer_size.bytes());
+                assert_eq!(size as u64, dl.pointer_size.bytes());
                 Ok(Scalar::Bits {
-                    bits: layout.offset(bits as u64, i.bytes())? as u128,
+                    bits: dl.offset(bits as u64, i.bytes())? as u128,
                     size,
                 })
             }
-            Scalar::Ptr(ptr) => ptr.offset(i, layout).map(Scalar::Ptr),
+            Scalar::Ptr(ptr) => ptr.offset(i, dl).map(Scalar::Ptr),
         }
     }
     #[inline]
-    pub fn ptr_wrapping_signed_offset(self, i: i64, cx: impl HasDataLayout) -> Self {
-        let layout = cx.data_layout();
+    pub fn ptr_wrapping_signed_offset(self, i: i64, cx: &impl HasDataLayout) -> Self {
+        let dl = cx.data_layout();
         match self {
             Scalar::Bits { bits, size } => {
-                assert_eq!(size as u64, layout.pointer_size.bytes());
+                assert_eq!(size as u64, dl.pointer_size.bytes());
                 Scalar::Bits {
-                    bits: layout.wrapping_signed_offset(bits as u64, i) as u128,
+                    bits: dl.wrapping_signed_offset(bits as u64, i) as u128,
                     size,
                 }
             }
-            Scalar::Ptr(ptr) => Scalar::Ptr(ptr.wrapping_signed_offset(i, layout)),
+            Scalar::Ptr(ptr) => Scalar::Ptr(ptr.wrapping_signed_offset(i, dl)),
         }
     }
     #[inline]
-    pub fn is_null_ptr(self, cx: impl HasDataLayout) -> bool {
+    pub fn is_null_ptr(self, cx: &impl HasDataLayout) -> bool {
         match self {
             Scalar::Bits { bits, size } => {
                 assert_eq!(size as u64, cx.data_layout().pointer_size.bytes());
@@ -301,7 +301,7 @@ impl<'tcx, Tag> Scalar<Tag> {
         Ok(b as u64)
     }
-    pub fn to_usize(self, cx: impl HasDataLayout) -> EvalResult<'static, u64> {
+    pub fn to_usize(self, cx: &impl HasDataLayout) -> EvalResult<'static, u64> {
         let b = self.to_bits(cx.data_layout().pointer_size)?;
         assert_eq!(b as u64 as u128, b);
         Ok(b as u64)
@@ -331,7 +331,7 @@ impl<'tcx, Tag> Scalar<Tag> {
         Ok(b as i64)
     }
-    pub fn to_isize(self, cx: impl HasDataLayout) -> EvalResult<'static, i64> {
+    pub fn to_isize(self, cx: &impl HasDataLayout) -> EvalResult<'static, i64> {
         let b = self.to_bits(cx.data_layout().pointer_size)?;
         let b = sign_extend(b, cx.data_layout().pointer_size) as i128;
         assert_eq!(b as i64 as i128, b);


@@ -428,7 +428,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
             ));
             let tcx = self.tcx;
             if let Some(len) = len.val.try_to_scalar().and_then(|scalar| {
-                scalar.to_usize(tcx).ok()
+                scalar.to_usize(&tcx).ok()
             }) {
                 flags.push((
                     "_Self".to_owned(),


@@ -30,7 +30,7 @@ pub use rustc_target::abi::*;
 pub trait IntegerExt {
     fn to_ty<'a, 'tcx>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, signed: bool) -> Ty<'tcx>;
-    fn from_attr<C: HasDataLayout>(cx: C, ity: attr::IntType) -> Integer;
+    fn from_attr<C: HasDataLayout>(cx: &C, ity: attr::IntType) -> Integer;
     fn repr_discr<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
                             ty: Ty<'tcx>,
                             repr: &ReprOptions,
@@ -56,7 +56,7 @@ impl IntegerExt for Integer {
     }
     /// Get the Integer type from an attr::IntType.
-    fn from_attr<C: HasDataLayout>(cx: C, ity: attr::IntType) -> Integer {
+    fn from_attr<C: HasDataLayout>(cx: &C, ity: attr::IntType) -> Integer {
         let dl = cx.data_layout();
         match ity {
@@ -92,7 +92,7 @@ impl IntegerExt for Integer {
         let min_default = I8;
         if let Some(ity) = repr.int {
-            let discr = Integer::from_attr(tcx, ity);
+            let discr = Integer::from_attr(&tcx, ity);
             let fit = if ity.is_signed() { signed_fit } else { unsigned_fit };
             if discr < fit {
                 bug!("Integer::repr_discr: `#[repr]` hint too small for \
@@ -202,14 +202,13 @@ pub fn provide(providers: &mut ty::query::Providers<'_>) {
     };
 }
-#[derive(Copy, Clone)]
 pub struct LayoutCx<'tcx, C> {
     pub tcx: C,
     pub param_env: ty::ParamEnv<'tcx>
 }
 impl<'a, 'tcx> LayoutCx<'tcx, TyCtxt<'a, 'tcx, 'tcx>> {
-    fn layout_raw_uncached(self, ty: Ty<'tcx>)
+    fn layout_raw_uncached(&self, ty: Ty<'tcx>)
                            -> Result<&'tcx LayoutDetails, LayoutError<'tcx>> {
         let tcx = self.tcx;
         let param_env = self.param_env;
@@ -899,7 +898,7 @@ impl<'a, 'tcx> LayoutCx<'tcx, TyCtxt<'a, 'tcx, 'tcx>> {
         let (mut min, mut max) = (i128::max_value(), i128::min_value());
         let discr_type = def.repr.discr_type();
-        let bits = Integer::from_attr(tcx, discr_type).size().bits();
+        let bits = Integer::from_attr(self, discr_type).size().bits();
         for (i, discr) in def.discriminants(tcx).enumerate() {
             if variants[i].iter().any(|f| f.abi.is_uninhabited()) {
                 continue;
@@ -1141,7 +1140,7 @@ impl<'a, 'tcx> LayoutCx<'tcx, TyCtxt<'a, 'tcx, 'tcx>> {
     /// This is invoked by the `layout_raw` query to record the final
     /// layout of each type.
     #[inline]
-    fn record_layout_for_printing(self, layout: TyLayout<'tcx>) {
+    fn record_layout_for_printing(&self, layout: TyLayout<'tcx>) {
         // If we are running with `-Zprint-type-sizes`, record layouts for
         // dumping later. Ignore layouts that are done with non-empty
         // environments or non-monomorphic layouts, as the user only wants
@@ -1158,7 +1157,7 @@ impl<'a, 'tcx> LayoutCx<'tcx, TyCtxt<'a, 'tcx, 'tcx>> {
         self.record_layout_for_printing_outlined(layout)
     }
-    fn record_layout_for_printing_outlined(self, layout: TyLayout<'tcx>) {
+    fn record_layout_for_printing_outlined(&self, layout: TyLayout<'tcx>) {
         // (delay format until we actually need it)
         let record = |kind, packed, opt_discr_size, variants| {
             let type_desc = format!("{:?}", layout.ty);
@@ -1478,7 +1477,7 @@ impl<'a, 'tcx> LayoutOf for LayoutCx<'tcx, TyCtxt<'a, 'tcx, 'tcx>> {
     /// Computes the layout of a type. Note that this implicitly
     /// executes in "reveal all" mode.
-    fn layout_of(self, ty: Ty<'tcx>) -> Self::TyLayout {
+    fn layout_of(&self, ty: Ty<'tcx>) -> Self::TyLayout {
         let param_env = self.param_env.with_reveal_all();
         let ty = self.tcx.normalize_erasing_regions(param_env, ty);
         let details = self.tcx.layout_raw(param_env.and(ty))?;
@@ -1505,7 +1504,7 @@ impl<'a, 'tcx> LayoutOf for LayoutCx<'tcx, ty::query::TyCtxtAt<'a, 'tcx, 'tcx>>
     /// Computes the layout of a type. Note that this implicitly
     /// executes in "reveal all" mode.
-    fn layout_of(self, ty: Ty<'tcx>) -> Self::TyLayout {
+    fn layout_of(&self, ty: Ty<'tcx>) -> Self::TyLayout {
         let param_env = self.param_env.with_reveal_all();
         let ty = self.tcx.normalize_erasing_regions(param_env, ty);
         let details = self.tcx.layout_raw(param_env.and(ty))?;
@@ -1563,7 +1562,7 @@ impl<'a, 'tcx, C> TyLayoutMethods<'tcx, C> for Ty<'tcx>
     where C: LayoutOf<Ty = Ty<'tcx>> + HasTyCtxt<'tcx>,
           C::TyLayout: MaybeResult<TyLayout<'tcx>>
 {
-    fn for_variant(this: TyLayout<'tcx>, cx: C, variant_index: usize) -> TyLayout<'tcx> {
+    fn for_variant(this: TyLayout<'tcx>, cx: &C, variant_index: usize) -> TyLayout<'tcx> {
         let details = match this.variants {
             Variants::Single { index } if index == variant_index => this.details,
@@ -1602,7 +1601,7 @@ impl<'a, 'tcx, C> TyLayoutMethods<'tcx, C> for Ty<'tcx>
         }
     }
-    fn field(this: TyLayout<'tcx>, cx: C, i: usize) -> C::TyLayout {
+    fn field(this: TyLayout<'tcx>, cx: &C, i: usize) -> C::TyLayout {
         let tcx = cx.tcx();
         cx.layout_of(match this.ty.sty {
             ty::Bool |
@@ -1699,7 +1698,7 @@ impl<'a, 'tcx, C> TyLayoutMethods<'tcx, C> for Ty<'tcx>
             Variants::Tagged { tag: ref discr, .. } |
             Variants::NicheFilling { niche: ref discr, .. } => {
                 assert_eq!(i, 0);
-                let layout = LayoutDetails::scalar(tcx, discr.clone());
+                let layout = LayoutDetails::scalar(cx, discr.clone());
                 return MaybeResult::from_ok(TyLayout {
                     details: tcx.intern_layout(layout),
                     ty: discr.value.to_ty(tcx)
@@ -1725,7 +1724,7 @@ struct Niche {
 impl Niche {
     fn reserve<'a, 'tcx>(
         &self,
-        cx: LayoutCx<'tcx, TyCtxt<'a, 'tcx, 'tcx>>,
+        cx: &LayoutCx<'tcx, TyCtxt<'a, 'tcx, 'tcx>>,
         count: u128,
     ) -> Option<(u128, Scalar)> {
         if count > self.available {
@@ -1745,7 +1744,7 @@ impl<'a, 'tcx> LayoutCx<'tcx, TyCtxt<'a, 'tcx, 'tcx>> {
     /// Find the offset of a niche leaf field, starting from
     /// the given type and recursing through aggregates.
     // FIXME(eddyb) traverse already optimized enums.
-    fn find_niche(self, layout: TyLayout<'tcx>) -> Result<Option<Niche>, LayoutError<'tcx>> {
+    fn find_niche(&self, layout: TyLayout<'tcx>) -> Result<Option<Niche>, LayoutError<'tcx>> {
         let scalar_niche = |scalar: &Scalar, offset| {
             let Scalar { value, valid_range: ref v } = *scalar;


@@ -43,7 +43,7 @@ impl<'tcx> fmt::Display for Discr<'tcx> {
         match self.ty.sty {
             ty::Int(ity) => {
                 let bits = ty::tls::with(|tcx| {
-                    Integer::from_attr(tcx, SignedInt(ity)).size().bits()
+                    Integer::from_attr(&tcx, SignedInt(ity)).size().bits()
                 });
                 let x = self.val as i128;
                 // sign extend the raw representation to be an i128
@@ -62,8 +62,8 @@ impl<'tcx> Discr<'tcx> {
     }
     pub fn checked_add<'a, 'gcx>(self, tcx: TyCtxt<'a, 'gcx, 'tcx>, n: u128) -> (Self, bool) {
         let (int, signed) = match self.ty.sty {
-            Int(ity) => (Integer::from_attr(tcx, SignedInt(ity)), true),
-            Uint(uty) => (Integer::from_attr(tcx, UnsignedInt(uty)), false),
+            Int(ity) => (Integer::from_attr(&tcx, SignedInt(ity)), true),
+            Uint(uty) => (Integer::from_attr(&tcx, UnsignedInt(uty)), false),
             _ => bug!("non integer discriminant"),
         };


@@ -446,29 +446,29 @@ impl<'b, 'tcx> CodegenCx<'b, 'tcx> {
     }
 }
-impl ty::layout::HasDataLayout for &'a CodegenCx<'ll, 'tcx> {
+impl ty::layout::HasDataLayout for CodegenCx<'ll, 'tcx> {
     fn data_layout(&self) -> &ty::layout::TargetDataLayout {
         &self.tcx.data_layout
     }
 }
-impl HasTargetSpec for &'a CodegenCx<'ll, 'tcx> {
+impl HasTargetSpec for CodegenCx<'ll, 'tcx> {
     fn target_spec(&self) -> &Target {
         &self.tcx.sess.target.target
     }
 }
-impl ty::layout::HasTyCtxt<'tcx> for &'a CodegenCx<'ll, 'tcx> {
-    fn tcx<'b>(&'b self) -> TyCtxt<'b, 'tcx, 'tcx> {
+impl ty::layout::HasTyCtxt<'tcx> for CodegenCx<'ll, 'tcx> {
+    fn tcx<'a>(&'a self) -> TyCtxt<'a, 'tcx, 'tcx> {
         self.tcx
     }
 }
-impl LayoutOf for &'a CodegenCx<'ll, 'tcx> {
+impl LayoutOf for CodegenCx<'ll, 'tcx> {
     type Ty = Ty<'tcx>;
     type TyLayout = TyLayout<'tcx>;
-    fn layout_of(self, ty: Ty<'tcx>) -> Self::TyLayout {
+    fn layout_of(&self, ty: Ty<'tcx>) -> Self::TyLayout {
         self.tcx.layout_of(ty::ParamEnv::reveal_all().and(ty))
             .unwrap_or_else(|e| if let LayoutError::SizeOverflow(_) = e {
                 self.sess().fatal(&e.to_string())


@@ -87,8 +87,8 @@ pub fn scalar_to_llvm(
 pub fn const_alloc_to_llvm(cx: &CodegenCx<'ll, '_>, alloc: &Allocation) -> &'ll Value {
     let mut llvals = Vec::with_capacity(alloc.relocations.len() + 1);
-    let layout = cx.data_layout();
-    let pointer_size = layout.pointer_size.bytes() as usize;
+    let dl = cx.data_layout();
+    let pointer_size = dl.pointer_size.bytes() as usize;
     let mut next_offset = 0;
     for &(offset, ((), alloc_id)) in alloc.relocations.iter() {
@@ -99,7 +99,7 @@ pub fn const_alloc_to_llvm(cx: &CodegenCx<'ll, '_>, alloc: &Allocation) -> &'ll
             llvals.push(C_bytes(cx, &alloc.bytes[next_offset..offset]));
         }
         let ptr_offset = read_target_uint(
-            layout.endian,
+            dl.endian,
             &alloc.bytes[offset..(offset + pointer_size)],
         ).expect("const_alloc_to_llvm: could not read relocation pointer") as u64;
         llvals.push(scalar_to_llvm(


@@ -377,13 +377,13 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for TypeLimits {
                     let (t, actually) = match ty {
                         ty::Int(t) => {
                             let ity = attr::IntType::SignedInt(t);
-                            let bits = layout::Integer::from_attr(cx.tcx, ity).size().bits();
+                            let bits = layout::Integer::from_attr(&cx.tcx, ity).size().bits();
                             let actually = (val << (128 - bits)) as i128 >> (128 - bits);
                             (format!("{:?}", t), actually.to_string())
                         }
                         ty::Uint(t) => {
                             let ity = attr::IntType::UnsignedInt(t);
-                            let bits = layout::Integer::from_attr(cx.tcx, ity).size().bits();
+                            let bits = layout::Integer::from_attr(&cx.tcx, ity).size().bits();
                             let actually = (val << (128 - bits)) >> (128 - bits);
                             (format!("{:?}", t), actually.to_string())
                         }
@@ -829,7 +829,7 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for VariantSizeDifferences {
             Ok(layout) => {
                 let variants = &layout.variants;
                 if let layout::Variants::Tagged { ref variants, ref tag, .. } = variants {
-                    let discr_size = tag.value.size(cx.tcx).bytes();
+                    let discr_size = tag.value.size(&cx.tcx).bytes();
                     debug!("enum `{}` is {} bytes large with layout:\n{:#?}",
                            t, layout.size.bytes(), layout);


@@ -165,7 +165,7 @@ impl<'a, 'gcx, 'tcx> Cx<'a, 'gcx, 'tcx> {
             LitKind::Str(ref s, _) => {
                 let s = s.as_str();
                 let id = self.tcx.allocate_bytes(s.as_bytes());
-                ConstValue::new_slice(Scalar::Ptr(id.into()), s.len() as u64, self.tcx)
+                ConstValue::new_slice(Scalar::Ptr(id.into()), s.len() as u64, &self.tcx)
             },
             LitKind::ByteStr(ref data) => {
                 let id = self.tcx.allocate_bytes(data);


@@ -669,14 +669,14 @@ fn all_constructors<'a, 'tcx: 'a>(cx: &mut MatchCheckCtxt<'a, 'tcx>,
         }
         ty::Int(ity) if exhaustive_integer_patterns => {
             // FIXME(49937): refactor these bit manipulations into interpret.
-            let bits = Integer::from_attr(cx.tcx, SignedInt(ity)).size().bits() as u128;
+            let bits = Integer::from_attr(&cx.tcx, SignedInt(ity)).size().bits() as u128;
             let min = 1u128 << (bits - 1);
             let max = (1u128 << (bits - 1)) - 1;
             vec![ConstantRange(min, max, pcx.ty, RangeEnd::Included)]
         }
         ty::Uint(uty) if exhaustive_integer_patterns => {
             // FIXME(49937): refactor these bit manipulations into interpret.
-            let bits = Integer::from_attr(cx.tcx, UnsignedInt(uty)).size().bits() as u128;
+            let bits = Integer::from_attr(&cx.tcx, UnsignedInt(uty)).size().bits() as u128;
             let max = !0u128 >> (128 - bits);
             vec![ConstantRange(0, max, pcx.ty, RangeEnd::Included)]
         }
@@ -862,7 +862,7 @@ impl<'tcx> IntRange<'tcx> {
     fn signed_bias(tcx: TyCtxt<'_, 'tcx, 'tcx>, ty: Ty<'tcx>) -> u128 {
         match ty.sty {
             ty::Int(ity) => {
-                let bits = Integer::from_attr(tcx, SignedInt(ity)).size().bits() as u128;
+                let bits = Integer::from_attr(&tcx, SignedInt(ity)).size().bits() as u128;
                 1u128 << (bits - 1)
             }
             _ => 0


@@ -1313,7 +1313,7 @@ fn lit_to_const<'a, 'tcx>(lit: &'tcx ast::LitKind,
         LitKind::Str(ref s, _) => {
             let s = s.as_str();
             let id = tcx.allocate_bytes(s.as_bytes());
-            ConstValue::new_slice(Scalar::Ptr(id.into()), s.len() as u64, tcx)
+            ConstValue::new_slice(Scalar::Ptr(id.into()), s.len() as u64, &tcx)
         },
         LitKind::ByteStr(ref data) => {
             let id = tcx.allocate_bytes(data);


@@ -331,7 +331,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M>
                 let val = Immediate::new_slice(
                     ptr,
                     length.unwrap_usize(self.tcx.tcx),
-                    self.tcx.tcx,
+                    self,
                 );
                 self.write_immediate(val, dest)
             }
@@ -394,7 +394,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M>
                     src_field.into()
                 }
                 Err(..) => {
-                    let src_field_layout = src.layout.field(&self, i)?;
+                    let src_field_layout = src.layout.field(self, i)?;
                     // this must be a field covering the entire thing
                     assert_eq!(src.layout.fields.offset(i).bytes(), 0);
                     assert_eq!(src_field_layout.size, src.layout.size);


@@ -139,8 +139,8 @@ impl<'tcx, Tag> LocalValue<Tag> {
     }
 }
-impl<'b, 'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> HasDataLayout
-    for &'b EvalContext<'a, 'mir, 'tcx, M>
+impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> HasDataLayout
+    for EvalContext<'a, 'mir, 'tcx, M>
 {
     #[inline]
     fn data_layout(&self) -> &layout::TargetDataLayout {
@@ -148,16 +148,7 @@ impl<'b, 'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> HasDataLayout
     }
 }
-impl<'c, 'b, 'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> HasDataLayout
-    for &'c &'b mut EvalContext<'a, 'mir, 'tcx, M>
-{
-    #[inline]
-    fn data_layout(&self) -> &layout::TargetDataLayout {
-        &self.tcx.data_layout
-    }
-}
-impl<'b, 'a, 'mir, 'tcx, M> layout::HasTyCtxt<'tcx> for &'b EvalContext<'a, 'mir, 'tcx, M>
+impl<'a, 'mir, 'tcx, M> layout::HasTyCtxt<'tcx> for EvalContext<'a, 'mir, 'tcx, M>
     where M: Machine<'a, 'mir, 'tcx>
 {
     #[inline]
@@ -166,40 +157,19 @@ impl<'b, 'a, 'mir, 'tcx, M> layout::HasTyCtxt<'tcx> for &'b EvalContext<'a, 'mir
     }
 }
-impl<'c, 'b, 'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> layout::HasTyCtxt<'tcx>
-    for &'c &'b mut EvalContext<'a, 'mir, 'tcx, M>
-{
-    #[inline]
-    fn tcx<'d>(&'d self) -> TyCtxt<'d, 'tcx, 'tcx> {
-        *self.tcx
-    }
-}
-impl<'b, 'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> LayoutOf
-    for &'b EvalContext<'a, 'mir, 'tcx, M>
+impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> LayoutOf
+    for EvalContext<'a, 'mir, 'tcx, M>
 {
     type Ty = Ty<'tcx>;
     type TyLayout = EvalResult<'tcx, TyLayout<'tcx>>;
     #[inline]
-    fn layout_of(self, ty: Ty<'tcx>) -> Self::TyLayout {
+    fn layout_of(&self, ty: Ty<'tcx>) -> Self::TyLayout {
         self.tcx.layout_of(self.param_env.and(ty))
             .map_err(|layout| EvalErrorKind::Layout(layout).into())
     }
 }
-impl<'c, 'b, 'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> LayoutOf
-    for &'c &'b mut EvalContext<'a, 'mir, 'tcx, M>
-{
-    type Ty = Ty<'tcx>;
-    type TyLayout = EvalResult<'tcx, TyLayout<'tcx>>;
-    #[inline]
-    fn layout_of(self, ty: Ty<'tcx>) -> Self::TyLayout {
-        (&**self).layout_of(ty)
-    }
-}
 impl<'a, 'mir, 'tcx: 'mir, M: Machine<'a, 'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
     pub fn new(
         tcx: TyCtxtAt<'a, 'tcx, 'tcx>,
@@ -335,7 +305,7 @@ impl<'a, 'mir, 'tcx: 'mir, M: Machine<'a, 'mir, 'tcx>> EvalContext<'a, 'mir, 'tc
     pub fn str_to_immediate(&mut self, s: &str) -> EvalResult<'tcx, Immediate<M::PointerTag>> {
         let ptr = self.memory.allocate_static_bytes(s.as_bytes()).with_default_tag();
-        Ok(Immediate::new_slice(Scalar::Ptr(ptr), s.len() as u64, self.tcx.tcx))
+        Ok(Immediate::new_slice(Scalar::Ptr(ptr), s.len() as u64, self))
     }
     /// Return the actual dynamic size and alignment of the place at the given type.


@@ -77,16 +77,8 @@ pub struct Memory<'a, 'mir, 'tcx: 'a + 'mir, M: Machine<'a, 'mir, 'tcx>> {
     pub(super) tcx: TyCtxtAt<'a, 'tcx, 'tcx>,
 }
-impl<'b, 'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> HasDataLayout
-    for &'b Memory<'a, 'mir, 'tcx, M>
-{
-    #[inline]
-    fn data_layout(&self) -> &TargetDataLayout {
-        &self.tcx.data_layout
-    }
-}
-impl<'a, 'b, 'c, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> HasDataLayout
-    for &'b &'c mut Memory<'a, 'mir, 'tcx, M>
+impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> HasDataLayout
+    for Memory<'a, 'mir, 'tcx, M>
 {
     #[inline]
     fn data_layout(&self) -> &TargetDataLayout {


@@ -112,7 +112,7 @@ impl<'tcx, Tag> ScalarMaybeUndef<Tag> {
     }
     #[inline(always)]
-    pub fn to_usize(self, cx: impl HasDataLayout) -> EvalResult<'tcx, u64> {
+    pub fn to_usize(self, cx: &impl HasDataLayout) -> EvalResult<'tcx, u64> {
         self.not_undef()?.to_usize(cx)
     }
@@ -132,7 +132,7 @@ impl<'tcx, Tag> ScalarMaybeUndef<Tag> {
     }
     #[inline(always)]
-    pub fn to_isize(self, cx: impl HasDataLayout) -> EvalResult<'tcx, i64> {
+    pub fn to_isize(self, cx: &impl HasDataLayout) -> EvalResult<'tcx, i64> {
         self.not_undef()?.to_isize(cx)
     }
 }
@@ -178,7 +178,7 @@ impl<'tcx, Tag> Immediate<Tag> {
     pub fn new_slice(
         val: Scalar<Tag>,
         len: u64,
-        cx: impl HasDataLayout
+        cx: &impl HasDataLayout
     ) -> Self {
         Immediate::ScalarPair(
             val.into(),
@@ -743,7 +743,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M>
                     .ty_adt_def().expect("tagged layout corresponds to adt")
                     .repr
                     .discr_type();
-                let discr_ty = layout::Integer::from_attr(self.tcx.tcx, discr_ty);
+                let discr_ty = layout::Integer::from_attr(self, discr_ty);
                 let shift = 128 - discr_ty.size().bits();
                 let truncatee = sexted as u128;
                 (truncatee << shift) >> shift


@@ -128,7 +128,7 @@ impl<Tag> MemPlace<Tag> {
     /// Produces a Place that will error if attempted to be read from or written to
     #[inline(always)]
-    pub fn null(cx: impl HasDataLayout) -> Self {
+    pub fn null(cx: &impl HasDataLayout) -> Self {
         Self::from_scalar_ptr(Scalar::ptr_null(cx), Align::from_bytes(1, 1).unwrap())
     }
@@ -156,7 +156,7 @@ impl<Tag> MemPlace<Tag> {
 impl<'tcx, Tag> MPlaceTy<'tcx, Tag> {
     /// Produces a MemPlace that works for ZST but nothing else
     #[inline]
-    pub fn dangling(layout: TyLayout<'tcx>, cx: impl HasDataLayout) -> Self {
+    pub fn dangling(layout: TyLayout<'tcx>, cx: &impl HasDataLayout) -> Self {
         MPlaceTy {
             mplace: MemPlace::from_scalar_ptr(
                 Scalar::from_uint(layout.align.abi(), cx.pointer_size()),
@@ -172,7 +172,7 @@ impl<'tcx, Tag> MPlaceTy<'tcx, Tag> {
     }
     #[inline]
-    pub(super) fn len(self, cx: impl HasDataLayout) -> EvalResult<'tcx, u64> {
+    pub(super) fn len(self, cx: &impl HasDataLayout) -> EvalResult<'tcx, u64> {
         if self.layout.is_unsized() {
             // We need to consult `meta` metadata
             match self.layout.ty.sty {
@@ -217,7 +217,7 @@ impl<'tcx, Tag: ::std::fmt::Debug> OpTy<'tcx, Tag> {
 impl<'tcx, Tag: ::std::fmt::Debug> Place<Tag> {
     /// Produces a Place that will error if attempted to be read from or written to
     #[inline(always)]
-    pub fn null(cx: impl HasDataLayout) -> Self {
+    pub fn null(cx: &impl HasDataLayout) -> Self {
         Place::Ptr(MemPlace::null(cx))
     }
@@ -510,7 +510,7 @@ where
             Place::Ptr(mplace) =>
                 self.mplace_downcast(MPlaceTy { mplace, layout: base.layout }, variant)?.into(),
             Place::Local { .. } => {
-                let layout = base.layout.for_variant(&self, variant);
+                let layout = base.layout.for_variant(self, variant);
                 PlaceTy { layout, ..base }
             }
         })
@@ -738,10 +738,10 @@ where
                     _ => bug!("write_immediate_to_mplace: invalid ScalarPair layout: {:#?}",
                               dest.layout)
                 };
-                let (a_size, b_size) = (a.size(&self), b.size(&self));
-                let (a_align, b_align) = (a.align(&self), b.align(&self));
+                let (a_size, b_size) = (a.size(self), b.size(self));
+                let (a_align, b_align) = (a.align(self), b.align(self));
                 let b_offset = a_size.abi_align(b_align);
-                let b_ptr = ptr.offset(b_offset, &self)?.into();
+                let b_ptr = ptr.offset(b_offset, self)?.into();
                 // It is tempting to verify `b_offset` against `layout.fields.offset(1)`,
                 // but that does not work: We could be a newtype around a pair, then the
@@ -896,7 +896,7 @@ where
         if layout.is_unsized() {
             assert!(self.tcx.features().unsized_locals, "cannot alloc memory for unsized type");
             // FIXME: What should we do here? We should definitely also tag!
-            Ok(MPlaceTy::dangling(layout, &self))
+            Ok(MPlaceTy::dangling(layout, self))
         } else {
             let ptr = self.memory.allocate(layout.size, layout.align, kind)?;
             let ptr = M::tag_new_allocation(self, ptr, kind)?;
@@ -923,7 +923,7 @@ where
         // raw discriminants for enums are isize or bigger during
         // their computation, but the in-memory tag is the smallest possible
         // representation
-        let size = tag.value.size(self.tcx.tcx);
+        let size = tag.value.size(self);
         let shift = 128 - size.bits();
         let discr_val = (discr_val << shift) >> shift;


@@ -217,7 +217,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M>
             Repeat(ref operand, _) => {
                 let op = self.eval_operand(operand, None)?;
                 let dest = self.force_allocation(dest)?;
-                let length = dest.len(&self)?;
+                let length = dest.len(self)?;
                 if length > 0 {
                     // write the first
@@ -227,7 +227,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M>
                     if length > 1 {
                         // copy the rest
                         let (dest, dest_align) = first.to_scalar_ptr_align();
-                        let rest = dest.ptr_offset(first.layout.size, &self)?;
+                        let rest = dest.ptr_offset(first.layout.size, self)?;
                         self.memory.copy_repeatedly(
                             dest, dest_align, rest, dest_align, first.layout.size, length - 1, true
                         )?;
@@ -239,7 +239,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M>
                 // FIXME(CTFE): don't allow computing the length of arrays in const eval
                 let src = self.eval_place(place)?;
                 let mplace = self.force_allocation(src)?;
-                let len = mplace.len(&self)?;
+                let len = mplace.len(self)?;
                 let size = self.pointer_size();
                 self.write_scalar(
                     Scalar::from_uint(len, size),


@@ -405,7 +405,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M>
                 let ptr = self.ref_to_mplace(self.read_immediate(args[0])?)?;
                 let vtable = ptr.vtable()?;
                 let fn_ptr = self.memory.read_ptr_sized(
-                    vtable.offset(ptr_size * (idx as u64 + 3), &self)?,
+                    vtable.offset(ptr_size * (idx as u64 + 3), self)?,
                     ptr_align
                 )?.to_ptr()?;
                 let instance = self.memory.get_fn(fn_ptr)?;
@@ -416,7 +416,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M>
                 let mut args = args.to_vec();
                 let pointee = args[0].layout.ty.builtin_deref(true).unwrap().ty;
                 let fake_fat_ptr_ty = self.tcx.mk_mut_ptr(pointee);
-                args[0].layout = self.layout_of(fake_fat_ptr_ty)?.field(&self, 0)?;
+                args[0].layout = self.layout_of(fake_fat_ptr_ty)?.field(self, 0)?;
                 args[0].op = Operand::Immediate(Immediate::Scalar(ptr.ptr.into())); // strip vtable
                 trace!("Patched self operand to {:#?}", args[0]);
                 // recurse with concrete function
@@ -455,7 +455,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M>
                 };
                 let ty = self.tcx.mk_unit(); // return type is ()
-                let dest = MPlaceTy::dangling(self.layout_of(ty)?, &self);
+                let dest = MPlaceTy::dangling(self.layout_of(ty)?, self);
                 self.eval_fn_call(
                     instance,


@@ -60,9 +60,9 @@ impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M>
         let drop = self.memory.create_fn_alloc(drop).with_default_tag();
         self.memory.write_ptr_sized(vtable, ptr_align, Scalar::Ptr(drop).into())?;
-        let size_ptr = vtable.offset(ptr_size, &self)?;
+        let size_ptr = vtable.offset(ptr_size, self)?;
         self.memory.write_ptr_sized(size_ptr, ptr_align, Scalar::from_uint(size, ptr_size).into())?;
-        let align_ptr = vtable.offset(ptr_size * 2, &self)?;
+        let align_ptr = vtable.offset(ptr_size * 2, self)?;
         self.memory.write_ptr_sized(align_ptr, ptr_align,
                                     Scalar::from_uint(align, ptr_size).into())?;
@@ -70,7 +70,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M>
             if let Some((def_id, substs)) = *method {
                 let instance = self.resolve(def_id, substs)?;
                 let fn_ptr = self.memory.create_fn_alloc(instance).with_default_tag();
-                let method_ptr = vtable.offset(ptr_size * (3 + i as u64), &self)?;
+                let method_ptr = vtable.offset(ptr_size * (3 + i as u64), self)?;
                 self.memory.write_ptr_sized(method_ptr, ptr_align, Scalar::Ptr(fn_ptr).into())?;
             }
         }


@@ -87,23 +87,23 @@ struct ConstPropagator<'a, 'mir, 'tcx:'a+'mir> {
     param_env: ParamEnv<'tcx>,
 }
-impl<'a, 'b, 'tcx> LayoutOf for &'a ConstPropagator<'a, 'b, 'tcx> {
+impl<'a, 'b, 'tcx> LayoutOf for ConstPropagator<'a, 'b, 'tcx> {
     type Ty = ty::Ty<'tcx>;
     type TyLayout = Result<TyLayout<'tcx>, LayoutError<'tcx>>;
-    fn layout_of(self, ty: ty::Ty<'tcx>) -> Self::TyLayout {
+    fn layout_of(&self, ty: ty::Ty<'tcx>) -> Self::TyLayout {
         self.tcx.layout_of(self.param_env.and(ty))
     }
 }
-impl<'a, 'b, 'tcx> HasDataLayout for &'a ConstPropagator<'a, 'b, 'tcx> {
+impl<'a, 'b, 'tcx> HasDataLayout for ConstPropagator<'a, 'b, 'tcx> {
     #[inline]
     fn data_layout(&self) -> &TargetDataLayout {
         &self.tcx.data_layout
     }
 }
-impl<'a, 'b, 'tcx> HasTyCtxt<'tcx> for &'a ConstPropagator<'a, 'b, 'tcx> {
+impl<'a, 'b, 'tcx> HasTyCtxt<'tcx> for ConstPropagator<'a, 'b, 'tcx> {
     #[inline]
     fn tcx<'c>(&'c self) -> TyCtxt<'c, 'tcx, 'tcx> {
         self.tcx


@@ -11,7 +11,7 @@
 use abi::call::{FnType, ArgType, Reg, RegKind, Uniform};
 use abi::{HasDataLayout, LayoutOf, TyLayout, TyLayoutMethods};
-fn is_homogeneous_aggregate<'a, Ty, C>(cx: C, arg: &mut ArgType<'a, Ty>)
+fn is_homogeneous_aggregate<'a, Ty, C>(cx: &C, arg: &mut ArgType<'a, Ty>)
                                        -> Option<Uniform>
     where Ty: TyLayoutMethods<'a, C> + Copy,
           C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
@@ -41,7 +41,7 @@ fn is_homogeneous_aggregate<'a, Ty, C>(cx: C, arg: &mut ArgType<'a, Ty>)
     })
 }
-fn classify_ret_ty<'a, Ty, C>(cx: C, ret: &mut ArgType<'a, Ty>)
+fn classify_ret_ty<'a, Ty, C>(cx: &C, ret: &mut ArgType<'a, Ty>)
     where Ty: TyLayoutMethods<'a, C> + Copy,
          C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
 {
@@ -75,7 +75,7 @@ fn classify_ret_ty<'a, Ty, C>(cx: C, ret: &mut ArgType<'a, Ty>)
     ret.make_indirect();
 }
-fn classify_arg_ty<'a, Ty, C>(cx: C, arg: &mut ArgType<'a, Ty>)
+fn classify_arg_ty<'a, Ty, C>(cx: &C, arg: &mut ArgType<'a, Ty>)
     where Ty: TyLayoutMethods<'a, C> + Copy,
          C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
 {
@@ -109,7 +109,7 @@ fn classify_arg_ty<'a, Ty, C>(cx: C, arg: &mut ArgType<'a, Ty>)
     arg.make_indirect();
 }
-pub fn compute_abi_info<'a, Ty, C>(cx: C, fty: &mut FnType<'a, Ty>)
+pub fn compute_abi_info<'a, Ty, C>(cx: &C, fty: &mut FnType<'a, Ty>)
     where Ty: TyLayoutMethods<'a, C> + Copy,
          C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
 {


@@ -11,21 +11,21 @@
 use abi::call::{ArgType, FnType, };
 use abi::{HasDataLayout, LayoutOf, TyLayout, TyLayoutMethods};
-fn classify_ret_ty<'a, Ty, C>(_tuncx: C, ret: &mut ArgType<'a, Ty>)
+fn classify_ret_ty<'a, Ty, C>(_cx: &C, ret: &mut ArgType<'a, Ty>)
     where Ty: TyLayoutMethods<'a, C> + Copy,
           C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
 {
     ret.extend_integer_width_to(32);
 }
-fn classify_arg_ty<'a, Ty, C>(_cx: C, arg: &mut ArgType<'a, Ty>)
+fn classify_arg_ty<'a, Ty, C>(_cx: &C, arg: &mut ArgType<'a, Ty>)
     where Ty: TyLayoutMethods<'a, C> + Copy,
           C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
 {
     arg.extend_integer_width_to(32);
 }
-pub fn compute_abi_info<'a, Ty, C>(cx: C, fty: &mut FnType<'a, Ty>)
+pub fn compute_abi_info<'a, Ty, C>(cx: &C, fty: &mut FnType<'a, Ty>)
     where Ty: TyLayoutMethods<'a, C> + Copy,
           C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
 {


@@ -12,7 +12,7 @@ use abi::call::{Conv, FnType, ArgType, Reg, RegKind, Uniform};
 use abi::{HasDataLayout, LayoutOf, TyLayout, TyLayoutMethods};
 use spec::HasTargetSpec;
-fn is_homogeneous_aggregate<'a, Ty, C>(cx: C, arg: &mut ArgType<'a, Ty>)
+fn is_homogeneous_aggregate<'a, Ty, C>(cx: &C, arg: &mut ArgType<'a, Ty>)
                                        -> Option<Uniform>
     where Ty: TyLayoutMethods<'a, C> + Copy,
           C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
@@ -42,7 +42,7 @@ fn is_homogeneous_aggregate<'a, Ty, C>(cx: C, arg: &mut ArgType<'a, Ty>)
     })
 }
-fn classify_ret_ty<'a, Ty, C>(cx: C, ret: &mut ArgType<'a, Ty>, vfp: bool)
+fn classify_ret_ty<'a, Ty, C>(cx: &C, ret: &mut ArgType<'a, Ty>, vfp: bool)
     where Ty: TyLayoutMethods<'a, C> + Copy,
           C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
 {
@@ -77,7 +77,7 @@ fn classify_ret_ty<'a, Ty, C>(cx: C, ret: &mut ArgType<'a, Ty>, vfp: bool)
     ret.make_indirect();
 }
-fn classify_arg_ty<'a, Ty, C>(cx: C, arg: &mut ArgType<'a, Ty>, vfp: bool)
+fn classify_arg_ty<'a, Ty, C>(cx: &C, arg: &mut ArgType<'a, Ty>, vfp: bool)
     where Ty: TyLayoutMethods<'a, C> + Copy,
          C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
 {
@@ -101,7 +101,7 @@ fn classify_arg_ty<'a, Ty, C>(cx: C, arg: &mut ArgType<'a, Ty>, vfp: bool)
     });
 }
-pub fn compute_abi_info<'a, Ty, C>(cx: C, fty: &mut FnType<'a, Ty>)
+pub fn compute_abi_info<'a, Ty, C>(cx: &C, fty: &mut FnType<'a, Ty>)
     where Ty: TyLayoutMethods<'a, C> + Copy,
          C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout + HasTargetSpec
 {


@@ -16,7 +16,7 @@ use abi::{HasDataLayout, LayoutOf, TyLayout, TyLayoutMethods};
 // See the https://github.com/kripken/emscripten-fastcomp-clang repository.
 // The class `EmscriptenABIInfo` in `/lib/CodeGen/TargetInfo.cpp` contains the ABI definitions.
-fn classify_ret_ty<'a, Ty, C>(cx: C, ret: &mut ArgType<'a, Ty>)
+fn classify_ret_ty<'a, Ty, C>(cx: &C, ret: &mut ArgType<'a, Ty>)
     where Ty: TyLayoutMethods<'a, C> + Copy,
           C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
 {
@@ -42,7 +42,7 @@ fn classify_arg_ty<Ty>(arg: &mut ArgType<Ty>) {
     }
 }
-pub fn compute_abi_info<'a, Ty, C>(cx: C, fty: &mut FnType<'a, Ty>)
+pub fn compute_abi_info<'a, Ty, C>(cx: &C, fty: &mut FnType<'a, Ty>)
     where Ty: TyLayoutMethods<'a, C> + Copy,
           C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
 {


@@ -11,7 +11,7 @@
 use abi::call::{ArgType, FnType, Reg, Uniform};
 use abi::{HasDataLayout, LayoutOf, Size, TyLayoutMethods};
-fn classify_ret_ty<'a, Ty, C>(cx: C, ret: &mut ArgType<Ty>, offset: &mut Size)
+fn classify_ret_ty<'a, Ty, C>(cx: &C, ret: &mut ArgType<Ty>, offset: &mut Size)
     where Ty: TyLayoutMethods<'a, C>, C: LayoutOf<Ty = Ty> + HasDataLayout
 {
     if !ret.layout.is_aggregate() {
@@ -22,7 +22,7 @@ fn classify_ret_ty<'a, Ty, C>(cx: C, ret: &mut ArgType<Ty>, offset: &mut Size)
     }
 }
-fn classify_arg_ty<'a, Ty, C>(cx: C, arg: &mut ArgType<Ty>, offset: &mut Size)
+fn classify_arg_ty<'a, Ty, C>(cx: &C, arg: &mut ArgType<Ty>, offset: &mut Size)
     where Ty: TyLayoutMethods<'a, C>, C: LayoutOf<Ty = Ty> + HasDataLayout
 {
     let dl = cx.data_layout();
@@ -44,7 +44,7 @@ fn classify_arg_ty<'a, Ty, C>(cx: C, arg: &mut ArgType<Ty>, offset: &mut Size)
     *offset = offset.abi_align(align) + size.abi_align(align);
 }
-pub fn compute_abi_info<'a, Ty, C>(cx: C, fty: &mut FnType<Ty>)
+pub fn compute_abi_info<'a, Ty, C>(cx: &C, fty: &mut FnType<Ty>)
     where Ty: TyLayoutMethods<'a, C>, C: LayoutOf<Ty = Ty> + HasDataLayout
 {
     let mut offset = Size::ZERO;


@@ -27,7 +27,7 @@ fn extend_integer_width_mips<Ty>(arg: &mut ArgType<Ty>, bits: u64) {
     arg.extend_integer_width_to(bits);
 }
-fn float_reg<'a, Ty, C>(cx: C, ret: &ArgType<'a, Ty>, i: usize) -> Option<Reg>
+fn float_reg<'a, Ty, C>(cx: &C, ret: &ArgType<'a, Ty>, i: usize) -> Option<Reg>
     where Ty: TyLayoutMethods<'a, C> + Copy,
          C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
 {
@@ -41,7 +41,7 @@ fn float_reg<'a, Ty, C>(cx: C, ret: &ArgType<'a, Ty>, i: usize) -> Option<Reg>
     }
 }
-fn classify_ret_ty<'a, Ty, C>(cx: C, ret: &mut ArgType<'a, Ty>)
+fn classify_ret_ty<'a, Ty, C>(cx: &C, ret: &mut ArgType<'a, Ty>)
     where Ty: TyLayoutMethods<'a, C> + Copy,
          C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
 {
@@ -83,7 +83,7 @@ fn classify_ret_ty<'a, Ty, C>(cx: C, ret: &mut ArgType<'a, Ty>)
     }
 }
-fn classify_arg_ty<'a, Ty, C>(cx: C, arg: &mut ArgType<'a, Ty>)
+fn classify_arg_ty<'a, Ty, C>(cx: &C, arg: &mut ArgType<'a, Ty>)
     where Ty: TyLayoutMethods<'a, C> + Copy,
          C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
 {
@@ -151,7 +151,7 @@ fn classify_arg_ty<'a, Ty, C>(cx: C, arg: &mut ArgType<'a, Ty>)
     });
 }
-pub fn compute_abi_info<'a, Ty, C>(cx: C, fty: &mut FnType<'a, Ty>)
+pub fn compute_abi_info<'a, Ty, C>(cx: &C, fty: &mut FnType<'a, Ty>)
     where Ty: TyLayoutMethods<'a, C> + Copy,
          C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
 {

View File

@ -137,7 +137,7 @@ impl Reg {
} }
impl Reg { impl Reg {
pub fn align<C: HasDataLayout>(&self, cx: C) -> Align { pub fn align<C: HasDataLayout>(&self, cx: &C) -> Align {
let dl = cx.data_layout(); let dl = cx.data_layout();
match self.kind { match self.kind {
RegKind::Integer => { RegKind::Integer => {
@ -188,7 +188,7 @@ impl From<Reg> for Uniform {
} }
impl Uniform { impl Uniform {
pub fn align<C: HasDataLayout>(&self, cx: C) -> Align { pub fn align<C: HasDataLayout>(&self, cx: &C) -> Align {
self.unit.align(cx) self.unit.align(cx)
} }
} }
@ -225,12 +225,12 @@ impl CastTarget {
} }
} }
pub fn size<C: HasDataLayout>(&self, cx: C) -> Size { pub fn size<C: HasDataLayout>(&self, cx: &C) -> Size {
(self.prefix_chunk * self.prefix.iter().filter(|x| x.is_some()).count() as u64) (self.prefix_chunk * self.prefix.iter().filter(|x| x.is_some()).count() as u64)
.abi_align(self.rest.align(cx)) + self.rest.total .abi_align(self.rest.align(cx)) + self.rest.total
} }
pub fn align<C: HasDataLayout>(&self, cx: C) -> Align { pub fn align<C: HasDataLayout>(&self, cx: &C) -> Align {
self.prefix.iter() self.prefix.iter()
.filter_map(|x| x.map(|kind| Reg { kind, size: self.prefix_chunk }.align(cx))) .filter_map(|x| x.map(|kind| Reg { kind, size: self.prefix_chunk }.align(cx)))
.fold(cx.data_layout().aggregate_align.max(self.rest.align(cx)), .fold(cx.data_layout().aggregate_align.max(self.rest.align(cx)),
@ -249,8 +249,8 @@ impl<'a, Ty> TyLayout<'a, Ty> {
} }
} }
fn homogeneous_aggregate<C>(&self, cx: C) -> Option<Reg> fn homogeneous_aggregate<C>(&self, cx: &C) -> Option<Reg>
where Ty: TyLayoutMethods<'a, C> + Copy, C: LayoutOf<Ty = Ty, TyLayout = Self> + Copy where Ty: TyLayoutMethods<'a, C> + Copy, C: LayoutOf<Ty = Ty, TyLayout = Self>
{ {
match self.abi { match self.abi {
Abi::Uninhabited => None, Abi::Uninhabited => None,
@ -483,7 +483,7 @@ pub struct FnType<'a, Ty> {
} }
impl<'a, Ty> FnType<'a, Ty> { impl<'a, Ty> FnType<'a, Ty> {
pub fn adjust_for_cabi<C>(&mut self, cx: C, abi: ::spec::abi::Abi) -> Result<(), String> pub fn adjust_for_cabi<C>(&mut self, cx: &C, abi: ::spec::abi::Abi) -> Result<(), String>
where Ty: TyLayoutMethods<'a, C> + Copy, where Ty: TyLayoutMethods<'a, C> + Copy,
C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout + HasTargetSpec C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout + HasTargetSpec
{ {
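
Alongside the `cx: &C` switch, the `homogeneous_aggregate` hunk above also drops the `C: Copy` bound on the context: with a borrowed context, the same `cx` can be handed to every field in turn without being copied. A rough standalone sketch of that shape follows; `Layout`, `LayoutCtx`, and `unit_size` are simplified stand-ins, not the real layout machinery.

// Stand-in for a layout with nested fields; only the recursion pattern
// matters here.
struct Layout {
    size: u64,
    fields: Vec<Layout>,
}

trait LayoutCtx {
    fn unit_size(&self) -> u64;
}

impl Layout {
    // `cx: &C` is reused by every recursive call, so no `C: Copy`
    // bound is needed, unlike the by-value version.
    fn homogeneous_units<C: LayoutCtx>(&self, cx: &C) -> u64 {
        if self.fields.is_empty() {
            self.size / cx.unit_size()
        } else {
            self.fields.iter().map(|f| f.homogeneous_units(cx)).sum()
        }
    }
}

struct Cx;

impl LayoutCtx for Cx {
    fn unit_size(&self) -> u64 { 4 }
}

fn main() {
    let pair = Layout {
        size: 0,
        fields: vec![
            Layout { size: 4, fields: vec![] },
            Layout { size: 8, fields: vec![] },
        ],
    };
    // 4/4 + 8/4 = 3 units, computed through a single borrowed context.
    assert_eq!(pair.homogeneous_units(&Cx), 3);
}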

View File

@ -11,7 +11,7 @@
use abi::call::{ArgType, FnType, Reg, Uniform}; use abi::call::{ArgType, FnType, Reg, Uniform};
use abi::{HasDataLayout, LayoutOf, Size, TyLayoutMethods}; use abi::{HasDataLayout, LayoutOf, Size, TyLayoutMethods};
fn classify_ret_ty<'a, Ty, C>(cx: C, ret: &mut ArgType<Ty>, offset: &mut Size) fn classify_ret_ty<'a, Ty, C>(cx: &C, ret: &mut ArgType<Ty>, offset: &mut Size)
where Ty: TyLayoutMethods<'a, C>, C: LayoutOf<Ty = Ty> + HasDataLayout where Ty: TyLayoutMethods<'a, C>, C: LayoutOf<Ty = Ty> + HasDataLayout
{ {
if !ret.layout.is_aggregate() { if !ret.layout.is_aggregate() {
@ -22,7 +22,7 @@ fn classify_ret_ty<'a, Ty, C>(cx: C, ret: &mut ArgType<Ty>, offset: &mut Size)
} }
} }
fn classify_arg_ty<'a, Ty, C>(cx: C, arg: &mut ArgType<Ty>, offset: &mut Size) fn classify_arg_ty<'a, Ty, C>(cx: &C, arg: &mut ArgType<Ty>, offset: &mut Size)
where Ty: TyLayoutMethods<'a, C>, C: LayoutOf<Ty = Ty> + HasDataLayout where Ty: TyLayoutMethods<'a, C>, C: LayoutOf<Ty = Ty> + HasDataLayout
{ {
let dl = cx.data_layout(); let dl = cx.data_layout();
@ -44,7 +44,7 @@ fn classify_arg_ty<'a, Ty, C>(cx: C, arg: &mut ArgType<Ty>, offset: &mut Size)
*offset = offset.abi_align(align) + size.abi_align(align); *offset = offset.abi_align(align) + size.abi_align(align);
} }
pub fn compute_abi_info<'a, Ty, C>(cx: C, fty: &mut FnType<Ty>) pub fn compute_abi_info<'a, Ty, C>(cx: &C, fty: &mut FnType<Ty>)
where Ty: TyLayoutMethods<'a, C>, C: LayoutOf<Ty = Ty> + HasDataLayout where Ty: TyLayoutMethods<'a, C>, C: LayoutOf<Ty = Ty> + HasDataLayout
{ {
let mut offset = Size::ZERO; let mut offset = Size::ZERO;

View File

@ -22,7 +22,7 @@ enum ABI {
} }
use self::ABI::*; use self::ABI::*;
fn is_homogeneous_aggregate<'a, Ty, C>(cx: C, arg: &mut ArgType<'a, Ty>, abi: ABI) fn is_homogeneous_aggregate<'a, Ty, C>(cx: &C, arg: &mut ArgType<'a, Ty>, abi: ABI)
-> Option<Uniform> -> Option<Uniform>
where Ty: TyLayoutMethods<'a, C> + Copy, where Ty: TyLayoutMethods<'a, C> + Copy,
C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
@ -52,7 +52,7 @@ fn is_homogeneous_aggregate<'a, Ty, C>(cx: C, arg: &mut ArgType<'a, Ty>, abi: AB
}) })
} }
fn classify_ret_ty<'a, Ty, C>(cx: C, ret: &mut ArgType<'a, Ty>, abi: ABI) fn classify_ret_ty<'a, Ty, C>(cx: &C, ret: &mut ArgType<'a, Ty>, abi: ABI)
where Ty: TyLayoutMethods<'a, C> + Copy, where Ty: TyLayoutMethods<'a, C> + Copy,
C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
{ {
@ -95,7 +95,7 @@ fn classify_ret_ty<'a, Ty, C>(cx: C, ret: &mut ArgType<'a, Ty>, abi: ABI)
ret.make_indirect(); ret.make_indirect();
} }
fn classify_arg_ty<'a, Ty, C>(cx: C, arg: &mut ArgType<'a, Ty>, abi: ABI) fn classify_arg_ty<'a, Ty, C>(cx: &C, arg: &mut ArgType<'a, Ty>, abi: ABI)
where Ty: TyLayoutMethods<'a, C> + Copy, where Ty: TyLayoutMethods<'a, C> + Copy,
C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
{ {
@ -134,7 +134,7 @@ fn classify_arg_ty<'a, Ty, C>(cx: C, arg: &mut ArgType<'a, Ty>, abi: ABI)
}); });
} }
pub fn compute_abi_info<'a, Ty, C>(cx: C, fty: &mut FnType<'a, Ty>) pub fn compute_abi_info<'a, Ty, C>(cx: &C, fty: &mut FnType<'a, Ty>)
where Ty: TyLayoutMethods<'a, C> + Copy, where Ty: TyLayoutMethods<'a, C> + Copy,
C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
{ {

View File

@ -24,7 +24,7 @@ fn classify_ret_ty<'a, Ty, C>(ret: &mut ArgType<Ty>)
} }
} }
fn is_single_fp_element<'a, Ty, C>(cx: C, layout: TyLayout<'a, Ty>) -> bool fn is_single_fp_element<'a, Ty, C>(cx: &C, layout: TyLayout<'a, Ty>) -> bool
where Ty: TyLayoutMethods<'a, C>, where Ty: TyLayoutMethods<'a, C>,
C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
{ {
@ -41,7 +41,7 @@ fn is_single_fp_element<'a, Ty, C>(cx: C, layout: TyLayout<'a, Ty>) -> bool
} }
} }
fn classify_arg_ty<'a, Ty, C>(cx: C, arg: &mut ArgType<'a, Ty>) fn classify_arg_ty<'a, Ty, C>(cx: &C, arg: &mut ArgType<'a, Ty>)
where Ty: TyLayoutMethods<'a, C> + Copy, where Ty: TyLayoutMethods<'a, C> + Copy,
C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
{ {
@ -67,7 +67,7 @@ fn classify_arg_ty<'a, Ty, C>(cx: C, arg: &mut ArgType<'a, Ty>)
} }
} }
pub fn compute_abi_info<'a, Ty, C>(cx: C, fty: &mut FnType<'a, Ty>) pub fn compute_abi_info<'a, Ty, C>(cx: &C, fty: &mut FnType<'a, Ty>)
where Ty: TyLayoutMethods<'a, C> + Copy, where Ty: TyLayoutMethods<'a, C> + Copy,
C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
{ {

View File

@ -11,7 +11,7 @@
use abi::call::{ArgType, FnType, Reg, Uniform}; use abi::call::{ArgType, FnType, Reg, Uniform};
use abi::{HasDataLayout, LayoutOf, Size, TyLayoutMethods}; use abi::{HasDataLayout, LayoutOf, Size, TyLayoutMethods};
fn classify_ret_ty<'a, Ty, C>(cx: C, ret: &mut ArgType<Ty>, offset: &mut Size) fn classify_ret_ty<'a, Ty, C>(cx: &C, ret: &mut ArgType<Ty>, offset: &mut Size)
where Ty: TyLayoutMethods<'a, C>, C: LayoutOf<Ty = Ty> + HasDataLayout where Ty: TyLayoutMethods<'a, C>, C: LayoutOf<Ty = Ty> + HasDataLayout
{ {
if !ret.layout.is_aggregate() { if !ret.layout.is_aggregate() {
@ -22,7 +22,7 @@ fn classify_ret_ty<'a, Ty, C>(cx: C, ret: &mut ArgType<Ty>, offset: &mut Size)
} }
} }
fn classify_arg_ty<'a, Ty, C>(cx: C, arg: &mut ArgType<Ty>, offset: &mut Size) fn classify_arg_ty<'a, Ty, C>(cx: &C, arg: &mut ArgType<Ty>, offset: &mut Size)
where Ty: TyLayoutMethods<'a, C>, C: LayoutOf<Ty = Ty> + HasDataLayout where Ty: TyLayoutMethods<'a, C>, C: LayoutOf<Ty = Ty> + HasDataLayout
{ {
let dl = cx.data_layout(); let dl = cx.data_layout();
@ -44,7 +44,7 @@ fn classify_arg_ty<'a, Ty, C>(cx: C, arg: &mut ArgType<Ty>, offset: &mut Size)
*offset = offset.abi_align(align) + size.abi_align(align); *offset = offset.abi_align(align) + size.abi_align(align);
} }
pub fn compute_abi_info<'a, Ty, C>(cx: C, fty: &mut FnType<Ty>) pub fn compute_abi_info<'a, Ty, C>(cx: &C, fty: &mut FnType<Ty>)
where Ty: TyLayoutMethods<'a, C>, C: LayoutOf<Ty = Ty> + HasDataLayout where Ty: TyLayoutMethods<'a, C>, C: LayoutOf<Ty = Ty> + HasDataLayout
{ {
let mut offset = Size::ZERO; let mut offset = Size::ZERO;

View File

@ -13,7 +13,7 @@
use abi::call::{FnType, ArgType, Reg, RegKind, Uniform}; use abi::call::{FnType, ArgType, Reg, RegKind, Uniform};
use abi::{HasDataLayout, LayoutOf, TyLayout, TyLayoutMethods}; use abi::{HasDataLayout, LayoutOf, TyLayout, TyLayoutMethods};
fn is_homogeneous_aggregate<'a, Ty, C>(cx: C, arg: &mut ArgType<'a, Ty>) fn is_homogeneous_aggregate<'a, Ty, C>(cx: &C, arg: &mut ArgType<'a, Ty>)
-> Option<Uniform> -> Option<Uniform>
where Ty: TyLayoutMethods<'a, C> + Copy, where Ty: TyLayoutMethods<'a, C> + Copy,
C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
@ -41,7 +41,7 @@ fn is_homogeneous_aggregate<'a, Ty, C>(cx: C, arg: &mut ArgType<'a, Ty>)
}) })
} }
fn classify_ret_ty<'a, Ty, C>(cx: C, ret: &mut ArgType<'a, Ty>) fn classify_ret_ty<'a, Ty, C>(cx: &C, ret: &mut ArgType<'a, Ty>)
where Ty: TyLayoutMethods<'a, C> + Copy, where Ty: TyLayoutMethods<'a, C> + Copy,
C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
{ {
@ -69,7 +69,7 @@ fn classify_ret_ty<'a, Ty, C>(cx: C, ret: &mut ArgType<'a, Ty>)
ret.make_indirect(); ret.make_indirect();
} }
fn classify_arg_ty<'a, Ty, C>(cx: C, arg: &mut ArgType<'a, Ty>) fn classify_arg_ty<'a, Ty, C>(cx: &C, arg: &mut ArgType<'a, Ty>)
where Ty: TyLayoutMethods<'a, C> + Copy, where Ty: TyLayoutMethods<'a, C> + Copy,
C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
{ {
@ -95,7 +95,7 @@ fn classify_arg_ty<'a, Ty, C>(cx: C, arg: &mut ArgType<'a, Ty>)
}); });
} }
pub fn compute_abi_info<'a, Ty, C>(cx: C, fty: &mut FnType<'a, Ty>) pub fn compute_abi_info<'a, Ty, C>(cx: &C, fty: &mut FnType<'a, Ty>)
where Ty: TyLayoutMethods<'a, C> + Copy, where Ty: TyLayoutMethods<'a, C> + Copy,
C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
{ {

View File

@ -18,7 +18,7 @@ pub enum Flavor {
Fastcall Fastcall
} }
fn is_single_fp_element<'a, Ty, C>(cx: C, layout: TyLayout<'a, Ty>) -> bool fn is_single_fp_element<'a, Ty, C>(cx: &C, layout: TyLayout<'a, Ty>) -> bool
where Ty: TyLayoutMethods<'a, C> + Copy, where Ty: TyLayoutMethods<'a, C> + Copy,
C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
{ {
@ -35,7 +35,7 @@ fn is_single_fp_element<'a, Ty, C>(cx: C, layout: TyLayout<'a, Ty>) -> bool
} }
} }
pub fn compute_abi_info<'a, Ty, C>(cx: C, fty: &mut FnType<'a, Ty>, flavor: Flavor) pub fn compute_abi_info<'a, Ty, C>(cx: &C, fty: &mut FnType<'a, Ty>, flavor: Flavor)
where Ty: TyLayoutMethods<'a, C> + Copy, where Ty: TyLayoutMethods<'a, C> + Copy,
C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout + HasTargetSpec C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout + HasTargetSpec
{ {

View File

@ -31,12 +31,12 @@ struct Memory;
const LARGEST_VECTOR_SIZE: usize = 512; const LARGEST_VECTOR_SIZE: usize = 512;
const MAX_EIGHTBYTES: usize = LARGEST_VECTOR_SIZE / 64; const MAX_EIGHTBYTES: usize = LARGEST_VECTOR_SIZE / 64;
fn classify_arg<'a, Ty, C>(cx: C, arg: &ArgType<'a, Ty>) fn classify_arg<'a, Ty, C>(cx: &C, arg: &ArgType<'a, Ty>)
-> Result<[Option<Class>; MAX_EIGHTBYTES], Memory> -> Result<[Option<Class>; MAX_EIGHTBYTES], Memory>
where Ty: TyLayoutMethods<'a, C> + Copy, where Ty: TyLayoutMethods<'a, C> + Copy,
C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
{ {
fn classify<'a, Ty, C>(cx: C, layout: TyLayout<'a, Ty>, fn classify<'a, Ty, C>(cx: &C, layout: TyLayout<'a, Ty>,
cls: &mut [Option<Class>], off: Size) -> Result<(), Memory> cls: &mut [Option<Class>], off: Size) -> Result<(), Memory>
where Ty: TyLayoutMethods<'a, C> + Copy, where Ty: TyLayoutMethods<'a, C> + Copy,
C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
@ -178,7 +178,7 @@ fn cast_target(cls: &[Option<Class>], size: Size) -> CastTarget {
target target
} }
pub fn compute_abi_info<'a, Ty, C>(cx: C, fty: &mut FnType<'a, Ty>) pub fn compute_abi_info<'a, Ty, C>(cx: &C, fty: &mut FnType<'a, Ty>)
where Ty: TyLayoutMethods<'a, C> + Copy, where Ty: TyLayoutMethods<'a, C> + Copy,
C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout C: LayoutOf<Ty = Ty, TyLayout = TyLayout<'a, Ty>> + HasDataLayout
{ {

View File

@ -203,11 +203,11 @@ impl TargetDataLayout {
} }
} }
pub trait HasDataLayout: Copy { pub trait HasDataLayout {
fn data_layout(&self) -> &TargetDataLayout; fn data_layout(&self) -> &TargetDataLayout;
} }
impl<'a> HasDataLayout for &'a TargetDataLayout { impl HasDataLayout for TargetDataLayout {
fn data_layout(&self) -> &TargetDataLayout { fn data_layout(&self) -> &TargetDataLayout {
self self
} }
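
A small standalone sketch of what this trait shape permits, using stand-in definitions rather than the actual rustc_target ones: the trait no longer carries a `Copy` supertrait, the impl sits on the owned data-layout type instead of on a reference to it, and generic callers simply pass `&dl`.

// Stand-ins for TargetDataLayout and HasDataLayout, assumed for this
// illustration only.
struct TargetDataLayout {
    pointer_size: u64,
}

trait HasDataLayout {
    fn data_layout(&self) -> &TargetDataLayout;
}

// The impl is on the owned type rather than on `&'a TargetDataLayout`.
impl HasDataLayout for TargetDataLayout {
    fn data_layout(&self) -> &TargetDataLayout {
        self
    }
}

fn pointer_size<C: HasDataLayout>(cx: &C) -> u64 {
    cx.data_layout().pointer_size
}

fn main() {
    let dl = TargetDataLayout { pointer_size: 8 };
    // A plain borrow of the owned value satisfies `&C`.
    assert_eq!(pointer_size(&dl), 8);
}
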
@ -267,7 +267,7 @@ impl Size {
} }
#[inline] #[inline]
pub fn checked_add<C: HasDataLayout>(self, offset: Size, cx: C) -> Option<Size> { pub fn checked_add<C: HasDataLayout>(self, offset: Size, cx: &C) -> Option<Size> {
let dl = cx.data_layout(); let dl = cx.data_layout();
let bytes = self.bytes().checked_add(offset.bytes())?; let bytes = self.bytes().checked_add(offset.bytes())?;
@ -280,7 +280,7 @@ impl Size {
} }
#[inline] #[inline]
pub fn checked_mul<C: HasDataLayout>(self, count: u64, cx: C) -> Option<Size> { pub fn checked_mul<C: HasDataLayout>(self, count: u64, cx: &C) -> Option<Size> {
let dl = cx.data_layout(); let dl = cx.data_layout();
let bytes = self.bytes().checked_mul(count)?; let bytes = self.bytes().checked_mul(count)?;
@ -457,7 +457,7 @@ impl Integer {
} }
} }
pub fn align<C: HasDataLayout>(self, cx: C) -> Align { pub fn align<C: HasDataLayout>(self, cx: &C) -> Align {
let dl = cx.data_layout(); let dl = cx.data_layout();
match self { match self {
@ -492,7 +492,7 @@ impl Integer {
} }
/// Find the smallest integer with the given alignment. /// Find the smallest integer with the given alignment.
pub fn for_abi_align<C: HasDataLayout>(cx: C, align: Align) -> Option<Integer> { pub fn for_abi_align<C: HasDataLayout>(cx: &C, align: Align) -> Option<Integer> {
let dl = cx.data_layout(); let dl = cx.data_layout();
let wanted = align.abi(); let wanted = align.abi();
@ -505,7 +505,7 @@ impl Integer {
} }
/// Find the largest integer with the given alignment or less. /// Find the largest integer with the given alignment or less.
pub fn approximate_abi_align<C: HasDataLayout>(cx: C, align: Align) -> Integer { pub fn approximate_abi_align<C: HasDataLayout>(cx: &C, align: Align) -> Integer {
let dl = cx.data_layout(); let dl = cx.data_layout();
let wanted = align.abi(); let wanted = align.abi();
@ -571,7 +571,7 @@ pub enum Primitive {
} }
impl<'a, 'tcx> Primitive { impl<'a, 'tcx> Primitive {
pub fn size<C: HasDataLayout>(self, cx: C) -> Size { pub fn size<C: HasDataLayout>(self, cx: &C) -> Size {
let dl = cx.data_layout(); let dl = cx.data_layout();
match self { match self {
@ -582,7 +582,7 @@ impl<'a, 'tcx> Primitive {
} }
} }
pub fn align<C: HasDataLayout>(self, cx: C) -> Align { pub fn align<C: HasDataLayout>(self, cx: &C) -> Align {
let dl = cx.data_layout(); let dl = cx.data_layout();
match self { match self {
@ -642,7 +642,7 @@ impl Scalar {
/// Returns the valid range as a `x..y` range. /// Returns the valid range as a `x..y` range.
/// ///
/// If `x` and `y` are equal, the range is full, not empty. /// If `x` and `y` are equal, the range is full, not empty.
pub fn valid_range_exclusive<C: HasDataLayout>(&self, cx: C) -> Range<u128> { pub fn valid_range_exclusive<C: HasDataLayout>(&self, cx: &C) -> Range<u128> {
// For a (max) value of -1, max will be `-1 as usize`, which overflows. // For a (max) value of -1, max will be `-1 as usize`, which overflows.
// However, that is fine here (it would still represent the full range), // However, that is fine here (it would still represent the full range),
// i.e., if the range is everything. // i.e., if the range is everything.
@ -854,7 +854,7 @@ pub struct LayoutDetails {
} }
impl LayoutDetails { impl LayoutDetails {
pub fn scalar<C: HasDataLayout>(cx: C, scalar: Scalar) -> Self { pub fn scalar<C: HasDataLayout>(cx: &C, scalar: Scalar) -> Self {
let size = scalar.value.size(cx); let size = scalar.value.size(cx);
let align = scalar.value.align(cx); let align = scalar.value.align(cx);
LayoutDetails { LayoutDetails {
@ -891,20 +891,20 @@ pub trait LayoutOf {
type Ty; type Ty;
type TyLayout; type TyLayout;
fn layout_of(self, ty: Self::Ty) -> Self::TyLayout; fn layout_of(&self, ty: Self::Ty) -> Self::TyLayout;
} }
pub trait TyLayoutMethods<'a, C: LayoutOf<Ty = Self>>: Sized { pub trait TyLayoutMethods<'a, C: LayoutOf<Ty = Self>>: Sized {
fn for_variant(this: TyLayout<'a, Self>, cx: C, variant_index: usize) -> TyLayout<'a, Self>; fn for_variant(this: TyLayout<'a, Self>, cx: &C, variant_index: usize) -> TyLayout<'a, Self>;
fn field(this: TyLayout<'a, Self>, cx: C, i: usize) -> C::TyLayout; fn field(this: TyLayout<'a, Self>, cx: &C, i: usize) -> C::TyLayout;
} }
impl<'a, Ty> TyLayout<'a, Ty> { impl<'a, Ty> TyLayout<'a, Ty> {
pub fn for_variant<C>(self, cx: C, variant_index: usize) -> Self pub fn for_variant<C>(self, cx: &C, variant_index: usize) -> Self
where Ty: TyLayoutMethods<'a, C>, C: LayoutOf<Ty = Ty> { where Ty: TyLayoutMethods<'a, C>, C: LayoutOf<Ty = Ty> {
Ty::for_variant(self, cx, variant_index) Ty::for_variant(self, cx, variant_index)
} }
pub fn field<C>(self, cx: C, i: usize) -> C::TyLayout pub fn field<C>(self, cx: &C, i: usize) -> C::TyLayout
where Ty: TyLayoutMethods<'a, C>, C: LayoutOf<Ty = Ty> { where Ty: TyLayoutMethods<'a, C>, C: LayoutOf<Ty = Ty> {
Ty::field(self, cx, i) Ty::field(self, cx, i)
} }
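
To make the reworked trait shapes above concrete, here is a minimal standalone sketch. `EvalCtx`, `MyTy`, `MyLayout`, and the size values are assumptions made for the illustration, not compiler items; it shows `layout_of` taking `&self`, generic code receiving `cx: &C`, and a context held behind a mutable reference being handed to such code directly.

// Stand-in trait mirroring the new `LayoutOf` shape.
trait LayoutOf {
    type Ty;
    type TyLayout;
    fn layout_of(&self, ty: Self::Ty) -> Self::TyLayout;
}

enum MyTy {
    Bool,
    Pair,
}

struct MyLayout {
    size: u64,
}

struct EvalCtx {
    // Mutable state, so this context cannot reasonably be `Copy`.
    queries: u64,
}

impl LayoutOf for EvalCtx {
    type Ty = MyTy;
    type TyLayout = MyLayout;
    fn layout_of(&self, ty: MyTy) -> MyLayout {
        match ty {
            MyTy::Bool => MyLayout { size: 1 },
            MyTy::Pair => MyLayout { size: 2 },
        }
    }
}

// Generic code bounds on `C: LayoutOf` and receives `cx: &C`.
fn size_of<C: LayoutOf<Ty = MyTy, TyLayout = MyLayout>>(cx: &C, ty: MyTy) -> u64 {
    cx.layout_of(ty).size
}

fn eval_step(ctx: &mut EvalCtx) {
    // With `layout_of(&self, ..)` and `cx: &C`, code holding a mutable
    // context can pass it straight to layout helpers; the mutable
    // borrow is reborrowed immutably for the call.
    let size = size_of(ctx, MyTy::Pair);
    ctx.queries += size;
}

fn main() {
    let mut ctx = EvalCtx { queries: 0 };
    eval_step(&mut ctx);
    assert_eq!(size_of(&ctx, MyTy::Bool), 1);
    assert_eq!(ctx.queries, 2);
}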

View File

@ -444,11 +444,11 @@ pub struct Target {
pub options: TargetOptions, pub options: TargetOptions,
} }
pub trait HasTargetSpec: Copy { pub trait HasTargetSpec {
fn target_spec(&self) -> &Target; fn target_spec(&self) -> &Target;
} }
impl<'a> HasTargetSpec for &'a Target { impl HasTargetSpec for Target {
fn target_spec(&self) -> &Target { fn target_spec(&self) -> &Target {
self self
} }
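
Finally, a short standalone sketch of a context satisfying more than one of these traits at once, echoing bounds such as `HasDataLayout + HasTargetSpec` on `adjust_for_cabi` above. The trait and type definitions below are simplified stand-ins, not the real rustc_target ones; the point is that a single `&C` argument serves every bound.

// Stand-in traits; the real code uses HasDataLayout and HasTargetSpec.
struct DataLayout {
    pointer_bytes: u64,
}

struct Target {
    arch: &'static str,
}

trait HasDataLayout {
    fn data_layout(&self) -> &DataLayout;
}

trait HasTargetSpec {
    fn target_spec(&self) -> &Target;
}

struct CodegenCx {
    dl: DataLayout,
    target: Target,
}

impl HasDataLayout for CodegenCx {
    fn data_layout(&self) -> &DataLayout { &self.dl }
}

impl HasTargetSpec for CodegenCx {
    fn target_spec(&self) -> &Target { &self.target }
}

// One borrowed context satisfies both bounds.
fn describe<C: HasDataLayout + HasTargetSpec>(cx: &C) -> String {
    format!("{}-bit {}",
            cx.data_layout().pointer_bytes * 8,
            cx.target_spec().arch)
}

fn main() {
    let cx = CodegenCx {
        dl: DataLayout { pointer_bytes: 8 },
        target: Target { arch: "x86_64" },
    };
    assert_eq!(describe(&cx), "64-bit x86_64");
}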