use crate::ich::StableHashingContext;
use crate::middle::codegen_fn_attrs::CodegenFnAttrFlags;
use crate::mir::{GeneratorLayout, GeneratorSavedLocal};
use crate::ty::subst::Subst;
use crate::ty::{self, subst::SubstsRef, ReprOptions, Ty, TyCtxt, TypeFoldable};

use rustc_ast::{self as ast, IntTy, UintTy};
use rustc_attr as attr;
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_hir as hir;
use rustc_hir::lang_items::LangItem;
use rustc_index::bit_set::BitSet;
use rustc_index::vec::{Idx, IndexVec};
use rustc_session::{DataTypeKind, FieldInfo, SizeKind, VariantInfo};
use rustc_span::symbol::{Ident, Symbol};
use rustc_span::DUMMY_SP;
use rustc_target::abi::call::{
    ArgAbi, ArgAttribute, ArgAttributes, Conv, FnAbi, PassMode, Reg, RegKind,
};
use rustc_target::abi::*;
use rustc_target::spec::{abi::Abi as SpecAbi, HasTargetSpec, PanicStrategy};

use std::cmp;
use std::fmt;
use std::iter;
use std::mem;
use std::num::NonZeroUsize;
use std::ops::Bound;

pub trait IntegerExt {
    fn to_ty<'tcx>(&self, tcx: TyCtxt<'tcx>, signed: bool) -> Ty<'tcx>;
    fn from_attr<C: HasDataLayout>(cx: &C, ity: attr::IntType) -> Integer;
    fn repr_discr<'tcx>(
        tcx: TyCtxt<'tcx>,
        ty: Ty<'tcx>,
        repr: &ReprOptions,
        min: i128,
        max: i128,
    ) -> (Integer, bool);
}

impl IntegerExt for Integer {
    fn to_ty<'tcx>(&self, tcx: TyCtxt<'tcx>, signed: bool) -> Ty<'tcx> {
        match (*self, signed) {
            (I8, false) => tcx.types.u8,
            (I16, false) => tcx.types.u16,
            (I32, false) => tcx.types.u32,
            (I64, false) => tcx.types.u64,
            (I128, false) => tcx.types.u128,
            (I8, true) => tcx.types.i8,
            (I16, true) => tcx.types.i16,
            (I32, true) => tcx.types.i32,
            (I64, true) => tcx.types.i64,
            (I128, true) => tcx.types.i128,
        }
    }

    /// Gets the Integer type from an attr::IntType.
    fn from_attr<C: HasDataLayout>(cx: &C, ity: attr::IntType) -> Integer {
        let dl = cx.data_layout();

        match ity {
            attr::SignedInt(IntTy::I8) | attr::UnsignedInt(UintTy::U8) => I8,
            attr::SignedInt(IntTy::I16) | attr::UnsignedInt(UintTy::U16) => I16,
            attr::SignedInt(IntTy::I32) | attr::UnsignedInt(UintTy::U32) => I32,
            attr::SignedInt(IntTy::I64) | attr::UnsignedInt(UintTy::U64) => I64,
            attr::SignedInt(IntTy::I128) | attr::UnsignedInt(UintTy::U128) => I128,
            attr::SignedInt(IntTy::Isize) | attr::UnsignedInt(UintTy::Usize) => {
                dl.ptr_sized_integer()
            }
        }
    }

    /// Finds the appropriate Integer type and signedness for the given
    /// signed discriminant range and `#[repr]` attribute.
    /// N.B.: `u128` values above `i128::MAX` will be treated as signed, but
    /// that shouldn't affect anything, other than maybe debuginfo.
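    /// For example, a discriminant range of `-1..=100` with no `#[repr]`
    /// hint yields `(I8, true)`, since both bounds fit in a signed byte.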
    fn repr_discr<'tcx>(
        tcx: TyCtxt<'tcx>,
        ty: Ty<'tcx>,
        repr: &ReprOptions,
        min: i128,
        max: i128,
    ) -> (Integer, bool) {
        // Theoretically, negative values could be larger in unsigned representation
        // than the unsigned representation of the signed minimum. However, if there
        // are any negative values, the only valid unsigned representation is u128
        // which can fit all i128 values, so the result remains unaffected.
        let unsigned_fit = Integer::fit_unsigned(cmp::max(min as u128, max as u128));
        let signed_fit = cmp::max(Integer::fit_signed(min), Integer::fit_signed(max));

        let mut min_from_extern = None;
        let min_default = I8;

        if let Some(ity) = repr.int {
            let discr = Integer::from_attr(&tcx, ity);
            let fit = if ity.is_signed() { signed_fit } else { unsigned_fit };
            if discr < fit {
                bug!(
"Integer::repr_discr: `#[repr]` hint too small for \
|
|
|
|
|
discriminant range of enum `{}",
                    ty
                )
            }
            return (discr, ity.is_signed());
        }

        if repr.c() {
            match &tcx.sess.target.target.arch[..] {
                // WARNING: the ARM EABI has two variants; the one corresponding
                // to `at_least == I32` appears to be used on Linux and NetBSD,
                // but some systems may use the variant corresponding to no
                // lower bound. However, we don't run on those yet...?
                "arm" => min_from_extern = Some(I32),
                _ => min_from_extern = Some(I32),
            }
        }

        let at_least = min_from_extern.unwrap_or(min_default);

        // If there are no negative values, we can use the unsigned fit.
        if min >= 0 {
            (cmp::max(unsigned_fit, at_least), false)
        } else {
            (cmp::max(signed_fit, at_least), true)
        }
    }
}

pub trait PrimitiveExt {
    fn to_ty<'tcx>(&self, tcx: TyCtxt<'tcx>) -> Ty<'tcx>;
    fn to_int_ty<'tcx>(&self, tcx: TyCtxt<'tcx>) -> Ty<'tcx>;
}

impl PrimitiveExt for Primitive {
    fn to_ty<'tcx>(&self, tcx: TyCtxt<'tcx>) -> Ty<'tcx> {
        match *self {
            Int(i, signed) => i.to_ty(tcx, signed),
            F32 => tcx.types.f32,
            F64 => tcx.types.f64,
            Pointer => tcx.mk_mut_ptr(tcx.mk_unit()),
        }
    }

    /// Return an *integer* type matching this primitive.
    /// Useful in particular when dealing with enum discriminants.
    fn to_int_ty<'tcx>(&self, tcx: TyCtxt<'tcx>) -> Ty<'tcx> {
        match *self {
            Int(i, signed) => i.to_ty(tcx, signed),
            Pointer => tcx.types.usize,
            F32 | F64 => bug!("floats do not have an int type"),
        }
    }
}

/// The first half of a fat pointer.
///
/// - For a trait object, this is the address of the box.
/// - For a slice, this is the base address.
pub const FAT_PTR_ADDR: usize = 0;

/// The second half of a fat pointer.
///
/// - For a trait object, this is the address of the vtable.
/// - For a slice, this is the length.
pub const FAT_PTR_EXTRA: usize = 1;

#[derive(Copy, Clone, Debug, TyEncodable, TyDecodable)]
pub enum LayoutError<'tcx> {
    Unknown(Ty<'tcx>),
    SizeOverflow(Ty<'tcx>),
}

impl<'tcx> fmt::Display for LayoutError<'tcx> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match *self {
            LayoutError::Unknown(ty) => write!(f, "the type `{}` has an unknown layout", ty),
            LayoutError::SizeOverflow(ty) => {
                write!(f, "the type `{}` is too big for the current architecture", ty)
            }
        }
    }
}

fn layout_raw<'tcx>(
    tcx: TyCtxt<'tcx>,
    query: ty::ParamEnvAnd<'tcx, Ty<'tcx>>,
) -> Result<&'tcx Layout, LayoutError<'tcx>> {
    ty::tls::with_related_context(tcx, move |icx| {
        let (param_env, ty) = query.into_parts();

        if !tcx.sess.recursion_limit().value_within_limit(icx.layout_depth) {
            tcx.sess.fatal(&format!("overflow representing the type `{}`", ty));
        }

        // Update the ImplicitCtxt to increase the layout_depth
        let icx = ty::tls::ImplicitCtxt { layout_depth: icx.layout_depth + 1, ..icx.clone() };

        ty::tls::enter_context(&icx, |_| {
            let cx = LayoutCx { tcx, param_env };
            let layout = cx.layout_raw_uncached(ty);
            // Type-level uninhabitedness should always imply ABI uninhabitedness.
            if let Ok(layout) = layout {
                if ty.conservative_is_privately_uninhabited(tcx) {
                    assert!(layout.abi.is_uninhabited());
                }
            }
            layout
        })
    })
}

pub fn provide(providers: &mut ty::query::Providers) {
    *providers = ty::query::Providers { layout_raw, ..*providers };
}

pub struct LayoutCx<'tcx, C> {
    pub tcx: C,
    pub param_env: ty::ParamEnv<'tcx>,
}

#[derive(Copy, Clone, Debug)]
enum StructKind {
    /// A tuple, closure, or univariant which cannot be coerced to unsized.
    AlwaysSized,
    /// A univariant, the last field of which may be coerced to unsized.
    MaybeUnsized,
    /// A univariant, but with a prefix of an arbitrary size & alignment (e.g., enum tag).
    Prefixed(Size, Align),
}

// Invert a bijective mapping, i.e. `invert(map)[y] = x` if `map[x] = y`.
// This is used to go between `memory_index` (source field order to memory order)
// and `inverse_memory_index` (memory order to source field order).
// See also `FieldsShape::Arbitrary::memory_index` for more details.
// FIXME(eddyb) build a better abstraction for permutations, if possible.
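// For example, `invert_mapping(&[2, 0, 1])` is `[1, 2, 0]`: entry `y` of the
// result names the `x` that mapped to it.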
fn invert_mapping(map: &[u32]) -> Vec<u32> {
    let mut inverse = vec![0; map.len()];
    for i in 0..map.len() {
        inverse[map[i] as usize] = i as u32;
    }
    inverse
}

impl<'tcx> LayoutCx<'tcx, TyCtxt<'tcx>> {
    fn scalar_pair(&self, a: Scalar, b: Scalar) -> Layout {
        let dl = self.data_layout();
        let b_align = b.value.align(dl);
        let align = a.value.align(dl).max(b_align).max(dl.aggregate_align);
        let b_offset = a.value.size(dl).align_to(b_align.abi);
        let size = (b_offset + b.value.size(dl)).align_to(align.abi);
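        // E.g., pairing an `i8` with an `i32` on a typical 64-bit target
        // places `b` at offset 4 and yields size 8 with 4-byte ABI alignment.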

        // HACK(nox): We iter on `b` and then `a` because `max_by_key`
        // returns the last maximum.
        let largest_niche = Niche::from_scalar(dl, b_offset, b.clone())
            .into_iter()
            .chain(Niche::from_scalar(dl, Size::ZERO, a.clone()))
            .max_by_key(|niche| niche.available(dl));

        Layout {
            variants: Variants::Single { index: VariantIdx::new(0) },
            fields: FieldsShape::Arbitrary {
                offsets: vec![Size::ZERO, b_offset],
                memory_index: vec![0, 1],
            },
            abi: Abi::ScalarPair(a, b),
            largest_niche,
            align,
            size,
        }
    }

    fn univariant_uninterned(
        &self,
        ty: Ty<'tcx>,
        fields: &[TyAndLayout<'_>],
        repr: &ReprOptions,
        kind: StructKind,
    ) -> Result<Layout, LayoutError<'tcx>> {
        let dl = self.data_layout();
        let pack = repr.pack;
        if pack.is_some() && repr.align.is_some() {
            bug!("struct cannot be packed and aligned");
        }

        let mut align = if pack.is_some() { dl.i8_align } else { dl.aggregate_align };

        let mut inverse_memory_index: Vec<u32> = (0..fields.len() as u32).collect();

        let optimize = !repr.inhibit_struct_field_reordering_opt();
        if optimize {
            let end =
                if let StructKind::MaybeUnsized = kind { fields.len() - 1 } else { fields.len() };
            let optimizing = &mut inverse_memory_index[..end];
            let field_align = |f: &TyAndLayout<'_>| {
                if let Some(pack) = pack { f.align.abi.min(pack) } else { f.align.abi }
            };
            match kind {
                StructKind::AlwaysSized | StructKind::MaybeUnsized => {
                    optimizing.sort_by_key(|&x| {
                        // Place ZSTs first to avoid "interesting offsets",
                        // especially with only one or two non-ZST fields.
                        let f = &fields[x as usize];
                        (!f.is_zst(), cmp::Reverse(field_align(f)))
                    });
                }
                StructKind::Prefixed(..) => {
                    // Sort in ascending alignment so that the layout stays
                    // optimal regardless of the prefix.
                    optimizing.sort_by_key(|&x| field_align(&fields[x as usize]));
                }
            }
        }

        // inverse_memory_index holds field indices by increasing memory offset.
        // That is, if field 5 has offset 0, the first element of inverse_memory_index is 5.
        // We now write field offsets to the corresponding offset slot;
        // field 5 with offset 0 puts 0 in offsets[5].
        // At the bottom of this function, we invert `inverse_memory_index` to
        // produce `memory_index` (see `invert_mapping`).
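        // For example, if fields 0..3 end up in memory order [1, 2, 0], then
        // inverse_memory_index is [1, 2, 0] and memory_index is [2, 0, 1].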

        let mut sized = true;
        let mut offsets = vec![Size::ZERO; fields.len()];
        let mut offset = Size::ZERO;
        let mut largest_niche = None;
        let mut largest_niche_available = 0;

        if let StructKind::Prefixed(prefix_size, prefix_align) = kind {
            let prefix_align =
                if let Some(pack) = pack { prefix_align.min(pack) } else { prefix_align };
            align = align.max(AbiAndPrefAlign::new(prefix_align));
            offset = prefix_size.align_to(prefix_align);
        }

        for &i in &inverse_memory_index {
            let field = fields[i as usize];
            if !sized {
                bug!("univariant: field #{} of `{}` comes after unsized field", offsets.len(), ty);
            }

            if field.is_unsized() {
                sized = false;
            }

            // Invariant: offset < dl.obj_size_bound() <= 1<<61
            let field_align = if let Some(pack) = pack {
                field.align.min(AbiAndPrefAlign::new(pack))
            } else {
                field.align
            };
            offset = offset.align_to(field_align.abi);
            align = align.max(field_align);

            debug!("univariant offset: {:?} field: {:#?}", offset, field);
            offsets[i as usize] = offset;

            if !repr.hide_niche() {
                if let Some(mut niche) = field.largest_niche.clone() {
                    let available = niche.available(dl);
                    if available > largest_niche_available {
                        largest_niche_available = available;
                        niche.offset += offset;
                        largest_niche = Some(niche);
                    }
                }
            }

            offset = offset.checked_add(field.size, dl).ok_or(LayoutError::SizeOverflow(ty))?;
        }

        if let Some(repr_align) = repr.align {
            align = align.max(AbiAndPrefAlign::new(repr_align));
        }

        debug!("univariant min_size: {:?}", offset);
        let min_size = offset;

        // As stated above, inverse_memory_index holds field indices by increasing offset.
        // This makes it an already-sorted view of the offsets vec.
        // To invert it, consider:
        // If field 5 has offset 0, offsets[0] is 5, and memory_index[5] should be 0.
        // Field 5 would be the first element, so memory_index is i:
        // Note: if we didn't optimize, it's already right.

        let memory_index =
            if optimize { invert_mapping(&inverse_memory_index) } else { inverse_memory_index };

        let size = min_size.align_to(align.abi);
        let mut abi = Abi::Aggregate { sized };

        // Unpack newtype ABIs and find scalar pairs.
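        // E.g., a newtype like `struct Wrapper(u64)` inherits its field's
        // `Abi::Scalar`, and a struct whose only two non-ZST fields are
        // scalars can become `Abi::ScalarPair`, so both can be passed in
        // registers rather than by reference.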
        if sized && size.bytes() > 0 {
            // All other fields must be ZSTs, and we need them to all start at 0.
            let mut zst_offsets = offsets.iter().enumerate().filter(|&(i, _)| fields[i].is_zst());
            if zst_offsets.all(|(_, o)| o.bytes() == 0) {
                let mut non_zst_fields = fields.iter().enumerate().filter(|&(_, f)| !f.is_zst());

                match (non_zst_fields.next(), non_zst_fields.next(), non_zst_fields.next()) {
                    // We have exactly one non-ZST field.
                    (Some((i, field)), None, None) => {
                        // Field fills the struct and it has a scalar or scalar pair ABI.
                        if offsets[i].bytes() == 0
                            && align.abi == field.align.abi
                            && size == field.size
                        {
                            match field.abi {
                                // For plain scalars, or vectors of them, we can't unpack
                                // newtypes for `#[repr(C)]`, as that affects C ABIs.
                                Abi::Scalar(_) | Abi::Vector { .. } if optimize => {
                                    abi = field.abi.clone();
                                }
                                // But scalar pairs are Rust-specific and get
                                // treated as aggregates by C ABIs anyway.
                                Abi::ScalarPair(..) => {
                                    abi = field.abi.clone();
                                }
                                _ => {}
                            }
                        }
                    }

                    // Two non-ZST fields, and they're both scalars.
                    (
                        Some((
                            i,
                            &TyAndLayout {
                                layout: &Layout { abi: Abi::Scalar(ref a), .. }, ..
                            },
                        )),
                        Some((
                            j,
                            &TyAndLayout {
                                layout: &Layout { abi: Abi::Scalar(ref b), .. }, ..
                            },
                        )),
                        None,
                    ) => {
                        // Order by the memory placement, not source order.
                        let ((i, a), (j, b)) = if offsets[i] < offsets[j] {
                            ((i, a), (j, b))
                        } else {
                            ((j, b), (i, a))
                        };
                        let pair = self.scalar_pair(a.clone(), b.clone());
                        let pair_offsets = match pair.fields {
                            FieldsShape::Arbitrary { ref offsets, ref memory_index } => {
                                assert_eq!(memory_index, &[0, 1]);
                                offsets
                            }
                            _ => bug!(),
                        };
                        if offsets[i] == pair_offsets[0]
                            && offsets[j] == pair_offsets[1]
                            && align == pair.align
                            && size == pair.size
                        {
                            // We can use `ScalarPair` only when it matches our
                            // already computed layout (including `#[repr(C)]`).
                            abi = pair.abi;
                        }
                    }

                    _ => {}
                }
            }
        }

        if sized && fields.iter().any(|f| f.abi.is_uninhabited()) {
            abi = Abi::Uninhabited;
        }

        Ok(Layout {
            variants: Variants::Single { index: VariantIdx::new(0) },
            fields: FieldsShape::Arbitrary { offsets, memory_index },
            abi,
            largest_niche,
            align,
            size,
        })
    }

    fn layout_raw_uncached(&self, ty: Ty<'tcx>) -> Result<&'tcx Layout, LayoutError<'tcx>> {
        let tcx = self.tcx;
        let param_env = self.param_env;
        let dl = self.data_layout();
        let scalar_unit = |value: Primitive| {
            let bits = value.size(dl).bits();
            assert!(bits <= 128);
            Scalar { value, valid_range: 0..=(!0 >> (128 - bits)) }
        };
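        // E.g., `scalar_unit(Int(I8, false))` has the full valid range 0..=255.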
        let scalar = |value: Primitive| tcx.intern_layout(Layout::scalar(self, scalar_unit(value)));

        let univariant = |fields: &[TyAndLayout<'_>], repr: &ReprOptions, kind| {
            Ok(tcx.intern_layout(self.univariant_uninterned(ty, fields, repr, kind)?))
        };
        debug_assert!(!ty.has_infer_types_or_consts());

        Ok(match *ty.kind() {
            // Basic scalars.
            ty::Bool => tcx.intern_layout(Layout::scalar(
                self,
                Scalar { value: Int(I8, false), valid_range: 0..=1 },
            )),
            ty::Char => tcx.intern_layout(Layout::scalar(
                self,
                Scalar { value: Int(I32, false), valid_range: 0..=0x10FFFF },
            )),
            ty::Int(ity) => scalar(Int(Integer::from_attr(dl, attr::SignedInt(ity)), true)),
            ty::Uint(ity) => scalar(Int(Integer::from_attr(dl, attr::UnsignedInt(ity)), false)),
            ty::Float(fty) => scalar(match fty {
                ast::FloatTy::F32 => F32,
                ast::FloatTy::F64 => F64,
            }),
            ty::FnPtr(_) => {
                let mut ptr = scalar_unit(Pointer);
                ptr.valid_range = 1..=*ptr.valid_range.end();
                tcx.intern_layout(Layout::scalar(self, ptr))
            }

            // The never type.
            ty::Never => tcx.intern_layout(Layout {
                variants: Variants::Single { index: VariantIdx::new(0) },
                fields: FieldsShape::Primitive,
                abi: Abi::Uninhabited,
                largest_niche: None,
                align: dl.i8_align,
                size: Size::ZERO,
            }),

            // Potentially-wide pointers.
            ty::Ref(_, pointee, _) | ty::RawPtr(ty::TypeAndMut { ty: pointee, .. }) => {
                let mut data_ptr = scalar_unit(Pointer);
                if !ty.is_unsafe_ptr() {
                    data_ptr.valid_range = 1..=*data_ptr.valid_range.end();
                }

                let pointee = tcx.normalize_erasing_regions(param_env, pointee);
                if pointee.is_sized(tcx.at(DUMMY_SP), param_env) {
                    return Ok(tcx.intern_layout(Layout::scalar(self, data_ptr)));
                }

                let unsized_part = tcx.struct_tail_erasing_lifetimes(pointee, param_env);
                let metadata = match unsized_part.kind() {
                    ty::Foreign(..) => {
                        return Ok(tcx.intern_layout(Layout::scalar(self, data_ptr)));
                    }
                    ty::Slice(_) | ty::Str => scalar_unit(Int(dl.ptr_sized_integer(), false)),
                    ty::Dynamic(..) => {
                        let mut vtable = scalar_unit(Pointer);
                        vtable.valid_range = 1..=*vtable.valid_range.end();
                        vtable
                    }
                    _ => return Err(LayoutError::Unknown(unsized_part)),
                };

                // Effectively a (ptr, meta) tuple.
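                // E.g., `&[u8]` is laid out like `(*const u8, usize)`, and
                // `&dyn Trait` like a pair of a data pointer and a vtable pointer.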
                tcx.intern_layout(self.scalar_pair(data_ptr, metadata))
            }

            // Arrays and slices.
            ty::Array(element, mut count) => {
                if count.has_projections() {
                    count = tcx.normalize_erasing_regions(param_env, count);
                    if count.has_projections() {
                        return Err(LayoutError::Unknown(ty));
                    }
                }

                let count = count.try_eval_usize(tcx, param_env).ok_or(LayoutError::Unknown(ty))?;
                let element = self.layout_of(element)?;
                let size =
                    element.size.checked_mul(count, dl).ok_or(LayoutError::SizeOverflow(ty))?;

                let abi = if count != 0 && ty.conservative_is_privately_uninhabited(tcx) {
                    Abi::Uninhabited
                } else {
                    Abi::Aggregate { sized: true }
                };

                let largest_niche = if count != 0 { element.largest_niche.clone() } else { None };

                tcx.intern_layout(Layout {
                    variants: Variants::Single { index: VariantIdx::new(0) },
                    fields: FieldsShape::Array { stride: element.size, count },
                    abi,
                    largest_niche,
                    align: element.align,
                    size,
                })
            }
            ty::Slice(element) => {
                let element = self.layout_of(element)?;
                tcx.intern_layout(Layout {
                    variants: Variants::Single { index: VariantIdx::new(0) },
                    fields: FieldsShape::Array { stride: element.size, count: 0 },
                    abi: Abi::Aggregate { sized: false },
                    largest_niche: None,
                    align: element.align,
                    size: Size::ZERO,
                })
            }
            ty::Str => tcx.intern_layout(Layout {
                variants: Variants::Single { index: VariantIdx::new(0) },
                fields: FieldsShape::Array { stride: Size::from_bytes(1), count: 0 },
                abi: Abi::Aggregate { sized: false },
                largest_niche: None,
                align: dl.i8_align,
                size: Size::ZERO,
            }),

            // Odd unit types.
            ty::FnDef(..) => univariant(&[], &ReprOptions::default(), StructKind::AlwaysSized)?,
            ty::Dynamic(..) | ty::Foreign(..) => {
                let mut unit = self.univariant_uninterned(
                    ty,
                    &[],
                    &ReprOptions::default(),
                    StructKind::AlwaysSized,
                )?;
                match unit.abi {
                    Abi::Aggregate { ref mut sized } => *sized = false,
                    _ => bug!(),
                }
                tcx.intern_layout(unit)
            }

            ty::Generator(def_id, substs, _) => self.generator_layout(ty, def_id, substs)?,

            ty::Closure(_, ref substs) => {
                let tys = substs.as_closure().upvar_tys();
                univariant(
                    &tys.map(|ty| self.layout_of(ty)).collect::<Result<Vec<_>, _>>()?,
                    &ReprOptions::default(),
                    StructKind::AlwaysSized,
                )?
            }

            ty::Tuple(tys) => {
                let kind =
                    if tys.len() == 0 { StructKind::AlwaysSized } else { StructKind::MaybeUnsized };

                univariant(
                    &tys.iter()
                        .map(|k| self.layout_of(k.expect_ty()))
                        .collect::<Result<Vec<_>, _>>()?,
                    &ReprOptions::default(),
                    kind,
                )?
            }

            // SIMD vector types.
            ty::Adt(def, ..) if def.repr.simd() => {
                let element = self.layout_of(ty.simd_type(tcx))?;
                let count = ty.simd_size(tcx);
                assert!(count > 0);
                let scalar = match element.abi {
                    Abi::Scalar(ref scalar) => scalar.clone(),
                    _ => {
                        tcx.sess.fatal(&format!(
                            "monomorphising SIMD type `{}` with \
                             a non-machine element type `{}`",
                            ty, element.ty
                        ));
                    }
                };
                let size =
                    element.size.checked_mul(count, dl).ok_or(LayoutError::SizeOverflow(ty))?;
                let align = dl.vector_align(size);
                let size = size.align_to(align.abi);

                tcx.intern_layout(Layout {
                    variants: Variants::Single { index: VariantIdx::new(0) },
                    fields: FieldsShape::Array { stride: element.size, count },
                    abi: Abi::Vector { element: scalar, count },
                    largest_niche: element.largest_niche.clone(),
                    size,
                    align,
                })
            }

            // ADTs.
            ty::Adt(def, substs) => {
                // Cache the field layouts.
                let variants = def
                    .variants
                    .iter()
                    .map(|v| {
                        v.fields
                            .iter()
                            .map(|field| self.layout_of(field.ty(tcx, substs)))
                            .collect::<Result<Vec<_>, _>>()
                    })
                    .collect::<Result<IndexVec<VariantIdx, _>, _>>()?;

                if def.is_union() {
                    if def.repr.pack.is_some() && def.repr.align.is_some() {
                        bug!("union cannot be packed and aligned");
                    }

                    let mut align =
                        if def.repr.pack.is_some() { dl.i8_align } else { dl.aggregate_align };

                    if let Some(repr_align) = def.repr.align {
                        align = align.max(AbiAndPrefAlign::new(repr_align));
                    }

                    let optimize = !def.repr.inhibit_union_abi_opt();
                    let mut size = Size::ZERO;
                    let mut abi = Abi::Aggregate { sized: true };
                    let index = VariantIdx::new(0);
                    for field in &variants[index] {
                        assert!(!field.is_unsized());
                        align = align.max(field.align);

                        // If all non-ZST fields have the same ABI, forward this ABI
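                        // (e.g. `u32` and `char` are both 32-bit scalars once
                        // `char`'s valid range is widened below, so a union of
                        // the two can still be passed as a scalar).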
                        if optimize && !field.is_zst() {
                            // Normalize scalar_unit to the maximal valid range
                            let field_abi = match &field.abi {
                                Abi::Scalar(x) => Abi::Scalar(scalar_unit(x.value)),
                                Abi::ScalarPair(x, y) => {
                                    Abi::ScalarPair(scalar_unit(x.value), scalar_unit(y.value))
                                }
                                Abi::Vector { element: x, count } => {
                                    Abi::Vector { element: scalar_unit(x.value), count: *count }
                                }
                                Abi::Uninhabited | Abi::Aggregate { .. } => {
                                    Abi::Aggregate { sized: true }
                                }
                            };

                            if size == Size::ZERO {
                                // first non ZST: initialize 'abi'
                                abi = field_abi;
                            } else if abi != field_abi {
                                // different fields have different ABI: reset to Aggregate
                                abi = Abi::Aggregate { sized: true };
                            }
                        }

                        size = cmp::max(size, field.size);
                    }

                    if let Some(pack) = def.repr.pack {
                        align = align.min(AbiAndPrefAlign::new(pack));
                    }

                    return Ok(tcx.intern_layout(Layout {
                        variants: Variants::Single { index },
                        fields: FieldsShape::Union(
                            NonZeroUsize::new(variants[index].len())
                                .ok_or(LayoutError::Unknown(ty))?,
                        ),
                        abi,
                        largest_niche: None,
                        align,
                        size: size.align_to(align.abi),
                    }));
                }

                // A variant is absent if it's uninhabited and only has ZST fields.
                // Present uninhabited variants only require space for their fields,
                // but *not* an encoding of the discriminant (e.g., a tag value).
                // See issue #49298 for more details on the need to leave space
                // for non-ZST uninhabited data (mostly partial initialization).
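                // E.g., in `enum E { A(u32), B(!) }` the variant `B` is absent
                // (its only field is an uninhabited ZST), so `E` is laid out
                // as if it were just `A`.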
                let absent = |fields: &[TyAndLayout<'_>]| {
                    let uninhabited = fields.iter().any(|f| f.abi.is_uninhabited());
                    let is_zst = fields.iter().all(|f| f.is_zst());
                    uninhabited && is_zst
                };
                let (present_first, present_second) = {
                    let mut present_variants = variants
                        .iter_enumerated()
                        .filter_map(|(i, v)| if absent(v) { None } else { Some(i) });
                    (present_variants.next(), present_variants.next())
                };
                let present_first = match present_first {
                    Some(present_first) => present_first,
                    // Uninhabited because it has no variants, or only absent ones.
                    None if def.is_enum() => return tcx.layout_raw(param_env.and(tcx.types.never)),
                    // If it's a struct, still compute a layout so that we can still compute the
                    // field offsets.
                    None => VariantIdx::new(0),
                };

                let is_struct = !def.is_enum() ||
                    // Only one variant is present.
                    (present_second.is_none() &&
                    // Representation optimizations are allowed.
                    !def.repr.inhibit_enum_layout_opt());
                if is_struct {
                    // Struct, or univariant enum equivalent to a struct.
                    // (Typechecking will reject discriminant-sizing attrs.)

                    let v = present_first;
                    let kind = if def.is_enum() || variants[v].is_empty() {
                        StructKind::AlwaysSized
                    } else {
                        let param_env = tcx.param_env(def.did);
                        let last_field = def.variants[v].fields.last().unwrap();
                        let always_sized =
                            tcx.type_of(last_field.did).is_sized(tcx.at(DUMMY_SP), param_env);
                        if !always_sized {
                            StructKind::MaybeUnsized
                        } else {
                            StructKind::AlwaysSized
                        }
                    };

                    let mut st = self.univariant_uninterned(ty, &variants[v], &def.repr, kind)?;
                    st.variants = Variants::Single { index: v };
                    let (start, end) = self.tcx.layout_scalar_valid_range(def.did);
                    match st.abi {
                        Abi::Scalar(ref mut scalar) | Abi::ScalarPair(ref mut scalar, _) => {
                            // the asserts ensure that we are not using the
                            // `#[rustc_layout_scalar_valid_range(n)]`
                            // attribute to widen the range of anything as that would probably
                            // result in UB somewhere
                            // FIXME(eddyb) the asserts are probably not needed,
                            // as larger validity ranges would result in missed
                            // optimizations, *not* wrongly assuming the inner
                            // value is valid. e.g. unions enlarge validity ranges,
                            // because the values may be uninitialized.
                            if let Bound::Included(start) = start {
                                // FIXME(eddyb) this might be incorrect - it doesn't
                                // account for wrap-around (end < start) ranges.
                                assert!(*scalar.valid_range.start() <= start);
                                scalar.valid_range = start..=*scalar.valid_range.end();
                            }
                            if let Bound::Included(end) = end {
                                // FIXME(eddyb) this might be incorrect - it doesn't
                                // account for wrap-around (end < start) ranges.
                                assert!(*scalar.valid_range.end() >= end);
                                scalar.valid_range = *scalar.valid_range.start()..=end;
                            }

                            // Update `largest_niche` if we have introduced a larger niche.
                            let niche = if def.repr.hide_niche() {
                                None
                            } else {
                                Niche::from_scalar(dl, Size::ZERO, scalar.clone())
                            };
                            if let Some(niche) = niche {
                                match &st.largest_niche {
                                    Some(largest_niche) => {
                                        // Replace the existing niche even if they're equal,
                                        // because this one is at a lower offset.
                                        if largest_niche.available(dl) <= niche.available(dl) {
                                            st.largest_niche = Some(niche);
                                        }
                                    }
                                    None => st.largest_niche = Some(niche),
                                }
                            }
                        }
                        _ => assert!(
                            start == Bound::Unbounded && end == Bound::Unbounded,
                            "nonscalar layout for layout_scalar_valid_range type {:?}: {:#?}",
                            def,
                            st,
                        ),
                    }

                    return Ok(tcx.intern_layout(st));
                }

                // At this point, we have handled all unions and
                // structs. (We have also handled univariant enums
                // that allow representation optimization.)
                assert!(def.is_enum());

                // The current code for niche-filling relies on variant indices
                // instead of actual discriminants, so dataful enums with
                // explicit discriminants (RFC #2363) would misbehave.
                let no_explicit_discriminants = def
                    .variants
                    .iter_enumerated()
                    .all(|(i, v)| v.discr == ty::VariantDiscr::Relative(i.as_u32()));

                let mut niche_filling_layout = None;

                // Niche-filling enum optimization.
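                // E.g., `Option<&T>` has one dataful variant (`Some`); the
                // null pointer value encodes `None`, so no separate tag is
                // stored and the enum stays pointer-sized.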
                if !def.repr.inhibit_enum_layout_opt() && no_explicit_discriminants {
                    let mut dataful_variant = None;
                    let mut niche_variants = VariantIdx::MAX..=VariantIdx::new(0);

                    // Find one non-ZST variant.
                    'variants: for (v, fields) in variants.iter_enumerated() {
                        if absent(fields) {
                            continue 'variants;
                        }
                        for f in fields {
                            if !f.is_zst() {
                                if dataful_variant.is_none() {
                                    dataful_variant = Some(v);
                                    continue 'variants;
                                } else {
                                    dataful_variant = None;
                                    break 'variants;
                                }
                            }
                        }
                        niche_variants = *niche_variants.start().min(&v)..=v;
                    }

                    if niche_variants.start() > niche_variants.end() {
                        dataful_variant = None;
                    }

                    if let Some(i) = dataful_variant {
                        let count = (niche_variants.end().as_u32()
                            - niche_variants.start().as_u32()
                            + 1) as u128;

                        // Find the field with the largest niche
                        let niche_candidate = variants[i]
                            .iter()
                            .enumerate()
                            .filter_map(|(j, &field)| Some((j, field.largest_niche.as_ref()?)))
                            .max_by_key(|(_, niche)| niche.available(dl));

                        if let Some((field_index, niche, (niche_start, niche_scalar))) =
                            niche_candidate.and_then(|(field_index, niche)| {
                                Some((field_index, niche, niche.reserve(self, count)?))
                            })
                        {
                            let mut align = dl.aggregate_align;
                            let st = variants
                                .iter_enumerated()
                                .map(|(j, v)| {
                                    let mut st = self.univariant_uninterned(
                                        ty,
                                        v,
                                        &def.repr,
                                        StructKind::AlwaysSized,
                                    )?;
                                    st.variants = Variants::Single { index: j };

                                    align = align.max(st.align);

                                    Ok(st)
                                })
                                .collect::<Result<IndexVec<VariantIdx, _>, _>>()?;

                            let offset = st[i].fields.offset(field_index) + niche.offset;
                            let size = st[i].size;

                            let abi = if st.iter().all(|v| v.abi.is_uninhabited()) {
                                Abi::Uninhabited
                            } else {
                                match st[i].abi {
                                    Abi::Scalar(_) => Abi::Scalar(niche_scalar.clone()),
                                    Abi::ScalarPair(ref first, ref second) => {
                                        // We need to use scalar_unit to reset the
                                        // valid range to the maximal one for that
                                        // primitive, because only the niche is
                                        // guaranteed to be initialised, not the
                                        // other primitive.
                                        if offset.bytes() == 0 {
                                            Abi::ScalarPair(
                                                niche_scalar.clone(),
                                                scalar_unit(second.value),
                                            )
                                        } else {
                                            Abi::ScalarPair(
                                                scalar_unit(first.value),
                                                niche_scalar.clone(),
                                            )
                                        }
                                    }
                                    _ => Abi::Aggregate { sized: true },
                                }
                            };

                            let largest_niche =
                                Niche::from_scalar(dl, offset, niche_scalar.clone());

                            niche_filling_layout = Some(Layout {
                                variants: Variants::Multiple {
                                    tag: niche_scalar,
                                    tag_encoding: TagEncoding::Niche {
                                        dataful_variant: i,
                                        niche_variants,
                                        niche_start,
                                    },
                                    tag_field: 0,
                                    variants: st,
                                },
                                fields: FieldsShape::Arbitrary {
                                    offsets: vec![offset],
                                    memory_index: vec![0],
                                },
                                abi,
                                largest_niche,
                                size,
                                align,
                            });
                        }
                    }
                }

                let (mut min, mut max) = (i128::MAX, i128::MIN);
                let discr_type = def.repr.discr_type();
                let bits = Integer::from_attr(self, discr_type).size().bits();
                for (i, discr) in def.discriminants(tcx) {
                    if variants[i].iter().any(|f| f.abi.is_uninhabited()) {
                        continue;
                    }
                    let mut x = discr.val as i128;
                    if discr_type.is_signed() {
                        // sign extend the raw representation to be an i128
                        x = (x << (128 - bits)) >> (128 - bits);
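                        // (e.g. with a `#[repr(i8)]` discriminant of -1, the
                        // raw value 0xFF sign-extends to -1 as an i128)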
                    }
                    if x < min {
                        min = x;
                    }
                    if x > max {
                        max = x;
                    }
                }
                // We might have no inhabited variants, so pretend there's at least one.
                if (min, max) == (i128::MAX, i128::MIN) {
                    min = 0;
                    max = 0;
                }
                assert!(min <= max, "discriminant range is {}...{}", min, max);
                let (min_ity, signed) = Integer::repr_discr(tcx, ty, &def.repr, min, max);

                let mut align = dl.aggregate_align;
                let mut size = Size::ZERO;

                // We're interested in the smallest alignment, so start large.
                let mut start_align = Align::from_bytes(256).unwrap();
                assert_eq!(Integer::for_align(dl, start_align), None);

                // repr(C) on an enum tells us to make a (tag, union) layout,
                // so we need to grow the prefix alignment to be at least
                // the alignment of the union. (This value is used both for
                // determining the alignment of the overall enum, and the
                // determining the alignment of the payload after the tag.)
                let mut prefix_align = min_ity.align(dl).abi;
                if def.repr.c() {
                    for fields in &variants {
                        for field in fields {
                            prefix_align = prefix_align.max(field.align.abi);
                        }
                    }
                }

                // Create the set of structs that represent each variant.
                let mut layout_variants = variants
                    .iter_enumerated()
                    .map(|(i, field_layouts)| {
                        let mut st = self.univariant_uninterned(
                            ty,
                            &field_layouts,
                            &def.repr,
                            StructKind::Prefixed(min_ity.size(), prefix_align),
                        )?;
                        st.variants = Variants::Single { index: i };
                        // Find the first field we can't move later
                        // to make room for a larger discriminant.
                        for field in
                            st.fields.index_by_increasing_offset().map(|j| field_layouts[j])
                        {
                            if !field.is_zst() || field.align.abi.bytes() != 1 {
                                start_align = start_align.min(field.align.abi);
                                break;
                            }
                        }
                        size = cmp::max(size, st.size);
                        align = align.max(st.align);
                        Ok(st)
                    })
                    .collect::<Result<IndexVec<VariantIdx, _>, _>>()?;

                // Align the maximum variant size to the largest alignment.
                size = size.align_to(align.abi);

                if size.bytes() >= dl.obj_size_bound() {
                    return Err(LayoutError::SizeOverflow(ty));
                }

                let typeck_ity = Integer::from_attr(dl, def.repr.discr_type());
                if typeck_ity < min_ity {
                    // It is a bug if Layout decided on a greater discriminant size than typeck for
                    // some reason at this point (based on values discriminant can take on). Mostly
                    // because this discriminant will be loaded, and then stored into variable of
                    // type calculated by typeck. Consider such case (a bug): typeck decided on
                    // byte-sized discriminant, but layout thinks we need a 16-bit to store all
                    // discriminant values. That would be a bug, because then, in codegen, in order
                    // to store this 16-bit discriminant into 8-bit sized temporary some of the
                    // space necessary to represent would have to be discarded (or layout is wrong
                    // on thinking it needs 16 bits)
                    bug!(
                        "layout decided on a larger discriminant type ({:?}) than typeck ({:?})",
                        min_ity,
                        typeck_ity
                    );
                    // However, it is fine to make discr type however large (as an optimisation)
                    // after this point – we’ll just truncate the value we load in codegen.
                }

                // Check to see if we should use a different type for the
                // discriminant. We can safely use a type with the same size
                // as the alignment of the first field of each variant.
                // We increase the size of the discriminant to avoid LLVM copying
                // padding when it doesn't need to. This normally causes unaligned
                // load/stores and excessive memcpy/memset operations. By using a
                // bigger integer size, LLVM can be sure about its contents and
                // won't be so conservative.
|
|
|
|
|
|
|
|
|
|
// Use the initial field alignment
|
2018-04-30 23:13:14 +00:00
|
|
|
|
let mut ity = if def.repr.c() || def.repr.int.is_some() {
|
|
|
|
|
min_ity
|
|
|
|
|
} else {
|
2018-09-08 22:16:45 +00:00
|
|
|
|
Integer::for_align(dl, start_align).unwrap_or(min_ity)
|
2018-04-30 23:13:14 +00:00
|
|
|
|
};
|
2016-09-05 22:26:02 +00:00
|
|
|
|
|
|
|
|
|
// If the alignment is not larger than the chosen discriminant size,
|
|
|
|
|
// don't use the alignment as the final size.
|
|
|
|
|
if ity <= min_ity {
|
|
|
|
|
ity = min_ity;
|
|
|
|
|
} else {
|
|
|
|
|
// Patch up the variants' first few fields.
|
2017-09-16 13:39:53 +00:00
|
|
|
|
let old_ity_size = min_ity.size();
|
|
|
|
|
let new_ity_size = ity.size();
|
2018-03-27 14:44:03 +00:00
|
|
|
|
for variant in &mut layout_variants {
|
2017-09-22 19:44:40 +00:00
|
|
|
|
match variant.fields {
|
2020-03-31 13:44:52 +00:00
|
|
|
|
FieldsShape::Arbitrary { ref mut offsets, .. } => {
|
2017-09-19 09:38:20 +00:00
|
|
|
|
for i in offsets {
|
|
|
|
|
if *i <= old_ity_size {
|
|
|
|
|
assert_eq!(*i, old_ity_size);
|
|
|
|
|
*i = new_ity_size;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
// We might be making the struct larger.
|
2017-09-22 19:44:40 +00:00
|
|
|
|
if variant.size <= old_ity_size {
|
|
|
|
|
variant.size = new_ity_size;
|
2017-09-19 09:38:20 +00:00
|
|
|
|
}
|
2016-04-19 06:11:46 +00:00
|
|
|
|
}
|
2019-12-22 22:42:04 +00:00
|
|
|
|
_ => bug!(),
|
2016-10-02 01:25:40 +00:00
|
|
|
|
}
|
2016-04-19 06:11:46 +00:00
|
|
|
|
}
|
2016-09-05 22:26:02 +00:00
|
|
|
|
}
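
                // Worked example (illustrative): if the tag grows from `u8` to
                // `u32`, a variant field that sat at offset 1 (right after the
                // 1-byte tag) is moved to offset 4, and a variant whose total
                // size was 1 byte is grown to 4 bytes.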

                let tag_mask = !0u128 >> (128 - ity.size().bits());
                let tag = Scalar {
                    value: Int(ity, signed),
                    valid_range: (min as u128 & tag_mask)..=(max as u128 & tag_mask),
                };
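                // For example (illustrative values): with a one-byte tag,
                // `ity.size().bits()` is 8, so `tag_mask` is `0xff`; a signed
                // `min` of -1 then wraps to the in-range bit pattern `0xff`,
                // which is exactly what `valid_range` should record.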

                let mut abi = Abi::Aggregate { sized: true };
                if tag.value.size(dl) == size {
                    abi = Abi::Scalar(tag.clone());
                } else {
                    // Try to use a ScalarPair for all tagged enums.
                    let mut common_prim = None;
                    for (field_layouts, layout_variant) in variants.iter().zip(&layout_variants) {
                        let offsets = match layout_variant.fields {
                            FieldsShape::Arbitrary { ref offsets, .. } => offsets,
                            _ => bug!(),
                        };
                        let mut fields =
                            field_layouts.iter().zip(offsets).filter(|p| !p.0.is_zst());
                        let (field, offset) = match (fields.next(), fields.next()) {
                            (None, None) => continue,
                            (Some(pair), None) => pair,
                            _ => {
                                common_prim = None;
                                break;
                            }
                        };
                        let prim = match field.abi {
                            Abi::Scalar(ref scalar) => scalar.value,
                            _ => {
                                common_prim = None;
                                break;
                            }
                        };
                        if let Some(pair) = common_prim {
                            // This is pretty conservative. We could go fancier
                            // by conflating things like i32 and u32, or even
                            // realising that (u8, u8) could just cohabit with
                            // u16 or even u32.
                            if pair != (prim, offset) {
                                common_prim = None;
                                break;
                            }
                        } else {
                            common_prim = Some((prim, offset));
                        }
                    }
                    if let Some((prim, offset)) = common_prim {
                        let pair = self.scalar_pair(tag.clone(), scalar_unit(prim));
                        let pair_offsets = match pair.fields {
                            FieldsShape::Arbitrary { ref offsets, ref memory_index } => {
                                assert_eq!(memory_index, &[0, 1]);
                                offsets
                            }
                            _ => bug!(),
                        };
                        if pair_offsets[0] == Size::ZERO
                            && pair_offsets[1] == *offset
                            && align == pair.align
                            && size == pair.size
                        {
                            // We can use `ScalarPair` only when it matches our
                            // already computed layout (including `#[repr(C)]`).
                            abi = pair.abi;
                        }
                    }
                }
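
                // Illustrative case: for `enum E { A(u32), B(u32) }`, every
                // variant carries a single `u32` scalar at the same offset, so
                // `common_prim` holds and the whole enum can become a
                // `ScalarPair(tag, u32)` instead of an opaque aggregate.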

                if layout_variants.iter().all(|v| v.abi.is_uninhabited()) {
                    abi = Abi::Uninhabited;
                }

                let largest_niche = Niche::from_scalar(dl, Size::ZERO, tag.clone());

                let tagged_layout = Layout {
                    variants: Variants::Multiple {
                        tag,
                        tag_encoding: TagEncoding::Direct,
                        tag_field: 0,
                        variants: layout_variants,
                    },
                    fields: FieldsShape::Arbitrary {
                        offsets: vec![Size::ZERO],
                        memory_index: vec![0],
                    },
                    largest_niche,
                    abi,
                    align,
                    size,
                };

                let best_layout = match (tagged_layout, niche_filling_layout) {
                    (tagged_layout, Some(niche_filling_layout)) => {
                        // Pick the smaller layout; otherwise,
                        // pick the layout with the larger niche; otherwise,
                        // pick tagged as it has simpler codegen.
                        cmp::min_by_key(tagged_layout, niche_filling_layout, |layout| {
                            let niche_size =
                                layout.largest_niche.as_ref().map_or(0, |n| n.available(dl));
                            (layout.size, cmp::Reverse(niche_size))
                        })
                    }
                    (tagged_layout, None) => tagged_layout,
                };
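
                // The key `(size, Reverse(niche_size))` sorts first by size
                // ascending, then by niche size descending; e.g. two 8-byte
                // candidates with 0 and 255 available niche values compare as
                // (8, Reverse(0)) > (8, Reverse(255)), so `min_by_key` picks
                // the layout with the larger niche (illustrative numbers).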

                tcx.intern_layout(best_layout)
            }

            // Types with no meaningful known layout.
            ty::Projection(_) | ty::Opaque(..) => {
                let normalized = tcx.normalize_erasing_regions(param_env, ty);
                if ty == normalized {
                    return Err(LayoutError::Unknown(ty));
                }
                tcx.layout_raw(param_env.and(normalized))?
            }

            ty::Bound(..) | ty::Placeholder(..) | ty::GeneratorWitness(..) | ty::Infer(_) => {
                bug!("Layout::compute: unexpected type `{}`", ty)
            }

            ty::Param(_) | ty::Error(_) => {
                return Err(LayoutError::Unknown(ty));
            }
        })
    }
}

/// Overlap eligibility and variant assignment for each GeneratorSavedLocal.
#[derive(Clone, Debug, PartialEq)]
enum SavedLocalEligibility {
    Unassigned,
    Assigned(VariantIdx),
    // FIXME: Use newtype_index so we aren't wasting bytes
    Ineligible(Option<u32>),
}

// When laying out generators, we divide our saved local fields into two
// categories: overlap-eligible and overlap-ineligible.
//
// Those fields which are ineligible for overlap go in a "prefix" at the
// beginning of the layout, and always have space reserved for them.
//
// Overlap-eligible fields are only assigned to one variant, so we lay
// those fields out for each variant and put them right after the
// prefix.
//
// Finally, in the layout details, we point to the fields from the
// variants they are assigned to. It is possible for some fields to be
// included in multiple variants. No field ever "moves around" in the
// layout; its offset is always the same.
//
// Also included in the layout are the upvars and the discriminant.
// These are included as fields on the "outer" layout; they are not part
// of any variant.
impl<'tcx> LayoutCx<'tcx, TyCtxt<'tcx>> {
    /// Compute the eligibility and assignment of each local.
    fn generator_saved_local_eligibility(
        &self,
        info: &GeneratorLayout<'tcx>,
    ) -> (BitSet<GeneratorSavedLocal>, IndexVec<GeneratorSavedLocal, SavedLocalEligibility>) {
        use SavedLocalEligibility::*;

        let mut assignments: IndexVec<GeneratorSavedLocal, SavedLocalEligibility> =
            IndexVec::from_elem_n(Unassigned, info.field_tys.len());

        // The saved locals not eligible for overlap. These will get
        // "promoted" to the prefix of our generator.
        let mut ineligible_locals = BitSet::new_empty(info.field_tys.len());

        // Figure out which of our saved locals are fields in only
        // one variant. The rest are deemed ineligible for overlap.
        for (variant_index, fields) in info.variant_fields.iter_enumerated() {
            for local in fields {
                match assignments[*local] {
                    Unassigned => {
                        assignments[*local] = Assigned(variant_index);
                    }
                    Assigned(idx) => {
                        // We've already seen this local at another suspension
                        // point, so it is no longer a candidate.
                        trace!(
                            "removing local {:?} in >1 variant ({:?}, {:?})",
                            local,
                            variant_index,
                            idx
                        );
                        ineligible_locals.insert(*local);
                        assignments[*local] = Ineligible(None);
                    }
                    Ineligible(_) => {}
                }
            }
        }

        // Next, check every pair of eligible locals to see if they
        // conflict.
        for local_a in info.storage_conflicts.rows() {
            let conflicts_a = info.storage_conflicts.count(local_a);
            if ineligible_locals.contains(local_a) {
                continue;
            }

            for local_b in info.storage_conflicts.iter(local_a) {
                // local_a and local_b are storage live at the same time, therefore they
                // cannot overlap in the generator layout. The only way to guarantee
                // this is if they are in the same variant, or one is ineligible
                // (which means it is stored in every variant).
                if ineligible_locals.contains(local_b)
                    || assignments[local_a] == assignments[local_b]
                {
                    continue;
                }

                // If they conflict, we will choose one to make ineligible.
                // This is not always optimal; it's just a greedy heuristic that
                // seems to produce good results most of the time.
                let conflicts_b = info.storage_conflicts.count(local_b);
                let (remove, other) =
                    if conflicts_a > conflicts_b { (local_a, local_b) } else { (local_b, local_a) };
                ineligible_locals.insert(remove);
                assignments[remove] = Ineligible(None);
                trace!("removing local {:?} due to conflict with {:?}", remove, other);
            }
        }
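
        // Greedy heuristic in action (illustrative): if local A conflicts with
        // three other locals while local B conflicts with one, removing A
        // resolves more conflicts at once, so A is the one made ineligible.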

        // Count the number of variants in use. If only one of them, then it is
        // impossible to overlap any locals in our layout. In this case it's
        // always better to make the remaining locals ineligible, so we can
        // lay them out with the other locals in the prefix and eliminate
        // unnecessary padding bytes.
        {
            let mut used_variants = BitSet::new_empty(info.variant_fields.len());
            for assignment in &assignments {
                if let Assigned(idx) = assignment {
                    used_variants.insert(*idx);
                }
            }
            if used_variants.count() < 2 {
                for assignment in assignments.iter_mut() {
                    *assignment = Ineligible(None);
                }
                ineligible_locals.insert_all();
            }
        }

        // Write down the order of our locals that will be promoted to the prefix.
        {
            for (idx, local) in ineligible_locals.iter().enumerate() {
                assignments[local] = Ineligible(Some(idx as u32));
            }
        }
        debug!("generator saved local assignments: {:?}", assignments);

        (ineligible_locals, assignments)
    }

    /// Compute the full generator layout.
    fn generator_layout(
        &self,
        ty: Ty<'tcx>,
        def_id: hir::def_id::DefId,
        substs: SubstsRef<'tcx>,
    ) -> Result<&'tcx Layout, LayoutError<'tcx>> {
        use SavedLocalEligibility::*;
        let tcx = self.tcx;

        let subst_field = |ty: Ty<'tcx>| ty.subst(tcx, substs);

        let info = tcx.generator_layout(def_id);
        let (ineligible_locals, assignments) = self.generator_saved_local_eligibility(&info);

        // Build a prefix layout, including "promoting" all ineligible
        // locals as part of the prefix. We compute the layout of all of
        // these fields at once to get optimal packing.
        let tag_index = substs.as_generator().prefix_tys().count();

        // `info.variant_fields` already accounts for the reserved variants, so no need to add them.
        let max_discr = (info.variant_fields.len() - 1) as u128;
        let discr_int = Integer::fit_unsigned(max_discr);
        let discr_int_ty = discr_int.to_ty(tcx, false);
        let tag = Scalar { value: Primitive::Int(discr_int, false), valid_range: 0..=max_discr };
        let tag_layout = self.tcx.intern_layout(Layout::scalar(self, tag.clone()));
        let tag_layout = TyAndLayout { ty: discr_int_ty, layout: tag_layout };
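
        // For instance (illustrative): a generator with 4 variants has
        // `max_discr == 3`, which `fit_unsigned` fits in `I8`; only past 256
        // variants would the tag grow to `I16`.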

        let promoted_layouts = ineligible_locals
            .iter()
            .map(|local| subst_field(info.field_tys[local]))
            .map(|ty| tcx.mk_maybe_uninit(ty))
            .map(|ty| self.layout_of(ty));
        let prefix_layouts = substs
            .as_generator()
            .prefix_tys()
            .map(|ty| self.layout_of(ty))
            .chain(iter::once(Ok(tag_layout)))
            .chain(promoted_layouts)
            .collect::<Result<Vec<_>, _>>()?;
        let prefix = self.univariant_uninterned(
            ty,
            &prefix_layouts,
            &ReprOptions::default(),
            StructKind::AlwaysSized,
        )?;

        let (prefix_size, prefix_align) = (prefix.size, prefix.align);

        // Split the prefix layout into the "outer" fields (upvars and
        // discriminant) and the "promoted" fields. Promoted fields will
        // get included in each variant that requested them in
        // GeneratorLayout.
        debug!("prefix = {:#?}", prefix);
        let (outer_fields, promoted_offsets, promoted_memory_index) = match prefix.fields {
            FieldsShape::Arbitrary { mut offsets, memory_index } => {
                let mut inverse_memory_index = invert_mapping(&memory_index);

                // "a" (`0..b_start`) and "b" (`b_start..`) correspond to
                // "outer" and "promoted" fields respectively.
                let b_start = (tag_index + 1) as u32;
                let offsets_b = offsets.split_off(b_start as usize);
                let offsets_a = offsets;

                // Disentangle the "a" and "b" components of `inverse_memory_index`
                // by preserving the order but keeping only one disjoint "half" each.
                // FIXME(eddyb) build a better abstraction for permutations, if possible.
                let inverse_memory_index_b: Vec<_> =
                    inverse_memory_index.iter().filter_map(|&i| i.checked_sub(b_start)).collect();
                inverse_memory_index.retain(|&i| i < b_start);
                let inverse_memory_index_a = inverse_memory_index;

                // Since `inverse_memory_index_{a,b}` each only refer to their
                // respective fields, they can be safely inverted.
                let memory_index_a = invert_mapping(&inverse_memory_index_a);
                let memory_index_b = invert_mapping(&inverse_memory_index_b);

                let outer_fields =
                    FieldsShape::Arbitrary { offsets: offsets_a, memory_index: memory_index_a };
                (outer_fields, offsets_b, memory_index_b)
            }
            _ => bug!(),
        };
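
        // Permutation refresher (illustrative): `memory_index[i]` gives the
        // memory position of source field `i`, so `invert_mapping` turns
        // [2, 0, 1] into [1, 2, 0], i.e. the source field occupying each
        // memory slot. Splitting at `b_start` leaves both halves valid
        // permutations over their own index ranges.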

        let mut size = prefix.size;
        let mut align = prefix.align;
        let variants = info
            .variant_fields
            .iter_enumerated()
            .map(|(index, variant_fields)| {
                // Only include overlap-eligible fields when we compute our variant layout.
                let variant_only_tys = variant_fields
                    .iter()
                    .filter(|local| match assignments[**local] {
                        Unassigned => bug!(),
                        Assigned(v) if v == index => true,
                        Assigned(_) => bug!("assignment does not match variant"),
                        Ineligible(_) => false,
                    })
                    .map(|local| subst_field(info.field_tys[*local]));

                let mut variant = self.univariant_uninterned(
                    ty,
                    &variant_only_tys
                        .map(|ty| self.layout_of(ty))
                        .collect::<Result<Vec<_>, _>>()?,
                    &ReprOptions::default(),
                    StructKind::Prefixed(prefix_size, prefix_align.abi),
                )?;
                variant.variants = Variants::Single { index };

                let (offsets, memory_index) = match variant.fields {
                    FieldsShape::Arbitrary { offsets, memory_index } => (offsets, memory_index),
                    _ => bug!(),
                };

                // Now, stitch the promoted and variant-only fields back together in
                // the order they are mentioned by our GeneratorLayout.
                // Because we only use some subset (that can differ between variants)
                // of the promoted fields, we can't just pick those elements of the
                // `promoted_memory_index` (as we'd end up with gaps).
                // So instead, we build an "inverse memory_index", as if all of the
                // promoted fields were being used, but leave the elements not in the
                // subset as `INVALID_FIELD_IDX`, which we can filter out later to
                // obtain a valid (bijective) mapping.
                const INVALID_FIELD_IDX: u32 = !0;
                let mut combined_inverse_memory_index =
                    vec![INVALID_FIELD_IDX; promoted_memory_index.len() + memory_index.len()];
                let mut offsets_and_memory_index = offsets.into_iter().zip(memory_index);
                let combined_offsets = variant_fields
                    .iter()
                    .enumerate()
                    .map(|(i, local)| {
                        let (offset, memory_index) = match assignments[*local] {
                            Unassigned => bug!(),
                            Assigned(_) => {
                                let (offset, memory_index) =
                                    offsets_and_memory_index.next().unwrap();
                                (offset, promoted_memory_index.len() as u32 + memory_index)
                            }
                            Ineligible(field_idx) => {
                                let field_idx = field_idx.unwrap() as usize;
                                (promoted_offsets[field_idx], promoted_memory_index[field_idx])
                            }
                        };
                        combined_inverse_memory_index[memory_index as usize] = i as u32;
                        offset
                    })
                    .collect();

                // Remove the unused slots and invert the mapping to obtain the
                // combined `memory_index` (also see previous comment).
                combined_inverse_memory_index.retain(|&i| i != INVALID_FIELD_IDX);
                let combined_memory_index = invert_mapping(&combined_inverse_memory_index);
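
                // Sketch with hypothetical values: two promoted slots plus one
                // variant-only field might first yield an inverse index of
                // [INVALID, 0, 1] (promoted slot 0 unused by this variant);
                // dropping the INVALID entry gives [0, 1], whose inverse
                // [0, 1] is again a bijection over this variant's two fields.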

                variant.fields = FieldsShape::Arbitrary {
                    offsets: combined_offsets,
                    memory_index: combined_memory_index,
                };

                size = size.max(variant.size);
                align = align.max(variant.align);
                Ok(variant)
            })
            .collect::<Result<IndexVec<VariantIdx, _>, _>>()?;

        size = size.align_to(align.abi);

        let abi = if prefix.abi.is_uninhabited() || variants.iter().all(|v| v.abi.is_uninhabited())
        {
            Abi::Uninhabited
        } else {
            Abi::Aggregate { sized: true }
        };

        let layout = tcx.intern_layout(Layout {
            variants: Variants::Multiple {
                tag,
                tag_encoding: TagEncoding::Direct,
                tag_field: tag_index,
                variants,
            },
            fields: outer_fields,
            abi,
            largest_niche: prefix.largest_niche,
            size,
            align,
        });
        debug!("generator layout ({:?}): {:#?}", ty, layout);
        Ok(layout)
    }

    /// This is invoked by the `layout_raw` query to record the final
    /// layout of each type.
    #[inline(always)]
    fn record_layout_for_printing(&self, layout: TyAndLayout<'tcx>) {
        // If we are running with `-Zprint-type-sizes`, maybe record layouts
        // for dumping later.
        if self.tcx.sess.opts.debugging_opts.print_type_sizes {
            self.record_layout_for_printing_outlined(layout)
        }
    }
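
    // Usage note (from the flag name rather than anything in this file): the
    // data recorded here backs `rustc -Zprint-type-sizes some_crate.rs`, which
    // prints one `print-type-size` line per type, variant, and field after
    // codegen.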

    fn record_layout_for_printing_outlined(&self, layout: TyAndLayout<'tcx>) {
        // Ignore layouts that are done with non-empty environments or
        // non-monomorphic layouts, as the user only wants to see the stuff
        // resulting from the final codegen session.
        if layout.ty.has_param_types_or_consts() || !self.param_env.caller_bounds().is_empty() {
            return;
        }

        // (delay format until we actually need it)
        let record = |kind, packed, opt_discr_size, variants| {
            let type_desc = format!("{:?}", layout.ty);
            self.tcx.sess.code_stats.record_type_size(
                kind,
                type_desc,
                layout.align.abi,
                layout.size,
                packed,
                opt_discr_size,
                variants,
            );
        };

        let adt_def = match *layout.ty.kind() {
            ty::Adt(ref adt_def, _) => {
                debug!("print-type-size t: `{:?}` process adt", layout.ty);
                adt_def
            }

            ty::Closure(..) => {
                debug!("print-type-size t: `{:?}` record closure", layout.ty);
                record(DataTypeKind::Closure, false, None, vec![]);
                return;
            }

            _ => {
                debug!("print-type-size t: `{:?}` skip non-nominal", layout.ty);
                return;
            }
        };

        let adt_kind = adt_def.adt_kind();
        let adt_packed = adt_def.repr.pack.is_some();

        let build_variant_info = |n: Option<Ident>, flds: &[Symbol], layout: TyAndLayout<'tcx>| {
            let mut min_size = Size::ZERO;
            let field_info: Vec<_> = flds
                .iter()
                .enumerate()
                .map(|(i, &name)| match layout.field(self, i) {
                    Err(err) => {
                        bug!("no layout found for field {}: `{:?}`", name, err);
                    }
                    Ok(field_layout) => {
                        let offset = layout.fields.offset(i);
                        let field_end = offset + field_layout.size;
                        if min_size < field_end {
                            min_size = field_end;
                        }
                        FieldInfo {
                            name: name.to_string(),
                            offset: offset.bytes(),
                            size: field_layout.size.bytes(),
                            align: field_layout.align.abi.bytes(),
                        }
                    }
                })
                .collect();

            VariantInfo {
                name: n.map(|n| n.to_string()),
                kind: if layout.is_unsized() { SizeKind::Min } else { SizeKind::Exact },
                align: layout.align.abi.bytes(),
                size: if min_size.bytes() == 0 { layout.size.bytes() } else { min_size.bytes() },
                fields: field_info,
            }
        };

        match layout.variants {
            Variants::Single { index } => {
                debug!("print-type-size `{:#?}` variant {}", layout, adt_def.variants[index].ident);
                if !adt_def.variants.is_empty() {
                    let variant_def = &adt_def.variants[index];
                    let fields: Vec<_> = variant_def.fields.iter().map(|f| f.ident.name).collect();
                    record(
                        adt_kind.into(),
                        adt_packed,
                        None,
                        vec![build_variant_info(Some(variant_def.ident), &fields, layout)],
                    );
                } else {
                    // (This case arises for *empty* enums; so give it
                    // zero variants.)
                    record(adt_kind.into(), adt_packed, None, vec![]);
                }
            }

            Variants::Multiple { ref tag, ref tag_encoding, .. } => {
                debug!(
                    "print-type-size `{:#?}` adt general variants def {}",
                    layout.ty,
                    adt_def.variants.len()
                );
                let variant_infos: Vec<_> = adt_def
                    .variants
                    .iter_enumerated()
                    .map(|(i, variant_def)| {
                        let fields: Vec<_> =
                            variant_def.fields.iter().map(|f| f.ident.name).collect();
                        build_variant_info(
                            Some(variant_def.ident),
                            &fields,
                            layout.for_variant(self, i),
                        )
                    })
                    .collect();
                record(
                    adt_kind.into(),
                    adt_packed,
                    match tag_encoding {
                        TagEncoding::Direct => Some(tag.value.size(self)),
                        _ => None,
                    },
                    variant_infos,
                );
            }
        }
    }
}

/// Type size "skeleton", i.e., the only information determining a type's size.
/// While this is conservative (aside from constant sizes, only pointers,
/// newtypes thereof and null-pointer-optimized enums are allowed), it is
/// enough to statically check common use cases of transmute.
#[derive(Copy, Clone, Debug)]
pub enum SizeSkeleton<'tcx> {
    /// Any statically computable Layout.
    Known(Size),

    /// A potentially-fat pointer.
    Pointer {
        /// If true, this pointer is never null.
        non_zero: bool,
        /// The type which determines the unsized metadata, if any,
        /// of this pointer. Either a type parameter or a projection
        /// depending on one, with regions erased.
        tail: Ty<'tcx>,
    },
}

impl<'tcx> SizeSkeleton<'tcx> {
    pub fn compute(
        ty: Ty<'tcx>,
        tcx: TyCtxt<'tcx>,
        param_env: ty::ParamEnv<'tcx>,
    ) -> Result<SizeSkeleton<'tcx>, LayoutError<'tcx>> {
        debug_assert!(!ty.has_infer_types_or_consts());

        // First try computing a static layout.
        let err = match tcx.layout_of(param_env.and(ty)) {
            Ok(layout) => {
                return Ok(SizeSkeleton::Known(layout.size));
            }
            Err(err) => err,
        };

        match *ty.kind() {
            ty::Ref(_, pointee, _) | ty::RawPtr(ty::TypeAndMut { ty: pointee, .. }) => {
                let non_zero = !ty.is_unsafe_ptr();
                let tail = tcx.struct_tail_erasing_lifetimes(pointee, param_env);
                match tail.kind() {
                    ty::Param(_) | ty::Projection(_) => {
                        debug_assert!(tail.has_param_types_or_consts());
                        Ok(SizeSkeleton::Pointer { non_zero, tail: tcx.erase_regions(&tail) })
                    }
                    _ => bug!(
                        "SizeSkeleton::compute({}): layout errored ({}), yet \
                         tail `{}` is not a type parameter or a projection",
                        ty,
                        err,
                        tail
                    ),
                }
            }

            ty::Adt(def, substs) => {
                // Only newtypes and enums w/ nullable pointer optimization.
                if def.is_union() || def.variants.is_empty() || def.variants.len() > 2 {
                    return Err(err);
                }

                // Get a zero-sized variant or a pointer newtype.
                let zero_or_ptr_variant = |i| {
                    let i = VariantIdx::new(i);
                    let fields = def.variants[i]
                        .fields
                        .iter()
                        .map(|field| SizeSkeleton::compute(field.ty(tcx, substs), tcx, param_env));
                    let mut ptr = None;
                    for field in fields {
                        let field = field?;
                        match field {
                            SizeSkeleton::Known(size) => {
                                if size.bytes() > 0 {
                                    return Err(err);
                                }
                            }
                            SizeSkeleton::Pointer { .. } => {
                                if ptr.is_some() {
                                    return Err(err);
                                }
                                ptr = Some(field);
                            }
                        }
                    }
                    Ok(ptr)
                };

                let v0 = zero_or_ptr_variant(0)?;
                // Newtype.
                if def.variants.len() == 1 {
                    if let Some(SizeSkeleton::Pointer { non_zero, tail }) = v0 {
                        return Ok(SizeSkeleton::Pointer {
                            non_zero: non_zero
                                || match tcx.layout_scalar_valid_range(def.did) {
                                    (Bound::Included(start), Bound::Unbounded) => start > 0,
                                    (Bound::Included(start), Bound::Included(end)) => {
                                        0 < start && start < end
                                    }
                                    _ => false,
                                },
                            tail,
                        });
                    } else {
                        return Err(err);
                    }
                }

                let v1 = zero_or_ptr_variant(1)?;
                // Nullable pointer enum optimization.
                match (v0, v1) {
                    (Some(SizeSkeleton::Pointer { non_zero: true, tail }), None)
                    | (None, Some(SizeSkeleton::Pointer { non_zero: true, tail })) => {
                        Ok(SizeSkeleton::Pointer { non_zero: false, tail })
                    }
                    _ => Err(err),
                }
            }

            ty::Projection(_) | ty::Opaque(..) => {
                let normalized = tcx.normalize_erasing_regions(param_env, ty);
                if ty == normalized {
                    Err(err)
                } else {
                    SizeSkeleton::compute(normalized, tcx, param_env)
                }
            }

            _ => Err(err),
        }
    }

    pub fn same_size(self, other: SizeSkeleton<'_>) -> bool {
        match (self, other) {
            (SizeSkeleton::Known(a), SizeSkeleton::Known(b)) => a == b,
            (SizeSkeleton::Pointer { tail: a, .. }, SizeSkeleton::Pointer { tail: b, .. }) => {
                a == b
            }
            _ => false,
        }
    }
}
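
// A minimal, self-contained sketch (not part of the original file) of the
// guarantee `SizeSkeleton` models: a never-null pointer wrapped in a
// two-variant enum keeps the pointer's size via the null-pointer
// optimization, which is what the `zero_or_ptr_variant` logic above detects
// without computing a full layout.
#[cfg(test)]
mod size_skeleton_sketch {
    #[test]
    fn nullable_pointer_enums_stay_pointer_sized() {
        use std::mem::size_of;
        // `&u8` is never null, so `Option<&u8>` can use null as its `None` tag.
        assert_eq!(size_of::<Option<&u8>>(), size_of::<&u8>());
        // The same holds for other guaranteed-non-null newtypes.
        assert_eq!(size_of::<Option<std::num::NonZeroU32>>(), size_of::<u32>());
    }
}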

pub trait HasTyCtxt<'tcx>: HasDataLayout {
    fn tcx(&self) -> TyCtxt<'tcx>;
}

pub trait HasParamEnv<'tcx> {
    fn param_env(&self) -> ty::ParamEnv<'tcx>;
}

impl<'tcx> HasDataLayout for TyCtxt<'tcx> {
    fn data_layout(&self) -> &TargetDataLayout {
        &self.data_layout
    }
}

impl<'tcx> HasTyCtxt<'tcx> for TyCtxt<'tcx> {
    fn tcx(&self) -> TyCtxt<'tcx> {
        *self
    }
}

impl<'tcx, C> HasParamEnv<'tcx> for LayoutCx<'tcx, C> {
    fn param_env(&self) -> ty::ParamEnv<'tcx> {
        self.param_env
    }
}

impl<'tcx, T: HasDataLayout> HasDataLayout for LayoutCx<'tcx, T> {
    fn data_layout(&self) -> &TargetDataLayout {
        self.tcx.data_layout()
    }
}

impl<'tcx, T: HasTyCtxt<'tcx>> HasTyCtxt<'tcx> for LayoutCx<'tcx, T> {
    fn tcx(&self) -> TyCtxt<'tcx> {
        self.tcx.tcx()
    }
}

pub type TyAndLayout<'tcx> = ::rustc_target::abi::TyAndLayout<'tcx, Ty<'tcx>>;

impl<'tcx> LayoutOf for LayoutCx<'tcx, TyCtxt<'tcx>> {
    type Ty = Ty<'tcx>;
    type TyAndLayout = Result<TyAndLayout<'tcx>, LayoutError<'tcx>>;

    /// Computes the layout of a type. Note that this implicitly
    /// executes in "reveal all" mode.
    fn layout_of(&self, ty: Ty<'tcx>) -> Self::TyAndLayout {
        let param_env = self.param_env.with_reveal_all_normalized(self.tcx);
        let ty = self.tcx.normalize_erasing_regions(param_env, ty);
        let layout = self.tcx.layout_raw(param_env.and(ty))?;
        let layout = TyAndLayout { ty, layout };

        // N.B., this recording is normally disabled; when enabled, it
        // can however trigger recursive invocations of `layout_of`.
        // Therefore, we execute it *after* the main query has
        // completed, to avoid problems around recursive structures
        // and the like. (Admittedly, I wasn't able to reproduce a problem
        // here, but it seems like the right thing to do. -nmatsakis)
        self.record_layout_for_printing(layout);

        Ok(layout)
    }
}

impl LayoutOf for LayoutCx<'tcx, ty::query::TyCtxtAt<'tcx>> {
    type Ty = Ty<'tcx>;
    type TyAndLayout = Result<TyAndLayout<'tcx>, LayoutError<'tcx>>;

    /// Computes the layout of a type. Note that this implicitly
    /// executes in "reveal all" mode.
    fn layout_of(&self, ty: Ty<'tcx>) -> Self::TyAndLayout {
        let param_env = self.param_env.with_reveal_all_normalized(*self.tcx);
        let ty = self.tcx.normalize_erasing_regions(param_env, ty);
        let layout = self.tcx.layout_raw(param_env.and(ty))?;
        let layout = TyAndLayout { ty, layout };

        // N.B., this recording is normally disabled; when enabled, it
        // can however trigger recursive invocations of `layout_of`.
        // Therefore, we execute it *after* the main query has
        // completed, to avoid problems around recursive structures
        // and the like. (Admittedly, I wasn't able to reproduce a problem
        // here, but it seems like the right thing to do. -nmatsakis)
        let cx = LayoutCx { tcx: *self.tcx, param_env: self.param_env };
        cx.record_layout_for_printing(layout);

        Ok(layout)
    }
}

// Helper (inherent) `layout_of` methods to avoid pushing `LayoutCx` to users.
impl TyCtxt<'tcx> {
    /// Computes the layout of a type. Note that this implicitly
    /// executes in "reveal all" mode.
    #[inline]
    pub fn layout_of(
        self,
        param_env_and_ty: ty::ParamEnvAnd<'tcx, Ty<'tcx>>,
    ) -> Result<TyAndLayout<'tcx>, LayoutError<'tcx>> {
        let cx = LayoutCx { tcx: self, param_env: param_env_and_ty.param_env };
        cx.layout_of(param_env_and_ty.value)
    }
}
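
// Typical call site elsewhere in the compiler (illustrative, with assumed
// `tcx`, `param_env`, and `some_ty` in scope):
//
//     let layout = tcx.layout_of(param_env.and(some_ty))?;
//     let bytes = layout.size.bytes();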

impl ty::query::TyCtxtAt<'tcx> {
    /// Computes the layout of a type. Note that this implicitly
    /// executes in "reveal all" mode.
    #[inline]
    pub fn layout_of(
        self,
        param_env_and_ty: ty::ParamEnvAnd<'tcx, Ty<'tcx>>,
    ) -> Result<TyAndLayout<'tcx>, LayoutError<'tcx>> {
        let cx = LayoutCx { tcx: self.at(self.span), param_env: param_env_and_ty.param_env };
        cx.layout_of(param_env_and_ty.value)
    }
}

impl<'tcx, C> TyAndLayoutMethods<'tcx, C> for Ty<'tcx>
where
    C: LayoutOf<Ty = Ty<'tcx>, TyAndLayout: MaybeResult<TyAndLayout<'tcx>>>
        + HasTyCtxt<'tcx>
        + HasParamEnv<'tcx>,
{
    fn for_variant(
        this: TyAndLayout<'tcx>,
        cx: &C,
        variant_index: VariantIdx,
    ) -> TyAndLayout<'tcx> {
        let layout = match this.variants {
            Variants::Single { index }
                // If all variants but one are uninhabited, the variant layout is the enum layout.
                if index == variant_index &&
                // Don't confuse variants of uninhabited enums with the enum itself.
                // For more details see https://github.com/rust-lang/rust/issues/69763.
                this.fields != FieldsShape::Primitive =>
            {
                this.layout
            }

            Variants::Single { index } => {
                // Deny calling for_variant more than once for non-Single enums.
                if let Ok(original_layout) = cx.layout_of(this.ty).to_result() {
                    assert_eq!(original_layout.variants, Variants::Single { index });
                }

                let fields = match this.ty.kind() {
                    ty::Adt(def, _) if def.variants.is_empty() =>
                        bug!("for_variant called on zero-variant enum"),
                    ty::Adt(def, _) => def.variants[variant_index].fields.len(),
                    _ => bug!(),
                };
                let tcx = cx.tcx();
                tcx.intern_layout(Layout {
                    variants: Variants::Single { index: variant_index },
                    fields: match NonZeroUsize::new(fields) {
                        Some(fields) => FieldsShape::Union(fields),
                        None => FieldsShape::Arbitrary { offsets: vec![], memory_index: vec![] },
                    },
                    abi: Abi::Uninhabited,
                    largest_niche: None,
                    align: tcx.data_layout.i8_align,
                    size: Size::ZERO,
                })
            }

            Variants::Multiple { ref variants, .. } => &variants[variant_index],
        };

        assert_eq!(layout.variants, Variants::Single { index: variant_index });

        TyAndLayout { ty: this.ty, layout }
    }

    fn field(this: TyAndLayout<'tcx>, cx: &C, i: usize) -> C::TyAndLayout {
        let tcx = cx.tcx();
        let tag_layout = |tag: &Scalar| -> C::TyAndLayout {
            let layout = Layout::scalar(cx, tag.clone());
            MaybeResult::from(Ok(TyAndLayout {
                layout: tcx.intern_layout(layout),
                ty: tag.value.to_ty(tcx),
            }))
        };

        cx.layout_of(match *this.ty.kind() {
            ty::Bool
            | ty::Char
            | ty::Int(_)
            | ty::Uint(_)
            | ty::Float(_)
            | ty::FnPtr(_)
            | ty::Never
            | ty::FnDef(..)
            | ty::GeneratorWitness(..)
            | ty::Foreign(..)
            | ty::Dynamic(..) => bug!("TyAndLayout::field_type({:?}): not applicable", this),

            // Potentially-fat pointers.
            ty::Ref(_, pointee, _) | ty::RawPtr(ty::TypeAndMut { ty: pointee, .. }) => {
                assert!(i < this.fields.count());

                // Reuse the fat `*T` type as its own thin pointer data field.
                // This provides information about, e.g., DST struct pointees
                // (which may have no non-DST form), and will work as long
                // as the `Abi` or `FieldsShape` is checked by users.
                if i == 0 {
                    let nil = tcx.mk_unit();
                    let ptr_ty = if this.ty.is_unsafe_ptr() {
                        tcx.mk_mut_ptr(nil)
                    } else {
                        tcx.mk_mut_ref(tcx.lifetimes.re_static, nil)
                    };
                    return MaybeResult::from(cx.layout_of(ptr_ty).to_result().map(
                        |mut ptr_layout| {
                            ptr_layout.ty = this.ty;
                            ptr_layout
                        },
                    ));
                }

                match tcx.struct_tail_erasing_lifetimes(pointee, cx.param_env()).kind() {
                    ty::Slice(_) | ty::Str => tcx.types.usize,
                    ty::Dynamic(_, _) => {
                        tcx.mk_imm_ref(tcx.lifetimes.re_static, tcx.mk_array(tcx.types.usize, 3))
                        /* FIXME: use actual fn pointers
                        Warning: naively computing the number of entries in the
                        vtable by counting the methods on the trait + methods on
                        all parent traits does not work, because some methods can
                        be not object safe and thus excluded from the vtable.
                        Increase this counter if you tried to implement this but
                        failed to do it without duplicating a lot of code from
                        other places in the compiler: 2
                        tcx.mk_tup(&[
                            tcx.mk_array(tcx.types.usize, 3),
                            tcx.mk_array(Option<fn()>),
                        ])
                        */
                    }
                    _ => bug!("TyAndLayout::field_type({:?}): not applicable", this),
                }
            }

            // Arrays and slices.
            ty::Array(element, _) | ty::Slice(element) => element,
            ty::Str => tcx.types.u8,

            // Tuples, generators and closures.
            ty::Closure(_, ref substs) => substs.as_closure().upvar_tys().nth(i).unwrap(),

            ty::Generator(def_id, ref substs, _) => match this.variants {
                Variants::Single { index } => substs
                    .as_generator()
                    .state_tys(def_id, tcx)
                    .nth(index.as_usize())
                    .unwrap()
                    .nth(i)
                    .unwrap(),
                Variants::Multiple { ref tag, tag_field, .. } => {
                    if i == tag_field {
                        return tag_layout(tag);
                    }
                    substs.as_generator().prefix_tys().nth(i).unwrap()
                }
            },

            ty::Tuple(tys) => tys[i].expect_ty(),

            // SIMD vector types.
            ty::Adt(def, ..) if def.repr.simd() => this.ty.simd_type(tcx),

            // ADTs.
            ty::Adt(def, substs) => {
                match this.variants {
                    Variants::Single { index } => def.variants[index].fields[i].ty(tcx, substs),

                    // Discriminant field for enums (where applicable).
                    Variants::Multiple { ref tag, .. } => {
                        assert_eq!(i, 0);
                        return tag_layout(tag);
                    }
                }
            }

            ty::Projection(_)
            | ty::Bound(..)
            | ty::Placeholder(..)
            | ty::Opaque(..)
            | ty::Param(_)
            | ty::Infer(_)
            | ty::Error(_) => bug!("TyAndLayout::field_type: unexpected type `{}`", this.ty),
        })
    }

    fn pointee_info_at(this: TyAndLayout<'tcx>, cx: &C, offset: Size) -> Option<PointeeInfo> {
        let addr_space_of_ty = |ty: Ty<'tcx>| {
            if ty.is_fn() { cx.data_layout().instruction_address_space } else { AddressSpace::DATA }
        };

        let pointee_info = match *this.ty.kind() {
            ty::RawPtr(mt) if offset.bytes() == 0 => {
                cx.layout_of(mt.ty).to_result().ok().map(|layout| PointeeInfo {
                    size: layout.size,
                    align: layout.align.abi,
                    safe: None,
                    address_space: addr_space_of_ty(mt.ty),
                })
            }
            ty::FnPtr(fn_sig) if offset.bytes() == 0 => {
                cx.layout_of(cx.tcx().mk_fn_ptr(fn_sig)).to_result().ok().map(|layout| {
                    PointeeInfo {
                        size: layout.size,
                        align: layout.align.abi,
                        safe: None,
                        address_space: cx.data_layout().instruction_address_space,
                    }
                })
            }
            ty::Ref(_, ty, mt) if offset.bytes() == 0 => {
                let address_space = addr_space_of_ty(ty);
                let tcx = cx.tcx();
                let is_freeze = ty.is_freeze(tcx.at(DUMMY_SP), cx.param_env());
                let kind = match mt {
                    hir::Mutability::Not => {
                        if is_freeze {
                            PointerKind::Frozen
                        } else {
                            PointerKind::Shared
                        }
                    }
                    hir::Mutability::Mut => {
                        // Previously we would only emit noalias annotations for LLVM >= 6 or in
                        // panic=abort mode. That was deemed right, as prior versions had many bugs
                        // in conjunction with unwinding, but later versions didn't seem to have
                        // said issues. See issue #31681.
                        //
                        // Alas, later on we encountered a case where noalias would generate wrong
                        // code altogether even with recent versions of LLVM in *safe* code with no
                        // unwinding involved. See #54462.
                        //
                        // For now, do not enable mutable_noalias by default at all, while the
                        // issue is being figured out.
                        if tcx.sess.opts.debugging_opts.mutable_noalias {
                            PointerKind::UniqueBorrowed
                        } else {
                            PointerKind::Shared
                        }
                    }
                };

                cx.layout_of(ty).to_result().ok().map(|layout| PointeeInfo {
                    size: layout.size,
                    align: layout.align.abi,
                    safe: Some(kind),
                    address_space,
                })
            }

            _ => {
                let mut data_variant = match this.variants {
                    // Within the discriminant field, only the niche itself is
                    // always initialized, so we only check for a pointer at its
                    // offset.
                    //
                    // If the niche is a pointer, it's either valid (according
                    // to its type), or null (which the niche field's scalar
                    // validity range encodes). This allows using
                    // `dereferenceable_or_null` for e.g., `Option<&T>`, and
                    // this will continue to work as long as we don't start
                    // using more niches than just null (e.g., the first page of
                    // the address space, or unaligned pointers).
                    Variants::Multiple {
                        tag_encoding: TagEncoding::Niche { dataful_variant, .. },
                        tag_field,
                        ..
                    } if this.fields.offset(tag_field) == offset => {
                        Some(this.for_variant(cx, dataful_variant))
                    }
                    _ => Some(this),
                };

                if let Some(variant) = data_variant {
                    // We're not interested in any unions.
                    if let FieldsShape::Union(_) = variant.fields {
                        data_variant = None;
                    }
                }

                let mut result = None;

                if let Some(variant) = data_variant {
                    let ptr_end = offset + Pointer.size(cx);
                    for i in 0..variant.fields.count() {
                        let field_start = variant.fields.offset(i);
                        if field_start <= offset {
                            let field = variant.field(cx, i);
                            result = field.to_result().ok().and_then(|field| {
                                if ptr_end <= field_start + field.size {
                                    // We found the right field, look inside it.
                                    let field_info =
                                        field.pointee_info_at(cx, offset - field_start);
                                    field_info
                                } else {
                                    None
                                }
                            });
                            if result.is_some() {
                                break;
                            }
                        }
                    }
                }

                // FIXME(eddyb) This should be for `ptr::Unique<T>`, not `Box<T>`.
                if let Some(ref mut pointee) = result {
                    if let ty::Adt(def, _) = this.ty.kind() {
                        if def.is_box() && offset.bytes() == 0 {
                            pointee.safe = Some(PointerKind::UniqueOwned);
                        }
                    }
                }

                result
            }
        };

        debug!(
            "pointee_info_at (offset={:?}, type kind: {:?}) => {:?}",
            offset,
            this.ty.kind(),
            pointee_info
        );

        pointee_info
    }
}
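
// Illustrative sketch (an addition, not from the original source): on a 64-bit
// target, `pointee_info_at` for `&u32` at offset 0 would yield a `PointeeInfo`
// with `size` = 4 bytes, `align` = 4, `safe` = `Some(PointerKind::Frozen)`
// (since `u32` is `Freeze`), and the default data address space. The `safe`
// kind is what later drives the `noalias`/`readonly` attribute decisions in
// `FnAbi::new_internal` below.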

impl<'a, 'tcx> HashStable<StableHashingContext<'a>> for LayoutError<'tcx> {
    fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) {
        use crate::ty::layout::LayoutError::*;
        mem::discriminant(self).hash_stable(hcx, hasher);

        match *self {
            Unknown(t) | SizeOverflow(t) => t.hash_stable(hcx, hasher),
        }
    }
}

impl<'tcx> ty::Instance<'tcx> {
    // NOTE(eddyb) this is private to avoid using it from outside of
    // `FnAbi::of_instance` - any other uses are either too high-level
    // for `Instance` (e.g. typeck would use `Ty::fn_sig` instead),
    // or should go through `FnAbi` instead, to avoid losing any
    // adjustments `FnAbi::of_instance` might be performing.
    fn fn_sig_for_fn_abi(&self, tcx: TyCtxt<'tcx>) -> ty::PolyFnSig<'tcx> {
        // FIXME(davidtwco,eddyb): A `ParamEnv` should be passed through to this function.
        let ty = self.ty(tcx, ty::ParamEnv::reveal_all());
        match *ty.kind() {
            ty::FnDef(..) => {
                // HACK(davidtwco,eddyb): This is a workaround for polymorphization considering
                // parameters unused if they show up in the signature, but not in the `mir::Body`
                // (i.e. due to being inside a projection that got normalized, see
                // `src/test/ui/polymorphization/normalized_sig_types.rs`), and codegen not keeping
                // track of a polymorphization `ParamEnv` to allow normalizing later.
                let mut sig = match *ty.kind() {
                    ty::FnDef(def_id, substs) => tcx
                        .normalize_erasing_regions(tcx.param_env(def_id), tcx.fn_sig(def_id))
                        .subst(tcx, substs),
                    _ => unreachable!(),
                };

                if let ty::InstanceDef::VtableShim(..) = self.def {
                    // Modify `fn(self, ...)` to `fn(self: *mut Self, ...)`.
                    sig = sig.map_bound(|mut sig| {
                        let mut inputs_and_output = sig.inputs_and_output.to_vec();
                        inputs_and_output[0] = tcx.mk_mut_ptr(inputs_and_output[0]);
                        sig.inputs_and_output = tcx.intern_type_list(&inputs_and_output);
                        sig
                    });
                }
                sig
            }
            ty::Closure(def_id, substs) => {
                let sig = substs.as_closure().sig();

                let env_ty = tcx.closure_env_ty(def_id, substs).unwrap();
                sig.map_bound(|sig| {
                    tcx.mk_fn_sig(
                        iter::once(env_ty.skip_binder()).chain(sig.inputs().iter().cloned()),
                        sig.output(),
                        sig.c_variadic,
                        sig.unsafety,
                        sig.abi,
                    )
                })
            }
            ty::Generator(_, substs, _) => {
                let sig = substs.as_generator().poly_sig();

                let env_region = ty::ReLateBound(ty::INNERMOST, ty::BrEnv);
                let env_ty = tcx.mk_mut_ref(tcx.mk_region(env_region), ty);

                let pin_did = tcx.require_lang_item(LangItem::Pin, None);
                let pin_adt_ref = tcx.adt_def(pin_did);
                let pin_substs = tcx.intern_substs(&[env_ty.into()]);
                let env_ty = tcx.mk_adt(pin_adt_ref, pin_substs);
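
                // Illustrative note (an addition, not from the original source):
                // `env_ty` is now `Pin<&mut G>` for the generator type `G`, so the
                // signature built below has the shape
                // `fn(Pin<&mut G>, R) -> GeneratorState<Y, T>`
                // for resume type `R`, yield type `Y`, and return type `T`.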

                sig.map_bound(|sig| {
                    let state_did = tcx.require_lang_item(LangItem::GeneratorState, None);
                    let state_adt_ref = tcx.adt_def(state_did);
                    let state_substs =
                        tcx.intern_substs(&[sig.yield_ty.into(), sig.return_ty.into()]);
                    let ret_ty = tcx.mk_adt(state_adt_ref, state_substs);

                    tcx.mk_fn_sig(
                        [env_ty, sig.resume_ty].iter(),
                        &ret_ty,
                        false,
                        hir::Unsafety::Normal,
                        rustc_target::spec::abi::Abi::Rust,
                    )
                })
            }
            _ => bug!("unexpected type {:?} in Instance::fn_sig", ty),
        }
    }
}

pub trait FnAbiExt<'tcx, C>
where
    C: LayoutOf<Ty = Ty<'tcx>, TyAndLayout = TyAndLayout<'tcx>>
        + HasDataLayout
        + HasTargetSpec
        + HasTyCtxt<'tcx>
        + HasParamEnv<'tcx>,
{
    /// Compute a `FnAbi` suitable for indirect calls, i.e. to `fn` pointers.
    ///
    /// NB: this doesn't handle virtual calls - those should use `FnAbi::of_instance`
    /// instead, where the instance is an `InstanceDef::Virtual`.
    fn of_fn_ptr(cx: &C, sig: ty::PolyFnSig<'tcx>, extra_args: &[Ty<'tcx>]) -> Self;

    /// Compute a `FnAbi` suitable for declaring/defining an `fn` instance, and for
    /// direct calls to an `fn`.
    ///
    /// NB: that includes virtual calls, which are represented by "direct calls"
    /// to an `InstanceDef::Virtual` instance (of `<dyn Trait as Trait>::fn`).
    fn of_instance(cx: &C, instance: ty::Instance<'tcx>, extra_args: &[Ty<'tcx>]) -> Self;

    fn new_internal(
        cx: &C,
        sig: ty::PolyFnSig<'tcx>,
        extra_args: &[Ty<'tcx>],
        caller_location: Option<Ty<'tcx>>,
        codegen_fn_attr_flags: CodegenFnAttrFlags,
        mk_arg_type: impl Fn(Ty<'tcx>, Option<usize>) -> ArgAbi<'tcx, Ty<'tcx>>,
    ) -> Self;
    fn adjust_for_abi(&mut self, cx: &C, abi: SpecAbi);
}
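
// Illustrative usage sketch (an addition, not from the original source): a codegen
// backend with a layout-capable context `cx` would typically call
// `FnAbi::of_instance(cx, instance, &[])` for direct calls, or
// `FnAbi::of_fn_ptr(cx, sig, &[])` when only a function-pointer signature is known.
// The `extra_args` slice is non-empty only for C-variadic calls, where it carries
// the types of the variadic arguments at that particular call site.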

fn fn_can_unwind(
    panic_strategy: PanicStrategy,
    codegen_fn_attr_flags: CodegenFnAttrFlags,
    call_conv: Conv,
) -> bool {
    if panic_strategy != PanicStrategy::Unwind {
        // In panic=abort mode we assume nothing can unwind anywhere, so
        // optimize based on this!
        false
    } else if codegen_fn_attr_flags.contains(CodegenFnAttrFlags::UNWIND) {
        // If a specific `#[unwind]` attribute is present, use that.
        true
    } else if codegen_fn_attr_flags.contains(CodegenFnAttrFlags::RUSTC_ALLOCATOR_NOUNWIND) {
        // Special attribute for allocator functions, which can't unwind.
        false
    } else {
        if call_conv == Conv::Rust {
            // Any Rust method (or `extern "Rust" fn` or `extern
            // "rust-call" fn`) is explicitly allowed to unwind
            // (unless it has a no-unwind attribute, handled above).
            true
        } else {
            // Anything else is either:
            //
            // 1. A foreign item using a non-Rust ABI (like `extern "C" { fn foo(); }`), or
            //
            // 2. A Rust item using a non-Rust ABI (like `extern "C" fn foo() { ... }`).
            //
            // Foreign items (case 1) are assumed to not unwind; it is
            // UB otherwise. (At least for now; see also
            // rust-lang/rust#63909 and Rust RFC 2753.)
            //
            // Items defined in Rust with non-Rust ABIs (case 2) are also
            // not supposed to unwind. Whether this should be enforced
            // (versus stating it is UB) and *how* it would be enforced
            // is currently under discussion; see rust-lang/rust#58794.
            //
            // In either case, we mark the item as explicitly nounwind.
            false
        }
    }
}
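
// Illustrative examples (an addition, not from the original source) of how the
// checks above combine:
// - `-C panic=abort` + any function                 => can_unwind = false
// - plain `fn foo()` (Rust ABI, panic=unwind)       => can_unwind = true
// - `extern "C" fn foo()` without `#[unwind]`       => can_unwind = false
// - `extern "C" fn foo()` with `#[unwind(allowed)]` => can_unwind = true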

impl<'tcx, C> FnAbiExt<'tcx, C> for call::FnAbi<'tcx, Ty<'tcx>>
where
    C: LayoutOf<Ty = Ty<'tcx>, TyAndLayout = TyAndLayout<'tcx>>
        + HasDataLayout
        + HasTargetSpec
        + HasTyCtxt<'tcx>
        + HasParamEnv<'tcx>,
{
    fn of_fn_ptr(cx: &C, sig: ty::PolyFnSig<'tcx>, extra_args: &[Ty<'tcx>]) -> Self {
        // Assume that fn pointers may always unwind.
        let codegen_fn_attr_flags = CodegenFnAttrFlags::UNWIND;

        call::FnAbi::new_internal(cx, sig, extra_args, None, codegen_fn_attr_flags, |ty, _| {
            ArgAbi::new(cx.layout_of(ty))
        })
    }

    fn of_instance(cx: &C, instance: ty::Instance<'tcx>, extra_args: &[Ty<'tcx>]) -> Self {
        let sig = instance.fn_sig_for_fn_abi(cx.tcx());

        let caller_location = if instance.def.requires_caller_location(cx.tcx()) {
            Some(cx.tcx().caller_location_ty())
        } else {
            None
        };

        let attrs = cx.tcx().codegen_fn_attrs(instance.def_id()).flags;

        call::FnAbi::new_internal(cx, sig, extra_args, caller_location, attrs, |ty, arg_idx| {
            let mut layout = cx.layout_of(ty);
            // Don't pass the vtable, it's not an argument of the virtual fn.
            // Instead, pass just the data pointer, but give it the type `*const/mut dyn Trait`
            // or `&/&mut dyn Trait` because this is special-cased elsewhere in codegen.
            if let (ty::InstanceDef::Virtual(..), Some(0)) = (&instance.def, arg_idx) {
                let fat_pointer_ty = if layout.is_unsized() {
                    // unsized `self` is passed as a pointer to `self`
                    // FIXME (mikeyhew) change this to use &own if it is ever added to the language
                    cx.tcx().mk_mut_ptr(layout.ty)
                } else {
                    match layout.abi {
                        Abi::ScalarPair(..) => (),
                        _ => bug!("receiver type has unsupported layout: {:?}", layout),
                    }

                    // In the case of Rc<Self>, we need to explicitly pass a *mut RcBox<Self>
                    // with a Scalar (not ScalarPair) ABI. This is a hack that is understood
                    // elsewhere in the compiler as a method on a `dyn Trait`.
                    // To get the type `*mut RcBox<Self>`, we just keep unwrapping newtypes until we
                    // get a built-in pointer type
                    let mut fat_pointer_layout = layout;
                    'descend_newtypes: while !fat_pointer_layout.ty.is_unsafe_ptr()
                        && !fat_pointer_layout.ty.is_region_ptr()
                    {
                        for i in 0..fat_pointer_layout.fields.count() {
                            let field_layout = fat_pointer_layout.field(cx, i);

                            if !field_layout.is_zst() {
                                fat_pointer_layout = field_layout;
                                continue 'descend_newtypes;
                            }
                        }

                        bug!("receiver has no non-zero-sized fields {:?}", fat_pointer_layout);
                    }

                    fat_pointer_layout.ty
                };

                // we now have a type like `*mut RcBox<dyn Trait>`
                // change its layout to that of `*mut ()`, a thin pointer, but keep the same type
                // this is understood as a special case elsewhere in the compiler
                let unit_pointer_ty = cx.tcx().mk_mut_ptr(cx.tcx().mk_unit());
                layout = cx.layout_of(unit_pointer_ty);
                layout.ty = fat_pointer_ty;
            }
            ArgAbi::new(layout)
        })
    }
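
    // Illustrative note (an addition, not from the original source): for a virtual
    // call like `<dyn Trait as Trait>::method(&obj, ...)`, the receiver `&dyn Trait`
    // is a fat pointer (data pointer + vtable pointer, i.e. `Abi::ScalarPair`). The
    // closure above rewrites argument 0 so that only the data pointer is passed: it
    // keeps the receiver's original *type* but swaps in the layout of the thin
    // pointer `*mut ()`; the vtable pointer is supplied separately at the call site.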

    fn new_internal(
        cx: &C,
        sig: ty::PolyFnSig<'tcx>,
        extra_args: &[Ty<'tcx>],
        caller_location: Option<Ty<'tcx>>,
        codegen_fn_attr_flags: CodegenFnAttrFlags,
        mk_arg_type: impl Fn(Ty<'tcx>, Option<usize>) -> ArgAbi<'tcx, Ty<'tcx>>,
    ) -> Self {
        debug!("FnAbi::new_internal({:?}, {:?})", sig, extra_args);

        let sig = cx.tcx().normalize_erasing_late_bound_regions(ty::ParamEnv::reveal_all(), &sig);

        use rustc_target::spec::abi::Abi::*;
        let conv = match cx.tcx().sess.target.target.adjust_abi(sig.abi) {
            RustIntrinsic | PlatformIntrinsic | Rust | RustCall => Conv::Rust,

            // It's the ABI's job to select this, not ours.
            System => bug!("system abi should be selected elsewhere"),
            EfiApi => bug!("eficall abi should be selected elsewhere"),

            Stdcall => Conv::X86Stdcall,
            Fastcall => Conv::X86Fastcall,
            Vectorcall => Conv::X86VectorCall,
            Thiscall => Conv::X86ThisCall,
            C => Conv::C,
            Unadjusted => Conv::C,
            Win64 => Conv::X86_64Win64,
            SysV64 => Conv::X86_64SysV,
            Aapcs => Conv::ArmAapcs,
            PtxKernel => Conv::PtxKernel,
            Msp430Interrupt => Conv::Msp430Intr,
            X86Interrupt => Conv::X86Intr,
            AmdGpuKernel => Conv::AmdGpuKernel,
            AvrInterrupt => Conv::AvrInterrupt,
            AvrNonBlockingInterrupt => Conv::AvrNonBlockingInterrupt,

            // These API constants ought to be more specific...
            Cdecl => Conv::C,
        };

        let mut inputs = sig.inputs();
        let extra_args = if sig.abi == RustCall {
            assert!(!sig.c_variadic && extra_args.is_empty());

            if let Some(input) = sig.inputs().last() {
                if let ty::Tuple(tupled_arguments) = input.kind() {
                    inputs = &sig.inputs()[0..sig.inputs().len() - 1];
                    tupled_arguments.iter().map(|k| k.expect_ty()).collect()
                } else {
                    bug!(
                        "argument to function with \"rust-call\" ABI \
                         is not a tuple"
                    );
                }
            } else {
                bug!(
                    "argument to function with \"rust-call\" ABI \
                     is not a tuple"
                );
            }
        } else {
            assert!(sig.c_variadic || extra_args.is_empty());
            extra_args.to_vec()
        };
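
        // Illustrative note (an addition, not from the original source): for the
        // "rust-call" ABI, a closure-style signature such as
        // `extern "rust-call" fn(self, args: (A, B))` is untupled here into the
        // fixed inputs `[self]` plus extra args `[A, B]`, so the rest of this
        // function sees the same flat argument list it would for `fn(self, A, B)`.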

        let target = &cx.tcx().sess.target.target;
        let target_env_gnu_like = matches!(&target.target_env[..], "gnu" | "musl");
        let win_x64_gnu =
            target.target_os == "windows" && target.arch == "x86_64" && target.target_env == "gnu";
        let linux_s390x_gnu_like =
            target.target_os == "linux" && target.arch == "s390x" && target_env_gnu_like;
        let linux_sparc64_gnu_like =
            target.target_os == "linux" && target.arch == "sparc64" && target_env_gnu_like;
        let linux_powerpc_gnu_like =
            target.target_os == "linux" && target.arch == "powerpc" && target_env_gnu_like;
        let rust_abi = match sig.abi {
            RustIntrinsic | PlatformIntrinsic | Rust | RustCall => true,
            _ => false,
        };

        // Handle safe Rust thin and fat pointers.
        let adjust_for_rust_scalar = |attrs: &mut ArgAttributes,
                                      scalar: &Scalar,
                                      layout: TyAndLayout<'tcx>,
                                      offset: Size,
                                      is_return: bool| {
            // Booleans are always an i1 that needs to be zero-extended.
            if scalar.is_bool() {
                attrs.set(ArgAttribute::ZExt);
                return;
            }

            // Only pointer types handled below.
            if scalar.value != Pointer {
                return;
            }

            if scalar.valid_range.start() < scalar.valid_range.end() {
                if *scalar.valid_range.start() > 0 {
                    attrs.set(ArgAttribute::NonNull);
                }
            }

            if let Some(pointee) = layout.pointee_info_at(cx, offset) {
                if let Some(kind) = pointee.safe {
                    attrs.pointee_align = Some(pointee.align);

                    // `Box` pointers (`UniqueOwned`) are not necessarily dereferenceable
                    // for the entire duration of the function as they can be deallocated
                    // at any time. Set their valid size to 0.
                    attrs.pointee_size = match kind {
                        PointerKind::UniqueOwned => Size::ZERO,
                        _ => pointee.size,
                    };

                    // `Box` pointer parameters never alias because ownership is transferred
                    // `&mut` pointer parameters never alias other parameters,
                    // or mutable global data
                    //
                    // `&T` where `T` contains no `UnsafeCell<U>` is immutable,
                    // and can be marked as both `readonly` and `noalias`, as
                    // LLVM's definition of `noalias` is based solely on memory
                    // dependencies rather than pointer equality
                    let no_alias = match kind {
                        PointerKind::Shared => false,
                        PointerKind::UniqueOwned => true,
                        PointerKind::Frozen | PointerKind::UniqueBorrowed => !is_return,
                    };
                    if no_alias {
                        attrs.set(ArgAttribute::NoAlias);
                    }

                    if kind == PointerKind::Frozen && !is_return {
                        attrs.set(ArgAttribute::ReadOnly);
                    }
                }
            }
        };
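
        // Illustrative summary (an addition, not from the original source) of what
        // the closure above emits for common Rust pointer parameters (return-value
        // caveats aside):
        // - `&T` (with `T: Freeze`) => NonNull + NoAlias + ReadOnly + pointee size/align
        // - `&mut T`                => NonNull; NoAlias only under `-Z mutable-noalias`
        // - `Box<T>`                => NonNull + NoAlias, pointee size forced to 0
        // - `*const T` / `*mut T`   => no `safe` kind, so none of the above apply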

        let arg_of = |ty: Ty<'tcx>, arg_idx: Option<usize>| {
            let is_return = arg_idx.is_none();
            let mut arg = mk_arg_type(ty, arg_idx);
            if arg.layout.is_zst() {
                // For some forsaken reason, x86_64-pc-windows-gnu
                // doesn't ignore zero-sized struct arguments.
                // The same is true for {s390x,sparc64,powerpc}-unknown-linux-{gnu,musl}.
                if is_return
                    || rust_abi
                    || (!win_x64_gnu
                        && !linux_s390x_gnu_like
                        && !linux_sparc64_gnu_like
                        && !linux_powerpc_gnu_like)
                {
                    arg.mode = PassMode::Ignore;
                }
            }

            // FIXME(eddyb) other ABIs don't have logic for scalar pairs.
            if !is_return && rust_abi {
                if let Abi::ScalarPair(ref a, ref b) = arg.layout.abi {
                    let mut a_attrs = ArgAttributes::new();
                    let mut b_attrs = ArgAttributes::new();
                    adjust_for_rust_scalar(&mut a_attrs, a, arg.layout, Size::ZERO, false);
                    adjust_for_rust_scalar(
                        &mut b_attrs,
                        b,
                        arg.layout,
                        a.value.size(cx).align_to(b.value.align(cx).abi),
                        false,
                    );
                    arg.mode = PassMode::Pair(a_attrs, b_attrs);
                    return arg;
                }
            }

            if let Abi::Scalar(ref scalar) = arg.layout.abi {
                if let PassMode::Direct(ref mut attrs) = arg.mode {
                    adjust_for_rust_scalar(attrs, scalar, arg.layout, Size::ZERO, is_return);
                }
            }

            arg
        };

        let mut fn_abi = FnAbi {
            ret: arg_of(sig.output(), None),
            args: inputs
                .iter()
                .cloned()
                .chain(extra_args)
                .chain(caller_location)
                .enumerate()
                .map(|(i, ty)| arg_of(ty, Some(i)))
                .collect(),
            c_variadic: sig.c_variadic,
            fixed_count: inputs.len(),
            conv,
            can_unwind: fn_can_unwind(cx.tcx().sess.panic_strategy(), codegen_fn_attr_flags, conv),
        };
        fn_abi.adjust_for_abi(cx, sig.abi);
        fn_abi
    }

    fn adjust_for_abi(&mut self, cx: &C, abi: SpecAbi) {
        if abi == SpecAbi::Unadjusted {
            return;
        }

        if abi == SpecAbi::Rust
            || abi == SpecAbi::RustCall
            || abi == SpecAbi::RustIntrinsic
            || abi == SpecAbi::PlatformIntrinsic
        {
            let fixup = |arg: &mut ArgAbi<'tcx, Ty<'tcx>>| {
                if arg.is_ignore() {
                    return;
                }

                match arg.layout.abi {
                    Abi::Aggregate { .. } => {}

                    // This is a fun case! The gist of what this is doing is
                    // that we want callers and callees to always agree on the
                    // ABI of how they pass SIMD arguments. If we were to *not*
                    // make these arguments indirect then they'd be immediates
                    // in LLVM, which means that they'd use whatever the
                    // appropriate ABI is for the callee and the caller. That
                    // means, for example, if the caller doesn't have AVX
                    // enabled but the callee does, then passing an AVX argument
                    // across this boundary would cause corrupt data to show up.
                    //
                    // This problem is fixed by unconditionally passing SIMD
                    // arguments through memory between callers and callees,
                    // which should get them all to agree on ABI regardless of
                    // target feature sets. Some more information about this
                    // issue can be found in #44367.
                    //
                    // Note that the platform intrinsic ABI is exempt here as
                    // that's how we connect up to LLVM and it's unstable
                    // anyway, we control all calls to it in libstd.
                    Abi::Vector { .. }
                        if abi != SpecAbi::PlatformIntrinsic
                            && cx.tcx().sess.target.target.options.simd_types_indirect =>
                    {
                        arg.make_indirect();
                        return;
                    }

                    _ => return,
                }

                let size = arg.layout.size;
                if arg.layout.is_unsized() || size > Pointer.size(cx) {
                    arg.make_indirect();
                } else {
                    // We want to pass small aggregates as immediates, but using
                    // an LLVM aggregate type for this leads to bad optimizations,
                    // so we pick an appropriately sized integer type instead.
                    arg.cast_to(Reg { kind: RegKind::Integer, size });
                }
            };
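
            // Illustrative note (an addition, not from the original source): under
            // this fixup, a small aggregate such as `struct Pair(u8, u8)` (size 2,
            // at most pointer-sized) is cast to `Reg { kind: RegKind::Integer,
            // size: 2 }` and passed as an `i16`-like immediate, while a type like
            // `[u64; 4]` exceeds pointer size and is passed indirectly instead.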
            fixup(&mut self.ret);
            for arg in &mut self.args {
                fixup(arg);
            }
            if let PassMode::Indirect(ref mut attrs, _) = self.ret.mode {
                attrs.set(ArgAttribute::StructRet);
            }
            return;
        }

        if let Err(msg) = self.adjust_for_cabi(cx, abi) {
            cx.tcx().sess.fatal(&msg);
        }
    }
}