pub use Integer::*;
pub use Primitive::*;

use crate::spec::Target;

use std::convert::{TryFrom, TryInto};
use std::fmt;
use std::num::NonZeroUsize;
use std::ops::{Add, AddAssign, Deref, Mul, Range, RangeInclusive, Sub};
use std::str::FromStr;

use rustc_index::vec::{Idx, IndexVec};
use rustc_macros::HashStable_Generic;
use rustc_serialize::json::{Json, ToJson};
use rustc_span::Span;

pub mod call;

/// Parsed [Data layout](https://llvm.org/docs/LangRef.html#data-layout)
/// for a target, which contains everything needed to compute layouts.
pub struct TargetDataLayout {
    pub endian: Endian,
    pub i1_align: AbiAndPrefAlign,
    pub i8_align: AbiAndPrefAlign,
    pub i16_align: AbiAndPrefAlign,
    pub i32_align: AbiAndPrefAlign,
    pub i64_align: AbiAndPrefAlign,
    pub i128_align: AbiAndPrefAlign,
    pub f32_align: AbiAndPrefAlign,
    pub f64_align: AbiAndPrefAlign,
    pub pointer_size: Size,
    pub pointer_align: AbiAndPrefAlign,
    pub aggregate_align: AbiAndPrefAlign,

    /// Alignments for vector types.
    pub vector_align: Vec<(Size, AbiAndPrefAlign)>,

    pub instruction_address_space: AddressSpace,

    /// Minimum size of #[repr(C)] enums (default I32 bits)
    pub c_enum_min_size: Integer,
}

impl Default for TargetDataLayout {
    /// Creates an instance of `TargetDataLayout`.
    fn default() -> TargetDataLayout {
        let align = |bits| Align::from_bits(bits).unwrap();
        TargetDataLayout {
            endian: Endian::Big,
            i1_align: AbiAndPrefAlign::new(align(8)),
            i8_align: AbiAndPrefAlign::new(align(8)),
            i16_align: AbiAndPrefAlign::new(align(16)),
            i32_align: AbiAndPrefAlign::new(align(32)),
            i64_align: AbiAndPrefAlign { abi: align(32), pref: align(64) },
            i128_align: AbiAndPrefAlign { abi: align(32), pref: align(64) },
            f32_align: AbiAndPrefAlign::new(align(32)),
            f64_align: AbiAndPrefAlign::new(align(64)),
            pointer_size: Size::from_bits(64),
            pointer_align: AbiAndPrefAlign::new(align(64)),
            aggregate_align: AbiAndPrefAlign { abi: align(0), pref: align(64) },
            vector_align: vec![
                (Size::from_bits(64), AbiAndPrefAlign::new(align(64))),
                (Size::from_bits(128), AbiAndPrefAlign::new(align(128))),
            ],
            instruction_address_space: AddressSpace::DATA,
            c_enum_min_size: Integer::I32,
        }
    }
}

impl TargetDataLayout {
    pub fn parse(target: &Target) -> Result<TargetDataLayout, String> {
        // Parse an address space index from a string.
        let parse_address_space = |s: &str, cause: &str| {
            s.parse::<u32>().map(AddressSpace).map_err(|err| {
                format!("invalid address space `{}` for `{}` in \"data-layout\": {}", s, cause, err)
            })
        };

        // Parse a bit count from a string.
        let parse_bits = |s: &str, kind: &str, cause: &str| {
            s.parse::<u64>().map_err(|err| {
                format!("invalid {} `{}` for `{}` in \"data-layout\": {}", kind, s, cause, err)
            })
        };

        // Parse a size string.
        let size = |s: &str, cause: &str| parse_bits(s, "size", cause).map(Size::from_bits);

        // Parse an alignment string.
        let align = |s: &[&str], cause: &str| {
            if s.is_empty() {
                return Err(format!("missing alignment for `{}` in \"data-layout\"", cause));
            }
            let align_from_bits = |bits| {
                Align::from_bits(bits).map_err(|err| {
                    format!("invalid alignment for `{}` in \"data-layout\": {}", cause, err)
                })
            };
            let abi = parse_bits(s[0], "alignment", cause)?;
            let pref = s.get(1).map_or(Ok(abi), |pref| parse_bits(pref, "alignment", cause))?;
            Ok(AbiAndPrefAlign { abi: align_from_bits(abi)?, pref: align_from_bits(pref)? })
        };

        let mut dl = TargetDataLayout::default();
        let mut i128_align_src = 64;
        for spec in target.data_layout.split('-') {
            let spec_parts = spec.split(':').collect::<Vec<_>>();

            match &*spec_parts {
                ["e"] => dl.endian = Endian::Little,
                ["E"] => dl.endian = Endian::Big,
                [p] if p.starts_with('P') => {
                    dl.instruction_address_space = parse_address_space(&p[1..], "P")?
                }
                ["a", ref a @ ..] => dl.aggregate_align = align(a, "a")?,
                ["f32", ref a @ ..] => dl.f32_align = align(a, "f32")?,
                ["f64", ref a @ ..] => dl.f64_align = align(a, "f64")?,
                [p @ "p", s, ref a @ ..] | [p @ "p0", s, ref a @ ..] => {
                    dl.pointer_size = size(s, p)?;
                    dl.pointer_align = align(a, p)?;
                }
                [s, ref a @ ..] if s.starts_with('i') => {
                    let bits = match s[1..].parse::<u64>() {
                        Ok(bits) => bits,
                        Err(_) => {
                            size(&s[1..], "i")?; // For the user error.
                            continue;
                        }
                    };
                    let a = align(a, s)?;
                    match bits {
                        1 => dl.i1_align = a,
                        8 => dl.i8_align = a,
                        16 => dl.i16_align = a,
                        32 => dl.i32_align = a,
                        64 => dl.i64_align = a,
                        _ => {}
                    }
                    if bits >= i128_align_src && bits <= 128 {
                        // Default alignment for i128 is decided by taking the alignment of
                        // largest-sized i{64..=128}.
                        i128_align_src = bits;
                        dl.i128_align = a;
                    }
                }
                [s, ref a @ ..] if s.starts_with('v') => {
                    let v_size = size(&s[1..], "v")?;
                    let a = align(a, s)?;
                    if let Some(v) = dl.vector_align.iter_mut().find(|v| v.0 == v_size) {
                        v.1 = a;
                        continue;
                    }
                    // No existing entry, add a new one.
                    dl.vector_align.push((v_size, a));
                }
                _ => {} // Ignore everything else.
            }
        }

        // Perform consistency checks against the Target information.
        if dl.endian != target.endian {
            return Err(format!(
                "inconsistent target specification: \"data-layout\" claims \
                 architecture is {}-endian, while \"target-endian\" is `{}`",
                dl.endian.as_str(),
                target.endian.as_str(),
            ));
        }

        if dl.pointer_size.bits() != target.pointer_width.into() {
            return Err(format!(
                "inconsistent target specification: \"data-layout\" claims \
                 pointers are {}-bit, while \"target-pointer-width\" is `{}`",
                dl.pointer_size.bits(),
                target.pointer_width
            ));
        }

        dl.c_enum_min_size = Integer::from_size(Size::from_bits(target.c_enum_min_bits))?;

        Ok(dl)
    }

    /// Returns exclusive upper bound on object size.
    ///
    /// The theoretical maximum object size is defined as the maximum positive `isize` value.
    /// This ensures that the `offset` semantics remain well-defined by allowing it to correctly
    /// index every address within an object along with one byte past the end, along with allowing
    /// `isize` to store the difference between any two pointers into an object.
    ///
    /// The upper bound on 64-bit currently needs to be lower because LLVM uses a 64-bit integer
    /// to represent object size in bits. It would need to be 1 << 61 to account for this, but is
    /// currently conservatively bounded to 1 << 47 as that is enough to cover the current usable
    /// address space on 64-bit ARMv8 and x86_64.
    pub fn obj_size_bound(&self) -> u64 {
        match self.pointer_size.bits() {
            16 => 1 << 15,
            32 => 1 << 31,
            64 => 1 << 47,
            bits => panic!("obj_size_bound: unknown pointer bit size {}", bits),
        }
    }

    pub fn ptr_sized_integer(&self) -> Integer {
        match self.pointer_size.bits() {
            16 => I16,
            32 => I32,
            64 => I64,
            bits => panic!("ptr_sized_integer: unknown pointer bit size {}", bits),
        }
    }

    pub fn vector_align(&self, vec_size: Size) -> AbiAndPrefAlign {
        for &(size, align) in &self.vector_align {
            if size == vec_size {
                return align;
            }
        }
        // Default to natural alignment, which is what LLVM does.
        // That is, use the size, rounded up to a power of 2.
        AbiAndPrefAlign::new(Align::from_bytes(vec_size.bytes().next_power_of_two()).unwrap())
    }
}
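
// A minimal illustrative sketch (added here, not part of the original source)
// of how the accessors above behave for the `Default` layout: the default is
// 64-bit, so object sizes are capped at 1 << 47 and unlisted vector sizes
// fall back to natural (power-of-two) alignment.
#[cfg(test)]
mod target_data_layout_tests {
    use super::*;

    #[test]
    fn default_layout_bounds() {
        let dl = TargetDataLayout::default();
        assert_eq!(dl.obj_size_bound(), 1 << 47);
        assert_eq!(dl.ptr_sized_integer(), I64);
        // 96 bits = 12 bytes is not in the default table, so it rounds up
        // to the next power of two: 16-byte alignment.
        assert_eq!(dl.vector_align(Size::from_bits(96)).abi.bytes(), 16);
    }
}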

pub trait HasDataLayout {
    fn data_layout(&self) -> &TargetDataLayout;
}

impl HasDataLayout for TargetDataLayout {
    #[inline]
    fn data_layout(&self) -> &TargetDataLayout {
        self
    }
}

/// Endianness of the target, which must match cfg(target-endian).
#[derive(Copy, Clone, PartialEq)]
pub enum Endian {
    Little,
    Big,
}

impl Endian {
    pub fn as_str(&self) -> &'static str {
        match self {
            Self::Little => "little",
            Self::Big => "big",
        }
    }
}

impl fmt::Debug for Endian {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(self.as_str())
    }
}

impl FromStr for Endian {
    type Err = String;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s {
            "little" => Ok(Self::Little),
            "big" => Ok(Self::Big),
            _ => Err(format!(r#"unknown endian: "{}""#, s)),
        }
    }
}

impl ToJson for Endian {
    fn to_json(&self) -> Json {
        self.as_str().to_json()
    }
}
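
// A short sketch (added, not in the original file) exercising the `Endian`
// string conversions above: `FromStr` and `as_str` round-trip.
#[cfg(test)]
mod endian_tests {
    use super::Endian;
    use std::str::FromStr;

    #[test]
    fn endian_round_trip() {
        assert_eq!(Endian::from_str("little").unwrap(), Endian::Little);
        assert_eq!(Endian::from_str("big").unwrap(), Endian::Big);
        assert!(Endian::from_str("middle").is_err());
        assert_eq!(Endian::Big.as_str(), "big");
    }
}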

/// Size of a type in bytes.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Encodable, Decodable)]
#[derive(HashStable_Generic)]
pub struct Size {
    // The top 3 bits are ALWAYS zero.
    raw: u64,
}

impl Size {
    pub const ZERO: Size = Size { raw: 0 };

    /// Rounds `bits` up to the next-higher byte boundary, if `bits` is
    /// not aligned.
    pub fn from_bits(bits: impl TryInto<u64>) -> Size {
        let bits = bits.try_into().ok().unwrap();

        #[cold]
        fn overflow(bits: u64) -> ! {
            panic!("Size::from_bits({}) has overflowed", bits);
        }

        // This is the largest value of `bits` that does not cause overflow
        // during rounding, and guarantees that the resulting number of bytes
        // cannot cause overflow when multiplied by 8.
        if bits > 0xffff_ffff_ffff_fff8 {
            overflow(bits);
        }

        // Avoid potential overflow from `bits + 7`.
        Size { raw: bits / 8 + ((bits % 8) + 7) / 8 }
    }

    #[inline]
    pub fn from_bytes(bytes: impl TryInto<u64>) -> Size {
        let bytes: u64 = bytes.try_into().ok().unwrap();
        Size { raw: bytes }
    }

    #[inline]
    pub fn bytes(self) -> u64 {
        self.raw
    }

    #[inline]
    pub fn bytes_usize(self) -> usize {
        self.bytes().try_into().unwrap()
    }

    #[inline]
    pub fn bits(self) -> u64 {
        self.raw << 3
    }

    #[inline]
    pub fn bits_usize(self) -> usize {
        self.bits().try_into().unwrap()
    }

    #[inline]
    pub fn align_to(self, align: Align) -> Size {
        let mask = align.bytes() - 1;
        Size::from_bytes((self.bytes() + mask) & !mask)
    }

    #[inline]
    pub fn is_aligned(self, align: Align) -> bool {
        let mask = align.bytes() - 1;
        self.bytes() & mask == 0
    }

    #[inline]
    pub fn checked_add<C: HasDataLayout>(self, offset: Size, cx: &C) -> Option<Size> {
        let dl = cx.data_layout();

        let bytes = self.bytes().checked_add(offset.bytes())?;

        if bytes < dl.obj_size_bound() { Some(Size::from_bytes(bytes)) } else { None }
    }

    #[inline]
    pub fn checked_mul<C: HasDataLayout>(self, count: u64, cx: &C) -> Option<Size> {
        let dl = cx.data_layout();

        let bytes = self.bytes().checked_mul(count)?;
        if bytes < dl.obj_size_bound() { Some(Size::from_bytes(bytes)) } else { None }
    }

    /// Truncates `value` to `self` bits and then sign-extends it to 128 bits
    /// (i.e., if it is negative, fill with 1's on the left).
    #[inline]
    pub fn sign_extend(self, value: u128) -> u128 {
        let size = self.bits();
        if size == 0 {
            // Truncated until nothing is left.
            return 0;
        }
        // Sign-extend it.
        let shift = 128 - size;
        // Shift the unsigned value to the left, then shift back to the right as signed
        // (essentially fills with sign bit on the left).
        (((value << shift) as i128) >> shift) as u128
    }

    /// Truncates `value` to `self` bits.
    #[inline]
    pub fn truncate(self, value: u128) -> u128 {
        let size = self.bits();
        if size == 0 {
            // Truncated until nothing is left.
            return 0;
        }
        let shift = 128 - size;
        // Truncate (shift left to drop out leftover values, shift right to fill with zeroes).
        (value << shift) >> shift
    }
}
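
// Worked examples (added for illustration, not in the original source) of the
// bit-level arithmetic in `Size`: `from_bits` rounds up to whole bytes, and
// `sign_extend`/`truncate` operate on the low `self.bits()` bits of a `u128`.
#[cfg(test)]
mod size_tests {
    use super::{Align, Size};

    #[test]
    fn from_bits_rounds_up() {
        // 13 bits needs two bytes: 13 / 8 + ((13 % 8) + 7) / 8 == 2.
        assert_eq!(Size::from_bits(13).bytes(), 2);
        assert_eq!(Size::from_bits(16).bytes(), 2);
    }

    #[test]
    fn sign_extend_and_truncate() {
        let byte = Size::from_bytes(1); // 8 bits
        // 0xff is -1 as an i8, so it sign-extends to all-ones.
        assert_eq!(byte.sign_extend(0xff), u128::MAX);
        // Positive values are unchanged.
        assert_eq!(byte.sign_extend(0x7f), 0x7f);
        // Truncation drops the bits above the 8th.
        assert_eq!(byte.truncate(0x1ff), 0xff);
    }

    #[test]
    fn align_to_rounds_up() {
        let four = Align::from_bytes(4).unwrap();
        assert_eq!(Size::from_bytes(5).align_to(four).bytes(), 8);
        assert!(Size::from_bytes(8).is_aligned(four));
    }
}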

// Panicking addition, subtraction and multiplication for convenience.
// Avoid during layout computation, return `LayoutError` instead.

impl Add for Size {
    type Output = Size;
    #[inline]
    fn add(self, other: Size) -> Size {
        Size::from_bytes(self.bytes().checked_add(other.bytes()).unwrap_or_else(|| {
            panic!("Size::add: {} + {} doesn't fit in u64", self.bytes(), other.bytes())
        }))
    }
}

impl Sub for Size {
    type Output = Size;
    #[inline]
    fn sub(self, other: Size) -> Size {
        Size::from_bytes(self.bytes().checked_sub(other.bytes()).unwrap_or_else(|| {
            panic!("Size::sub: {} - {} would result in negative size", self.bytes(), other.bytes())
        }))
    }
}

impl Mul<Size> for u64 {
    type Output = Size;
    #[inline]
    fn mul(self, size: Size) -> Size {
        size * self
    }
}

impl Mul<u64> for Size {
    type Output = Size;
    #[inline]
    fn mul(self, count: u64) -> Size {
        match self.bytes().checked_mul(count) {
            Some(bytes) => Size::from_bytes(bytes),
            None => panic!("Size::mul: {} * {} doesn't fit in u64", self.bytes(), count),
        }
    }
}

impl AddAssign for Size {
    #[inline]
    fn add_assign(&mut self, other: Size) {
        *self = *self + other;
    }
}

/// Alignment of a type in bytes (always a power of two).
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Encodable, Decodable)]
#[derive(HashStable_Generic)]
pub struct Align {
    pow2: u8,
}

impl Align {
    pub const ONE: Align = Align { pow2: 0 };

    #[inline]
    pub fn from_bits(bits: u64) -> Result<Align, String> {
        Align::from_bytes(Size::from_bits(bits).bytes())
    }

    #[inline]
    pub fn from_bytes(align: u64) -> Result<Align, String> {
        // Treat an alignment of 0 bytes like 1-byte alignment.
        if align == 0 {
            return Ok(Align::ONE);
        }

        #[cold]
        fn not_power_of_2(align: u64) -> String {
            format!("`{}` is not a power of 2", align)
        }

        #[cold]
        fn too_large(align: u64) -> String {
            format!("`{}` is too large", align)
        }

        let mut bytes = align;
        let mut pow2: u8 = 0;
        while (bytes & 1) == 0 {
            pow2 += 1;
            bytes >>= 1;
        }
        if bytes != 1 {
            return Err(not_power_of_2(align));
        }
        if pow2 > 29 {
            return Err(too_large(align));
        }

        Ok(Align { pow2 })
    }

    #[inline]
    pub fn bytes(self) -> u64 {
        1 << self.pow2
    }

    #[inline]
    pub fn bits(self) -> u64 {
        self.bytes() * 8
    }

    /// Computes the best alignment possible for the given offset
    /// (the largest power of two that the offset is a multiple of).
    ///
    /// N.B., for an offset of `0`, this happens to return `2^64`.
    #[inline]
    pub fn max_for_offset(offset: Size) -> Align {
        Align { pow2: offset.bytes().trailing_zeros() as u8 }
    }

    /// Lower the alignment, if necessary, such that the given offset
    /// is aligned to it (the offset is a multiple of the alignment).
    #[inline]
    pub fn restrict_for_offset(self, offset: Size) -> Align {
        self.min(Align::max_for_offset(offset))
    }
}
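
// An illustrative sketch (added, not original) of the `Align` invariants
// above: only powers of two are representable, zero is treated as one-byte
// alignment, and `max_for_offset` counts trailing zero bits of the offset.
#[cfg(test)]
mod align_tests {
    use super::{Align, Size};

    #[test]
    fn align_basics() {
        assert_eq!(Align::from_bytes(8).unwrap().bits(), 64);
        assert_eq!(Align::from_bytes(0).unwrap(), Align::ONE);
        assert!(Align::from_bytes(3).is_err()); // not a power of 2
        // 24 = 0b11000 has three trailing zero bits, so the best possible
        // alignment for that offset is 2^3 = 8 bytes.
        assert_eq!(Align::max_for_offset(Size::from_bytes(24)).bytes(), 8);
    }
}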

/// A pair of alignments, ABI-mandated and preferred.
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, Encodable, Decodable)]
#[derive(HashStable_Generic)]
pub struct AbiAndPrefAlign {
    pub abi: Align,
    pub pref: Align,
}

impl AbiAndPrefAlign {
    pub fn new(align: Align) -> AbiAndPrefAlign {
        AbiAndPrefAlign { abi: align, pref: align }
    }

    pub fn min(self, other: AbiAndPrefAlign) -> AbiAndPrefAlign {
        AbiAndPrefAlign { abi: self.abi.min(other.abi), pref: self.pref.min(other.pref) }
    }

    pub fn max(self, other: AbiAndPrefAlign) -> AbiAndPrefAlign {
        AbiAndPrefAlign { abi: self.abi.max(other.abi), pref: self.pref.max(other.pref) }
    }
}

/// Integers, also used for enum discriminants.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, HashStable_Generic)]
pub enum Integer {
    I8,
    I16,
    I32,
    I64,
    I128,
}

impl Integer {
    pub fn size(self) -> Size {
        match self {
            I8 => Size::from_bytes(1),
            I16 => Size::from_bytes(2),
            I32 => Size::from_bytes(4),
            I64 => Size::from_bytes(8),
            I128 => Size::from_bytes(16),
        }
    }

    pub fn align<C: HasDataLayout>(self, cx: &C) -> AbiAndPrefAlign {
        let dl = cx.data_layout();

        match self {
            I8 => dl.i8_align,
            I16 => dl.i16_align,
            I32 => dl.i32_align,
            I64 => dl.i64_align,
            I128 => dl.i128_align,
        }
    }

    /// Finds the smallest Integer type which can represent the signed value.
    pub fn fit_signed(x: i128) -> Integer {
        match x {
            -0x0000_0000_0000_0080..=0x0000_0000_0000_007f => I8,
            -0x0000_0000_0000_8000..=0x0000_0000_0000_7fff => I16,
            -0x0000_0000_8000_0000..=0x0000_0000_7fff_ffff => I32,
            -0x8000_0000_0000_0000..=0x7fff_ffff_ffff_ffff => I64,
            _ => I128,
        }
    }

    /// Finds the smallest Integer type which can represent the unsigned value.
    pub fn fit_unsigned(x: u128) -> Integer {
        match x {
            0..=0x0000_0000_0000_00ff => I8,
            0..=0x0000_0000_0000_ffff => I16,
            0..=0x0000_0000_ffff_ffff => I32,
            0..=0xffff_ffff_ffff_ffff => I64,
            _ => I128,
        }
    }

    /// Finds the smallest integer with the given alignment.
    pub fn for_align<C: HasDataLayout>(cx: &C, wanted: Align) -> Option<Integer> {
        let dl = cx.data_layout();

        for candidate in [I8, I16, I32, I64, I128] {
            if wanted == candidate.align(dl).abi && wanted.bytes() == candidate.size().bytes() {
                return Some(candidate);
            }
        }
        None
    }

    /// Find the largest integer with the given alignment or less.
    pub fn approximate_align<C: HasDataLayout>(cx: &C, wanted: Align) -> Integer {
        let dl = cx.data_layout();

        // FIXME(eddyb) maybe include I128 in the future, when it works everywhere.
        for candidate in [I64, I32, I16] {
            if wanted >= candidate.align(dl).abi && wanted.bytes() >= candidate.size().bytes() {
                return candidate;
            }
        }
        I8
    }

    fn from_size(size: Size) -> Result<Self, String> {
        match size.bits() {
            8 => Ok(Integer::I8),
            16 => Ok(Integer::I16),
            32 => Ok(Integer::I32),
            64 => Ok(Integer::I64),
            128 => Ok(Integer::I128),
            _ => Err(format!("rust does not support integers with {} bits", size.bits())),
        }
    }
}
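
// Sketch (added, not original) of the `Integer` fitting helpers above: each
// value maps to the narrowest variant whose range contains it, and
// `for_align` requires both size and ABI alignment to match exactly.
#[cfg(test)]
mod integer_tests {
    use super::*;

    #[test]
    fn smallest_fitting_integer() {
        assert_eq!(Integer::fit_signed(127), I8);
        assert_eq!(Integer::fit_signed(-129), I16); // just below i8::MIN
        assert_eq!(Integer::fit_unsigned(255), I8);
        assert_eq!(Integer::fit_unsigned(256), I16);
    }

    #[test]
    fn for_align_matches_size_and_abi_align() {
        let dl = TargetDataLayout::default();
        // In the default layout, i32 is 4 bytes with 4-byte ABI alignment.
        let four = Align::from_bytes(4).unwrap();
        assert_eq!(Integer::for_align(&dl, four), Some(I32));
    }
}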

/// Fundamental unit of memory access and layout.
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, HashStable_Generic)]
pub enum Primitive {
    /// The `bool` is the signedness of the `Integer` type.
    ///
    /// One would think we would not care about such details this low down,
    /// but some ABIs are described in terms of C types and ISAs where the
    /// integer arithmetic is done on {sign,zero}-extended registers, e.g.
    /// a negative integer passed by zero-extension will appear positive in
    /// the callee, and most operations on it will produce the wrong values.
    Int(Integer, bool),
    F32,
    F64,
    Pointer,
}

impl Primitive {
    pub fn size<C: HasDataLayout>(self, cx: &C) -> Size {
        let dl = cx.data_layout();

        match self {
            Int(i, _) => i.size(),
            F32 => Size::from_bits(32),
            F64 => Size::from_bits(64),
            Pointer => dl.pointer_size,
        }
    }

    pub fn align<C: HasDataLayout>(self, cx: &C) -> AbiAndPrefAlign {
        let dl = cx.data_layout();

        match self {
            Int(i, _) => i.align(dl),
            F32 => dl.f32_align,
            F64 => dl.f64_align,
            Pointer => dl.pointer_align,
        }
    }

    pub fn is_float(self) -> bool {
        matches!(self, F32 | F64)
    }

    pub fn is_int(self) -> bool {
        matches!(self, Int(..))
    }
}
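
// A small sketch (added, not in the original source) of `Primitive` queries
// against the default data layout: pointers take their size from the layout,
// while the float and integer variants have fixed sizes.
#[cfg(test)]
mod primitive_tests {
    use super::*;

    #[test]
    fn primitive_sizes() {
        let dl = TargetDataLayout::default();
        assert_eq!(Pointer.size(&dl), Size::from_bits(64));
        assert_eq!(Int(I16, true).size(&dl).bytes(), 2);
        assert!(F64.is_float());
        assert!(!F64.is_int());
    }
}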

/// Inclusive wrap-around range of valid values, that is, if
/// start > end, it represents `start..=MAX`,
/// followed by `0..=end`.
///
/// That is, for an i8 primitive, a range of `254..=2` means the following
/// sequence:
///
///    254 (-2), 255 (-1), 0, 1, 2
///
/// This is intended specifically to mirror LLVM’s `!range` metadata
/// semantics.
#[derive(Clone, PartialEq, Eq, Hash)]
#[derive(HashStable_Generic)]
pub struct WrappingRange {
    pub start: u128,
    pub end: u128,
}

impl WrappingRange {
    /// Returns `true` if `v` is contained in the range.
    #[inline(always)]
    pub fn contains(&self, v: u128) -> bool {
        if self.start <= self.end {
            self.start <= v && v <= self.end
        } else {
            self.start <= v || v <= self.end
        }
    }

    /// Returns `true` if zero is contained in the range.
    /// Equal to `range.contains(0)` but should be faster.
    #[inline(always)]
    pub fn contains_zero(&self) -> bool {
        self.start > self.end || self.start == 0
    }

    /// Returns `self` with replaced `start`.
    #[inline(always)]
    pub fn with_start(mut self, start: u128) -> Self {
        self.start = start;
        self
    }

    /// Returns `self` with replaced `end`.
    #[inline(always)]
    pub fn with_end(mut self, end: u128) -> Self {
        self.end = end;
        self
    }
}
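
// Illustrative test (added, not original) of the wrap-around semantics using
// the `254..=2` example from the doc comment above.
#[cfg(test)]
mod wrapping_range_tests {
    use super::WrappingRange;

    #[test]
    fn wrap_around_contains() {
        let r = WrappingRange { start: 254, end: 2 };
        // The range wraps: 254, 255, 0, 1, 2 are all valid...
        for v in [254, 255, 0, 1, 2] {
            assert!(r.contains(v));
        }
        // ...while everything in 3..=253 is not.
        assert!(!r.contains(3));
        assert!(!r.contains(253));
        // Any wrapped range (start > end) passes through zero.
        assert!(r.contains_zero());
    }
}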

impl fmt::Debug for WrappingRange {
    fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(fmt, "{}..={}", self.start, self.end)?;
        Ok(())
    }
}

/// Information about one scalar component of a Rust type.
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
#[derive(HashStable_Generic)]
pub struct Scalar {
    pub value: Primitive,

    // FIXME(eddyb) always use the shortest range, e.g., by finding
    // the largest space between two consecutive valid values and
    // taking everything else as the (shortest) valid range.
    pub valid_range: WrappingRange,
}

impl Scalar {
    pub fn is_bool(&self) -> bool {
        matches!(self.value, Int(I8, false))
            && matches!(self.valid_range, WrappingRange { start: 0, end: 1 })
    }

    /// Returns the valid range as a `x..y` range.
    ///
    /// If `x` and `y` are equal, the range is full, not empty.
    pub fn valid_range_exclusive<C: HasDataLayout>(&self, cx: &C) -> Range<u128> {
        // For a (max) value of -1, max will be `-1 as usize`, which overflows.
        // However, that is fine here (it would still represent the full range),
        // i.e., if the range is everything.
        let bits = self.value.size(cx).bits();
        assert!(bits <= 128);
        let mask = !0u128 >> (128 - bits);
        let start = self.valid_range.start;
        let end = self.valid_range.end;
        assert_eq!(start, start & mask);
        assert_eq!(end, end & mask);
        start..(end.wrapping_add(1) & mask)
    }
}
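
// Sketch (added, not original) tying `Scalar` to `WrappingRange`: a bool is
// an unsigned 8-bit scalar restricted to `0..=1`, whose exclusive form is
// `0..2`.
#[cfg(test)]
mod scalar_tests {
    use super::*;

    #[test]
    fn bool_scalar() {
        let dl = TargetDataLayout::default();
        let scalar =
            Scalar { value: Int(I8, false), valid_range: WrappingRange { start: 0, end: 1 } };
        assert!(scalar.is_bool());
        assert_eq!(scalar.valid_range_exclusive(&dl), 0..2);
    }
}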

/// Describes how the fields of a type are located in memory.
#[derive(PartialEq, Eq, Hash, Debug, HashStable_Generic)]
pub enum FieldsShape {
    /// Scalar primitives and `!`, which never have fields.
    Primitive,

    /// All fields start at no offset. The `usize` is the field count.
    Union(NonZeroUsize),

    /// Array/vector-like placement, with all fields of identical types.
    Array { stride: Size, count: u64 },

    /// Struct-like placement, with precomputed offsets.
    ///
    /// Fields are guaranteed to not overlap, but note that gaps
    /// before, between and after all the fields are NOT always
    /// padding, and as such their contents may not be discarded.
    /// For example, enum variants leave a gap at the start,
    /// where the discriminant field in the enum layout goes.
    Arbitrary {
        /// Offsets for the first byte of each field,
        /// ordered to match the source definition order.
        /// This vector does not go in increasing order.
        // FIXME(eddyb) use small vector optimization for the common case.
        offsets: Vec<Size>,

        /// Maps source order field indices to memory order indices,
        /// depending on how the fields were reordered (if at all).
        /// This is a permutation, with both the source order and the
        /// memory order using the same (0..n) index ranges.
        ///
        /// Note that during computation of `memory_index`, sometimes
        /// it is easier to operate on the inverse mapping (that is,
        /// from memory order to source order), and that is usually
        /// named `inverse_memory_index`.
        ///
        // FIXME(eddyb) build a better abstraction for permutations, if possible.
        // FIXME(camlorn) also consider small vector optimization here.
        memory_index: Vec<u32>,
    },
}

impl FieldsShape {
    pub fn count(&self) -> usize {
        match *self {
            FieldsShape::Primitive => 0,
            FieldsShape::Union(count) => count.get(),
            FieldsShape::Array { count, .. } => count.try_into().unwrap(),
            FieldsShape::Arbitrary { ref offsets, .. } => offsets.len(),
        }
    }

    pub fn offset(&self, i: usize) -> Size {
        match *self {
            FieldsShape::Primitive => {
                unreachable!("FieldsShape::offset: `Primitive`s have no fields")
            }
            FieldsShape::Union(count) => {
                assert!(
                    i < count.get(),
                    "tried to access field {} of union with {} fields",
                    i,
                    count
                );
                Size::ZERO
            }
            FieldsShape::Array { stride, count } => {
                let i = u64::try_from(i).unwrap();
                assert!(i < count);
                stride * i
            }
            FieldsShape::Arbitrary { ref offsets, .. } => offsets[i],
        }
    }

    pub fn memory_index(&self, i: usize) -> usize {
        match *self {
            FieldsShape::Primitive => {
                unreachable!("FieldsShape::memory_index: `Primitive`s have no fields")
            }
            FieldsShape::Union(_) | FieldsShape::Array { .. } => i,
            FieldsShape::Arbitrary { ref memory_index, .. } => memory_index[i].try_into().unwrap(),
        }
    }

    /// Gets source indices of the fields by increasing offsets.
    #[inline]
    pub fn index_by_increasing_offset<'a>(&'a self) -> impl Iterator<Item = usize> + 'a {
        let mut inverse_small = [0u8; 64];
        let mut inverse_big = vec![];
        let use_small = self.count() <= inverse_small.len();

        // We have to write this logic twice in order to keep the array small.
        if let FieldsShape::Arbitrary { ref memory_index, .. } = *self {
            if use_small {
                for i in 0..self.count() {
                    inverse_small[memory_index[i] as usize] = i as u8;
                }
            } else {
                inverse_big = vec![0; self.count()];
                for i in 0..self.count() {
                    inverse_big[memory_index[i] as usize] = i as u32;
                }
            }
        }

        (0..self.count()).map(move |i| match *self {
            FieldsShape::Primitive | FieldsShape::Union(_) | FieldsShape::Array { .. } => i,
            FieldsShape::Arbitrary { .. } => {
                if use_small {
                    inverse_small[i] as usize
                } else {
                    inverse_big[i] as usize
                }
            }
        })
    }
}
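
// Illustrative test (added, not original) of the `Arbitrary` shape: two
// fields stored in reverse of their source order, so walking by increasing
// offset visits source index 1 first.
#[cfg(test)]
mod fields_shape_tests {
    use super::{FieldsShape, Size};

    #[test]
    fn arbitrary_field_order() {
        // Source field 0 lives at offset 4, source field 1 at offset 0.
        let fields = FieldsShape::Arbitrary {
            offsets: vec![Size::from_bytes(4), Size::ZERO],
            memory_index: vec![1, 0],
        };
        assert_eq!(fields.count(), 2);
        assert_eq!(fields.offset(0).bytes(), 4);
        assert_eq!(fields.memory_index(0), 1);
        let order: Vec<_> = fields.index_by_increasing_offset().collect();
        assert_eq!(order, vec![1, 0]);
    }
}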

/// An identifier that specifies the address space that some operation
/// should operate on. Special address spaces have an effect on code generation,
/// depending on the target and the address spaces it implements.
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub struct AddressSpace(pub u32);

impl AddressSpace {
    /// The default address space, corresponding to data space.
    pub const DATA: Self = AddressSpace(0);
}

/// Describes how values of the type are passed by target ABIs,
/// in terms of categories of C types there are ABI rules for.
#[derive(Clone, PartialEq, Eq, Hash, Debug, HashStable_Generic)]
pub enum Abi {
    Uninhabited,
    Scalar(Scalar),
    ScalarPair(Scalar, Scalar),
    Vector {
        element: Scalar,
        count: u64,
    },
    Aggregate {
        /// If true, the size is exact, otherwise it's only a lower bound.
        sized: bool,
    },
}

impl Abi {
    /// Returns `true` if the layout corresponds to an unsized type.
    #[inline]
    pub fn is_unsized(&self) -> bool {
        match *self {
            Abi::Uninhabited | Abi::Scalar(_) | Abi::ScalarPair(..) | Abi::Vector { .. } => false,
            Abi::Aggregate { sized } => !sized,
        }
    }

    /// Returns `true` if this is a single signed integer scalar.
    pub fn is_signed(&self) -> bool {
        match *self {
            Abi::Scalar(ref scal) => match scal.value {
                Primitive::Int(_, signed) => signed,
                _ => false,
            },
            _ => panic!("`is_signed` on non-scalar ABI {:?}", self),
        }
    }

    /// Returns `true` if this is an uninhabited type.
    #[inline]
    pub fn is_uninhabited(&self) -> bool {
        matches!(*self, Abi::Uninhabited)
    }

    /// Returns `true` if this is a scalar type.
    #[inline]
    pub fn is_scalar(&self) -> bool {
        matches!(*self, Abi::Scalar(_))
    }
}

rustc_index::newtype_index! {
    pub struct VariantIdx {
        derive [HashStable_Generic]
    }
}

#[derive(PartialEq, Eq, Hash, Debug, HashStable_Generic)]
pub enum Variants {
    /// Single enum variants, structs/tuples, unions, and all non-ADTs.
    Single { index: VariantIdx },

    /// Enum-likes with more than one inhabited variant: each variant comes with
    /// a *discriminant* (usually the same as the variant index but the user can
    /// assign explicit discriminant values). That discriminant is encoded
    /// as a *tag* on the machine. The layout of each variant is
    /// a struct, and they all have space reserved for the tag.
    /// For enums, the tag is the sole field of the layout.
    Multiple {
        tag: Scalar,
        tag_encoding: TagEncoding,
        tag_field: usize,
        variants: IndexVec<VariantIdx, Layout>,
    },
}

#[derive(PartialEq, Eq, Hash, Debug, HashStable_Generic)]
pub enum TagEncoding {
    /// The tag directly stores the discriminant, but possibly with a smaller layout
    /// (so converting the tag to the discriminant can require sign extension).
    Direct,

    /// Niche (values invalid for a type) encoding the discriminant:
    /// Discriminant and variant index coincide.
    /// The variant `dataful_variant` contains a niche at an arbitrary
    /// offset (field `tag_field` of the enum), which for a variant with
    /// discriminant `d` is set to
    /// `(d - niche_variants.start).wrapping_add(niche_start)`.
    ///
    /// For example, `Option<(usize, &T)>` is represented such that
    /// `None` has a null pointer for the second tuple field, and
    /// `Some` is the identity function (with a non-null reference).
    Niche {
        dataful_variant: VariantIdx,
        niche_variants: RangeInclusive<VariantIdx>,
        niche_start: u128,
    },
}

#[derive(Clone, PartialEq, Eq, Hash, Debug, HashStable_Generic)]
pub struct Niche {
    pub offset: Size,
    pub scalar: Scalar,
}

impl Niche {
    pub fn from_scalar<C: HasDataLayout>(cx: &C, offset: Size, scalar: Scalar) -> Option<Self> {
        let niche = Niche { offset, scalar };
        if niche.available(cx) > 0 { Some(niche) } else { None }
    }

    pub fn available<C: HasDataLayout>(&self, cx: &C) -> u128 {
        let Scalar { value, valid_range: ref v } = self.scalar;
        let bits = value.size(cx).bits();
        assert!(bits <= 128);
        let max_value = !0u128 >> (128 - bits);

        // Find out how many values are outside the valid range.
        let niche = v.end.wrapping_add(1)..v.start;
        niche.end.wrapping_sub(niche.start) & max_value
    }

    pub fn reserve<C: HasDataLayout>(&self, cx: &C, count: u128) -> Option<(u128, Scalar)> {
        assert!(count > 0);

        let Scalar { value, valid_range: v } = self.scalar.clone();
        let bits = value.size(cx).bits();
        assert!(bits <= 128);
        let max_value = !0u128 >> (128 - bits);

        if count > max_value {
            return None;
        }

        // Compute the range of invalid values being reserved.
        let start = v.end.wrapping_add(1) & max_value;
        let end = v.end.wrapping_add(count) & max_value;

        if v.contains(end) {
            return None;
        }

        Some((start, Scalar { value, valid_range: v.with_end(end) }))
    }
}
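
// Sketch (added, not original) of the niche arithmetic above: a bool-like
// scalar over `0..=1` in an 8-bit primitive leaves 254 spare bit patterns,
// and reserving two of them hands out `2..=3` and widens the valid range.
#[cfg(test)]
mod niche_tests {
    use super::*;

    #[test]
    fn bool_niche() {
        let dl = TargetDataLayout::default();
        let scalar =
            Scalar { value: Int(I8, false), valid_range: WrappingRange { start: 0, end: 1 } };
        let niche = Niche::from_scalar(&dl, Size::ZERO, scalar).unwrap();
        // 256 possible bit patterns minus the two valid ones.
        assert_eq!(niche.available(&dl), 254);
        let (start, reserved) = niche.reserve(&dl, 2).unwrap();
        assert_eq!(start, 2);
        assert_eq!(reserved.valid_range.end, 3);
    }
}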

#[derive(PartialEq, Eq, Hash, Debug, HashStable_Generic)]
pub struct Layout {
    /// Says where the fields are located within the layout.
    pub fields: FieldsShape,

    /// Encodes information about multi-variant layouts.
    /// Even with `Multiple` variants, a layout still has its own fields! Those are then
    /// shared between all variants. One of them will be the discriminant,
    /// but e.g. generators can have more.
    ///
    /// To access all fields of this layout, both `fields` and the fields of the active variant
    /// must be taken into account.
    pub variants: Variants,

    /// The `abi` defines how this data is passed between functions, and it defines
    /// value restrictions via `valid_range`.
    ///
    /// Note that this is entirely orthogonal to the recursive structure defined by
    /// `variants` and `fields`; for example, `ManuallyDrop<Result<isize, isize>>` has
    /// `Abi::ScalarPair`! So, even with non-`Aggregate` `abi`, `fields` and `variants`
    /// have to be taken into account to find all fields of this layout.
    pub abi: Abi,

    /// The leaf scalar with the largest number of invalid values
    /// (i.e. outside of its `valid_range`), if it exists.
    pub largest_niche: Option<Niche>,

    pub align: AbiAndPrefAlign,
    pub size: Size,
}

impl Layout {
    pub fn scalar<C: HasDataLayout>(cx: &C, scalar: Scalar) -> Self {
        let largest_niche = Niche::from_scalar(cx, Size::ZERO, scalar.clone());
        let size = scalar.value.size(cx);
        let align = scalar.value.align(cx);
        Layout {
            variants: Variants::Single { index: VariantIdx::new(0) },
            fields: FieldsShape::Primitive,
            abi: Abi::Scalar(scalar),
            largest_niche,
            size,
            align,
        }
    }
}
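
// Sketch (added, not original) of `Layout::scalar`: a full-range i32 scalar
// is 4 bytes and exposes no niche, since every bit pattern is valid.
#[cfg(test)]
mod layout_tests {
    use super::*;

    #[test]
    fn scalar_layout() {
        let dl = TargetDataLayout::default();
        let scalar = Scalar {
            value: Int(I32, true),
            valid_range: WrappingRange { start: 0, end: 0xffff_ffff },
        };
        let layout = Layout::scalar(&dl, scalar);
        assert_eq!(layout.size, Size::from_bytes(4));
        assert!(layout.largest_niche.is_none());
    }
}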

/// The layout of a type, alongside the type itself.
/// Provides various type traversal APIs (e.g., recursing into fields).
///
/// Note that the layout is NOT guaranteed to always be identical
/// to that obtained from `layout_of(ty)`, as we need to produce
/// layouts for which Rust types do not exist, such as enum variants
/// or synthetic fields of enums (i.e., discriminants) and fat pointers.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub struct TyAndLayout<'a, Ty> {
    pub ty: Ty,
    pub layout: &'a Layout,
}

impl<'a, Ty> Deref for TyAndLayout<'a, Ty> {
    type Target = &'a Layout;
    fn deref(&self) -> &&'a Layout {
        &self.layout
    }
}

/// Trait for context types that can compute layouts of things.
pub trait LayoutOf {
    type Ty;
    type TyAndLayout;

    fn layout_of(&self, ty: Self::Ty) -> Self::TyAndLayout;
    fn spanned_layout_of(&self, ty: Self::Ty, _span: Span) -> Self::TyAndLayout {
        self.layout_of(ty)
    }
}

/// The `TyAndLayout` above will always be a `MaybeResult<TyAndLayout<'_, Self>>`.
/// We can't add the bound due to the lifetime, but this trait is still useful when
/// writing code that's generic over the `LayoutOf` impl.
pub trait MaybeResult<T> {
    type Error;

    fn from(x: Result<T, Self::Error>) -> Self;
    fn to_result(self) -> Result<T, Self::Error>;
}

impl<T> MaybeResult<T> for T {
    type Error = !;

    fn from(Ok(x): Result<T, Self::Error>) -> Self {
        x
    }
    fn to_result(self) -> Result<T, Self::Error> {
        Ok(self)
    }
}

impl<T, E> MaybeResult<T> for Result<T, E> {
    type Error = E;

    fn from(x: Result<T, Self::Error>) -> Self {
        x
    }
    fn to_result(self) -> Result<T, Self::Error> {
        self
    }
}

#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub enum PointerKind {
    /// Most general case, we know no restrictions to tell LLVM.
    Shared,

    /// `&T` where `T` contains no `UnsafeCell`, is `noalias` and `readonly`.
    Frozen,

    /// `&mut T` which is `noalias` but not `readonly`.
    UniqueBorrowed,

    /// `Box<T>`, unlike `UniqueBorrowed`, it also has `noalias` on returns.
    UniqueOwned,
}

#[derive(Copy, Clone, Debug)]
pub struct PointeeInfo {
    pub size: Size,
    pub align: Align,
    pub safe: Option<PointerKind>,
    pub address_space: AddressSpace,
}

pub trait TyAndLayoutMethods<'a, C: LayoutOf<Ty = Self>>: Sized {
    fn for_variant(
        this: TyAndLayout<'a, Self>,
        cx: &C,
        variant_index: VariantIdx,
    ) -> TyAndLayout<'a, Self>;
    fn field(this: TyAndLayout<'a, Self>, cx: &C, i: usize) -> C::TyAndLayout;
    fn pointee_info_at(this: TyAndLayout<'a, Self>, cx: &C, offset: Size) -> Option<PointeeInfo>;
}

impl<'a, Ty> TyAndLayout<'a, Ty> {
    pub fn for_variant<C>(self, cx: &C, variant_index: VariantIdx) -> Self
    where
        Ty: TyAndLayoutMethods<'a, C>,
        C: LayoutOf<Ty = Ty>,
    {
        Ty::for_variant(self, cx, variant_index)
    }

    /// Callers might want to use `C: LayoutOf<Ty=Ty, TyAndLayout: MaybeResult<Self>>`
    /// to allow recursion (see `might_permit_raw_init` below for an example).
    pub fn field<C>(self, cx: &C, i: usize) -> C::TyAndLayout
    where
        Ty: TyAndLayoutMethods<'a, C>,
        C: LayoutOf<Ty = Ty>,
    {
        Ty::field(self, cx, i)
    }

    pub fn pointee_info_at<C>(self, cx: &C, offset: Size) -> Option<PointeeInfo>
    where
        Ty: TyAndLayoutMethods<'a, C>,
        C: LayoutOf<Ty = Ty>,
    {
        Ty::pointee_info_at(self, cx, offset)
    }
}

impl<'a, Ty> TyAndLayout<'a, Ty> {
    /// Returns `true` if the layout corresponds to an unsized type.
    pub fn is_unsized(&self) -> bool {
        self.abi.is_unsized()
    }

    /// Returns `true` if the type is a ZST and not unsized.
    pub fn is_zst(&self) -> bool {
        match self.abi {
            Abi::Scalar(_) | Abi::ScalarPair(..) | Abi::Vector { .. } => false,
            Abi::Uninhabited => self.size.bytes() == 0,
            Abi::Aggregate { sized } => sized && self.size.bytes() == 0,
        }
    }

    /// Determines if this type permits "raw" initialization by just transmuting some
    /// memory into an instance of `T`.
    /// `zero` indicates if the memory is zero-initialized, or alternatively
    /// left entirely uninitialized.
    /// This is conservative: in doubt, it will answer `true`.
    ///
    /// FIXME: Once we removed all the conservatism, we could alternatively
    /// create an all-0/all-undef constant and run the const value validator to see if
    /// this is a valid value for the given type.
    pub fn might_permit_raw_init<C, E>(self, cx: &C, zero: bool) -> Result<bool, E>
    where
        Self: Copy,
        Ty: TyAndLayoutMethods<'a, C>,
        C: LayoutOf<Ty = Ty, TyAndLayout: MaybeResult<Self, Error = E>> + HasDataLayout,
    {
        let scalar_allows_raw_init = move |s: &Scalar| -> bool {
            if zero {
                // The range must contain 0.
                s.valid_range.contains_zero()
            } else {
                // The range must include all values. `valid_range_exclusive` handles
                // the wrap-around using target arithmetic; with wrap-around then the full
                // range is one where `start == end`.
                let range = s.valid_range_exclusive(cx);
                range.start == range.end
            }
        };

        // Check the ABI.
        let valid = match &self.abi {
            Abi::Uninhabited => false, // definitely UB
            Abi::Scalar(s) => scalar_allows_raw_init(s),
            Abi::ScalarPair(s1, s2) => scalar_allows_raw_init(s1) && scalar_allows_raw_init(s2),
            Abi::Vector { element: s, count } => *count == 0 || scalar_allows_raw_init(s),
            Abi::Aggregate { .. } => true, // Fields are checked below.
        };
        if !valid {
            // This is definitely not okay.
            return Ok(false);
        }

        // If we have not found an error yet, we need to recursively descend into fields.
        match &self.fields {
            FieldsShape::Primitive | FieldsShape::Union { .. } => {}
            FieldsShape::Array { .. } => {
                // FIXME(#66151): For now, we are conservative and do not check arrays.
            }
            FieldsShape::Arbitrary { offsets, .. } => {
                for idx in 0..offsets.len() {
                    let field = self.field(cx, idx).to_result()?;
                    if !field.might_permit_raw_init(cx, zero)? {
                        // We found a field that is unhappy with this kind of initialization.
                        return Ok(false);
                    }
                }
            }
        }

        // FIXME(#66151): For now, we are conservative and do not check `self.variants`.
        Ok(true)
    }
}