2019-02-08 13:53:55 +00:00
|
|
|
//! Miscellaneous type-system utilities that are too small to deserve their own modules.
|
2015-09-14 11:55:56 +00:00
|
|
|
|
2020-04-17 16:55:23 +00:00
|
|
|
use crate::middle::codegen_fn_attrs::CodegenFnAttrFlags;
|
2020-04-11 04:50:02 +00:00
|
|
|
use crate::ty::fold::TypeFolder;
|
2020-03-31 16:16:47 +00:00
|
|
|
use crate::ty::layout::IntegerExt;
|
2019-02-05 17:20:45 +00:00
|
|
|
use crate::ty::query::TyCtxtAt;
|
2020-08-18 17:40:03 +00:00
|
|
|
use crate::ty::subst::{GenericArgKind, Subst, SubstsRef};
|
2019-02-05 17:20:45 +00:00
|
|
|
use crate::ty::TyKind::*;
|
2021-07-02 01:14:13 +00:00
|
|
|
use crate::ty::{self, DebruijnIndex, DefIdTree, List, Ty, TyCtxt, TypeFoldable};
|
2019-12-11 09:04:34 +00:00
|
|
|
use rustc_apfloat::Float as _;
|
2020-04-27 17:56:11 +00:00
|
|
|
use rustc_ast as ast;
|
2020-01-11 12:15:20 +00:00
|
|
|
use rustc_attr::{self as attr, SignedInt, UnsignedInt};
|
2019-12-11 09:04:34 +00:00
|
|
|
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
|
|
|
|
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
|
2020-03-31 19:18:30 +00:00
|
|
|
use rustc_errors::ErrorReported;
|
2020-01-05 01:37:57 +00:00
|
|
|
use rustc_hir as hir;
|
|
|
|
use rustc_hir::def::DefKind;
|
|
|
|
use rustc_hir::def_id::DefId;
|
2018-12-03 00:14:35 +00:00
|
|
|
use rustc_macros::HashStable;
|
2020-11-14 15:48:54 +00:00
|
|
|
use rustc_query_system::ich::NodeIdHashingMode;
|
2021-04-27 13:01:37 +00:00
|
|
|
use rustc_span::DUMMY_SP;
|
2020-03-31 16:16:47 +00:00
|
|
|
use rustc_target::abi::{Integer, Size, TargetDataLayout};
|
2020-01-30 20:28:16 +00:00
|
|
|
use smallvec::SmallVec;
|
2021-04-27 13:01:37 +00:00
|
|
|
use std::{fmt, iter};
|
2015-09-14 11:55:56 +00:00
|
|
|
|
2018-01-25 15:44:45 +00:00
|
|
|
#[derive(Copy, Clone, Debug)]
|
|
|
|
pub struct Discr<'tcx> {
|
2019-02-08 13:53:55 +00:00
|
|
|
/// Bit representation of the discriminant (e.g., `-128i8` is `0xFF_u128`).
|
2018-01-25 15:44:45 +00:00
|
|
|
pub val: u128,
|
2019-12-22 22:42:04 +00:00
|
|
|
pub ty: Ty<'tcx>,
|
2017-03-13 00:12:13 +00:00
|
|
|
}
|
2017-02-05 05:01:48 +00:00
|
|
|
|
2018-01-25 15:44:45 +00:00
|
|
|
impl<'tcx> fmt::Display for Discr<'tcx> {
|
2018-08-30 05:02:42 +00:00
|
|
|
fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
|
2020-08-02 22:49:11 +00:00
|
|
|
match *self.ty.kind() {
|
2018-08-22 00:35:55 +00:00
|
|
|
ty::Int(ity) => {
|
2020-12-12 14:28:49 +00:00
|
|
|
let size = ty::tls::with(|tcx| Integer::from_int_ty(&tcx, ity).size());
|
2019-02-26 08:54:57 +00:00
|
|
|
let x = self.val;
|
2018-03-22 11:38:40 +00:00
|
|
|
// sign extend the raw representation to be an i128
|
2020-11-04 13:41:58 +00:00
|
|
|
let x = size.sign_extend(x) as i128;
|
2018-03-22 11:38:40 +00:00
|
|
|
write!(fmt, "{}", x)
|
2019-12-22 22:42:04 +00:00
|
|
|
}
|
2018-03-22 11:38:40 +00:00
|
|
|
_ => write!(fmt, "{}", self.val),
|
2018-01-25 15:44:45 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2015-09-14 11:55:56 +00:00
|
|
|
|
2019-12-11 09:04:34 +00:00
|
|
|
fn int_size_and_signed<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> (Size, bool) {
|
2020-08-02 22:49:11 +00:00
|
|
|
let (int, signed) = match *ty.kind() {
|
2020-12-12 14:28:49 +00:00
|
|
|
Int(ity) => (Integer::from_int_ty(&tcx, ity), true),
|
|
|
|
Uint(uty) => (Integer::from_uint_ty(&tcx, uty), false),
|
2019-12-11 09:04:34 +00:00
|
|
|
_ => bug!("non integer discriminant"),
|
|
|
|
};
|
|
|
|
(int.size(), signed)
|
|
|
|
}
|
|
|
|
|
2018-01-25 15:44:45 +00:00
|
|
|
impl<'tcx> Discr<'tcx> {
|
2019-02-08 13:53:55 +00:00
|
|
|
/// Adds `1` to the value and wraps around if the maximum for the type is reached.
|
2019-06-13 21:48:52 +00:00
|
|
|
pub fn wrap_incr(self, tcx: TyCtxt<'tcx>) -> Self {
|
2018-01-25 15:44:45 +00:00
|
|
|
self.checked_add(tcx, 1).0
|
|
|
|
}
|
2019-06-13 21:48:52 +00:00
|
|
|
pub fn checked_add(self, tcx: TyCtxt<'tcx>, n: u128) -> (Self, bool) {
|
2019-12-11 09:04:34 +00:00
|
|
|
let (size, signed) = int_size_and_signed(tcx, self.ty);
|
|
|
|
let (val, oflo) = if signed {
|
2021-09-07 18:44:33 +00:00
|
|
|
let min = size.signed_int_min();
|
|
|
|
let max = size.signed_int_max();
|
2020-11-04 13:41:58 +00:00
|
|
|
let val = size.sign_extend(self.val) as i128;
|
2020-03-04 12:18:08 +00:00
|
|
|
assert!(n < (i128::MAX as u128));
|
2018-01-25 15:44:45 +00:00
|
|
|
let n = n as i128;
|
|
|
|
let oflo = val > max - n;
|
2019-12-22 22:42:04 +00:00
|
|
|
let val = if oflo { min + (n - (max - val) - 1) } else { val + n };
|
2018-03-22 11:38:40 +00:00
|
|
|
// zero the upper bits
|
|
|
|
let val = val as u128;
|
2020-11-04 13:41:58 +00:00
|
|
|
let val = size.truncate(val);
|
2019-12-11 09:04:34 +00:00
|
|
|
(val, oflo)
|
2018-01-25 15:44:45 +00:00
|
|
|
} else {
|
2021-09-07 18:44:33 +00:00
|
|
|
let max = size.unsigned_int_max();
|
2018-01-26 12:37:46 +00:00
|
|
|
let val = self.val;
|
|
|
|
let oflo = val > max - n;
|
2019-12-22 22:42:04 +00:00
|
|
|
let val = if oflo { n - (max - val) - 1 } else { val + n };
|
2019-12-11 09:04:34 +00:00
|
|
|
(val, oflo)
|
|
|
|
};
|
|
|
|
(Self { val, ty: self.ty }, oflo)
|
2017-02-15 13:00:20 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2018-01-25 15:44:45 +00:00
|
|
|
pub trait IntTypeExt {
|
2019-06-13 21:48:52 +00:00
|
|
|
fn to_ty<'tcx>(&self, tcx: TyCtxt<'tcx>) -> Ty<'tcx>;
|
2019-06-13 22:32:15 +00:00
|
|
|
fn disr_incr<'tcx>(&self, tcx: TyCtxt<'tcx>, val: Option<Discr<'tcx>>) -> Option<Discr<'tcx>>;
|
2019-06-13 21:48:52 +00:00
|
|
|
fn initial_discriminant<'tcx>(&self, tcx: TyCtxt<'tcx>) -> Discr<'tcx>;
|
2018-01-25 15:44:45 +00:00
|
|
|
}
|
|
|
|
|
2015-09-14 11:55:56 +00:00
|
|
|
impl IntTypeExt for attr::IntType {
|
2019-06-13 21:48:52 +00:00
|
|
|
fn to_ty<'tcx>(&self, tcx: TyCtxt<'tcx>) -> Ty<'tcx> {
|
2017-02-05 05:01:48 +00:00
|
|
|
match *self {
|
2019-12-22 22:42:04 +00:00
|
|
|
SignedInt(ast::IntTy::I8) => tcx.types.i8,
|
|
|
|
SignedInt(ast::IntTy::I16) => tcx.types.i16,
|
|
|
|
SignedInt(ast::IntTy::I32) => tcx.types.i32,
|
|
|
|
SignedInt(ast::IntTy::I64) => tcx.types.i64,
|
|
|
|
SignedInt(ast::IntTy::I128) => tcx.types.i128,
|
|
|
|
SignedInt(ast::IntTy::Isize) => tcx.types.isize,
|
|
|
|
UnsignedInt(ast::UintTy::U8) => tcx.types.u8,
|
|
|
|
UnsignedInt(ast::UintTy::U16) => tcx.types.u16,
|
|
|
|
UnsignedInt(ast::UintTy::U32) => tcx.types.u32,
|
|
|
|
UnsignedInt(ast::UintTy::U64) => tcx.types.u64,
|
|
|
|
UnsignedInt(ast::UintTy::U128) => tcx.types.u128,
|
2018-01-04 01:12:04 +00:00
|
|
|
UnsignedInt(ast::UintTy::Usize) => tcx.types.usize,
|
2017-02-05 05:01:48 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-06-13 21:48:52 +00:00
|
|
|
fn initial_discriminant<'tcx>(&self, tcx: TyCtxt<'tcx>) -> Discr<'tcx> {
|
2019-12-22 22:42:04 +00:00
|
|
|
Discr { val: 0, ty: self.to_ty(tcx) }
|
2017-02-05 05:01:48 +00:00
|
|
|
}
|
|
|
|
|
2019-06-13 22:32:15 +00:00
|
|
|
fn disr_incr<'tcx>(&self, tcx: TyCtxt<'tcx>, val: Option<Discr<'tcx>>) -> Option<Discr<'tcx>> {
|
2017-02-05 05:01:48 +00:00
|
|
|
if let Some(val) = val {
|
2018-01-25 15:44:45 +00:00
|
|
|
assert_eq!(self.to_ty(tcx), val.ty);
|
|
|
|
let (new, oflo) = val.checked_add(tcx, 1);
|
2019-12-22 22:42:04 +00:00
|
|
|
if oflo { None } else { Some(new) }
|
2017-02-05 05:01:48 +00:00
|
|
|
} else {
|
|
|
|
Some(self.initial_discriminant(tcx))
|
|
|
|
}
|
2015-09-14 11:55:56 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-06-13 21:48:52 +00:00
|
|
|
impl<'tcx> TyCtxt<'tcx> {
|
2017-04-05 21:39:02 +00:00
|
|
|
/// Creates a hash of the type `Ty` which will be the same no matter what crate
|
|
|
|
/// context it's calculated within. This is used by the `type_id` intrinsic.
|
|
|
|
pub fn type_id_hash(self, ty: Ty<'tcx>) -> u64 {
|
|
|
|
let mut hasher = StableHasher::new();
|
2017-09-14 13:10:24 +00:00
|
|
|
let mut hcx = self.create_stable_hashing_context();
|
2017-04-05 21:39:02 +00:00
|
|
|
|
2017-08-08 16:11:39 +00:00
|
|
|
// We want the type_id be independent of the types free regions, so we
|
|
|
|
// erase them. The erase_regions() call will also anonymize bound
|
|
|
|
// regions, which is desirable too.
|
2020-10-24 00:21:18 +00:00
|
|
|
let ty = self.erase_regions(ty);
|
2017-08-08 16:11:39 +00:00
|
|
|
|
2017-04-05 21:39:02 +00:00
|
|
|
hcx.while_hashing_spans(false, |hcx| {
|
|
|
|
hcx.with_node_id_hashing_mode(NodeIdHashingMode::HashDefPath, |hcx| {
|
|
|
|
ty.hash_stable(hcx, &mut hasher);
|
|
|
|
});
|
|
|
|
});
|
|
|
|
hasher.finish()
|
|
|
|
}
|
|
|
|
|
2016-07-30 15:58:30 +00:00
|
|
|
pub fn has_error_field(self, ty: Ty<'tcx>) -> bool {
|
2020-08-02 22:49:11 +00:00
|
|
|
if let ty::Adt(def, substs) = *ty.kind() {
|
2018-10-02 08:56:36 +00:00
|
|
|
for field in def.all_fields() {
|
|
|
|
let field_ty = field.ty(self, substs);
|
2020-08-02 22:49:11 +00:00
|
|
|
if let Error(_) = field_ty.kind() {
|
2018-10-02 08:56:36 +00:00
|
|
|
return true;
|
2016-07-30 15:58:30 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
false
|
|
|
|
}
|
|
|
|
|
2019-07-11 11:27:41 +00:00
|
|
|
/// Attempts to returns the deeply last field of nested structures, but
|
|
|
|
/// does not apply any normalization in its search. Returns the same type
|
|
|
|
/// if input `ty` is not a structure at all.
|
2019-12-22 22:42:04 +00:00
|
|
|
pub fn struct_tail_without_normalization(self, ty: Ty<'tcx>) -> Ty<'tcx> {
|
2019-07-11 11:27:41 +00:00
|
|
|
let tcx = self;
|
|
|
|
tcx.struct_tail_with_normalize(ty, |ty| ty)
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Returns the deeply last field of nested structures, or the same type if
|
|
|
|
/// not a structure at all. Corresponds to the only possible unsized field,
|
|
|
|
/// and its type can be used to determine unsizing strategy.
|
2019-07-12 09:34:23 +00:00
|
|
|
///
|
|
|
|
/// Should only be called if `ty` has no inference variables and does not
|
|
|
|
/// need its lifetimes preserved (e.g. as part of codegen); otherwise
|
|
|
|
/// normalization attempt may cause compiler bugs.
|
2019-12-22 22:42:04 +00:00
|
|
|
pub fn struct_tail_erasing_lifetimes(
|
|
|
|
self,
|
|
|
|
ty: Ty<'tcx>,
|
|
|
|
param_env: ty::ParamEnv<'tcx>,
|
|
|
|
) -> Ty<'tcx> {
|
2019-07-11 11:27:41 +00:00
|
|
|
let tcx = self;
|
|
|
|
tcx.struct_tail_with_normalize(ty, |ty| tcx.normalize_erasing_regions(param_env, ty))
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Returns the deeply last field of nested structures, or the same type if
|
|
|
|
/// not a structure at all. Corresponds to the only possible unsized field,
|
|
|
|
/// and its type can be used to determine unsizing strategy.
|
|
|
|
///
|
|
|
|
/// This is parameterized over the normalization strategy (i.e. how to
|
|
|
|
/// handle `<T as Trait>::Assoc` and `impl Trait`); pass the identity
|
|
|
|
/// function to indicate no normalization should take place.
|
|
|
|
///
|
2019-07-12 09:34:23 +00:00
|
|
|
/// See also `struct_tail_erasing_lifetimes`, which is suitable for use
|
|
|
|
/// during codegen.
|
2019-12-22 22:42:04 +00:00
|
|
|
pub fn struct_tail_with_normalize(
|
|
|
|
self,
|
|
|
|
mut ty: Ty<'tcx>,
|
|
|
|
normalize: impl Fn(Ty<'tcx>) -> Ty<'tcx>,
|
|
|
|
) -> Ty<'tcx> {
|
2021-07-04 18:02:51 +00:00
|
|
|
let recursion_limit = self.recursion_limit();
|
2020-11-26 15:32:41 +00:00
|
|
|
for iteration in 0.. {
|
2021-06-25 23:48:26 +00:00
|
|
|
if !recursion_limit.value_within_limit(iteration) {
|
2020-11-26 15:32:41 +00:00
|
|
|
return self.ty_error_with_message(
|
|
|
|
DUMMY_SP,
|
|
|
|
&format!("reached the recursion limit finding the struct tail for {}", ty),
|
|
|
|
);
|
|
|
|
}
|
2020-08-02 22:49:11 +00:00
|
|
|
match *ty.kind() {
|
2018-08-22 00:35:02 +00:00
|
|
|
ty::Adt(def, substs) => {
|
2017-05-26 20:36:40 +00:00
|
|
|
if !def.is_struct() {
|
|
|
|
break;
|
|
|
|
}
|
2018-01-07 21:41:41 +00:00
|
|
|
match def.non_enum_variant().fields.last() {
|
2017-05-26 20:36:40 +00:00
|
|
|
Some(f) => ty = f.ty(self, substs),
|
|
|
|
None => break,
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2021-08-16 15:29:49 +00:00
|
|
|
ty::Tuple(tys) if let Some((&last_ty, _)) = tys.split_last() => {
|
|
|
|
ty = last_ty.expect_ty();
|
2017-05-26 20:36:40 +00:00
|
|
|
}
|
|
|
|
|
2021-08-16 15:29:49 +00:00
|
|
|
ty::Tuple(_) => break,
|
|
|
|
|
2019-07-11 11:27:41 +00:00
|
|
|
ty::Projection(_) | ty::Opaque(..) => {
|
|
|
|
let normalized = normalize(ty);
|
|
|
|
if ty == normalized {
|
|
|
|
return ty;
|
|
|
|
} else {
|
|
|
|
ty = normalized;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2017-05-26 20:36:40 +00:00
|
|
|
_ => {
|
|
|
|
break;
|
|
|
|
}
|
2015-09-14 11:55:56 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
ty
|
|
|
|
}
|
|
|
|
|
2019-05-17 01:20:14 +00:00
|
|
|
/// Same as applying `struct_tail` on `source` and `target`, but only
|
2015-09-14 11:55:56 +00:00
|
|
|
/// keeps going as long as the two types are instances of the same
|
|
|
|
/// structure definitions.
|
2018-11-20 14:34:15 +00:00
|
|
|
/// For `(Foo<Foo<T>>, Foo<dyn Trait>)`, the result will be `(Foo<T>, Trait)`,
|
2015-09-14 11:55:56 +00:00
|
|
|
/// whereas struct_tail produces `T`, and `Trait`, respectively.
|
2019-07-11 11:27:41 +00:00
|
|
|
///
|
2019-07-12 09:34:23 +00:00
|
|
|
/// Should only be called if the types have no inference variables and do
|
2019-05-17 01:20:14 +00:00
|
|
|
/// not need their lifetimes preserved (e.g., as part of codegen); otherwise,
|
2019-07-11 11:27:41 +00:00
|
|
|
/// normalization attempt may cause compiler bugs.
|
2019-12-22 22:42:04 +00:00
|
|
|
pub fn struct_lockstep_tails_erasing_lifetimes(
|
|
|
|
self,
|
|
|
|
source: Ty<'tcx>,
|
|
|
|
target: Ty<'tcx>,
|
|
|
|
param_env: ty::ParamEnv<'tcx>,
|
|
|
|
) -> (Ty<'tcx>, Ty<'tcx>) {
|
2019-07-11 11:27:41 +00:00
|
|
|
let tcx = self;
|
2019-12-22 22:42:04 +00:00
|
|
|
tcx.struct_lockstep_tails_with_normalize(source, target, |ty| {
|
|
|
|
tcx.normalize_erasing_regions(param_env, ty)
|
|
|
|
})
|
2019-07-11 11:27:41 +00:00
|
|
|
}
|
|
|
|
|
2019-05-17 01:20:14 +00:00
|
|
|
/// Same as applying `struct_tail` on `source` and `target`, but only
|
2019-07-11 11:27:41 +00:00
|
|
|
/// keeps going as long as the two types are instances of the same
|
|
|
|
/// structure definitions.
|
|
|
|
/// For `(Foo<Foo<T>>, Foo<dyn Trait>)`, the result will be `(Foo<T>, Trait)`,
|
|
|
|
/// whereas struct_tail produces `T`, and `Trait`, respectively.
|
|
|
|
///
|
2019-07-12 09:34:23 +00:00
|
|
|
/// See also `struct_lockstep_tails_erasing_lifetimes`, which is suitable for use
|
|
|
|
/// during codegen.
|
2019-12-22 22:42:04 +00:00
|
|
|
pub fn struct_lockstep_tails_with_normalize(
|
|
|
|
self,
|
|
|
|
source: Ty<'tcx>,
|
|
|
|
target: Ty<'tcx>,
|
|
|
|
normalize: impl Fn(Ty<'tcx>) -> Ty<'tcx>,
|
|
|
|
) -> (Ty<'tcx>, Ty<'tcx>) {
|
2015-09-14 11:55:56 +00:00
|
|
|
let (mut a, mut b) = (source, target);
|
2017-06-08 05:49:54 +00:00
|
|
|
loop {
|
2020-08-02 22:49:11 +00:00
|
|
|
match (&a.kind(), &b.kind()) {
|
2018-08-22 00:35:02 +00:00
|
|
|
(&Adt(a_def, a_substs), &Adt(b_def, b_substs))
|
2019-12-22 22:42:04 +00:00
|
|
|
if a_def == b_def && a_def.is_struct() =>
|
|
|
|
{
|
2018-01-07 21:41:41 +00:00
|
|
|
if let Some(f) = a_def.non_enum_variant().fields.last() {
|
2017-06-08 05:49:54 +00:00
|
|
|
a = f.ty(self, a_substs);
|
|
|
|
b = f.ty(self, b_substs);
|
|
|
|
} else {
|
|
|
|
break;
|
|
|
|
}
|
2019-12-22 22:42:04 +00:00
|
|
|
}
|
|
|
|
(&Tuple(a_tys), &Tuple(b_tys)) if a_tys.len() == b_tys.len() => {
|
2017-06-08 05:49:54 +00:00
|
|
|
if let Some(a_last) = a_tys.last() {
|
2019-04-25 23:27:33 +00:00
|
|
|
a = a_last.expect_ty();
|
|
|
|
b = b_tys.last().unwrap().expect_ty();
|
2017-06-08 05:49:54 +00:00
|
|
|
} else {
|
|
|
|
break;
|
|
|
|
}
|
2019-12-22 22:42:04 +00:00
|
|
|
}
|
2020-04-17 00:38:52 +00:00
|
|
|
(ty::Projection(_) | ty::Opaque(..), _)
|
|
|
|
| (_, ty::Projection(_) | ty::Opaque(..)) => {
|
2019-07-11 11:27:41 +00:00
|
|
|
// If either side is a projection, attempt to
|
|
|
|
// progress via normalization. (Should be safe to
|
|
|
|
// apply to both sides as normalization is
|
|
|
|
// idempotent.)
|
|
|
|
let a_norm = normalize(a);
|
|
|
|
let b_norm = normalize(b);
|
|
|
|
if a == a_norm && b == b_norm {
|
|
|
|
break;
|
|
|
|
} else {
|
|
|
|
a = a_norm;
|
|
|
|
b = b_norm;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2017-03-13 00:12:13 +00:00
|
|
|
_ => break,
|
2015-09-14 11:55:56 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
(a, b)
|
|
|
|
}
|
|
|
|
|
2017-03-01 16:42:26 +00:00
|
|
|
/// Calculate the destructor of a given type.
|
|
|
|
pub fn calculate_dtor(
|
|
|
|
self,
|
|
|
|
adt_did: DefId,
|
2020-10-09 15:22:25 +00:00
|
|
|
validate: impl Fn(Self, DefId) -> Result<(), ErrorReported>,
|
2017-03-01 16:42:26 +00:00
|
|
|
) -> Option<ty::Destructor> {
|
2020-03-05 20:50:44 +00:00
|
|
|
let drop_trait = self.lang_items().drop_trait()?;
|
2019-01-26 10:47:56 +00:00
|
|
|
self.ensure().coherent_trait(drop_trait);
|
2017-03-01 16:42:26 +00:00
|
|
|
|
2017-04-24 12:20:46 +00:00
|
|
|
let ty = self.type_of(adt_did);
|
2021-09-01 11:06:15 +00:00
|
|
|
let (did, constness) = self.find_map_relevant_impl(drop_trait, ty, |impl_did| {
|
2020-03-03 00:19:00 +00:00
|
|
|
if let Some(item) = self.associated_items(impl_did).in_definition_order().next() {
|
2018-10-02 08:56:36 +00:00
|
|
|
if validate(self, impl_did).is_ok() {
|
2021-09-01 11:06:15 +00:00
|
|
|
return Some((item.def_id, self.impl_constness(impl_did)));
|
2017-03-01 16:42:26 +00:00
|
|
|
}
|
|
|
|
}
|
2020-10-09 13:28:32 +00:00
|
|
|
None
|
2021-09-01 11:06:15 +00:00
|
|
|
})?;
|
2017-03-01 16:42:26 +00:00
|
|
|
|
2021-09-01 11:06:15 +00:00
|
|
|
Some(ty::Destructor { did, constness })
|
2017-04-23 14:43:23 +00:00
|
|
|
}
|
|
|
|
|
2019-02-08 13:53:55 +00:00
|
|
|
/// Returns the set of types that are required to be alive in
|
2017-04-23 14:43:23 +00:00
|
|
|
/// order to run the destructor of `def` (see RFCs 769 and
|
|
|
|
/// 1238).
|
|
|
|
///
|
|
|
|
/// Note that this returns only the constraints for the
|
|
|
|
/// destructor of `def` itself. For the destructors of the
|
|
|
|
/// contents, you need `adt_dtorck_constraint`.
|
2019-12-22 22:42:04 +00:00
|
|
|
pub fn destructor_constraints(self, def: &'tcx ty::AdtDef) -> Vec<ty::subst::GenericArg<'tcx>> {
|
2017-04-23 14:43:23 +00:00
|
|
|
let dtor = match def.destructor(self) {
|
|
|
|
None => {
|
|
|
|
debug!("destructor_constraints({:?}) - no dtor", def.did);
|
2019-12-22 22:42:04 +00:00
|
|
|
return vec![];
|
2017-04-23 14:43:23 +00:00
|
|
|
}
|
2019-12-22 22:42:04 +00:00
|
|
|
Some(dtor) => dtor.did,
|
2017-04-23 14:43:23 +00:00
|
|
|
};
|
|
|
|
|
|
|
|
let impl_def_id = self.associated_item(dtor).container.id();
|
2017-04-24 12:20:46 +00:00
|
|
|
let impl_generics = self.generics_of(impl_def_id);
|
2017-04-23 14:43:23 +00:00
|
|
|
|
|
|
|
// We have a destructor - all the parameters that are not
|
|
|
|
// pure_wrt_drop (i.e, don't have a #[may_dangle] attribute)
|
|
|
|
// must be live.
|
|
|
|
|
|
|
|
// We need to return the list of parameters from the ADTs
|
|
|
|
// generics/substs that correspond to impure parameters on the
|
|
|
|
// impl's generics. This is a bit ugly, but conceptually simple:
|
|
|
|
//
|
|
|
|
// Suppose our ADT looks like the following
|
|
|
|
//
|
|
|
|
// struct S<X, Y, Z>(X, Y, Z);
|
|
|
|
//
|
|
|
|
// and the impl is
|
|
|
|
//
|
|
|
|
// impl<#[may_dangle] P0, P1, P2> Drop for S<P1, P2, P0>
|
|
|
|
//
|
|
|
|
// We want to return the parameters (X, Y). For that, we match
|
|
|
|
// up the item-substs <X, Y, Z> with the substs on the impl ADT,
|
|
|
|
// <P1, P2, P0>, and then look up which of the impl substs refer to
|
|
|
|
// parameters marked as pure.
|
|
|
|
|
2020-08-02 22:49:11 +00:00
|
|
|
let impl_substs = match *self.type_of(impl_def_id).kind() {
|
2018-08-22 00:35:02 +00:00
|
|
|
ty::Adt(def_, substs) if def_ == def => substs,
|
2019-12-22 22:42:04 +00:00
|
|
|
_ => bug!(),
|
2017-04-23 14:43:23 +00:00
|
|
|
};
|
|
|
|
|
2020-08-02 22:49:11 +00:00
|
|
|
let item_substs = match *self.type_of(def.did).kind() {
|
2018-08-22 00:35:02 +00:00
|
|
|
ty::Adt(def_, substs) if def_ == def => substs,
|
2019-12-22 22:42:04 +00:00
|
|
|
_ => bug!(),
|
2017-04-23 14:43:23 +00:00
|
|
|
};
|
|
|
|
|
2021-03-08 23:32:41 +00:00
|
|
|
let result = iter::zip(item_substs, impl_substs)
|
2020-05-23 09:49:24 +00:00
|
|
|
.filter(|&(_, k)| {
|
2018-02-23 01:13:54 +00:00
|
|
|
match k.unpack() {
|
2019-09-25 15:39:44 +00:00
|
|
|
GenericArgKind::Lifetime(&ty::RegionKind::ReEarlyBound(ref ebr)) => {
|
2018-05-10 22:46:57 +00:00
|
|
|
!impl_generics.region_param(ebr, self).pure_wrt_drop
|
2018-02-23 01:13:54 +00:00
|
|
|
}
|
2019-12-22 22:42:04 +00:00
|
|
|
GenericArgKind::Type(&ty::TyS { kind: ty::Param(ref pt), .. }) => {
|
2018-05-10 22:46:57 +00:00
|
|
|
!impl_generics.type_param(pt, self).pure_wrt_drop
|
2018-02-23 01:13:54 +00:00
|
|
|
}
|
2019-09-25 15:39:44 +00:00
|
|
|
GenericArgKind::Const(&ty::Const {
|
2019-12-22 22:42:04 +00:00
|
|
|
val: ty::ConstKind::Param(ref pc), ..
|
|
|
|
}) => !impl_generics.const_param(pc, self).pure_wrt_drop,
|
|
|
|
GenericArgKind::Lifetime(_)
|
|
|
|
| GenericArgKind::Type(_)
|
|
|
|
| GenericArgKind::Const(_) => {
|
2019-02-20 01:20:06 +00:00
|
|
|
// Not a type, const or region param: this should be reported
|
2018-02-23 01:13:54 +00:00
|
|
|
// as an error.
|
|
|
|
false
|
|
|
|
}
|
2017-04-23 14:43:23 +00:00
|
|
|
}
|
2018-10-02 08:52:43 +00:00
|
|
|
})
|
2020-05-23 09:49:24 +00:00
|
|
|
.map(|(item_param, _)| item_param)
|
2018-10-02 08:52:43 +00:00
|
|
|
.collect();
|
2017-04-23 14:43:23 +00:00
|
|
|
debug!("destructor_constraint({:?}) = {:?}", def.did, result);
|
|
|
|
result
|
|
|
|
}
|
|
|
|
|
2019-02-08 13:53:55 +00:00
|
|
|
/// Returns `true` if `def_id` refers to a closure (e.g., `|x| x * 2`). Note
|
|
|
|
/// that closures have a `DefId`, but the closure *expression* also
|
2018-07-02 14:34:19 +00:00
|
|
|
/// has a `HirId` that is located within the context where the
|
|
|
|
/// closure appears (and, sadly, a corresponding `NodeId`, since
|
|
|
|
/// those are not yet phased out). The parent of the closure's
|
2019-02-08 13:53:55 +00:00
|
|
|
/// `DefId` will also be the context where it appears.
|
2017-11-10 17:20:53 +00:00
|
|
|
pub fn is_closure(self, def_id: DefId) -> bool {
|
2020-04-17 18:55:17 +00:00
|
|
|
matches!(self.def_kind(def_id), DefKind::Closure | DefKind::Generator)
|
2017-11-10 17:20:53 +00:00
|
|
|
}
|
|
|
|
|
2021-10-02 12:12:33 +00:00
|
|
|
/// Returns `true` if `def_id` refers to a closure, generator or inline const.
|
|
|
|
pub fn is_closure_or_inline_const(self, def_id: DefId) -> bool {
|
|
|
|
matches!(
|
|
|
|
self.def_kind(def_id),
|
|
|
|
DefKind::Closure | DefKind::Generator | DefKind::InlineConst
|
|
|
|
)
|
|
|
|
}
|
|
|
|
|
2019-02-08 13:53:55 +00:00
|
|
|
/// Returns `true` if `def_id` refers to a trait (i.e., `trait Foo { ... }`).
|
2018-07-02 14:34:19 +00:00
|
|
|
pub fn is_trait(self, def_id: DefId) -> bool {
|
2020-04-17 18:55:17 +00:00
|
|
|
self.def_kind(def_id) == DefKind::Trait
|
2018-07-02 14:34:19 +00:00
|
|
|
}
|
|
|
|
|
2019-02-08 13:53:55 +00:00
|
|
|
/// Returns `true` if `def_id` refers to a trait alias (i.e., `trait Foo = ...;`),
|
|
|
|
/// and `false` otherwise.
|
2019-01-08 15:55:18 +00:00
|
|
|
pub fn is_trait_alias(self, def_id: DefId) -> bool {
|
2020-04-17 18:55:17 +00:00
|
|
|
self.def_kind(def_id) == DefKind::TraitAlias
|
2019-01-08 15:55:18 +00:00
|
|
|
}
|
|
|
|
|
2019-02-08 13:53:55 +00:00
|
|
|
/// Returns `true` if this `DefId` refers to the implicit constructor for
|
|
|
|
/// a tuple struct like `struct Foo(u32)`, and `false` otherwise.
|
2019-03-24 14:49:58 +00:00
|
|
|
pub fn is_constructor(self, def_id: DefId) -> bool {
|
2020-04-17 18:55:17 +00:00
|
|
|
matches!(self.def_kind(def_id), DefKind::Ctor(..))
|
2018-06-26 15:00:39 +00:00
|
|
|
}
|
|
|
|
|
2021-10-02 12:12:33 +00:00
|
|
|
/// Given the `DefId`, returns the `DefId` of the innermost item that
|
|
|
|
/// has its own type-checking context or "inference enviornment".
|
|
|
|
///
|
|
|
|
/// For example, a closure has its own `DefId`, but it is type-checked
|
|
|
|
/// with the containing item. Similarly, an inline const block has its
|
|
|
|
/// own `DefId` but it is type-checked together with the containing item.
|
|
|
|
///
|
|
|
|
/// Therefore, when we fetch the
|
2020-07-17 08:47:04 +00:00
|
|
|
/// `typeck` the closure, for example, we really wind up
|
|
|
|
/// fetching the `typeck` the enclosing fn item.
|
2017-04-23 14:43:23 +00:00
|
|
|
pub fn closure_base_def_id(self, def_id: DefId) -> DefId {
|
2016-11-03 20:19:33 +00:00
|
|
|
let mut def_id = def_id;
|
2021-10-02 12:12:33 +00:00
|
|
|
while self.is_closure_or_inline_const(def_id) {
|
2018-12-19 10:20:59 +00:00
|
|
|
def_id = self.parent(def_id).unwrap_or_else(|| {
|
2016-11-03 20:19:33 +00:00
|
|
|
bug!("closure {:?} has no parent", def_id);
|
|
|
|
});
|
|
|
|
}
|
|
|
|
def_id
|
|
|
|
}
|
2017-02-08 17:31:03 +00:00
|
|
|
|
2019-02-08 13:53:55 +00:00
|
|
|
/// Given the `DefId` and substs a closure, creates the type of
|
2017-11-21 16:18:40 +00:00
|
|
|
/// `self` argument that the closure expects. For example, for a
|
|
|
|
/// `Fn` closure, this would return a reference type `&T` where
|
2019-02-08 13:53:55 +00:00
|
|
|
/// `T = closure_ty`.
|
2017-11-21 16:18:40 +00:00
|
|
|
///
|
|
|
|
/// Returns `None` if this closure's kind has not yet been inferred.
|
|
|
|
/// This should only be possible during type checking.
|
|
|
|
///
|
|
|
|
/// Note that the return value is a late-bound region and hence
|
|
|
|
/// wrapped in a binder.
|
2019-12-22 22:42:04 +00:00
|
|
|
pub fn closure_env_ty(
|
|
|
|
self,
|
|
|
|
closure_def_id: DefId,
|
|
|
|
closure_substs: SubstsRef<'tcx>,
|
2020-10-26 18:18:31 +00:00
|
|
|
env_region: ty::RegionKind,
|
|
|
|
) -> Option<Ty<'tcx>> {
|
2017-11-21 16:18:40 +00:00
|
|
|
let closure_ty = self.mk_closure(closure_def_id, closure_substs);
|
2020-03-13 01:23:38 +00:00
|
|
|
let closure_kind_ty = closure_substs.as_closure().kind_ty();
|
2017-11-21 16:18:40 +00:00
|
|
|
let closure_kind = closure_kind_ty.to_opt_closure_kind()?;
|
|
|
|
let env_ty = match closure_kind {
|
|
|
|
ty::ClosureKind::Fn => self.mk_imm_ref(self.mk_region(env_region), closure_ty),
|
|
|
|
ty::ClosureKind::FnMut => self.mk_mut_ref(self.mk_region(env_region), closure_ty),
|
|
|
|
ty::ClosureKind::FnOnce => closure_ty,
|
|
|
|
};
|
2020-10-26 18:18:31 +00:00
|
|
|
Some(env_ty)
|
2017-11-21 16:18:40 +00:00
|
|
|
}
|
|
|
|
|
2019-04-21 11:41:51 +00:00
|
|
|
/// Returns `true` if the node pointed to by `def_id` is a `static` item.
|
2020-09-18 18:49:25 +00:00
|
|
|
pub fn is_static(self, def_id: DefId) -> bool {
|
2019-04-21 11:41:51 +00:00
|
|
|
self.static_mutability(def_id).is_some()
|
|
|
|
}
|
|
|
|
|
2020-04-17 16:55:23 +00:00
|
|
|
/// Returns `true` if this is a `static` item with the `#[thread_local]` attribute.
|
2020-09-18 18:49:25 +00:00
|
|
|
pub fn is_thread_local_static(self, def_id: DefId) -> bool {
|
2020-04-17 16:55:23 +00:00
|
|
|
self.codegen_fn_attrs(def_id).flags.contains(CodegenFnAttrFlags::THREAD_LOCAL)
|
|
|
|
}
|
|
|
|
|
2019-04-21 11:41:51 +00:00
|
|
|
/// Returns `true` if the node pointed to by `def_id` is a mutable `static` item.
|
2020-09-18 18:49:25 +00:00
|
|
|
pub fn is_mutable_static(self, def_id: DefId) -> bool {
|
2019-12-16 16:28:40 +00:00
|
|
|
self.static_mutability(def_id) == Some(hir::Mutability::Mut)
|
2017-11-12 11:40:56 +00:00
|
|
|
}
|
2018-11-18 18:33:44 +00:00
|
|
|
|
2019-11-26 22:30:54 +00:00
|
|
|
/// Get the type of the pointer to the static that we use in MIR.
|
2020-09-18 18:49:25 +00:00
|
|
|
pub fn static_ptr_ty(self, def_id: DefId) -> Ty<'tcx> {
|
2019-11-26 22:30:54 +00:00
|
|
|
// Make sure that any constants in the static's type are evaluated.
|
2019-12-22 22:42:04 +00:00
|
|
|
let static_ty = self.normalize_erasing_regions(ty::ParamEnv::empty(), self.type_of(def_id));
|
2019-11-26 22:30:54 +00:00
|
|
|
|
2020-10-19 07:47:18 +00:00
|
|
|
// Make sure that accesses to unsafe statics end up using raw pointers.
|
2020-10-19 08:53:20 +00:00
|
|
|
// For thread-locals, this needs to be kept in sync with `Rvalue::ty`.
|
2019-11-26 22:30:54 +00:00
|
|
|
if self.is_mutable_static(def_id) {
|
|
|
|
self.mk_mut_ptr(static_ty)
|
2020-10-19 07:47:18 +00:00
|
|
|
} else if self.is_foreign_item(def_id) {
|
|
|
|
self.mk_imm_ptr(static_ty)
|
2019-11-26 22:30:54 +00:00
|
|
|
} else {
|
|
|
|
self.mk_imm_ref(self.lifetimes.re_erased, static_ty)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2018-11-18 18:33:44 +00:00
|
|
|
/// Expands the given impl trait type, stopping if the type is recursive.
|
2021-08-20 13:36:04 +00:00
|
|
|
#[instrument(skip(self), level = "debug")]
|
2018-11-18 18:33:44 +00:00
|
|
|
pub fn try_expand_impl_trait_type(
|
|
|
|
self,
|
|
|
|
def_id: DefId,
|
2019-02-09 14:11:53 +00:00
|
|
|
substs: SubstsRef<'tcx>,
|
2018-11-18 18:33:44 +00:00
|
|
|
) -> Result<Ty<'tcx>, Ty<'tcx>> {
|
|
|
|
let mut visitor = OpaqueTypeExpander {
|
|
|
|
seen_opaque_tys: FxHashSet::default(),
|
2019-10-10 23:38:05 +00:00
|
|
|
expanded_cache: FxHashMap::default(),
|
2020-04-11 04:50:02 +00:00
|
|
|
primary_def_id: Some(def_id),
|
2018-11-18 18:33:44 +00:00
|
|
|
found_recursion: false,
|
2021-07-28 12:21:59 +00:00
|
|
|
found_any_recursion: false,
|
2020-04-11 04:50:02 +00:00
|
|
|
check_recursion: true,
|
2018-11-18 18:33:44 +00:00
|
|
|
tcx: self,
|
|
|
|
};
|
2020-04-11 04:50:02 +00:00
|
|
|
|
2018-11-18 18:33:44 +00:00
|
|
|
let expanded_type = visitor.expand_opaque_ty(def_id, substs).unwrap();
|
2021-08-20 13:36:04 +00:00
|
|
|
trace!(?expanded_type);
|
2019-12-22 22:42:04 +00:00
|
|
|
if visitor.found_recursion { Err(expanded_type) } else { Ok(expanded_type) }
|
2018-11-18 18:33:44 +00:00
|
|
|
}
|
2015-09-14 11:55:56 +00:00
|
|
|
}
|
|
|
|
|
2020-04-11 04:50:02 +00:00
|
|
|
struct OpaqueTypeExpander<'tcx> {
|
|
|
|
// Contains the DefIds of the opaque types that are currently being
|
|
|
|
// expanded. When we expand an opaque type we insert the DefId of
|
|
|
|
// that type, and when we finish expanding that type we remove the
|
|
|
|
// its DefId.
|
|
|
|
seen_opaque_tys: FxHashSet<DefId>,
|
|
|
|
// Cache of all expansions we've seen so far. This is a critical
|
|
|
|
// optimization for some large types produced by async fn trees.
|
|
|
|
expanded_cache: FxHashMap<(DefId, SubstsRef<'tcx>), Ty<'tcx>>,
|
|
|
|
primary_def_id: Option<DefId>,
|
|
|
|
found_recursion: bool,
|
2021-07-28 12:21:59 +00:00
|
|
|
found_any_recursion: bool,
|
2020-04-11 04:50:02 +00:00
|
|
|
/// Whether or not to check for recursive opaque types.
|
|
|
|
/// This is `true` when we're explicitly checking for opaque type
|
2020-08-02 15:20:00 +00:00
|
|
|
/// recursion, and 'false' otherwise to avoid unnecessary work.
|
2020-04-11 04:50:02 +00:00
|
|
|
check_recursion: bool,
|
|
|
|
tcx: TyCtxt<'tcx>,
|
|
|
|
}
|
|
|
|
|
|
|
|
impl<'tcx> OpaqueTypeExpander<'tcx> {
|
|
|
|
fn expand_opaque_ty(&mut self, def_id: DefId, substs: SubstsRef<'tcx>) -> Option<Ty<'tcx>> {
|
2021-07-28 12:21:59 +00:00
|
|
|
if self.found_any_recursion {
|
2020-04-11 04:50:02 +00:00
|
|
|
return None;
|
|
|
|
}
|
|
|
|
let substs = substs.fold_with(self);
|
|
|
|
if !self.check_recursion || self.seen_opaque_tys.insert(def_id) {
|
|
|
|
let expanded_ty = match self.expanded_cache.get(&(def_id, substs)) {
|
|
|
|
Some(expanded_ty) => expanded_ty,
|
|
|
|
None => {
|
|
|
|
let generic_ty = self.tcx.type_of(def_id);
|
|
|
|
let concrete_ty = generic_ty.subst(self.tcx, substs);
|
|
|
|
let expanded_ty = self.fold_ty(concrete_ty);
|
|
|
|
self.expanded_cache.insert((def_id, substs), expanded_ty);
|
|
|
|
expanded_ty
|
|
|
|
}
|
|
|
|
};
|
|
|
|
if self.check_recursion {
|
|
|
|
self.seen_opaque_tys.remove(&def_id);
|
|
|
|
}
|
|
|
|
Some(expanded_ty)
|
|
|
|
} else {
|
|
|
|
// If another opaque type that we contain is recursive, then it
|
|
|
|
// will report the error, so we don't have to.
|
2021-07-28 12:21:59 +00:00
|
|
|
self.found_any_recursion = true;
|
2020-04-11 04:50:02 +00:00
|
|
|
self.found_recursion = def_id == *self.primary_def_id.as_ref().unwrap();
|
|
|
|
None
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
impl<'tcx> TypeFolder<'tcx> for OpaqueTypeExpander<'tcx> {
|
|
|
|
fn tcx(&self) -> TyCtxt<'tcx> {
|
|
|
|
self.tcx
|
|
|
|
}
|
|
|
|
|
|
|
|
fn fold_ty(&mut self, t: Ty<'tcx>) -> Ty<'tcx> {
|
|
|
|
if let ty::Opaque(def_id, substs) = t.kind {
|
|
|
|
self.expand_opaque_ty(def_id, substs).unwrap_or(t)
|
|
|
|
} else if t.has_opaque_types() {
|
|
|
|
t.super_fold_with(self)
|
|
|
|
} else {
|
|
|
|
t
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-06-11 20:35:39 +00:00
|
|
|
impl<'tcx> ty::TyS<'tcx> {
    /// Returns the maximum value for the given numeric type (including `char`s)
    /// or returns `None` if the type is not numeric.
    pub fn numeric_max_val(&'tcx self, tcx: TyCtxt<'tcx>) -> Option<&'tcx ty::Const<'tcx>> {
        // First compute the raw bit representation of the maximum, then wrap
        // it in a `ty::Const` of this type.
        let val = match self.kind() {
            ty::Int(_) | ty::Uint(_) => {
                let (size, signed) = int_size_and_signed(tcx, self);
                let val =
                    if signed { size.signed_int_max() as u128 } else { size.unsigned_int_max() };
                Some(val)
            }
            ty::Char => Some(std::char::MAX as u128),
            // For floats the "maximum" used here is positive infinity,
            // represented by its raw bit pattern.
            ty::Float(fty) => Some(match fty {
                ty::FloatTy::F32 => rustc_apfloat::ieee::Single::INFINITY.to_bits(),
                ty::FloatTy::F64 => rustc_apfloat::ieee::Double::INFINITY.to_bits(),
            }),
            _ => None,
        };
        val.map(|v| ty::Const::from_bits(tcx, v, ty::ParamEnv::empty().and(self)))
    }

    /// Returns the minimum value for the given numeric type (including `char`s)
    /// or returns `None` if the type is not numeric.
    pub fn numeric_min_val(&'tcx self, tcx: TyCtxt<'tcx>) -> Option<&'tcx ty::Const<'tcx>> {
        let val = match self.kind() {
            ty::Int(_) | ty::Uint(_) => {
                let (size, signed) = int_size_and_signed(tcx, self);
                // The signed minimum is sign-extended to 128 bits by the
                // `as u128` cast, so truncate it back to the type's width;
                // unsigned minima are always zero.
                let val = if signed { size.truncate(size.signed_int_min() as u128) } else { 0 };
                Some(val)
            }
            ty::Char => Some(0),
            // For floats the "minimum" used here is negative infinity,
            // represented by its raw bit pattern.
            ty::Float(fty) => Some(match fty {
                ty::FloatTy::F32 => (-::rustc_apfloat::ieee::Single::INFINITY).to_bits(),
                ty::FloatTy::F64 => (-::rustc_apfloat::ieee::Double::INFINITY).to_bits(),
            }),
            _ => None,
        };
        val.map(|v| ty::Const::from_bits(tcx, v, ty::ParamEnv::empty().and(self)))
    }

    /// Checks whether values of this type `T` are *moved* or *copied*
    /// when referenced -- this amounts to a check for whether `T:
    /// Copy`, but note that we **don't** consider lifetimes when
    /// doing this check. This means that we may generate MIR which
    /// does copies even when the type actually doesn't satisfy the
    /// full requirements for the `Copy` trait (cc #29149) -- this
    /// winds up being reported as an error during NLL borrow check.
    pub fn is_copy_modulo_regions(
        &'tcx self,
        tcx_at: TyCtxtAt<'tcx>,
        param_env: ty::ParamEnv<'tcx>,
    ) -> bool {
        // Delegates entirely to the (cached) `is_copy_raw` query.
        tcx_at.is_copy_raw(param_env.and(self))
    }

    /// Checks whether values of this type `T` have a size known at
    /// compile time (i.e., whether `T: Sized`). Lifetimes are ignored
    /// for the purposes of this check, so it can be an
    /// over-approximation in generic contexts, where one can have
    /// strange rules like `<T as Foo<'static>>::Bar: Sized` that
    /// actually carry lifetime requirements.
    pub fn is_sized(&'tcx self, tcx_at: TyCtxtAt<'tcx>, param_env: ty::ParamEnv<'tcx>) -> bool {
        // Short-circuit on the trivial fast path before falling back to the
        // `is_sized_raw` query.
        self.is_trivially_sized(tcx_at.tcx) || tcx_at.is_sized_raw(param_env.and(self))
    }

    /// Checks whether values of this type `T` implement the `Freeze`
    /// trait -- frozen types are those that do not contain an
    /// `UnsafeCell` anywhere. This is a language concept used to
    /// distinguish "true immutability", which is relevant to
    /// optimization as well as the rules around static values. Note
    /// that the `Freeze` trait is not exposed to end users and is
    /// effectively an implementation detail.
    pub fn is_freeze(&'tcx self, tcx_at: TyCtxtAt<'tcx>, param_env: ty::ParamEnv<'tcx>) -> bool {
        // Short-circuit on the trivial fast path before falling back to the
        // `is_freeze_raw` query.
        self.is_trivially_freeze() || tcx_at.is_freeze_raw(param_env.and(self))
    }

    /// Fast path helper for testing if a type is `Freeze`.
    ///
    /// Returning true means the type is known to be `Freeze`. Returning
    /// `false` means nothing -- could be `Freeze`, might not be.
    fn is_trivially_freeze(&self) -> bool {
        match self.kind() {
            // Primitives, references, raw pointers and function types never
            // contain an `UnsafeCell`.
            ty::Int(_)
            | ty::Uint(_)
            | ty::Float(_)
            | ty::Bool
            | ty::Char
            | ty::Str
            | ty::Never
            | ty::Ref(..)
            | ty::RawPtr(_)
            | ty::FnDef(..)
            | ty::Error(_)
            | ty::FnPtr(_) => true,
            // Aggregates are trivially `Freeze` iff every element is.
            ty::Tuple(_) => self.tuple_fields().all(Self::is_trivially_freeze),
            ty::Slice(elem_ty) | ty::Array(elem_ty, _) => elem_ty.is_trivially_freeze(),
            // Everything else needs the full query to decide.
            ty::Adt(..)
            | ty::Bound(..)
            | ty::Closure(..)
            | ty::Dynamic(..)
            | ty::Foreign(_)
            | ty::Generator(..)
            | ty::GeneratorWitness(_)
            | ty::Infer(_)
            | ty::Opaque(..)
            | ty::Param(_)
            | ty::Placeholder(_)
            | ty::Projection(_) => false,
        }
    }

    /// Checks whether values of this type `T` implement the `Unpin` trait.
    pub fn is_unpin(&'tcx self, tcx_at: TyCtxtAt<'tcx>, param_env: ty::ParamEnv<'tcx>) -> bool {
        // Short-circuit on the trivial fast path before falling back to the
        // `is_unpin_raw` query.
        self.is_trivially_unpin() || tcx_at.is_unpin_raw(param_env.and(self))
    }

    /// Fast path helper for testing if a type is `Unpin`.
    ///
    /// Returning true means the type is known to be `Unpin`. Returning
    /// `false` means nothing -- could be `Unpin`, might not be.
    fn is_trivially_unpin(&self) -> bool {
        // Mirrors `is_trivially_freeze` above: primitives are trivially
        // `Unpin`, aggregates are `Unpin` iff their elements are, and
        // everything else defers to the query.
        match self.kind() {
            ty::Int(_)
            | ty::Uint(_)
            | ty::Float(_)
            | ty::Bool
            | ty::Char
            | ty::Str
            | ty::Never
            | ty::Ref(..)
            | ty::RawPtr(_)
            | ty::FnDef(..)
            | ty::Error(_)
            | ty::FnPtr(_) => true,
            ty::Tuple(_) => self.tuple_fields().all(Self::is_trivially_unpin),
            ty::Slice(elem_ty) | ty::Array(elem_ty, _) => elem_ty.is_trivially_unpin(),
            ty::Adt(..)
            | ty::Bound(..)
            | ty::Closure(..)
            | ty::Dynamic(..)
            | ty::Foreign(_)
            | ty::Generator(..)
            | ty::GeneratorWitness(_)
            | ty::Infer(_)
            | ty::Opaque(..)
            | ty::Param(_)
            | ty::Placeholder(_)
            | ty::Projection(_) => false,
        }
    }

    /// If `ty.needs_drop(...)` returns `true`, then `ty` is definitely
    /// non-copy and *might* have a destructor attached; if it returns
    /// `false`, then `ty` definitely has no destructor (i.e., no drop glue).
    ///
    /// (Note that this implies that if `ty` has a destructor attached,
    /// then `needs_drop` will definitely return `true` for `ty`.)
    ///
    /// Note that this method is used to check eligible types in unions.
    #[inline]
    pub fn needs_drop(&'tcx self, tcx: TyCtxt<'tcx>, param_env: ty::ParamEnv<'tcx>) -> bool {
        // Avoid querying in simple cases.
        match needs_drop_components(self, &tcx.data_layout) {
            Err(AlwaysRequiresDrop) => true,
            Ok(components) => {
                let query_ty = match *components {
                    [] => return false,
                    // If we've got a single component, call the query with that
                    // to increase the chance that we hit the query cache.
                    [component_ty] => component_ty,
                    _ => self,
                };
                // This doesn't depend on regions, so try to minimize distinct
                // query keys used.
                let erased = tcx.normalize_erasing_regions(param_env, query_ty);
                tcx.needs_drop_raw(param_env.and(erased))
            }
        }
    }

    /// Checks if `ty` has has a significant drop.
    ///
    /// Note that this method can return false even if `ty` has a destructor
    /// attached; even if that is the case then the adt has been marked with
    /// the attribute `rustc_insignificant_dtor`.
    ///
    /// Note that this method is used to check for change in drop order for
    /// 2229 drop reorder migration analysis.
    #[inline]
    pub fn has_significant_drop(
        &'tcx self,
        tcx: TyCtxt<'tcx>,
        param_env: ty::ParamEnv<'tcx>,
    ) -> bool {
        // Avoid querying in simple cases.
        match needs_drop_components(self, &tcx.data_layout) {
            Err(AlwaysRequiresDrop) => true,
            Ok(components) => {
                let query_ty = match *components {
                    [] => return false,
                    // If we've got a single component, call the query with that
                    // to increase the chance that we hit the query cache.
                    [component_ty] => component_ty,
                    _ => self,
                };

                // FIXME(#86868): We should be canonicalizing, or else moving this to a method of inference
                // context, or *something* like that, but for now just avoid passing inference
                // variables to queries that can't cope with them. Instead, conservatively
                // return "true" (may change drop order).
                if query_ty.needs_infer() {
                    return true;
                }

                // This doesn't depend on regions, so try to minimize distinct
                // query keys used.
                let erased = tcx.normalize_erasing_regions(param_env, query_ty);
                tcx.has_significant_drop_raw(param_env.and(erased))
            }
        }
    }

    /// Returns `true` if equality for this type is both reflexive and structural.
    ///
    /// Reflexive equality for a type is indicated by an `Eq` impl for that type.
    ///
    /// Primitive types (`u32`, `str`) have structural equality by definition. For composite data
    /// types, equality for the type as a whole is structural when it is the same as equality
    /// between all components (fields, array elements, etc.) of that type. For ADTs, structural
    /// equality is indicated by an implementation of `PartialStructuralEq` and `StructuralEq` for
    /// that type.
    ///
    /// This function is "shallow" because it may return `true` for a composite type whose fields
    /// are not `StructuralEq`. For example, `[T; 4]` has structural equality regardless of `T`
    /// because equality for arrays is determined by the equality of each array element. If you
    /// want to know whether a given call to `PartialEq::eq` will proceed structurally all the way
    /// down, you will need to use a type visitor.
    #[inline]
    pub fn is_structural_eq_shallow(&'tcx self, tcx: TyCtxt<'tcx>) -> bool {
        match self.kind() {
            // Look for an impl of both `PartialStructuralEq` and `StructuralEq`.
            Adt(..) => tcx.has_structural_eq_impls(self),

            // Primitive types that satisfy `Eq`.
            Bool | Char | Int(_) | Uint(_) | Str | Never => true,

            // Composite types that satisfy `Eq` when all of their fields do.
            //
            // Because this function is "shallow", we return `true` for these composites regardless
            // of the type(s) contained within.
            Ref(..) | Array(..) | Slice(_) | Tuple(..) => true,

            // Raw pointers use bitwise comparison.
            RawPtr(_) | FnPtr(_) => true,

            // Floating point numbers are not `Eq`.
            Float(_) => false,

            // Conservatively return `false` for all others...

            // Anonymous function types
            FnDef(..) | Closure(..) | Dynamic(..) | Generator(..) => false,

            // Generic or inferred types
            //
            // FIXME(ecstaticmorse): Maybe we should `bug` here? This should probably only be
            // called for known, fully-monomorphized types.
            Projection(_) | Opaque(..) | Param(_) | Bound(..) | Placeholder(_) | Infer(_) => false,

            Foreign(_) | GeneratorWitness(..) | Error(_) => false,
        }
    }

    /// Returns `true` if `a` and `b` are "the same type" for the purposes of
    /// this check: ADTs must share the `DefId` and have pairwise-same *type*
    /// arguments (region and const arguments are ignored, since only
    /// `substs.types()` is compared); all other kinds fall back to plain `==`.
    pub fn same_type(a: Ty<'tcx>, b: Ty<'tcx>) -> bool {
        match (&a.kind(), &b.kind()) {
            (&Adt(did_a, substs_a), &Adt(did_b, substs_b)) => {
                if did_a != did_b {
                    return false;
                }

                substs_a.types().zip(substs_b.types()).all(|(a, b)| Self::same_type(a, b))
            }
            _ => a == b,
        }
    }

    /// Peel off all reference types in this type until there are none left.
    ///
    /// This method is idempotent, i.e. `ty.peel_refs().peel_refs() == ty.peel_refs()`.
    ///
    /// # Examples
    ///
    /// - `u8` -> `u8`
    /// - `&'a mut u8` -> `u8`
    /// - `&'a &'b u8` -> `u8`
    /// - `&'a *const &'b u8 -> *const &'b u8`
    pub fn peel_refs(&'tcx self) -> Ty<'tcx> {
        let mut ty = self;
        // Only `Ref` is peeled; raw pointers and `Box` are left intact.
        while let Ref(_, inner_ty, _) = ty.kind() {
            ty = inner_ty;
        }
        ty
    }

    // Accessor for the precomputed `outer_exclusive_binder` stored on the type.
    pub fn outer_exclusive_binder(&'tcx self) -> DebruijnIndex {
        self.outer_exclusive_binder
    }
}
|
2017-05-10 14:28:06 +00:00
|
|
|
|
2017-11-08 10:27:39 +00:00
|
|
|
/// Categorization of an explicit `self` receiver type, as computed by
/// `ExplicitSelf::determine`.
pub enum ExplicitSelf<'tcx> {
    /// `self: Self` (the receiver type *is* the `Self` type).
    ByValue,
    /// `self: &Self` / `self: &mut Self`.
    ByReference(ty::Region<'tcx>, hir::Mutability),
    /// `self: *const Self` / `self: *mut Self`.
    ByRawPointer(hir::Mutability),
    /// `self: Box<Self>`.
    ByBox,
    /// Any other receiver type (see `determine` for how this is used).
    Other,
}
|
|
|
|
|
|
|
|
impl<'tcx> ExplicitSelf<'tcx> {
    /// Categorizes an explicit self declaration like `self: SomeType`
    /// into either `self`, `&self`, `&mut self`, `Box<self>`, or
    /// `Other`.
    /// This is mainly used to require the arbitrary_self_types feature
    /// in the case of `Other`, to improve error messages in the common cases,
    /// and to make `Other` non-object-safe.
    ///
    /// Examples:
    ///
    /// ```
    /// impl<'a> Foo for &'a T {
    ///     // Legal declarations:
    ///     fn method1(self: &&'a T); // ExplicitSelf::ByReference
    ///     fn method2(self: &'a T); // ExplicitSelf::ByValue
    ///     fn method3(self: Box<&'a T>); // ExplicitSelf::ByBox
    ///     fn method4(self: Rc<&'a T>); // ExplicitSelf::Other
    ///
    ///     // Invalid cases will be caught by `check_method_receiver`:
    ///     fn method_err1(self: &'a mut T); // ExplicitSelf::Other
    ///     fn method_err2(self: &'static T) // ExplicitSelf::ByValue
    ///     fn method_err3(self: &&T) // ExplicitSelf::ByReference
    /// }
    /// ```
    ///
    pub fn determine<P>(self_arg_ty: Ty<'tcx>, is_self_ty: P) -> ExplicitSelf<'tcx>
    where
        // Predicate deciding whether a given type is the `Self` type;
        // supplied by the caller since `Self` resolution is context-dependent.
        P: Fn(Ty<'tcx>) -> bool,
    {
        use self::ExplicitSelf::*;

        // NOTE: arm order matters — the `ByValue` guard must come first so
        // that e.g. `self: &'a T` in `impl Foo for &'a T` (where `Self` is
        // itself a reference) is classified as `ByValue`, not `ByReference`.
        match *self_arg_ty.kind() {
            _ if is_self_ty(self_arg_ty) => ByValue,
            ty::Ref(region, ty, mutbl) if is_self_ty(ty) => ByReference(region, mutbl),
            ty::RawPtr(ty::TypeAndMut { ty, mutbl }) if is_self_ty(ty) => ByRawPointer(mutbl),
            ty::Adt(def, _) if def.is_box() && is_self_ty(self_arg_ty.boxed_ty()) => ByBox,
            _ => Other,
        }
    }
}
|
2020-01-30 20:28:16 +00:00
|
|
|
|
|
|
|
/// Returns a list of types such that the given type needs drop if and only if
/// *any* of the returned types need drop. Returns `Err(AlwaysRequiresDrop)` if
/// this type always needs drop.
///
/// An empty list therefore means the type definitely needs no drop glue.
pub fn needs_drop_components(
    ty: Ty<'tcx>,
    target_layout: &TargetDataLayout,
) -> Result<SmallVec<[Ty<'tcx>; 2]>, AlwaysRequiresDrop> {
    match ty.kind() {
        // Scalars, pointer-like types and types without storage never have
        // drop glue: no components to report.
        ty::Infer(ty::FreshIntTy(_))
        | ty::Infer(ty::FreshFloatTy(_))
        | ty::Bool
        | ty::Int(_)
        | ty::Uint(_)
        | ty::Float(_)
        | ty::Never
        | ty::FnDef(..)
        | ty::FnPtr(_)
        | ty::Char
        | ty::GeneratorWitness(..)
        | ty::RawPtr(_)
        | ty::Ref(..)
        | ty::Str => Ok(SmallVec::new()),

        // Foreign types can never have destructors.
        ty::Foreign(..) => Ok(SmallVec::new()),

        // Trait objects may hide an arbitrary destructor, and error types are
        // treated conservatively: both always require drop.
        ty::Dynamic(..) | ty::Error(_) => Err(AlwaysRequiresDrop),

        // A slice needs drop exactly when its element type does.
        ty::Slice(ty) => needs_drop_components(ty, target_layout),
        ty::Array(elem_ty, size) => {
            match needs_drop_components(elem_ty, target_layout) {
                Ok(v) if v.is_empty() => Ok(v),
                // Element may need drop: the answer then depends on the
                // (possibly generic) array length.
                res => match size.val.try_to_bits(target_layout.pointer_size) {
                    // Arrays of size zero don't need drop, even if their element
                    // type does.
                    Some(0) => Ok(SmallVec::new()),
                    Some(_) => res,
                    // We don't know which of the cases above we are in, so
                    // return the whole type and let the caller decide what to
                    // do.
                    None => Ok(smallvec![ty]),
                },
            }
        }
        // If any field needs drop, then the whole tuple does.
        ty::Tuple(..) => ty.tuple_fields().try_fold(SmallVec::new(), move |mut acc, elem| {
            acc.extend(needs_drop_components(elem, target_layout)?);
            Ok(acc)
        }),

        // These require checking for `Copy` bounds or `Adt` destructors.
        ty::Adt(..)
        | ty::Projection(..)
        | ty::Param(_)
        | ty::Bound(..)
        | ty::Placeholder(..)
        | ty::Opaque(..)
        | ty::Infer(_)
        | ty::Closure(..)
        | ty::Generator(..) => Ok(smallvec![ty]),
    }
}
|
|
|
|
|
2020-10-24 07:27:15 +00:00
|
|
|
// Does the equivalent of
|
|
|
|
// ```
|
|
|
|
// let v = self.iter().map(|p| p.fold_with(folder)).collect::<SmallVec<[_; 8]>>();
|
|
|
|
// folder.tcx().intern_*(&v)
|
|
|
|
// ```
|
|
|
|
pub fn fold_list<'tcx, F, T>(
|
|
|
|
list: &'tcx ty::List<T>,
|
|
|
|
folder: &mut F,
|
|
|
|
intern: impl FnOnce(TyCtxt<'tcx>, &[T]) -> &'tcx ty::List<T>,
|
|
|
|
) -> &'tcx ty::List<T>
|
|
|
|
where
|
|
|
|
F: TypeFolder<'tcx>,
|
|
|
|
T: TypeFoldable<'tcx> + PartialEq + Copy,
|
|
|
|
{
|
|
|
|
let mut iter = list.iter();
|
|
|
|
// Look for the first element that changed
|
|
|
|
if let Some((i, new_t)) = iter.by_ref().enumerate().find_map(|(i, t)| {
|
|
|
|
let new_t = t.fold_with(folder);
|
|
|
|
if new_t == t { None } else { Some((i, new_t)) }
|
|
|
|
}) {
|
|
|
|
// An element changed, prepare to intern the resulting list
|
|
|
|
let mut new_list = SmallVec::<[_; 8]>::with_capacity(list.len());
|
|
|
|
new_list.extend_from_slice(&list[..i]);
|
|
|
|
new_list.push(new_t);
|
|
|
|
new_list.extend(iter.map(|t| t.fold_with(folder)));
|
|
|
|
intern(folder.tcx(), &new_list)
|
|
|
|
} else {
|
|
|
|
list
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2020-06-11 14:49:57 +00:00
|
|
|
/// Marker returned by [`needs_drop_components`] when a type is known up front
/// to always need drop (e.g. trait objects), making per-component analysis
/// unnecessary.
#[derive(Copy, Clone, Debug, HashStable, TyEncodable, TyDecodable)]
pub struct AlwaysRequiresDrop;
|
2020-04-11 04:50:02 +00:00
|
|
|
|
|
|
|
/// Normalizes all opaque types in the given value, replacing them
|
|
|
|
/// with their underlying types.
|
|
|
|
pub fn normalize_opaque_types(
|
|
|
|
tcx: TyCtxt<'tcx>,
|
|
|
|
val: &'tcx List<ty::Predicate<'tcx>>,
|
|
|
|
) -> &'tcx List<ty::Predicate<'tcx>> {
|
|
|
|
let mut visitor = OpaqueTypeExpander {
|
|
|
|
seen_opaque_tys: FxHashSet::default(),
|
|
|
|
expanded_cache: FxHashMap::default(),
|
|
|
|
primary_def_id: None,
|
|
|
|
found_recursion: false,
|
2021-07-28 12:21:59 +00:00
|
|
|
found_any_recursion: false,
|
2020-04-11 04:50:02 +00:00
|
|
|
check_recursion: false,
|
|
|
|
tcx,
|
|
|
|
};
|
|
|
|
val.fold_with(&mut visitor)
|
|
|
|
}
|
|
|
|
|
2020-07-22 05:13:42 +00:00
|
|
|
/// Registers this module's query implementations in the global `Providers`
/// table; currently only `normalize_opaque_types`.
pub fn provide(providers: &mut ty::query::Providers) {
    *providers = ty::query::Providers { normalize_opaque_types, ..*providers }
}
|