remove box derefs from codegen

DrMeepster 2022-05-13 21:53:03 -07:00
parent 3e9d3d917a
commit cb417881a9
16 changed files with 385 additions and 146 deletions
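The change replaces direct codegen support for dereferencing `Box` places with a new MIR pass, `ElaborateBoxDerefs`, that rewrites such derefs into derefs of the raw pointer stored inside the box; drop elaboration and generator lowering build the equivalent field projections themselves. A minimal sketch of the field chain involved (simplified stand-ins, not the compiler's code; the real standard-library types carry extra fields such as `PhantomData` and the allocator):

struct NonNull<T>(*const T);
struct Unique<T>(NonNull<T>);
struct Box<T>(Unique<T>); // allocator field omitted in this sketch

// Conceptually, `*b` is rewritten to go through Box<T> -> Unique<T> -> NonNull<T> -> *const T.
fn read_through_box<T: Copy>(b: &Box<T>) -> T {
    unsafe { *(((b.0).0).0) }
}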

View File

@@ -118,22 +118,20 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
}
pub fn deref<Cx: LayoutTypeMethods<'tcx>>(self, cx: &Cx) -> PlaceRef<'tcx, V> {
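// A box may only be dereferenced here if it is represented as a plain scalar
// pointer; non-scalar box derefs should have been rewritten away by the
// `ElaborateBoxDerefs` MIR pass before reaching codegen.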
if self.layout.ty.is_box() && !self.layout.abi.is_scalar() {
bug!("dereferencing non-scalar box ({:?}) in codegen", self.layout.ty);
}
let projected_ty = self
.layout
.ty
.builtin_deref(true)
.unwrap_or_else(|| bug!("deref of non-pointer {:?}", self))
.ty;
let (llptr, llextra) = match self.val {
OperandValue::Immediate(llptr) => (llptr, None),
OperandValue::Pair(llptr, llextra) => {
// if the box's allocator isn't a ZST, then "llextra" is actually the allocator
if self.layout.ty.is_box() && !self.layout.field(cx, 1).is_zst() {
(llptr, None)
} else {
(llptr, Some(llextra))
}
}
OperandValue::Pair(llptr, llextra) => (llptr, Some(llextra)),
OperandValue::Ref(..) => bug!("Deref of by-Ref operand {:?}", self),
};
let layout = cx.layout_of(projected_ty);

View File

@@ -446,16 +446,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
mir::PlaceRef { projection: &place_ref.projection[..elem.0], ..place_ref },
);
// a box with a non-zst allocator should not be directly dereferenced
if cg_base.layout.ty.is_box() && !cg_base.layout.field(cx, 1).is_zst() {
// Extract `Box<T>` -> `Unique<T>` -> `NonNull<T>` -> `*const T`
let ptr =
cg_base.extract_field(bx, 0).extract_field(bx, 0).extract_field(bx, 0);
ptr.deref(bx.cx())
} else {
cg_base.deref(bx.cx())
}
} else {
bug!("using operand local {:?} as place", place_ref);
}
@@ -463,18 +454,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
};
for elem in place_ref.projection[base..].iter() {
cg_base = match *elem {
mir::ProjectionElem::Deref => {
// a box with a non-zst allocator should not be directly dereferenced
if cg_base.layout.ty.is_box() && !cg_base.layout.field(cx, 1).is_zst() {
// Project `Box<T>` -> `Unique<T>` -> `NonNull<T>` -> `*const T`
let ptr =
cg_base.project_field(bx, 0).project_field(bx, 0).project_field(bx, 0);
bx.load_operand(ptr).deref(bx.cx())
} else {
bx.load_operand(cg_base).deref(bx.cx())
}
}
mir::ProjectionElem::Deref => bx.load_operand(cg_base).deref(bx.cx()),
mir::ProjectionElem::Field(ref field, _) => {
cg_base.project_field(bx, field.index())
}

View File

@@ -240,13 +240,24 @@ impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> {
context: PlaceContext,
location: Location,
) {
if let ProjectionElem::Index(index) = elem {
match elem {
ProjectionElem::Index(index) => {
let index_ty = self.body.local_decls[index].ty;
if index_ty != self.tcx.types.usize {
self.fail(location, format!("bad index ({:?} != usize)", index_ty))
}
}
if let ProjectionElem::Field(f, ty) = elem {
ProjectionElem::Deref if self.mir_phase >= MirPhase::GeneratorsLowered => {
let base_ty = Place::ty_from(local, proj_base, &self.body.local_decls, self.tcx).ty;
if base_ty.is_box() {
self.fail(
location,
format!("{:?} dereferenced after ElaborateBoxDerefs", base_ty),
)
}
}
ProjectionElem::Field(f, ty) => {
let parent = Place { local, projection: self.tcx.intern_place_elems(proj_base) };
let parent_ty = parent.ty(&self.body.local_decls, self.tcx);
let fail_out_of_bounds = |this: &Self, location| {
@@ -263,6 +274,7 @@ impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> {
)
}
};
match parent_ty.ty.kind() {
ty::Tuple(fields) => {
let Some(f_ty) = fields.get(f.as_usize()) else {
@@ -300,6 +312,8 @@ impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> {
}
}
}
_ => {}
}
self.super_projection_elem(local, proj_base, elem, context, location);
}

View File

@@ -176,8 +176,9 @@ pub enum MirPhase
DropsLowered = 3,
/// After this phase, projections may only contain deref projections as the first element.
Derefered = 4,
/// Beginning with this phase, the following variant is disallowed:
/// Beginning with this phase, the following variants are disallowed:
/// * [`Rvalue::Aggregate`] for any `AggregateKind` except `Array`
/// * [`ProjectionElem::Deref`] of `Box`
///
/// And the following variant is allowed:
/// * [`StatementKind::SetDiscriminant`]

View File

@@ -410,7 +410,18 @@ where
fn open_drop_for_box(&mut self, adt: ty::AdtDef<'tcx>, substs: SubstsRef<'tcx>) -> BasicBlock {
debug!("open_drop_for_box({:?}, {:?}, {:?})", self, adt, substs);
let interior = self.tcx().mk_place_deref(self.place);
// drop glue is sent straight to codegen
// box cannot be directly dereferenced
let unique_ty = adt.non_enum_variant().fields[0].ty(self.tcx(), substs);
let nonnull_ty =
unique_ty.ty_adt_def().unwrap().non_enum_variant().fields[0].ty(self.tcx(), substs);
let ptr_ty = self.tcx().mk_imm_ptr(substs[0].expect_ty());
let unique_place = self.tcx().mk_place_field(self.place, Field::new(0), unique_ty);
let nonnull_place = self.tcx().mk_place_field(unique_place, Field::new(0), nonnull_ty);
let ptr_place = self.tcx().mk_place_field(nonnull_place, Field::new(0), ptr_ty);
let interior = self.tcx().mk_place_deref(ptr_place);
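// `interior` now derefs the raw pointer reached through `self.place.0.0.0`
// instead of dereferencing the box place directly.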
let interior_path = self.elaborator.deref_subpath(self.path);
let succ = self.box_free_block(adt, substs, self.succ, self.unwind);

View File

@@ -0,0 +1,130 @@
//! This pass transforms derefs of Box into a deref of the pointer inside Box
//! Codegen does not allow box to be directly dereferenced
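//!
//! For example (a sketch in MIR-like notation, type paths abbreviated), a statement
//!
//!     (*_1) = const 42_i32;    // _1: Box<i32>
//!
//! becomes a deref of a fresh raw-pointer temporary:
//!
//!     _2 = (((_1.0: Unique<i32>).0: NonNull<i32>).0: *const i32);
//!     (*_2) = const 42_i32;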
use crate::MirPass;
use rustc_hir::def_id::DefId;
use rustc_index::vec::Idx;
use rustc_middle::mir::patch::MirPatch;
use rustc_middle::mir::visit::MutVisitor;
use rustc_middle::mir::*;
use rustc_middle::ty::subst::Subst;
use rustc_middle::ty::TyCtxt;
struct ElaborateBoxDerefVistor<'tcx, 'a> {
tcx: TyCtxt<'tcx>,
unique_did: DefId,
nonnull_did: DefId,
local_decls: &'a mut LocalDecls<'tcx>,
patch: MirPatch<'tcx>,
}
impl<'tcx, 'a> MutVisitor<'tcx> for ElaborateBoxDerefVistor<'tcx, 'a> {
fn tcx(&self) -> TyCtxt<'tcx> {
self.tcx
}
fn visit_place(
&mut self,
place: &mut Place<'tcx>,
context: visit::PlaceContext,
location: Location,
) {
let tcx = self.tcx;
let base_ty = self.local_decls[place.local].ty;
// Derefer ensures that derefs are always the first projection
if place.projection.first() == Some(&PlaceElem::Deref) && base_ty.is_box() {
let source_info = self.local_decls[place.local].source_info;
let substs = tcx.intern_substs(&[base_ty.boxed_ty().into()]);
let unique_ty = tcx.bound_type_of(self.unique_did).subst(tcx, substs);
let nonnull_ty = tcx.bound_type_of(self.nonnull_did).subst(tcx, substs);
let ptr_ty = tcx.mk_imm_ptr(base_ty.boxed_ty());
let ptr_local = self.patch.new_temp(ptr_ty, source_info.span);
self.local_decls.push(LocalDecl::new(ptr_ty, source_info.span));
self.patch.add_statement(location, StatementKind::StorageLive(ptr_local));
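// Copy the raw pointer out of the box: Box<T> -> Unique<T> -> NonNull<T> -> *const T.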
self.patch.add_assign(
location,
Place::from(ptr_local),
Rvalue::Use(Operand::Copy(Place::from(place.local).project_deeper(
&[
PlaceElem::Field(Field::new(0), unique_ty),
PlaceElem::Field(Field::new(0), nonnull_ty),
PlaceElem::Field(Field::new(0), ptr_ty),
],
tcx,
))),
);
place.local = ptr_local;
self.patch.add_statement(
Location { block: location.block, statement_index: location.statement_index + 1 },
StatementKind::StorageDead(ptr_local),
);
}
self.super_place(place, context, location);
}
}
pub struct ElaborateBoxDerefs;
impl<'tcx> MirPass<'tcx> for ElaborateBoxDerefs {
fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
if let Some(def_id) = tcx.lang_items().owned_box() {
let unique_did = tcx.adt_def(def_id).non_enum_variant().fields[0].did;
let Some(nonnull_def) = tcx.type_of(unique_did).ty_adt_def() else {
span_bug!(tcx.def_span(unique_did), "expected Box to contain Unique")
};
let nonnull_did = nonnull_def.non_enum_variant().fields[0].did;
let patch = MirPatch::new(body);
let (basic_blocks, local_decls) = body.basic_blocks_and_local_decls_mut();
let mut visitor =
ElaborateBoxDerefVistor { tcx, unique_did, nonnull_did, local_decls, patch };
for (block, BasicBlockData { statements, terminator, .. }) in
basic_blocks.iter_enumerated_mut()
{
let mut index = 0;
for statement in statements {
let location = Location { block, statement_index: index };
visitor.visit_statement(statement, location);
index += 1;
}
let location = Location { block, statement_index: index };
match terminator {
// yielding into a box is handled when lowering generators
Some(Terminator { kind: TerminatorKind::Yield { value, .. }, .. }) => {
visitor.visit_operand(value, location);
}
Some(terminator) => {
visitor.visit_terminator(terminator, location);
}
None => {}
}
}
visitor.patch.apply(body);
} else {
// box is not present, this pass doesn't need to do anything
}
}
}

View File

@@ -56,7 +56,7 @@ use crate::MirPass;
use rustc_data_structures::fx::FxHashMap;
use rustc_hir as hir;
use rustc_hir::lang_items::LangItem;
use rustc_index::bit_set::{BitMatrix, BitSet};
use rustc_index::bit_set::{BitMatrix, BitSet, GrowableBitSet};
use rustc_index::vec::{Idx, IndexVec};
use rustc_middle::mir::dump_mir;
use rustc_middle::mir::visit::{MutVisitor, PlaceContext, Visitor};
@@ -206,7 +206,7 @@ struct SuspensionPoint<'tcx> {
/// Which block to jump to if the generator is dropped in this state.
drop: Option<BasicBlock>,
/// Set of locals that have live storage while at this suspension point.
storage_liveness: BitSet<Local>,
storage_liveness: GrowableBitSet<Local>,
}
struct TransformVisitor<'tcx> {
@@ -362,7 +362,7 @@ impl<'tcx> MutVisitor<'tcx> for TransformVisitor<'tcx> {
resume,
resume_arg,
drop,
storage_liveness: self.storage_liveness[block].clone().unwrap(),
storage_liveness: self.storage_liveness[block].clone().unwrap().into(),
});
VariantIdx::new(state)
@@ -1177,6 +1177,8 @@ fn create_cases<'tcx>(
transform: &TransformVisitor<'tcx>,
operation: Operation,
) -> Vec<(usize, BasicBlock)> {
let tcx = transform.tcx;
let source_info = SourceInfo::outermost(body.span);
transform
@@ -1209,6 +1211,76 @@ fn create_cases<'tcx>(
if operation == Operation::Resume {
// Move the resume argument to the destination place of the `Yield` terminator
let resume_arg = Local::new(2); // 0 = return, 1 = self
// handle `box yield` properly
let box_place = if let [projection @ .., ProjectionElem::Deref] =
&**point.resume_arg.projection
{
let box_place =
Place::from(point.resume_arg.local).project_deeper(projection, tcx);
let box_ty = box_place.ty(&body.local_decls, tcx).ty;
if box_ty.is_box() { Some((box_place, box_ty)) } else { None }
} else {
None
};
if let Some((box_place, box_ty)) = box_place {
let unique_did = box_ty
.ty_adt_def()
.expect("expected Box to be an Adt")
.non_enum_variant()
.fields[0]
.did;
let Some(nonnull_def) = tcx.type_of(unique_did).ty_adt_def() else {
span_bug!(tcx.def_span(unique_did), "expected Box to contain Unique")
};
let nonnull_did = nonnull_def.non_enum_variant().fields[0].did;
let substs = tcx.intern_substs(&[box_ty.boxed_ty().into()]);
let unique_ty = tcx.bound_type_of(unique_did).subst(tcx, substs);
let nonnull_ty = tcx.bound_type_of(nonnull_did).subst(tcx, substs);
let ptr_ty = tcx.mk_imm_ptr(box_ty.boxed_ty());
let ptr_local = body.local_decls.push(LocalDecl::new(ptr_ty, body.span));
statements.push(Statement {
source_info,
kind: StatementKind::StorageLive(ptr_local),
});
statements.push(Statement {
source_info,
kind: StatementKind::Assign(Box::new((
Place::from(ptr_local),
Rvalue::Use(Operand::Copy(box_place.project_deeper(
&[
PlaceElem::Field(Field::new(0), unique_ty),
PlaceElem::Field(Field::new(0), nonnull_ty),
PlaceElem::Field(Field::new(0), ptr_ty),
],
tcx,
))),
))),
});
statements.push(Statement {
source_info,
kind: StatementKind::Assign(Box::new((
Place::from(ptr_local)
.project_deeper(&[ProjectionElem::Deref], tcx),
Rvalue::Use(Operand::Move(resume_arg.into())),
))),
});
statements.push(Statement {
source_info,
kind: StatementKind::StorageDead(ptr_local),
});
} else {
statements.push(Statement {
source_info,
kind: StatementKind::Assign(Box::new((
@@ -1217,6 +1289,7 @@ fn create_cases<'tcx>(
))),
});
}
}
// Then jump to the real target
let block = body.basic_blocks_mut().push(BasicBlockData {

View File

@@ -57,6 +57,7 @@ mod deref_separator;
mod dest_prop;
pub mod dump_mir;
mod early_otherwise_branch;
mod elaborate_box_derefs;
mod elaborate_drops;
mod function_item_references;
mod generator;
@@ -427,6 +428,7 @@ fn run_post_borrowck_cleanup_passes<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tc
// `AddRetag` needs to run after `ElaborateDrops`. Otherwise it should run fairly late,
// but before optimizations begin.
&deref_separator::Derefer,
&elaborate_box_derefs::ElaborateBoxDerefs,
&add_retag::AddRetag,
&lower_intrinsics::LowerIntrinsics,
&simplify::SimplifyCfg::new("elaborate-drops"),

View File

@@ -23,8 +23,10 @@ use core::panic::PanicInfo;
extern crate libc;
struct Unique<T>(*mut T);
#[lang = "owned_box"]
pub struct Box<T>(*mut T);
pub struct Box<T>(Unique<T>);
#[lang = "exchange_malloc"]
unsafe fn allocate(size: usize, _align: usize) -> *mut u8 {

View File

@@ -28,93 +28,93 @@ pub fn ptr_alignment_helper(x: &&()) {}
// CHECK-LABEL: @load_ref
#[no_mangle]
pub fn load_ref<'a>(x: &&'a i32) -> &'a i32 {
// CHECK: load {{i32\*|ptr}}, {{i32\*\*|ptr}} %x, align [[PTR_ALIGNMENT]], !nonnull !{{[0-9]+}}, !align ![[ALIGN_4_META:[0-9]+]], !noundef !{{[0-9]+}}
*x
}
// CHECK-LABEL: @load_ref_higher_alignment
#[no_mangle]
pub fn load_ref_higher_alignment<'a>(x: &&'a Align16) -> &'a Align16 {
// CHECK: load {{%Align16\*|i128\*|ptr}}, {{%Align16\*\*|i128\*\*|ptr}} %x, align [[PTR_ALIGNMENT]], !nonnull !{{[0-9]+}}, !align ![[ALIGN_16_META:[0-9]+]], !noundef !{{[0-9]+}}
*x
}
// CHECK-LABEL: @load_scalar_pair
#[no_mangle]
pub fn load_scalar_pair<'a>(x: &(&'a i32, &'a Align16)) -> (&'a i32, &'a Align16) {
// CHECK: load {{i32\*|ptr}}, {{i32\*\*|ptr}} %{{.+}}, align [[PTR_ALIGNMENT]], !nonnull !{{[0-9]+}}, !align ![[ALIGN_4_META]], !noundef !{{[0-9]+}}
// CHECK: load {{i64\*|ptr}}, {{i64\*\*|ptr}} %{{.+}}, align [[PTR_ALIGNMENT]], !nonnull !{{[0-9]+}}, !align ![[ALIGN_16_META]], !noundef !{{[0-9]+}}
*x
}
// CHECK-LABEL: @load_raw_pointer
#[no_mangle]
pub fn load_raw_pointer<'a>(x: &*const i32) -> *const i32 {
// loaded raw pointer should not have !nonnull, !align, or !noundef metadata
// CHECK: load {{i32\*|ptr}}, {{i32\*\*|ptr}} %x, align [[PTR_ALIGNMENT]]{{$}}
*x
}
// CHECK-LABEL: @load_box
#[no_mangle]
pub fn load_box<'a>(x: Box<Box<i32>>) -> Box<i32> {
// CHECK: load {{i32\*|ptr}}, {{i32\*\*|ptr}} %x, align [[PTR_ALIGNMENT]], !nonnull !{{[0-9]+}}, !align ![[ALIGN_4_META]], !noundef !{{[0-9]+}}
// CHECK: load {{i32\*|ptr}}, {{i32\*\*|ptr}} %{{.*}}, align [[PTR_ALIGNMENT]], !nonnull !{{[0-9]+}}, !align ![[ALIGN_4_META]], !noundef !{{[0-9]+}}
*x
}
// CHECK-LABEL: @load_bool
#[no_mangle]
pub fn load_bool(x: &bool) -> bool {
// CHECK: load i8, {{i8\*|ptr}} %x, align 1, !range ![[BOOL_RANGE:[0-9]+]], !noundef !{{[0-9]+}}
*x
}
// CHECK-LABEL: @load_maybeuninit_bool
#[no_mangle]
pub fn load_maybeuninit_bool(x: &MaybeUninit<bool>) -> MaybeUninit<bool> {
// CHECK: load i8, {{i8\*|ptr}} %x, align 1{{$}}
*x
}
// CHECK-LABEL: @load_enum_bool
#[no_mangle]
pub fn load_enum_bool(x: &MyBool) -> MyBool {
// CHECK: load i8, {{i8\*|ptr}} %x, align 1, !range ![[BOOL_RANGE]], !noundef !{{[0-9]+}}
*x
}
// CHECK-LABEL: @load_maybeuninit_enum_bool
#[no_mangle]
pub fn load_maybeuninit_enum_bool(x: &MaybeUninit<MyBool>) -> MaybeUninit<MyBool> {
// CHECK: load i8, {{i8\*|ptr}} %x, align 1{{$}}
*x
}
// CHECK-LABEL: @load_int
#[no_mangle]
pub fn load_int(x: &u16) -> u16 {
// CHECK: load i16, {{i16\*|ptr}} %x, align 2{{$}}
*x
}
// CHECK-LABEL: @load_nonzero_int
#[no_mangle]
pub fn load_nonzero_int(x: &NonZeroU16) -> NonZeroU16 {
// CHECK: load i16, {{i16\*|ptr}} %x, align 2, !range ![[NONZEROU16_RANGE:[0-9]+]], !noundef !{{[0-9]+}}
*x
}
// CHECK-LABEL: @load_option_nonzero_int
#[no_mangle]
pub fn load_option_nonzero_int(x: &Option<NonZeroU16>) -> Option<NonZeroU16> {
// CHECK: load i16, {{i16\*|ptr}} %x, align 2{{$}}
*x
}
// CHECK-LABEL: @borrow
#[no_mangle]
pub fn borrow(x: &i32) -> &i32 {
// CHECK: load {{i32\*|ptr}}, {{i32\*\*|ptr}} %x{{.*}}, !nonnull
&x; // keep variable in an alloca
x
}
@@ -122,7 +122,7 @@ pub fn borrow(x: &i32) -> &i32 {
// CHECK-LABEL: @_box
#[no_mangle]
pub fn _box(x: Box<i32>) -> i32 {
// CHECK: load {{i32\*|ptr}}, {{i32\*\*|ptr}} %x{{.*}}, !nonnull
// CHECK: load {{i32\*|ptr}}, {{i32\*\*|ptr}} %x{{.*}}, align [[PTR_ALIGNMENT]]
*x
}
@@ -131,8 +131,8 @@ pub fn _box(x: Box<i32>) -> i32 {
// dependent alignment
#[no_mangle]
pub fn small_array_alignment(x: [i8; 4]) -> [i8; 4] {
// CHECK: [[VAR:%[0-9]+]] = load i32, {{i32\*|ptr}} %{{.*}}, align 1
// CHECK: ret i32 [[VAR]]
x
}
@@ -141,8 +141,8 @@ pub fn small_array_alignment(x: [i8; 4]) -> [i8; 4] {
// dependent alignment
#[no_mangle]
pub fn small_struct_alignment(x: Bytes) -> Bytes {
// CHECK: [[VAR:%[0-9]+]] = load i32, {{i32\*|ptr}} %{{.*}}, align 1
// CHECK: ret i32 [[VAR]]
x
}

View File

@@ -10,6 +10,10 @@
let mut _5: usize; // in scope 0 at $DIR/boxes.rs:12:14: 12:22
let mut _6: *mut u8; // in scope 0 at $DIR/boxes.rs:12:14: 12:22
let mut _7: std::boxed::Box<i32>; // in scope 0 at $DIR/boxes.rs:12:14: 12:22
let mut _8: *const i32; // in scope 0 at $DIR/boxes.rs:12:14: 12:22
let mut _9: *const i32; // in scope 0 at $DIR/boxes.rs:12:14: 12:22
let mut _10: *const i32; // in scope 0 at $DIR/boxes.rs:12:14: 12:22
let mut _11: *const i32; // in scope 0 at $DIR/boxes.rs:12:14: 12:22
scope 1 {
debug x => _1; // in scope 1 at $DIR/boxes.rs:12:9: 12:10
}
@@ -34,10 +38,16 @@
bb1: {
StorageLive(_7); // scope 0 at $DIR/boxes.rs:12:14: 12:22
_7 = ShallowInitBox(move _6, i32); // scope 0 at $DIR/boxes.rs:12:14: 12:22
(*_7) = const 42_i32; // scope 0 at $DIR/boxes.rs:12:19: 12:21
StorageLive(_8); // scope 0 at $DIR/boxes.rs:12:19: 12:21
_8 = (((_7.0: std::ptr::Unique<i32>).0: std::ptr::NonNull<i32>).0: *const i32); // scope 0 at $DIR/boxes.rs:12:19: 12:21
(*_8) = const 42_i32; // scope 0 at $DIR/boxes.rs:12:19: 12:21
StorageDead(_8); // scope 0 at $DIR/boxes.rs:12:14: 12:22
_3 = move _7; // scope 0 at $DIR/boxes.rs:12:14: 12:22
StorageDead(_7); // scope 0 at $DIR/boxes.rs:12:21: 12:22
_2 = (*_3); // scope 0 at $DIR/boxes.rs:12:13: 12:22
StorageLive(_9); // scope 0 at $DIR/boxes.rs:12:13: 12:22
_9 = (((_3.0: std::ptr::Unique<i32>).0: std::ptr::NonNull<i32>).0: *const i32); // scope 0 at $DIR/boxes.rs:12:13: 12:22
_2 = (*_9); // scope 0 at $DIR/boxes.rs:12:13: 12:22
StorageDead(_9); // scope 0 at $DIR/boxes.rs:12:13: 12:26
_1 = Add(move _2, const 0_i32); // scope 0 at $DIR/boxes.rs:12:13: 12:26
StorageDead(_2); // scope 0 at $DIR/boxes.rs:12:25: 12:26
drop(_3) -> [return: bb2, unwind: bb3]; // scope 0 at $DIR/boxes.rs:12:26: 12:27

View File

@@ -9,7 +9,8 @@
let mut _4: *mut u8; // in scope 0 at $DIR/inline-into-box-place.rs:8:29: 8:43
let mut _5: std::boxed::Box<std::vec::Vec<u32>>; // in scope 0 at $DIR/inline-into-box-place.rs:8:29: 8:43
let mut _6: (); // in scope 0 at $DIR/inline-into-box-place.rs:8:42: 8:43
+ let mut _7: &mut std::vec::Vec<u32>; // in scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43
let mut _7: *const std::vec::Vec<u32>; // in scope 0 at $DIR/inline-into-box-place.rs:8:29: 8:43
+ let mut _8: &mut std::vec::Vec<u32>; // in scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43
scope 1 {
debug _x => _1; // in scope 1 at $DIR/inline-into-box-place.rs:8:9: 8:11
}
@@ -32,7 +33,7 @@
bb1: {
StorageLive(_5); // scope 0 at $DIR/inline-into-box-place.rs:8:29: 8:43
_5 = ShallowInitBox(move _4, std::vec::Vec<u32>); // scope 0 at $DIR/inline-into-box-place.rs:8:29: 8:43
- (*_5) = Vec::<u32>::new() -> [return: bb2, unwind: bb5]; // scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43
- (*((_5.0: std::ptr::Unique<std::vec::Vec<u32>>).0: *const std::vec::Vec<u32>)) = Vec::<u32>::new() -> [return: bb2, unwind: bb5]; // scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43
+ StorageLive(_7); // scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43
+ _7 = &mut (*_5); // scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43
+ StorageLive(_8); // scope 3 at $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
@@ -71,6 +72,7 @@
- }
-
- bb5 (cleanup): {
- StorageDead(_7); // scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43
- _6 = alloc::alloc::box_free::<Vec<u32>, std::alloc::Global>(move (_5.0: std::ptr::Unique<std::vec::Vec<u32>>), move (_5.1: std::alloc::Global)) -> bb4; // scope 0 at $DIR/inline-into-box-place.rs:8:42: 8:43
- // mir::Constant
- // + span: $DIR/inline-into-box-place.rs:8:42: 8:43

View File

@@ -9,14 +9,16 @@
let mut _4: *mut u8; // in scope 0 at $DIR/inline-into-box-place.rs:8:29: 8:43
let mut _5: std::boxed::Box<std::vec::Vec<u32>>; // in scope 0 at $DIR/inline-into-box-place.rs:8:29: 8:43
let mut _6: (); // in scope 0 at $DIR/inline-into-box-place.rs:8:42: 8:43
+ let mut _7: &mut std::vec::Vec<u32>; // in scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43
let mut _7: *const std::vec::Vec<u32>; // in scope 0 at $DIR/inline-into-box-place.rs:8:29: 8:43
let mut _8: *const std::vec::Vec<u32>; // in scope 0 at $DIR/inline-into-box-place.rs:8:29: 8:43
+ let mut _9: &mut std::vec::Vec<u32>; // in scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43
scope 1 {
debug _x => _1; // in scope 1 at $DIR/inline-into-box-place.rs:8:9: 8:11
}
scope 2 {
}
+ scope 3 (inlined Vec::<u32>::new) { // at $DIR/inline-into-box-place.rs:8:33: 8:43
+ let mut _8: alloc::raw_vec::RawVec<u32>; // in scope 3 at $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
+ let mut _10: alloc::raw_vec::RawVec<u32>; // in scope 3 at $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
+ }
bb0: {
@@ -32,11 +34,13 @@
bb1: {
StorageLive(_5); // scope 0 at $DIR/inline-into-box-place.rs:8:29: 8:43
_5 = ShallowInitBox(move _4, std::vec::Vec<u32>); // scope 0 at $DIR/inline-into-box-place.rs:8:29: 8:43
- (*_5) = Vec::<u32>::new() -> [return: bb2, unwind: bb5]; // scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43
+ StorageLive(_7); // scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43
+ _7 = &mut (*_5); // scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43
+ StorageLive(_8); // scope 3 at $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
+ _8 = const alloc::raw_vec::RawVec::<u32>::NEW; // scope 3 at $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
StorageLive(_7); // scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43
_7 = (((_5.0: std::ptr::Unique<std::vec::Vec<u32>>).0: std::ptr::NonNull<std::vec::Vec<u32>>).0: *const std::vec::Vec<u32>); // scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43
- (*_7) = Vec::<u32>::new() -> [return: bb2, unwind: bb4]; // scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43
+ StorageLive(_9); // scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43
+ _9 = &mut (*_7); // scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43
+ StorageLive(_10); // scope 3 at $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
+ _10 = const alloc::raw_vec::RawVec::<u32>::NEW; // scope 3 at $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
// mir::Constant
- // + span: $DIR/inline-into-box-place.rs:8:33: 8:41
- // + user_ty: UserType(1)
@@ -47,15 +51,16 @@
+ // + span: $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
+ // + user_ty: UserType(0)
+ // + literal: Const { ty: alloc::raw_vec::RawVec<u32>, val: Unevaluated(alloc::raw_vec::RawVec::<T>::NEW, [u32], None) }
+ Deinit((*_7)); // scope 3 at $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
+ ((*_7).0: alloc::raw_vec::RawVec<u32>) = move _8; // scope 3 at $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
+ ((*_7).1: usize) = const 0_usize; // scope 3 at $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
+ StorageDead(_8); // scope 3 at $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
+ StorageDead(_7); // scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43
+ Deinit((*_9)); // scope 3 at $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
+ ((*_9).0: alloc::raw_vec::RawVec<u32>) = move _10; // scope 3 at $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
+ ((*_9).1: usize) = const 0_usize; // scope 3 at $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
+ StorageDead(_10); // scope 3 at $SRC_DIR/alloc/src/vec/mod.rs:LL:COL
+ StorageDead(_9); // scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43
StorageDead(_7); // scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43
_1 = move _5; // scope 0 at $DIR/inline-into-box-place.rs:8:29: 8:43
StorageDead(_5); // scope 0 at $DIR/inline-into-box-place.rs:8:42: 8:43
_0 = const (); // scope 0 at $DIR/inline-into-box-place.rs:7:11: 9:2
- drop(_1) -> [return: bb3, unwind: bb4]; // scope 0 at $DIR/inline-into-box-place.rs:9:1: 9:2
- drop(_1) -> [return: bb3, unwind: bb5]; // scope 0 at $DIR/inline-into-box-place.rs:9:1: 9:2
+ drop(_1) -> [return: bb2, unwind: bb3]; // scope 0 at $DIR/inline-into-box-place.rs:9:1: 9:2
}
@@ -66,15 +71,16 @@
}
- bb4 (cleanup): {
+ bb3 (cleanup): {
resume; // scope 0 at $DIR/inline-into-box-place.rs:7:1: 9:2
- }
-
- bb5 (cleanup): {
- _6 = alloc::alloc::box_free::<Vec<u32>, std::alloc::Global>(move (_5.0: std::ptr::Unique<std::vec::Vec<u32>>), move (_5.1: std::alloc::Global)) -> bb4; // scope 0 at $DIR/inline-into-box-place.rs:8:42: 8:43
- StorageDead(_7); // scope 0 at $DIR/inline-into-box-place.rs:8:33: 8:43
- _6 = alloc::alloc::box_free::<Vec<u32>, std::alloc::Global>(move (_5.0: std::ptr::Unique<std::vec::Vec<u32>>), move (_5.1: std::alloc::Global)) -> bb5; // scope 0 at $DIR/inline-into-box-place.rs:8:42: 8:43
- // mir::Constant
- // + span: $DIR/inline-into-box-place.rs:8:42: 8:43
- // + literal: Const { ty: unsafe fn(Unique<Vec<u32>>, std::alloc::Global) {alloc::alloc::box_free::<Vec<u32>, std::alloc::Global>}, val: Value(Scalar(<ZST>)) }
- }
-
- bb5 (cleanup): {
+ bb3 (cleanup): {
resume; // scope 0 at $DIR/inline-into-box-place.rs:7:1: 9:2
}
}

View File

@@ -11,6 +11,7 @@ fn b(_1: &mut Box<T>) -> &mut T {
let mut _5: &mut T; // in scope 1 at $SRC_DIR/alloc/src/boxed.rs:LL:COL
let mut _6: &mut T; // in scope 1 at $SRC_DIR/alloc/src/boxed.rs:LL:COL
let mut _7: std::boxed::Box<T>; // in scope 1 at $SRC_DIR/alloc/src/boxed.rs:LL:COL
let mut _8: *const T; // in scope 1 at $SRC_DIR/alloc/src/boxed.rs:LL:COL
}
bb0: {
@@ -22,7 +23,10 @@ fn b(_1: &mut Box<T>) -> &mut T {
StorageLive(_6); // scope 1 at $SRC_DIR/alloc/src/boxed.rs:LL:COL
StorageLive(_7); // scope 1 at $SRC_DIR/alloc/src/boxed.rs:LL:COL
_7 = move (*_4); // scope 1 at $SRC_DIR/alloc/src/boxed.rs:LL:COL
_6 = &mut (*_7); // scope 1 at $SRC_DIR/alloc/src/boxed.rs:LL:COL
StorageLive(_8); // scope 1 at $SRC_DIR/alloc/src/boxed.rs:LL:COL
_8 = (((_7.0: std::ptr::Unique<T>).0: std::ptr::NonNull<T>).0: *const T); // scope 1 at $SRC_DIR/alloc/src/boxed.rs:LL:COL
_6 = &mut (*_8); // scope 1 at $SRC_DIR/alloc/src/boxed.rs:LL:COL
StorageDead(_8); // scope 1 at $SRC_DIR/alloc/src/boxed.rs:LL:COL
StorageDead(_7); // scope 1 at $SRC_DIR/alloc/src/boxed.rs:LL:COL
_5 = &mut (*_6); // scope 1 at $SRC_DIR/alloc/src/boxed.rs:LL:COL
_3 = &mut (*_5); // scope 1 at $SRC_DIR/alloc/src/boxed.rs:LL:COL

View File

@@ -8,6 +8,7 @@ fn d(_1: &Box<T>) -> &T {
scope 1 (inlined <Box<T> as AsRef<T>>::as_ref) { // at $DIR/issue-58867-inline-as-ref-as-mut.rs:18:5: 18:15
debug self => _3; // in scope 1 at $SRC_DIR/alloc/src/boxed.rs:LL:COL
let mut _4: std::boxed::Box<T>; // in scope 1 at $SRC_DIR/alloc/src/boxed.rs:LL:COL
let mut _5: *const T; // in scope 1 at $SRC_DIR/alloc/src/boxed.rs:LL:COL
}
bb0: {
@@ -16,7 +17,10 @@ fn d(_1: &Box<T>) -> &T {
_3 = &(*_1); // scope 0 at $DIR/issue-58867-inline-as-ref-as-mut.rs:18:5: 18:15
StorageLive(_4); // scope 1 at $SRC_DIR/alloc/src/boxed.rs:LL:COL
_4 = move (*_3); // scope 1 at $SRC_DIR/alloc/src/boxed.rs:LL:COL
_2 = &(*_4); // scope 1 at $SRC_DIR/alloc/src/boxed.rs:LL:COL
StorageLive(_5); // scope 1 at $SRC_DIR/alloc/src/boxed.rs:LL:COL
_5 = (((_4.0: std::ptr::Unique<T>).0: std::ptr::NonNull<T>).0: *const T); // scope 1 at $SRC_DIR/alloc/src/boxed.rs:LL:COL
_2 = &(*_5); // scope 1 at $SRC_DIR/alloc/src/boxed.rs:LL:COL
StorageDead(_5); // scope 1 at $SRC_DIR/alloc/src/boxed.rs:LL:COL
StorageDead(_4); // scope 1 at $SRC_DIR/alloc/src/boxed.rs:LL:COL
_0 = &(*_2); // scope 0 at $DIR/issue-58867-inline-as-ref-as-mut.rs:18:5: 18:15
StorageDead(_3); // scope 0 at $DIR/issue-58867-inline-as-ref-as-mut.rs:18:14: 18:15

View File

@@ -1,27 +1,29 @@
// build-pass
// compile-flags: -Z mir-opt-level=4
#![crate_type="lib"]
#![crate_type = "lib"]
#![feature(lang_items)]
#![no_std]
struct NonNull<T: ?Sized>(*mut T);
struct Unique<T: ?Sized>(NonNull<T>);
#[lang = "owned_box"]
pub struct Box<T: ?Sized>(*mut T, ());
pub struct Box<T: ?Sized>(Unique<T>);
impl<T: ?Sized> Drop for Box<T> {
fn drop(&mut self) {
}
fn drop(&mut self) {}
}
#[lang = "box_free"]
#[inline(always)]
unsafe fn box_free<T: ?Sized>(ptr: *mut T, _: ()) {
dealloc(ptr)
unsafe fn box_free<T: ?Sized>(ptr: Unique<T>) {
dealloc(ptr.0.0)
}
#[inline(never)]
fn dealloc<T: ?Sized>(_: *mut T) {
}
fn dealloc<T: ?Sized>(_: *mut T) {}
pub struct Foo<T>(T);