mirror of https://github.com/rust-lang/rust.git
don't inhibit random field reordering on repr(packed(1))
commit 37aeb75eb6
parent e875391458
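The change renames `inhibit_struct_field_reordering_opt` to `inhibit_struct_field_reordering` and drops the special case that treated `repr(packed(1))` as inhibiting field reordering. With a packed alignment of 1 every field is byte-aligned, so no field order can introduce padding, and reordering (including `-Z randomize-layout`) cannot change the layout's validity. A minimal sketch of that invariant (struct name and fields are illustrative, not from the diff):

    use std::mem::size_of;

    // With `repr(packed)`, i.e. packed(1), every field has alignment 1, so
    // the struct's size is just the sum of its field sizes, whatever order
    // the compiler picks: there is no padding to add or remove.
    #[repr(packed)]
    struct P {
        a: u8,
        b: u32,
        c: u16,
    }

    fn main() {
        assert_eq!(size_of::<P>(), 1 + 4 + 2);
    }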
@@ -970,7 +970,7 @@ fn univariant<
     let mut align = if pack.is_some() { dl.i8_align } else { dl.aggregate_align };
     let mut max_repr_align = repr.align;
     let mut inverse_memory_index: IndexVec<u32, FieldIdx> = fields.indices().collect();
-    let optimize = !repr.inhibit_struct_field_reordering_opt();
+    let optimize = !repr.inhibit_struct_field_reordering();
     if optimize && fields.len() > 1 {
         let end = if let StructKind::MaybeUnsized = kind { fields.len() - 1 } else { fields.len() };
         let optimizing = &mut inverse_memory_index.raw[..end];
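For context, the `optimize` flag computed here gates the field-reordering optimization in `univariant`: fields can be laid out by decreasing alignment to squeeze out padding. A rough sketch of the effect, with hypothetical (size, align) pairs standing in for the real `FieldIdx`-based machinery:

    fn main() {
        // (size, align) for fields declared in the order (u8, u32, u16).
        let decl = [(1u64, 1u64), (4, 4), (2, 2)];
        let mut opt = decl;
        // Sort by decreasing alignment, as the reordering optimization does.
        opt.sort_by_key(|&(_, align)| std::cmp::Reverse(align));

        let layout_size = |fields: &[(u64, u64)]| {
            let (mut off, mut max_align) = (0u64, 1u64);
            for &(size, align) in fields {
                max_align = max_align.max(align);
                off = off.next_multiple_of(align) + size; // pad, then place field
            }
            off.next_multiple_of(max_align) // tail padding up to struct alignment
        };
        assert_eq!(layout_size(&decl), 12); // 1 + 3 padding + 4 + 2 + 2 tail
        assert_eq!(layout_size(&opt), 8);   // 4 + 2 + 1 + 1 tail: less padding
    }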
@@ -1007,13 +1007,15 @@ fn univariant<
         // Calculates a sort key to group fields by their alignment or possibly some
         // size-derived pseudo-alignment.
         let alignment_group_key = |layout: &F| {
+            // The two branches here return values that cannot be meaningfully compared with
+            // each other. However, we know that consistently for all executions of
+            // `alignment_group_key`, one or the other branch will be taken, so this is okay.
             if let Some(pack) = pack {
                 // Return the packed alignment in bytes.
                 layout.align.abi.min(pack).bytes()
             } else {
-                // Returns `log2(effective-align)`. This is ok since `pack` applies to all
-                // fields equally. The calculation assumes that size is an integer multiple of
-                // align, except for ZSTs.
+                // Returns `log2(effective-align)`. The calculation assumes that size is an
+                // integer multiple of align, except for ZSTs.
                 let align = layout.align.abi.bytes();
                 let size = layout.size.bytes();
                 let niche_size = layout.largest_niche.map(|n| n.available(dl)).unwrap_or(0);
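The new comments record why the closure may return values from two incomparable domains (bytes versus log2 of bytes): `pack` is fixed for the whole struct, so every invocation of `alignment_group_key` during one sort takes the same branch, and keys are only ever compared within one domain. A toy illustration with hypothetical alignments:

    fn main() {
        let pack: Option<u64> = Some(2); // the packed alignment, fixed per struct
        let aligns = [8u64, 1, 4]; // hypothetical field alignments in bytes
        let key = |align: u64| match pack {
            Some(p) => align.min(p),               // packed alignment in bytes
            None => align.trailing_zeros() as u64, // log2(effective alignment)
        };
        // Every field is keyed by the same branch, so the sort is consistent.
        let mut idx = [0usize, 1, 2];
        idx.sort_by_key(|&i| std::cmp::Reverse(key(aligns[i])));
        assert_eq!(idx, [0, 2, 1]); // keys 2, 2, 1: grouped by min(align, pack)
    }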
@@ -137,23 +137,16 @@ impl ReprOptions
         self.c() || self.int.is_some()
     }

-    /// Returns `true` if this `#[repr()]` should inhibit struct field reordering
-    /// optimizations, such as with `repr(C)`, `repr(packed(1))`, or `repr(<int>)`.
-    pub fn inhibit_struct_field_reordering_opt(&self) -> bool {
-        if let Some(pack) = self.pack {
-            if pack.bytes() == 1 {
-                return true;
-            }
-        }
-
+    /// Returns `true` if this `#[repr()]` guarantees a fixed field order,
+    /// e.g. `repr(C)` or `repr(<int>)`.
+    pub fn inhibit_struct_field_reordering(&self) -> bool {
         self.flags.intersects(ReprFlags::IS_UNOPTIMISABLE) || self.int.is_some()
     }

     /// Returns `true` if this type is valid for reordering and `-Z randomize-layout`
     /// was enabled for its declaration crate.
     pub fn can_randomize_type_layout(&self) -> bool {
-        !self.inhibit_struct_field_reordering_opt()
-            && self.flags.contains(ReprFlags::RANDOMIZE_LAYOUT)
+        !self.inhibit_struct_field_reordering() && self.flags.contains(ReprFlags::RANDOMIZE_LAYOUT)
     }

     /// Returns `true` if this `#[repr()]` should inhibit union ABI optimisations.
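Condensed, the predicate before and after this commit looks as follows (free functions standing in for the `ReprOptions` methods; the flag and `pack` checks mirror the diff):

    // Before: packed(1) alone forced a fixed order and blocked randomization.
    fn inhibits_reordering_old(is_unoptimisable: bool, has_int_repr: bool, pack_bytes: Option<u64>) -> bool {
        pack_bytes == Some(1) || is_unoptimisable || has_int_repr
    }

    // After: only `repr(C)`-like flags or an explicit integer repr pin the order.
    fn inhibits_reordering_new(is_unoptimisable: bool, has_int_repr: bool) -> bool {
        is_unoptimisable || has_int_repr
    }

    fn main() {
        // repr(packed(1)) with no other repr: previously ordered, now reorderable.
        assert!(inhibits_reordering_old(false, false, Some(1)));
        assert!(!inhibits_reordering_new(false, false));
    }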
@@ -278,7 +278,7 @@ fn reduce_ty<'tcx>(cx: &LateContext<'tcx>, mut ty: Ty<'tcx>) -> ReducedTy<'tcx>
                 ty = sized_ty;
                 continue;
             }
-            if def.repr().inhibit_struct_field_reordering_opt() {
+            if def.repr().inhibit_struct_field_reordering() {
                 ReducedTy::OrderedFields(Some(sized_ty))
             } else {
                 ReducedTy::UnorderedFields(ty)
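The clippy caller (in `reduce_ty`, part of the `transmute_undefined_repr` lint) only follows the rename: a struct whose repr pins field order can be reasoned about field-by-field, anything else is treated as unordered. Illustrative types, not taken from the lint's test suite:

    #[repr(C)]
    struct Ordered(u8, u16);       // fixed order: ReducedTy::OrderedFields

    struct Unordered(u8, u16);     // default repr: ReducedTy::UnorderedFields

    #[repr(packed)]
    struct AlsoUnordered(u8, u16); // packed(1): now also unordered after this change

    fn main() {}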
@@ -1,7 +1,7 @@
 #![allow(dead_code)]

 // We use packed structs to get around alignment restrictions
-#[repr(packed)]
+#[repr(C, packed)]
 struct Data {
     pad: u8,
     ptr: &'static i32,
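This test presumably depends on `pad` actually preceding `ptr` in memory, which plain `repr(packed)` no longer promises; `repr(C, packed)` does. A sketch of the offsets the `C` modifier guarantees (the assertions are illustrative):

    use std::mem::offset_of;

    #[repr(C, packed)]
    struct Data {
        pad: u8,
        ptr: &'static i32,
    }

    fn main() {
        // `C` fixes declaration order and `packed` removes padding, so the
        // pointer field sits at byte offset 1, misaligned on purpose.
        assert_eq!(offset_of!(Data, pad), 0);
        assert_eq!(offset_of!(Data, ptr), 1);
    }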
@@ -7,7 +7,7 @@ pub struct Aligned {
     _pad: [u8; 11],
     packed: Packed,
 }
-#[repr(packed)]
+#[repr(C, packed)]
 #[derive(Default, Copy, Clone)]
 pub struct Packed {
     _pad: [u8; 5],
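Here `Packed` sits behind an 11-byte pad inside `Aligned`, which only works because `packed` drops its alignment to 1; adding `C` keeps its field order (and hence its exact size) stable now that plain packed structs may be reordered. A sketch using only the fields visible in the hunk:

    use std::mem::{align_of, size_of};

    #[repr(C, packed)]
    #[derive(Default, Copy, Clone)]
    pub struct Packed {
        _pad: [u8; 5],
        // ...any remaining fields are outside this hunk
    }

    fn main() {
        // Alignment 1 lets `Packed` start at an odd offset inside `Aligned`.
        assert_eq!(align_of::<Packed>(), 1);
        assert_eq!(size_of::<Packed>(), 5); // sum of the visible field sizes
    }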
@@ -7,7 +7,7 @@ fn main() {
     FOO;
 }

-#[repr(packed)]
+#[repr(C, packed)]
 struct Packed {
     a: [u8; 28],
     b: &'static i32,
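Same change in the last test: the struct stores a wide reference behind a 28-byte array, so the reference field is deliberately under-aligned. Reading such a field has to go by copy; taking a reference to it is rejected outright (error E0793). A minimal sketch:

    #[repr(C, packed)]
    struct Packed {
        a: [u8; 28],
        b: &'static i32,
    }

    static X: i32 = 42;

    fn main() {
        let p = Packed { a: [0; 28], b: &X };
        // Copy the field out; `&p.b` would be a compile error (E0793),
        // since `b` sits at offset 28 and may be under-aligned.
        let b = p.b;
        assert_eq!(*b, 42);
    }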