auto merge of #19765 : luqmana/rust/nonzero-lang-item, r=nikomatsakis

This extends the nullable enum optimization to traverse beyond just the first level when looking for a field to use as the discriminant, so it now works through structs, tuples, and fixed-size arrays. This also introduces a new lang item, `NonZero`, which you can use to wrap raw pointers or integral types to indicate to rustc that the underlying value is known never to be 0/NULL. We then use it in `Vec`, `Rc`, and `Arc` so they also benefit from the nullable enum optimization.

As per https://github.com/rust-lang/rfcs/pull/499, `NonZero` is not exposed via the `libstd` facade.

```
x86_64 Linux (sizes in bytes):
                        T       Option<T> (Before)      Option<T> (After)
----------------------------------------------------------------------------------
Vec<int>                24          32                      24
String                  24          32                      24
Rc<int>                 8           16                      8
Arc<int>                8           16                      8
[Box<int>, ..2]         16          24                      16
(String, uint)          32          40                      32
```
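
These savings can be checked directly with `mem::size_of`. A minimal sketch in the Rust of this era (`int`/`uint` were later renamed), mirroring the run-pass test added at the end of this diff:

```
use std::mem::size_of;
use std::rc::Rc;
use std::sync::Arc;

fn main() {
    // With the optimization, Option<T> reuses T's forbidden zero/NULL
    // value as the None discriminant, so no extra word is needed.
    assert_eq!(size_of::<Vec<int>>(), size_of::<Option<Vec<int>>>());
    assert_eq!(size_of::<String>(), size_of::<Option<String>>());
    assert_eq!(size_of::<Rc<int>>(), size_of::<Option<Rc<int>>>());
    assert_eq!(size_of::<Arc<int>>(), size_of::<Option<Arc<int>>>());
}
```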

Fixes #19419.
Fixes #13194.
Fixes #9378.
Fixes #7576.
This commit is contained in:
bors 2014-12-29 08:06:20 +00:00
commit 25fb12b8a5
13 changed files with 371 additions and 156 deletions


@ -76,11 +76,11 @@ use core::default::Default;
use core::kinds::{Sync, Send};
use core::mem::{min_align_of, size_of, drop};
use core::mem;
use core::nonzero::NonZero;
use core::ops::{Drop, Deref};
use core::option::Option;
use core::option::Option::{Some, None};
use core::ptr::RawPtr;
use core::ptr;
use core::ptr::{mod, RawPtr};
use heap::deallocate;
/// An atomically reference counted wrapper for shared state.
@ -114,7 +114,7 @@ use heap::deallocate;
pub struct Arc<T> {
// FIXME #12808: strange name to try to avoid interfering with
// field accesses of the contained type via Deref
_ptr: *mut ArcInner<T>,
_ptr: NonZero<*mut ArcInner<T>>,
}
unsafe impl<T: Sync + Send> Send for Arc<T> { }
@ -130,7 +130,7 @@ unsafe impl<T: Sync + Send> Sync for Arc<T> { }
pub struct Weak<T> {
// FIXME #12808: strange name to try to avoid interfering with
// field accesses of the contained type via Deref
_ptr: *mut ArcInner<T>,
_ptr: NonZero<*mut ArcInner<T>>,
}
unsafe impl<T: Sync + Send> Send for Weak<T> { }
@ -165,7 +165,7 @@ impl<T> Arc<T> {
weak: atomic::AtomicUint::new(1),
data: data,
};
Arc { _ptr: unsafe { mem::transmute(x) } }
Arc { _ptr: unsafe { NonZero::new(mem::transmute(x)) } }
}
/// Downgrades the `Arc<T>` to a `Weak<T>` reference.
@ -194,7 +194,7 @@ impl<T> Arc<T> {
// pointer is valid. Furthermore, we know that the `ArcInner` structure itself is `Sync`
// because the inner data is `Sync` as well, so we're ok loaning out an immutable pointer
// to these contents.
unsafe { &*self._ptr }
unsafe { &**self._ptr }
}
}
@ -281,7 +281,7 @@ impl<T: Send + Sync + Clone> Arc<T> {
// pointer that will ever be returned to T. Our reference count is guaranteed to be 1 at
// this point, and we required the Arc itself to be `mut`, so we're returning the only
// possible reference to the inner data.
let inner = unsafe { &mut *self._ptr };
let inner = unsafe { &mut **self._ptr };
&mut inner.data
}
}
@ -316,7 +316,8 @@ impl<T: Sync + Send> Drop for Arc<T> {
fn drop(&mut self) {
// This structure has #[unsafe_no_drop_flag], so this drop glue may run more than once (but
// it is guaranteed to be zeroed after the first if it's run more than once)
if self._ptr.is_null() { return }
let ptr = *self._ptr;
if ptr.is_null() { return }
// Because `fetch_sub` is already atomic, we do not need to synchronize with other threads
// unless we are going to delete the object. This same logic applies to the below
@ -346,7 +347,7 @@ impl<T: Sync + Send> Drop for Arc<T> {
if self.inner().weak.fetch_sub(1, atomic::Release) == 1 {
atomic::fence(atomic::Acquire);
unsafe { deallocate(self._ptr as *mut u8, size_of::<ArcInner<T>>(),
unsafe { deallocate(ptr as *mut u8, size_of::<ArcInner<T>>(),
min_align_of::<ArcInner<T>>()) }
}
}
@ -386,7 +387,7 @@ impl<T: Sync + Send> Weak<T> {
#[inline]
fn inner(&self) -> &ArcInner<T> {
// See comments above for why this is "safe"
unsafe { &*self._ptr }
unsafe { &**self._ptr }
}
}
@ -442,14 +443,16 @@ impl<T: Sync + Send> Drop for Weak<T> {
/// } // implicit drop
/// ```
fn drop(&mut self) {
let ptr = *self._ptr;
// see comments above for why this check is here
if self._ptr.is_null() { return }
if ptr.is_null() { return }
// If we find out that we were the last weak pointer, then it's time to deallocate the data
// entirely. See the discussion in Arc::drop() about the memory orderings
if self.inner().weak.fetch_sub(1, atomic::Release) == 1 {
atomic::fence(atomic::Acquire);
unsafe { deallocate(self._ptr as *mut u8, size_of::<ArcInner<T>>(),
unsafe { deallocate(ptr as *mut u8, size_of::<ArcInner<T>>(),
min_align_of::<ArcInner<T>>()) }
}
}


@ -150,11 +150,11 @@ use core::fmt;
use core::hash::{mod, Hash};
use core::kinds::marker;
use core::mem::{transmute, min_align_of, size_of, forget};
use core::nonzero::NonZero;
use core::ops::{Deref, Drop};
use core::option::Option;
use core::option::Option::{Some, None};
use core::ptr;
use core::ptr::RawPtr;
use core::ptr::{mod, RawPtr};
use core::result::Result;
use core::result::Result::{Ok, Err};
@ -174,7 +174,7 @@ struct RcBox<T> {
pub struct Rc<T> {
// FIXME #12808: strange names to try to avoid interfering with field accesses of the contained
// type via Deref
_ptr: *mut RcBox<T>,
_ptr: NonZero<*mut RcBox<T>>,
_nosend: marker::NoSend,
_noshare: marker::NoSync
}
@ -196,11 +196,11 @@ impl<T> Rc<T> {
// there is an implicit weak pointer owned by all the strong pointers, which
// ensures that the weak destructor never frees the allocation while the strong
// destructor is running, even if the weak pointer is stored inside the strong one.
_ptr: transmute(box RcBox {
_ptr: NonZero::new(transmute(box RcBox {
value: value,
strong: Cell::new(1),
weak: Cell::new(1)
}),
})),
_nosend: marker::NoSend,
_noshare: marker::NoSync
}
@ -281,7 +281,7 @@ pub fn try_unwrap<T>(rc: Rc<T>) -> Result<T, Rc<T>> {
let val = ptr::read(&*rc); // copy the contained object
// destruct the box and skip our Drop
// we can ignore the refcounts because we know we're unique
deallocate(rc._ptr as *mut u8, size_of::<RcBox<T>>(),
deallocate(*rc._ptr as *mut u8, size_of::<RcBox<T>>(),
min_align_of::<RcBox<T>>());
forget(rc);
Ok(val)
@ -311,7 +311,7 @@ pub fn try_unwrap<T>(rc: Rc<T>) -> Result<T, Rc<T>> {
#[experimental]
pub fn get_mut<'a, T>(rc: &'a mut Rc<T>) -> Option<&'a mut T> {
if is_unique(rc) {
let inner = unsafe { &mut *rc._ptr };
let inner = unsafe { &mut **rc._ptr };
Some(&mut inner.value)
} else {
None
@ -343,7 +343,7 @@ impl<T: Clone> Rc<T> {
// pointer that will ever be returned to T. Our reference count is guaranteed to be 1 at
// this point, and we required the `Rc<T>` itself to be `mut`, so we're returning the only
// possible reference to the inner value.
let inner = unsafe { &mut *self._ptr };
let inner = unsafe { &mut **self._ptr };
&mut inner.value
}
}
@ -391,7 +391,8 @@ impl<T> Drop for Rc<T> {
/// ```
fn drop(&mut self) {
unsafe {
if !self._ptr.is_null() {
let ptr = *self._ptr;
if !ptr.is_null() {
self.dec_strong();
if self.strong() == 0 {
ptr::read(&**self); // destroy the contained object
@ -401,7 +402,7 @@ impl<T> Drop for Rc<T> {
self.dec_weak();
if self.weak() == 0 {
deallocate(self._ptr as *mut u8, size_of::<RcBox<T>>(),
deallocate(ptr as *mut u8, size_of::<RcBox<T>>(),
min_align_of::<RcBox<T>>())
}
}
@ -618,7 +619,7 @@ impl<T: fmt::Show> fmt::Show for Rc<T> {
pub struct Weak<T> {
// FIXME #12808: strange names to try to avoid interfering with
// field accesses of the contained type via Deref
_ptr: *mut RcBox<T>,
_ptr: NonZero<*mut RcBox<T>>,
_nosend: marker::NoSend,
_noshare: marker::NoSync
}
@ -682,12 +683,13 @@ impl<T> Drop for Weak<T> {
/// ```
fn drop(&mut self) {
unsafe {
if !self._ptr.is_null() {
let ptr = *self._ptr;
if !ptr.is_null() {
self.dec_weak();
// the weak count starts at 1, and will only go to zero if all the strong pointers
// have disappeared.
if self.weak() == 0 {
deallocate(self._ptr as *mut u8, size_of::<RcBox<T>>(),
deallocate(ptr as *mut u8, size_of::<RcBox<T>>(),
min_align_of::<RcBox<T>>())
}
}
@ -742,12 +744,12 @@ trait RcBoxPtr<T> {
impl<T> RcBoxPtr<T> for Rc<T> {
#[inline(always)]
fn inner(&self) -> &RcBox<T> { unsafe { &(*self._ptr) } }
fn inner(&self) -> &RcBox<T> { unsafe { &(**self._ptr) } }
}
impl<T> RcBoxPtr<T> for Weak<T> {
#[inline(always)]
fn inner(&self) -> &RcBox<T> { unsafe { &(*self._ptr) } }
fn inner(&self) -> &RcBox<T> { unsafe { &(**self._ptr) } }
}
#[cfg(test)]


@ -56,9 +56,10 @@ use core::hash::{mod, Hash};
use core::iter::repeat;
use core::kinds::marker::{ContravariantLifetime, InvariantType};
use core::mem;
use core::nonzero::NonZero;
use core::num::{Int, UnsignedInt};
use core::ops;
use core::ptr::{mod, Unique};
use core::ptr;
use core::raw::Slice as RawSlice;
use core::uint;
@ -133,11 +134,14 @@ use slice::CloneSliceExt;
#[unsafe_no_drop_flag]
#[stable]
pub struct Vec<T> {
ptr: Unique<T>,
ptr: NonZero<*mut T>,
len: uint,
cap: uint,
}
unsafe impl<T: Send> Send for Vec<T> { }
unsafe impl<T: Sync> Sync for Vec<T> { }
/// A clone-on-write vector
pub type CowVec<'a, T> = Cow<'a, Vec<T>, [T]>;
@ -176,7 +180,7 @@ impl<T> Vec<T> {
// non-null value which is fine since we never call deallocate on the ptr
// if cap is 0. The reason for this is because the pointer of a slice
// being NULL would break the null pointer optimization for enums.
Vec { ptr: Unique(EMPTY as *mut T), len: 0, cap: 0 }
Vec { ptr: unsafe { NonZero::new(EMPTY as *mut T) }, len: 0, cap: 0 }
}
/// Constructs a new, empty `Vec<T>` with the specified capacity.
@ -209,7 +213,7 @@ impl<T> Vec<T> {
#[stable]
pub fn with_capacity(capacity: uint) -> Vec<T> {
if mem::size_of::<T>() == 0 {
Vec { ptr: Unique(EMPTY as *mut T), len: 0, cap: uint::MAX }
Vec { ptr: unsafe { NonZero::new(EMPTY as *mut T) }, len: 0, cap: uint::MAX }
} else if capacity == 0 {
Vec::new()
} else {
@ -217,7 +221,7 @@ impl<T> Vec<T> {
.expect("capacity overflow");
let ptr = unsafe { allocate(size, mem::min_align_of::<T>()) };
if ptr.is_null() { ::alloc::oom() }
Vec { ptr: Unique(ptr as *mut T), len: 0, cap: capacity }
Vec { ptr: unsafe { NonZero::new(ptr as *mut T) }, len: 0, cap: capacity }
}
}
@ -284,7 +288,7 @@ impl<T> Vec<T> {
#[unstable = "needs finalization"]
pub unsafe fn from_raw_parts(ptr: *mut T, length: uint,
capacity: uint) -> Vec<T> {
Vec { ptr: Unique(ptr), len: length, cap: capacity }
Vec { ptr: NonZero::new(ptr), len: length, cap: capacity }
}
/// Creates a vector by copying the elements from a raw pointer.
@ -795,7 +799,7 @@ impl<T> Vec<T> {
if self.len == 0 {
if self.cap != 0 {
unsafe {
dealloc(self.ptr.0, self.cap)
dealloc(*self.ptr, self.cap)
}
self.cap = 0;
}
@ -803,11 +807,12 @@ impl<T> Vec<T> {
unsafe {
// Overflow check is unnecessary as the vector is already at
// least this large.
self.ptr = Unique(reallocate(self.ptr.0 as *mut u8,
self.cap * mem::size_of::<T>(),
self.len * mem::size_of::<T>(),
mem::min_align_of::<T>()) as *mut T);
if self.ptr.0.is_null() { ::alloc::oom() }
let ptr = reallocate(*self.ptr as *mut u8,
self.cap * mem::size_of::<T>(),
self.len * mem::size_of::<T>(),
mem::min_align_of::<T>()) as *mut T;
if ptr.is_null() { ::alloc::oom() }
self.ptr = NonZero::new(ptr);
}
self.cap = self.len;
}
@ -867,7 +872,7 @@ impl<T> Vec<T> {
pub fn as_mut_slice<'a>(&'a mut self) -> &'a mut [T] {
unsafe {
mem::transmute(RawSlice {
data: self.ptr.0 as *const T,
data: *self.ptr as *const T,
len: self.len,
})
}
@ -890,9 +895,9 @@ impl<T> Vec<T> {
#[unstable = "matches collection reform specification, waiting for dust to settle"]
pub fn into_iter(self) -> IntoIter<T> {
unsafe {
let ptr = self.ptr.0;
let ptr = *self.ptr;
let cap = self.cap;
let begin = self.ptr.0 as *const T;
let begin = ptr as *const T;
let end = if mem::size_of::<T>() == 0 {
(ptr as uint + self.len()) as *const T
} else {
@ -1110,14 +1115,15 @@ impl<T> Vec<T> {
let size = max(old_size, 2 * mem::size_of::<T>()) * 2;
if old_size > size { panic!("capacity overflow") }
unsafe {
self.ptr = Unique(alloc_or_realloc(self.ptr.0, old_size, size));
if self.ptr.0.is_null() { ::alloc::oom() }
let ptr = alloc_or_realloc(*self.ptr, old_size, size);
if ptr.is_null() { ::alloc::oom() }
self.ptr = NonZero::new(ptr);
}
self.cap = max(self.cap, 2) * 2;
}
unsafe {
let end = self.ptr.0.offset(self.len as int);
let end = (*self.ptr).offset(self.len as int);
ptr::write(&mut *end, value);
self.len += 1;
}
@ -1162,11 +1168,11 @@ impl<T> Vec<T> {
#[unstable = "matches collection reform specification, waiting for dust to settle"]
pub fn drain<'a>(&'a mut self) -> Drain<'a, T> {
unsafe {
let begin = self.ptr.0 as *const T;
let begin = *self.ptr as *const T;
let end = if mem::size_of::<T>() == 0 {
(self.ptr.0 as uint + self.len()) as *const T
(*self.ptr as uint + self.len()) as *const T
} else {
self.ptr.0.offset(self.len() as int) as *const T
(*self.ptr).offset(self.len() as int) as *const T
};
self.set_len(0);
Drain {
@ -1231,10 +1237,9 @@ impl<T> Vec<T> {
let size = capacity.checked_mul(mem::size_of::<T>())
.expect("capacity overflow");
unsafe {
self.ptr = Unique(alloc_or_realloc(self.ptr.0,
self.cap * mem::size_of::<T>(),
size));
if self.ptr.0.is_null() { ::alloc::oom() }
let ptr = alloc_or_realloc(*self.ptr, self.cap * mem::size_of::<T>(), size);
if ptr.is_null() { ::alloc::oom() }
self.ptr = NonZero::new(ptr);
}
self.cap = capacity;
}
@ -1357,7 +1362,7 @@ impl<T> AsSlice<T> for Vec<T> {
fn as_slice<'a>(&'a self) -> &'a [T] {
unsafe {
mem::transmute(RawSlice {
data: self.ptr.0 as *const T,
data: *self.ptr as *const T,
len: self.len
})
}
@ -1382,7 +1387,7 @@ impl<T> Drop for Vec<T> {
for x in self.iter() {
ptr::read(x);
}
dealloc(self.ptr.0, self.cap)
dealloc(*self.ptr, self.cap)
}
}
}
@ -1420,7 +1425,7 @@ impl<T> IntoIter<T> {
for _x in self { }
let IntoIter { allocation, cap, ptr: _ptr, end: _end } = self;
mem::forget(self);
Vec { ptr: Unique(allocation), cap: cap, len: 0 }
Vec { ptr: NonZero::new(allocation), cap: cap, len: 0 }
}
}


@ -93,6 +93,7 @@ pub mod prelude;
pub mod intrinsics;
pub mod mem;
pub mod nonzero;
pub mod ptr;
/* Core language traits */

src/libcore/nonzero.rs (new file, 53 lines)

@ -0,0 +1,53 @@
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Exposes the NonZero lang item which provides optimization hints.
use ops::Deref;
/// Unsafe trait to indicate what types are usable with the NonZero struct
pub unsafe trait Zeroable {}
unsafe impl<T> Zeroable for *const T {}
unsafe impl<T> Zeroable for *mut T {}
unsafe impl Zeroable for int {}
unsafe impl Zeroable for uint {}
unsafe impl Zeroable for i8 {}
unsafe impl Zeroable for u8 {}
unsafe impl Zeroable for i16 {}
unsafe impl Zeroable for u16 {}
unsafe impl Zeroable for i32 {}
unsafe impl Zeroable for u32 {}
unsafe impl Zeroable for i64 {}
unsafe impl Zeroable for u64 {}
/// A wrapper type for raw pointers and integers that will never be
/// NULL or 0, which may allow certain optimizations.
#[lang="non_zero"]
#[deriving(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Show)]
#[experimental]
pub struct NonZero<T: Zeroable>(T);
impl<T: Zeroable> NonZero<T> {
/// Create an instance of NonZero with the provided value.
/// You must ensure that the value is actually "non-zero".
#[inline(always)]
pub unsafe fn new(inner: T) -> NonZero<T> {
NonZero(inner)
}
}
impl<T: Zeroable> Deref<T> for NonZero<T> {
#[inline]
fn deref<'a>(&'a self) -> &'a T {
let NonZero(ref inner) = *self;
inner
}
}
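
For illustration, a hedged sketch of how code inside the facade might use this type (`NonZero` and its `new`/`Deref` are as defined above; `MyBox` is a made-up example type, not part of this commit):

```
use core::nonzero::NonZero;

// Hypothetical owning pointer: upholding the NonZero invariant lets
// Option<MyBox<T>> use NULL as the None representation.
struct MyBox<T> {
    ptr: NonZero<*mut T>,
}

impl<T> MyBox<T> {
    // Unsafe because NonZero::new performs no check; the caller must
    // guarantee that `ptr` is non-null.
    unsafe fn from_raw(ptr: *mut T) -> MyBox<T> {
        MyBox { ptr: NonZero::new(ptr) }
    }

    // Deref yields a reference to the wrapped value, so `*` gives the
    // raw pointer back.
    fn as_ptr(&self) -> *mut T {
        *self.ptr
    }
}
```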


@ -90,9 +90,9 @@
use mem;
use clone::Clone;
use intrinsics;
use kinds::{Send, Sync};
use option::Option;
use option::Option::{Some, None};
use kinds::{Send, Sync};
use cmp::{PartialEq, Eq, Ord, PartialOrd, Equiv};
use cmp::Ordering;
@ -109,6 +109,7 @@ pub use intrinsics::copy_memory;
#[experimental = "uncertain about naming and semantics"]
pub use intrinsics::set_memory;
/// Creates a null raw pointer.
///
/// # Examples


@ -25,6 +25,7 @@ mod fmt;
mod hash;
mod iter;
mod mem;
mod nonzero;
mod num;
mod ops;
mod option;

src/libcoretest/nonzero.rs (new file, 100 lines)

@ -0,0 +1,100 @@
// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use core::nonzero::NonZero;
use core::option::Option;
use core::option::Option::{Some, None};
use std::mem::size_of;
#[test]
fn test_create_nonzero_instance() {
let _a = unsafe {
NonZero::new(21i)
};
}
#[test]
fn test_size_nonzero_in_option() {
assert_eq!(size_of::<NonZero<u32>>(), size_of::<Option<NonZero<u32>>>());
}
#[test]
fn test_match_on_nonzero_option() {
let a = Some(unsafe {
NonZero::new(42i)
});
match a {
Some(val) => assert_eq!(*val, 42),
None => panic!("unexpected None while matching on Some(NonZero(_))")
}
match unsafe { Some(NonZero::new(43i)) } {
Some(val) => assert_eq!(*val, 43),
None => panic!("unexpected None while matching on Some(NonZero(_))")
}
}
#[test]
fn test_match_option_empty_vec() {
let a: Option<Vec<int>> = Some(vec![]);
match a {
None => panic!("unexpected None while matching on Some(vec![])"),
_ => {}
}
}
#[test]
fn test_match_option_vec() {
let a = Some(vec![1i, 2, 3, 4]);
match a {
Some(v) => assert_eq!(v, vec![1i, 2, 3, 4]),
None => panic!("unexpected None while matching on Some(vec![1, 2, 3, 4])")
}
}
#[test]
fn test_match_option_rc() {
use std::rc::Rc;
let five = Rc::new(5i);
match Some(five) {
Some(r) => assert_eq!(*r, 5i),
None => panic!("unexpected None while matching on Some(Rc::new(5))")
}
}
#[test]
fn test_match_option_arc() {
use std::sync::Arc;
let five = Arc::new(5i);
match Some(five) {
Some(a) => assert_eq!(*a, 5i),
None => panic!("unexpected None while matching on Some(Arc::new(5))")
}
}
#[test]
fn test_match_option_empty_string() {
let a = Some(String::new());
match a {
None => panic!("unexpected None while matching on Some(String::new())"),
_ => {}
}
}
#[test]
fn test_match_option_string() {
let five = "Five".into_string();
match Some(five) {
Some(s) => assert_eq!(s, "Five"),
None => panic!("unexpected None while matching on Some(String { ... })")
}
}


@ -327,6 +327,8 @@ lets_do_this! {
NoSyncItem, "no_sync_bound", no_sync_bound;
ManagedItem, "managed_bound", managed_bound;
NonZeroItem, "non_zero", non_zero;
IteratorItem, "iterator", iterator;
StackExhaustedLangItem, "stack_exhausted", stack_exhausted;


@ -43,14 +43,13 @@
#![allow(unsigned_negation)]
pub use self::PointerField::*;
pub use self::Repr::*;
use std::num::Int;
use std::rc::Rc;
use llvm::{ValueRef, True, IntEQ, IntNE};
use back::abi;
use back::abi::FAT_PTR_ADDR;
use middle::subst;
use middle::subst::Subst;
use trans::_match;
@ -71,7 +70,6 @@ use util::ppaux::ty_to_string;
type Hint = attr::ReprAttr;
/// Representations.
#[deriving(Eq, PartialEq, Show)]
pub enum Repr<'tcx> {
@ -101,7 +99,7 @@ pub enum Repr<'tcx> {
nullfields: Vec<Ty<'tcx>>
},
/// Two cases distinguished by a nullable pointer: the case with discriminant
/// `nndiscr` is represented by the struct `nonnull`, where the `ptrfield`th
/// `nndiscr` is represented by the struct `nonnull`, where the `discrfield`th
/// field is known to be nonnull due to its type; if that field is null, then
/// it represents the other case, which is inhabited by at most one value
/// (and all other fields are undefined/unused).
@ -112,7 +110,7 @@ pub enum Repr<'tcx> {
StructWrappedNullablePointer {
nonnull: Struct<'tcx>,
nndiscr: Disr,
ptrfield: PointerField,
discrfield: DiscrField,
nullfields: Vec<Ty<'tcx>>,
}
}
@ -230,22 +228,24 @@ fn represent_type_uncached<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
let st = mk_struct(cx, cases[discr].tys[],
false, t);
match cases[discr].find_ptr(cx) {
Some(ThinPointer(_)) if st.fields.len() == 1 => {
Some(ref df) if df.len() == 1 && st.fields.len() == 1 => {
return RawNullablePointer {
nndiscr: discr as Disr,
nnty: st.fields[0],
nullfields: cases[1 - discr].tys.clone()
};
}
Some(ptrfield) => {
Some(mut discrfield) => {
discrfield.push(0);
discrfield.reverse();
return StructWrappedNullablePointer {
nndiscr: discr as Disr,
nonnull: st,
ptrfield: ptrfield,
discrfield: discrfield,
nullfields: cases[1 - discr].tys.clone()
};
}
None => { }
None => {}
}
}
discr += 1;
@ -335,49 +335,98 @@ struct Case<'tcx> {
tys: Vec<Ty<'tcx>>
}
/// This represents the (GEP) indices to follow to get to the discriminant field
pub type DiscrField = Vec<uint>;
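// Illustrative note, not part of the original commit: the recursion in
// find_discr_field_candidate below builds this path child-first, and the
// caller then appends the enum field index, pushes a leading 0, and
// reverses (see represent_type_uncached above). E.g. for
// Option<(u8, Box<int>)> the tuple search yields [1], find_ptr appends
// field index 0 giving [1, 0], and push(0)/reverse() produce the final
// GEP path [0, 0, 1]: through the pointer, field 0 of the non-null
// variant struct, then field 1 of the tuple (the Box's pointer).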
#[deriving(Copy, Eq, PartialEq, Show)]
pub enum PointerField {
ThinPointer(uint),
FatPointer(uint)
}
fn find_discr_field_candidate<'tcx>(tcx: &ty::ctxt<'tcx>,
ty: Ty<'tcx>,
mut path: DiscrField) -> Option<DiscrField> {
match ty.sty {
// Fat &T/&mut T/Box<T> i.e. T is [T], str, or Trait
ty::ty_rptr(_, ty::mt { ty, .. }) | ty::ty_uniq(ty) if !ty::type_is_sized(tcx, ty) => {
path.push(FAT_PTR_ADDR);
Some(path)
},
// Regular thin pointer: &T/&mut T/Box<T>
ty::ty_rptr(..) | ty::ty_uniq(..) => Some(path),
// Functions are just pointers
ty::ty_bare_fn(..) => Some(path),
// Closures are a pair of pointers: the code and environment
ty::ty_closure(..) => {
path.push(FAT_PTR_ADDR);
Some(path)
},
// Is this the NonZero lang item wrapping a pointer or integer type?
ty::ty_struct(did, ref substs) if Some(did) == tcx.lang_items.non_zero() => {
let nonzero_fields = ty::lookup_struct_fields(tcx, did);
assert_eq!(nonzero_fields.len(), 1);
let nonzero_field = ty::lookup_field_type(tcx, did, nonzero_fields[0].id, substs);
match nonzero_field.sty {
ty::ty_ptr(..) | ty::ty_int(..) | ty::ty_uint(..) => {
path.push(0);
Some(path)
},
_ => None
}
},
// Perhaps one of the fields of this struct is non-zero
// let's recurse and find out
ty::ty_struct(def_id, ref substs) => {
let fields = ty::lookup_struct_fields(tcx, def_id);
for (j, field) in fields.iter().enumerate() {
let field_ty = ty::lookup_field_type(tcx, def_id, field.id, substs);
if let Some(mut fpath) = find_discr_field_candidate(tcx, field_ty, path.clone()) {
fpath.push(j);
return Some(fpath);
}
}
None
},
// Can we use one of the fields in this tuple?
ty::ty_tup(ref tys) => {
for (j, &ty) in tys.iter().enumerate() {
if let Some(mut fpath) = find_discr_field_candidate(tcx, ty, path.clone()) {
fpath.push(j);
return Some(fpath);
}
}
None
},
// Is this a fixed-size array of something non-zero
// with at least one element?
ty::ty_vec(ety, Some(d)) if d > 0 => {
if let Some(mut vpath) = find_discr_field_candidate(tcx, ety, path) {
vpath.push(0);
Some(vpath)
} else {
None
}
},
// Anything else is not a pointer
_ => None
}
}
impl<'tcx> Case<'tcx> {
fn is_zerolen<'a>(&self, cx: &CrateContext<'a, 'tcx>, scapegoat: Ty<'tcx>)
-> bool {
fn is_zerolen<'a>(&self, cx: &CrateContext<'a, 'tcx>, scapegoat: Ty<'tcx>) -> bool {
mk_struct(cx, self.tys[], false, scapegoat).size == 0
}
fn find_ptr<'a>(&self, cx: &CrateContext<'a, 'tcx>) -> Option<PointerField> {
fn find_ptr<'a>(&self, cx: &CrateContext<'a, 'tcx>) -> Option<DiscrField> {
for (i, &ty) in self.tys.iter().enumerate() {
match ty.sty {
// &T/&mut T/Box<T> could either be a thin or fat pointer depending on T
ty::ty_rptr(_, ty::mt { ty, .. }) | ty::ty_uniq(ty) => match ty.sty {
// &[T] and &str are a pointer and length pair
ty::ty_vec(_, None) | ty::ty_str => return Some(FatPointer(i)),
// &Trait is a pair of pointers: the actual object and a vtable
ty::ty_trait(..) => return Some(FatPointer(i)),
ty::ty_struct(..) if !ty::type_is_sized(cx.tcx(), ty) => {
return Some(FatPointer(i))
}
// Any other &T is just a pointer
_ => return Some(ThinPointer(i))
},
// Functions are just pointers
ty::ty_bare_fn(..) => return Some(ThinPointer(i)),
// Closures are a pair of pointers: the code and environment
ty::ty_closure(..) => return Some(FatPointer(i)),
// Anything else is not a pointer
_ => continue
if let Some(mut path) = find_discr_field_candidate(cx.tcx(), ty, vec![]) {
path.push(i);
return Some(path);
}
}
None
}
}
@ -709,8 +758,8 @@ pub fn trans_get_discr<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, r: &Repr<'tcx>,
val = ICmp(bcx, cmp, Load(bcx, scrutinee), C_null(llptrty));
signed = false;
}
StructWrappedNullablePointer { nndiscr, ptrfield, .. } => {
val = struct_wrapped_nullable_bitdiscr(bcx, nndiscr, ptrfield, scrutinee);
StructWrappedNullablePointer { nndiscr, ref discrfield, .. } => {
val = struct_wrapped_nullable_bitdiscr(bcx, nndiscr, discrfield, scrutinee);
signed = false;
}
}
@ -720,12 +769,9 @@ pub fn trans_get_discr<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, r: &Repr<'tcx>,
}
}
fn struct_wrapped_nullable_bitdiscr(bcx: Block, nndiscr: Disr, ptrfield: PointerField,
fn struct_wrapped_nullable_bitdiscr(bcx: Block, nndiscr: Disr, discrfield: &DiscrField,
scrutinee: ValueRef) -> ValueRef {
let llptrptr = match ptrfield {
ThinPointer(field) => GEPi(bcx, scrutinee, &[0, field]),
FatPointer(field) => GEPi(bcx, scrutinee, &[0, field, abi::FAT_PTR_ADDR])
};
let llptrptr = GEPi(bcx, scrutinee, discrfield[]);
let llptr = Load(bcx, llptrptr);
let cmp = if nndiscr == 0 { IntEQ } else { IntNE };
ICmp(bcx, cmp, llptr, C_null(val_ty(llptr)))
@ -811,17 +857,10 @@ pub fn trans_set_discr<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, r: &Repr<'tcx>,
Store(bcx, C_null(llptrty), val)
}
}
StructWrappedNullablePointer { ref nonnull, nndiscr, ptrfield, .. } => {
StructWrappedNullablePointer { nndiscr, ref discrfield, .. } => {
if discr != nndiscr {
let (llptrptr, llptrty) = match ptrfield {
ThinPointer(field) =>
(GEPi(bcx, val, &[0, field]),
type_of::type_of(bcx.ccx(), nonnull.fields[field])),
FatPointer(field) => {
let v = GEPi(bcx, val, &[0, field, abi::FAT_PTR_ADDR]);
(v, val_ty(v).element_type())
}
};
let llptrptr = GEPi(bcx, val, discrfield[]);
let llptrty = val_ty(llptrptr).element_type();
Store(bcx, C_null(llptrty), llptrptr)
}
}
@ -1041,7 +1080,7 @@ pub fn trans_const<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, r: &Repr<'tcx>, discr
false)
} else {
let vals = nonnull.fields.iter().map(|&ty| {
// Always use null even if it's not the `ptrfield`th
// Always use null even if it's not the `discrfield`th
// Always use null even if it's not the `discrfield`th
// field; see #8506.
C_null(type_of::sizing_type_of(ccx, ty))
}).collect::<Vec<ValueRef>>();
@ -1121,9 +1160,8 @@ fn padding(ccx: &CrateContext, size: u64) -> ValueRef {
#[inline]
fn roundup(x: u64, a: u32) -> u64 { let a = a as u64; ((x + (a - 1)) / a) * a }
/// Get the discriminant of a constant value. (Not currently used.)
pub fn const_get_discrim(ccx: &CrateContext, r: &Repr, val: ValueRef)
-> Disr {
/// Get the discriminant of a constant value.
pub fn const_get_discrim(ccx: &CrateContext, r: &Repr, val: ValueRef) -> Disr {
match *r {
CEnum(ity, _, _) => {
match ity {
@ -1138,25 +1176,8 @@ pub fn const_get_discrim(ccx: &CrateContext, r: &Repr, val: ValueRef)
}
}
Univariant(..) => 0,
RawNullablePointer { nndiscr, .. } => {
if is_null(val) {
/* subtraction as uint is ok because nndiscr is either 0 or 1 */
(1 - nndiscr) as Disr
} else {
nndiscr
}
}
StructWrappedNullablePointer { nndiscr, ptrfield, .. } => {
let (idx, sub_idx) = match ptrfield {
ThinPointer(field) => (field, None),
FatPointer(field) => (field, Some(abi::FAT_PTR_ADDR))
};
if is_null(const_struct_field(ccx, val, idx, sub_idx)) {
/* subtraction as uint is ok because nndiscr is either 0 or 1 */
(1 - nndiscr) as Disr
} else {
nndiscr
}
RawNullablePointer { .. } | StructWrappedNullablePointer { .. } => {
ccx.sess().bug("const discrim access of non c-like enum")
}
}
}
@ -1170,29 +1191,25 @@ pub fn const_get_field(ccx: &CrateContext, r: &Repr, val: ValueRef,
_discr: Disr, ix: uint) -> ValueRef {
match *r {
CEnum(..) => ccx.sess().bug("element access in C-like enum const"),
Univariant(..) => const_struct_field(ccx, val, ix, None),
General(..) => const_struct_field(ccx, val, ix + 1, None),
Univariant(..) => const_struct_field(ccx, val, ix),
General(..) => const_struct_field(ccx, val, ix + 1),
RawNullablePointer { .. } => {
assert_eq!(ix, 0);
val
}
StructWrappedNullablePointer{ .. } => const_struct_field(ccx, val, ix, None)
},
StructWrappedNullablePointer{ .. } => const_struct_field(ccx, val, ix)
}
}
/// Extract field of struct-like const, skipping our alignment padding.
fn const_struct_field(ccx: &CrateContext, val: ValueRef, ix: uint, sub_idx: Option<uint>)
-> ValueRef {
fn const_struct_field(ccx: &CrateContext, val: ValueRef, ix: uint) -> ValueRef {
// Get the ix-th non-undef element of the struct.
let mut real_ix = 0; // actual position in the struct
let mut ix = ix; // logical index relative to real_ix
let mut field;
loop {
loop {
field = match sub_idx {
Some(si) => const_get_elt(ccx, val, &[real_ix, si as u32]),
None => const_get_elt(ccx, val, &[real_ix])
};
field = const_get_elt(ccx, val, &[real_ix]);
if !is_undef(field) {
break;
}


@ -750,6 +750,7 @@ pub fn is_undef(val: ValueRef) -> bool {
}
}
#[allow(dead_code)] // potentially useful
pub fn is_null(val: ValueRef) -> bool {
unsafe {
llvm::LLVMIsNull(val) != False


@ -2292,14 +2292,14 @@ impl<'tcx> EnumMemberDescriptionFactory<'tcx> {
},
adt::StructWrappedNullablePointer { nonnull: ref struct_def,
nndiscr,
ptrfield, ..} => {
ref discrfield, ..} => {
// Create a description of the non-null variant
let (variant_type_metadata, variant_llvm_type, member_description_factory) =
describe_enum_variant(cx,
self.enum_type,
struct_def,
&*(*self.variants)[nndiscr as uint],
OptimizedDiscriminant(ptrfield),
OptimizedDiscriminant,
self.containing_scope,
self.span);
@ -2315,10 +2315,10 @@ impl<'tcx> EnumMemberDescriptionFactory<'tcx> {
// member's name.
let null_variant_index = (1 - nndiscr) as uint;
let null_variant_name = token::get_name((*self.variants)[null_variant_index].name);
let discrfield = match ptrfield {
adt::ThinPointer(field) => format!("{}", field),
adt::FatPointer(field) => format!("{}", field)
};
let discrfield = discrfield.iter()
.skip(1)
.map(|x| x.to_string())
.collect::<Vec<_>>().connect("$");
let union_member_name = format!("RUST$ENCODED$ENUM${}${}",
discrfield,
null_variant_name);
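// Illustrative, not in the original commit: the leading GEP index is
// skipped, so a path of [0, 0, 1] encodes as "0$1" and the union member
// is named e.g. "RUST$ENCODED$ENUM$0$1$None" for debuggers to decode.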
@ -2367,7 +2367,7 @@ impl<'tcx> VariantMemberDescriptionFactory<'tcx> {
#[deriving(Copy)]
enum EnumDiscriminantInfo {
RegularDiscriminant(DIType),
OptimizedDiscriminant(adt::PointerField),
OptimizedDiscriminant,
NoDiscriminant
}


@ -9,7 +9,12 @@
// except according to those terms.
extern crate core;
use core::nonzero::NonZero;
use std::mem::size_of;
use std::rc::Rc;
use std::sync::Arc;
trait Trait {}
@ -34,9 +39,33 @@ fn main() {
// Pointers - Box<T>
assert_eq!(size_of::<Box<int>>(), size_of::<Option<Box<int>>>());
// The optimization can't apply to raw pointers
assert!(size_of::<Option<*const int>>() != size_of::<*const int>());
assert!(Some(0 as *const int).is_some()); // Can't collapse None to null
struct Foo {
_a: Box<int>
}
struct Bar(Box<int>);
// Should apply through structs
assert_eq!(size_of::<Foo>(), size_of::<Option<Foo>>());
assert_eq!(size_of::<Bar>(), size_of::<Option<Bar>>());
// and tuples
assert_eq!(size_of::<(u8, Box<int>)>(), size_of::<Option<(u8, Box<int>)>>());
// and fixed-size arrays
assert_eq!(size_of::<[Box<int>, ..1]>(), size_of::<Option<[Box<int>, ..1]>>());
// Should apply to NonZero
assert_eq!(size_of::<NonZero<uint>>(), size_of::<Option<NonZero<uint>>>());
assert_eq!(size_of::<NonZero<*mut i8>>(), size_of::<Option<NonZero<*mut i8>>>());
// Should apply to types that use NonZero internally
assert_eq!(size_of::<Vec<int>>(), size_of::<Option<Vec<int>>>());
assert_eq!(size_of::<Arc<int>>(), size_of::<Option<Arc<int>>>());
assert_eq!(size_of::<Rc<int>>(), size_of::<Option<Rc<int>>>());
// Should apply to types that have NonZero transitively
assert_eq!(size_of::<String>(), size_of::<Option<String>>());
}