commit 1506b34e0c

rollup merge of #22286: nikomatsakis/variance-4b

Conflicts:
    src/librustc/middle/infer/combine.rs
    src/librustc_typeck/check/wf.rs
@@ -572,7 +572,7 @@ the final namespace qualifier is omitted.
 Two examples of paths with type arguments:

 ```
-# struct HashMap<K, V>;
+# struct HashMap<K, V>(K,V);
 # fn f() {
 # fn id<T>(t: T) -> T { t }
 type T = HashMap<i32,String>; // Type arguments used in a type expression
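The documentation examples above change because, with the variance work in this PR, a declared type parameter has to actually appear in the type's body; otherwise its variance cannot be inferred and the compiler reports an error. A minimal sketch of the two usual fixes, written in current Rust syntax rather than the 2015-era syntax of the diff (type names here are illustrative, not from the source):

```
#![allow(dead_code)]
use std::marker::PhantomData;

// Rejected under the new rules: `K` and `V` are declared but never used.
// struct HashMap<K, V>;

// Fix 1: actually store the parameters, as the manual example now does.
struct PairMap<K, V>(K, V);

// Fix 2: keep the type zero-sized but record the parameters with PhantomData.
struct MarkerMap<K, V> {
    _marker: PhantomData<(K, V)>,
}

fn main() {
    let _a = PairMap(1u32, "one");
    let _b: MarkerMap<u32, &str> = MarkerMap { _marker: PhantomData };
}
```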
@@ -1599,7 +1599,7 @@ pointer values (pointing to a type for which an implementation of the given
 trait is in scope) to pointers to the trait name, used as a type.

 ```
-# trait Shape { }
+# trait Shape { fn dummy(&self) { } }
 # impl Shape for i32 { }
 # let mycircle = 0i32;
 let myshape: Box<Shape> = Box::new(mycircle) as Box<Shape>;
@@ -1630,8 +1630,8 @@ let x: f64 = Num::from_i32(42);
 Traits may inherit from other traits. For example, in

 ```
-trait Shape { fn area() -> f64; }
-trait Circle : Shape { fn radius() -> f64; }
+trait Shape { fn area(&self) -> f64; }
+trait Circle : Shape { fn radius(&self) -> f64; }
 ```

 the syntax `Circle : Shape` means that types that implement `Circle` must also
@@ -1725,7 +1725,7 @@ type parameters taken by the trait it implements. Implementation parameters
 are written after the `impl` keyword.

 ```
-# trait Seq<T> { }
+# trait Seq<T> { fn dummy(&self, _: T) { } }
 impl<T> Seq<T> for Vec<T> {
 /* ... */
 }
@@ -96,7 +96,7 @@ pub struct Arena<'longer_than_self> {
     head: RefCell<Chunk>,
     copy_head: RefCell<Chunk>,
     chunks: RefCell<Vec<Chunk>>,
-    _invariant: marker::InvariantLifetime<'longer_than_self>,
+    _marker: marker::PhantomData<*mut &'longer_than_self()>,
 }

 impl<'a> Arena<'a> {
@@ -111,7 +111,7 @@ impl<'a> Arena<'a> {
            head: RefCell::new(chunk(initial_size, false)),
            copy_head: RefCell::new(chunk(initial_size, true)),
            chunks: RefCell::new(Vec::new()),
-           _invariant: marker::InvariantLifetime,
+           _marker: marker::PhantomData,
        }
    }
 }
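The arena change replaces the dedicated `InvariantLifetime` marker with a `PhantomData` type whose own variance carries the same constraint: `*mut T` is invariant in `T`, so `PhantomData<*mut &'longer_than_self ()>` makes the arena invariant over that lifetime. A small sketch, under current Rust and with hypothetical wrapper names, of how the choice of phantom type controls what the borrow checker accepts:

```
#![allow(dead_code)]
use std::marker::PhantomData;

// Behaves like a `&'a ()` field: covariant, so the lifetime may shrink.
struct Covariant<'a> {
    _marker: PhantomData<&'a ()>,
}

// Behaves like a `*mut &'a ()` field (the encoding the arena now uses):
// invariant, so 'a may neither shrink nor grow.
struct Invariant<'a> {
    _marker: PhantomData<*mut &'a ()>,
}

fn shorten<'long: 'short, 'short>(x: Covariant<'long>) -> Covariant<'short> {
    x // accepted: covariance lets the lifetime be shortened
}

// The analogous function on `Invariant` is rejected by the compiler:
// fn shorten_inv<'long: 'short, 'short>(x: Invariant<'long>) -> Invariant<'short> { x }

fn main() {
    let c: Covariant<'static> = Covariant { _marker: PhantomData };
    let _shorter: Covariant<'_> = shorten(c);
}
```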
@@ -361,6 +361,8 @@ pub struct TypedArena<T> {
 }

 struct TypedArenaChunk<T> {
+    marker: marker::PhantomData<T>,
+
     /// Pointer to the next arena segment.
     next: *mut TypedArenaChunk<T>,

@@ -512,13 +512,22 @@ mod stack {
    use super::super::node::handle;
    use vec::Vec;

+   struct InvariantLifetime<'id>(
+       marker::PhantomData<::core::cell::Cell<&'id ()>>);
+
+   impl<'id> InvariantLifetime<'id> {
+       fn new() -> InvariantLifetime<'id> {
+           InvariantLifetime(marker::PhantomData)
+       }
+   }
+
    /// A generic mutable reference, identical to `&mut` except for the fact that its lifetime
    /// parameter is invariant. This means that wherever an `IdRef` is expected, only an `IdRef`
    /// with the exact requested lifetime can be used. This is in contrast to normal references,
    /// where `&'static` can be used in any function expecting any lifetime reference.
    pub struct IdRef<'id, T: 'id> {
        inner: &'id mut T,
-       marker: marker::InvariantLifetime<'id>
+       _marker: InvariantLifetime<'id>,
    }

    impl<'id, T> Deref for IdRef<'id, T> {
@@ -560,7 +569,7 @@ mod stack {
    pub struct Pusher<'id, 'a, K:'a, V:'a> {
        map: &'a mut BTreeMap<K, V>,
        stack: Stack<K, V>,
-       marker: marker::InvariantLifetime<'id>
+       _marker: InvariantLifetime<'id>,
    }

    impl<'a, K, V> PartialSearchStack<'a, K, V> {
@@ -595,11 +604,11 @@ mod stack {
        let pusher = Pusher {
            map: self.map,
            stack: self.stack,
-           marker: marker::InvariantLifetime
+           _marker: InvariantLifetime::new(),
        };
        let node = IdRef {
            inner: unsafe { &mut *self.next },
-           marker: marker::InvariantLifetime
+           _marker: InvariantLifetime::new(),
        };

        closure(pusher, node)
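Here the standard-library `marker::InvariantLifetime<'id>` is replaced by a local newtype over `PhantomData<Cell<&'id ()>>` (the `Cell` is what makes `'id` invariant). The `'id` lifetime is a "brand": each closure invocation is quantified over a fresh, un-unifiable lifetime, so an `IdRef`/`Pusher` pair from one invocation cannot be mixed with values from another. A minimal sketch of that pattern in current Rust, with hypothetical names (`Brand`, `Token`, `with_brand`) rather than the ones in the source:

```
use std::cell::Cell;
use std::marker::PhantomData;

// Invariant over 'id: Cell<&'id ()> lets the lifetime neither shrink nor
// grow, so two different brands never unify.
struct Brand<'id>(PhantomData<Cell<&'id ()>>);

// A value that is only meaningful for one particular brand.
struct Token<'id> {
    value: u32,
    _brand: Brand<'id>,
}

// The callback must work for every possible 'id (a fresh brand per call),
// so a token cannot escape into a differently-branded context.
fn with_brand<R>(f: impl for<'id> FnOnce(Token<'id>) -> R) -> R {
    f(Token { value: 42, _brand: Brand(PhantomData) })
}

fn double<'id>(tok: Token<'id>) -> u32 {
    tok.value * 2
}

fn main() {
    let doubled = with_brand(double);
    println!("{doubled}");
}
```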
@ -20,10 +20,11 @@ use core::prelude::*;
|
||||
|
||||
use core::cmp::Ordering::{Greater, Less, Equal};
|
||||
use core::iter::Zip;
|
||||
use core::marker::PhantomData;
|
||||
use core::ops::{Deref, DerefMut, Index, IndexMut};
|
||||
use core::ptr::Unique;
|
||||
use core::{slice, mem, ptr, cmp, num, raw};
|
||||
use alloc::heap;
|
||||
use alloc::heap::{self, EMPTY};
|
||||
|
||||
use borrow::Borrow;
|
||||
|
||||
@ -58,8 +59,8 @@ pub struct Node<K, V> {
|
||||
keys: Unique<K>,
|
||||
vals: Unique<V>,
|
||||
|
||||
// In leaf nodes, this will be null, and no space will be allocated for edges.
|
||||
edges: Unique<Node<K, V>>,
|
||||
// In leaf nodes, this will be None, and no space will be allocated for edges.
|
||||
edges: Option<Unique<Node<K, V>>>,
|
||||
|
||||
// At any given time, there will be `_len` keys, `_len` values, and (in an internal node)
|
||||
// `_len + 1` edges. In a leaf node, there will never be any edges.
|
||||
@ -279,8 +280,11 @@ impl<T> Drop for RawItems<T> {
|
||||
#[unsafe_destructor]
|
||||
impl<K, V> Drop for Node<K, V> {
|
||||
fn drop(&mut self) {
|
||||
if self.keys.ptr.is_null() {
|
||||
// We have already cleaned up this node.
|
||||
if self.keys.is_null() {
|
||||
// Since we have #[unsafe_no_drop_flag], we have to watch
|
||||
// out for a null value being stored in self.keys. (Using
|
||||
// null is technically a violation of the `Unique`
|
||||
// requirements, though.)
|
||||
return;
|
||||
}
|
||||
|
||||
@ -293,7 +297,7 @@ impl<K, V> Drop for Node<K, V> {
|
||||
self.destroy();
|
||||
}
|
||||
|
||||
self.keys.ptr = ptr::null_mut();
|
||||
self.keys = unsafe { Unique::new(0 as *mut K) };
|
||||
}
|
||||
}
|
||||
|
||||
@ -309,9 +313,9 @@ impl<K, V> Node<K, V> {
|
||||
let (vals_offset, edges_offset) = calculate_offsets_generic::<K, V>(capacity, false);
|
||||
|
||||
Node {
|
||||
keys: Unique(buffer as *mut K),
|
||||
vals: Unique(buffer.offset(vals_offset as isize) as *mut V),
|
||||
edges: Unique(buffer.offset(edges_offset as isize) as *mut Node<K, V>),
|
||||
keys: Unique::new(buffer as *mut K),
|
||||
vals: Unique::new(buffer.offset(vals_offset as isize) as *mut V),
|
||||
edges: Some(Unique::new(buffer.offset(edges_offset as isize) as *mut Node<K, V>)),
|
||||
_len: 0,
|
||||
_capacity: capacity,
|
||||
}
|
||||
@ -327,9 +331,9 @@ impl<K, V> Node<K, V> {
|
||||
let (vals_offset, _) = calculate_offsets_generic::<K, V>(capacity, true);
|
||||
|
||||
Node {
|
||||
keys: Unique(buffer as *mut K),
|
||||
vals: Unique(unsafe { buffer.offset(vals_offset as isize) as *mut V }),
|
||||
edges: Unique(ptr::null_mut()),
|
||||
keys: unsafe { Unique::new(buffer as *mut K) },
|
||||
vals: unsafe { Unique::new(buffer.offset(vals_offset as isize) as *mut V) },
|
||||
edges: None,
|
||||
_len: 0,
|
||||
_capacity: capacity,
|
||||
}
|
||||
@ -338,18 +342,18 @@ impl<K, V> Node<K, V> {
|
||||
unsafe fn destroy(&mut self) {
|
||||
let (alignment, size) =
|
||||
calculate_allocation_generic::<K, V>(self.capacity(), self.is_leaf());
|
||||
heap::deallocate(self.keys.ptr as *mut u8, size, alignment);
|
||||
heap::deallocate(*self.keys as *mut u8, size, alignment);
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub fn as_slices<'a>(&'a self) -> (&'a [K], &'a [V]) {
|
||||
unsafe {(
|
||||
mem::transmute(raw::Slice {
|
||||
data: self.keys.ptr,
|
||||
data: *self.keys as *const K,
|
||||
len: self.len()
|
||||
}),
|
||||
mem::transmute(raw::Slice {
|
||||
data: self.vals.ptr,
|
||||
data: *self.vals as *const V,
|
||||
len: self.len()
|
||||
})
|
||||
)}
|
||||
@ -368,8 +372,12 @@ impl<K, V> Node<K, V> {
|
||||
&[]
|
||||
} else {
|
||||
unsafe {
|
||||
let data = match self.edges {
|
||||
None => heap::EMPTY as *const Node<K,V>,
|
||||
Some(ref p) => **p as *const Node<K,V>,
|
||||
};
|
||||
mem::transmute(raw::Slice {
|
||||
data: self.edges.ptr,
|
||||
data: data,
|
||||
len: self.len() + 1
|
||||
})
|
||||
}
|
||||
@ -525,7 +533,8 @@ impl<K: Clone, V: Clone> Clone for Node<K, V> {
|
||||
#[derive(Copy)]
|
||||
pub struct Handle<NodeRef, Type, NodeType> {
|
||||
node: NodeRef,
|
||||
index: usize
|
||||
index: usize,
|
||||
marker: PhantomData<(Type, NodeType)>,
|
||||
}
|
||||
|
||||
pub mod handle {
|
||||
@ -549,8 +558,8 @@ impl<K: Ord, V> Node<K, V> {
|
||||
// For the B configured as of this writing (B = 6), binary search was *significantly*
|
||||
// worse for usizes.
|
||||
match node.as_slices_internal().search_linear(key) {
|
||||
(index, true) => Found(Handle { node: node, index: index }),
|
||||
(index, false) => GoDown(Handle { node: node, index: index }),
|
||||
(index, true) => Found(Handle { node: node, index: index, marker: PhantomData }),
|
||||
(index, false) => GoDown(Handle { node: node, index: index, marker: PhantomData }),
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -587,7 +596,7 @@ impl <K, V> Node<K, V> {
|
||||
|
||||
/// If the node has any children
|
||||
pub fn is_leaf(&self) -> bool {
|
||||
self.edges.ptr.is_null()
|
||||
self.edges.is_none()
|
||||
}
|
||||
|
||||
/// if the node has too few elements
|
||||
@ -619,7 +628,8 @@ impl<K, V, NodeRef, Type, NodeType> Handle<NodeRef, Type, NodeType> where
|
||||
pub fn as_raw(&mut self) -> Handle<*mut Node<K, V>, Type, NodeType> {
|
||||
Handle {
|
||||
node: &mut *self.node as *mut _,
|
||||
index: self.index
|
||||
index: self.index,
|
||||
marker: PhantomData,
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -631,7 +641,8 @@ impl<K, V, Type, NodeType> Handle<*mut Node<K, V>, Type, NodeType> {
|
||||
pub unsafe fn from_raw<'a>(&'a self) -> Handle<&'a Node<K, V>, Type, NodeType> {
|
||||
Handle {
|
||||
node: &*self.node,
|
||||
index: self.index
|
||||
index: self.index,
|
||||
marker: PhantomData,
|
||||
}
|
||||
}
|
||||
|
||||
@ -641,7 +652,8 @@ impl<K, V, Type, NodeType> Handle<*mut Node<K, V>, Type, NodeType> {
|
||||
pub unsafe fn from_raw_mut<'a>(&'a mut self) -> Handle<&'a mut Node<K, V>, Type, NodeType> {
|
||||
Handle {
|
||||
node: &mut *self.node,
|
||||
index: self.index
|
||||
index: self.index,
|
||||
marker: PhantomData,
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -689,12 +701,14 @@ impl<K, V, NodeRef: Deref<Target=Node<K, V>>, Type> Handle<NodeRef, Type, handle
|
||||
if self.node.is_leaf() {
|
||||
Leaf(Handle {
|
||||
node: self.node,
|
||||
index: self.index
|
||||
index: self.index,
|
||||
marker: PhantomData,
|
||||
})
|
||||
} else {
|
||||
Internal(Handle {
|
||||
node: self.node,
|
||||
index: self.index
|
||||
index: self.index,
|
||||
marker: PhantomData,
|
||||
})
|
||||
}
|
||||
}
|
||||
@ -827,7 +841,8 @@ impl<K, V, NodeRef, NodeType> Handle<NodeRef, handle::Edge, NodeType> where
|
||||
unsafe fn left_kv<'a>(&'a mut self) -> Handle<&'a mut Node<K, V>, handle::KV, NodeType> {
|
||||
Handle {
|
||||
node: &mut *self.node,
|
||||
index: self.index - 1
|
||||
index: self.index - 1,
|
||||
marker: PhantomData,
|
||||
}
|
||||
}
|
||||
|
||||
@ -837,7 +852,8 @@ impl<K, V, NodeRef, NodeType> Handle<NodeRef, handle::Edge, NodeType> where
|
||||
unsafe fn right_kv<'a>(&'a mut self) -> Handle<&'a mut Node<K, V>, handle::KV, NodeType> {
|
||||
Handle {
|
||||
node: &mut *self.node,
|
||||
index: self.index
|
||||
index: self.index,
|
||||
marker: PhantomData,
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -877,7 +893,8 @@ impl<'a, K: 'a, V: 'a, NodeType> Handle<&'a mut Node<K, V>, handle::KV, NodeType
|
||||
pub fn into_left_edge(self) -> Handle<&'a mut Node<K, V>, handle::Edge, NodeType> {
|
||||
Handle {
|
||||
node: &mut *self.node,
|
||||
index: self.index
|
||||
index: self.index,
|
||||
marker: PhantomData,
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -927,7 +944,8 @@ impl<K, V, NodeRef, NodeType> Handle<NodeRef, handle::KV, NodeType> where
|
||||
pub fn left_edge<'a>(&'a mut self) -> Handle<&'a mut Node<K, V>, handle::Edge, NodeType> {
|
||||
Handle {
|
||||
node: &mut *self.node,
|
||||
index: self.index
|
||||
index: self.index,
|
||||
marker: PhantomData,
|
||||
}
|
||||
}
|
||||
|
||||
@ -936,7 +954,8 @@ impl<K, V, NodeRef, NodeType> Handle<NodeRef, handle::KV, NodeType> where
|
||||
pub fn right_edge<'a>(&'a mut self) -> Handle<&'a mut Node<K, V>, handle::Edge, NodeType> {
|
||||
Handle {
|
||||
node: &mut *self.node,
|
||||
index: self.index + 1
|
||||
index: self.index + 1,
|
||||
marker: PhantomData,
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1045,7 +1064,8 @@ impl<K, V> Node<K, V> {
|
||||
debug_assert!(index < self.len(), "kv_handle index out of bounds");
|
||||
Handle {
|
||||
node: self,
|
||||
index: index
|
||||
index: index,
|
||||
marker: PhantomData,
|
||||
}
|
||||
}
|
||||
|
||||
@ -1065,7 +1085,7 @@ impl<K, V> Node<K, V> {
|
||||
vals: RawItems::from_slice(self.vals()),
|
||||
edges: RawItems::from_slice(self.edges()),
|
||||
|
||||
ptr: self.keys.ptr as *mut u8,
|
||||
ptr: *self.keys as *mut u8,
|
||||
capacity: self.capacity(),
|
||||
is_leaf: self.is_leaf()
|
||||
},
|
||||
|
@ -14,6 +14,7 @@
|
||||
//! representation to hold C-like enum variants.
|
||||
|
||||
use core::prelude::*;
|
||||
use core::marker;
|
||||
use core::fmt;
|
||||
use core::num::Int;
|
||||
use core::iter::{FromIterator, IntoIterator};
|
||||
@ -26,7 +27,8 @@ use core::ops::{Sub, BitOr, BitAnd, BitXor};
|
||||
pub struct EnumSet<E> {
|
||||
// We must maintain the invariant that no bits are set
|
||||
// for which no variant exists
|
||||
bits: usize
|
||||
bits: usize,
|
||||
marker: marker::PhantomData<E>,
|
||||
}
|
||||
|
||||
impl<E> Copy for EnumSet<E> {}
|
||||
@ -86,7 +88,7 @@ impl<E:CLike> EnumSet<E> {
|
||||
#[unstable(feature = "collections",
|
||||
reason = "matches collection reform specification, waiting for dust to settle")]
|
||||
pub fn new() -> EnumSet<E> {
|
||||
EnumSet {bits: 0}
|
||||
EnumSet {bits: 0, marker: marker::PhantomData}
|
||||
}
|
||||
|
||||
/// Returns the number of elements in the given `EnumSet`.
|
||||
@ -130,12 +132,14 @@ impl<E:CLike> EnumSet<E> {
|
||||
|
||||
/// Returns the union of both `EnumSets`.
|
||||
pub fn union(&self, e: EnumSet<E>) -> EnumSet<E> {
|
||||
EnumSet {bits: self.bits | e.bits}
|
||||
EnumSet {bits: self.bits | e.bits,
|
||||
marker: marker::PhantomData}
|
||||
}
|
||||
|
||||
/// Returns the intersection of both `EnumSets`.
|
||||
pub fn intersection(&self, e: EnumSet<E>) -> EnumSet<E> {
|
||||
EnumSet {bits: self.bits & e.bits}
|
||||
EnumSet {bits: self.bits & e.bits,
|
||||
marker: marker::PhantomData}
|
||||
}
|
||||
|
||||
/// Adds an enum to the `EnumSet`, and returns `true` if it wasn't there before
|
||||
@ -175,7 +179,7 @@ impl<E:CLike> Sub for EnumSet<E> {
|
||||
type Output = EnumSet<E>;
|
||||
|
||||
fn sub(self, e: EnumSet<E>) -> EnumSet<E> {
|
||||
EnumSet {bits: self.bits & !e.bits}
|
||||
EnumSet {bits: self.bits & !e.bits, marker: marker::PhantomData}
|
||||
}
|
||||
}
|
||||
|
||||
@ -183,7 +187,7 @@ impl<E:CLike> BitOr for EnumSet<E> {
|
||||
type Output = EnumSet<E>;
|
||||
|
||||
fn bitor(self, e: EnumSet<E>) -> EnumSet<E> {
|
||||
EnumSet {bits: self.bits | e.bits}
|
||||
EnumSet {bits: self.bits | e.bits, marker: marker::PhantomData}
|
||||
}
|
||||
}
|
||||
|
||||
@ -191,7 +195,7 @@ impl<E:CLike> BitAnd for EnumSet<E> {
|
||||
type Output = EnumSet<E>;
|
||||
|
||||
fn bitand(self, e: EnumSet<E>) -> EnumSet<E> {
|
||||
EnumSet {bits: self.bits & e.bits}
|
||||
EnumSet {bits: self.bits & e.bits, marker: marker::PhantomData}
|
||||
}
|
||||
}
|
||||
|
||||
@ -199,7 +203,7 @@ impl<E:CLike> BitXor for EnumSet<E> {
|
||||
type Output = EnumSet<E>;
|
||||
|
||||
fn bitxor(self, e: EnumSet<E>) -> EnumSet<E> {
|
||||
EnumSet {bits: self.bits ^ e.bits}
|
||||
EnumSet {bits: self.bits ^ e.bits, marker: marker::PhantomData}
|
||||
}
|
||||
}
|
||||
|
||||
@ -207,6 +211,7 @@ impl<E:CLike> BitXor for EnumSet<E> {
|
||||
pub struct Iter<E> {
|
||||
index: usize,
|
||||
bits: usize,
|
||||
marker: marker::PhantomData<E>,
|
||||
}
|
||||
|
||||
// FIXME(#19839) Remove in favor of `#[derive(Clone)]`
|
||||
@ -215,13 +220,14 @@ impl<E> Clone for Iter<E> {
|
||||
Iter {
|
||||
index: self.index,
|
||||
bits: self.bits,
|
||||
marker: marker::PhantomData,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<E:CLike> Iter<E> {
|
||||
fn new(bits: usize) -> Iter<E> {
|
||||
Iter { index: 0, bits: bits }
|
||||
Iter { index: 0, bits: bits, marker: marker::PhantomData }
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -57,13 +57,13 @@ use core::fmt;
|
||||
use core::hash::{self, Hash};
|
||||
use core::intrinsics::assume;
|
||||
use core::iter::{repeat, FromIterator, IntoIterator};
|
||||
use core::marker::{self, ContravariantLifetime, InvariantType};
|
||||
use core::marker::PhantomData;
|
||||
use core::mem;
|
||||
use core::nonzero::NonZero;
|
||||
use core::num::{Int, UnsignedInt};
|
||||
use core::ops::{Index, IndexMut, Deref, Add};
|
||||
use core::ops;
|
||||
use core::ptr;
|
||||
use core::ptr::Unique;
|
||||
use core::raw::Slice as RawSlice;
|
||||
use core::slice;
|
||||
use core::usize;
|
||||
@@ -139,10 +139,9 @@ use borrow::{Cow, IntoCow};
 #[unsafe_no_drop_flag]
 #[stable(feature = "rust1", since = "1.0.0")]
 pub struct Vec<T> {
-    ptr: NonZero<*mut T>,
+    ptr: Unique<T>,
     len: usize,
     cap: usize,
-    _own: marker::PhantomData<T>,
 }

 unsafe impl<T: Send> Send for Vec<T> { }
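`Vec<T>` now stores a `Unique<T>`, which bundles the non-null raw pointer with a `PhantomData<T>` ownership marker, replacing the separate `NonZero` pointer and `_own` field. The marker is what makes the container behave like an owner of `T`s (covariant like `Box<T>`, and visible to drop analysis) rather than like a bare `*mut T`. A reduced sketch of the same idea using today's stable `NonNull`; `MiniBox` is a made-up illustration, not the actual std implementation:

```
use std::marker::PhantomData;
use std::ptr::NonNull;

// A minimal owning pointer: a bare *mut T would be invariant and would not
// signal ownership; NonNull<T> restores covariance and PhantomData<T>
// records that a T is owned here.
struct MiniBox<T> {
    ptr: NonNull<T>,
    _owns: PhantomData<T>,
}

impl<T> MiniBox<T> {
    fn new(value: T) -> Self {
        let raw = Box::into_raw(Box::new(value));
        // SAFETY: Box::into_raw never returns null.
        MiniBox { ptr: unsafe { NonNull::new_unchecked(raw) }, _owns: PhantomData }
    }
}

impl<T> Drop for MiniBox<T> {
    fn drop(&mut self) {
        // SAFETY: we uniquely own the allocation created in `new`.
        unsafe { drop(Box::from_raw(self.ptr.as_ptr())) };
    }
}

fn main() {
    let b = MiniBox::new(String::from("owned"));
    // SAFETY: the allocation is live until `b` is dropped.
    println!("{}", unsafe { b.ptr.as_ref() });
}
```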
@ -251,10 +250,9 @@ impl<T> Vec<T> {
|
||||
pub unsafe fn from_raw_parts(ptr: *mut T, length: usize,
|
||||
capacity: usize) -> Vec<T> {
|
||||
Vec {
|
||||
ptr: NonZero::new(ptr),
|
||||
ptr: Unique::new(ptr),
|
||||
len: length,
|
||||
cap: capacity,
|
||||
_own: marker::PhantomData,
|
||||
}
|
||||
}
|
||||
|
||||
@ -375,7 +373,7 @@ impl<T> Vec<T> {
|
||||
self.len * mem::size_of::<T>(),
|
||||
mem::min_align_of::<T>()) as *mut T;
|
||||
if ptr.is_null() { ::alloc::oom() }
|
||||
self.ptr = NonZero::new(ptr);
|
||||
self.ptr = Unique::new(ptr);
|
||||
}
|
||||
self.cap = self.len;
|
||||
}
|
||||
@ -657,7 +655,7 @@ impl<T> Vec<T> {
|
||||
unsafe {
|
||||
let ptr = alloc_or_realloc(*self.ptr, old_size, size);
|
||||
if ptr.is_null() { ::alloc::oom() }
|
||||
self.ptr = NonZero::new(ptr);
|
||||
self.ptr = Unique::new(ptr);
|
||||
}
|
||||
self.cap = max(self.cap, 2) * 2;
|
||||
}
|
||||
@ -758,7 +756,7 @@ impl<T> Vec<T> {
|
||||
Drain {
|
||||
ptr: begin,
|
||||
end: end,
|
||||
marker: ContravariantLifetime,
|
||||
marker: PhantomData,
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -873,6 +871,8 @@ impl<T> Vec<T> {
|
||||
end_t: unsafe { start.offset(offset) },
|
||||
start_u: start as *mut U,
|
||||
end_u: start as *mut U,
|
||||
|
||||
_marker: PhantomData,
|
||||
};
|
||||
// start_t
|
||||
// start_u
|
||||
@ -969,8 +969,7 @@ impl<T> Vec<T> {
|
||||
let mut pv = PartialVecZeroSized::<T,U> {
|
||||
num_t: vec.len(),
|
||||
num_u: 0,
|
||||
marker_t: InvariantType,
|
||||
marker_u: InvariantType,
|
||||
marker: PhantomData,
|
||||
};
|
||||
unsafe { mem::forget(vec); }
|
||||
|
||||
@ -1228,7 +1227,7 @@ impl<T> Vec<T> {
|
||||
unsafe {
|
||||
let ptr = alloc_or_realloc(*self.ptr, self.cap * mem::size_of::<T>(), size);
|
||||
if ptr.is_null() { ::alloc::oom() }
|
||||
self.ptr = NonZero::new(ptr);
|
||||
self.ptr = Unique::new(ptr);
|
||||
}
|
||||
self.cap = capacity;
|
||||
}
|
||||
@ -1795,10 +1794,10 @@ impl<T> Drop for IntoIter<T> {
|
||||
#[unsafe_no_drop_flag]
|
||||
#[unstable(feature = "collections",
|
||||
reason = "recently added as part of collections reform 2")]
|
||||
pub struct Drain<'a, T> {
|
||||
pub struct Drain<'a, T:'a> {
|
||||
ptr: *const T,
|
||||
end: *const T,
|
||||
marker: ContravariantLifetime<'a>,
|
||||
marker: PhantomData<&'a T>,
|
||||
}
|
||||
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
@ -1883,9 +1882,9 @@ impl<'a, T> Drop for Drain<'a, T> {
|
||||
|
||||
/// Wrapper type providing a `&Vec<T>` reference via `Deref`.
|
||||
#[unstable(feature = "collections")]
|
||||
pub struct DerefVec<'a, T> {
|
||||
pub struct DerefVec<'a, T:'a> {
|
||||
x: Vec<T>,
|
||||
l: ContravariantLifetime<'a>
|
||||
l: PhantomData<&'a T>,
|
||||
}
|
||||
|
||||
#[unstable(feature = "collections")]
|
||||
@ -1913,7 +1912,7 @@ pub fn as_vec<'a, T>(x: &'a [T]) -> DerefVec<'a, T> {
|
||||
unsafe {
|
||||
DerefVec {
|
||||
x: Vec::from_raw_parts(x.as_ptr() as *mut T, x.len(), x.len()),
|
||||
l: ContravariantLifetime::<'a>
|
||||
l: PhantomData,
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1937,6 +1936,8 @@ struct PartialVecNonZeroSized<T,U> {
|
||||
end_u: *mut U,
|
||||
start_t: *mut T,
|
||||
end_t: *mut T,
|
||||
|
||||
_marker: PhantomData<U>,
|
||||
}
|
||||
|
||||
/// An owned, partially type-converted vector of zero-sized elements.
|
||||
@ -1946,8 +1947,7 @@ struct PartialVecNonZeroSized<T,U> {
|
||||
struct PartialVecZeroSized<T,U> {
|
||||
num_t: usize,
|
||||
num_u: usize,
|
||||
marker_t: InvariantType<T>,
|
||||
marker_u: InvariantType<U>,
|
||||
marker: PhantomData<::core::cell::Cell<(T,U)>>,
|
||||
}
|
||||
|
||||
#[unsafe_destructor]
|
||||
|
@ -28,7 +28,7 @@ use core::marker;
|
||||
use core::mem;
|
||||
use core::num::{Int, UnsignedInt};
|
||||
use core::ops::{Index, IndexMut};
|
||||
use core::ptr;
|
||||
use core::ptr::{self, Unique};
|
||||
use core::raw::Slice as RawSlice;
|
||||
|
||||
use core::hash::{Hash, Hasher};
|
||||
@ -57,7 +57,7 @@ pub struct VecDeque<T> {
|
||||
tail: usize,
|
||||
head: usize,
|
||||
cap: usize,
|
||||
ptr: *mut T
|
||||
ptr: Unique<T>,
|
||||
}
|
||||
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
@ -80,7 +80,7 @@ impl<T> Drop for VecDeque<T> {
|
||||
self.clear();
|
||||
unsafe {
|
||||
if mem::size_of::<T>() != 0 {
|
||||
heap::deallocate(self.ptr as *mut u8,
|
||||
heap::deallocate(*self.ptr as *mut u8,
|
||||
self.cap * mem::size_of::<T>(),
|
||||
mem::min_align_of::<T>())
|
||||
}
|
||||
@ -98,13 +98,13 @@ impl<T> VecDeque<T> {
|
||||
/// Turn ptr into a slice
|
||||
#[inline]
|
||||
unsafe fn buffer_as_slice(&self) -> &[T] {
|
||||
mem::transmute(RawSlice { data: self.ptr, len: self.cap })
|
||||
mem::transmute(RawSlice { data: *self.ptr as *const T, len: self.cap })
|
||||
}
|
||||
|
||||
/// Turn ptr into a mut slice
|
||||
#[inline]
|
||||
unsafe fn buffer_as_mut_slice(&mut self) -> &mut [T] {
|
||||
mem::transmute(RawSlice { data: self.ptr, len: self.cap })
|
||||
mem::transmute(RawSlice { data: *self.ptr as *const T, len: self.cap })
|
||||
}
|
||||
|
||||
/// Moves an element out of the buffer
|
||||
@ -171,21 +171,21 @@ impl<T> VecDeque<T> {
|
||||
let size = cap.checked_mul(mem::size_of::<T>())
|
||||
.expect("capacity overflow");
|
||||
|
||||
let ptr = if mem::size_of::<T>() != 0 {
|
||||
unsafe {
|
||||
let ptr = unsafe {
|
||||
if mem::size_of::<T>() != 0 {
|
||||
let ptr = heap::allocate(size, mem::min_align_of::<T>()) as *mut T;;
|
||||
if ptr.is_null() { ::alloc::oom() }
|
||||
ptr
|
||||
Unique::new(ptr)
|
||||
} else {
|
||||
Unique::new(heap::EMPTY as *mut T)
|
||||
}
|
||||
} else {
|
||||
heap::EMPTY as *mut T
|
||||
};
|
||||
|
||||
VecDeque {
|
||||
tail: 0,
|
||||
head: 0,
|
||||
cap: cap,
|
||||
ptr: ptr
|
||||
ptr: ptr,
|
||||
}
|
||||
}
|
||||
|
||||
@ -341,11 +341,12 @@ impl<T> VecDeque<T> {
|
||||
let new = count.checked_mul(mem::size_of::<T>())
|
||||
.expect("capacity overflow");
|
||||
unsafe {
|
||||
self.ptr = heap::reallocate(self.ptr as *mut u8,
|
||||
old,
|
||||
new,
|
||||
mem::min_align_of::<T>()) as *mut T;
|
||||
if self.ptr.is_null() { ::alloc::oom() }
|
||||
let ptr = heap::reallocate(*self.ptr as *mut u8,
|
||||
old,
|
||||
new,
|
||||
mem::min_align_of::<T>()) as *mut T;
|
||||
if ptr.is_null() { ::alloc::oom() }
|
||||
self.ptr = Unique::new(ptr);
|
||||
}
|
||||
}
|
||||
|
||||
@ -459,11 +460,12 @@ impl<T> VecDeque<T> {
|
||||
let old = self.cap * mem::size_of::<T>();
|
||||
let new_size = target_cap * mem::size_of::<T>();
|
||||
unsafe {
|
||||
self.ptr = heap::reallocate(self.ptr as *mut u8,
|
||||
old,
|
||||
new_size,
|
||||
mem::min_align_of::<T>()) as *mut T;
|
||||
if self.ptr.is_null() { ::alloc::oom() }
|
||||
let ptr = heap::reallocate(*self.ptr as *mut u8,
|
||||
old,
|
||||
new_size,
|
||||
mem::min_align_of::<T>()) as *mut T;
|
||||
if ptr.is_null() { ::alloc::oom() }
|
||||
self.ptr = Unique::new(ptr);
|
||||
}
|
||||
}
|
||||
self.cap = target_cap;
|
||||
@ -545,8 +547,8 @@ impl<T> VecDeque<T> {
|
||||
tail: self.tail,
|
||||
head: self.head,
|
||||
cap: self.cap,
|
||||
ptr: self.ptr,
|
||||
marker: marker::ContravariantLifetime,
|
||||
ptr: *self.ptr,
|
||||
marker: marker::PhantomData,
|
||||
}
|
||||
}
|
||||
|
||||
@ -1342,7 +1344,7 @@ impl<T> VecDeque<T> {
|
||||
// `at` lies in the first half.
|
||||
let amount_in_first = first_len - at;
|
||||
|
||||
ptr::copy_nonoverlapping_memory(other.ptr,
|
||||
ptr::copy_nonoverlapping_memory(*other.ptr,
|
||||
first_half.as_ptr().offset(at as isize),
|
||||
amount_in_first);
|
||||
|
||||
@ -1355,7 +1357,7 @@ impl<T> VecDeque<T> {
|
||||
// in the first half.
|
||||
let offset = at - first_len;
|
||||
let amount_in_second = second_len - offset;
|
||||
ptr::copy_nonoverlapping_memory(other.ptr,
|
||||
ptr::copy_nonoverlapping_memory(*other.ptr,
|
||||
second_half.as_ptr().offset(offset as isize),
|
||||
amount_in_second);
|
||||
}
|
||||
@ -1524,7 +1526,7 @@ pub struct IterMut<'a, T:'a> {
|
||||
tail: usize,
|
||||
head: usize,
|
||||
cap: usize,
|
||||
marker: marker::ContravariantLifetime<'a>,
|
||||
marker: marker::PhantomData<&'a mut T>,
|
||||
}
|
||||
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
@ -2004,9 +2006,9 @@ mod tests {
|
||||
|
||||
#[derive(Clone, PartialEq, Debug)]
|
||||
enum Taggypar<T> {
|
||||
Onepar(i32),
|
||||
Twopar(i32, i32),
|
||||
Threepar(i32, i32, i32),
|
||||
Onepar(T),
|
||||
Twopar(T, T),
|
||||
Threepar(T, T, T),
|
||||
}
|
||||
|
||||
#[derive(Clone, PartialEq, Debug)]
|
||||
|
@ -876,7 +876,7 @@ pub struct IntoIter<V> {
|
||||
}
|
||||
|
||||
#[unstable(feature = "collections")]
|
||||
pub struct Drain<'a, V> {
|
||||
pub struct Drain<'a, V:'a> {
|
||||
iter: FilterMap<
|
||||
Enumerate<vec::Drain<'a, Option<V>>>,
|
||||
fn((usize, Option<V>)) -> Option<(usize, V)>>
|
||||
|
@@ -76,6 +76,7 @@ use marker::Sync;

 use intrinsics;
 use cell::UnsafeCell;
+use marker::PhantomData;

 /// A boolean type which can be safely shared between threads.
 #[stable(feature = "rust1", since = "1.0.0")]
@@ -105,6 +106,7 @@ unsafe impl Sync for AtomicUsize {}
 #[stable(feature = "rust1", since = "1.0.0")]
 pub struct AtomicPtr<T> {
     p: UnsafeCell<usize>,
+    _marker: PhantomData<*mut T>,
 }

 unsafe impl<T> Sync for AtomicPtr<T> {}
@@ -791,7 +793,8 @@ impl<T> AtomicPtr<T> {
    #[inline]
    #[stable(feature = "rust1", since = "1.0.0")]
    pub fn new(p: *mut T) -> AtomicPtr<T> {
-       AtomicPtr { p: UnsafeCell::new(p as usize) }
+       AtomicPtr { p: UnsafeCell::new(p as usize),
+                   _marker: PhantomData }
    }

    /// Loads a value from the pointer.
@ -16,7 +16,7 @@ use any;
|
||||
use cell::{Cell, RefCell, Ref, RefMut, BorrowState};
|
||||
use char::CharExt;
|
||||
use iter::{Iterator, IteratorExt};
|
||||
use marker::{Copy, Sized};
|
||||
use marker::{Copy, PhantomData, Sized};
|
||||
use mem;
|
||||
use option::Option;
|
||||
use option::Option::{Some, None};
|
||||
@ -914,6 +914,11 @@ impl Debug for () {
|
||||
f.pad("()")
|
||||
}
|
||||
}
|
||||
impl<T> Debug for PhantomData<T> {
|
||||
fn fmt(&self, f: &mut Formatter) -> Result {
|
||||
f.pad("PhantomData")
|
||||
}
|
||||
}
|
||||
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
impl<T: Copy + Debug> Debug for Cell<T> {
|
||||
|
@ -62,6 +62,7 @@ use clone::Clone;
|
||||
use cmp;
|
||||
use cmp::Ord;
|
||||
use default::Default;
|
||||
use marker;
|
||||
use mem;
|
||||
use num::{ToPrimitive, Int};
|
||||
use ops::{Add, Deref, FnMut};
|
||||
@ -947,7 +948,7 @@ pub trait IteratorExt: Iterator + Sized {
|
||||
FromB: Default + Extend<B>,
|
||||
Self: Iterator<Item=(A, B)>,
|
||||
{
|
||||
struct SizeHint<A>(usize, Option<usize>);
|
||||
struct SizeHint<A>(usize, Option<usize>, marker::PhantomData<A>);
|
||||
impl<A> Iterator for SizeHint<A> {
|
||||
type Item = A;
|
||||
|
||||
@ -961,8 +962,8 @@ pub trait IteratorExt: Iterator + Sized {
|
||||
let mut ts: FromA = Default::default();
|
||||
let mut us: FromB = Default::default();
|
||||
|
||||
ts.extend(SizeHint(lo, hi));
|
||||
us.extend(SizeHint(lo, hi));
|
||||
ts.extend(SizeHint(lo, hi, marker::PhantomData));
|
||||
us.extend(SizeHint(lo, hi, marker::PhantomData));
|
||||
|
||||
for (t, u) in self {
|
||||
ts.extend(Some(t).into_iter());
|
||||
@ -2064,8 +2065,8 @@ pub struct Scan<I, St, F> {
|
||||
}
|
||||
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
impl<B, I: Iterator, St, F> Iterator for Scan<I, St, F> where
|
||||
F: FnMut(&mut St, I::Item) -> Option<B>,
|
||||
impl<A, B, I: Iterator<Item=A>, St, F> Iterator for Scan<I, St, F> where
|
||||
F: FnMut(&mut St, A) -> Option<B>,
|
||||
{
|
||||
type Item = B;
|
||||
|
||||
|
@ -26,6 +26,10 @@
|
||||
#![stable(feature = "rust1", since = "1.0.0")]
|
||||
|
||||
use clone::Clone;
|
||||
use cmp;
|
||||
use option::Option;
|
||||
use hash::Hash;
|
||||
use hash::Hasher;
|
||||
|
||||
/// Types able to be transferred across thread boundaries.
|
||||
#[unstable(feature = "core",
|
||||
@ -37,12 +41,11 @@ pub unsafe trait Send: 'static {
|
||||
// empty.
|
||||
}
|
||||
/// Types able to be transferred across thread boundaries.
|
||||
#[unstable(feature = "core",
|
||||
reason = "will be overhauled with new lifetime rules; see RFC 458")]
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
#[lang="send"]
|
||||
#[rustc_on_unimplemented = "`{Self}` cannot be sent between threads safely"]
|
||||
#[cfg(not(stage0))]
|
||||
pub unsafe trait Send {
|
||||
pub unsafe trait Send : MarkerTrait {
|
||||
// empty.
|
||||
}
|
||||
|
||||
@ -50,7 +53,7 @@ pub unsafe trait Send {
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
#[lang="sized"]
|
||||
#[rustc_on_unimplemented = "`{Self}` does not have a constant size known at compile-time"]
|
||||
pub trait Sized {
|
||||
pub trait Sized : MarkerTrait {
|
||||
// Empty.
|
||||
}
|
||||
|
||||
@ -155,7 +158,7 @@ pub trait Sized {
|
||||
/// change: that second example would fail to compile if we made `Foo` non-`Copy`.
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
#[lang="copy"]
|
||||
pub trait Copy {
|
||||
pub trait Copy : MarkerTrait {
|
||||
// Empty.
|
||||
}
|
||||
|
||||
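The hunks above add `MarkerTrait` as a supertrait of `Send`, `Sized`, and `Copy` (and of `Sync` just below) because, under this PR, a trait whose `Self` appears in none of its methods is an error; `MarkerTrait` (sugar for `PhantomFn<Self>`) supplies a phantom "use" of `Self`. That machinery was removed from Rust not long after this commit, so the sketch below shows the modern form of the same idea, where an empty marker trait is accepted as-is; the `Even` trait is the illustrative example from the docs added later in this diff, not a real library item:

```
// A methodless "category" trait: it contributes no behaviour, only a bound.
trait Even {}

impl Even for u32 {}
impl Even for u64 {}

// The marker can then gate an API without adding any methods.
fn only_even_types<T: Even + std::fmt::Debug>(value: T) {
    println!("{value:?} belongs to an Even type");
}

fn main() {
    only_even_types(4u32);
    only_even_types(8u64);
}
```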
@ -204,220 +207,13 @@ pub trait Copy {
|
||||
/// around the value(s) which can be mutated when behind a `&`
|
||||
/// reference; not doing this is undefined behaviour (for example,
|
||||
/// `transmute`-ing from `&T` to `&mut T` is illegal).
|
||||
#[unstable(feature = "core",
|
||||
reason = "will be overhauled with new lifetime rules; see RFC 458")]
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
#[lang="sync"]
|
||||
#[rustc_on_unimplemented = "`{Self}` cannot be shared between threads safely"]
|
||||
pub unsafe trait Sync {
|
||||
pub unsafe trait Sync : MarkerTrait {
|
||||
// Empty
|
||||
}
|
||||
|
||||
/// A marker type that indicates to the compiler that the instances
|
||||
/// of the type itself owns instances of the type parameter `T`.
|
||||
///
|
||||
/// This is used to indicate that one or more instances of the type
|
||||
/// `T` could be dropped when instances of the type itself is dropped,
|
||||
/// though that may not be apparent from the other structure of the
|
||||
/// type itself. For example, the type may hold a `*mut T`, which the
|
||||
/// compiler does not automatically treat as owned.
|
||||
#[unstable(feature = "core",
|
||||
reason = "Newly added to deal with scoping and destructor changes")]
|
||||
#[lang="phantom_data"]
|
||||
#[derive(PartialEq, Eq, PartialOrd, Ord)]
|
||||
pub struct PhantomData<T: ?Sized>;
|
||||
|
||||
impl<T: ?Sized> Copy for PhantomData<T> {}
|
||||
impl<T: ?Sized> Clone for PhantomData<T> {
|
||||
fn clone(&self) -> PhantomData<T> { *self }
|
||||
}
|
||||
|
||||
/// A marker type whose type parameter `T` is considered to be
|
||||
/// covariant with respect to the type itself. This is (typically)
|
||||
/// used to indicate that an instance of the type `T` is being stored
|
||||
/// into memory and read from, even though that may not be apparent.
|
||||
///
|
||||
/// For more information about variance, refer to this Wikipedia
|
||||
/// article <http://en.wikipedia.org/wiki/Variance_%28computer_science%29>.
|
||||
///
|
||||
/// *Note:* It is very unusual to have to add a covariant constraint.
|
||||
/// If you are not sure, you probably want to use `InvariantType`.
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// Given a struct `S` that includes a type parameter `T`
|
||||
/// but does not actually *reference* that type parameter:
|
||||
///
|
||||
/// ```ignore
|
||||
/// use std::mem;
|
||||
///
|
||||
/// struct S<T> { x: *() }
|
||||
/// fn get<T>(s: &S<T>) -> T {
|
||||
/// unsafe {
|
||||
/// let x: *T = mem::transmute(s.x);
|
||||
/// *x
|
||||
/// }
|
||||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// The type system would currently infer that the value of
|
||||
/// the type parameter `T` is irrelevant, and hence a `S<int>` is
|
||||
/// a subtype of `S<Box<int>>` (or, for that matter, `S<U>` for
|
||||
/// any `U`). But this is incorrect because `get()` converts the
|
||||
/// `*()` into a `*T` and reads from it. Therefore, we should include the
|
||||
/// a marker field `CovariantType<T>` to inform the type checker that
|
||||
/// `S<T>` is a subtype of `S<U>` if `T` is a subtype of `U`
|
||||
/// (for example, `S<&'static int>` is a subtype of `S<&'a int>`
|
||||
/// for some lifetime `'a`, but not the other way around).
|
||||
#[unstable(feature = "core",
|
||||
reason = "likely to change with new variance strategy")]
|
||||
#[lang="covariant_type"]
|
||||
#[derive(PartialEq, Eq, PartialOrd, Ord)]
|
||||
pub struct CovariantType<T: ?Sized>;
|
||||
|
||||
impl<T: ?Sized> Copy for CovariantType<T> {}
|
||||
impl<T: ?Sized> Clone for CovariantType<T> {
|
||||
fn clone(&self) -> CovariantType<T> { *self }
|
||||
}
|
||||
|
||||
/// A marker type whose type parameter `T` is considered to be
|
||||
/// contravariant with respect to the type itself. This is (typically)
|
||||
/// used to indicate that an instance of the type `T` will be consumed
|
||||
/// (but not read from), even though that may not be apparent.
|
||||
///
|
||||
/// For more information about variance, refer to this Wikipedia
|
||||
/// article <http://en.wikipedia.org/wiki/Variance_%28computer_science%29>.
|
||||
///
|
||||
/// *Note:* It is very unusual to have to add a contravariant constraint.
|
||||
/// If you are not sure, you probably want to use `InvariantType`.
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// Given a struct `S` that includes a type parameter `T`
|
||||
/// but does not actually *reference* that type parameter:
|
||||
///
|
||||
/// ```
|
||||
/// use std::mem;
|
||||
///
|
||||
/// struct S<T> { x: *const () }
|
||||
/// fn get<T>(s: &S<T>, v: T) {
|
||||
/// unsafe {
|
||||
/// let x: fn(T) = mem::transmute(s.x);
|
||||
/// x(v)
|
||||
/// }
|
||||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// The type system would currently infer that the value of
|
||||
/// the type parameter `T` is irrelevant, and hence a `S<int>` is
|
||||
/// a subtype of `S<Box<int>>` (or, for that matter, `S<U>` for
|
||||
/// any `U`). But this is incorrect because `get()` converts the
|
||||
/// `*()` into a `fn(T)` and then passes a value of type `T` to it.
|
||||
///
|
||||
/// Supplying a `ContravariantType` marker would correct the
|
||||
/// problem, because it would mark `S` so that `S<T>` is only a
|
||||
/// subtype of `S<U>` if `U` is a subtype of `T`; given that the
|
||||
/// function requires arguments of type `T`, it must also accept
|
||||
/// arguments of type `U`, hence such a conversion is safe.
|
||||
#[unstable(feature = "core",
|
||||
reason = "likely to change with new variance strategy")]
|
||||
#[lang="contravariant_type"]
|
||||
#[derive(PartialEq, Eq, PartialOrd, Ord)]
|
||||
pub struct ContravariantType<T: ?Sized>;
|
||||
|
||||
impl<T: ?Sized> Copy for ContravariantType<T> {}
|
||||
impl<T: ?Sized> Clone for ContravariantType<T> {
|
||||
fn clone(&self) -> ContravariantType<T> { *self }
|
||||
}
|
||||
|
||||
/// A marker type whose type parameter `T` is considered to be
|
||||
/// invariant with respect to the type itself. This is (typically)
|
||||
/// used to indicate that instances of the type `T` may be read or
|
||||
/// written, even though that may not be apparent.
|
||||
///
|
||||
/// For more information about variance, refer to this Wikipedia
|
||||
/// article <http://en.wikipedia.org/wiki/Variance_%28computer_science%29>.
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// The Cell type is an example of an `InvariantType` which uses unsafe
|
||||
/// code to achieve "interior" mutability:
|
||||
///
|
||||
/// ```
|
||||
/// struct Cell<T> { value: T }
|
||||
/// ```
|
||||
///
|
||||
/// The type system would infer that `value` is only read here
|
||||
/// and never written, but in fact `Cell` uses unsafe code to achieve
|
||||
/// interior mutability. In order to get correct behavior, the
|
||||
/// `InvariantType` marker must be applied.
|
||||
#[unstable(feature = "core",
|
||||
reason = "likely to change with new variance strategy")]
|
||||
#[lang="invariant_type"]
|
||||
#[derive(PartialEq, Eq, PartialOrd, Ord)]
|
||||
pub struct InvariantType<T: ?Sized>;
|
||||
|
||||
#[unstable(feature = "core",
|
||||
reason = "likely to change with new variance strategy")]
|
||||
impl<T: ?Sized> Copy for InvariantType<T> {}
|
||||
#[unstable(feature = "core",
|
||||
reason = "likely to change with new variance strategy")]
|
||||
impl<T: ?Sized> Clone for InvariantType<T> {
|
||||
fn clone(&self) -> InvariantType<T> { *self }
|
||||
}
|
||||
|
||||
/// As `CovariantType`, but for lifetime parameters. Using
|
||||
/// `CovariantLifetime<'a>` indicates that it is ok to substitute
|
||||
/// a *longer* lifetime for `'a` than the one you originally
|
||||
/// started with (e.g., you could convert any lifetime `'foo` to
|
||||
/// `'static`). You almost certainly want `ContravariantLifetime`
|
||||
/// instead, or possibly `InvariantLifetime`. The only case where
|
||||
/// it would be appropriate is that you have a (type-casted, and
|
||||
/// hence hidden from the type system) function pointer with a
|
||||
/// signature like `fn(&'a T)` (and no other uses of `'a`). In
|
||||
/// this case, it is ok to substitute a larger lifetime for `'a`
|
||||
/// (e.g., `fn(&'static T)`), because the function is only
|
||||
/// becoming more selective in terms of what it accepts as
|
||||
/// argument.
|
||||
///
|
||||
/// For more information about variance, refer to this Wikipedia
|
||||
/// article <http://en.wikipedia.org/wiki/Variance_%28computer_science%29>.
|
||||
#[unstable(feature = "core",
|
||||
reason = "likely to change with new variance strategy")]
|
||||
#[lang="covariant_lifetime"]
|
||||
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
|
||||
pub struct CovariantLifetime<'a>;
|
||||
|
||||
/// As `ContravariantType`, but for lifetime parameters. Using
|
||||
/// `ContravariantLifetime<'a>` indicates that it is ok to
|
||||
/// substitute a *shorter* lifetime for `'a` than the one you
|
||||
/// originally started with (e.g., you could convert `'static` to
|
||||
/// any lifetime `'foo`). This is appropriate for cases where you
|
||||
/// have an unsafe pointer that is actually a pointer into some
|
||||
/// memory with lifetime `'a`, and thus you want to limit the
|
||||
/// lifetime of your data structure to `'a`. An example of where
|
||||
/// this is used is the iterator for vectors.
|
||||
///
|
||||
/// For more information about variance, refer to this Wikipedia
|
||||
/// article <http://en.wikipedia.org/wiki/Variance_%28computer_science%29>.
|
||||
#[unstable(feature = "core",
|
||||
reason = "likely to change with new variance strategy")]
|
||||
#[lang="contravariant_lifetime"]
|
||||
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
|
||||
pub struct ContravariantLifetime<'a>;
|
||||
|
||||
/// As `InvariantType`, but for lifetime parameters. Using
|
||||
/// `InvariantLifetime<'a>` indicates that it is not ok to
|
||||
/// substitute any other lifetime for `'a` besides its original
|
||||
/// value. This is appropriate for cases where you have an unsafe
|
||||
/// pointer that is actually a pointer into memory with lifetime `'a`,
|
||||
/// and this pointer is itself stored in an inherently mutable
|
||||
/// location (such as a `Cell`).
|
||||
#[unstable(feature = "core",
|
||||
reason = "likely to change with new variance strategy")]
|
||||
#[lang="invariant_lifetime"]
|
||||
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
|
||||
pub struct InvariantLifetime<'a>;
|
||||
|
||||
/// A type which is considered "not POD", meaning that it is not
|
||||
/// implicitly copyable. This is typically embedded in other types to
|
||||
/// ensure that they are never copied, even if they lack a destructor.
|
||||
@ -435,6 +231,149 @@ pub struct NoCopy;
|
||||
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord)]
|
||||
pub struct Managed;
|
||||
|
||||
macro_rules! impls{
|
||||
($t: ident) => (
|
||||
impl<T:?Sized, S: Hasher> Hash<S> for $t<T> {
|
||||
#[inline]
|
||||
fn hash(&self, _: &mut S) {
|
||||
}
|
||||
}
|
||||
|
||||
impl<T:?Sized> cmp::PartialEq for $t<T> {
|
||||
fn eq(&self, _other: &$t<T>) -> bool {
|
||||
true
|
||||
}
|
||||
}
|
||||
|
||||
impl<T:?Sized> cmp::Eq for $t<T> {
|
||||
}
|
||||
|
||||
impl<T:?Sized> cmp::PartialOrd for $t<T> {
|
||||
fn partial_cmp(&self, _other: &$t<T>) -> Option<cmp::Ordering> {
|
||||
Option::Some(cmp::Ordering::Equal)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T:?Sized> cmp::Ord for $t<T> {
|
||||
fn cmp(&self, _other: &$t<T>) -> cmp::Ordering {
|
||||
cmp::Ordering::Equal
|
||||
}
|
||||
}
|
||||
|
||||
impl<T:?Sized> Copy for $t<T> { }
|
||||
|
||||
impl<T:?Sized> Clone for $t<T> {
|
||||
fn clone(&self) -> $t<T> {
|
||||
$t
|
||||
}
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
/// `MarkerTrait` is intended to be used as the supertrait for traits
|
||||
/// that don't have any methods but instead serve just to designate
|
||||
/// categories of types. An example would be the `Send` trait, which
|
||||
/// indicates types that are sendable: `Send` does not itself offer
|
||||
/// any methods, but instead is used to gate access to data.
|
||||
///
|
||||
/// FIXME. Better documentation needed here!
|
||||
pub trait MarkerTrait : PhantomFn<Self> { }
|
||||
impl<T:?Sized> MarkerTrait for T { }
|
||||
|
||||
/// `PhantomFn` is a marker trait for use with traits that contain
|
||||
/// type or lifetime parameters that do not appear in any of their
|
||||
/// methods. In that case, you can either remove those parameters, or
|
||||
/// add a `PhantomFn` supertrait that reflects the signature of
|
||||
/// methods that compiler should "pretend" exists. This most commonly
|
||||
/// occurs for traits with no methods: in that particular case, you
|
||||
/// can extend `MarkerTrait`, which is equivalent to
|
||||
/// `PhantomFn<Self>`.
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// As an example, consider a trait with no methods like `Even`, meant
|
||||
/// to represent types that are "even":
|
||||
///
|
||||
/// ```rust
|
||||
/// trait Even { }
|
||||
/// ```
|
||||
///
|
||||
/// In this case, because the implicit parameter `Self` is unused, the
|
||||
/// compiler will issue an error. The only purpose of this trait is to
|
||||
/// categorize types (and hence instances of those types) as "even" or
|
||||
/// not, so if we *were* going to have a method, it might look like:
|
||||
///
|
||||
/// ```rust
|
||||
/// trait Even {
|
||||
/// fn is_even(self) -> bool { true }
|
||||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// Therefore, we can model a method like this as follows:
|
||||
///
|
||||
/// ```rust
|
||||
/// use std::marker::PhantomFn
|
||||
/// trait Even : PhantomFn<Self> { }
|
||||
/// ```
|
||||
///
|
||||
/// Another equivalent, but clearer, option would be to use
|
||||
/// `MarkerTrait`:
|
||||
///
|
||||
/// ```rust
|
||||
/// use std::marker::MarkerTrait;
|
||||
/// trait Even : MarkerTrait { }
|
||||
/// ```
|
||||
///
|
||||
/// # Parameters
|
||||
///
|
||||
/// - `A` represents the type of the method's argument. You can use a
|
||||
/// tuple to represent "multiple" arguments. Any types appearing here
|
||||
/// will be considered "contravariant".
|
||||
/// - `R`, if supplied, represents the method's return type. This defaults
|
||||
/// to `()` as it is rarely needed.
|
||||
///
|
||||
/// # Additional reading
|
||||
///
|
||||
/// More details and background can be found in [RFC 738][738].
|
||||
///
|
||||
/// [738]: https://github.com/rust-lang/rfcs/blob/master/text/0738-variance.md
|
||||
#[lang="phantom_fn"]
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
pub trait PhantomFn<A:?Sized,R:?Sized=()> { }
|
||||
|
||||
#[cfg(stage0)] // built into the trait matching system after stage0
|
||||
impl<A:?Sized, R:?Sized, U:?Sized> PhantomFn<A,R> for U { }
|
||||
|
||||
/// Specific to stage0. You should not be seeing these docs!
|
||||
#[cfg(stage0)]
|
||||
#[lang="covariant_type"] // only relevant to stage0
|
||||
pub struct PhantomData<T:?Sized>;
|
||||
|
||||
/// `PhantomData` is a way to tell the compiler about fake fields.
|
||||
/// Phantom data is required whenever type parameters are not used.
|
||||
/// The idea is that if the compiler encounters a `PhantomData<T>`
|
||||
/// instance, it will behave *as if* an instance of the type `T` were
|
||||
/// present for the purpose of various automatic analyses.
|
||||
///
|
||||
/// For example, embedding a `PhantomData<T>` will inform the compiler
|
||||
/// that one or more instances of the type `T` could be dropped when
|
||||
/// instances of the type itself is dropped, though that may not be
|
||||
/// apparent from the other structure of the type itself. This is
|
||||
/// commonly necessary if the structure is using an unsafe pointer
|
||||
/// like `*mut T` whose referent may be dropped when the type is
|
||||
/// dropped, as a `*mut T` is otherwise not treated as owned.
|
||||
///
|
||||
/// FIXME. Better documentation and examples of common patterns needed
|
||||
/// here! For now, please see [RFC 738][738] for more information.
|
||||
///
|
||||
/// [738]: https://github.com/rust-lang/rfcs/blob/master/text/0738-variance.md
|
||||
#[cfg(not(stage0))]
|
||||
#[lang="phantom_data"]
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
pub struct PhantomData<T:?Sized>;
|
||||
|
||||
impls! { PhantomData }
|
||||
|
||||
#[cfg(not(stage0))]
|
||||
mod impls {
|
||||
use super::{Send, Sync, Sized};
|
||||
@ -442,3 +381,40 @@ mod impls {
|
||||
unsafe impl<'a, T: Sync + ?Sized> Send for &'a T {}
|
||||
unsafe impl<'a, T: Send + ?Sized> Send for &'a mut T {}
|
||||
}
|
||||
|
||||
/// Old-style marker trait. Deprecated.
|
||||
#[unstable(feature = "core", reason = "deprecated")]
|
||||
#[deprecated(since = "1.0.0", reason = "Replace with `PhantomData<&'a ()>`")]
|
||||
#[lang="contravariant_lifetime"]
|
||||
pub struct ContravariantLifetime<'a>;
|
||||
|
||||
/// Old-style marker trait. Deprecated.
|
||||
#[unstable(feature = "core", reason = "deprecated")]
|
||||
#[deprecated(since = "1.0.0", reason = "Replace with `PhantomData<fn(&'a ())>`")]
|
||||
#[lang="covariant_lifetime"]
|
||||
pub struct CovariantLifetime<'a>;
|
||||
|
||||
/// Old-style marker trait. Deprecated.
|
||||
#[unstable(feature = "core", reason = "deprecated")]
|
||||
#[deprecated(since = "1.0.0", reason = "Replace with `PhantomData<Cell<&'a ()>>`")]
|
||||
#[lang="invariant_lifetime"]
|
||||
pub struct InvariantLifetime<'a>;
|
||||
|
||||
/// Old-style marker trait. Deprecated.
|
||||
#[unstable(feature = "core", reason = "deprecated")]
|
||||
#[deprecated(since = "1.0.0", reason = "Replace with `PhantomData<fn(T)>`")]
|
||||
#[lang="contravariant_type"]
|
||||
pub struct ContravariantType<T>;
|
||||
|
||||
/// Old-style marker trait. Deprecated.
|
||||
#[unstable(feature = "core", reason = "deprecated")]
|
||||
#[deprecated(since = "1.0.0", reason = "Replace with `PhantomData<T>`")]
|
||||
#[lang="covariant_type"]
|
||||
#[cfg(not(stage0))]
|
||||
pub struct CovariantType<T>;
|
||||
|
||||
/// Old-style marker trait. Deprecated.
|
||||
#[unstable(feature = "core", reason = "deprecated")]
|
||||
#[deprecated(since = "1.0.0", reason = "Replace with `PhantomData<Cell<T>>`")]
|
||||
#[lang="invariant_type"]
|
||||
pub struct InvariantType<T>;
|
||||
|
@ -10,15 +10,14 @@
|
||||
|
||||
//! Exposes the NonZero lang item which provides optimization hints.
|
||||
|
||||
use marker::{Sized, MarkerTrait};
|
||||
use ops::Deref;
|
||||
use ptr::Unique;
|
||||
|
||||
/// Unsafe trait to indicate what types are usable with the NonZero struct
|
||||
pub unsafe trait Zeroable {}
|
||||
pub unsafe trait Zeroable : MarkerTrait {}
|
||||
|
||||
unsafe impl<T> Zeroable for *const T {}
|
||||
unsafe impl<T> Zeroable for *mut T {}
|
||||
unsafe impl<T> Zeroable for Unique<T> { }
|
||||
unsafe impl<T:?Sized> Zeroable for *const T {}
|
||||
unsafe impl<T:?Sized> Zeroable for *mut T {}
|
||||
unsafe impl Zeroable for isize {}
|
||||
unsafe impl Zeroable for usize {}
|
||||
unsafe impl Zeroable for i8 {}
|
||||
|
@ -91,8 +91,10 @@
|
||||
use mem;
|
||||
use clone::Clone;
|
||||
use intrinsics;
|
||||
use ops::Deref;
|
||||
use option::Option::{self, Some, None};
|
||||
use marker::{self, Send, Sized, Sync};
|
||||
use marker::{PhantomData, Send, Sized, Sync};
|
||||
use nonzero::NonZero;
|
||||
|
||||
use cmp::{PartialEq, Eq, Ord, PartialOrd};
|
||||
use cmp::Ordering::{self, Less, Equal, Greater};
|
||||
@ -517,15 +519,16 @@ impl<T> PartialOrd for *mut T {
|
||||
|
||||
/// A wrapper around a raw `*mut T` that indicates that the possessor
|
||||
/// of this wrapper owns the referent. This in turn implies that the
|
||||
/// `Unique<T>` is `Send`/`Sync` if `T` is `Send`/`Sync`, unlike a
|
||||
/// raw `*mut T` (which conveys no particular ownership semantics).
|
||||
/// Useful for building abstractions like `Vec<T>` or `Box<T>`, which
|
||||
/// `Unique<T>` is `Send`/`Sync` if `T` is `Send`/`Sync`, unlike a raw
|
||||
/// `*mut T` (which conveys no particular ownership semantics). It
|
||||
/// also implies that the referent of the pointer should not be
|
||||
/// modified without a unique path to the `Unique` reference. Useful
|
||||
/// for building abstractions like `Vec<T>` or `Box<T>`, which
|
||||
/// internally use raw pointers to manage the memory that they own.
|
||||
#[unstable(feature = "core", reason = "recently added to this module")]
|
||||
pub struct Unique<T: ?Sized> {
|
||||
/// The wrapped `*mut T`.
|
||||
pub ptr: *mut T,
|
||||
_own: marker::PhantomData<T>,
|
||||
pub struct Unique<T:?Sized> {
|
||||
pointer: NonZero<*const T>,
|
||||
_marker: PhantomData<T>,
|
||||
}
|
||||
|
||||
/// `Unique` pointers are `Send` if `T` is `Send` because the data they
|
||||
@ -542,25 +545,34 @@ unsafe impl<T: Send + ?Sized> Send for Unique<T> { }
|
||||
#[unstable(feature = "core", reason = "recently added to this module")]
|
||||
unsafe impl<T: Sync + ?Sized> Sync for Unique<T> { }
|
||||
|
||||
impl<T> Unique<T> {
|
||||
/// Returns a null Unique.
|
||||
impl<T:?Sized> Unique<T> {
|
||||
/// Create a new `Unique`.
|
||||
#[unstable(feature = "core",
|
||||
reason = "recently added to this module")]
|
||||
pub fn null() -> Unique<T> {
|
||||
Unique(null_mut())
|
||||
pub unsafe fn new(ptr: *mut T) -> Unique<T> {
|
||||
Unique { pointer: NonZero::new(ptr as *const T), _marker: PhantomData }
|
||||
}
|
||||
|
||||
/// Return an (unsafe) pointer into the memory owned by `self`.
|
||||
/// Dereference the content.
|
||||
#[unstable(feature = "core",
|
||||
reason = "recently added to this module")]
|
||||
pub unsafe fn offset(self, offset: isize) -> *mut T {
|
||||
self.ptr.offset(offset)
|
||||
pub unsafe fn get(&self) -> &T {
|
||||
&**self.pointer
|
||||
}
|
||||
|
||||
/// Mutably dereference the content.
|
||||
#[unstable(feature = "core",
|
||||
reason = "recently added to this module")]
|
||||
pub unsafe fn get_mut(&mut self) -> &mut T {
|
||||
&mut ***self
|
||||
}
|
||||
}
|
||||
|
||||
/// Creates a `Unique` wrapped around `ptr`, taking ownership of the
|
||||
/// data referenced by `ptr`.
|
||||
#[allow(non_snake_case)]
|
||||
pub fn Unique<T: ?Sized>(ptr: *mut T) -> Unique<T> {
|
||||
Unique { ptr: ptr, _own: marker::PhantomData }
|
||||
impl<T:?Sized> Deref for Unique<T> {
|
||||
type Target = *mut T;
|
||||
|
||||
#[inline]
|
||||
fn deref<'a>(&'a self) -> &'a *mut T {
|
||||
unsafe { mem::transmute(&*self.pointer) }
|
||||
}
|
||||
}
|
||||
|
@ -140,11 +140,11 @@ impl<T> SliceExt for [T] {
|
||||
if mem::size_of::<T>() == 0 {
|
||||
Iter {ptr: p,
|
||||
end: (p as usize + self.len()) as *const T,
|
||||
marker: marker::ContravariantLifetime::<'a>}
|
||||
_marker: marker::PhantomData}
|
||||
} else {
|
||||
Iter {ptr: p,
|
||||
end: p.offset(self.len() as isize),
|
||||
marker: marker::ContravariantLifetime::<'a>}
|
||||
_marker: marker::PhantomData}
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -279,11 +279,11 @@ impl<T> SliceExt for [T] {
|
||||
if mem::size_of::<T>() == 0 {
|
||||
IterMut {ptr: p,
|
||||
end: (p as usize + self.len()) as *mut T,
|
||||
marker: marker::ContravariantLifetime::<'a>}
|
||||
_marker: marker::PhantomData}
|
||||
} else {
|
||||
IterMut {ptr: p,
|
||||
end: p.offset(self.len() as isize),
|
||||
marker: marker::ContravariantLifetime::<'a>}
|
||||
_marker: marker::PhantomData}
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -733,7 +733,7 @@ macro_rules! make_slice {
|
||||
pub struct Iter<'a, T: 'a> {
|
||||
ptr: *const T,
|
||||
end: *const T,
|
||||
marker: marker::ContravariantLifetime<'a>
|
||||
_marker: marker::PhantomData<&'a T>,
|
||||
}
|
||||
|
||||
#[unstable(feature = "core")]
|
||||
@ -790,7 +790,7 @@ impl<'a, T> ExactSizeIterator for Iter<'a, T> {}
|
||||
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
impl<'a, T> Clone for Iter<'a, T> {
|
||||
fn clone(&self) -> Iter<'a, T> { Iter { ptr: self.ptr, end: self.end, marker: self.marker } }
|
||||
fn clone(&self) -> Iter<'a, T> { Iter { ptr: self.ptr, end: self.end, _marker: self._marker } }
|
||||
}
|
||||
|
||||
#[unstable(feature = "core", reason = "trait is experimental")]
|
||||
@ -823,7 +823,7 @@ impl<'a, T> RandomAccessIterator for Iter<'a, T> {
|
||||
pub struct IterMut<'a, T: 'a> {
|
||||
ptr: *mut T,
|
||||
end: *mut T,
|
||||
marker: marker::ContravariantLifetime<'a>,
|
||||
_marker: marker::PhantomData<&'a mut T>,
|
||||
}
|
||||
|
||||
|
||||
|
@ -92,7 +92,7 @@ fn test_transmute_copy() {
|
||||
|
||||
#[test]
|
||||
fn test_transmute() {
|
||||
trait Foo {}
|
||||
trait Foo { fn dummy(&self) { } }
|
||||
impl Foo for int {}
|
||||
|
||||
let a = box 100 as Box<Foo>;
|
||||
|
@ -171,8 +171,8 @@ fn test_set_memory() {
|
||||
#[test]
|
||||
fn test_unsized_unique() {
|
||||
let xs: &mut [_] = &mut [1, 2, 3];
|
||||
let ptr = Unique(xs as *mut [_]);
|
||||
let ys = unsafe { &mut *ptr.ptr };
|
||||
let ptr = unsafe { Unique::new(xs as *mut [_]) };
|
||||
let ys = unsafe { &mut **ptr };
|
||||
let zs: &mut [_] = &mut [1, 2, 3];
|
||||
assert!(ys == zs);
|
||||
}
|
||||
|
@ -45,13 +45,13 @@ pub struct Bytes {
impl Deref for Bytes {
type Target = [u8];
fn deref(&self) -> &[u8] {
unsafe { slice::from_raw_parts_mut(self.ptr.ptr, self.len) }
unsafe { slice::from_raw_parts(*self.ptr, self.len) }
}
}

impl Drop for Bytes {
fn drop(&mut self) {
unsafe { libc::free(self.ptr.ptr as *mut _); }
unsafe { libc::free(*self.ptr as *mut _); }
}
}

@ -84,7 +84,7 @@ fn deflate_bytes_internal(bytes: &[u8], flags: c_int) -> Option<Bytes> {
&mut outsz,
flags);
if !res.is_null() {
let res = Unique(res as *mut u8);
let res = Unique::new(res as *mut u8);
Some(Bytes { ptr: res, len: outsz as uint })
} else {
None
@ -110,7 +110,7 @@ fn inflate_bytes_internal(bytes: &[u8], flags: c_int) -> Option<Bytes> {
&mut outsz,
flags);
if !res.is_null() {
let res = Unique(res as *mut u8);
let res = Unique::new(res as *mut u8);
Some(Bytes { ptr: res, len: outsz as uint })
} else {
None
@ -21,6 +21,7 @@

use core::prelude::*;
use core::num::{Float, Int};
use core::marker::PhantomData;

use {Rng, Rand};

@ -56,7 +57,13 @@ pub trait IndependentSample<Support>: Sample<Support> {

/// A wrapper for generating types that implement `Rand` via the
/// `Sample` & `IndependentSample` traits.
pub struct RandSample<Sup>;
pub struct RandSample<Sup> { _marker: PhantomData<Sup> }

impl<Sup> RandSample<Sup> {
pub fn new() -> RandSample<Sup> {
RandSample { _marker: PhantomData }
}
}

impl<Sup: Rand> Sample<Sup> for RandSample<Sup> {
fn sample<R: Rng>(&mut self, rng: &mut R) -> Sup { self.ind_sample(rng) }
@ -285,7 +292,7 @@ mod tests {

#[test]
fn test_rand_sample() {
let mut rand_sample = RandSample::<ConstRand>;
let mut rand_sample = RandSample::<ConstRand>::new();

assert_eq!(rand_sample.sample(&mut ::test::rng()), ConstRand(0));
assert_eq!(rand_sample.ind_sample(&mut ::test::rng()), ConstRand(0));
@ -41,6 +41,7 @@ extern crate core;
#[cfg(test)] #[macro_use] extern crate log;

use core::prelude::*;
use core::marker::PhantomData;

pub use isaac::{IsaacRng, Isaac64Rng};
pub use chacha::ChaChaRng;
@ -206,7 +207,7 @@ pub trait Rng : Sized {
/// .collect::<Vec<(f64, bool)>>());
/// ```
fn gen_iter<'a, T: Rand>(&'a mut self) -> Generator<'a, T, Self> {
Generator { rng: self }
Generator { rng: self, _marker: PhantomData }
}

/// Generate a random value in the range [`low`, `high`).
@ -317,6 +318,7 @@ pub trait Rng : Sized {
/// This iterator is created via the `gen_iter` method on `Rng`.
pub struct Generator<'a, T, R:'a> {
rng: &'a mut R,
_marker: PhantomData<T>
}

impl<'a, T: Rand, R: Rng> Iterator for Generator<'a, T, R> {
@ -29,7 +29,6 @@ use middle::ty::{MethodOrigin, MethodParam, MethodTypeParam};
use middle::ty::{MethodStatic, MethodStaticClosure};
use util::ppaux::Repr;

use std::marker;
use syntax::{ast, ast_util};
use syntax::ptr::P;
use syntax::codemap::Span;
@ -128,16 +127,14 @@ pub enum MatchMode {
MovingMatch,
}

#[derive(PartialEq,Debug)]
enum TrackMatchMode<T> {
#[derive(Copy, PartialEq, Debug)]
enum TrackMatchMode {
Unknown,
Definite(MatchMode),
Conflicting,
}

impl<T> marker::Copy for TrackMatchMode<T> {}

impl<T> TrackMatchMode<T> {
impl TrackMatchMode {
// Builds up the whole match mode for a pattern from its constituent
// parts. The lattice looks like this:
//
@ -931,7 +928,7 @@ impl<'d,'t,'tcx,TYPER:mc::Typer<'tcx>> ExprUseVisitor<'d,'t,'tcx,TYPER> {
return true;
}

fn arm_move_mode(&mut self, discr_cmt: mc::cmt<'tcx>, arm: &ast::Arm) -> TrackMatchMode<Span> {
fn arm_move_mode(&mut self, discr_cmt: mc::cmt<'tcx>, arm: &ast::Arm) -> TrackMatchMode {
let mut mode = Unknown;
for pat in &arm.pats {
self.determine_pat_move_mode(discr_cmt.clone(), &**pat, &mut mode);
@ -966,7 +963,7 @@ impl<'d,'t,'tcx,TYPER:mc::Typer<'tcx>> ExprUseVisitor<'d,'t,'tcx,TYPER> {
fn determine_pat_move_mode(&mut self,
cmt_discr: mc::cmt<'tcx>,
pat: &ast::Pat,
mode: &mut TrackMatchMode<Span>) {
mode: &mut TrackMatchMode) {
debug!("determine_pat_move_mode cmt_discr={} pat={}", cmt_discr.repr(self.tcx()),
pat.repr(self.tcx()));
return_if_err!(self.mc.cat_pattern(cmt_discr, pat, |_mc, cmt_pat, pat| {
src/librustc/middle/infer/bivariate.rs (new file, 145 lines)
@ -0,0 +1,145 @@
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.

//! Applies the "bivariance relationship" to two types and/or regions.
//! If (A,B) are bivariant then either A <: B or B <: A. It occurs
//! when type/lifetime parameters are unconstrained. Usually this is
//! an error, but we permit it in the specific case where a type
//! parameter is constrained in a where-clause via an associated type.
//!
//! There are several ways one could implement bivariance. You could
//! just do nothing at all, for example, or you could fully verify
//! that one of the two subtyping relationships hold. We choose to
//! thread a middle line: we relate types up to regions, but ignore
//! all region relationships.
//!
//! At one point, handling bivariance in this fashion was necessary
//! for inference, but I'm actually not sure if that is true anymore.
//! In particular, it might be enough to say (A,B) are bivariant for
//! all (A,B).

use middle::ty::{BuiltinBounds};
use middle::ty::{self, Ty};
use middle::ty::TyVar;
use middle::infer::combine::*;
use middle::infer::{cres};
use middle::infer::type_variable::{BiTo};
use util::ppaux::{Repr};

use syntax::ast::{Unsafety};

pub struct Bivariate<'f, 'tcx: 'f> {
fields: CombineFields<'f, 'tcx>
}

#[allow(non_snake_case)]
pub fn Bivariate<'f, 'tcx>(cf: CombineFields<'f, 'tcx>) -> Bivariate<'f, 'tcx> {
Bivariate { fields: cf }
}

impl<'f, 'tcx> Combine<'tcx> for Bivariate<'f, 'tcx> {
fn tag(&self) -> String { "Bivariate".to_string() }
fn fields<'a>(&'a self) -> &'a CombineFields<'a, 'tcx> { &self.fields }

fn tys_with_variance(&self, v: ty::Variance, a: Ty<'tcx>, b: Ty<'tcx>)
-> cres<'tcx, Ty<'tcx>>
{
match v {
ty::Invariant => self.equate().tys(a, b),
ty::Covariant => self.tys(a, b),
ty::Contravariant => self.tys(a, b),
ty::Bivariant => self.tys(a, b),
}
}

fn regions_with_variance(&self, v: ty::Variance, a: ty::Region, b: ty::Region)
-> cres<'tcx, ty::Region>
{
match v {
ty::Invariant => self.equate().regions(a, b),
ty::Covariant => self.regions(a, b),
ty::Contravariant => self.regions(a, b),
ty::Bivariant => self.regions(a, b),
}
}

fn regions(&self, a: ty::Region, _: ty::Region) -> cres<'tcx, ty::Region> {
Ok(a)
}

fn mts(&self, a: &ty::mt<'tcx>, b: &ty::mt<'tcx>) -> cres<'tcx, ty::mt<'tcx>> {
debug!("mts({} <: {})",
a.repr(self.fields.infcx.tcx),
b.repr(self.fields.infcx.tcx));

if a.mutbl != b.mutbl { return Err(ty::terr_mutability); }
let t = try!(self.tys(a.ty, b.ty));
Ok(ty::mt { mutbl: a.mutbl, ty: t })
}

fn unsafeties(&self, a: Unsafety, b: Unsafety) -> cres<'tcx, Unsafety> {
if a != b {
Err(ty::terr_unsafety_mismatch(expected_found(self, a, b)))
} else {
Ok(a)
}
}

fn builtin_bounds(&self,
a: BuiltinBounds,
b: BuiltinBounds)
-> cres<'tcx, BuiltinBounds>
{
if a != b {
Err(ty::terr_builtin_bounds(expected_found(self, a, b)))
} else {
Ok(a)
}
}

fn tys(&self, a: Ty<'tcx>, b: Ty<'tcx>) -> cres<'tcx, Ty<'tcx>> {
debug!("{}.tys({}, {})", self.tag(),
a.repr(self.fields.infcx.tcx), b.repr(self.fields.infcx.tcx));
if a == b { return Ok(a); }

let infcx = self.fields.infcx;
let a = infcx.type_variables.borrow().replace_if_possible(a);
let b = infcx.type_variables.borrow().replace_if_possible(b);
match (&a.sty, &b.sty) {
(&ty::ty_infer(TyVar(a_id)), &ty::ty_infer(TyVar(b_id))) => {
infcx.type_variables.borrow_mut().relate_vars(a_id, BiTo, b_id);
Ok(a)
}

(&ty::ty_infer(TyVar(a_id)), _) => {
try!(self.fields.instantiate(b, BiTo, a_id));
Ok(a)
}

(_, &ty::ty_infer(TyVar(b_id))) => {
try!(self.fields.instantiate(a, BiTo, b_id));
Ok(a)
}

_ => {
super_tys(self, a, b)
}
}
}

fn binders<T>(&self, a: &ty::Binder<T>, b: &ty::Binder<T>) -> cres<'tcx, ty::Binder<T>>
where T : Combineable<'tcx>
{
let a1 = ty::erase_late_bound_regions(self.tcx(), a);
let b1 = ty::erase_late_bound_regions(self.tcx(), b);
let c = try!(Combineable::combine(self, &a1, &b1));
Ok(ty::Binder(c))
}
}
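For readers outside the compiler, a minimal sketch (not part of this commit) of the situation the bivariance machinery and the new diagnostics target: a type or lifetime parameter that is not used anywhere in a definition ends up bivariant, and the well-formedness pass added further down reports it as "parameter `T` is never used", suggesting a marker such as `PhantomData`. The `Slice` type below is hypothetical and only illustrates the pattern.

```
use std::marker::PhantomData;

// Without the marker field, `'a` and `T` would be unused (bivariant) and the
// new check would reject this definition with "parameter `T` is never used".
struct Slice<'a, T: 'a> {
    ptr: *const T,
    len: usize,
    _marker: PhantomData<&'a T>, // ties `'a` and `T` to the struct
}

fn main() {
    let xs = [1i32, 2, 3];
    let s = Slice { ptr: xs.as_ptr(), len: xs.len(), _marker: PhantomData };
    assert_eq!(s.len, 3);
}
```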
@ -32,6 +32,7 @@
// is also useful to track which value is the "expected" value in
// terms of error reporting.

use super::bivariate::Bivariate;
use super::equate::Equate;
use super::glb::Glb;
use super::lub::Lub;
@ -39,7 +40,7 @@ use super::sub::Sub;
use super::unify::InferCtxtMethodsForSimplyUnifiableTypes;
use super::{InferCtxt, cres};
use super::{MiscVariable, TypeTrace};
use super::type_variable::{RelationDir, EqTo, SubtypeOf, SupertypeOf};
use super::type_variable::{RelationDir, BiTo, EqTo, SubtypeOf, SupertypeOf};

use middle::subst;
use middle::subst::{ErasedRegions, NonerasedRegions, Substs};
@ -48,7 +49,7 @@ use middle::ty::{IntType, UintType};
use middle::ty::{BuiltinBounds};
use middle::ty::{self, Ty};
use middle::ty_fold;
use middle::ty_fold::{TypeFoldable};
use middle::ty_fold::{TypeFolder, TypeFoldable};
use util::ppaux::Repr;

use std::rc::Rc;
@ -58,41 +59,32 @@ use syntax::abi;
use syntax::codemap::Span;

pub trait Combine<'tcx> : Sized {
fn infcx<'a>(&'a self) -> &'a InferCtxt<'a, 'tcx>;
fn tcx<'a>(&'a self) -> &'a ty::ctxt<'tcx> { self.infcx().tcx }
fn tag(&self) -> String;
fn a_is_expected(&self) -> bool;
fn trace(&self) -> TypeTrace<'tcx>;

fn equate<'a>(&'a self) -> Equate<'a, 'tcx>;
fn sub<'a>(&'a self) -> Sub<'a, 'tcx>;
fn lub<'a>(&'a self) -> Lub<'a, 'tcx>;
fn glb<'a>(&'a self) -> Glb<'a, 'tcx>;
fn fields<'a>(&'a self) -> &'a CombineFields<'a, 'tcx>;

fn infcx<'a>(&'a self) -> &'a InferCtxt<'a, 'tcx> { self.fields().infcx }
fn a_is_expected(&self) -> bool { self.fields().a_is_expected }
fn trace(&self) -> TypeTrace<'tcx> { self.fields().trace.clone() }
fn equate<'a>(&'a self) -> Equate<'a, 'tcx> { self.fields().equate() }
fn bivariate<'a>(&'a self) -> Bivariate<'a, 'tcx> { self.fields().bivariate() }

fn sub<'a>(&'a self) -> Sub<'a, 'tcx> { self.fields().sub() }
fn lub<'a>(&'a self) -> Lub<'a, 'tcx> { Lub(self.fields().clone()) }
fn glb<'a>(&'a self) -> Glb<'a, 'tcx> { Glb(self.fields().clone()) }

fn mts(&self, a: &ty::mt<'tcx>, b: &ty::mt<'tcx>) -> cres<'tcx, ty::mt<'tcx>>;
fn contratys(&self, a: Ty<'tcx>, b: Ty<'tcx>) -> cres<'tcx, Ty<'tcx>>;

fn tys_with_variance(&self, variance: ty::Variance, a: Ty<'tcx>, b: Ty<'tcx>)
-> cres<'tcx, Ty<'tcx>>;

fn tys(&self, a: Ty<'tcx>, b: Ty<'tcx>) -> cres<'tcx, Ty<'tcx>>;

fn tps(&self,
_: subst::ParamSpace,
as_: &[Ty<'tcx>],
bs: &[Ty<'tcx>])
-> cres<'tcx, Vec<Ty<'tcx>>> {
// FIXME -- In general, we treat variance a bit wrong
// here. For historical reasons, we treat tps and Self
// as invariant. This is overly conservative.
fn regions_with_variance(&self, variance: ty::Variance, a: ty::Region, b: ty::Region)
-> cres<'tcx, ty::Region>;

if as_.len() != bs.len() {
return Err(ty::terr_ty_param_size(expected_found(self,
as_.len(),
bs.len())));
}

try!(as_.iter().zip(bs.iter())
.map(|(a, b)| self.equate().tys(*a, *b))
.collect::<cres<Vec<Ty>>>());
Ok(as_.to_vec())
}
fn regions(&self, a: ty::Region, b: ty::Region) -> cres<'tcx, ty::Region>;

fn substs(&self,
item_def_id: ast::DefId,
@ -100,6 +92,11 @@ pub trait Combine<'tcx> : Sized {
b_subst: &subst::Substs<'tcx>)
-> cres<'tcx, subst::Substs<'tcx>>
{
debug!("substs: item_def_id={} a_subst={} b_subst={}",
item_def_id.repr(self.infcx().tcx),
a_subst.repr(self.infcx().tcx),
b_subst.repr(self.infcx().tcx));

let variances = if self.infcx().tcx.variance_computed.get() {
Some(ty::item_variances(self.infcx().tcx, item_def_id))
} else {
@ -119,7 +116,8 @@ pub trait Combine<'tcx> : Sized {
for &space in &subst::ParamSpace::all() {
let a_tps = a_subst.types.get_slice(space);
let b_tps = b_subst.types.get_slice(space);
let tps = try!(self.tps(space, a_tps, b_tps));
let t_variances = variances.map(|v| v.types.get_slice(space));
let tps = try!(relate_type_params(self, t_variances, a_tps, b_tps));
substs.types.replace(space, tps);
}

@ -132,20 +130,7 @@ pub trait Combine<'tcx> : Sized {
for &space in &subst::ParamSpace::all() {
let a_regions = a.get_slice(space);
let b_regions = b.get_slice(space);

let mut invariance = Vec::new();
let r_variances = match variances {
Some(variances) => {
variances.regions.get_slice(space)
}
None => {
for _ in a_regions {
invariance.push(ty::Invariant);
}
&invariance[..]
}
};

let r_variances = variances.map(|v| v.regions.get_slice(space));
let regions = try!(relate_region_params(self,
r_variances,
a_regions,
@ -157,13 +142,34 @@ pub trait Combine<'tcx> : Sized {

return Ok(substs);

fn relate_type_params<'tcx, C: Combine<'tcx>>(this: &C,
variances: Option<&[ty::Variance]>,
a_tys: &[Ty<'tcx>],
b_tys: &[Ty<'tcx>])
-> cres<'tcx, Vec<Ty<'tcx>>>
{
if a_tys.len() != b_tys.len() {
return Err(ty::terr_ty_param_size(expected_found(this,
a_tys.len(),
b_tys.len())));
}

range(0, a_tys.len()).map(|i| {
let a_ty = a_tys[i];
let b_ty = b_tys[i];
let v = variances.map_or(ty::Invariant, |v| v[i]);
this.tys_with_variance(v, a_ty, b_ty)
}).collect()
}

fn relate_region_params<'tcx, C: Combine<'tcx>>(this: &C,
variances: &[ty::Variance],
variances: Option<&[ty::Variance]>,
a_rs: &[ty::Region],
b_rs: &[ty::Region])
-> cres<'tcx, Vec<ty::Region>> {
-> cres<'tcx, Vec<ty::Region>>
{
let tcx = this.infcx().tcx;
let num_region_params = variances.len();
let num_region_params = a_rs.len();

debug!("relate_region_params(\
a_rs={}, \
@ -173,22 +179,18 @@ pub trait Combine<'tcx> : Sized {
b_rs.repr(tcx),
variances.repr(tcx));

assert_eq!(num_region_params, a_rs.len());
assert_eq!(num_region_params,
variances.map_or(num_region_params,
|v| v.len()));

assert_eq!(num_region_params, b_rs.len());
let mut rs = vec!();
for i in 0..num_region_params {

(0..a_rs.len()).map(|i| {
let a_r = a_rs[i];
let b_r = b_rs[i];
let variance = variances[i];
let r = match variance {
ty::Invariant => this.equate().regions(a_r, b_r),
ty::Covariant => this.regions(a_r, b_r),
ty::Contravariant => this.contraregions(a_r, b_r),
ty::Bivariant => Ok(a_r),
};
rs.push(try!(r));
}
Ok(rs)
let variance = variances.map_or(ty::Invariant, |v| v[i]);
this.regions_with_variance(variance, a_r, b_r)
}).collect()
}
}

@ -241,7 +243,7 @@ pub trait Combine<'tcx> : Sized {
}

fn args(&self, a: Ty<'tcx>, b: Ty<'tcx>) -> cres<'tcx, Ty<'tcx>> {
self.contratys(a, b).and_then(|t| Ok(t))
self.tys_with_variance(ty::Contravariant, a, b).and_then(|t| Ok(t))
}

fn unsafeties(&self, a: Unsafety, b: Unsafety) -> cres<'tcx, Unsafety>;
@ -309,7 +311,7 @@ pub trait Combine<'tcx> : Sized {
b: &ty::ExistentialBounds<'tcx>)
-> cres<'tcx, ty::ExistentialBounds<'tcx>>
{
let r = try!(self.contraregions(a.region_bound, b.region_bound));
let r = try!(self.regions_with_variance(ty::Contravariant, a.region_bound, b.region_bound));
let nb = try!(self.builtin_bounds(a.builtin_bounds, b.builtin_bounds));
let pb = try!(self.projection_bounds(&a.projection_bounds, &b.projection_bounds));
Ok(ty::ExistentialBounds { region_bound: r,
@ -322,11 +324,6 @@ pub trait Combine<'tcx> : Sized {
b: ty::BuiltinBounds)
-> cres<'tcx, ty::BuiltinBounds>;

fn contraregions(&self, a: ty::Region, b: ty::Region)
-> cres<'tcx, ty::Region>;

fn regions(&self, a: ty::Region, b: ty::Region) -> cres<'tcx, ty::Region>;

fn trait_refs(&self,
a: &ty::TraitRef<'tcx>,
b: &ty::TraitRef<'tcx>)
@ -540,7 +537,8 @@ pub fn super_tys<'tcx, C: Combine<'tcx>>(this: &C,
}

(&ty::ty_rptr(a_r, ref a_mt), &ty::ty_rptr(b_r, ref b_mt)) => {
let r = try!(this.contraregions(*a_r, *b_r));
let r = try!(this.regions_with_variance(ty::Contravariant, *a_r, *b_r));

// FIXME(14985) If we have mutable references to trait objects, we
// used to use covariant subtyping. I have preserved this behaviour,
// even though it is probably incorrect. So don't go down the usual
@ -644,6 +642,10 @@ impl<'f, 'tcx> CombineFields<'f, 'tcx> {
Equate((*self).clone())
}

fn bivariate(&self) -> Bivariate<'f, 'tcx> {
Bivariate((*self).clone())
}

fn sub(&self) -> Sub<'f, 'tcx> {
Sub((*self).clone())
}
@ -697,7 +699,7 @@ impl<'f, 'tcx> CombineFields<'f, 'tcx> {
EqTo => {
self.generalize(a_ty, b_vid, false)
}
SupertypeOf | SubtypeOf => {
BiTo | SupertypeOf | SubtypeOf => {
self.generalize(a_ty, b_vid, true)
}
});
@ -721,6 +723,10 @@ impl<'f, 'tcx> CombineFields<'f, 'tcx> {
// to associate causes/spans with each of the relations in
// the stack to get this right.
match dir {
BiTo => {
try!(self.bivariate().tys(a_ty, b_ty));
}

EqTo => {
try!(self.equate().tys(a_ty, b_ty));
}
@ -730,7 +736,7 @@ impl<'f, 'tcx> CombineFields<'f, 'tcx> {
}

SupertypeOf => {
try!(self.sub().contratys(a_ty, b_ty));
try!(self.sub().tys_with_variance(ty::Contravariant, a_ty, b_ty));
}
}
}
@ -13,11 +13,7 @@ use middle::ty::{self, Ty};
use middle::ty::TyVar;
use middle::infer::combine::*;
use middle::infer::{cres};
use middle::infer::glb::Glb;
use middle::infer::InferCtxt;
use middle::infer::lub::Lub;
use middle::infer::sub::Sub;
use middle::infer::{TypeTrace, Subtype};
use middle::infer::{Subtype};
use middle::infer::type_variable::{EqTo};
use util::ppaux::{Repr};

@ -33,21 +29,20 @@ pub fn Equate<'f, 'tcx>(cf: CombineFields<'f, 'tcx>) -> Equate<'f, 'tcx> {
}

impl<'f, 'tcx> Combine<'tcx> for Equate<'f, 'tcx> {
fn infcx<'a>(&'a self) -> &'a InferCtxt<'a, 'tcx> { self.fields.infcx }
fn tag(&self) -> String { "eq".to_string() }
fn a_is_expected(&self) -> bool { self.fields.a_is_expected }
fn trace(&self) -> TypeTrace<'tcx> { self.fields.trace.clone() }
fn tag(&self) -> String { "Equate".to_string() }
fn fields<'a>(&'a self) -> &'a CombineFields<'a, 'tcx> { &self.fields }

fn equate<'a>(&'a self) -> Equate<'a, 'tcx> { Equate(self.fields.clone()) }
fn sub<'a>(&'a self) -> Sub<'a, 'tcx> { Sub(self.fields.clone()) }
fn lub<'a>(&'a self) -> Lub<'a, 'tcx> { Lub(self.fields.clone()) }
fn glb<'a>(&'a self) -> Glb<'a, 'tcx> { Glb(self.fields.clone()) }

fn contratys(&self, a: Ty<'tcx>, b: Ty<'tcx>) -> cres<'tcx, Ty<'tcx>> {
fn tys_with_variance(&self, _: ty::Variance, a: Ty<'tcx>, b: Ty<'tcx>)
-> cres<'tcx, Ty<'tcx>>
{
// Once we're equating, it doesn't matter what the variance is.
self.tys(a, b)
}

fn contraregions(&self, a: ty::Region, b: ty::Region) -> cres<'tcx, ty::Region> {
fn regions_with_variance(&self, _: ty::Variance, a: ty::Region, b: ty::Region)
-> cres<'tcx, ty::Region>
{
// Once we're equating, it doesn't matter what the variance is.
self.regions(a, b)
}
@ -675,6 +675,17 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> {
sup,
"");
}
infer::Operand(span) => {
self.tcx.sess.span_err(
span,
"lifetime of operand does not outlive \
the operation");
note_and_explain_region(
self.tcx,
"the operand is only valid for ",
sup,
"");
}
infer::AddrOf(span) => {
self.tcx.sess.span_err(
span,
@ -1593,6 +1604,11 @@ impl<'a, 'tcx> ErrorReportingHelpers<'tcx> for InferCtxt<'a, 'tcx> {
span,
"...so that return value is valid for the call");
}
infer::Operand(span) => {
self.tcx.sess.span_err(
span,
"...so that operand is valid for operation");
}
infer::AddrOf(span) => {
self.tcx.sess.span_note(
span,
@ -10,12 +10,9 @@

use super::combine::*;
use super::lattice::*;
use super::equate::Equate;
use super::higher_ranked::HigherRankedRelations;
use super::lub::Lub;
use super::sub::Sub;
use super::{cres, InferCtxt};
use super::{TypeTrace, Subtype};
use super::{cres};
use super::Subtype;

use middle::ty::{BuiltinBounds};
use middle::ty::{self, Ty};
@ -34,15 +31,30 @@ pub fn Glb<'f, 'tcx>(cf: CombineFields<'f, 'tcx>) -> Glb<'f, 'tcx> {
}

impl<'f, 'tcx> Combine<'tcx> for Glb<'f, 'tcx> {
fn infcx<'a>(&'a self) -> &'a InferCtxt<'a, 'tcx> { self.fields.infcx }
fn tag(&self) -> String { "glb".to_string() }
fn a_is_expected(&self) -> bool { self.fields.a_is_expected }
fn trace(&self) -> TypeTrace<'tcx> { self.fields.trace.clone() }
fn tag(&self) -> String { "Glb".to_string() }
fn fields<'a>(&'a self) -> &'a CombineFields<'a, 'tcx> { &self.fields }

fn equate<'a>(&'a self) -> Equate<'a, 'tcx> { Equate(self.fields.clone()) }
fn sub<'a>(&'a self) -> Sub<'a, 'tcx> { Sub(self.fields.clone()) }
fn lub<'a>(&'a self) -> Lub<'a, 'tcx> { Lub(self.fields.clone()) }
fn glb<'a>(&'a self) -> Glb<'a, 'tcx> { Glb(self.fields.clone()) }
fn tys_with_variance(&self, v: ty::Variance, a: Ty<'tcx>, b: Ty<'tcx>)
-> cres<'tcx, Ty<'tcx>>
{
match v {
ty::Invariant => self.equate().tys(a, b),
ty::Covariant => self.tys(a, b),
ty::Bivariant => self.bivariate().tys(a, b),
ty::Contravariant => self.lub().tys(a, b),
}
}

fn regions_with_variance(&self, v: ty::Variance, a: ty::Region, b: ty::Region)
-> cres<'tcx, ty::Region>
{
match v {
ty::Invariant => self.equate().regions(a, b),
ty::Covariant => self.regions(a, b),
ty::Bivariant => self.bivariate().regions(a, b),
ty::Contravariant => self.lub().regions(a, b),
}
}

fn mts(&self, a: &ty::mt<'tcx>, b: &ty::mt<'tcx>) -> cres<'tcx, ty::mt<'tcx>> {
let tcx = self.fields.infcx.tcx;
@ -75,10 +87,6 @@ impl<'f, 'tcx> Combine<'tcx> for Glb<'f, 'tcx> {
}
}

fn contratys(&self, a: Ty<'tcx>, b: Ty<'tcx>) -> cres<'tcx, Ty<'tcx>> {
self.lub().tys(a, b)
}

fn unsafeties(&self, a: Unsafety, b: Unsafety) -> cres<'tcx, Unsafety> {
match (a, b) {
(Unsafety::Normal, _) | (_, Unsafety::Normal) => Ok(Unsafety::Normal),
@ -104,11 +112,6 @@ impl<'f, 'tcx> Combine<'tcx> for Glb<'f, 'tcx> {
Ok(self.fields.infcx.region_vars.glb_regions(Subtype(self.trace()), a, b))
}

fn contraregions(&self, a: ty::Region, b: ty::Region)
-> cres<'tcx, ty::Region> {
self.lub().regions(a, b)
}

fn tys(&self, a: Ty<'tcx>, b: Ty<'tcx>) -> cres<'tcx, Ty<'tcx>> {
super_lattice_tys(self, a, b)
}
@ -31,7 +31,7 @@ pub trait HigherRankedRelations<'tcx> {
where T : Combineable<'tcx>;
}

trait InferCtxtExt<'tcx> {
trait InferCtxtExt {
fn tainted_regions(&self, snapshot: &CombinedSnapshot, r: ty::Region) -> Vec<ty::Region>;

fn region_vars_confined_to_snapshot(&self,
@ -371,7 +371,7 @@ fn fold_regions_in<'tcx, T, F>(tcx: &ty::ctxt<'tcx>,
}))
}

impl<'a,'tcx> InferCtxtExt<'tcx> for InferCtxt<'a,'tcx> {
impl<'a,'tcx> InferCtxtExt for InferCtxt<'a,'tcx> {
fn tainted_regions(&self, snapshot: &CombinedSnapshot, r: ty::Region) -> Vec<ty::Region> {
self.region_vars.tainted(&snapshot.region_vars_snapshot, r)
}
@ -9,13 +9,10 @@
// except according to those terms.

use super::combine::*;
use super::equate::Equate;
use super::glb::Glb;
use super::higher_ranked::HigherRankedRelations;
use super::lattice::*;
use super::sub::Sub;
use super::{cres, InferCtxt};
use super::{TypeTrace, Subtype};
use super::{cres};
use super::{Subtype};

use middle::ty::{BuiltinBounds};
use middle::ty::{self, Ty};
@ -34,15 +31,30 @@ pub fn Lub<'f, 'tcx>(cf: CombineFields<'f, 'tcx>) -> Lub<'f, 'tcx> {
}

impl<'f, 'tcx> Combine<'tcx> for Lub<'f, 'tcx> {
fn infcx<'a>(&'a self) -> &'a InferCtxt<'a, 'tcx> { self.fields.infcx }
fn tag(&self) -> String { "lub".to_string() }
fn a_is_expected(&self) -> bool { self.fields.a_is_expected }
fn trace(&self) -> TypeTrace<'tcx> { self.fields.trace.clone() }
fn tag(&self) -> String { "Lub".to_string() }
fn fields<'a>(&'a self) -> &'a CombineFields<'a, 'tcx> { &self.fields }

fn equate<'a>(&'a self) -> Equate<'a, 'tcx> { Equate(self.fields.clone()) }
fn sub<'a>(&'a self) -> Sub<'a, 'tcx> { Sub(self.fields.clone()) }
fn lub<'a>(&'a self) -> Lub<'a, 'tcx> { Lub(self.fields.clone()) }
fn glb<'a>(&'a self) -> Glb<'a, 'tcx> { Glb(self.fields.clone()) }
fn tys_with_variance(&self, v: ty::Variance, a: Ty<'tcx>, b: Ty<'tcx>)
-> cres<'tcx, Ty<'tcx>>
{
match v {
ty::Invariant => self.equate().tys(a, b),
ty::Covariant => self.tys(a, b),
ty::Bivariant => self.bivariate().tys(a, b),
ty::Contravariant => self.glb().tys(a, b),
}
}

fn regions_with_variance(&self, v: ty::Variance, a: ty::Region, b: ty::Region)
-> cres<'tcx, ty::Region>
{
match v {
ty::Invariant => self.equate().regions(a, b),
ty::Covariant => self.regions(a, b),
ty::Bivariant => self.bivariate().regions(a, b),
ty::Contravariant => self.glb().regions(a, b),
}
}

fn mts(&self, a: &ty::mt<'tcx>, b: &ty::mt<'tcx>) -> cres<'tcx, ty::mt<'tcx>> {
let tcx = self.tcx();
@ -70,10 +82,6 @@ impl<'f, 'tcx> Combine<'tcx> for Lub<'f, 'tcx> {
}
}

fn contratys(&self, a: Ty<'tcx>, b: Ty<'tcx>) -> cres<'tcx, Ty<'tcx>> {
self.glb().tys(a, b)
}

fn unsafeties(&self, a: Unsafety, b: Unsafety) -> cres<'tcx, Unsafety> {
match (a, b) {
(Unsafety::Unsafe, _) | (_, Unsafety::Unsafe) => Ok(Unsafety::Unsafe),
@ -90,11 +98,6 @@ impl<'f, 'tcx> Combine<'tcx> for Lub<'f, 'tcx> {
Ok(a.intersection(b))
}

fn contraregions(&self, a: ty::Region, b: ty::Region)
-> cres<'tcx, ty::Region> {
self.glb().regions(a, b)
}

fn regions(&self, a: ty::Region, b: ty::Region) -> cres<'tcx, ty::Region> {
debug!("{}.regions({}, {})",
self.tag(),
@ -45,6 +45,7 @@ use self::lub::Lub;
use self::unify::{UnificationTable, InferCtxtMethodsForSimplyUnifiableTypes};
use self::error_reporting::ErrorReporting;

pub mod bivariate;
pub mod combine;
pub mod equate;
pub mod error_reporting;
@ -209,6 +210,9 @@ pub enum SubregionOrigin<'tcx> {
// Region in return type of invoked fn must enclose call
CallReturn(Span),

// Operands must be in scope
Operand(Span),

// Region resulting from a `&` expr must enclose the `&` expr
AddrOf(Span),

@ -1194,6 +1198,7 @@ impl<'tcx> SubregionOrigin<'tcx> {
CallRcvr(a) => a,
CallArg(a) => a,
CallReturn(a) => a,
Operand(a) => a,
AddrOf(a) => a,
AutoBorrow(a) => a,
SafeDestructor(a) => a,
@ -1257,6 +1262,7 @@ impl<'tcx> Repr<'tcx> for SubregionOrigin<'tcx> {
CallRcvr(a) => format!("CallRcvr({})", a.repr(tcx)),
CallArg(a) => format!("CallArg({})", a.repr(tcx)),
CallReturn(a) => format!("CallReturn({})", a.repr(tcx)),
Operand(a) => format!("Operand({})", a.repr(tcx)),
AddrOf(a) => format!("AddrOf({})", a.repr(tcx)),
AutoBorrow(a) => format!("AutoBorrow({})", a.repr(tcx)),
SafeDestructor(a) => format!("SafeDestructor({})", a.repr(tcx)),
@ -10,12 +10,8 @@

use super::combine::*;
use super::{cres, CresCompare};
use super::equate::Equate;
use super::glb::Glb;
use super::higher_ranked::HigherRankedRelations;
use super::InferCtxt;
use super::lub::Lub;
use super::{TypeTrace, Subtype};
use super::{Subtype};
use super::type_variable::{SubtypeOf, SupertypeOf};

use middle::ty::{BuiltinBounds};
@ -37,28 +33,30 @@ pub fn Sub<'f, 'tcx>(cf: CombineFields<'f, 'tcx>) -> Sub<'f, 'tcx> {
}

impl<'f, 'tcx> Combine<'tcx> for Sub<'f, 'tcx> {
fn infcx<'a>(&'a self) -> &'a InferCtxt<'a, 'tcx> { self.fields.infcx }
fn tag(&self) -> String { "sub".to_string() }
fn a_is_expected(&self) -> bool { self.fields.a_is_expected }
fn trace(&self) -> TypeTrace<'tcx> { self.fields.trace.clone() }
fn tag(&self) -> String { "Sub".to_string() }
fn fields<'a>(&'a self) -> &'a CombineFields<'a, 'tcx> { &self.fields }

fn equate<'a>(&'a self) -> Equate<'a, 'tcx> { Equate(self.fields.clone()) }
fn sub<'a>(&'a self) -> Sub<'a, 'tcx> { Sub(self.fields.clone()) }
fn lub<'a>(&'a self) -> Lub<'a, 'tcx> { Lub(self.fields.clone()) }
fn glb<'a>(&'a self) -> Glb<'a, 'tcx> { Glb(self.fields.clone()) }

fn contratys(&self, a: Ty<'tcx>, b: Ty<'tcx>) -> cres<'tcx, Ty<'tcx>> {
Sub(self.fields.switch_expected()).tys(b, a)
fn tys_with_variance(&self, v: ty::Variance, a: Ty<'tcx>, b: Ty<'tcx>)
-> cres<'tcx, Ty<'tcx>>
{
match v {
ty::Invariant => self.equate().tys(a, b),
ty::Covariant => self.tys(a, b),
ty::Bivariant => self.bivariate().tys(a, b),
ty::Contravariant => Sub(self.fields.switch_expected()).tys(b, a),
}
}

fn contraregions(&self, a: ty::Region, b: ty::Region)
-> cres<'tcx, ty::Region> {
let opp = CombineFields {
a_is_expected: !self.fields.a_is_expected,
..self.fields.clone()
};
Sub(opp).regions(b, a)
}
fn regions_with_variance(&self, v: ty::Variance, a: ty::Region, b: ty::Region)
-> cres<'tcx, ty::Region>
{
match v {
ty::Invariant => self.equate().regions(a, b),
ty::Covariant => self.regions(a, b),
ty::Bivariant => self.bivariate().regions(a, b),
ty::Contravariant => Sub(self.fields.switch_expected()).regions(b, a),
}
}

fn regions(&self, a: ty::Region, b: ty::Region) -> cres<'tcx, ty::Region> {
debug!("{}.regions({}, {})",
@ -14,6 +14,7 @@ use self::UndoEntry::*;

use middle::ty::{self, Ty};
use std::cmp::min;
use std::marker::PhantomData;
use std::mem;
use std::u32;
use util::snapshot_vec as sv;
@ -42,13 +43,13 @@ enum UndoEntry {
Relate(ty::TyVid, ty::TyVid),
}

struct Delegate<'tcx>;
struct Delegate<'tcx>(PhantomData<&'tcx ()>);

type Relation = (RelationDir, ty::TyVid);

#[derive(Copy, PartialEq, Debug)]
pub enum RelationDir {
SubtypeOf, SupertypeOf, EqTo
SubtypeOf, SupertypeOf, EqTo, BiTo
}

impl RelationDir {
@ -56,14 +57,15 @@ impl RelationDir {
match self {
SubtypeOf => SupertypeOf,
SupertypeOf => SubtypeOf,
EqTo => EqTo
EqTo => EqTo,
BiTo => BiTo,
}
}
}

impl<'tcx> TypeVariableTable<'tcx> {
pub fn new() -> TypeVariableTable<'tcx> {
TypeVariableTable { values: sv::SnapshotVec::new(Delegate) }
TypeVariableTable { values: sv::SnapshotVec::new(Delegate(PhantomData)) }
}

fn relations<'a>(&'a mut self, a: ty::TyVid) -> &'a mut Vec<Relation> {
@ -18,6 +18,7 @@ use middle::infer::{uok, ures};
use middle::infer::InferCtxt;
use std::cell::RefCell;
use std::fmt::Debug;
use std::marker::PhantomData;
use syntax::ast;
use util::snapshot_vec as sv;

@ -79,7 +80,7 @@ pub struct UnificationTable<K:UnifyKey> {
/// made during the snapshot may either be *committed* or *rolled back*.
pub struct Snapshot<K:UnifyKey> {
// Link snapshot to the key type `K` of the table.
marker: marker::CovariantType<K>,
marker: marker::PhantomData<K>,
snapshot: sv::Snapshot,
}

@ -92,7 +93,7 @@ pub struct Node<K:UnifyKey> {
}

#[derive(Copy)]
pub struct Delegate<K>;
pub struct Delegate<K>(PhantomData<K>);

// We can't use V:LatticeValue, much as I would like to,
// because frequently the pattern is that V=Option<U> for some
@ -102,14 +103,14 @@ pub struct Delegate<K>;
impl<K:UnifyKey> UnificationTable<K> {
pub fn new() -> UnificationTable<K> {
UnificationTable {
values: sv::SnapshotVec::new(Delegate),
values: sv::SnapshotVec::new(Delegate(PhantomData)),
}
}

/// Starts a new snapshot. Each snapshot must be either
/// rolled back or committed in a "LIFO" (stack) order.
pub fn snapshot(&mut self) -> Snapshot<K> {
Snapshot { marker: marker::CovariantType::<K>,
Snapshot { marker: marker::PhantomData::<K>,
snapshot: self.values.start_snapshot() }
}
@ -306,12 +306,13 @@ lets_do_this! {
ExchangeHeapLangItem, "exchange_heap", exchange_heap;
OwnedBoxLangItem, "owned_box", owned_box;

PhantomFnItem, "phantom_fn", phantom_fn;
PhantomDataItem, "phantom_data", phantom_data;

// Deprecated:
CovariantTypeItem, "covariant_type", covariant_type;
ContravariantTypeItem, "contravariant_type", contravariant_type;
InvariantTypeItem, "invariant_type", invariant_type;

CovariantLifetimeItem, "covariant_lifetime", covariant_lifetime;
ContravariantLifetimeItem, "contravariant_lifetime", contravariant_lifetime;
InvariantLifetimeItem, "invariant_lifetime", invariant_lifetime;
@ -10,24 +10,27 @@

//! See `doc.rs` for high-level documentation

use super::Normalized;
use super::SelectionContext;
use super::{Obligation, ObligationCause};
use super::{ObligationCause};
use super::PredicateObligation;
use super::project;
use super::util;

use middle::subst::{Subst, TypeSpace};
use middle::ty::{self, Ty};
use middle::infer::InferCtxt;
use middle::ty::{self, ToPolyTraitRef, Ty};
use middle::infer::{self, InferCtxt};
use std::collections::HashSet;
use std::rc::Rc;
use syntax::ast;
use syntax::codemap::DUMMY_SP;
use util::ppaux::Repr;

pub fn impl_can_satisfy(infcx: &InferCtxt,
impl1_def_id: ast::DefId,
impl2_def_id: ast::DefId)
-> bool
/// True if there exist types that satisfy both of the two given impls.
pub fn overlapping_impls(infcx: &InferCtxt,
impl1_def_id: ast::DefId,
impl2_def_id: ast::DefId)
-> bool
{
debug!("impl_can_satisfy(\
impl1_def_id={}, \
@ -35,28 +38,68 @@ pub fn impl_can_satisfy(infcx: &InferCtxt,
impl1_def_id.repr(infcx.tcx),
impl2_def_id.repr(infcx.tcx));

let param_env = ty::empty_parameter_environment(infcx.tcx);
let mut selcx = SelectionContext::intercrate(infcx, &param_env);
let cause = ObligationCause::dummy();
let param_env = &ty::empty_parameter_environment(infcx.tcx);
let selcx = &mut SelectionContext::intercrate(infcx, param_env);
infcx.probe(|_| {
overlap(selcx, impl1_def_id, impl2_def_id) || overlap(selcx, impl2_def_id, impl1_def_id)
})
}

// `impl1` provides an implementation of `Foo<X,Y> for Z`.
let impl1_substs =
util::fresh_substs_for_impl(infcx, DUMMY_SP, impl1_def_id);
let impl1_trait_ref =
(*ty::impl_trait_ref(infcx.tcx, impl1_def_id).unwrap()).subst(infcx.tcx, &impl1_substs);
let impl1_trait_ref =
project::normalize(&mut selcx, cause.clone(), &impl1_trait_ref);
/// Can the types from impl `a` be used to satisfy impl `b`?
/// (Including all conditions)
fn overlap(selcx: &mut SelectionContext,
a_def_id: ast::DefId,
b_def_id: ast::DefId)
-> bool
{
let (a_trait_ref, a_obligations) = impl_trait_ref_and_oblig(selcx, a_def_id);
let (b_trait_ref, b_obligations) = impl_trait_ref_and_oblig(selcx, b_def_id);

// Determine whether `impl2` can provide an implementation for those
// same types.
let obligation = Obligation::new(cause,
ty::Binder(ty::TraitPredicate {
trait_ref: Rc::new(impl1_trait_ref.value),
}));
debug!("impl_can_satisfy(obligation={})", obligation.repr(infcx.tcx));
selcx.evaluate_impl(impl2_def_id, &obligation) &&
impl1_trait_ref.obligations.iter().all(
|o| selcx.evaluate_obligation(o))
// Does `a <: b` hold? If not, no overlap.
if let Err(_) = infer::mk_sub_poly_trait_refs(selcx.infcx(),
true,
infer::Misc(DUMMY_SP),
a_trait_ref.to_poly_trait_ref(),
b_trait_ref.to_poly_trait_ref()) {
return false;
}

// Are any of the obligations unsatisfiable? If so, no overlap.
a_obligations.iter()
.chain(b_obligations.iter())
.all(|o| selcx.evaluate_obligation(o))
}

/// Instantiate fresh variables for all bound parameters of the impl
/// and return the impl trait ref with those variables substituted.
fn impl_trait_ref_and_oblig<'a,'tcx>(selcx: &mut SelectionContext<'a,'tcx>,
impl_def_id: ast::DefId)
-> (Rc<ty::TraitRef<'tcx>>,
Vec<PredicateObligation<'tcx>>)
{
let impl_substs =
&util::fresh_substs_for_impl(selcx.infcx(), DUMMY_SP, impl_def_id);
let impl_trait_ref =
ty::impl_trait_ref(selcx.tcx(), impl_def_id).unwrap();
let impl_trait_ref =
impl_trait_ref.subst(selcx.tcx(), impl_substs);
let Normalized { value: impl_trait_ref, obligations: normalization_obligations1 } =
project::normalize(selcx, ObligationCause::dummy(), &impl_trait_ref);

let predicates = ty::lookup_predicates(selcx.tcx(), impl_def_id);
let predicates = predicates.instantiate(selcx.tcx(), impl_substs);
let Normalized { value: predicates, obligations: normalization_obligations2 } =
project::normalize(selcx, ObligationCause::dummy(), &predicates);
let impl_obligations =
util::predicates_for_generics(selcx.tcx(), ObligationCause::dummy(), 0, &predicates);

let impl_obligations: Vec<_> =
impl_obligations.into_iter()
.chain(normalization_obligations1.into_iter())
.chain(normalization_obligations2.into_iter())
.collect();

(impl_trait_ref, impl_obligations)
}

pub enum OrphanCheckErr<'tcx> {
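As a rough illustration of what the rewritten coherence check rejects (hypothetical code, not from this commit): two impls overlap when some type could satisfy both trait references together with all of their where-clause obligations.

```
trait Greet {
    fn greet(&self) -> &'static str;
}

// Blanket impl: applies to every `Clone` type.
impl<T: Clone> Greet for T {
    fn greet(&self) -> &'static str { "blanket" }
}

// A second impl for `i32` would overlap, because `i32: Clone` means both
// impls could apply to the same type; coherence reports a
// conflicting-implementations error, so it is left commented out here.
// impl Greet for i32 {
//     fn greet(&self) -> &'static str { "i32" }
// }

fn main() {
    println!("{}", 42i32.greet());
}
```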
@ -28,6 +28,7 @@ use util::ppaux::{Repr, UserString};
pub use self::error_reporting::report_fulfillment_errors;
pub use self::error_reporting::suggest_new_overflow_limit;
pub use self::coherence::orphan_check;
pub use self::coherence::overlapping_impls;
pub use self::coherence::OrphanCheckErr;
pub use self::fulfill::{FulfillmentContext, RegionObligation};
pub use self::project::MismatchedProjectionTypes;
@ -270,16 +271,6 @@ pub struct VtableObjectData<'tcx> {
pub object_ty: Ty<'tcx>,
}

/// True if there exist types that satisfy both of the two given impls.
pub fn overlapping_impls(infcx: &InferCtxt,
impl1_def_id: ast::DefId,
impl2_def_id: ast::DefId)
-> bool
{
coherence::impl_can_satisfy(infcx, impl1_def_id, impl2_def_id) &&
coherence::impl_can_satisfy(infcx, impl2_def_id, impl1_def_id)
}

/// Creates predicate obligations from the generic bounds.
pub fn predicates_for_generics<'tcx>(tcx: &ty::ctxt<'tcx>,
cause: ObligationCause<'tcx>,
@ -132,6 +132,7 @@ pub enum MethodMatchedData {
/// parameters) that would have to be inferred from the impl.
#[derive(PartialEq,Eq,Debug,Clone)]
enum SelectionCandidate<'tcx> {
PhantomFnCandidate,
BuiltinCandidate(ty::BuiltinBound),
ParamCandidate(ty::PolyTraitRef<'tcx>),
ImplCandidate(ast::DefId),
@ -793,8 +794,6 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
stack: &TraitObligationStack<'o, 'tcx>)
-> Result<SelectionCandidateSet<'tcx>, SelectionError<'tcx>>
{
// Check for overflow.

let TraitObligationStack { obligation, .. } = *stack;

let mut candidates = SelectionCandidateSet {
@ -802,6 +801,14 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
ambiguous: false
};

// Check for the `PhantomFn` trait. This is really just a
// special annotation that is *always* considered to match, no
// matter what the type parameters are etc.
if self.tcx().lang_items.phantom_fn() == Some(obligation.predicate.def_id()) {
candidates.vec.push(PhantomFnCandidate);
return Ok(candidates);
}

// Other bounds. Consider both in-scope bounds from fn decl
// and applicable impls. There is a certain set of precedence rules here.

@ -1629,6 +1636,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
try!(self.confirm_builtin_candidate(obligation, builtin_bound))))
}

PhantomFnCandidate |
ErrorCandidate => {
Ok(VtableBuiltin(VtableBuiltinData { nested: VecPerParamSpace::empty() }))
}
@ -2295,6 +2303,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
impl<'tcx> Repr<'tcx> for SelectionCandidate<'tcx> {
fn repr(&self, tcx: &ty::ctxt<'tcx>) -> String {
match *self {
PhantomFnCandidate => format!("PhantomFnCandidate"),
ErrorCandidate => format!("ErrorCandidate"),
BuiltinCandidate(b) => format!("BuiltinCandidate({:?})", b),
ParamCandidate(ref a) => format!("ParamCandidate({})", a.repr(tcx)),
@ -3008,6 +3008,13 @@ impl<'tcx> TyS<'tcx> {
assert_eq!(r, Some(self));
walker
}

pub fn as_opt_param_ty(&self) -> Option<ty::ParamTy> {
match self.sty {
ty::ty_param(ref d) => Some(d.clone()),
_ => None,
}
}
}

pub fn walk_ty<'tcx, F>(ty_root: Ty<'tcx>, mut f: F)
@ -306,10 +306,10 @@ pub fn all_traits<'a>(ccx: &'a CrateCtxt) -> AllTraits<'a> {
// Crate-local:
//
// meh.
struct Visitor<'a, 'b: 'a, 'tcx: 'a + 'b> {
struct Visitor<'a> {
traits: &'a mut AllTraitsVec,
}
impl<'v,'a, 'b, 'tcx> visit::Visitor<'v> for Visitor<'a, 'b, 'tcx> {
impl<'v, 'a> visit::Visitor<'v> for Visitor<'a> {
fn visit_item(&mut self, i: &'v ast::Item) {
match i.node {
ast::ItemTrait(..) => {
@ -626,6 +626,20 @@ fn visit_expr(rcx: &mut Rcx, expr: &ast::Expr) {
visit::walk_expr(rcx, expr);
}

ast::ExprBinary(_, ref lhs, ref rhs) => {
// If you do `x OP y`, then the types of `x` and `y` must
// outlive the operation you are performing.
let lhs_ty = rcx.resolve_expr_type_adjusted(&**lhs);
let rhs_ty = rcx.resolve_expr_type_adjusted(&**rhs);
for &ty in [lhs_ty, rhs_ty].iter() {
type_must_outlive(rcx,
infer::Operand(expr.span),
ty,
ty::ReScope(CodeExtent::from_node_id(expr.id)));
}
visit::walk_expr(rcx, expr);
}

ast::ExprUnary(op, ref lhs) if has_method_map => {
let implicitly_ref_args = !ast_util::is_by_value_unop(op);
@ -10,21 +10,22 @@

use astconv::AstConv;
use check::{FnCtxt, Inherited, blank_fn_ctxt, vtable, regionck};
use constrained_type_params::identify_constrained_type_params;
use CrateCtxt;
use middle::region;
use middle::subst;
use middle::subst::{self, TypeSpace, FnSpace, ParamSpace, SelfSpace};
use middle::traits;
use middle::ty::{self, Ty};
use middle::ty::liberate_late_bound_regions;
use middle::ty_fold::{TypeFolder, TypeFoldable, super_fold_ty};
use util::ppaux::Repr;
use util::ppaux::{Repr, UserString};

use std::collections::HashSet;
use syntax::ast;
use syntax::ast_util::{local_def};
use syntax::attr;
use syntax::codemap::Span;
use syntax::parse::token;
use syntax::parse::token::{self, special_idents};
use syntax::visit;
use syntax::visit::Visitor;

@ -38,6 +39,10 @@ impl<'ccx, 'tcx> CheckTypeWellFormedVisitor<'ccx, 'tcx> {
CheckTypeWellFormedVisitor { ccx: ccx, cache: HashSet::new() }
}

fn tcx(&self) -> &ty::ctxt<'tcx> {
self.ccx.tcx
}

/// Checks that the field types (in a struct def'n) or argument types (in an enum def'n) are
/// well-formed, meaning that they do not require any constraints not declared in the struct
/// definition itself. For example, this definition would be illegal:
@ -96,19 +101,29 @@ impl<'ccx, 'tcx> CheckTypeWellFormedVisitor<'ccx, 'tcx> {
ast::ItemConst(..) => {
self.check_item_type(item);
}
ast::ItemStruct(ref struct_def, _) => {
self.check_type_defn(item, |fcx| vec![struct_variant(fcx, &**struct_def)]);
ast::ItemStruct(ref struct_def, ref ast_generics) => {
self.check_type_defn(item, |fcx| {
vec![struct_variant(fcx, &**struct_def)]
});

self.check_variances_for_type_defn(item, ast_generics);
}
ast::ItemEnum(ref enum_def, _) => {
self.check_type_defn(item, |fcx| enum_variants(fcx, enum_def));
ast::ItemEnum(ref enum_def, ref ast_generics) => {
self.check_type_defn(item, |fcx| {
enum_variants(fcx, enum_def)
});

self.check_variances_for_type_defn(item, ast_generics);
}
ast::ItemTrait(..) => {
ast::ItemTrait(_, ref ast_generics, _, _) => {
let trait_predicates =
ty::lookup_predicates(ccx.tcx, local_def(item.id));
reject_non_type_param_bounds(
ccx.tcx,
item.span,
&trait_predicates);
self.check_variances(item, ast_generics, &trait_predicates,
self.tcx().lang_items.phantom_fn());
}
_ => {}
}
@ -276,6 +291,123 @@ impl<'ccx, 'tcx> CheckTypeWellFormedVisitor<'ccx, 'tcx> {
}
});
}

fn check_variances_for_type_defn(&self,
item: &ast::Item,
ast_generics: &ast::Generics)
{
let item_def_id = local_def(item.id);
let predicates = ty::lookup_predicates(self.tcx(), item_def_id);
self.check_variances(item,
ast_generics,
&predicates,
self.tcx().lang_items.phantom_data());
}

fn check_variances(&self,
item: &ast::Item,
ast_generics: &ast::Generics,
ty_predicates: &ty::GenericPredicates<'tcx>,
suggested_marker_id: Option<ast::DefId>)
{
let variance_lang_items = &[
self.tcx().lang_items.phantom_fn(),
self.tcx().lang_items.phantom_data(),
];

let item_def_id = local_def(item.id);
let is_lang_item = variance_lang_items.iter().any(|n| *n == Some(item_def_id));
if is_lang_item {
return;
}

let variances = ty::item_variances(self.tcx(), item_def_id);

let mut constrained_parameters: HashSet<_> =
variances.types
.iter_enumerated()
.filter(|&(_, _, &variance)| variance != ty::Bivariant)
.map(|(space, index, _)| self.param_ty(ast_generics, space, index))
.collect();

identify_constrained_type_params(self.tcx(),
ty_predicates.predicates.as_slice(),
None,
&mut constrained_parameters);

for (space, index, _) in variances.types.iter_enumerated() {
let param_ty = self.param_ty(ast_generics, space, index);
if constrained_parameters.contains(&param_ty) {
continue;
}
let span = self.ty_param_span(ast_generics, item, space, index);
self.report_bivariance(span, param_ty.name, suggested_marker_id);
}

for (space, index, &variance) in variances.regions.iter_enumerated() {
if variance != ty::Bivariant {
continue;
}

assert_eq!(space, TypeSpace);
let span = ast_generics.lifetimes[index].lifetime.span;
let name = ast_generics.lifetimes[index].lifetime.name;
self.report_bivariance(span, name, suggested_marker_id);
}
}

fn param_ty(&self,
ast_generics: &ast::Generics,
space: ParamSpace,
index: usize)
-> ty::ParamTy
{
let name = match space {
TypeSpace => ast_generics.ty_params[index].ident.name,
SelfSpace => special_idents::type_self.name,
FnSpace => self.tcx().sess.bug("Fn space occupied?"),
};

ty::ParamTy { space: space, idx: index as u32, name: name }
}

fn ty_param_span(&self,
ast_generics: &ast::Generics,
item: &ast::Item,
space: ParamSpace,
index: usize)
-> Span
{
match space {
TypeSpace => ast_generics.ty_params[index].span,
SelfSpace => item.span,
FnSpace => self.tcx().sess.span_bug(item.span, "Fn space occupied?"),
}
}

fn report_bivariance(&self,
span: Span,
param_name: ast::Name,
suggested_marker_id: Option<ast::DefId>)
{
self.tcx().sess.span_err(
span,
&format!("parameter `{}` is never used",
param_name.user_string(self.tcx()))[]);

match suggested_marker_id {
Some(def_id) => {
self.tcx().sess.span_help(
span,
format!("consider removing `{}` or using a marker such as `{}`",
param_name.user_string(self.tcx()),
ty::item_path_str(self.tcx(), def_id)).as_slice());
}
None => {
// no lang items, no help!
}
}
}
}

// Reject any predicates that do not involve a type parameter.
@ -343,9 +475,9 @@ impl<'ccx, 'tcx, 'v> Visitor<'v> for CheckTypeWellFormedVisitor<'ccx, 'tcx> {
match fk {
visit::FkFnBlock | visit::FkItemFn(..) => {}
visit::FkMethod(..) => {
match ty::impl_or_trait_item(self.ccx.tcx, local_def(id)) {
match ty::impl_or_trait_item(self.tcx(), local_def(id)) {
ty::ImplOrTraitItem::MethodTraitItem(ty_method) => {
reject_shadowing_type_parameters(self.ccx.tcx, span, &ty_method.generics)
reject_shadowing_type_parameters(self.tcx(), span, &ty_method.generics)
}
_ => {}
}
@ -359,14 +491,14 @@ impl<'ccx, 'tcx, 'v> Visitor<'v> for CheckTypeWellFormedVisitor<'ccx, 'tcx> {
&ast::TraitItem::ProvidedMethod(_) |
&ast::TraitItem::TypeTraitItem(_) => {},
&ast::TraitItem::RequiredMethod(ref method) => {
match ty::impl_or_trait_item(self.ccx.tcx, local_def(method.id)) {
match ty::impl_or_trait_item(self.tcx(), local_def(method.id)) {
ty::ImplOrTraitItem::MethodTraitItem(ty_method) => {
reject_non_type_param_bounds(
self.ccx.tcx,
self.tcx(),
method.span,
&ty_method.predicates);
reject_shadowing_type_parameters(
self.ccx.tcx,
self.tcx(),
method.span,
&ty_method.generics);
}
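A hedged sketch of the user-facing effect of the variance checks above (illustrative, not taken from the commit): an unused lifetime parameter is bivariant and now triggers the "parameter `'a` is never used" error together with the help message quoted in `report_bivariance`; a `PhantomData` marker is one way to satisfy the check.

```
use std::marker::PhantomData;

// struct Token<'a> { id: u32 }   // would now be rejected: `'a` is never used

struct Token<'a> {
    id: u32,
    _lifetime: PhantomData<&'a ()>, // records that a `Token` is tied to `'a`
}

fn make_token<'a>(slot: &'a u32) -> Token<'a> {
    Token { id: *slot, _lifetime: PhantomData }
}

fn main() {
    let slot = 7;
    assert_eq!(make_token(&slot).id, 7);
}
```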
@ -87,6 +87,7 @@ There are some shortcomings in this design:

use astconv::{self, AstConv, ty_of_arg, ast_ty_to_ty, ast_region_to_region};
use middle::def;
use constrained_type_params::identify_constrained_type_params;
use middle::lang_items::SizedTraitLangItem;
use middle::region;
use middle::resolve_lifetime;
@ -1960,51 +1961,15 @@ fn enforce_impl_ty_params_are_constrained<'tcx>(tcx: &ty::ctxt<'tcx>,
let mut input_parameters: HashSet<_> =
impl_trait_ref.iter()
.flat_map(|t| t.input_types().iter()) // Types in trait ref, if any
.chain(Some(impl_scheme.ty).iter()) // Self type, always
.chain(Some(impl_scheme.ty).iter()) // Self type, always
.flat_map(|t| t.walk())
.filter_map(to_opt_param_ty)
.filter_map(|t| t.as_opt_param_ty())
.collect();

loop {
let num_inputs = input_parameters.len();

let projection_predicates =
impl_predicates.predicates
.iter()
.filter_map(|predicate| {
match *predicate {
// Ignore higher-ranked binders. For the purposes
// of this check, they don't matter because they
// only affect named regions, and we're just
// concerned about type parameters here.
ty::Predicate::Projection(ref data) => Some(data.0.clone()),
_ => None,
}
});

for projection in projection_predicates {
// Special case: watch out for some kind of sneaky attempt
// to project out an associated type defined by this very trait.
if Some(projection.projection_ty.trait_ref.clone()) == impl_trait_ref {
continue;
}

let relies_only_on_inputs =
projection.projection_ty.trait_ref.input_types().iter()
.flat_map(|t| t.walk())
.filter_map(to_opt_param_ty)
.all(|t| input_parameters.contains(&t));

if relies_only_on_inputs {
input_parameters.extend(
projection.ty.walk().filter_map(to_opt_param_ty));
}
}

if input_parameters.len() == num_inputs {
break;
}
}
identify_constrained_type_params(tcx,
impl_predicates.predicates.as_slice(),
impl_trait_ref,
&mut input_parameters);

for (index, ty_param) in ast_generics.ty_params.iter().enumerate() {
let param_ty = ty::ParamTy { space: TypeSpace,
@ -2025,11 +1990,4 @@ fn enforce_impl_ty_params_are_constrained<'tcx>(tcx: &ty::ctxt<'tcx>,
}
}
}

fn to_opt_param_ty<'tcx>(ty: Ty<'tcx>) -> Option<ty::ParamTy> {
match ty.sty {
ty::ty_param(ref d) => Some(d.clone()),
_ => None,
}
}
}
61
src/librustc_typeck/constrained_type_params.rs
Normal file
@ -0,0 +1,61 @@
|
||||
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
|
||||
// file at the top-level directory of this distribution and at
|
||||
// http://rust-lang.org/COPYRIGHT.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
||||
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
use middle::ty::{self};
|
||||
|
||||
use std::collections::HashSet;
|
||||
use std::rc::Rc;
|
||||
|
||||
pub fn identify_constrained_type_params<'tcx>(_tcx: &ty::ctxt<'tcx>,
|
||||
predicates: &[ty::Predicate<'tcx>],
|
||||
impl_trait_ref: Option<Rc<ty::TraitRef<'tcx>>>,
|
||||
input_parameters: &mut HashSet<ty::ParamTy>)
|
||||
{
|
||||
loop {
|
||||
let num_inputs = input_parameters.len();
|
||||
|
||||
let projection_predicates =
|
||||
predicates.iter()
|
||||
.filter_map(|predicate| {
|
||||
match *predicate {
|
||||
// Ignore higher-ranked binders. For the purposes
|
||||
// of this check, they don't matter because they
|
||||
// only affect named regions, and we're just
|
||||
// concerned about type parameters here.
|
||||
ty::Predicate::Projection(ref data) => Some(data.0.clone()),
|
||||
_ => None,
|
||||
}
|
||||
});
|
||||
|
||||
for projection in projection_predicates {
|
||||
// Special case: watch out for some kind of sneaky attempt
|
||||
// to project out an associated type defined by this very trait.
|
||||
if Some(projection.projection_ty.trait_ref.clone()) == impl_trait_ref {
|
||||
continue;
|
||||
}
|
||||
|
||||
let relies_only_on_inputs =
|
||||
projection.projection_ty.trait_ref.input_types()
|
||||
.iter()
|
||||
.flat_map(|t| t.walk())
|
||||
.filter_map(|t| t.as_opt_param_ty())
|
||||
.all(|t| input_parameters.contains(&t));
|
||||
|
||||
if relies_only_on_inputs {
|
||||
input_parameters.extend(
|
||||
projection.ty.walk().filter_map(|t| t.as_opt_param_ty()));
|
||||
}
|
||||
}
|
||||
|
||||
if input_parameters.len() == num_inputs {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
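The loop above reaches a fixed point: a type parameter counts as constrained once it is reachable from the impl's inputs through some chain of projection predicates. A minimal, standalone sketch of the kind of impl this check is meant to accept, using made-up trait and struct names rather than anything from this patch:

```
trait Project { type Out; }

struct Wrapper<T>(T);

// `U` never appears in the self type `Wrapper<T>`, but the projection
// predicate `<T as Project>::Out == U` ties it to `T`, which is an
// input; the fixed-point pass above therefore marks `U` as constrained.
impl<T, U> Project for Wrapper<T>
    where T: Project<Out = U>
{
    type Out = U;
}

fn main() {}
```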
|
@ -123,6 +123,7 @@ mod check;
|
||||
mod rscope;
|
||||
mod astconv;
|
||||
mod collect;
|
||||
mod constrained_type_params;
|
||||
mod coherence;
|
||||
mod variance;
|
||||
|
||||
|
@ -187,6 +187,22 @@
|
||||
//! and the definition-site variance of the [corresponding] type parameter
|
||||
//! of a class `C` is `V1`, then the variance of `X` in the type expression
|
||||
//! `C<E>` is `V3 = V1.xform(V2)`.
|
||||
//!
|
||||
//! ### Constraints
|
||||
//!
|
||||
//! If I have a struct or enum with where clauses:
|
||||
//!
|
||||
//! struct Foo<T:Bar> { ... }
|
||||
//!
|
||||
//! you might wonder whether the variance of `T` with respect to `Bar`
|
||||
//! affects the variance of `T` with respect to `Foo`. I claim no. The
|
||||
//! reason: assume that `T` is invariant w/r/t `Bar` but covariant w/r/t
|
||||
//! `Foo`. And then we have a `Foo<X>` that is upcast to `Foo<Y>`, where
|
||||
//! `X <: Y`. However, while `X : Bar`, `Y : Bar` does not hold. In that
|
||||
//! case, the upcast will be illegal, but not because of a variance
|
||||
//! failure, but rather because the target type `Foo<Y>` is itself just
|
||||
//! not well-formed. Basically we get to assume well-formedness of all
|
||||
//! types involved before considering variance.
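A standalone illustration of the claim made in this comment, written against present-day Rust rather than the compiler internals in this patch: the `T: Bar` bound does not change the variance of `T` in `Foo`, and the upcast is accepted precisely when the target type is itself well-formed.

```
use std::marker::PhantomData;

trait Bar {}
impl<'a> Bar for &'a str {}

// `T` appears only covariantly (through PhantomData<T>), so Foo is
// covariant in `T` regardless of the `T: Bar` bound.
struct Foo<T: Bar> {
    _marker: PhantomData<T>,
}

// Accepted: &'long str is a subtype of &'short str, Foo is covariant in
// T, and Foo<&'short str> is well-formed because &'short str: Bar holds.
fn upcast<'long: 'short, 'short>(x: Foo<&'long str>) -> Foo<&'short str> {
    x
}

fn main() {}
```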
|
||||
|
||||
use self::VarianceTerm::*;
|
||||
use self::ParamKind::*;
|
||||
@ -199,7 +215,6 @@ use middle::subst::{ParamSpace, FnSpace, TypeSpace, SelfSpace, VecPerParamSpace}
|
||||
use middle::ty::{self, Ty};
|
||||
use std::fmt;
|
||||
use std::rc::Rc;
|
||||
use std::iter::repeat;
|
||||
use syntax::ast;
|
||||
use syntax::ast_map;
|
||||
use syntax::ast_util;
|
||||
@ -258,6 +273,11 @@ struct TermsContext<'a, 'tcx: 'a> {
|
||||
|
||||
empty_variances: Rc<ty::ItemVariances>,
|
||||
|
||||
// For marker types, UnsafeCell, and other lang items where
|
||||
// variance is hardcoded, records the item-id and the hardcoded
|
||||
// variance.
|
||||
lang_items: Vec<(ast::NodeId, Vec<ty::Variance>)>,
|
||||
|
||||
// Maps from the node id of a type/generic parameter to the
|
||||
// corresponding inferred index.
|
||||
inferred_map: NodeMap<InferredIndex>,
|
||||
@ -269,7 +289,7 @@ struct TermsContext<'a, 'tcx: 'a> {
|
||||
#[derive(Copy, Debug, PartialEq)]
|
||||
enum ParamKind {
|
||||
TypeParam,
|
||||
RegionParam
|
||||
RegionParam,
|
||||
}
|
||||
|
||||
struct InferredInfo<'a> {
|
||||
@ -279,6 +299,11 @@ struct InferredInfo<'a> {
|
||||
index: uint,
|
||||
param_id: ast::NodeId,
|
||||
term: VarianceTermPtr<'a>,
|
||||
|
||||
// Initial value to use for this parameter when inferring
|
||||
// variance. For most parameters, this is Bivariant. But for lang
|
||||
// items and input type parameters on traits, it is different.
|
||||
initial_variance: ty::Variance,
|
||||
}
|
||||
|
||||
fn determine_parameters_to_be_inferred<'a, 'tcx>(tcx: &'a ty::ctxt<'tcx>,
|
||||
@ -291,6 +316,8 @@ fn determine_parameters_to_be_inferred<'a, 'tcx>(tcx: &'a ty::ctxt<'tcx>,
|
||||
inferred_map: NodeMap(),
|
||||
inferred_infos: Vec::new(),
|
||||
|
||||
lang_items: lang_items(tcx),
|
||||
|
||||
// cache and share the variance struct used for items with
|
||||
// no type/region parameters
|
||||
empty_variances: Rc::new(ty::ItemVariances {
|
||||
@ -304,7 +331,78 @@ fn determine_parameters_to_be_inferred<'a, 'tcx>(tcx: &'a ty::ctxt<'tcx>,
|
||||
terms_cx
|
||||
}
|
||||
|
||||
fn lang_items(tcx: &ty::ctxt) -> Vec<(ast::NodeId,Vec<ty::Variance>)> {
|
||||
let all = vec![
|
||||
(tcx.lang_items.phantom_fn(), vec![ty::Contravariant, ty::Covariant]),
|
||||
(tcx.lang_items.phantom_data(), vec![ty::Covariant]),
|
||||
(tcx.lang_items.unsafe_cell_type(), vec![ty::Invariant]),
|
||||
|
||||
// Deprecated:
|
||||
(tcx.lang_items.covariant_type(), vec![ty::Covariant]),
|
||||
(tcx.lang_items.contravariant_type(), vec![ty::Contravariant]),
|
||||
(tcx.lang_items.invariant_type(), vec![ty::Invariant]),
|
||||
(tcx.lang_items.covariant_lifetime(), vec![ty::Covariant]),
|
||||
(tcx.lang_items.contravariant_lifetime(), vec![ty::Contravariant]),
|
||||
(tcx.lang_items.invariant_lifetime(), vec![ty::Invariant]),
|
||||
|
||||
];
|
||||
|
||||
all.into_iter()
|
||||
.filter(|&(ref d,_)| d.is_some())
|
||||
.filter(|&(ref d,_)| d.as_ref().unwrap().krate == ast::LOCAL_CRATE)
|
||||
.map(|(d, v)| (d.unwrap().node, v))
|
||||
.collect()
|
||||
}
|
||||
|
||||
impl<'a, 'tcx> TermsContext<'a, 'tcx> {
|
||||
fn add_inferreds_for_item(&mut self,
|
||||
item_id: ast::NodeId,
|
||||
has_self: bool,
|
||||
generics: &ast::Generics)
|
||||
{
|
||||
/*!
|
||||
* Add "inferreds" for the generic parameters declared on this
|
||||
* item. This has a lot of annoying parameters because we are
|
||||
* trying to drive this from the AST, rather than the
|
||||
* ty::Generics, so that we can get span info -- but this
|
||||
* means we must accommodate syntactic distinctions.
|
||||
*/
|
||||
|
||||
// NB: In the code below for writing the results back into the
|
||||
// tcx, we rely on the fact that all inferreds for a particular
|
||||
// item are assigned continuous indices.
|
||||
|
||||
let inferreds_on_entry = self.num_inferred();
|
||||
|
||||
if has_self {
|
||||
self.add_inferred(item_id, TypeParam, SelfSpace, 0, item_id);
|
||||
}
|
||||
|
||||
for (i, p) in generics.lifetimes.iter().enumerate() {
|
||||
let id = p.lifetime.id;
|
||||
self.add_inferred(item_id, RegionParam, TypeSpace, i, id);
|
||||
}
|
||||
|
||||
for (i, p) in generics.ty_params.iter().enumerate() {
|
||||
self.add_inferred(item_id, TypeParam, TypeSpace, i, p.id);
|
||||
}
|
||||
|
||||
// If this item has no type or lifetime parameters,
|
||||
// then there are no variances to infer, so just
|
||||
// insert an empty entry into the variance map.
|
||||
// Arguably we could just leave the map empty in this
|
||||
// case but it seems cleaner to be able to distinguish
|
||||
// "invalid item id" from "item id with no
|
||||
// parameters".
|
||||
if self.num_inferred() == inferreds_on_entry {
|
||||
let newly_added =
|
||||
self.tcx.item_variance_map.borrow_mut().insert(
|
||||
ast_util::local_def(item_id),
|
||||
self.empty_variances.clone()).is_none();
|
||||
assert!(newly_added);
|
||||
}
|
||||
}
|
||||
|
||||
fn add_inferred(&mut self,
|
||||
item_id: ast::NodeId,
|
||||
kind: ParamKind,
|
||||
@ -313,21 +411,48 @@ impl<'a, 'tcx> TermsContext<'a, 'tcx> {
|
||||
param_id: ast::NodeId) {
|
||||
let inf_index = InferredIndex(self.inferred_infos.len());
|
||||
let term = self.arena.alloc(InferredTerm(inf_index));
|
||||
let initial_variance = self.pick_initial_variance(item_id, space, index);
|
||||
self.inferred_infos.push(InferredInfo { item_id: item_id,
|
||||
kind: kind,
|
||||
space: space,
|
||||
index: index,
|
||||
param_id: param_id,
|
||||
term: term });
|
||||
term: term,
|
||||
initial_variance: initial_variance });
|
||||
let newly_added = self.inferred_map.insert(param_id, inf_index).is_none();
|
||||
assert!(newly_added);
|
||||
|
||||
debug!("add_inferred(item_id={}, \
|
||||
debug!("add_inferred(item_path={}, \
|
||||
item_id={}, \
|
||||
kind={:?}, \
|
||||
space={:?}, \
|
||||
index={}, \
|
||||
param_id={},
|
||||
inf_index={:?})",
|
||||
item_id, kind, index, param_id, inf_index);
|
||||
param_id={}, \
|
||||
inf_index={:?}, \
|
||||
initial_variance={:?})",
|
||||
ty::item_path_str(self.tcx, ast_util::local_def(item_id)),
|
||||
item_id, kind, space, index, param_id, inf_index,
|
||||
initial_variance);
|
||||
}
|
||||
|
||||
fn pick_initial_variance(&self,
|
||||
item_id: ast::NodeId,
|
||||
space: ParamSpace,
|
||||
index: uint)
|
||||
-> ty::Variance
|
||||
{
|
||||
match space {
|
||||
SelfSpace | FnSpace => {
|
||||
ty::Bivariant
|
||||
}
|
||||
|
||||
TypeSpace => {
|
||||
match self.lang_items.iter().find(|&&(n, _)| n == item_id) {
|
||||
Some(&(_, ref variances)) => variances[index],
|
||||
None => ty::Bivariant
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn num_inferred(&self) -> uint {
|
||||
@ -339,44 +464,13 @@ impl<'a, 'tcx, 'v> Visitor<'v> for TermsContext<'a, 'tcx> {
|
||||
fn visit_item(&mut self, item: &ast::Item) {
|
||||
debug!("add_inferreds for item {}", item.repr(self.tcx));
|
||||
|
||||
let inferreds_on_entry = self.num_inferred();
|
||||
|
||||
// NB: In the code below for writing the results back into the
|
||||
// tcx, we rely on the fact that all inferreds for a particular
|
||||
// item are assigned continuous indices.
|
||||
match item.node {
|
||||
ast::ItemTrait(..) => {
|
||||
self.add_inferred(item.id, TypeParam, SelfSpace, 0, item.id);
|
||||
}
|
||||
_ => { }
|
||||
}
|
||||
|
||||
match item.node {
|
||||
ast::ItemEnum(_, ref generics) |
|
||||
ast::ItemStruct(_, ref generics) |
|
||||
ast::ItemStruct(_, ref generics) => {
|
||||
self.add_inferreds_for_item(item.id, false, generics);
|
||||
}
|
||||
ast::ItemTrait(_, ref generics, _, _) => {
|
||||
for (i, p) in generics.lifetimes.iter().enumerate() {
|
||||
let id = p.lifetime.id;
|
||||
self.add_inferred(item.id, RegionParam, TypeSpace, i, id);
|
||||
}
|
||||
for (i, p) in generics.ty_params.iter().enumerate() {
|
||||
self.add_inferred(item.id, TypeParam, TypeSpace, i, p.id);
|
||||
}
|
||||
|
||||
// If this item has no type or lifetime parameters,
|
||||
// then there are no variances to infer, so just
|
||||
// insert an empty entry into the variance map.
|
||||
// Arguably we could just leave the map empty in this
|
||||
// case but it seems cleaner to be able to distinguish
|
||||
// "invalid item id" from "item id with no
|
||||
// parameters".
|
||||
if self.num_inferred() == inferreds_on_entry {
|
||||
let newly_added = self.tcx.item_variance_map.borrow_mut().insert(
|
||||
ast_util::local_def(item.id),
|
||||
self.empty_variances.clone()).is_none();
|
||||
assert!(newly_added);
|
||||
}
|
||||
|
||||
self.add_inferreds_for_item(item.id, true, generics);
|
||||
visit::walk_item(self, item);
|
||||
}
|
||||
|
||||
@ -404,16 +498,6 @@ impl<'a, 'tcx, 'v> Visitor<'v> for TermsContext<'a, 'tcx> {
|
||||
struct ConstraintContext<'a, 'tcx: 'a> {
|
||||
terms_cx: TermsContext<'a, 'tcx>,
|
||||
|
||||
// These are the def-id of the std::marker::InvariantType,
|
||||
// std::marker::InvariantLifetime, and so on. The arrays
|
||||
// are indexed by the `ParamKind` (type, lifetime, self). Note
|
||||
// that there are no marker types for self, so the entries for
|
||||
// self are always None.
|
||||
invariant_lang_items: [Option<ast::DefId>; 2],
|
||||
covariant_lang_items: [Option<ast::DefId>; 2],
|
||||
contravariant_lang_items: [Option<ast::DefId>; 2],
|
||||
unsafe_cell_lang_item: Option<ast::DefId>,
|
||||
|
||||
// These are pointers to common `ConstantTerm` instances
|
||||
covariant: VarianceTermPtr<'a>,
|
||||
contravariant: VarianceTermPtr<'a>,
|
||||
@ -433,40 +517,14 @@ struct Constraint<'a> {
|
||||
|
||||
fn add_constraints_from_crate<'a, 'tcx>(terms_cx: TermsContext<'a, 'tcx>,
|
||||
krate: &ast::Crate)
|
||||
-> ConstraintContext<'a, 'tcx> {
|
||||
let mut invariant_lang_items = [None; 2];
|
||||
let mut covariant_lang_items = [None; 2];
|
||||
let mut contravariant_lang_items = [None; 2];
|
||||
|
||||
covariant_lang_items[TypeParam as uint] =
|
||||
terms_cx.tcx.lang_items.covariant_type();
|
||||
covariant_lang_items[RegionParam as uint] =
|
||||
terms_cx.tcx.lang_items.covariant_lifetime();
|
||||
|
||||
contravariant_lang_items[TypeParam as uint] =
|
||||
terms_cx.tcx.lang_items.contravariant_type();
|
||||
contravariant_lang_items[RegionParam as uint] =
|
||||
terms_cx.tcx.lang_items.contravariant_lifetime();
|
||||
|
||||
invariant_lang_items[TypeParam as uint] =
|
||||
terms_cx.tcx.lang_items.invariant_type();
|
||||
invariant_lang_items[RegionParam as uint] =
|
||||
terms_cx.tcx.lang_items.invariant_lifetime();
|
||||
|
||||
let unsafe_cell_lang_item = terms_cx.tcx.lang_items.unsafe_cell_type();
|
||||
|
||||
-> ConstraintContext<'a, 'tcx>
|
||||
{
|
||||
let covariant = terms_cx.arena.alloc(ConstantTerm(ty::Covariant));
|
||||
let contravariant = terms_cx.arena.alloc(ConstantTerm(ty::Contravariant));
|
||||
let invariant = terms_cx.arena.alloc(ConstantTerm(ty::Invariant));
|
||||
let bivariant = terms_cx.arena.alloc(ConstantTerm(ty::Bivariant));
|
||||
let mut constraint_cx = ConstraintContext {
|
||||
terms_cx: terms_cx,
|
||||
|
||||
invariant_lang_items: invariant_lang_items,
|
||||
covariant_lang_items: covariant_lang_items,
|
||||
contravariant_lang_items: contravariant_lang_items,
|
||||
unsafe_cell_lang_item: unsafe_cell_lang_item,
|
||||
|
||||
covariant: covariant,
|
||||
contravariant: contravariant,
|
||||
invariant: invariant,
|
||||
@ -487,7 +545,13 @@ impl<'a, 'tcx, 'v> Visitor<'v> for ConstraintContext<'a, 'tcx> {
|
||||
|
||||
match item.node {
|
||||
ast::ItemEnum(ref enum_definition, _) => {
|
||||
let generics = &ty::lookup_item_type(tcx, did).generics;
|
||||
let scheme = ty::lookup_item_type(tcx, did);
|
||||
|
||||
// Not entirely obvious: constraints on structs/enums do not
|
||||
// affect the variance of their type parameters. See discussion
|
||||
// in comment at top of module.
|
||||
//
|
||||
// self.add_constraints_from_generics(&scheme.generics);
|
||||
|
||||
// Hack: If we directly call `ty::enum_variants`, it
|
||||
// annoyingly takes it upon itself to run off and
|
||||
@ -505,29 +569,48 @@ impl<'a, 'tcx, 'v> Visitor<'v> for ConstraintContext<'a, 'tcx> {
|
||||
&**ast_variant,
|
||||
/*discriminant*/ 0);
|
||||
for arg_ty in &variant.args {
|
||||
self.add_constraints_from_ty(generics, *arg_ty, self.covariant);
|
||||
self.add_constraints_from_ty(&scheme.generics, *arg_ty, self.covariant);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
ast::ItemStruct(..) => {
|
||||
let generics = &ty::lookup_item_type(tcx, did).generics;
|
||||
let scheme = ty::lookup_item_type(tcx, did);
|
||||
|
||||
// Not entirely obvious: constraints on structs/enums do not
|
||||
// affect the variance of their type parameters. See discussion
|
||||
// in comment at top of module.
|
||||
//
|
||||
// self.add_constraints_from_generics(&scheme.generics);
|
||||
|
||||
let struct_fields = ty::lookup_struct_fields(tcx, did);
|
||||
for field_info in &struct_fields {
|
||||
assert_eq!(field_info.id.krate, ast::LOCAL_CRATE);
|
||||
let field_ty = ty::node_id_to_type(tcx, field_info.id.node);
|
||||
self.add_constraints_from_ty(generics, field_ty, self.covariant);
|
||||
self.add_constraints_from_ty(&scheme.generics, field_ty, self.covariant);
|
||||
}
|
||||
}
|
||||
|
||||
ast::ItemTrait(..) => {
|
||||
let trait_def = ty::lookup_trait_def(tcx, did);
|
||||
let predicates = ty::predicates(tcx, ty::mk_self_type(tcx), &trait_def.bounds);
|
||||
self.add_constraints_from_predicates(&trait_def.generics,
|
||||
&predicates[],
|
||||
self.covariant);
|
||||
|
||||
let trait_items = ty::trait_items(tcx, did);
|
||||
for trait_item in &*trait_items {
|
||||
match *trait_item {
|
||||
ty::MethodTraitItem(ref method) => {
|
||||
self.add_constraints_from_sig(&method.generics,
|
||||
&method.fty.sig,
|
||||
self.covariant);
|
||||
self.add_constraints_from_predicates(
|
||||
&method.generics,
|
||||
method.predicates.predicates.get_slice(FnSpace),
|
||||
self.contravariant);
|
||||
|
||||
self.add_constraints_from_sig(
|
||||
&method.generics,
|
||||
&method.fty.sig,
|
||||
self.covariant);
|
||||
}
|
||||
ty::TypeTraitItem(_) => {}
|
||||
}
|
||||
@ -544,9 +627,10 @@ impl<'a, 'tcx, 'v> Visitor<'v> for ConstraintContext<'a, 'tcx> {
|
||||
ast::ItemTy(..) |
|
||||
ast::ItemImpl(..) |
|
||||
ast::ItemMac(..) => {
|
||||
visit::walk_item(self, item);
|
||||
}
|
||||
}
|
||||
|
||||
visit::walk_item(self, item);
|
||||
}
|
||||
}
|
||||
|
||||
@ -648,15 +732,7 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> {
|
||||
-> VarianceTermPtr<'a> {
|
||||
assert_eq!(param_def_id.krate, item_def_id.krate);
|
||||
|
||||
if self.invariant_lang_items[kind as uint] == Some(item_def_id) {
|
||||
self.invariant
|
||||
} else if self.covariant_lang_items[kind as uint] == Some(item_def_id) {
|
||||
self.covariant
|
||||
} else if self.contravariant_lang_items[kind as uint] == Some(item_def_id) {
|
||||
self.contravariant
|
||||
} else if kind == TypeParam && Some(item_def_id) == self.unsafe_cell_lang_item {
|
||||
self.invariant
|
||||
} else if param_def_id.krate == ast::LOCAL_CRATE {
|
||||
if param_def_id.krate == ast::LOCAL_CRATE {
|
||||
// Parameter on an item defined within current crate:
|
||||
// variance not yet inferred, so return a symbolic
|
||||
// variance.
|
||||
@ -724,6 +800,25 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> {
|
||||
}
|
||||
}
|
||||
|
||||
fn add_constraints_from_trait_ref(&mut self,
|
||||
generics: &ty::Generics<'tcx>,
|
||||
trait_ref: &ty::TraitRef<'tcx>,
|
||||
variance: VarianceTermPtr<'a>) {
|
||||
debug!("add_constraints_from_trait_ref: trait_ref={} variance={:?}",
|
||||
trait_ref.repr(self.tcx()),
|
||||
variance);
|
||||
|
||||
let trait_def = ty::lookup_trait_def(self.tcx(), trait_ref.def_id);
|
||||
|
||||
self.add_constraints_from_substs(
|
||||
generics,
|
||||
trait_ref.def_id,
|
||||
trait_def.generics.types.as_slice(),
|
||||
trait_def.generics.regions.as_slice(),
|
||||
trait_ref.substs,
|
||||
variance);
|
||||
}
|
||||
|
||||
/// Adds constraints appropriate for an instance of `ty` appearing
|
||||
/// in a context with the generics defined in `generics` and
|
||||
/// ambient variance `variance`
|
||||
@ -731,7 +826,9 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> {
|
||||
generics: &ty::Generics<'tcx>,
|
||||
ty: Ty<'tcx>,
|
||||
variance: VarianceTermPtr<'a>) {
|
||||
debug!("add_constraints_from_ty(ty={})", ty.repr(self.tcx()));
|
||||
debug!("add_constraints_from_ty(ty={}, variance={:?})",
|
||||
ty.repr(self.tcx()),
|
||||
variance);
|
||||
|
||||
match ty.sty {
|
||||
ty::ty_bool |
|
||||
@ -754,6 +851,7 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> {
|
||||
self.add_constraints_from_ty(generics, typ, variance);
|
||||
}
|
||||
|
||||
|
||||
ty::ty_ptr(ref mt) => {
|
||||
self.add_constraints_from_mt(generics, mt, variance);
|
||||
}
|
||||
@ -797,27 +895,16 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> {
|
||||
}
|
||||
|
||||
ty::ty_trait(ref data) => {
|
||||
let trait_ref = data.principal_trait_ref_with_self_ty(self.tcx(),
|
||||
self.tcx().types.err);
|
||||
let trait_def = ty::lookup_trait_def(self.tcx(), trait_ref.def_id());
|
||||
|
||||
// Traits never declare region parameters in the self
|
||||
// space nor anything in the fn space.
|
||||
assert!(trait_def.generics.regions.is_empty_in(subst::SelfSpace));
|
||||
assert!(trait_def.generics.types.is_empty_in(subst::FnSpace));
|
||||
assert!(trait_def.generics.regions.is_empty_in(subst::FnSpace));
|
||||
let poly_trait_ref =
|
||||
data.principal_trait_ref_with_self_ty(self.tcx(),
|
||||
self.tcx().types.err);
|
||||
|
||||
// The type `Foo<T+'a>` is contravariant w/r/t `'a`:
|
||||
let contra = self.contravariant(variance);
|
||||
self.add_constraints_from_region(generics, data.bounds.region_bound, contra);
|
||||
|
||||
self.add_constraints_from_substs(
|
||||
generics,
|
||||
trait_ref.def_id(),
|
||||
trait_def.generics.types.get_slice(subst::TypeSpace),
|
||||
trait_def.generics.regions.get_slice(subst::TypeSpace),
|
||||
trait_ref.substs(),
|
||||
variance);
|
||||
// Ignore the SelfSpace, it is erased.
|
||||
self.add_constraints_from_trait_ref(generics, &*poly_trait_ref.0, variance);
|
||||
|
||||
let projections = data.projection_bounds_with_self_ty(self.tcx(),
|
||||
self.tcx().types.err);
|
||||
@ -845,7 +932,12 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> {
|
||||
self.add_constraints_from_sig(generics, sig, variance);
|
||||
}
|
||||
|
||||
ty::ty_infer(..) | ty::ty_err => {
|
||||
ty::ty_err => {
|
||||
// we encounter this when walking the trait references for object
|
||||
// types, where we use ty_err as the Self type
|
||||
}
|
||||
|
||||
ty::ty_infer(..) => {
|
||||
self.tcx().sess.bug(
|
||||
&format!("unexpected type encountered in \
|
||||
variance inference: {}",
|
||||
@ -864,7 +956,10 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> {
|
||||
region_param_defs: &[ty::RegionParameterDef],
|
||||
substs: &subst::Substs<'tcx>,
|
||||
variance: VarianceTermPtr<'a>) {
|
||||
debug!("add_constraints_from_substs(def_id={:?})", def_id);
|
||||
debug!("add_constraints_from_substs(def_id={}, substs={}, variance={:?})",
|
||||
def_id.repr(self.tcx()),
|
||||
substs.repr(self.tcx()),
|
||||
variance);
|
||||
|
||||
for p in type_param_defs {
|
||||
let variance_decl =
|
||||
@ -872,6 +967,8 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> {
|
||||
p.space, p.index as uint);
|
||||
let variance_i = self.xform(variance, variance_decl);
|
||||
let substs_ty = *substs.types.get(p.space, p.index as uint);
|
||||
debug!("add_constraints_from_substs: variance_decl={:?} variance_i={:?}",
|
||||
variance_decl, variance_i);
|
||||
self.add_constraints_from_ty(generics, substs_ty, variance_i);
|
||||
}
|
||||
|
||||
@ -885,6 +982,51 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> {
|
||||
}
|
||||
}
|
||||
|
||||
fn add_constraints_from_predicates(&mut self,
|
||||
generics: &ty::Generics<'tcx>,
|
||||
predicates: &[ty::Predicate<'tcx>],
|
||||
variance: VarianceTermPtr<'a>) {
|
||||
debug!("add_constraints_from_generics({})",
|
||||
generics.repr(self.tcx()));
|
||||
|
||||
for predicate in predicates.iter() {
|
||||
match *predicate {
|
||||
ty::Predicate::Trait(ty::Binder(ref data)) => {
|
||||
self.add_constraints_from_trait_ref(generics, &*data.trait_ref, variance);
|
||||
}
|
||||
|
||||
ty::Predicate::Equate(ty::Binder(ref data)) => {
|
||||
self.add_constraints_from_ty(generics, data.0, variance);
|
||||
self.add_constraints_from_ty(generics, data.1, variance);
|
||||
}
|
||||
|
||||
ty::Predicate::TypeOutlives(ty::Binder(ref data)) => {
|
||||
self.add_constraints_from_ty(generics, data.0, variance);
|
||||
|
||||
let variance_r = self.xform(variance, self.contravariant);
|
||||
self.add_constraints_from_region(generics, data.1, variance_r);
|
||||
}
|
||||
|
||||
ty::Predicate::RegionOutlives(ty::Binder(ref data)) => {
|
||||
// `'a : 'b` is still true if 'a gets bigger
|
||||
self.add_constraints_from_region(generics, data.0, variance);
|
||||
|
||||
// `'a : 'b` is still true if 'b gets smaller
|
||||
let variance_r = self.xform(variance, self.contravariant);
|
||||
self.add_constraints_from_region(generics, data.1, variance_r);
|
||||
}
|
||||
|
||||
ty::Predicate::Projection(ty::Binder(ref data)) => {
|
||||
self.add_constraints_from_trait_ref(generics,
|
||||
&*data.projection_ty.trait_ref,
|
||||
variance);
|
||||
|
||||
self.add_constraints_from_ty(generics, data.ty, self.invariant);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Adds constraints appropriate for a function with signature
|
||||
/// `sig` appearing in a context with ambient variance `variance`
|
||||
fn add_constraints_from_sig(&mut self,
|
||||
@ -969,7 +1111,12 @@ struct SolveContext<'a, 'tcx: 'a> {
|
||||
|
||||
fn solve_constraints(constraints_cx: ConstraintContext) {
|
||||
let ConstraintContext { terms_cx, constraints, .. } = constraints_cx;
|
||||
let solutions: Vec<_> = repeat(ty::Bivariant).take(terms_cx.num_inferred()).collect();
|
||||
|
||||
let solutions =
|
||||
terms_cx.inferred_infos.iter()
|
||||
.map(|ii| ii.initial_variance)
|
||||
.collect();
|
||||
|
||||
let mut solutions_cx = SolveContext {
|
||||
terms_cx: terms_cx,
|
||||
constraints: constraints,
|
||||
@ -1034,20 +1181,16 @@ impl<'a, 'tcx> SolveContext<'a, 'tcx> {
|
||||
let mut types = VecPerParamSpace::empty();
|
||||
let mut regions = VecPerParamSpace::empty();
|
||||
|
||||
while index < num_inferred &&
|
||||
inferred_infos[index].item_id == item_id {
|
||||
while index < num_inferred && inferred_infos[index].item_id == item_id {
|
||||
let info = &inferred_infos[index];
|
||||
let variance = solutions[index];
|
||||
debug!("Index {} Info {} / {:?} / {:?} Variance {:?}",
|
||||
index, info.index, info.kind, info.space, variance);
|
||||
match info.kind {
|
||||
TypeParam => {
|
||||
types.push(info.space, variance);
|
||||
}
|
||||
RegionParam => {
|
||||
regions.push(info.space, variance);
|
||||
}
|
||||
TypeParam => { types.push(info.space, variance); }
|
||||
RegionParam => { regions.push(info.space, variance); }
|
||||
}
|
||||
|
||||
index += 1;
|
||||
}
|
||||
|
||||
@ -1144,3 +1287,4 @@ fn glb(v1: ty::Variance, v2: ty::Variance) -> ty::Variance {
|
||||
(x, ty::Bivariant) | (ty::Bivariant, x) => x,
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -11,6 +11,7 @@
|
||||
use clone::Clone;
|
||||
use default::Default;
|
||||
use hash;
|
||||
use marker;
|
||||
|
||||
/// A trait representing stateful hashes which can be used to hash keys in a
|
||||
/// `HashMap`.
|
||||
@ -37,7 +38,7 @@ pub trait HashState {
|
||||
///
|
||||
/// This struct is 0-sized and does not need construction.
|
||||
#[unstable(feature = "std_misc", reason = "hasher stuff is unclear")]
|
||||
pub struct DefaultState<H>;
|
||||
pub struct DefaultState<H>(marker::PhantomData<H>);
|
||||
|
||||
impl<H: Default + hash::Hasher> HashState for DefaultState<H> {
|
||||
type Hasher = H;
|
||||
@ -45,9 +46,9 @@ impl<H: Default + hash::Hasher> HashState for DefaultState<H> {
|
||||
}
|
||||
|
||||
impl<H> Clone for DefaultState<H> {
|
||||
fn clone(&self) -> DefaultState<H> { DefaultState }
|
||||
fn clone(&self) -> DefaultState<H> { DefaultState(marker::PhantomData) }
|
||||
}
|
||||
|
||||
impl<H> Default for DefaultState<H> {
|
||||
fn default() -> DefaultState<H> { DefaultState }
|
||||
fn default() -> DefaultState<H> { DefaultState(marker::PhantomData) }
|
||||
}
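The change above is one instance of a pattern that recurs throughout this PR: with variance inference, a type parameter can no longer be left entirely unused, so zero-sized "tag" structs gain a `PhantomData` field. A hedged sketch of the pattern in isolation, with made-up names:

```
use std::marker::PhantomData;

// Still zero-sized and trivially constructible, but `H` is now
// mentioned in the body, which gives it a definite (covariant) variance.
struct Tag<H>(PhantomData<H>);

impl<H> Tag<H> {
    fn new() -> Tag<H> { Tag(PhantomData) }
}

fn main() {
    let _tag: Tag<u64> = Tag::new();
}
```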
|
||||
|
@ -23,8 +23,8 @@ use num::{Int, UnsignedInt};
|
||||
use ops::{Deref, DerefMut, Drop};
|
||||
use option::Option;
|
||||
use option::Option::{Some, None};
|
||||
use ptr::{self, PtrExt, copy_nonoverlapping_memory, zero_memory};
|
||||
use rt::heap::{allocate, deallocate};
|
||||
use ptr::{self, PtrExt, copy_nonoverlapping_memory, Unique, zero_memory};
|
||||
use rt::heap::{allocate, deallocate, EMPTY};
|
||||
use collections::hash_state::HashState;
|
||||
|
||||
const EMPTY_BUCKET: u64 = 0u64;
|
||||
@ -69,10 +69,11 @@ const EMPTY_BUCKET: u64 = 0u64;
|
||||
pub struct RawTable<K, V> {
|
||||
capacity: usize,
|
||||
size: usize,
|
||||
hashes: *mut u64,
|
||||
hashes: Unique<u64>,
|
||||
|
||||
// Because K/V do not appear directly in any of the types in the struct,
|
||||
// inform rustc that in fact instances of K and V are reachable from here.
|
||||
marker: marker::CovariantType<(K,V)>,
|
||||
marker: marker::PhantomData<(K,V)>,
|
||||
}
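Because `RawTable` only stores raw pointers, none of its field types mention `K` or `V`; the `PhantomData<(K,V)>` field is what tells the compiler that the table logically owns keys and values, which feeds into variance, the auto traits, and drop checking. A reduced sketch of the same idea, deliberately not tied to the real hash-table layout:

```
use std::marker::PhantomData;
use std::ptr;

struct MiniTable<K, V> {
    // Raw storage only; K and V are not mentioned by these fields.
    hashes: *mut u64,
    len: usize,
    // Declares that a MiniTable<K, V> behaves as an owner of Ks and Vs.
    _own: PhantomData<(K, V)>,
}

impl<K, V> MiniTable<K, V> {
    fn empty() -> MiniTable<K, V> {
        MiniTable { hashes: ptr::null_mut(), len: 0, _own: PhantomData }
    }
}

fn main() {
    let _t: MiniTable<String, u32> = MiniTable::empty();
}
```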
|
||||
|
||||
unsafe impl<K: Send, V: Send> Send for RawTable<K, V> {}
|
||||
@ -81,7 +82,8 @@ unsafe impl<K: Sync, V: Sync> Sync for RawTable<K, V> {}
|
||||
struct RawBucket<K, V> {
|
||||
hash: *mut u64,
|
||||
key: *mut K,
|
||||
val: *mut V
|
||||
val: *mut V,
|
||||
_marker: marker::PhantomData<(K,V)>,
|
||||
}
|
||||
|
||||
impl<K,V> Copy for RawBucket<K,V> {}
|
||||
@ -187,11 +189,12 @@ fn can_alias_safehash_as_u64() {
|
||||
}
|
||||
|
||||
impl<K, V> RawBucket<K, V> {
|
||||
unsafe fn offset(self, count: int) -> RawBucket<K, V> {
|
||||
unsafe fn offset(self, count: isize) -> RawBucket<K, V> {
|
||||
RawBucket {
|
||||
hash: self.hash.offset(count),
|
||||
key: self.key.offset(count),
|
||||
val: self.val.offset(count),
|
||||
_marker: marker::PhantomData,
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -584,10 +587,11 @@ impl<K, V> RawTable<K, V> {
|
||||
return RawTable {
|
||||
size: 0,
|
||||
capacity: 0,
|
||||
hashes: ptr::null_mut(),
|
||||
marker: marker::CovariantType,
|
||||
hashes: Unique::new(EMPTY as *mut u64),
|
||||
marker: marker::PhantomData,
|
||||
};
|
||||
}
|
||||
|
||||
// No need for `checked_mul` before a more restrictive check performed
|
||||
// later in this method.
|
||||
let hashes_size = capacity * size_of::<u64>();
|
||||
@ -623,8 +627,8 @@ impl<K, V> RawTable<K, V> {
|
||||
RawTable {
|
||||
capacity: capacity,
|
||||
size: 0,
|
||||
hashes: hashes,
|
||||
marker: marker::CovariantType,
|
||||
hashes: Unique::new(hashes),
|
||||
marker: marker::PhantomData,
|
||||
}
|
||||
}
|
||||
|
||||
@ -632,16 +636,17 @@ impl<K, V> RawTable<K, V> {
|
||||
let hashes_size = self.capacity * size_of::<u64>();
|
||||
let keys_size = self.capacity * size_of::<K>();
|
||||
|
||||
let buffer = self.hashes as *mut u8;
|
||||
let buffer = *self.hashes as *mut u8;
|
||||
let (keys_offset, vals_offset) = calculate_offsets(hashes_size,
|
||||
keys_size, min_align_of::<K>(),
|
||||
min_align_of::<V>());
|
||||
|
||||
unsafe {
|
||||
RawBucket {
|
||||
hash: self.hashes,
|
||||
hash: *self.hashes,
|
||||
key: buffer.offset(keys_offset as isize) as *mut K,
|
||||
val: buffer.offset(vals_offset as isize) as *mut V
|
||||
val: buffer.offset(vals_offset as isize) as *mut V,
|
||||
_marker: marker::PhantomData,
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -651,7 +656,7 @@ impl<K, V> RawTable<K, V> {
|
||||
pub fn new(capacity: usize) -> RawTable<K, V> {
|
||||
unsafe {
|
||||
let ret = RawTable::new_uninitialized(capacity);
|
||||
zero_memory(ret.hashes, capacity);
|
||||
zero_memory(*ret.hashes, capacity);
|
||||
ret
|
||||
}
|
||||
}
|
||||
@ -673,7 +678,7 @@ impl<K, V> RawTable<K, V> {
|
||||
hashes_end: unsafe {
|
||||
self.hashes.offset(self.capacity as isize)
|
||||
},
|
||||
marker: marker::ContravariantLifetime,
|
||||
marker: marker::PhantomData,
|
||||
}
|
||||
}
|
||||
|
||||
@ -698,7 +703,7 @@ impl<K, V> RawTable<K, V> {
|
||||
iter: RawBuckets {
|
||||
raw: raw,
|
||||
hashes_end: hashes_end,
|
||||
marker: marker::ContravariantLifetime,
|
||||
marker: marker::PhantomData,
|
||||
},
|
||||
table: self,
|
||||
}
|
||||
@ -711,7 +716,7 @@ impl<K, V> RawTable<K, V> {
|
||||
iter: RawBuckets {
|
||||
raw: raw,
|
||||
hashes_end: hashes_end,
|
||||
marker: marker::ContravariantLifetime::<'static>,
|
||||
marker: marker::PhantomData,
|
||||
},
|
||||
table: self,
|
||||
}
|
||||
@ -725,7 +730,7 @@ impl<K, V> RawTable<K, V> {
|
||||
raw: raw_bucket.offset(self.capacity as isize),
|
||||
hashes_end: raw_bucket.hash,
|
||||
elems_left: self.size,
|
||||
marker: marker::ContravariantLifetime,
|
||||
marker: marker::PhantomData,
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -735,7 +740,13 @@ impl<K, V> RawTable<K, V> {
|
||||
struct RawBuckets<'a, K, V> {
|
||||
raw: RawBucket<K, V>,
|
||||
hashes_end: *mut u64,
|
||||
marker: marker::ContravariantLifetime<'a>,
|
||||
|
||||
// Strictly speaking, this should be &'a (K,V), but that would
|
||||
// require that K:'a, and we often use RawBuckets<'static...> for
|
||||
// move iterations, so that messes up a lot of other things. So
|
||||
// just use `&'a ()` as this is not a publicly exposed type
|
||||
// anyway.
|
||||
marker: marker::PhantomData<&'a ()>,
|
||||
}
|
||||
|
||||
// FIXME(#19839) Remove in favor of `#[derive(Clone)]`
|
||||
@ -744,7 +755,7 @@ impl<'a, K, V> Clone for RawBuckets<'a, K, V> {
|
||||
RawBuckets {
|
||||
raw: self.raw,
|
||||
hashes_end: self.hashes_end,
|
||||
marker: marker::ContravariantLifetime,
|
||||
marker: marker::PhantomData,
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -776,7 +787,11 @@ struct RevMoveBuckets<'a, K, V> {
|
||||
raw: RawBucket<K, V>,
|
||||
hashes_end: *mut u64,
|
||||
elems_left: usize,
|
||||
marker: marker::ContravariantLifetime<'a>,
|
||||
|
||||
// As above, `&'a (K,V)` would seem better, but we often use
|
||||
// 'static for the lifetime, and this is not a publicly exposed
|
||||
// type.
|
||||
marker: marker::PhantomData<&'a ()>,
|
||||
}
|
||||
|
||||
impl<'a, K, V> Iterator for RevMoveBuckets<'a, K, V> {
|
||||
@ -983,9 +998,10 @@ impl<K: Clone, V: Clone> Clone for RawTable<K, V> {
|
||||
#[unsafe_destructor]
|
||||
impl<K, V> Drop for RawTable<K, V> {
|
||||
fn drop(&mut self) {
|
||||
if self.hashes.is_null() {
|
||||
if self.capacity == 0 {
|
||||
return;
|
||||
}
|
||||
|
||||
// This is done in reverse because we've likely partially taken
|
||||
// some elements out with `.into_iter()` from the front.
|
||||
// Check if the size is 0, so we don't do a useless scan when
|
||||
@ -1003,7 +1019,7 @@ impl<K, V> Drop for RawTable<K, V> {
|
||||
vals_size, min_align_of::<V>());
|
||||
|
||||
unsafe {
|
||||
deallocate(self.hashes as *mut u8, size, align);
|
||||
deallocate(*self.hashes as *mut u8, size, align);
|
||||
// Remember how everything was allocated out of one buffer
|
||||
// during initialization? We only need one call to free here.
|
||||
}
|
||||
|
@ -252,7 +252,7 @@ use error::Error;
|
||||
use fmt;
|
||||
use isize;
|
||||
use iter::{Iterator, IteratorExt};
|
||||
use marker::Sized;
|
||||
use marker::{PhantomFn, Sized};
|
||||
use mem::transmute;
|
||||
use ops::FnOnce;
|
||||
use option::Option;
|
||||
@ -433,7 +433,7 @@ pub enum IoErrorKind {
|
||||
}
|
||||
|
||||
/// A trait that lets you add a `detail` to an IoError easily
|
||||
trait UpdateIoError<T> {
|
||||
trait UpdateIoError {
|
||||
/// Returns an IoError with updated description and detail
|
||||
fn update_err<D>(self, desc: &'static str, detail: D) -> Self where
|
||||
D: FnOnce(&IoError) -> String;
|
||||
@ -446,7 +446,7 @@ trait UpdateIoError<T> {
|
||||
fn update_desc(self, desc: &'static str) -> Self;
|
||||
}
|
||||
|
||||
impl<T> UpdateIoError<T> for IoResult<T> {
|
||||
impl<T> UpdateIoError for IoResult<T> {
|
||||
fn update_err<D>(self, desc: &'static str, detail: D) -> IoResult<T> where
|
||||
D: FnOnce(&IoError) -> String,
|
||||
{
|
||||
@ -1572,7 +1572,9 @@ pub trait Seek {
|
||||
/// connections.
|
||||
///
|
||||
/// Doing so produces some sort of Acceptor.
|
||||
pub trait Listener<T, A: Acceptor<T>> {
|
||||
pub trait Listener<T, A: Acceptor<T>>
|
||||
: PhantomFn<T,T> // FIXME should be an assoc type anyhow
|
||||
{
|
||||
/// Spin up the listener and start queuing incoming connections
|
||||
///
|
||||
/// # Error
|
||||
|
@ -153,7 +153,7 @@ use any::Any;
|
||||
use cell::UnsafeCell;
|
||||
use fmt;
|
||||
use io;
|
||||
use marker;
|
||||
use marker::PhantomData;
|
||||
use old_io::stdio;
|
||||
use rt::{self, unwind};
|
||||
use sync::{Mutex, Condvar, Arc};
|
||||
@ -260,7 +260,7 @@ impl Builder {
|
||||
T: Send + 'a, F: FnOnce() -> T, F: Send + 'a
|
||||
{
|
||||
self.spawn_inner(Thunk::new(f)).map(|inner| {
|
||||
JoinGuard { inner: inner, _marker: marker::CovariantType }
|
||||
JoinGuard { inner: inner, _marker: PhantomData }
|
||||
})
|
||||
}
|
||||
|
||||
@ -642,7 +642,7 @@ impl Drop for JoinHandle {
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
pub struct JoinGuard<'a, T: 'a> {
|
||||
inner: JoinInner<T>,
|
||||
_marker: marker::CovariantType<&'a T>,
|
||||
_marker: PhantomData<&'a T>,
|
||||
}
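Here `PhantomData<&'a T>` replaces the old `CovariantType<&'a T>` marker: the guard does not actually store a reference, but for variance and borrow checking it should behave as though it borrowed from the spawning scope for `'a`. A reduced sketch with the real `JoinInner` swapped for a placeholder field:

```
use std::marker::PhantomData;

struct Guard<'a, T: 'a> {
    result: Option<T>,           // stand-in for the real JoinInner<T>
    _marker: PhantomData<&'a T>, // ties the guard to the scope lifetime 'a
}

fn make_guard<'a, T>(value: T, _scope: &'a ()) -> Guard<'a, T> {
    Guard { result: Some(value), _marker: PhantomData }
}

fn main() {
    let scope = ();
    let guard = make_guard(42u32, &scope);
    assert_eq!(guard.result, Some(42));
}
```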
|
||||
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
|
@ -45,11 +45,11 @@ fn no_prelude(attrs: &[ast::Attribute]) -> bool {
|
||||
attr::contains_name(attrs, "no_implicit_prelude")
|
||||
}
|
||||
|
||||
struct StandardLibraryInjector<'a> {
|
||||
alt_std_name: Option<String>
|
||||
struct StandardLibraryInjector {
|
||||
alt_std_name: Option<String>,
|
||||
}
|
||||
|
||||
impl<'a> fold::Folder for StandardLibraryInjector<'a> {
|
||||
impl fold::Folder for StandardLibraryInjector {
|
||||
fn fold_crate(&mut self, mut krate: ast::Crate) -> ast::Crate {
|
||||
|
||||
// The name to use in `extern crate "name" as std;`
|
||||
@ -80,9 +80,10 @@ fn inject_crates_ref(krate: ast::Crate, alt_std_name: Option<String>) -> ast::Cr
|
||||
fold.fold_crate(krate)
|
||||
}
|
||||
|
||||
struct PreludeInjector<'a>;
|
||||
struct PreludeInjector;
|
||||
|
||||
impl<'a> fold::Folder for PreludeInjector<'a> {
|
||||
|
||||
impl fold::Folder for PreludeInjector {
|
||||
fn fold_crate(&mut self, mut krate: ast::Crate) -> ast::Crate {
|
||||
// only add `use std::prelude::*;` if there wasn't a
|
||||
// `#![no_implicit_prelude]` at the crate level.
|
||||
|
@ -50,7 +50,6 @@
|
||||
html_playground_url = "http://play.rust-lang.org/")]
|
||||
#![deny(missing_docs)]
|
||||
|
||||
#![feature(core)]
|
||||
#![feature(box_syntax)]
|
||||
#![feature(collections)]
|
||||
#![feature(int_uint)]
|
||||
|
@ -8,7 +8,7 @@
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
pub trait TheTrait<T> {
|
||||
pub trait TheTrait<T> : ::std::marker::PhantomFn<T> {
|
||||
fn the_fn(&self);
|
||||
}
|
||||
|
||||
|
@ -12,4 +12,5 @@ pub struct Heap;
|
||||
|
||||
pub struct FakeHeap;
|
||||
|
||||
pub struct FakeVec<T, A = FakeHeap>;
|
||||
pub struct FakeVec<T, A = FakeHeap> { pub f: Option<(T,A)> }
|
||||
|
||||
|
@ -8,11 +8,11 @@
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
pub struct A<T>;
|
||||
pub struct B<T>;
|
||||
pub struct A<T> { pub v: T }
|
||||
pub struct B<T> { pub v: T }
|
||||
|
||||
pub mod test {
|
||||
pub struct A<T>;
|
||||
pub struct A<T> { pub v: T }
|
||||
|
||||
impl<T> A<T> {
|
||||
pub fn foo(&self) -> int {
|
||||
@ -52,9 +52,9 @@ impl<T> B<T> {
|
||||
}
|
||||
|
||||
pub fn foo() -> int {
|
||||
let a = A::<()>;
|
||||
let b = B::<()>;
|
||||
let c = test::A::<()>;
|
||||
let a = A { v: () };
|
||||
let b = B { v: () };
|
||||
let c = test::A { v: () };
|
||||
return a.foo() + a.bar() +
|
||||
b.foo() + b.bar() +
|
||||
c.foo() + c.bar();
|
||||
|
@ -10,6 +10,7 @@
|
||||
|
||||
#![crate_type="lib"]
|
||||
#![deny(warnings)]
|
||||
#![allow(dead_code)]
|
||||
|
||||
pub use src::aliases::B;
|
||||
pub use src::hidden_core::make;
|
||||
@ -23,9 +24,9 @@ mod src {
|
||||
pub mod hidden_core {
|
||||
use super::aliases::B;
|
||||
|
||||
pub struct A<T>;
|
||||
pub struct A<T> { t: T }
|
||||
|
||||
pub fn make() -> B { A }
|
||||
pub fn make() -> B { A { t: 1.0 } }
|
||||
|
||||
impl<T> A<T> {
|
||||
pub fn foo(&mut self) { println!("called foo"); }
|
||||
|
@ -10,7 +10,7 @@
|
||||
|
||||
#![crate_type = "lib"]
|
||||
|
||||
pub struct TreeBuilder<H>;
|
||||
pub struct TreeBuilder<H> { pub h: H }
|
||||
|
||||
impl<H> TreeBuilder<H> {
|
||||
pub fn process_token(&mut self) {
|
||||
|
@ -11,7 +11,7 @@
|
||||
#![crate_type = "lib"]
|
||||
|
||||
pub trait Foo<'a, T> {
|
||||
fn foo(&self) -> T;
|
||||
fn foo(&'a self) -> T;
|
||||
}
|
||||
|
||||
pub fn foo<'a, T>(x: &'a Foo<'a, T>) -> T {
|
||||
|
@ -14,7 +14,10 @@
|
||||
#![allow(unknown_features)]
|
||||
#![feature(box_syntax)]
|
||||
|
||||
pub trait i<T> { }
|
||||
pub trait i<T>
|
||||
{
|
||||
fn dummy(&self, t: T) -> T { panic!() }
|
||||
}
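As the edits in this and the surrounding test files suggest, a trait's type parameter now has to appear somewhere in the trait (a method signature, a supertrait bound, or similar), and the least invasive fix in test code is a defaulted `dummy` method. The same pattern with a hypothetical trait:

```
// A bare `trait Producer<T> {}` would leave `T` unused; mentioning it
// in a defaulted method gives the parameter a determined variance.
trait Producer<T> {
    fn dummy(&self) -> Option<T> { None }
}

impl Producer<u32> for () {}

fn main() {
    let value: Option<u32> = ().dummy();
    assert_eq!(value, None);
}
```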
|
||||
|
||||
pub fn f<T>() -> Box<i<T>+'static> {
|
||||
impl<T> i<T> for () { }
|
||||
|
@ -13,8 +13,11 @@
|
||||
|
||||
#![feature(unsafe_destructor)]
|
||||
|
||||
use std::marker;
|
||||
|
||||
struct arc_destruct<T> {
|
||||
_data: int,
|
||||
_data: int,
|
||||
_marker: marker::PhantomData<T>
|
||||
}
|
||||
|
||||
#[unsafe_destructor]
|
||||
@ -24,7 +27,8 @@ impl<T: Sync> Drop for arc_destruct<T> {
|
||||
|
||||
fn arc_destruct<T: Sync>(data: int) -> arc_destruct<T> {
|
||||
arc_destruct {
|
||||
_data: data
|
||||
_data: data,
|
||||
_marker: marker::PhantomData
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -10,4 +10,5 @@
|
||||
|
||||
pub trait T {
|
||||
type C;
|
||||
fn dummy(&self) { }
|
||||
}
|
||||
|
@ -8,7 +8,9 @@
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
pub trait Foo {
|
||||
use std::marker::MarkerTrait;
|
||||
|
||||
pub trait Foo : MarkerTrait {
|
||||
fn bar();
|
||||
}
|
||||
|
||||
|
@ -12,7 +12,9 @@
|
||||
|
||||
use std::mem;
|
||||
|
||||
trait A {}
|
||||
trait A {
|
||||
fn dummy(&self) { }
|
||||
}
|
||||
struct B;
|
||||
impl A for B {}
|
||||
|
||||
|
@ -15,5 +15,6 @@ pub trait X {
|
||||
fn f() { }
|
||||
f();
|
||||
}
|
||||
fn dummy(&self) { }
|
||||
}
|
||||
|
||||
|
@ -12,8 +12,12 @@
|
||||
#![no_std]
|
||||
#![feature(lang_items)]
|
||||
|
||||
#[lang="phantom_fn"]
|
||||
pub trait PhantomFn<A:?Sized,R:?Sized=()> { }
|
||||
impl<A:?Sized, R:?Sized, U:?Sized> PhantomFn<A,R> for U { }
|
||||
|
||||
#[lang="sized"]
|
||||
pub trait Sized {}
|
||||
pub trait Sized : PhantomFn<Self> {}
|
||||
|
||||
#[lang="panic"]
|
||||
fn panic(_: &(&'static str, &'static str, uint)) -> ! { loop {} }
|
||||
@ -25,6 +29,8 @@ extern fn stack_exhausted() {}
|
||||
extern fn eh_personality() {}
|
||||
|
||||
#[lang="copy"]
|
||||
pub trait Copy {}
|
||||
pub trait Copy : PhantomFn<Self> {
|
||||
// Empty.
|
||||
}
|
||||
|
||||
|
||||
|
@ -96,7 +96,7 @@ pub trait Trait {
|
||||
impl Trait for MethodTester {}
|
||||
|
||||
#[unstable(feature = "test_feature")]
|
||||
pub trait UnstableTrait {}
|
||||
pub trait UnstableTrait { fn dummy(&self) { } }
|
||||
|
||||
#[stable(feature = "test_feature", since = "1.0.0")]
|
||||
#[deprecated(since = "1.0.0")]
|
||||
|
@ -25,7 +25,7 @@ impl Foo {
|
||||
}
|
||||
|
||||
// issue 8134
|
||||
pub struct Parser<T>;
|
||||
pub struct Parser<T>(T);
|
||||
impl<T: std::iter::Iterator<Item=char>> Parser<T> {
|
||||
fn in_doctype(&mut self) {
|
||||
static DOCTYPEPattern: [char; 6] = ['O', 'C', 'T', 'Y', 'P', 'E'];
|
||||
|
@ -8,4 +8,4 @@
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
pub trait RemoteTrait {}
|
||||
pub trait RemoteTrait { fn dummy(&self) { } }
|
||||
|
@ -11,7 +11,8 @@
|
||||
use std::ops::Deref;
|
||||
|
||||
struct DerefWithHelper<H, T> {
|
||||
pub helper: H
|
||||
pub helper: H,
|
||||
pub value: Option<T>
|
||||
}
|
||||
|
||||
trait Helper<T> {
|
||||
@ -34,6 +35,6 @@ impl<T, H: Helper<T>> Deref for DerefWithHelper<H, T> {
|
||||
|
||||
// Test cross-crate autoderef + vtable.
|
||||
pub fn check<T: PartialEq>(x: T, y: T) -> bool {
|
||||
let d: DerefWithHelper<Option<T>, T> = DerefWithHelper { helper: Some(x) };
|
||||
let d: DerefWithHelper<Option<T>, T> = DerefWithHelper { helper: Some(x), value: None };
|
||||
d.eq(&y)
|
||||
}
|
||||
|
@ -8,4 +8,4 @@
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
trait Foo {}
|
||||
trait Foo : ::std::marker::MarkerTrait {}
|
||||
|
@ -15,12 +15,14 @@
|
||||
|
||||
#![crate_name = "a"]
|
||||
|
||||
use std::marker::MarkerTrait;
|
||||
|
||||
macro_rules! three {
|
||||
() => { 3 }
|
||||
}
|
||||
|
||||
pub trait U {}
|
||||
pub trait V {}
|
||||
pub trait U : MarkerTrait {}
|
||||
pub trait V : MarkerTrait {}
|
||||
impl U for () {}
|
||||
impl V for () {}
|
||||
|
||||
|
@ -15,12 +15,14 @@
|
||||
|
||||
#![crate_name = "a"]
|
||||
|
||||
use std::marker::MarkerTrait;
|
||||
|
||||
macro_rules! three {
|
||||
() => { 3 }
|
||||
}
|
||||
|
||||
pub trait U {}
|
||||
pub trait V {}
|
||||
pub trait U : MarkerTrait {}
|
||||
pub trait V : MarkerTrait {}
|
||||
impl U for () {}
|
||||
impl V for () {}
|
||||
|
||||
|
@ -15,12 +15,14 @@
|
||||
|
||||
#![crate_name = "a"]
|
||||
|
||||
use std::marker::MarkerTrait;
|
||||
|
||||
macro_rules! three {
|
||||
() => { 3 }
|
||||
}
|
||||
|
||||
pub trait U {}
|
||||
pub trait V {}
|
||||
pub trait U : MarkerTrait {}
|
||||
pub trait V : MarkerTrait {}
|
||||
impl U for () {}
|
||||
impl V for () {}
|
||||
|
||||
|
@ -15,12 +15,14 @@
|
||||
|
||||
#![crate_name = "a"]
|
||||
|
||||
use std::marker::MarkerTrait;
|
||||
|
||||
macro_rules! three {
|
||||
() => { 3 }
|
||||
}
|
||||
|
||||
pub trait U {}
|
||||
pub trait V {}
|
||||
pub trait U : MarkerTrait {}
|
||||
pub trait V : MarkerTrait {}
|
||||
impl U for () {}
|
||||
impl V for () {}
|
||||
|
||||
|
@ -15,12 +15,14 @@
|
||||
|
||||
#![crate_name = "a"]
|
||||
|
||||
use std::marker::MarkerTrait;
|
||||
|
||||
macro_rules! three {
|
||||
() => { 3 }
|
||||
}
|
||||
|
||||
pub trait U {}
|
||||
pub trait V {}
|
||||
pub trait U : MarkerTrait {}
|
||||
pub trait V : MarkerTrait {}
|
||||
impl U for () {}
|
||||
impl V for () {}
|
||||
|
||||
|
@ -15,12 +15,14 @@
|
||||
|
||||
#![crate_name = "a"]
|
||||
|
||||
use std::marker::MarkerTrait;
|
||||
|
||||
macro_rules! three {
|
||||
() => { 3 }
|
||||
}
|
||||
|
||||
pub trait U {}
|
||||
pub trait V {}
|
||||
pub trait U : MarkerTrait {}
|
||||
pub trait V : MarkerTrait {}
|
||||
impl U for () {}
|
||||
impl V for () {}
|
||||
|
||||
|
@ -15,12 +15,14 @@
|
||||
|
||||
#![crate_name = "a"]
|
||||
|
||||
use std::marker::MarkerTrait;
|
||||
|
||||
macro_rules! three {
|
||||
() => { 3 }
|
||||
}
|
||||
|
||||
pub trait U {}
|
||||
pub trait V {}
|
||||
pub trait U : MarkerTrait {}
|
||||
pub trait V : MarkerTrait {}
|
||||
impl U for () {}
|
||||
impl V for () {}
|
||||
|
||||
|
@ -15,12 +15,14 @@
|
||||
|
||||
#![crate_name = "a"]
|
||||
|
||||
use std::marker::MarkerTrait;
|
||||
|
||||
macro_rules! three {
|
||||
() => { 3 }
|
||||
}
|
||||
|
||||
pub trait U {}
|
||||
pub trait V {}
|
||||
pub trait U : MarkerTrait {}
|
||||
pub trait V : MarkerTrait {}
|
||||
impl U for () {}
|
||||
impl V for () {}
|
||||
|
||||
|
@ -15,12 +15,14 @@
|
||||
|
||||
#![crate_name = "a"]
|
||||
|
||||
use std::marker::MarkerTrait;
|
||||
|
||||
macro_rules! three {
|
||||
() => { 3 }
|
||||
}
|
||||
|
||||
pub trait U {}
|
||||
pub trait V {}
|
||||
pub trait U : MarkerTrait {}
|
||||
pub trait V : MarkerTrait {}
|
||||
impl U for () {}
|
||||
impl V for () {}
|
||||
|
||||
|
@ -15,12 +15,14 @@
|
||||
|
||||
#![crate_name = "a"]
|
||||
|
||||
use std::marker::MarkerTrait;
|
||||
|
||||
macro_rules! three {
|
||||
() => { 3 }
|
||||
}
|
||||
|
||||
pub trait U {}
|
||||
pub trait V {}
|
||||
pub trait U : MarkerTrait {}
|
||||
pub trait V : MarkerTrait {}
|
||||
impl U for () {}
|
||||
impl V for () {}
|
||||
|
||||
|
@ -15,12 +15,14 @@
|
||||
|
||||
#![crate_name = "a"]
|
||||
|
||||
use std::marker::MarkerTrait;
|
||||
|
||||
macro_rules! three {
|
||||
() => { 3 }
|
||||
}
|
||||
|
||||
pub trait U {}
|
||||
pub trait V {}
|
||||
pub trait U : MarkerTrait {}
|
||||
pub trait V : MarkerTrait {}
|
||||
impl U for () {}
|
||||
impl V for () {}
|
||||
|
||||
|
@ -15,12 +15,14 @@
|
||||
|
||||
#![crate_name = "a"]
|
||||
|
||||
use std::marker::MarkerTrait;
|
||||
|
||||
macro_rules! three {
|
||||
() => { 3 }
|
||||
}
|
||||
|
||||
pub trait U {}
|
||||
pub trait V {}
|
||||
pub trait U : MarkerTrait {}
|
||||
pub trait V : MarkerTrait {}
|
||||
impl U for () {}
|
||||
impl V for () {}
|
||||
|
||||
|
@ -15,12 +15,14 @@
|
||||
|
||||
#![crate_name = "a"]
|
||||
|
||||
use std::marker::MarkerTrait;
|
||||
|
||||
macro_rules! three {
|
||||
() => { 3 }
|
||||
}
|
||||
|
||||
pub trait U {}
|
||||
pub trait V {}
|
||||
pub trait U : MarkerTrait {}
|
||||
pub trait V : MarkerTrait {}
|
||||
impl U for () {}
|
||||
impl V for () {}
|
||||
|
||||
|
@ -8,7 +8,9 @@
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
pub trait Trait {}
|
||||
pub trait Trait {
|
||||
fn dummy(&self) { }
|
||||
}
|
||||
|
||||
pub struct Foo<T:Trait> {
|
||||
pub x: T,
|
||||
|
@ -8,7 +8,7 @@
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
pub trait Foo {
|
||||
pub trait Foo : ::std::marker::MarkerTrait {
|
||||
}
|
||||
|
||||
impl Foo for int {
|
||||
|
@ -11,7 +11,7 @@
|
||||
pub use self::sub::{Bar, Baz};
|
||||
|
||||
pub trait Trait {
|
||||
fn foo();
|
||||
fn foo(&self);
|
||||
}
|
||||
|
||||
struct Foo;
|
||||
|
@ -16,8 +16,12 @@
|
||||
#![feature(no_std)]
|
||||
#![no_std]
|
||||
|
||||
#[lang="phantom_fn"]
|
||||
pub trait PhantomFn<A:?Sized,R:?Sized=()> { }
|
||||
impl<A:?Sized, R:?Sized, U:?Sized> PhantomFn<A,R> for U { }
|
||||
|
||||
#[lang="sized"]
|
||||
pub trait Sized {
|
||||
pub trait Sized : PhantomFn<Self> {
|
||||
// Empty.
|
||||
}
|
||||
|
||||
|
@ -11,9 +11,10 @@
|
||||
// Test that coherence detects overlap when some of the types in the
|
||||
// impls are projections of associated type. Issue #20624.
|
||||
|
||||
use std::marker::PhantomData;
|
||||
use std::ops::Deref;
|
||||
|
||||
pub struct Cow<'a, B: ?Sized>;
|
||||
pub struct Cow<'a, B: ?Sized>(PhantomData<(&'a (),B)>);
|
||||
|
||||
/// Trait for moving into a `Cow`
|
||||
pub trait IntoCow<'a, B: ?Sized> {
|
||||
|
@ -10,7 +10,7 @@
|
||||
|
||||
// Check that an associated type cannot be bound in an expression path.
|
||||
|
||||
trait Foo {
|
||||
trait Foo : ::std::marker::MarkerTrait {
|
||||
type A;
|
||||
fn bar() -> isize;
|
||||
}
|
||||
|
@ -11,7 +11,7 @@
|
||||
// Test that we do not ICE when an impl is missing an associated type (and that we report
|
||||
// a useful error, of course).
|
||||
|
||||
trait Trait {
|
||||
trait Trait : ::std::marker::MarkerTrait {
|
||||
type Type;
|
||||
}
|
||||
|
||||
|
@ -8,7 +8,7 @@
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
trait Foo {
|
||||
trait Foo : ::std::marker::MarkerTrait {
|
||||
type X;
|
||||
type Y;
|
||||
}
|
||||
|
@ -11,7 +11,7 @@
|
||||
// Check that we get an error when you use `<Self as Get>::Value` in
|
||||
// the trait definition but `Self` does not, in fact, implement `Get`.
|
||||
|
||||
trait Get {
|
||||
trait Get : ::std::marker::MarkerTrait {
|
||||
type Value;
|
||||
}
|
||||
|
||||
|
@ -12,6 +12,8 @@
|
||||
|
||||
pub trait Foo {
|
||||
type A;
|
||||
|
||||
fn dummy(&self) { }
|
||||
}
|
||||
|
||||
impl Foo for i32 {
|
||||
|
@ -10,7 +10,7 @@
|
||||
|
||||
// Check that an associated type cannot be bound in an expression path.
|
||||
|
||||
trait Foo {
|
||||
trait Foo : ::std::marker::MarkerTrait {
|
||||
type A;
|
||||
fn bar() -> isize;
|
||||
}
|
||||
|
@ -10,13 +10,6 @@
|
||||
|
||||
// ignore-tidy-linelength
|
||||
|
||||
#![feature(no_std)]
|
||||
#![no_std]
|
||||
#![feature(lang_items)]
|
||||
|
||||
#[lang="sized"]
|
||||
pub trait Sized {}
|
||||
|
||||
struct S<T> {
|
||||
contents: T,
|
||||
}
|
||||
|
@ -12,7 +12,7 @@
|
||||
|
||||
use std::cell::RefCell;
|
||||
|
||||
trait Trait {}
|
||||
trait Trait : ::std::marker::MarkerTrait {}
|
||||
|
||||
pub fn main() {
|
||||
let x: Vec<Trait + Sized> = Vec::new();
|
||||
|
@ -10,6 +10,7 @@
|
||||
|
||||
use std::fmt::Show;
|
||||
use std::default::Default;
|
||||
use std::marker::MarkerTrait;
|
||||
|
||||
// Test that two blanket impls conflict (at least without negative
|
||||
// bounds). After all, some other crate could implement Even or Odd
|
||||
@ -19,9 +20,9 @@ trait MyTrait {
|
||||
fn get(&self) -> usize;
|
||||
}
|
||||
|
||||
trait Even { }
|
||||
trait Even : MarkerTrait { }
|
||||
|
||||
trait Odd { }
|
||||
trait Odd : MarkerTrait { }
|
||||
|
||||
impl Even for isize { }
|
||||
|
||||
|
@ -19,9 +19,9 @@ trait MyTrait {
|
||||
fn get(&self) -> usize;
|
||||
}
|
||||
|
||||
trait Even { }
|
||||
trait Even : ::std::marker::MarkerTrait { }
|
||||
|
||||
trait Odd { }
|
||||
trait Odd : ::std::marker::MarkerTrait { }
|
||||
|
||||
impl<T:Even> MyTrait for T { //~ ERROR E0119
|
||||
fn get(&self) -> usize { 0 }
|
||||
|
@ -10,18 +10,18 @@
|
||||
|
||||
#![feature(optin_builtin_traits)]
|
||||
|
||||
trait MyTrait {}
|
||||
trait MyTrait : ::std::marker::MarkerTrait {}
|
||||
|
||||
struct TestType<T>;
|
||||
struct TestType<T>(::std::marker::PhantomData<T>);
|
||||
|
||||
unsafe impl<T: MyTrait> Send for TestType<T> {}
|
||||
unsafe impl<T: MyTrait+'static> Send for TestType<T> {}
|
||||
//~^ ERROR conflicting implementations for trait `core::marker::Send`
|
||||
//~^^ ERROR conflicting implementations for trait `core::marker::Send`
|
||||
|
||||
impl<T: MyTrait> !Send for TestType<T> {}
|
||||
//~^ ERROR conflicting implementations for trait `core::marker::Send`
|
||||
|
||||
unsafe impl<T> Send for TestType<T> {}
|
||||
unsafe impl<T:'static> Send for TestType<T> {}
|
||||
//~^ ERROR error: conflicting implementations for trait `core::marker::Send`
|
||||
|
||||
impl !Send for TestType<i32> {}
|
||||
|
50
src/test/compile-fail/coherence-subtyping.rs
Normal file
@ -0,0 +1,50 @@
|
||||
// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
|
||||
// file at the top-level directory of this distribution and at
|
||||
// http://rust-lang.org/COPYRIGHT.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
||||
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
// Test that two distinct impls which match subtypes of one another
|
||||
// yield coherence errors (or not) depending on the variance.
|
||||
|
||||
trait Contravariant {
|
||||
fn foo(&self) { }
|
||||
}
|
||||
|
||||
impl Contravariant for for<'a,'b> fn(&'a u8, &'b u8) {
|
||||
//~^ ERROR E0119
|
||||
}
|
||||
|
||||
impl Contravariant for for<'a> fn(&'a u8, &'a u8) {
|
||||
}
|
||||
|
||||
///////////////////////////////////////////////////////////////////////////
|
||||
|
||||
trait Covariant {
|
||||
fn foo(&self) { }
|
||||
}
|
||||
|
||||
impl Covariant for for<'a,'b> fn(&'a u8, &'b u8) {
|
||||
//~^ ERROR E0119
|
||||
}
|
||||
|
||||
impl Covariant for for<'a> fn(&'a u8, &'a u8) {
|
||||
}
|
||||
|
||||
///////////////////////////////////////////////////////////////////////////
|
||||
|
||||
trait Invariant {
|
||||
fn foo(&self) -> Self { }
|
||||
}
|
||||
|
||||
impl Invariant for for<'a,'b> fn(&'a u8, &'b u8) {
|
||||
}
|
||||
|
||||
impl Invariant for for<'a> fn(&'a u8, &'a u8) {
|
||||
}
|
||||
|
||||
fn main() { }
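For readers not steeped in higher-ranked subtyping: the two function-pointer types in each pair above are related by subtyping, which is why the co- and contravariant treatments make the impls overlap while the invariant pair stays apart. The relationship itself can be checked with a small present-day snippet, independent of this test:

```
type General = for<'a, 'b> fn(&'a u8, &'b u8);
type Specific = for<'a> fn(&'a u8, &'a u8);

fn takes_two(_x: &u8, _y: &u8) {}

// Accepted because General is a subtype of Specific: a function that
// handles two unrelated lifetimes certainly handles a single one.
fn witness(f: General) -> Specific {
    f
}

fn main() {
    let f: General = takes_two;
    let _g: Specific = witness(f);
}
```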
|
@ -14,7 +14,7 @@
|
||||
#![feature(box_syntax)]
|
||||
|
||||
struct Foo;
|
||||
trait Trait {}
|
||||
trait Trait : ::std::marker::MarkerTrait {}
|
||||
impl Trait for Foo {}
|
||||
|
||||
pub fn main() {
|
||||
|
@ -14,7 +14,7 @@
|
||||
#![feature(box_patterns)]
|
||||
#![feature(box_syntax)]
|
||||
|
||||
trait T {}
|
||||
trait T : ::std::marker::MarkerTrait {}
|
||||
impl T for isize {}
|
||||
|
||||
fn main() {
|
||||
|
Some files were not shown because too many files have changed in this diff.