Mirror of https://github.com/rust-lang/rust.git, synced 2024-11-25 08:13:41 +00:00.
Rollup merge of #56324 - Zoxc:int-ext, r=nikomatsakis

Use `raw_entry` for more efficient interning. Fixes https://github.com/rust-lang/rust/issues/56308#issuecomment-442492744.
This change is contained in commit ecfe721620.
@ -298,6 +298,7 @@ pub struct DroplessArena {
|
||||
unsafe impl Send for DroplessArena {}
|
||||
|
||||
impl Default for DroplessArena {
|
||||
#[inline]
|
||||
fn default() -> DroplessArena {
|
||||
DroplessArena {
|
||||
ptr: Cell::new(0 as *mut u8),
|
||||
@ -319,6 +320,7 @@ impl DroplessArena {
|
||||
false
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn align(&self, align: usize) {
|
||||
let final_address = ((self.ptr.get() as usize) + align - 1) & !(align - 1);
|
||||
self.ptr.set(final_address as *mut u8);
|
||||
|
@ -53,6 +53,7 @@ use ty::CanonicalTy;
|
||||
use ty::CanonicalPolyFnSig;
|
||||
use util::nodemap::{DefIdMap, DefIdSet, ItemLocalMap};
|
||||
use util::nodemap::{FxHashMap, FxHashSet};
|
||||
use rustc_data_structures::interner::HashInterner;
|
||||
use smallvec::SmallVec;
|
||||
use rustc_data_structures::stable_hasher::{HashStable, hash_stable_hashmap,
|
||||
StableHasher, StableHasherResult,
|
||||
@ -113,7 +114,7 @@ pub struct GlobalArenas<'tcx> {
|
||||
const_allocs: TypedArena<interpret::Allocation>,
|
||||
}
|
||||
|
||||
type InternedSet<'tcx, T> = Lock<FxHashSet<Interned<'tcx, T>>>;
|
||||
type InternedSet<'tcx, T> = Lock<FxHashMap<Interned<'tcx, T>, ()>>;
|
||||
|
||||
pub struct CtxtInterners<'tcx> {
|
||||
/// The arena that types, regions, etc are allocated from
|
||||
@ -167,51 +168,39 @@ impl<'gcx: 'tcx, 'tcx> CtxtInterners<'tcx> {
|
||||
// determine that all contents are in the global tcx.
|
||||
// See comments on Lift for why we can't use that.
|
||||
if flags.flags.intersects(ty::TypeFlags::KEEP_IN_LOCAL_TCX) {
|
||||
let mut interner = local.type_.borrow_mut();
|
||||
if let Some(&Interned(ty)) = interner.get(&st) {
|
||||
return ty;
|
||||
}
|
||||
local.type_.borrow_mut().intern(st, |st| {
|
||||
let ty_struct = TyS {
|
||||
sty: st,
|
||||
flags: flags.flags,
|
||||
outer_exclusive_binder: flags.outer_exclusive_binder,
|
||||
};
|
||||
|
||||
let ty_struct = TyS {
|
||||
sty: st,
|
||||
flags: flags.flags,
|
||||
outer_exclusive_binder: flags.outer_exclusive_binder,
|
||||
};
|
||||
// Make sure we don't end up with inference
|
||||
// types/regions in the global interner
|
||||
if local as *const _ as usize == global as *const _ as usize {
|
||||
bug!("Attempted to intern `{:?}` which contains \
|
||||
inference types/regions in the global type context",
|
||||
&ty_struct);
|
||||
}
|
||||
|
||||
// Make sure we don't end up with inference
|
||||
// types/regions in the global interner
|
||||
if local as *const _ as usize == global as *const _ as usize {
|
||||
bug!("Attempted to intern `{:?}` which contains \
|
||||
inference types/regions in the global type context",
|
||||
&ty_struct);
|
||||
}
|
||||
|
||||
// Don't be &mut TyS.
|
||||
let ty: Ty<'tcx> = local.arena.alloc(ty_struct);
|
||||
interner.insert(Interned(ty));
|
||||
ty
|
||||
Interned(local.arena.alloc(ty_struct))
|
||||
}).0
|
||||
} else {
|
||||
let mut interner = global.type_.borrow_mut();
|
||||
if let Some(&Interned(ty)) = interner.get(&st) {
|
||||
return ty;
|
||||
}
|
||||
global.type_.borrow_mut().intern(st, |st| {
|
||||
let ty_struct = TyS {
|
||||
sty: st,
|
||||
flags: flags.flags,
|
||||
outer_exclusive_binder: flags.outer_exclusive_binder,
|
||||
};
|
||||
|
||||
let ty_struct = TyS {
|
||||
sty: st,
|
||||
flags: flags.flags,
|
||||
outer_exclusive_binder: flags.outer_exclusive_binder,
|
||||
};
|
||||
// This is safe because all the types the ty_struct can point to
|
||||
// already is in the global arena
|
||||
let ty_struct: TyS<'gcx> = unsafe {
|
||||
mem::transmute(ty_struct)
|
||||
};
|
||||
|
||||
// This is safe because all the types the ty_struct can point to
|
||||
// already is in the global arena
|
||||
let ty_struct: TyS<'gcx> = unsafe {
|
||||
mem::transmute(ty_struct)
|
||||
};
|
||||
|
||||
// Don't be &mut TyS.
|
||||
let ty: Ty<'gcx> = global.arena.alloc(ty_struct);
|
||||
interner.insert(Interned(ty));
|
||||
ty
|
||||
Interned(global.arena.alloc(ty_struct))
|
||||
}).0
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -827,12 +816,9 @@ impl<'tcx> CommonTypes<'tcx> {
|
||||
fn new(interners: &CtxtInterners<'tcx>) -> CommonTypes<'tcx> {
|
||||
let mk = |sty| CtxtInterners::intern_ty(interners, interners, sty);
|
||||
let mk_region = |r| {
|
||||
if let Some(r) = interners.region.borrow().get(&r) {
|
||||
return r.0;
|
||||
}
|
||||
let r = interners.arena.alloc(r);
|
||||
interners.region.borrow_mut().insert(Interned(r));
|
||||
&*r
|
||||
interners.region.borrow_mut().intern(r, |r| {
|
||||
Interned(interners.arena.alloc(r))
|
||||
}).0
|
||||
};
|
||||
|
||||
CommonTypes {
|
||||
@ -955,14 +941,14 @@ pub struct GlobalCtxt<'tcx> {
|
||||
/// Data layout specification for the current target.
|
||||
pub data_layout: TargetDataLayout,
|
||||
|
||||
stability_interner: Lock<FxHashSet<&'tcx attr::Stability>>,
|
||||
stability_interner: Lock<FxHashMap<&'tcx attr::Stability, ()>>,
|
||||
|
||||
/// Stores the value of constants (and deduplicates the actual memory)
|
||||
allocation_interner: Lock<FxHashSet<&'tcx Allocation>>,
|
||||
allocation_interner: Lock<FxHashMap<&'tcx Allocation, ()>>,
|
||||
|
||||
pub alloc_map: Lock<interpret::AllocMap<'tcx, &'tcx Allocation>>,
|
||||
|
||||
layout_interner: Lock<FxHashSet<&'tcx LayoutDetails>>,
|
||||
layout_interner: Lock<FxHashMap<&'tcx LayoutDetails, ()>>,
|
||||
|
||||
/// A general purpose channel to throw data out the back towards LLVM worker
|
||||
/// threads.
|
||||
@ -1045,16 +1031,9 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
|
||||
self,
|
||||
alloc: Allocation,
|
||||
) -> &'gcx Allocation {
|
||||
let allocs = &mut self.allocation_interner.borrow_mut();
|
||||
if let Some(alloc) = allocs.get(&alloc) {
|
||||
return alloc;
|
||||
}
|
||||
|
||||
let interned = self.global_arenas.const_allocs.alloc(alloc);
|
||||
if let Some(prev) = allocs.replace(interned) { // insert into interner
|
||||
bug!("Tried to overwrite interned Allocation: {:#?}", prev)
|
||||
}
|
||||
interned
|
||||
self.allocation_interner.borrow_mut().intern(alloc, |alloc| {
|
||||
self.global_arenas.const_allocs.alloc(alloc)
|
||||
})
|
||||
}
|
||||
|
||||
/// Allocates a byte or string literal for `mir::interpret`, read-only
|
||||
@ -1066,29 +1045,15 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
|
||||
}
|
||||
|
||||
pub fn intern_stability(self, stab: attr::Stability) -> &'gcx attr::Stability {
|
||||
let mut stability_interner = self.stability_interner.borrow_mut();
|
||||
if let Some(st) = stability_interner.get(&stab) {
|
||||
return st;
|
||||
}
|
||||
|
||||
let interned = self.global_interners.arena.alloc(stab);
|
||||
if let Some(prev) = stability_interner.replace(interned) {
|
||||
bug!("Tried to overwrite interned Stability: {:?}", prev)
|
||||
}
|
||||
interned
|
||||
self.stability_interner.borrow_mut().intern(stab, |stab| {
|
||||
self.global_interners.arena.alloc(stab)
|
||||
})
|
||||
}
|
||||
|
||||
pub fn intern_layout(self, layout: LayoutDetails) -> &'gcx LayoutDetails {
|
||||
let mut layout_interner = self.layout_interner.borrow_mut();
|
||||
if let Some(layout) = layout_interner.get(&layout) {
|
||||
return layout;
|
||||
}
|
||||
|
||||
let interned = self.global_arenas.layout.alloc(layout);
|
||||
if let Some(prev) = layout_interner.replace(interned) {
|
||||
bug!("Tried to overwrite interned Layout: {:?}", prev)
|
||||
}
|
||||
interned
|
||||
self.layout_interner.borrow_mut().intern(layout, |layout| {
|
||||
self.global_arenas.layout.alloc(layout)
|
||||
})
|
||||
}
|
||||
|
||||
/// Returns a range of the start/end indices specified with the
|
||||
@ -2198,7 +2163,7 @@ macro_rules! sty_debug_print {
|
||||
};
|
||||
$(let mut $variant = total;)*
|
||||
|
||||
for &Interned(t) in tcx.interners.type_.borrow().iter() {
|
||||
for &Interned(t) in tcx.interners.type_.borrow().keys() {
|
||||
let variant = match t.sty {
|
||||
ty::Bool | ty::Char | ty::Int(..) | ty::Uint(..) |
|
||||
ty::Float(..) | ty::Str | ty::Never => continue,
|
||||
@ -2257,6 +2222,13 @@ impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> {
|
||||
/// An entry in an interner.
|
||||
struct Interned<'tcx, T: 'tcx+?Sized>(&'tcx T);
|
||||
|
||||
impl<'tcx, T: 'tcx+?Sized> Clone for Interned<'tcx, T> {
|
||||
fn clone(&self) -> Self {
|
||||
Interned(self.0)
|
||||
}
|
||||
}
|
||||
impl<'tcx, T: 'tcx+?Sized> Copy for Interned<'tcx, T> {}
|
||||
|
||||
// NB: An Interned<Ty> compares and hashes as a sty.
|
||||
impl<'tcx> PartialEq for Interned<'tcx, TyS<'tcx>> {
|
||||
fn eq(&self, other: &Interned<'tcx, TyS<'tcx>>) -> bool {
|
||||
@ -2377,37 +2349,28 @@ macro_rules! intern_method {
|
||||
// determine that all contents are in the global tcx.
|
||||
// See comments on Lift for why we can't use that.
|
||||
if ($keep_in_local_tcx)(&v) {
|
||||
let mut interner = self.interners.$name.borrow_mut();
|
||||
if let Some(&Interned(v)) = interner.get(key) {
|
||||
return v;
|
||||
}
|
||||
self.interners.$name.borrow_mut().intern_ref(key, || {
|
||||
// Make sure we don't end up with inference
|
||||
// types/regions in the global tcx.
|
||||
if self.is_global() {
|
||||
bug!("Attempted to intern `{:?}` which contains \
|
||||
inference types/regions in the global type context",
|
||||
v);
|
||||
}
|
||||
|
||||
// Make sure we don't end up with inference
|
||||
// types/regions in the global tcx.
|
||||
if self.is_global() {
|
||||
bug!("Attempted to intern `{:?}` which contains \
|
||||
inference types/regions in the global type context",
|
||||
v);
|
||||
}
|
||||
|
||||
let i = $alloc_method(&self.interners.arena, v);
|
||||
interner.insert(Interned(i));
|
||||
i
|
||||
Interned($alloc_method(&self.interners.arena, v))
|
||||
}).0
|
||||
} else {
|
||||
let mut interner = self.global_interners.$name.borrow_mut();
|
||||
if let Some(&Interned(v)) = interner.get(key) {
|
||||
return v;
|
||||
}
|
||||
|
||||
// This transmutes $alloc<'tcx> to $alloc<'gcx>
|
||||
let v = unsafe {
|
||||
mem::transmute(v)
|
||||
};
|
||||
let i: &$lt_tcx $ty = $alloc_method(&self.global_interners.arena, v);
|
||||
// Cast to 'gcx
|
||||
let i = unsafe { mem::transmute(i) };
|
||||
interner.insert(Interned(i));
|
||||
i
|
||||
self.global_interners.$name.borrow_mut().intern_ref(key, || {
|
||||
// This transmutes $alloc<'tcx> to $alloc<'gcx>
|
||||
let v = unsafe {
|
||||
mem::transmute(v)
|
||||
};
|
||||
let i: &$lt_tcx $ty = $alloc_method(&self.global_interners.arena, v);
|
||||
// Cast to 'gcx
|
||||
let i = unsafe { mem::transmute(i) };
|
||||
Interned(i)
|
||||
}).0
|
||||
}
|
||||
}
|
||||
}
|
||||
|
68
src/librustc_data_structures/interner.rs
Normal file
68
src/librustc_data_structures/interner.rs
Normal file
@ -0,0 +1,68 @@
|
||||
// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
|
||||
// file at the top-level directory of this distribution and at
|
||||
// http://rust-lang.org/COPYRIGHT.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
|
||||
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
|
||||
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
|
||||
// option. This file may not be copied, modified, or distributed
|
||||
// except according to those terms.
|
||||
|
||||
use std::hash::Hash;
|
||||
use std::hash::BuildHasher;
|
||||
use std::hash::Hasher;
|
||||
use std::collections::HashMap;
|
||||
use std::collections::hash_map::RawEntryMut;
|
||||
use std::borrow::Borrow;
|
||||
|
||||
pub trait HashInterner<K: Eq + Hash> {
|
||||
fn intern_ref<Q: ?Sized, F: FnOnce() -> K>(&mut self, value: &Q, make: F) -> K
|
||||
where K: Borrow<Q>,
|
||||
Q: Hash + Eq;
|
||||
|
||||
fn intern<Q, F: FnOnce(Q) -> K>(&mut self, value: Q, make: F) -> K
|
||||
where K: Borrow<Q>,
|
||||
Q: Hash + Eq;
|
||||
}
|
||||
|
||||
impl<K: Eq + Hash + Copy, S: BuildHasher> HashInterner<K> for HashMap<K, (), S> {
|
||||
#[inline]
|
||||
fn intern_ref<Q: ?Sized, F: FnOnce() -> K>(&mut self, value: &Q, make: F) -> K
|
||||
where K: Borrow<Q>,
|
||||
Q: Hash + Eq
|
||||
{
|
||||
let mut hasher = self.hasher().build_hasher();
|
||||
value.hash(&mut hasher);
|
||||
let hash = hasher.finish();
|
||||
let entry = self.raw_entry_mut().from_key_hashed_nocheck(hash, value);
|
||||
|
||||
match entry {
|
||||
RawEntryMut::Occupied(e) => *e.key(),
|
||||
RawEntryMut::Vacant(e) => {
|
||||
let v = make();
|
||||
e.insert_hashed_nocheck(hash, v, ());
|
||||
v
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn intern<Q, F: FnOnce(Q) -> K>(&mut self, value: Q, make: F) -> K
|
||||
where K: Borrow<Q>,
|
||||
Q: Hash + Eq
|
||||
{
|
||||
let mut hasher = self.hasher().build_hasher();
|
||||
value.hash(&mut hasher);
|
||||
let hash = hasher.finish();
|
||||
let entry = self.raw_entry_mut().from_key_hashed_nocheck(hash, &value);
|
||||
|
||||
match entry {
|
||||
RawEntryMut::Occupied(e) => *e.key(),
|
||||
RawEntryMut::Vacant(e) => {
|
||||
let v = make(value);
|
||||
e.insert_hashed_nocheck(hash, v, ());
|
||||
v
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
@ -29,6 +29,7 @@
|
||||
#![feature(nll)]
|
||||
#![feature(allow_internal_unstable)]
|
||||
#![feature(vec_resize_with)]
|
||||
#![feature(hash_raw_entry)]
|
||||
|
||||
#![cfg_attr(unix, feature(libc))]
|
||||
#![cfg_attr(test, feature(test))]
|
||||
@ -66,6 +67,7 @@ pub mod flock;
|
||||
pub mod fx;
|
||||
pub mod graph;
|
||||
pub mod indexed_vec;
|
||||
pub mod interner;
|
||||
pub mod obligation_forest;
|
||||
pub mod owning_ref;
|
||||
pub mod ptr_key;
|
||||
|
@ -849,6 +849,7 @@ impl<K, V, S> HashMap<K, V, S>
|
||||
/// let mut map: HashMap<&str, i32> = HashMap::new();
|
||||
/// map.reserve(10);
|
||||
/// ```
|
||||
#[inline]
|
||||
#[stable(feature = "rust1", since = "1.0.0")]
|
||||
pub fn reserve(&mut self, additional: usize) {
|
||||
match self.reserve_internal(additional, Infallible) {
|
||||
@ -880,6 +881,7 @@ impl<K, V, S> HashMap<K, V, S>
|
||||
self.reserve_internal(additional, Fallible)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn reserve_internal(&mut self, additional: usize, fallibility: Fallibility)
|
||||
-> Result<(), CollectionAllocErr> {
|
||||
|
||||
@ -1571,6 +1573,7 @@ impl<K, V, S> HashMap<K, V, S>
|
||||
/// so that the map now contains keys which compare equal, search may start
|
||||
/// acting erratically, with two keys randomly masking each other. Implementations
|
||||
/// are free to assume this doesn't happen (within the limits of memory-safety).
|
||||
#[inline(always)]
|
||||
#[unstable(feature = "hash_raw_entry", issue = "56167")]
|
||||
pub fn raw_entry_mut(&mut self) -> RawEntryBuilderMut<K, V, S> {
|
||||
self.reserve(1);
|
||||
@ -1911,6 +1914,7 @@ impl<'a, K, V, S> RawEntryBuilderMut<'a, K, V, S>
|
||||
}
|
||||
|
||||
/// Create a `RawEntryMut` from the given key and its hash.
|
||||
#[inline]
|
||||
#[unstable(feature = "hash_raw_entry", issue = "56167")]
|
||||
pub fn from_key_hashed_nocheck<Q: ?Sized>(self, hash: u64, k: &Q) -> RawEntryMut<'a, K, V, S>
|
||||
where K: Borrow<Q>,
|
||||
@ -1919,6 +1923,7 @@ impl<'a, K, V, S> RawEntryBuilderMut<'a, K, V, S>
|
||||
self.from_hash(hash, |q| q.borrow().eq(k))
|
||||
}
|
||||
|
||||
#[inline]
|
||||
fn search<F>(self, hash: u64, is_match: F, compare_hashes: bool) -> RawEntryMut<'a, K, V, S>
|
||||
where for<'b> F: FnMut(&'b K) -> bool,
|
||||
{
|
||||
@ -1941,6 +1946,7 @@ impl<'a, K, V, S> RawEntryBuilderMut<'a, K, V, S>
|
||||
}
|
||||
}
|
||||
/// Create a `RawEntryMut` from the given hash.
|
||||
#[inline]
|
||||
#[unstable(feature = "hash_raw_entry", issue = "56167")]
|
||||
pub fn from_hash<F>(self, hash: u64, is_match: F) -> RawEntryMut<'a, K, V, S>
|
||||
where for<'b> F: FnMut(&'b K) -> bool,
|
||||
@ -2215,6 +2221,7 @@ impl<'a, K, V, S> RawVacantEntryMut<'a, K, V, S> {
|
||||
|
||||
/// Sets the value of the entry with the VacantEntry's key,
|
||||
/// and returns a mutable reference to it.
|
||||
#[inline]
|
||||
#[unstable(feature = "hash_raw_entry", issue = "56167")]
|
||||
pub fn insert_hashed_nocheck(self, hash: u64, key: K, value: V) -> (&'a mut K, &'a mut V) {
|
||||
let hash = SafeHash::new(hash);
|
||||
|
@ -329,6 +329,7 @@ impl<K, V, M> Put<K, V> for FullBucket<K, V, M>
|
||||
}
|
||||
|
||||
impl<K, V, M: Deref<Target = RawTable<K, V>>> Bucket<K, V, M> {
|
||||
#[inline]
|
||||
pub fn new(table: M, hash: SafeHash) -> Bucket<K, V, M> {
|
||||
Bucket::at_index(table, hash.inspect() as usize)
|
||||
}
|
||||
@ -342,6 +343,7 @@ impl<K, V, M: Deref<Target = RawTable<K, V>>> Bucket<K, V, M> {
|
||||
}
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub fn at_index(table: M, ib_index: usize) -> Bucket<K, V, M> {
|
||||
// if capacity is 0, then the RawBucket will be populated with bogus pointers.
|
||||
// This is an uncommon case though, so avoid it in release builds.
|
||||
@ -654,6 +656,7 @@ impl<K, V, M> GapThenFull<K, V, M>
|
||||
|
||||
// Returns a Layout which describes the allocation required for a hash table,
|
||||
// and the offset of the array of (key, value) pairs in the allocation.
|
||||
#[inline(always)]
|
||||
fn calculate_layout<K, V>(capacity: usize) -> Result<(Layout, usize), LayoutErr> {
|
||||
let hashes = Layout::array::<HashUint>(capacity)?;
|
||||
let pairs = Layout::array::<(K, V)>(capacity)?;
|
||||
@ -722,6 +725,7 @@ impl<K, V> RawTable<K, V> {
|
||||
}
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
fn raw_bucket_at(&self, index: usize) -> RawBucket<K, V> {
|
||||
let (_, pairs_offset) = calculate_layout::<K, V>(self.capacity())
|
||||
.unwrap_or_else(|_| unsafe { hint::unreachable_unchecked() });
|
||||
|
Loading…
Reference in New Issue
Block a user