Mirror of https://github.com/rust-lang/rust.git
Auto merge of #113772 - nnethercote:streamline-size-estimates-2, r=wesleywiser
Streamline size estimates (take 2)

This was merged in #113684, but then [something happened](https://github.com/rust-lang/rust/pull/113684#issuecomment-1636811985):

> There has been a bors issue that led to the merge commit of this PR getting purged from master. You'll have to make a new PR to reapply it.

So this is exactly the same set of changes.

`@bors` r=wesleywiser
Commit 299179e694
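For orientation before the diff: the central change is that the per-item `(Linkage, Visibility)` tuple stored in each codegen unit becomes a `MonoItemData` struct that also caches a `size_estimate`, so CGU sizes are summed from cached values and the `instance_def_size_estimate` query can be removed. The sketch below is a minimal, self-contained illustration of that shape; every type in it is a simplified stand-in, not rustc's actual API.

```rust
// Simplified stand-ins for the rustc types touched by this commit; not the real API.
use std::collections::HashMap;

#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
struct MonoItem(u32); // placeholder for rustc's `MonoItem<'tcx>`

#[derive(Copy, Clone, PartialEq, Debug)]
enum Linkage {
    External,
    Internal,
}

#[derive(Copy, Clone, PartialEq, Debug)]
enum Visibility {
    Default,
    Hidden,
}

/// Auxiliary info about a `MonoItem`, mirroring the struct the commit adds.
#[derive(Copy, Clone, Debug)]
struct MonoItemData {
    linkage: Linkage,
    visibility: Visibility,
    /// Cached when the item is placed, so later passes never re-estimate it.
    size_estimate: usize,
}

struct CodegenUnit {
    items: HashMap<MonoItem, MonoItemData>,
    size_estimate: usize,
}

impl CodegenUnit {
    /// After the change this needs no `tcx`: it just sums the cached per-item sizes.
    fn compute_size_estimate(&mut self) {
        self.size_estimate = self.items.values().map(|data| data.size_estimate).sum();
    }
}

fn main() {
    let mut cgu = CodegenUnit { items: HashMap::new(), size_estimate: 0 };
    cgu.items.insert(
        MonoItem(0),
        MonoItemData { linkage: Linkage::External, visibility: Visibility::Default, size_estimate: 7 },
    );
    cgu.items.insert(
        MonoItem(1),
        MonoItemData { linkage: Linkage::Internal, visibility: Visibility::Hidden, size_estimate: 1 },
    );

    cgu.compute_size_estimate();
    assert_eq!(cgu.size_estimate, 8);

    // Linkage and visibility keep serving their old roles, e.g. filtering
    // externally linked items or spotting internalization candidates.
    let external = cgu.items.values().filter(|d| d.linkage == Linkage::External).count();
    let hidden = cgu.items.values().filter(|d| d.visibility == Visibility::Hidden).count();
    assert_eq!((external, hidden), (1, 1));
}
```

Caching the estimate at placement time is what lets `compute_size_estimate` drop its `tcx` parameter and lets later passes (CGU merging, the debug dump) reuse the same number without re-walking MIR.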
@@ -5,7 +5,7 @@
 //! [`codegen_static`]: crate::constant::codegen_static
 
 use rustc_data_structures::profiling::SelfProfilerRef;
-use rustc_middle::mir::mono::{Linkage as RLinkage, MonoItem, Visibility};
+use rustc_middle::mir::mono::{MonoItem, MonoItemData};
 
 use crate::prelude::*;
 
@@ -16,11 +16,11 @@ pub(crate) mod jit;
 fn predefine_mono_items<'tcx>(
     tcx: TyCtxt<'tcx>,
     module: &mut dyn Module,
-    mono_items: &[(MonoItem<'tcx>, (RLinkage, Visibility))],
+    mono_items: &[(MonoItem<'tcx>, MonoItemData)],
 ) {
     tcx.prof.generic_activity("predefine functions").run(|| {
         let is_compiler_builtins = tcx.is_compiler_builtins(LOCAL_CRATE);
-        for &(mono_item, (linkage, visibility)) in mono_items {
+        for &(mono_item, data) in mono_items {
             match mono_item {
                 MonoItem::Fn(instance) => {
                     let name = tcx.symbol_name(instance).name;
@@ -29,8 +29,8 @@ fn predefine_mono_items<'tcx>(
                         get_function_sig(tcx, module.target_config().default_call_conv, instance);
                     let linkage = crate::linkage::get_clif_linkage(
                         mono_item,
-                        linkage,
-                        visibility,
+                        data.linkage,
+                        data.visibility,
                         is_compiler_builtins,
                     );
                     module.declare_function(name, linkage, &sig).unwrap();
@@ -159,8 +159,8 @@ pub fn compile_codegen_unit(tcx: TyCtxt<'_>, cgu_name: Symbol, supports_128bit_i
         let cx = CodegenCx::new(&context, cgu, tcx, supports_128bit_integers);
 
         let mono_items = cgu.items_in_deterministic_order(tcx);
-        for &(mono_item, (linkage, visibility)) in &mono_items {
-            mono_item.predefine::<Builder<'_, '_, '_>>(&cx, linkage, visibility);
+        for &(mono_item, data) in &mono_items {
+            mono_item.predefine::<Builder<'_, '_, '_>>(&cx, data.linkage, data.visibility);
         }
 
         // ... and now that we have everything pre-defined, fill out those definitions.
@@ -86,8 +86,8 @@ pub fn compile_codegen_unit(tcx: TyCtxt<'_>, cgu_name: Symbol) -> (ModuleCodegen
     {
         let cx = CodegenCx::new(tcx, cgu, &llvm_module);
         let mono_items = cx.codegen_unit.items_in_deterministic_order(cx.tcx);
-        for &(mono_item, (linkage, visibility)) in &mono_items {
-            mono_item.predefine::<Builder<'_, '_, '_>>(&cx, linkage, visibility);
+        for &(mono_item, data) in &mono_items {
+            mono_item.predefine::<Builder<'_, '_, '_>>(&cx, data.linkage, data.visibility);
         }
 
         // ... and now that we have everything pre-defined, fill out those definitions.
@@ -328,14 +328,14 @@ fn exported_symbols_provider_local(
 
         let (_, cgus) = tcx.collect_and_partition_mono_items(());
 
-        for (mono_item, &(linkage, visibility)) in cgus.iter().flat_map(|cgu| cgu.items().iter()) {
-            if linkage != Linkage::External {
+        for (mono_item, data) in cgus.iter().flat_map(|cgu| cgu.items().iter()) {
+            if data.linkage != Linkage::External {
                 // We can only re-use things with external linkage, otherwise
                 // we'll get a linker error
                 continue;
             }
 
-            if need_visibility && visibility == Visibility::Hidden {
+            if need_visibility && data.visibility == Visibility::Hidden {
                 // If we potentially share things from Rust dylibs, they must
                 // not be hidden
                 continue;
@@ -59,12 +59,19 @@ impl<'tcx> MonoItem<'tcx> {
     pub fn size_estimate(&self, tcx: TyCtxt<'tcx>) -> usize {
         match *self {
             MonoItem::Fn(instance) => {
-                // Estimate the size of a function based on how many statements
-                // it contains.
-                tcx.instance_def_size_estimate(instance.def)
+                match instance.def {
+                    // "Normal" functions size estimate: the number of
+                    // statements, plus one for the terminator.
+                    InstanceDef::Item(..) | InstanceDef::DropGlue(..) => {
+                        let mir = tcx.instance_mir(instance.def);
+                        mir.basic_blocks.iter().map(|bb| bb.statements.len() + 1).sum()
+                    }
+                    // Other compiler-generated shims size estimate: 1
+                    _ => 1,
+                }
             }
-            // Conservatively estimate the size of a static declaration
-            // or assembly to be 1.
+            // Conservatively estimate the size of a static declaration or
+            // assembly item to be 1.
             MonoItem::Static(_) | MonoItem::GlobalAsm(_) => 1,
         }
     }
@@ -230,7 +237,7 @@ pub struct CodegenUnit<'tcx> {
     /// contain something unique to this crate (e.g., a module path)
     /// as well as the crate name and disambiguator.
     name: Symbol,
-    items: FxHashMap<MonoItem<'tcx>, (Linkage, Visibility)>,
+    items: FxHashMap<MonoItem<'tcx>, MonoItemData>,
     size_estimate: usize,
     primary: bool,
     /// True if this is CGU is used to hold code coverage information for dead code,
@@ -238,6 +245,14 @@ pub struct CodegenUnit<'tcx> {
     is_code_coverage_dead_code_cgu: bool,
 }
 
+/// Auxiliary info about a `MonoItem`.
+#[derive(Copy, Clone, PartialEq, Debug, HashStable)]
+pub struct MonoItemData {
+    pub linkage: Linkage,
+    pub visibility: Visibility,
+    pub size_estimate: usize,
+}
+
 /// Specifies the linkage type for a `MonoItem`.
 ///
 /// See <https://llvm.org/docs/LangRef.html#linkage-types> for more details about these variants.
@@ -292,12 +307,12 @@ impl<'tcx> CodegenUnit<'tcx> {
     }
 
     /// The order of these items is non-determinstic.
-    pub fn items(&self) -> &FxHashMap<MonoItem<'tcx>, (Linkage, Visibility)> {
+    pub fn items(&self) -> &FxHashMap<MonoItem<'tcx>, MonoItemData> {
         &self.items
     }
 
     /// The order of these items is non-determinstic.
-    pub fn items_mut(&mut self) -> &mut FxHashMap<MonoItem<'tcx>, (Linkage, Visibility)> {
+    pub fn items_mut(&mut self) -> &mut FxHashMap<MonoItem<'tcx>, MonoItemData> {
         &mut self.items
     }
 
@@ -320,16 +335,16 @@ impl<'tcx> CodegenUnit<'tcx> {
         base_n::encode(hash, base_n::CASE_INSENSITIVE)
     }
 
-    pub fn compute_size_estimate(&mut self, tcx: TyCtxt<'tcx>) {
-        // Estimate the size of a codegen unit as (approximately) the number of MIR
-        // statements it corresponds to.
-        self.size_estimate = self.items.keys().map(|mi| mi.size_estimate(tcx)).sum();
+    pub fn compute_size_estimate(&mut self) {
+        // The size of a codegen unit as the sum of the sizes of the items
+        // within it.
+        self.size_estimate = self.items.values().map(|data| data.size_estimate).sum();
     }
 
-    #[inline]
     /// Should only be called if [`compute_size_estimate`] has previously been called.
     ///
     /// [`compute_size_estimate`]: Self::compute_size_estimate
+    #[inline]
     pub fn size_estimate(&self) -> usize {
         // Items are never zero-sized, so if we have items the estimate must be
         // non-zero, unless we forgot to call `compute_size_estimate` first.
@@ -355,7 +370,7 @@ impl<'tcx> CodegenUnit<'tcx> {
     pub fn items_in_deterministic_order(
         &self,
         tcx: TyCtxt<'tcx>,
-    ) -> Vec<(MonoItem<'tcx>, (Linkage, Visibility))> {
+    ) -> Vec<(MonoItem<'tcx>, MonoItemData)> {
         // The codegen tests rely on items being process in the same order as
         // they appear in the file, so for local items, we sort by node_id first
         #[derive(PartialEq, Eq, PartialOrd, Ord)]
@@ -390,7 +405,7 @@ impl<'tcx> CodegenUnit<'tcx> {
             )
         }
 
-        let mut items: Vec<_> = self.items().iter().map(|(&i, &l)| (i, l)).collect();
+        let mut items: Vec<_> = self.items().iter().map(|(&i, &data)| (i, data)).collect();
         items.sort_by_cached_key(|&(i, _)| item_sort_key(tcx, i));
         items
     }
@@ -2080,12 +2080,6 @@ rustc_queries! {
         desc { "looking up supported target features" }
     }
 
-    /// Get an estimate of the size of an InstanceDef based on its MIR for CGU partitioning.
-    query instance_def_size_estimate(def: ty::InstanceDef<'tcx>)
-        -> usize {
-        desc { |tcx| "estimating size for `{}`", tcx.def_path_str(def.def_id()) }
-    }
-
     query features_query(_: ()) -> &'tcx rustc_feature::Features {
         feedable
         desc { "looking up enabled feature gates" }
@@ -107,7 +107,8 @@ use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags;
 use rustc_middle::middle::exported_symbols::{SymbolExportInfo, SymbolExportLevel};
 use rustc_middle::mir;
 use rustc_middle::mir::mono::{
-    CodegenUnit, CodegenUnitNameBuilder, InstantiationMode, Linkage, MonoItem, Visibility,
+    CodegenUnit, CodegenUnitNameBuilder, InstantiationMode, Linkage, MonoItem, MonoItemData,
+    Visibility,
 };
 use rustc_middle::query::Providers;
 use rustc_middle::ty::print::{characteristic_def_id_of_type, with_no_trimmed_paths};
@@ -130,11 +131,6 @@ struct PlacedMonoItems<'tcx> {
     codegen_units: Vec<CodegenUnit<'tcx>>,
 
     internalization_candidates: FxHashSet<MonoItem<'tcx>>,
-
-    /// These must be obtained when the iterator in `partition` runs. They
-    /// can't be obtained later because some inlined functions might not be
-    /// reachable.
-    unique_inlined_stats: (usize, usize),
 }
 
 // The output CGUs are sorted by name.
@@ -152,11 +148,11 @@ where
 
     // Place all mono items into a codegen unit. `place_mono_items` is
     // responsible for initializing the CGU size estimates.
-    let PlacedMonoItems { mut codegen_units, internalization_candidates, unique_inlined_stats } = {
+    let PlacedMonoItems { mut codegen_units, internalization_candidates } = {
         let _prof_timer = tcx.prof.generic_activity("cgu_partitioning_place_items");
         let placed = place_mono_items(cx, mono_items);
 
-        debug_dump(tcx, "PLACE", &placed.codegen_units, placed.unique_inlined_stats);
+        debug_dump(tcx, "PLACE", &placed.codegen_units);
 
         placed
     };
@@ -167,7 +163,7 @@ where
     {
         let _prof_timer = tcx.prof.generic_activity("cgu_partitioning_merge_cgus");
         merge_codegen_units(cx, &mut codegen_units);
-        debug_dump(tcx, "MERGE", &codegen_units, unique_inlined_stats);
+        debug_dump(tcx, "MERGE", &codegen_units);
     }
 
     // Make as many symbols "internal" as possible, so LLVM has more freedom to
@@ -176,7 +172,7 @@ where
         let _prof_timer = tcx.prof.generic_activity("cgu_partitioning_internalize_symbols");
         internalize_symbols(cx, &mut codegen_units, internalization_candidates);
 
-        debug_dump(tcx, "INTERNALIZE", &codegen_units, unique_inlined_stats);
+        debug_dump(tcx, "INTERNALIZE", &codegen_units);
     }
 
     // Mark one CGU for dead code, if necessary.
@@ -216,18 +212,12 @@ where
     let cgu_name_builder = &mut CodegenUnitNameBuilder::new(cx.tcx);
     let cgu_name_cache = &mut FxHashMap::default();
 
-    let mut num_unique_inlined_items = 0;
-    let mut unique_inlined_items_size = 0;
     for mono_item in mono_items {
         // Handle only root items directly here. Inlined items are handled at
         // the bottom of the loop based on reachability.
         match mono_item.instantiation_mode(cx.tcx) {
             InstantiationMode::GloballyShared { .. } => {}
-            InstantiationMode::LocalCopy => {
-                num_unique_inlined_items += 1;
-                unique_inlined_items_size += mono_item.size_estimate(cx.tcx);
-                continue;
-            }
+            InstantiationMode::LocalCopy => continue,
         }
 
         let characteristic_def_id = characteristic_def_id_of_mono_item(cx.tcx, mono_item);
@@ -256,8 +246,9 @@ where
         if visibility == Visibility::Hidden && can_be_internalized {
             internalization_candidates.insert(mono_item);
         }
+        let size_estimate = mono_item.size_estimate(cx.tcx);
 
-        cgu.items_mut().insert(mono_item, (linkage, visibility));
+        cgu.items_mut().insert(mono_item, MonoItemData { linkage, visibility, size_estimate });
 
         // Get all inlined items that are reachable from `mono_item` without
        // going via another root item. This includes drop-glue, functions from
@@ -271,7 +262,11 @@ where
         // the `insert` will be a no-op.
         for inlined_item in reachable_inlined_items {
             // This is a CGU-private copy.
-            cgu.items_mut().insert(inlined_item, (Linkage::Internal, Visibility::Default));
+            cgu.items_mut().entry(inlined_item).or_insert_with(|| MonoItemData {
+                linkage: Linkage::Internal,
+                visibility: Visibility::Default,
+                size_estimate: inlined_item.size_estimate(cx.tcx),
+            });
         }
     }
 
@@ -286,14 +281,10 @@ where
     codegen_units.sort_by(|a, b| a.name().as_str().cmp(b.name().as_str()));
 
     for cgu in codegen_units.iter_mut() {
-        cgu.compute_size_estimate(cx.tcx);
+        cgu.compute_size_estimate();
     }
 
-    return PlacedMonoItems {
-        codegen_units,
-        internalization_candidates,
-        unique_inlined_stats: (num_unique_inlined_items, unique_inlined_items_size),
-    };
+    return PlacedMonoItems { codegen_units, internalization_candidates };
 
     fn get_reachable_inlined_items<'tcx>(
         tcx: TyCtxt<'tcx>,
@@ -349,7 +340,7 @@ fn merge_codegen_units<'tcx>(
             && codegen_units.iter().any(|cgu| cgu.size_estimate() < NON_INCR_MIN_CGU_SIZE))
     {
         // Sort small cgus to the back.
-        codegen_units.sort_by_cached_key(|cgu| cmp::Reverse(cgu.size_estimate()));
+        codegen_units.sort_by_key(|cgu| cmp::Reverse(cgu.size_estimate()));
 
         let mut smallest = codegen_units.pop().unwrap();
         let second_smallest = codegen_units.last_mut().unwrap();
@@ -358,7 +349,7 @@ fn merge_codegen_units<'tcx>(
         // may be duplicate inlined items, in which case the destination CGU is
         // unaffected. Recalculate size estimates afterwards.
         second_smallest.items_mut().extend(smallest.items_mut().drain());
-        second_smallest.compute_size_estimate(cx.tcx);
+        second_smallest.compute_size_estimate();
 
         // Record that `second_smallest` now contains all the stuff that was
         // in `smallest` before.
@@ -492,7 +483,7 @@ fn internalize_symbols<'tcx>(
     for cgu in codegen_units {
         let home_cgu = MonoItemPlacement::SingleCgu(cgu.name());
 
-        for (item, linkage_and_visibility) in cgu.items_mut() {
+        for (item, data) in cgu.items_mut() {
             if !internalization_candidates.contains(item) {
                 // This item is no candidate for internalizing, so skip it.
                 continue;
@@ -520,7 +511,8 @@ fn internalize_symbols<'tcx>(
 
             // If we got here, we did not find any uses from other CGUs, so
             // it's fine to make this monomorphization internal.
-            *linkage_and_visibility = (Linkage::Internal, Visibility::Default);
+            data.linkage = Linkage::Internal;
+            data.visibility = Visibility::Default;
         }
     }
 }
@@ -537,7 +529,7 @@ fn mark_code_coverage_dead_code_cgu<'tcx>(codegen_units: &mut [CodegenUnit<'tcx>
     // function symbols to be included via `-u` or `/include` linker args.
     let dead_code_cgu = codegen_units
         .iter_mut()
-        .filter(|cgu| cgu.items().iter().any(|(_, (linkage, _))| *linkage == Linkage::External))
+        .filter(|cgu| cgu.items().iter().any(|(_, data)| data.linkage == Linkage::External))
        .min_by_key(|cgu| cgu.size_estimate());
 
     // If there are no CGUs that have externally linked items, then we just
@@ -851,12 +843,7 @@ fn default_visibility(tcx: TyCtxt<'_>, id: DefId, is_generic: bool) -> Visibilit
     }
 }
 
-fn debug_dump<'a, 'tcx: 'a>(
-    tcx: TyCtxt<'tcx>,
-    label: &str,
-    cgus: &[CodegenUnit<'tcx>],
-    (unique_inlined_items, unique_inlined_size): (usize, usize),
-) {
+fn debug_dump<'a, 'tcx: 'a>(tcx: TyCtxt<'tcx>, label: &str, cgus: &[CodegenUnit<'tcx>]) {
     let dump = move || {
         use std::fmt::Write;
 
@@ -865,28 +852,36 @@ fn debug_dump<'a, 'tcx: 'a>(
 
         // Note: every unique root item is placed exactly once, so the number
         // of unique root items always equals the number of placed root items.
         //
         // Also, unreached inlined items won't be counted here. This is fine.
 
+        let mut inlined_items = FxHashSet::default();
+
         let mut root_items = 0;
-        // unique_inlined_items is passed in above.
+        let mut unique_inlined_items = 0;
         let mut placed_inlined_items = 0;
 
         let mut root_size = 0;
-        // unique_inlined_size is passed in above.
+        let mut unique_inlined_size = 0;
         let mut placed_inlined_size = 0;
 
         for cgu in cgus.iter() {
             num_cgus += 1;
             all_cgu_sizes.push(cgu.size_estimate());
 
-            for (item, _) in cgu.items() {
+            for (item, data) in cgu.items() {
                 match item.instantiation_mode(tcx) {
                     InstantiationMode::GloballyShared { .. } => {
                         root_items += 1;
-                        root_size += item.size_estimate(tcx);
+                        root_size += data.size_estimate;
                     }
                     InstantiationMode::LocalCopy => {
+                        if inlined_items.insert(item) {
+                            unique_inlined_items += 1;
+                            unique_inlined_size += data.size_estimate;
+                        }
                         placed_inlined_items += 1;
-                        placed_inlined_size += item.size_estimate(tcx);
+                        placed_inlined_size += data.size_estimate;
                     }
                 }
             }
@@ -928,7 +923,7 @@ fn debug_dump<'a, 'tcx: 'a>(
             let mean_size = size as f64 / num_items as f64;
 
             let mut placed_item_sizes: Vec<_> =
-                cgu.items().iter().map(|(item, _)| item.size_estimate(tcx)).collect();
+                cgu.items().values().map(|data| data.size_estimate).collect();
             placed_item_sizes.sort_unstable_by_key(|&n| cmp::Reverse(n));
             let sizes = list(&placed_item_sizes);
 
@@ -937,15 +932,16 @@ fn debug_dump<'a, 'tcx: 'a>(
             let _ =
                 writeln!(s, " - items: {num_items}, mean size: {mean_size:.1}, sizes: {sizes}",);
 
-            for (item, linkage) in cgu.items_in_deterministic_order(tcx) {
+            for (item, data) in cgu.items_in_deterministic_order(tcx) {
+                let linkage = data.linkage;
                 let symbol_name = item.symbol_name(tcx).name;
                 let symbol_hash_start = symbol_name.rfind('h');
                 let symbol_hash = symbol_hash_start.map_or("<no hash>", |i| &symbol_name[i..]);
-                let size = item.size_estimate(tcx);
                 let kind = match item.instantiation_mode(tcx) {
                     InstantiationMode::GloballyShared { .. } => "root",
                     InstantiationMode::LocalCopy => "inlined",
                 };
+                let size = data.size_estimate;
                 let _ = with_no_trimmed_paths!(writeln!(
                     s,
                     " - {item} [{linkage:?}] [{symbol_hash}] ({kind}, size: {size})"
@@ -1100,8 +1096,8 @@ fn collect_and_partition_mono_items(tcx: TyCtxt<'_>, (): ()) -> (&DefIdSet, &[Co
         let mut item_to_cgus: FxHashMap<_, Vec<_>> = Default::default();
 
         for cgu in codegen_units {
-            for (&mono_item, &linkage) in cgu.items() {
-                item_to_cgus.entry(mono_item).or_default().push((cgu.name(), linkage));
+            for (&mono_item, &data) in cgu.items() {
+                item_to_cgus.entry(mono_item).or_default().push((cgu.name(), data.linkage));
             }
         }
 
@@ -1114,7 +1110,7 @@ fn collect_and_partition_mono_items(tcx: TyCtxt<'_>, (): ()) -> (&DefIdSet, &[Co
             let cgus = item_to_cgus.get_mut(i).unwrap_or(&mut empty);
             cgus.sort_by_key(|(name, _)| *name);
             cgus.dedup();
-            for &(ref cgu_name, (linkage, _)) in cgus.iter() {
+            for &(ref cgu_name, linkage) in cgus.iter() {
                 output.push(' ');
                 output.push_str(cgu_name.as_str());
 
@@ -311,22 +311,6 @@ fn param_env_reveal_all_normalized(tcx: TyCtxt<'_>, def_id: DefId) -> ty::ParamE
     tcx.param_env(def_id).with_reveal_all_normalized(tcx)
 }
 
-fn instance_def_size_estimate<'tcx>(
-    tcx: TyCtxt<'tcx>,
-    instance_def: ty::InstanceDef<'tcx>,
-) -> usize {
-    use ty::InstanceDef;
-
-    match instance_def {
-        InstanceDef::Item(..) | InstanceDef::DropGlue(..) => {
-            let mir = tcx.instance_mir(instance_def);
-            mir.basic_blocks.iter().map(|bb| bb.statements.len() + 1).sum()
-        }
-        // Estimate the size of other compiler-generated shims to be 1.
-        _ => 1,
-    }
-}
-
 /// If `def_id` is an issue 33140 hack impl, returns its self type; otherwise, returns `None`.
 ///
 /// See [`ty::ImplOverlapKind::Issue33140`] for more details.
@@ -432,7 +416,6 @@ pub fn provide(providers: &mut Providers) {
         adt_sized_constraint,
         param_env,
         param_env_reveal_all_normalized,
-        instance_def_size_estimate,
         issue33140_self_ty,
         defaultness,
         unsizing_params_for_adt,