Auto merge of #120931 - chenyukang:yukang-cleanup-hashmap, r=michaelwoerister

Clean up potential_query_instability with FxIndexMap and UnordMap

From https://github.com/rust-lang/rust/pull/120485#issuecomment-1916437191

r? `@michaelwoerister`

Commit: fa9f77ff35
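Every hunk below targets rustc's internal `potential_query_instability` lint, which fires when code iterates over an `FxHashMap`/`FxHashSet` whose iteration order is not stable, so that order could leak into diagnostics or build artifacts. The fix is either an order-preserving container (`FxIndexMap`/`FxIndexSet`, which iterate in insertion order) or an `UnordMap`, which keeps hashing but refuses to expose any order at all. As a rough illustration of the difference being relied on (not rustc code; it assumes a dependency on the public `indexmap` crate, which `FxIndexMap` wraps with a faster hasher):

```rust
use std::collections::HashMap;

use indexmap::IndexMap; // assumed dependency: indexmap = "2"

fn main() {
    // HashMap iteration order depends on the hasher state and insertion
    // history, so anything derived from it can differ between runs.
    let mut unstable = HashMap::new();
    // IndexMap always iterates in insertion order, so derived output is stable.
    let mut stable = IndexMap::new();
    for key in ["c", "a", "b"] {
        unstable.insert(key, key.len());
        stable.insert(key, key.len());
    }
    println!("{:?}", unstable.keys().collect::<Vec<_>>()); // order not guaranteed
    println!("{:?}", stable.keys().collect::<Vec<_>>());   // always ["c", "a", "b"]
}
```

With the order-preserving map, whatever is derived from the iteration (an error message, a file, a symbol list) comes out the same on every run, so the lint and its `#[allow]` escape hatches can be dropped.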
@@ -15,7 +15,7 @@
 #![allow(rustc::diagnostic_outside_of_impl)]
 #![allow(rustc::untranslatable_diagnostic)]
 
-use rustc_data_structures::fx::FxHashMap;
+use rustc_data_structures::fx::FxIndexMap;
 use rustc_errors::Diagnostic;
 use rustc_hir::def_id::{DefId, LocalDefId};
 use rustc_hir::lang_items::LangItem;
@@ -180,7 +180,7 @@ struct UniversalRegionIndices<'tcx> {
     /// basically equivalent to an `GenericArgs`, except that it also
     /// contains an entry for `ReStatic` -- it might be nice to just
    /// use an args, and then handle `ReStatic` another way.
-    indices: FxHashMap<ty::Region<'tcx>, RegionVid>,
+    indices: FxIndexMap<ty::Region<'tcx>, RegionVid>,
 
     /// The vid assigned to `'static`. Used only for diagnostics.
     pub fr_static: RegionVid,
@@ -325,9 +325,6 @@ impl<'tcx> UniversalRegions<'tcx> {
     }
 
     /// Gets an iterator over all the early-bound regions that have names.
-    /// Iteration order may be unstable, so this should only be used when
-    /// iteration order doesn't affect anything
-    #[allow(rustc::potential_query_instability)]
     pub fn named_universal_regions<'s>(
         &'s self,
     ) -> impl Iterator<Item = (ty::Region<'tcx>, ty::RegionVid)> + 's {
@@ -21,6 +21,7 @@ use rustc_middle::dep_graph::WorkProduct;
 use rustc_middle::middle::exported_symbols::{SymbolExportInfo, SymbolExportLevel};
 use rustc_session::config::{self, CrateType, Lto};
 
+use std::collections::BTreeMap;
 use std::ffi::{CStr, CString};
 use std::fs::File;
 use std::io;
@@ -787,7 +788,7 @@ pub unsafe fn optimize_thin_module(
 #[derive(Debug, Default)]
 pub struct ThinLTOKeysMap {
     // key = llvm name of importing module, value = LLVM cache key
-    keys: FxHashMap<String, String>,
+    keys: BTreeMap<String, String>,
 }
 
 impl ThinLTOKeysMap {
@@ -797,7 +798,6 @@ impl ThinLTOKeysMap {
         let mut writer = io::BufWriter::new(file);
         // The entries are loaded back into a hash map in `load_from_file()`, so
         // the order in which we write them to file here does not matter.
-        #[allow(rustc::potential_query_instability)]
         for (module, key) in &self.keys {
             writeln!(writer, "{module} {key}")?;
         }
@@ -806,7 +806,7 @@ impl ThinLTOKeysMap {
 
     fn load_from_file(path: &Path) -> io::Result<Self> {
         use std::io::BufRead;
-        let mut keys = FxHashMap::default();
+        let mut keys = BTreeMap::default();
         let file = File::open(path)?;
         for line in io::BufReader::new(file).lines() {
             let line = line?;
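In the hunks above, `ThinLTOKeysMap` switches from `FxHashMap` to `std::collections::BTreeMap`, so writing the keys file visits entries in sorted key order and the `#[allow(rustc::potential_query_instability)]` on the write loop is no longer needed. A minimal std-only sketch of that property (the `write_keys` helper and the sample data are made up for illustration):

```rust
use std::collections::BTreeMap;
use std::io::{self, Write};

// Writing out a BTreeMap always visits keys in sorted order, so the output
// is byte-for-byte reproducible regardless of insertion order.
fn write_keys(keys: &BTreeMap<String, String>, mut out: impl Write) -> io::Result<()> {
    for (module, key) in keys {
        writeln!(out, "{module} {key}")?;
    }
    Ok(())
}

fn main() -> io::Result<()> {
    let mut keys = BTreeMap::new();
    keys.insert("b.o".to_string(), "key-b".to_string());
    keys.insert("a.o".to_string(), "key-a".to_string());
    write_keys(&keys, io::stdout()) // prints "a.o key-a" before "b.o key-b"
}
```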
@@ -403,7 +403,6 @@ fn codegenned_and_inlined_items(tcx: TyCtxt<'_>) -> DefIdSet {
     let mut result = items.clone();
 
     for cgu in cgus {
-        #[allow(rustc::potential_query_instability)]
         for item in cgu.items().keys() {
             if let mir::mono::MonoItem::Fn(ref instance) = item {
                 let did = instance.def_id();
@@ -25,7 +25,7 @@
 
 use crate::errors;
 use rustc_ast as ast;
-use rustc_data_structures::fx::FxHashMap;
+use rustc_data_structures::unord::UnordMap;
 use rustc_data_structures::unord::UnordSet;
 use rustc_errors::{DiagnosticArgValue, IntoDiagnosticArg};
 use rustc_hir::def_id::LOCAL_CRATE;
@@ -218,8 +218,8 @@ pub enum ComparisonKind {
 }
 
 struct TrackerData {
-    actual_reuse: FxHashMap<String, CguReuse>,
-    expected_reuse: FxHashMap<String, (String, Span, CguReuse, ComparisonKind)>,
+    actual_reuse: UnordMap<String, CguReuse>,
+    expected_reuse: UnordMap<String, (String, Span, CguReuse, ComparisonKind)>,
 }
 
 pub struct CguReuseTracker {
@@ -267,9 +267,7 @@ impl CguReuseTracker {
 
     fn check_expected_reuse(&self, sess: &Session) {
         if let Some(ref data) = self.data {
-            #[allow(rustc::potential_query_instability)]
-            let mut keys = data.expected_reuse.keys().collect::<Vec<_>>();
-            keys.sort_unstable();
+            let keys = data.expected_reuse.keys().into_sorted_stable_ord();
             for cgu_name in keys {
                 let &(ref cgu_user_name, ref error_span, expected_reuse, comparison_kind) =
                     data.expected_reuse.get(cgu_name).unwrap();
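`CguReuseTracker` now stores its data in `UnordMap` and asks for `keys().into_sorted_stable_ord()`: it never iterates in hash order, it gets the keys back already sorted. `UnordMap` is internal to `rustc_data_structures`, so here is a rough std-only sketch of the underlying before/after pattern, with invented sample data:

```rust
use std::collections::HashMap;

fn main() {
    let expected_reuse: HashMap<String, &str> =
        [("cgu_b".to_string(), "PostLto"), ("cgu_a".to_string(), "PreLto")]
            .into_iter()
            .collect();

    // Equivalent of `keys().into_sorted_stable_ord()`: pull the keys out of
    // hash order and sort them, so the reporting loop below is deterministic.
    let mut keys: Vec<&String> = expected_reuse.keys().collect();
    keys.sort_unstable();

    for cgu_name in keys {
        println!("{cgu_name}: {}", expected_reuse[cgu_name]);
    }
}
```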
@@ -1,4 +1,4 @@
-use rustc_data_structures::fx::FxHashSet;
+use rustc_data_structures::fx::FxIndexSet;
 use rustc_data_structures::memmap::Mmap;
 use rustc_session::cstore::DllImport;
 use rustc_session::Session;
@@ -41,7 +41,7 @@ pub trait ArchiveBuilderBuilder {
         &'a self,
         rlib: &'a Path,
         outdir: &Path,
-        bundled_lib_file_names: &FxHashSet<Symbol>,
+        bundled_lib_file_names: &FxIndexSet<Symbol>,
     ) -> Result<(), ExtractBundledLibsError<'_>> {
         let archive_map = unsafe {
             Mmap::map(
@@ -1,7 +1,6 @@
 use rustc_arena::TypedArena;
 use rustc_ast::CRATE_NODE_ID;
-use rustc_data_structures::fx::FxHashSet;
-use rustc_data_structures::fx::FxIndexMap;
+use rustc_data_structures::fx::{FxIndexMap, FxIndexSet};
 use rustc_data_structures::memmap::Mmap;
 use rustc_data_structures::temp_dir::MaybeTempDir;
 use rustc_errors::{DiagCtxt, ErrorGuaranteed};
@@ -534,9 +533,9 @@ fn link_staticlib<'a>(
 
     let native_libs = codegen_results.crate_info.native_libraries[&cnum].iter();
     let relevant = native_libs.clone().filter(|lib| relevant_lib(sess, lib));
-    let relevant_libs: FxHashSet<_> = relevant.filter_map(|lib| lib.filename).collect();
+    let relevant_libs: FxIndexSet<_> = relevant.filter_map(|lib| lib.filename).collect();
 
-    let bundled_libs: FxHashSet<_> = native_libs.filter_map(|lib| lib.filename).collect();
+    let bundled_libs: FxIndexSet<_> = native_libs.filter_map(|lib| lib.filename).collect();
     ab.add_archive(
         path,
         Box::new(move |fname: &str| {
@@ -564,11 +563,7 @@ fn link_staticlib<'a>(
         .extract_bundled_libs(path, tempdir.as_ref(), &relevant_libs)
         .unwrap_or_else(|e| sess.dcx().emit_fatal(e));
 
-    // We sort the libraries below
-    #[allow(rustc::potential_query_instability)]
-    let mut relevant_libs: Vec<Symbol> = relevant_libs.into_iter().collect();
-    relevant_libs.sort_unstable();
-    for filename in relevant_libs {
+    for filename in relevant_libs.iter() {
         let joined = tempdir.as_ref().join(filename.as_str());
         let path = joined.as_path();
         ab.add_archive(path, Box::new(|_| false)).unwrap();
@@ -682,13 +677,14 @@ fn link_dwarf_object<'a>(
     }
 
     // Input rlibs contain .o/.dwo files from dependencies.
-    #[allow(rustc::potential_query_instability)]
     let input_rlibs = cg_results
         .crate_info
         .used_crate_source
-        .values()
-        .filter_map(|csource| csource.rlib.as_ref())
-        .map(|(path, _)| path);
+        .items()
+        .filter_map(|(_, csource)| csource.rlib.as_ref())
+        .map(|(path, _)| path)
+        .into_sorted_stable_ord();
 
     for input_rlib in input_rlibs {
         debug!(?input_rlib);
         package.add_input_object(input_rlib)?;
@@ -2456,7 +2452,7 @@ fn add_native_libs_from_crate(
     codegen_results: &CodegenResults,
     tmpdir: &Path,
     search_paths: &SearchPaths,
-    bundled_libs: &FxHashSet<Symbol>,
+    bundled_libs: &FxIndexSet<Symbol>,
     cnum: CrateNum,
     link_static: bool,
     link_dynamic: bool,
@@ -2777,7 +2773,7 @@ fn add_static_crate<'a>(
    codegen_results: &CodegenResults,
     tmpdir: &Path,
     cnum: CrateNum,
-    bundled_lib_file_names: &FxHashSet<Symbol>,
+    bundled_lib_file_names: &FxIndexSet<Symbol>,
 ) {
     let src = &codegen_results.crate_info.used_crate_source[&cnum];
     let cratepath = &src.rlib.as_ref().unwrap().0;
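The linker code above replaces `FxHashSet` with `FxIndexSet` for the relevant/bundled library names, which lets the later loops walk the names in the order they were first collected and drop the explicit sort. A small sketch of that behaviour using the public `indexmap` crate (assumed dependency; the library names are made up):

```rust
use indexmap::IndexSet; // assumed dependency: indexmap = "2"

fn main() {
    let native_libs = ["libz", "libfoo", "libbar", "libfoo"];

    // Collecting into an IndexSet dedups like a hash set but remembers the
    // order in which names were first seen, so no post-hoc sort is needed
    // to keep the link step deterministic.
    let bundled: IndexSet<&str> = native_libs.iter().copied().collect();

    for name in &bundled {
        println!("bundling {name}"); // libz, libfoo, libbar -- same order every run
    }
}
```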
@@ -1001,7 +1001,7 @@ pub struct CguMessage;
 
 struct Diagnostic {
     msgs: Vec<(DiagnosticMessage, Style)>,
-    args: FxHashMap<DiagnosticArgName, DiagnosticArgValue>,
+    args: FxIndexMap<DiagnosticArgName, DiagnosticArgValue>,
     code: Option<ErrCode>,
     lvl: Level,
 }
@@ -1813,7 +1813,7 @@ impl Translate for SharedEmitter {
 
 impl Emitter for SharedEmitter {
     fn emit_diagnostic(&mut self, diag: rustc_errors::Diagnostic) {
-        let args: FxHashMap<DiagnosticArgName, DiagnosticArgValue> =
+        let args: FxIndexMap<DiagnosticArgName, DiagnosticArgValue> =
             diag.args().map(|(name, arg)| (name.clone(), arg.clone())).collect();
         drop(self.sender.send(SharedEmitterMessage::Diagnostic(Diagnostic {
             msgs: diag.messages.clone(),
@@ -16,9 +16,10 @@ use crate::{CachedModuleCodegen, CompiledModule, CrateInfo, MemFlags, ModuleCode
 
 use rustc_ast::expand::allocator::{global_fn_name, AllocatorKind, ALLOCATOR_METHODS};
 use rustc_attr as attr;
-use rustc_data_structures::fx::{FxHashMap, FxHashSet};
+use rustc_data_structures::fx::{FxHashMap, FxIndexSet};
 use rustc_data_structures::profiling::{get_resident_set_size, print_time_passes_entry};
 use rustc_data_structures::sync::par_map;
+use rustc_data_structures::unord::UnordMap;
 use rustc_hir as hir;
 use rustc_hir::def_id::{DefId, LOCAL_CRATE};
 use rustc_hir::lang_items::LangItem;
@@ -851,6 +852,8 @@ impl CrateInfo {
         // `compiler_builtins` are always placed last to ensure that they're linked correctly.
         used_crates.extend(compiler_builtins);
 
+        let crates = tcx.crates(());
+        let n_crates = crates.len();
         let mut info = CrateInfo {
             target_cpu,
             crate_types,
@@ -862,19 +865,15 @@ impl CrateInfo {
             is_no_builtins: Default::default(),
             native_libraries: Default::default(),
             used_libraries: tcx.native_libraries(LOCAL_CRATE).iter().map(Into::into).collect(),
-            crate_name: Default::default(),
+            crate_name: UnordMap::with_capacity(n_crates),
             used_crates,
-            used_crate_source: Default::default(),
+            used_crate_source: UnordMap::with_capacity(n_crates),
             dependency_formats: tcx.dependency_formats(()).clone(),
             windows_subsystem,
             natvis_debugger_visualizers: Default::default(),
         };
-        let crates = tcx.crates(());
-
-        let n_crates = crates.len();
-        info.native_libraries.reserve(n_crates);
-        info.crate_name.reserve(n_crates);
-        info.used_crate_source.reserve(n_crates);
 
         for &cnum in crates.iter() {
             info.native_libraries
@@ -901,7 +900,7 @@ impl CrateInfo {
         // by the compiler, but that's ok because all this stuff is unstable anyway.
         let target = &tcx.sess.target;
         if !are_upstream_rust_objects_already_included(tcx.sess) {
-            let missing_weak_lang_items: FxHashSet<Symbol> = info
+            let missing_weak_lang_items: FxIndexSet<Symbol> = info
                 .used_crates
                 .iter()
                 .flat_map(|&cnum| tcx.missing_lang_items(cnum))
@@ -24,8 +24,10 @@ extern crate tracing;
 extern crate rustc_middle;
 
 use rustc_ast as ast;
-use rustc_data_structures::fx::{FxHashMap, FxHashSet};
+use rustc_data_structures::fx::FxHashSet;
+use rustc_data_structures::fx::FxIndexMap;
 use rustc_data_structures::sync::Lrc;
+use rustc_data_structures::unord::UnordMap;
 use rustc_hir::def_id::CrateNum;
 use rustc_middle::dep_graph::WorkProduct;
 use rustc_middle::middle::debugger_visualizer::DebuggerVisualizerFile;
@@ -152,16 +154,16 @@ impl From<&cstore::NativeLib> for NativeLib {
 pub struct CrateInfo {
     pub target_cpu: String,
     pub crate_types: Vec<CrateType>,
-    pub exported_symbols: FxHashMap<CrateType, Vec<String>>,
-    pub linked_symbols: FxHashMap<CrateType, Vec<(String, SymbolExportKind)>>,
+    pub exported_symbols: UnordMap<CrateType, Vec<String>>,
+    pub linked_symbols: FxIndexMap<CrateType, Vec<(String, SymbolExportKind)>>,
     pub local_crate_name: Symbol,
     pub compiler_builtins: Option<CrateNum>,
     pub profiler_runtime: Option<CrateNum>,
     pub is_no_builtins: FxHashSet<CrateNum>,
-    pub native_libraries: FxHashMap<CrateNum, Vec<NativeLib>>,
-    pub crate_name: FxHashMap<CrateNum, Symbol>,
+    pub native_libraries: FxIndexMap<CrateNum, Vec<NativeLib>>,
+    pub crate_name: UnordMap<CrateNum, Symbol>,
     pub used_libraries: Vec<NativeLib>,
-    pub used_crate_source: FxHashMap<CrateNum, Lrc<CrateSource>>,
+    pub used_crate_source: UnordMap<CrateNum, Lrc<CrateSource>>,
     pub used_crates: Vec<CrateNum>,
     pub dependency_formats: Lrc<Dependencies>,
     pub windows_subsystem: Option<String>,
@@ -13,7 +13,7 @@ use std::fmt;
 use std::ptr;
 
 use rustc_ast::Mutability;
-use rustc_data_structures::fx::{FxHashMap, FxHashSet};
+use rustc_data_structures::fx::{FxHashSet, FxIndexMap};
 use rustc_middle::mir::display_allocation;
 use rustc_middle::ty::{self, Instance, ParamEnv, Ty, TyCtxt};
 use rustc_target::abi::{Align, HasDataLayout, Size};
@@ -104,13 +104,13 @@ pub struct Memory<'mir, 'tcx, M: Machine<'mir, 'tcx>> {
     pub(super) alloc_map: M::MemoryMap,
 
     /// Map for "extra" function pointers.
-    extra_fn_ptr_map: FxHashMap<AllocId, M::ExtraFnVal>,
+    extra_fn_ptr_map: FxIndexMap<AllocId, M::ExtraFnVal>,
 
     /// To be able to compare pointers with null, and to check alignment for accesses
     /// to ZSTs (where pointers may dangle), we keep track of the size even for allocations
     /// that do not exist any more.
     // FIXME: this should not be public, but interning currently needs access to it
-    pub(super) dead_alloc_map: FxHashMap<AllocId, (Size, Align)>,
+    pub(super) dead_alloc_map: FxIndexMap<AllocId, (Size, Align)>,
 }
 
 /// A reference to some allocation that was already bounds-checked for the given region
@@ -135,8 +135,8 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
     pub fn new() -> Self {
         Memory {
             alloc_map: M::MemoryMap::default(),
-            extra_fn_ptr_map: FxHashMap::default(),
-            dead_alloc_map: FxHashMap::default(),
+            extra_fn_ptr_map: FxIndexMap::default(),
+            dead_alloc_map: FxIndexMap::default(),
         }
     }
@@ -524,6 +524,11 @@ impl<K: Eq + Hash, V> UnordMap<K, V> {
         UnordItems(self.inner.into_iter())
     }
 
+    #[inline]
+    pub fn keys(&self) -> UnordItems<&K, impl Iterator<Item = &K>> {
+        UnordItems(self.inner.keys())
+    }
+
     /// Returns the entries of this map in stable sort order (as defined by `ToStableHashKey`).
     ///
     /// The `cache_sort_key` parameter controls if [slice::sort_by_cached_key] or
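This hunk adds a `keys()` accessor to `UnordMap`, returning the `UnordItems` wrapper whose whole point is that callers cannot observe an order without explicitly asking for a sorted view (e.g. `into_sorted_stable_ord()`). A much-simplified sketch of that wrapper idea, not the real rustc type and restricted to `Ord` keys:

```rust
use std::collections::HashMap;
use std::hash::Hash;

// Simplified stand-in for rustc's UnordMap/UnordItems: the only way to get an
// ordered view out is to sort, so hash-map iteration order can never leak.
struct UnordMapSketch<K, V>(HashMap<K, V>);

impl<K: Eq + Hash + Ord, V> UnordMapSketch<K, V> {
    // Order-insensitive access, e.g. for counting or set-like checks.
    fn keys(&self) -> impl Iterator<Item = &K> {
        self.0.keys()
    }

    // Analogue of `into_sorted_stable_ord()`, using plain `Ord` as the stable key.
    fn keys_sorted(&self) -> Vec<&K> {
        let mut keys: Vec<&K> = self.0.keys().collect();
        keys.sort_unstable();
        keys
    }
}

fn main() {
    let map = UnordMapSketch([("b", 2), ("a", 1)].into_iter().collect());
    println!("{} keys: {:?}", map.keys().count(), map.keys_sorted());
}
```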
@@ -3,7 +3,7 @@ use crate::{
     CodeSuggestion, DiagnosticBuilder, DiagnosticMessage, EmissionGuarantee, ErrCode, Level,
     MultiSpan, SubdiagnosticMessage, Substitution, SubstitutionPart, SuggestionStyle,
 };
-use rustc_data_structures::fx::{FxHashMap, FxIndexMap};
+use rustc_data_structures::fx::FxIndexMap;
 use rustc_error_messages::fluent_value_from_str_list_sep_by_and;
 use rustc_error_messages::FluentValue;
 use rustc_lint_defs::{Applicability, LintExpectationId};
@@ -105,7 +105,7 @@ pub struct Diagnostic {
     pub span: MultiSpan,
     pub children: Vec<SubDiagnostic>,
     pub suggestions: Result<Vec<CodeSuggestion>, SuggestionsDisabled>,
-    args: FxHashMap<DiagnosticArgName, DiagnosticArgValue>,
+    args: FxIndexMap<DiagnosticArgName, DiagnosticArgValue>,
 
     /// This is not used for highlighting or rendering any error message. Rather, it can be used
     /// as a sort key to sort a buffer of diagnostics. By default, it is the primary span of
@@ -898,9 +898,6 @@ impl Diagnostic {
         self
     }
 
-    // Exact iteration order of diagnostic arguments shouldn't make a difference to output because
-    // they're only used in interpolation.
-    #[allow(rustc::potential_query_instability)]
     pub fn args(&self) -> impl Iterator<Item = DiagnosticArg<'_>> {
         self.args.iter()
     }
@@ -914,7 +911,7 @@ impl Diagnostic {
         self
     }
 
-    pub fn replace_args(&mut self, args: FxHashMap<DiagnosticArgName, DiagnosticArgValue>) {
+    pub fn replace_args(&mut self, args: FxIndexMap<DiagnosticArgName, DiagnosticArgValue>) {
         self.args = args;
     }
@@ -1,4 +1,4 @@
-use rustc_data_structures::fx::FxHashMap;
+use rustc_data_structures::fx::FxIndexMap;
 use rustc_errors::{codes::*, struct_span_code_err};
 use rustc_hir as hir;
 use rustc_hir::def::{DefKind, Res};
@@ -241,7 +241,7 @@ impl<'tcx> dyn AstConv<'tcx> + '_ {
         binding: &ConvertedBinding<'_, 'tcx>,
         bounds: &mut Bounds<'tcx>,
         speculative: bool,
-        dup_bindings: &mut FxHashMap<DefId, Span>,
+        dup_bindings: &mut FxIndexMap<DefId, Span>,
         path_span: Span,
         only_self_bounds: OnlySelfBounds,
     ) -> Result<(), ErrorGuaranteed> {
@@ -5,7 +5,7 @@ use crate::errors::{
 };
 use crate::fluent_generated as fluent;
 use crate::traits::error_reporting::report_object_safety_error;
-use rustc_data_structures::fx::{FxHashMap, FxIndexMap, FxIndexSet};
+use rustc_data_structures::fx::{FxIndexMap, FxIndexSet};
 use rustc_data_structures::sorted_map::SortedMap;
 use rustc_data_structures::unord::UnordMap;
 use rustc_errors::{
@@ -806,7 +806,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
         if suggestions.len() != 1 || already_has_generics_args_suggestion {
             // We don't need this label if there's an inline suggestion, show otherwise.
             for (span, assoc_items) in &associated_types {
-                let mut names: FxHashMap<_, usize> = FxHashMap::default();
+                let mut names: FxIndexMap<_, usize> = FxIndexMap::default();
                 for item in assoc_items {
                     types_count += 1;
                     *names.entry(item.name).or_insert(0) += 1;
@@ -16,7 +16,7 @@ use crate::errors::AmbiguousLifetimeBound;
 use crate::middle::resolve_bound_vars as rbv;
 use crate::require_c_abi_if_c_variadic;
 use rustc_ast::TraitObjectSyntax;
-use rustc_data_structures::fx::{FxHashMap, FxHashSet};
+use rustc_data_structures::fx::{FxHashSet, FxIndexMap};
 use rustc_errors::{
     codes::*, struct_span_code_err, Applicability, Diagnostic, DiagnosticBuilder, ErrorGuaranteed,
     FatalError, MultiSpan,
@@ -752,7 +752,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
         debug!(?poly_trait_ref, ?assoc_bindings);
         bounds.push_trait_bound(tcx, poly_trait_ref, span, polarity);
 
-        let mut dup_bindings = FxHashMap::default();
+        let mut dup_bindings = FxIndexMap::default();
         for binding in &assoc_bindings {
             // Don't register additional associated type bounds for negative bounds,
             // since we should have emitten an error for them earlier, and they will
@@ -1367,7 +1367,7 @@ fn check_type_alias_type_params_are_used<'tcx>(tcx: TyCtxt<'tcx>, def_id: LocalD
             // `Sized` bounds. If they came last for example, this would break `Trait + /*elab*/Sized`
             // since it would overwrite the span of the user-written bound. This could be fixed by
             // folding the spans with `Span::to` which requires a bit of effort I think.
-            .collect::<FxHashMap<_, _>>()
+            .collect::<FxIndexMap<_, _>>()
     });
 
     let mut params_used = BitSet::new_empty(generics.params.len());
@@ -1,7 +1,7 @@
 use super::potentially_plural_count;
 use crate::errors::{LifetimesOrBoundsMismatchOnTrait, MethodShouldReturnFuture};
 use hir::def_id::{DefId, DefIdMap, LocalDefId};
-use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexSet};
+use rustc_data_structures::fx::{FxHashSet, FxIndexMap, FxIndexSet};
 use rustc_errors::{codes::*, pluralize, struct_span_code_err, Applicability, ErrorGuaranteed};
 use rustc_hir as hir;
 use rustc_hir::def::{DefKind, Res};
@@ -392,7 +392,7 @@ fn compare_method_predicate_entailment<'tcx>(
 
 struct RemapLateBound<'a, 'tcx> {
     tcx: TyCtxt<'tcx>,
-    mapping: &'a FxHashMap<ty::BoundRegionKind, ty::BoundRegionKind>,
+    mapping: &'a FxIndexMap<ty::BoundRegionKind, ty::BoundRegionKind>,
 }
 
 impl<'tcx> TypeFolder<TyCtxt<'tcx>> for RemapLateBound<'_, 'tcx> {
@@ -553,7 +553,7 @@ pub(super) fn collect_return_position_impl_trait_in_trait_tys<'tcx>(
     // prove below that the hidden types are well formed.
     let universe = infcx.create_next_universe();
     let mut idx = 0;
-    let mapping: FxHashMap<_, _> = collector
+    let mapping: FxIndexMap<_, _> = collector
         .types
         .iter()
         .map(|(_, &(ty, _))| {
@@ -690,7 +690,7 @@ pub(super) fn collect_return_position_impl_trait_in_trait_tys<'tcx>(
     // contains `def_id`'s early-bound regions.
     let id_args = GenericArgs::identity_for_item(tcx, def_id);
     debug!(?id_args, ?args);
-    let map: FxHashMap<_, _> = std::iter::zip(args, id_args)
+    let map: FxIndexMap<_, _> = std::iter::zip(args, id_args)
         .skip(tcx.generics_of(trait_m.def_id).count())
         .filter_map(|(a, b)| Some((a.as_region()?, b.as_region()?)))
         .collect();
@@ -766,7 +766,7 @@ pub(super) fn collect_return_position_impl_trait_in_trait_tys<'tcx>(
 
 struct ImplTraitInTraitCollector<'a, 'tcx> {
     ocx: &'a ObligationCtxt<'a, 'tcx>,
-    types: FxHashMap<DefId, (Ty<'tcx>, ty::GenericArgsRef<'tcx>)>,
+    types: FxIndexMap<DefId, (Ty<'tcx>, ty::GenericArgsRef<'tcx>)>,
     span: Span,
     param_env: ty::ParamEnv<'tcx>,
     body_id: LocalDefId,
@@ -779,7 +779,7 @@ impl<'a, 'tcx> ImplTraitInTraitCollector<'a, 'tcx> {
         param_env: ty::ParamEnv<'tcx>,
         body_id: LocalDefId,
     ) -> Self {
-        ImplTraitInTraitCollector { ocx, types: FxHashMap::default(), span, param_env, body_id }
+        ImplTraitInTraitCollector { ocx, types: FxIndexMap::default(), span, param_env, body_id }
     }
 }
 
@@ -838,7 +838,7 @@ impl<'tcx> TypeFolder<TyCtxt<'tcx>> for ImplTraitInTraitCollector<'_, 'tcx> {
 
 struct RemapHiddenTyRegions<'tcx> {
     tcx: TyCtxt<'tcx>,
-    map: FxHashMap<ty::Region<'tcx>, ty::Region<'tcx>>,
+    map: FxIndexMap<ty::Region<'tcx>, ty::Region<'tcx>>,
     num_trait_args: usize,
     num_impl_args: usize,
     def_id: DefId,
@@ -76,7 +76,7 @@ pub use check::check_abi;
 
 use std::num::NonZeroU32;
 
-use rustc_data_structures::fx::{FxHashMap, FxHashSet};
+use rustc_data_structures::fx::{FxHashSet, FxIndexMap};
 use rustc_errors::ErrorGuaranteed;
 use rustc_errors::{pluralize, struct_span_code_err, Diagnostic, DiagnosticBuilder};
 use rustc_hir::def_id::{DefId, LocalDefId};
@@ -307,7 +307,7 @@ fn bounds_from_generic_predicates<'tcx>(
     tcx: TyCtxt<'tcx>,
     predicates: impl IntoIterator<Item = (ty::Clause<'tcx>, Span)>,
 ) -> (String, String) {
-    let mut types: FxHashMap<Ty<'tcx>, Vec<DefId>> = FxHashMap::default();
+    let mut types: FxIndexMap<Ty<'tcx>, Vec<DefId>> = FxIndexMap::default();
     let mut projections = vec![];
     for (predicate, _) in predicates {
         debug!("predicate {:?}", predicate);
@@ -1,4 +1,5 @@
-use rustc_data_structures::fx::{FxHashMap, FxHashSet};
+use rustc_data_structures::fx::IndexEntry;
+use rustc_data_structures::fx::{FxHashSet, FxIndexMap};
 use rustc_errors::{codes::*, struct_span_code_err};
 use rustc_hir as hir;
 use rustc_hir::def::DefKind;
@@ -9,7 +10,6 @@ use rustc_middle::ty::{self, TyCtxt};
 use rustc_span::{ErrorGuaranteed, Symbol};
 use rustc_trait_selection::traits::{self, SkipLeakCheck};
 use smallvec::SmallVec;
-use std::collections::hash_map::Entry;
 
 pub fn crate_inherent_impls_overlap_check(tcx: TyCtxt<'_>, (): ()) -> Result<(), ErrorGuaranteed> {
     let mut inherent_overlap_checker = InherentOverlapChecker { tcx };
@@ -63,7 +63,7 @@ impl<'tcx> InherentOverlapChecker<'tcx> {
     fn check_for_duplicate_items_in_impl(&self, impl_: DefId) -> Result<(), ErrorGuaranteed> {
         let impl_items = self.tcx.associated_items(impl_);
 
-        let mut seen_items = FxHashMap::default();
+        let mut seen_items = FxIndexMap::default();
         let mut res = Ok(());
         for impl_item in impl_items.in_definition_order() {
             let span = self.tcx.def_span(impl_item.def_id);
@@ -71,7 +71,7 @@ impl<'tcx> InherentOverlapChecker<'tcx> {
 
             let norm_ident = ident.normalize_to_macros_2_0();
             match seen_items.entry(norm_ident) {
-                Entry::Occupied(entry) => {
+                IndexEntry::Occupied(entry) => {
                     let former = entry.get();
                     res = Err(struct_span_code_err!(
                         self.tcx.dcx(),
@@ -84,7 +84,7 @@ impl<'tcx> InherentOverlapChecker<'tcx> {
                     .with_span_label(*former, format!("other definition for `{ident}`"))
                     .emit());
                 }
-                Entry::Vacant(entry) => {
+                IndexEntry::Vacant(entry) => {
                     entry.insert(span);
                 }
             }
@@ -216,7 +216,7 @@ impl<'tcx> InherentOverlapChecker<'tcx> {
         }
         let mut connected_regions: IndexVec<RegionId, _> = Default::default();
         // Reverse map from the Symbol to the connected region id.
-        let mut connected_region_ids = FxHashMap::default();
+        let mut connected_region_ids = FxIndexMap::default();
 
         for (i, &(&_impl_def_id, impl_items)) in impls_items.iter().enumerate() {
             if impl_items.len() == 0 {
@@ -228,7 +228,7 @@ impl<'tcx> InherentOverlapChecker<'tcx> {
                 .in_definition_order()
                 .filter_map(|item| {
                     let entry = connected_region_ids.entry(item.name);
-                    if let Entry::Occupied(e) = &entry {
+                    if let IndexEntry::Occupied(e) = &entry {
                         Some(*e.get())
                     } else {
                         idents_to_add.push(item.name);
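The overlap checker keeps using an entry-style API, but on `FxIndexMap` the entry type is `IndexEntry` (indexmap's entry, re-exported through `rustc_data_structures::fx`) rather than `std::collections::hash_map::Entry`. The shape of the API is the same; a standalone sketch against the public `indexmap` crate (assumed dependency, with invented identifiers and spans):

```rust
use indexmap::map::{Entry, IndexMap}; // assumed dependency: indexmap = "2"

fn main() {
    // Maps an item name to the "span" (here just a number) where it was first seen.
    let mut seen: IndexMap<&str, usize> = IndexMap::new();

    for (ident, span) in [("new", 1), ("len", 2), ("new", 3)] {
        match seen.entry(ident) {
            // A second definition reports the span recorded for the first one.
            Entry::Occupied(entry) => {
                println!("duplicate `{ident}` at {span}, first defined at {}", entry.get())
            }
            Entry::Vacant(entry) => {
                entry.insert(span);
            }
        }
    }
}
```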
@@ -15,7 +15,7 @@
 //! crate as a kind of pass. This should eventually be factored away.
 
 use rustc_data_structures::captures::Captures;
-use rustc_data_structures::fx::{FxHashMap, FxHashSet};
+use rustc_data_structures::fx::{FxHashSet, FxIndexMap};
 use rustc_data_structures::unord::UnordMap;
 use rustc_errors::{Applicability, DiagnosticBuilder, ErrorGuaranteed, StashKey};
 use rustc_hir as hir;
@@ -834,12 +834,12 @@ impl From<NestedSpan> for FieldDeclSpan {
 
 struct FieldUniquenessCheckContext<'tcx> {
     tcx: TyCtxt<'tcx>,
-    seen_fields: FxHashMap<Ident, FieldDeclSpan>,
+    seen_fields: FxIndexMap<Ident, FieldDeclSpan>,
 }
 
 impl<'tcx> FieldUniquenessCheckContext<'tcx> {
     fn new(tcx: TyCtxt<'tcx>) -> Self {
-        Self { tcx, seen_fields: FxHashMap::default() }
+        Self { tcx, seen_fields: FxIndexMap::default() }
     }
 
     /// Check if a given field `ident` declared at `field_decl` has been declared elsewhere before.
@@ -1,4 +1,4 @@
-use rustc_data_structures::fx::FxHashMap;
+use rustc_data_structures::fx::FxIndexMap;
 use rustc_hir::def_id::DefId;
 use rustc_middle::ty::{self, OutlivesPredicate, TyCtxt};
 
@@ -6,12 +6,12 @@ use super::utils::*;
 
 #[derive(Debug)]
 pub struct ExplicitPredicatesMap<'tcx> {
-    map: FxHashMap<DefId, ty::EarlyBinder<RequiredPredicates<'tcx>>>,
+    map: FxIndexMap<DefId, ty::EarlyBinder<RequiredPredicates<'tcx>>>,
 }
 
 impl<'tcx> ExplicitPredicatesMap<'tcx> {
     pub fn new() -> ExplicitPredicatesMap<'tcx> {
-        ExplicitPredicatesMap { map: FxHashMap::default() }
+        ExplicitPredicatesMap { map: FxIndexMap::default() }
     }
 
     pub(crate) fn explicit_predicates_of(
@@ -1,4 +1,4 @@
-use rustc_data_structures::fx::FxHashMap;
+use rustc_data_structures::fx::FxIndexMap;
 use rustc_hir::def::DefKind;
 use rustc_hir::def_id::DefId;
 use rustc_middle::ty::{self, Ty, TyCtxt};
@@ -15,12 +15,12 @@ use super::utils::*;
 /// now be filled with inferred predicates.
 pub(super) fn infer_predicates(
     tcx: TyCtxt<'_>,
-) -> FxHashMap<DefId, ty::EarlyBinder<RequiredPredicates<'_>>> {
+) -> FxIndexMap<DefId, ty::EarlyBinder<RequiredPredicates<'_>>> {
     debug!("infer_predicates");
 
     let mut explicit_map = ExplicitPredicatesMap::new();
 
-    let mut global_inferred_outlives = FxHashMap::default();
+    let mut global_inferred_outlives = FxIndexMap::default();
 
     // If new predicates were added then we need to re-calculate
     // all crates since there could be new implied predicates.
@@ -101,7 +101,7 @@ fn insert_required_predicates_to_be_wf<'tcx>(
     tcx: TyCtxt<'tcx>,
     ty: Ty<'tcx>,
     span: Span,
-    global_inferred_outlives: &FxHashMap<DefId, ty::EarlyBinder<RequiredPredicates<'tcx>>>,
+    global_inferred_outlives: &FxIndexMap<DefId, ty::EarlyBinder<RequiredPredicates<'tcx>>>,
     required_predicates: &mut RequiredPredicates<'tcx>,
     explicit_map: &mut ExplicitPredicatesMap<'tcx>,
 ) {
@@ -322,7 +322,7 @@ fn check_inferred_predicates<'tcx>(
     tcx: TyCtxt<'tcx>,
     def_id: DefId,
     args: ty::GenericArgsRef<'tcx>,
-    global_inferred_outlives: &FxHashMap<DefId, ty::EarlyBinder<RequiredPredicates<'tcx>>>,
+    global_inferred_outlives: &FxIndexMap<DefId, ty::EarlyBinder<RequiredPredicates<'tcx>>>,
     required_predicates: &mut RequiredPredicates<'tcx>,
 ) {
     // Load the current set of inferred and explicit predicates from `global_inferred_outlives`
@@ -24,6 +24,7 @@ use crate::{
 use rustc_ast as ast;
 use rustc_data_structures::fx::{FxHashMap, FxHashSet};
 use rustc_data_structures::stack::ensure_sufficient_stack;
+use rustc_data_structures::unord::UnordMap;
 use rustc_errors::{
     codes::*, pluralize, struct_span_code_err, AddToDiagnostic, Applicability, Diagnostic,
     DiagnosticBuilder, ErrCode, ErrorGuaranteed, StashKey,
@@ -1709,7 +1710,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
             .fields
             .iter_enumerated()
             .map(|(i, field)| (field.ident(tcx).normalize_to_macros_2_0(), (i, field)))
-            .collect::<FxHashMap<_, _>>();
+            .collect::<UnordMap<_, _>>();
 
         let mut seen_fields = FxHashMap::default();
 
@@ -1954,18 +1955,15 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
         &self,
         adt_ty: Ty<'tcx>,
         span: Span,
-        remaining_fields: FxHashMap<Ident, (FieldIdx, &ty::FieldDef)>,
+        remaining_fields: UnordMap<Ident, (FieldIdx, &ty::FieldDef)>,
         variant: &'tcx ty::VariantDef,
         ast_fields: &'tcx [hir::ExprField<'tcx>],
         args: GenericArgsRef<'tcx>,
     ) {
         let len = remaining_fields.len();
 
-        #[allow(rustc::potential_query_instability)]
-        let mut displayable_field_names: Vec<&str> =
-            remaining_fields.keys().map(|ident| ident.as_str()).collect();
-        // sorting &str primitives here, sort_unstable is ok
-        displayable_field_names.sort_unstable();
+        let displayable_field_names: Vec<&str> =
+            remaining_fields.items().map(|(ident, _)| ident.as_str()).into_sorted_stable_ord();
 
         let mut truncated_fields_error = String::new();
         let remaining_fields_names = match &displayable_field_names[..] {
@@ -306,11 +306,7 @@ fn early_lint_checks(tcx: TyCtxt<'_>, (): ()) {
 
     // Gate identifiers containing invalid Unicode codepoints that were recovered during lexing.
     sess.parse_sess.bad_unicode_identifiers.with_lock(|identifiers| {
-        // We will soon sort, so the initial order does not matter.
-        #[allow(rustc::potential_query_instability)]
-        let mut identifiers: Vec<_> = identifiers.drain().collect();
-        identifiers.sort_by_key(|&(key, _)| key);
-        for (ident, mut spans) in identifiers.into_iter() {
+        for (ident, mut spans) in identifiers.drain(..) {
             spans.sort();
             if ident == sym::ferris {
                 let first_span = spans[0];
@@ -18,8 +18,9 @@ use self::TargetLint::*;
 
 use crate::levels::LintLevelsBuilder;
 use crate::passes::{EarlyLintPassObject, LateLintPassObject};
-use rustc_data_structures::fx::FxHashMap;
+use rustc_data_structures::fx::FxIndexMap;
 use rustc_data_structures::sync;
+use rustc_data_structures::unord::UnordMap;
 use rustc_errors::{DecorateLint, DiagnosticBuilder, DiagnosticMessage, MultiSpan};
 use rustc_feature::Features;
 use rustc_hir as hir;
@@ -69,10 +70,10 @@ pub struct LintStore {
     pub late_module_passes: Vec<Box<LateLintPassFactory>>,
 
     /// Lints indexed by name.
-    by_name: FxHashMap<String, TargetLint>,
+    by_name: UnordMap<String, TargetLint>,
 
     /// Map of registered lint groups to what lints they expand to.
-    lint_groups: FxHashMap<&'static str, LintGroup>,
+    lint_groups: FxIndexMap<&'static str, LintGroup>,
 }
 
 impl LintStoreMarker for LintStore {}
@@ -152,8 +153,6 @@ impl LintStore {
     pub fn get_lint_groups<'t>(
         &'t self,
     ) -> impl Iterator<Item = (&'static str, Vec<LintId>, bool)> + 't {
-        // This function is not used in a way which observes the order of lints.
-        #[allow(rustc::potential_query_instability)]
         self.lint_groups
             .iter()
             .filter(|(_, LintGroup { depr, .. })| {
@@ -326,9 +325,11 @@ impl LintStore {
 
     /// True if this symbol represents a lint group name.
     pub fn is_lint_group(&self, lint_name: Symbol) -> bool {
-        #[allow(rustc::potential_query_instability)]
-        let lint_groups = self.lint_groups.keys().collect::<Vec<_>>();
-        debug!("is_lint_group(lint_name={:?}, lint_groups={:?})", lint_name, lint_groups);
+        debug!(
+            "is_lint_group(lint_name={:?}, lint_groups={:?})",
+            lint_name,
+            self.lint_groups.keys().collect::<Vec<_>>()
+        );
         let lint_name_str = lint_name.as_str();
         self.lint_groups.contains_key(lint_name_str) || {
             let warnings_name_str = crate::WARNINGS.name_lower();
@@ -372,12 +373,9 @@ impl LintStore {
             None => {
                 // 1. The tool is currently running, so this lint really doesn't exist.
                 // FIXME: should this handle tools that never register a lint, like rustfmt?
-                #[allow(rustc::potential_query_instability)]
-                let lints = self.by_name.keys().collect::<Vec<_>>();
-                debug!("lints={:?}", lints);
+                debug!("lints={:?}", self.by_name);
                 let tool_prefix = format!("{tool_name}::");
 
-                #[allow(rustc::potential_query_instability)]
                 return if self.by_name.keys().any(|lint| lint.starts_with(&tool_prefix)) {
                     self.no_lint_suggestion(&complete_name, tool_name.as_str())
                 } else {
@@ -179,7 +179,6 @@ impl EarlyLintPass for NonAsciiIdents {
-        #[allow(rustc::potential_query_instability)]
         let mut symbols: Vec<_> = symbols.iter().collect();
         symbols.sort_by_key(|k| k.1);
 
         for (symbol, &sp) in symbols.iter() {
             let symbol_str = symbol.as_str();
             if symbol_str.is_ascii() {
@@ -4,6 +4,7 @@ use rustc_attr::InlineAttr;
 use rustc_data_structures::base_n;
 use rustc_data_structures::fingerprint::Fingerprint;
 use rustc_data_structures::fx::FxHashMap;
+use rustc_data_structures::fx::FxIndexMap;
 use rustc_data_structures::stable_hasher::{Hash128, HashStable, StableHasher};
 use rustc_hir::def_id::{CrateNum, DefId, LOCAL_CRATE};
 use rustc_hir::ItemId;
@@ -241,7 +242,7 @@ pub struct CodegenUnit<'tcx> {
     /// contain something unique to this crate (e.g., a module path)
     /// as well as the crate name and disambiguator.
     name: Symbol,
-    items: FxHashMap<MonoItem<'tcx>, MonoItemData>,
+    items: FxIndexMap<MonoItem<'tcx>, MonoItemData>,
     size_estimate: usize,
     primary: bool,
     /// True if this is CGU is used to hold code coverage information for dead code,
@@ -316,13 +317,11 @@ impl<'tcx> CodegenUnit<'tcx> {
         self.primary = true;
     }
 
-    /// The order of these items is non-determinstic.
-    pub fn items(&self) -> &FxHashMap<MonoItem<'tcx>, MonoItemData> {
+    pub fn items(&self) -> &FxIndexMap<MonoItem<'tcx>, MonoItemData> {
         &self.items
     }
 
-    /// The order of these items is non-determinstic.
-    pub fn items_mut(&mut self) -> &mut FxHashMap<MonoItem<'tcx>, MonoItemData> {
+    pub fn items_mut(&mut self) -> &mut FxIndexMap<MonoItem<'tcx>, MonoItemData> {
         &mut self.items
     }
@@ -368,7 +368,7 @@ fn merge_codegen_units<'tcx>(
             // Move the items from `cgu_src` to `cgu_dst`. Some of them may be
             // duplicate inlined items, in which case the destination CGU is
             // unaffected. Recalculate size estimates afterwards.
-            cgu_dst.items_mut().extend(cgu_src.items_mut().drain());
+            cgu_dst.items_mut().extend(cgu_src.items_mut().drain(..));
             cgu_dst.compute_size_estimate();
 
             // Record that `cgu_dst` now contains all the stuff that was in
@@ -407,7 +407,7 @@ fn merge_codegen_units<'tcx>(
         // Move the items from `smallest` to `second_smallest`. Some of them
         // may be duplicate inlined items, in which case the destination CGU is
         // unaffected. Recalculate size estimates afterwards.
-        second_smallest.items_mut().extend(smallest.items_mut().drain());
+        second_smallest.items_mut().extend(smallest.items_mut().drain(..));
         second_smallest.compute_size_estimate();
 
         // Don't update `cgu_contents`, that's only for incremental builds.
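`drain()` on a `HashMap` takes no argument, but `IndexMap::drain` takes a range, so moving `CodegenUnit::items` to `FxIndexMap` also turns `drain()` into `drain(..)` at the two call sites above. A tiny sketch of the same call against the public `indexmap` crate (assumed dependency):

```rust
use indexmap::IndexMap; // assumed dependency: indexmap = "2"

fn main() {
    let mut src: IndexMap<&str, usize> = [("a", 1), ("b", 2)].into_iter().collect();
    let mut dst: IndexMap<&str, usize> = [("c", 3)].into_iter().collect();

    // IndexMap::drain takes a range (here: everything), unlike HashMap::drain().
    dst.extend(src.drain(..));

    assert!(src.is_empty());
    // Insertion order is preserved: existing entries first, drained ones after.
    assert_eq!(dst.keys().copied().collect::<Vec<_>>(), ["c", "a", "b"]);
}
```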
@@ -11,7 +11,7 @@ use crate::lint::{
 };
 use crate::Session;
 use rustc_ast::node_id::NodeId;
-use rustc_data_structures::fx::{FxHashMap, FxHashSet};
+use rustc_data_structures::fx::{FxHashMap, FxIndexMap, FxIndexSet};
 use rustc_data_structures::sync::{AppendOnlyVec, Lock, Lrc};
 use rustc_errors::{emitter::SilentEmitter, DiagCtxt};
 use rustc_errors::{
@@ -205,19 +205,19 @@ pub struct ParseSess {
     /// Places where identifiers that contain invalid Unicode codepoints but that look like they
     /// should be. Useful to avoid bad tokenization when encountering emoji. We group them to
     /// provide a single error per unique incorrect identifier.
-    pub bad_unicode_identifiers: Lock<FxHashMap<Symbol, Vec<Span>>>,
+    pub bad_unicode_identifiers: Lock<FxIndexMap<Symbol, Vec<Span>>>,
     source_map: Lrc<SourceMap>,
     pub buffered_lints: Lock<Vec<BufferedEarlyLint>>,
     /// Contains the spans of block expressions that could have been incomplete based on the
     /// operation token that followed it, but that the parser cannot identify without further
     /// analysis.
-    pub ambiguous_block_expr_parse: Lock<FxHashMap<Span, Span>>,
+    pub ambiguous_block_expr_parse: Lock<FxIndexMap<Span, Span>>,
     pub gated_spans: GatedSpans,
     pub symbol_gallery: SymbolGallery,
     /// Environment variables accessed during the build and their values when they exist.
-    pub env_depinfo: Lock<FxHashSet<(Symbol, Option<Symbol>)>>,
+    pub env_depinfo: Lock<FxIndexSet<(Symbol, Option<Symbol>)>>,
     /// File paths accessed during the build.
-    pub file_depinfo: Lock<FxHashSet<Symbol>>,
+    pub file_depinfo: Lock<FxIndexSet<Symbol>>,
     /// Whether cfg(version) should treat the current release as incomplete
     pub assume_incomplete_release: bool,
     /// Spans passed to `proc_macro::quote_span`. Each span has a numerical
@@ -247,7 +247,7 @@ impl ParseSess {
             bad_unicode_identifiers: Lock::new(Default::default()),
             source_map,
             buffered_lints: Lock::new(vec![]),
-            ambiguous_block_expr_parse: Lock::new(FxHashMap::default()),
+            ambiguous_block_expr_parse: Lock::new(Default::default()),
             gated_spans: GatedSpans::default(),
             symbol_gallery: SymbolGallery::default(),
             env_depinfo: Default::default(),
@@ -9,14 +9,6 @@ help: Unicode character '➖' (Heavy Minus Sign) looks like '-' (Minus/Hyphen),
 LL | let _ = i_like_to_😄_a_lot() - 4;
    | ~
 
-error: Ferris cannot be used as an identifier
-  --> $DIR/emoji-identifiers.rs:17:9
-   |
-LL |     let 🦀 = 1;
-   |         ^^ help: try using their name instead: `ferris`
-LL |     dbg!(🦀);
-   |          ^^
-
 error: identifiers cannot contain emoji: `ABig👩👩👧👧Family`
   --> $DIR/emoji-identifiers.rs:1:8
    |
@@ -64,6 +56,14 @@ error: identifiers cannot contain emoji: `i_like_to_😄_a_lot`
 LL | let _ = i_like_to_😄_a_lot() ➖ 4;
    | ^^^^^^^^^^^^^^^^^^
 
+error: Ferris cannot be used as an identifier
+  --> $DIR/emoji-identifiers.rs:17:9
+   |
+LL |     let 🦀 = 1;
+   |         ^^ help: try using their name instead: `ferris`
+LL |     dbg!(🦀);
+   |          ^^
+
 error[E0599]: no function or associated item named `full_of✨` found for struct `👀` in the current scope
   --> $DIR/emoji-identifiers.rs:9:8
    |