Auto merge of #129809 - matthiaskrgr:rollup-cyygnxh, r=matthiaskrgr
Rollup of 15 pull requests

Successful merges:

- #120221 (Don't make statement nonterminals match pattern nonterminals)
- #126183 (Separate core search logic with search ui)
- #129123 (rustdoc-json: Add test for `Self` type)
- #129366 (linker: Synchronize native library search in rustc and linker)
- #129527 (Don't use `TyKind` in a lint)
- #129534 (Deny `wasm_c_abi` lint to nudge the last 25%)
- #129640 (Re-enable android tests/benches in alloc/core)
- #129642 (Bump backtrace to 0.3.74~ish)
- #129675 (allow BufReader::peek to be called on unsized types)
- #129723 (Simplify some extern providers)
- #129724 (Remove `Option<!>` return types.)
- #129725 (Stop using `ty::GenericPredicates` for non-predicates_of queries)
- #129731 (Allow running `./x.py test compiler`)
- #129751 (interpret/visitor: make memory order iteration slightly more efficient)
- #129754 (wasi: Fix sleeping for `Duration::MAX`)

r? `@ghost`
`@rustbot` modify labels: rollup
commit 9649706ead
@@ -486,6 +486,9 @@ impl Token {
}
/// Returns `true` if the token can appear at the start of an expression.
///
/// **NB**: Take care when modifying this function, since it will change
/// the stable set of tokens that are allowed to match an expr nonterminal.
pub fn can_begin_expr(&self) -> bool {
match self.uninterpolate().kind {
Ident(name, is_raw) =>
@@ -504,10 +507,13 @@ impl Token {
PathSep | // global path
Lifetime(..) | // labeled loop
Pound => true, // expression attributes
Interpolated(ref nt) => matches!(&**nt, NtLiteral(..) |
NtExpr(..) |
NtBlock(..) |
NtPath(..)),
Interpolated(ref nt) =>
matches!(&**nt,
NtBlock(..) |
NtExpr(..) |
NtLiteral(..) |
NtPath(..)
),
_ => false,
}
}
@@ -515,23 +521,32 @@ impl Token {
/// Returns `true` if the token can appear at the start of a pattern.
///
/// Shamelessly borrowed from `can_begin_expr`, only used for diagnostics right now.
pub fn can_begin_pattern(&self) -> bool {
match self.uninterpolate().kind {
Ident(name, is_raw) =>
ident_can_begin_expr(name, self.span, is_raw), // value name or keyword
| OpenDelim(Delimiter::Bracket | Delimiter::Parenthesis) // tuple or array
| Literal(..) // literal
| BinOp(Minus) // unary minus
| BinOp(And) // reference
| AndAnd // double reference
// DotDotDot is no longer supported
| DotDot | DotDotDot | DotDotEq // ranges
| Lt | BinOp(Shl) // associated path
| PathSep => true, // global path
Interpolated(ref nt) => matches!(&**nt, NtLiteral(..) |
NtPat(..) |
NtBlock(..) |
NtPath(..)),
pub fn can_begin_pattern(&self, pat_kind: NtPatKind) -> bool {
match &self.uninterpolate().kind {
// box, ref, mut, and other identifiers (can stricten)
Ident(..) | NtIdent(..) |
OpenDelim(Delimiter::Parenthesis) | // tuple pattern
OpenDelim(Delimiter::Bracket) | // slice pattern
BinOp(And) | // reference
BinOp(Minus) | // negative literal
AndAnd | // double reference
Literal(_) | // literal
DotDot | // range pattern (future compat)
DotDotDot | // range pattern (future compat)
PathSep | // path
Lt | // path (UFCS constant)
BinOp(Shl) => true, // path (double UFCS)
// leading vert `|` or-pattern
BinOp(Or) => matches!(pat_kind, PatWithOr),
Interpolated(nt) =>
matches!(&**nt,
| NtExpr(..)
| NtLiteral(..)
| NtMeta(..)
| NtPat(..)
| NtPath(..)
| NtTy(..)
),
_ => false,
}
}
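Note: the new `pat_kind: NtPatKind` parameter mirrors the split between the `pat` and `pat_param` macro fragment specifiers, and `PatWithOr` is what allows a leading `|`. A small standalone sketch of that surface behaviour (illustrative only, not part of this commit; the macro name is made up):

// With the 2021-edition `pat` fragment, a top-level or-pattern is accepted
// (this is what `PatWithOr` models); `pat_param` keeps the older, stricter
// grammar, so swapping the specifier below to `pat_param` and calling the
// macro with `Some(1) | Some(2)` would fail to parse.
macro_rules! is_one_or_two {
    ($p:pat) => {
        match Some(1) {
            $p => true,
            _ => false,
        }
    };
}

fn main() {
    assert!(is_one_or_two!(Some(1) | Some(2)));
    assert!(!is_one_or_two!(Some(3)));
}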
@@ -2,7 +2,7 @@ use std::collections::BTreeSet;
use std::ffi::OsString;
use std::fs::{read, File, OpenOptions};
use std::io::{BufWriter, Write};
use std::ops::Deref;
use std::ops::{ControlFlow, Deref};
use std::path::{Path, PathBuf};
use std::process::{ExitStatus, Output, Stdio};
use std::{env, fmt, fs, io, mem, str};
@@ -18,8 +18,8 @@ use rustc_data_structures::temp_dir::MaybeTempDir;
use rustc_errors::{DiagCtxtHandle, ErrorGuaranteed, FatalError};
use rustc_fs_util::{fix_windows_verbatim_for_gcc, try_canonicalize};
use rustc_hir::def_id::{CrateNum, LOCAL_CRATE};
use rustc_metadata::find_native_static_library;
use rustc_metadata::fs::{copy_to_stdout, emit_wrapper_file, METADATA_FILENAME};
use rustc_metadata::{find_native_static_library, walk_native_lib_search_dirs};
use rustc_middle::bug;
use rustc_middle::middle::debugger_visualizer::DebuggerVisualizerFile;
use rustc_middle::middle::dependency_format::Linkage;
@@ -2110,50 +2110,19 @@ fn add_library_search_dirs(
return;
}
// Library search paths explicitly supplied by user (`-L` on the command line).
for search_path in sess.target_filesearch(PathKind::Native).cli_search_paths() {
cmd.include_path(&fix_windows_verbatim_for_gcc(&search_path.dir));
}
for search_path in sess.target_filesearch(PathKind::Framework).cli_search_paths() {
// Contrary to the `-L` docs only framework-specific paths are considered here.
if search_path.kind != PathKind::All {
cmd.framework_path(&search_path.dir);
}
}
// The toolchain ships some native library components and self-contained linking was enabled.
// Add the self-contained library directory to search paths.
if self_contained_components.intersects(
LinkSelfContainedComponents::LIBC
| LinkSelfContainedComponents::UNWIND
| LinkSelfContainedComponents::MINGW,
) {
let lib_path = sess.target_tlib_path.dir.join("self-contained");
cmd.include_path(&fix_windows_verbatim_for_gcc(&lib_path));
}
// Toolchains for some targets may ship `libunwind.a`, but place it into the main sysroot
// library directory instead of the self-contained directories.
// Sanitizer libraries have the same issue and are also linked by name on Apple targets.
// The targets here should be in sync with `copy_third_party_objects` in bootstrap.
// FIXME: implement `-Clink-self-contained=+/-unwind,+/-sanitizers`, move the shipped libunwind
// and sanitizers to self-contained directory, and stop adding this search path.
if sess.target.vendor == "fortanix"
|| sess.target.os == "linux"
|| sess.target.os == "fuchsia"
|| sess.target.is_like_osx && !sess.opts.unstable_opts.sanitizer.is_empty()
{
cmd.include_path(&fix_windows_verbatim_for_gcc(&sess.target_tlib_path.dir));
}
// Mac Catalyst uses the macOS SDK, but to link to iOS-specific frameworks
// we must have the support library stubs in the library search path (#121430).
if let Some(sdk_root) = apple_sdk_root
&& sess.target.llvm_target.contains("macabi")
{
cmd.include_path(&sdk_root.join("System/iOSSupport/usr/lib"));
cmd.framework_path(&sdk_root.join("System/iOSSupport/System/Library/Frameworks"));
}
walk_native_lib_search_dirs(
sess,
self_contained_components,
apple_sdk_root,
|dir, is_framework| {
if is_framework {
cmd.framework_path(dir);
} else {
cmd.include_path(&fix_windows_verbatim_for_gcc(dir));
}
ControlFlow::<()>::Continue(())
},
);
}
/// Add options making relocation sections in the produced ELF files read-only
@@ -7,7 +7,7 @@ use std::{env, iter, mem, str};
use cc::windows_registry;
use rustc_hir::def_id::{CrateNum, LOCAL_CRATE};
use rustc_metadata::find_native_static_library;
use rustc_metadata::{find_native_static_library, try_find_native_static_library};
use rustc_middle::bug;
use rustc_middle::middle::dependency_format::Linkage;
use rustc_middle::middle::exported_symbols;
@@ -891,9 +891,15 @@ impl<'a> Linker for MsvcLinker<'a> {
}
fn link_staticlib_by_name(&mut self, name: &str, verbatim: bool, whole_archive: bool) {
let prefix = if whole_archive { "/WHOLEARCHIVE:" } else { "" };
let suffix = if verbatim { "" } else { ".lib" };
self.link_arg(format!("{prefix}{name}{suffix}"));
// On MSVC-like targets rustc supports static libraries using alternative naming
// scheme (`libfoo.a`) unsupported by linker, search for such libraries manually.
if let Some(path) = try_find_native_static_library(self.sess, name, verbatim) {
self.link_staticlib_by_path(&path, whole_archive);
} else {
let prefix = if whole_archive { "/WHOLEARCHIVE:" } else { "" };
let suffix = if verbatim { "" } else { ".lib" };
self.link_arg(format!("{prefix}{name}{suffix}"));
}
}
fn link_staticlib_by_path(&mut self, path: &Path, whole_archive: bool) {
@@ -25,14 +25,15 @@ pub trait ValueVisitor<'tcx, M: Machine<'tcx>>: Sized {
}
/// This function provides the chance to reorder the order in which fields are visited for
/// `FieldsShape::Aggregate`: The order of fields will be
/// `(0..num_fields).map(aggregate_field_order)`.
/// `FieldsShape::Aggregate`.
///
/// The default means we iterate in source declaration order; alternative this can do an inverse
/// lookup in `memory_index` to use memory field order instead.
/// The default means we iterate in source declaration order; alternatively this can do some
/// work with `memory_index` to iterate in memory order.
#[inline(always)]
fn aggregate_field_order(_memory_index: &IndexVec<FieldIdx, u32>, idx: usize) -> usize {
idx
fn aggregate_field_iter(
memory_index: &IndexVec<FieldIdx, u32>,
) -> impl Iterator<Item = FieldIdx> + 'static {
memory_index.indices()
}
// Recursive actions, ready to be overloaded.
@@ -172,9 +173,9 @@ pub trait ValueVisitor<'tcx, M: Machine<'tcx>>: Sized {
&FieldsShape::Union(fields) => {
self.visit_union(v, fields)?;
}
FieldsShape::Arbitrary { offsets, memory_index } => {
for idx in 0..offsets.len() {
let idx = Self::aggregate_field_order(memory_index, idx);
FieldsShape::Arbitrary { memory_index, .. } => {
for idx in Self::aggregate_field_iter(memory_index) {
let idx = idx.as_usize();
let field = self.ecx().project_field(v, idx)?;
self.visit_field(v, idx, &field)?;
}
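The default `aggregate_field_iter` walks fields in source declaration order. An implementation that instead wants memory order could sort the declaration-order indices by the position recorded in `memory_index`; a rough sketch of such an override (illustrative only, not the implementation any visitor in this commit uses):

fn aggregate_field_iter(
    memory_index: &IndexVec<FieldIdx, u32>,
) -> impl Iterator<Item = FieldIdx> + 'static {
    // `memory_index[i]` gives the memory position of declaration-order
    // field `i`, so sorting by it yields the fields in memory order.
    let mut fields: Vec<FieldIdx> = memory_index.indices().collect();
    fields.sort_by_key(|&i| memory_index[i]);
    fields.into_iter()
}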
@@ -420,7 +420,7 @@ impl<'tcx> HirTyLowerer<'tcx> for ItemCtxt<'tcx> {
span: Span,
def_id: LocalDefId,
assoc_name: Ident,
) -> ty::GenericPredicates<'tcx> {
) -> ty::EarlyBinder<'tcx, &'tcx [(ty::Clause<'tcx>, Span)]> {
self.tcx.at(span).type_param_predicates((self.item_def_id, def_id, assoc_name))
}
@@ -580,24 +580,24 @@ pub(super) fn explicit_predicates_of<'tcx>(
/// Ensures that the super-predicates of the trait with a `DefId`
/// of `trait_def_id` are lowered and stored. This also ensures that
/// the transitive super-predicates are lowered.
pub(super) fn explicit_super_predicates_of(
tcx: TyCtxt<'_>,
pub(super) fn explicit_super_predicates_of<'tcx>(
tcx: TyCtxt<'tcx>,
trait_def_id: LocalDefId,
) -> ty::GenericPredicates<'_> {
) -> ty::EarlyBinder<'tcx, &'tcx [(ty::Clause<'tcx>, Span)]> {
implied_predicates_with_filter(tcx, trait_def_id.to_def_id(), PredicateFilter::SelfOnly)
}
pub(super) fn explicit_supertraits_containing_assoc_item(
tcx: TyCtxt<'_>,
pub(super) fn explicit_supertraits_containing_assoc_item<'tcx>(
tcx: TyCtxt<'tcx>,
(trait_def_id, assoc_name): (DefId, Ident),
) -> ty::GenericPredicates<'_> {
) -> ty::EarlyBinder<'tcx, &'tcx [(ty::Clause<'tcx>, Span)]> {
implied_predicates_with_filter(tcx, trait_def_id, PredicateFilter::SelfThatDefines(assoc_name))
}
pub(super) fn explicit_implied_predicates_of(
tcx: TyCtxt<'_>,
pub(super) fn explicit_implied_predicates_of<'tcx>(
tcx: TyCtxt<'tcx>,
trait_def_id: LocalDefId,
) -> ty::GenericPredicates<'_> {
) -> ty::EarlyBinder<'tcx, &'tcx [(ty::Clause<'tcx>, Span)]> {
implied_predicates_with_filter(
tcx,
trait_def_id.to_def_id(),
@@ -612,11 +612,11 @@ pub(super) fn explicit_implied_predicates_of(
/// Ensures that the super-predicates of the trait with a `DefId`
/// of `trait_def_id` are lowered and stored. This also ensures that
/// the transitive super-predicates are lowered.
pub(super) fn implied_predicates_with_filter(
tcx: TyCtxt<'_>,
pub(super) fn implied_predicates_with_filter<'tcx>(
tcx: TyCtxt<'tcx>,
trait_def_id: DefId,
filter: PredicateFilter,
) -> ty::GenericPredicates<'_> {
) -> ty::EarlyBinder<'tcx, &'tcx [(ty::Clause<'tcx>, Span)]> {
let Some(trait_def_id) = trait_def_id.as_local() else {
// if `assoc_name` is None, then the query should've been redirected to an
// external provider
@@ -679,20 +679,16 @@ pub(super) fn implied_predicates_with_filter(
_ => {}
}
ty::GenericPredicates {
parent: None,
predicates: implied_bounds,
effects_min_tys: ty::List::empty(),
}
ty::EarlyBinder::bind(implied_bounds)
}
/// Returns the predicates defined on `item_def_id` of the form
/// `X: Foo` where `X` is the type parameter `def_id`.
#[instrument(level = "trace", skip(tcx))]
pub(super) fn type_param_predicates(
tcx: TyCtxt<'_>,
pub(super) fn type_param_predicates<'tcx>(
tcx: TyCtxt<'tcx>,
(item_def_id, def_id, assoc_name): (LocalDefId, LocalDefId, Ident),
) -> ty::GenericPredicates<'_> {
) -> ty::EarlyBinder<'tcx, &'tcx [(ty::Clause<'tcx>, Span)]> {
use rustc_hir::*;
use rustc_middle::ty::Ty;
@@ -713,18 +709,20 @@ pub(super) fn type_param_predicates(
tcx.generics_of(item_def_id).parent.map(|def_id| def_id.expect_local())
};
let mut result = parent
.map(|parent| {
let icx = ItemCtxt::new(tcx, parent);
icx.probe_ty_param_bounds(DUMMY_SP, def_id, assoc_name)
})
.unwrap_or_default();
let result = if let Some(parent) = parent {
let icx = ItemCtxt::new(tcx, parent);
icx.probe_ty_param_bounds(DUMMY_SP, def_id, assoc_name)
} else {
ty::EarlyBinder::bind(&[] as &[_])
};
let mut extend = None;
let item_hir_id = tcx.local_def_id_to_hir_id(item_def_id);
let hir_node = tcx.hir_node(item_hir_id);
let Some(hir_generics) = hir_node.generics() else { return result };
let Some(hir_generics) = hir_node.generics() else {
return result;
};
if let Node::Item(item) = hir_node
&& let ItemKind::Trait(..) = item.kind
// Implied `Self: Trait` and supertrait bounds.
@@ -748,9 +746,10 @@ pub(super) fn type_param_predicates(
_ => false,
}),
);
result.predicates =
tcx.arena.alloc_from_iter(result.predicates.iter().copied().chain(extra_predicates));
result
ty::EarlyBinder::bind(
tcx.arena.alloc_from_iter(result.skip_binder().iter().copied().chain(extra_predicates)),
)
}
impl<'tcx> ItemCtxt<'tcx> {
@@ -1761,7 +1761,7 @@ impl<'a, 'tcx> BoundVarContext<'a, 'tcx> {
break Some((bound_vars.into_iter().collect(), assoc_item));
}
let predicates = tcx.explicit_supertraits_containing_assoc_item((def_id, assoc_name));
let obligations = predicates.predicates.iter().filter_map(|&(pred, _)| {
let obligations = predicates.iter_identity_copied().filter_map(|(pred, _)| {
let bound_predicate = pred.kind();
match bound_predicate.skip_binder() {
ty::ClauseKind::Trait(data) => {
@@ -136,7 +136,7 @@ pub trait HirTyLowerer<'tcx> {
span: Span,
def_id: LocalDefId,
assoc_name: Ident,
) -> ty::GenericPredicates<'tcx>;
) -> ty::EarlyBinder<'tcx, &'tcx [(ty::Clause<'tcx>, Span)]>;
/// Lower an associated type to a projection.
///
@@ -831,13 +831,13 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ {
debug!(?ty_param_def_id, ?assoc_name, ?span);
let tcx = self.tcx();
let predicates = &self.probe_ty_param_bounds(span, ty_param_def_id, assoc_name).predicates;
let predicates = &self.probe_ty_param_bounds(span, ty_param_def_id, assoc_name);
debug!("predicates={:#?}", predicates);
self.probe_single_bound_for_assoc_item(
|| {
let trait_refs = predicates
.iter()
.iter_identity_copied()
.filter_map(|(p, _)| Some(p.as_trait_clause()?.map_bound(|t| t.trait_ref)));
traits::transitive_bounds_that_define_assoc_item(tcx, trait_refs, assoc_name)
},
@@ -263,27 +263,24 @@ impl<'tcx> HirTyLowerer<'tcx> for FnCtxt<'_, 'tcx> {
_: Span,
def_id: LocalDefId,
_: Ident,
) -> ty::GenericPredicates<'tcx> {
) -> ty::EarlyBinder<'tcx, &'tcx [(ty::Clause<'tcx>, Span)]> {
let tcx = self.tcx;
let item_def_id = tcx.hir().ty_param_owner(def_id);
let generics = tcx.generics_of(item_def_id);
let index = generics.param_def_id_to_index[&def_id.to_def_id()];
// HACK(eddyb) should get the original `Span`.
let span = tcx.def_span(def_id);
ty::GenericPredicates {
parent: None,
predicates: tcx.arena.alloc_from_iter(
self.param_env.caller_bounds().iter().filter_map(|predicate| {
match predicate.kind().skip_binder() {
ty::ClauseKind::Trait(data) if data.self_ty().is_param(index) => {
Some((predicate, span))
}
_ => None,
ty::EarlyBinder::bind(tcx.arena.alloc_from_iter(
self.param_env.caller_bounds().iter().filter_map(|predicate| {
match predicate.kind().skip_binder() {
ty::ClauseKind::Trait(data) if data.self_ty().is_param(index) => {
Some((predicate, span))
}
}),
),
effects_min_tys: ty::List::empty(),
}
_ => None,
}
}),
))
}
fn lower_assoc_ty(
@@ -123,7 +123,7 @@ pub fn transitive_bounds_that_define_assoc_item<'tcx>(
stack.extend(
tcx.explicit_supertraits_containing_assoc_item((trait_ref.def_id(), assoc_name))
.instantiate_own_identity()
.iter_identity_copied()
.map(|(clause, _)| clause.instantiate_supertrait(tcx, trait_ref))
.filter_map(|clause| clause.as_trait_clause())
// FIXME: Negative supertraits are elaborated here lol
@@ -265,8 +265,6 @@ fn structurally_same_type_impl<'tcx>(
} else {
// Do a full, depth-first comparison between the two.
use rustc_type_ir::TyKind::*;
let a_kind = a.kind();
let b_kind = b.kind();
let compare_layouts = |a, b| -> Result<bool, &'tcx LayoutError<'tcx>> {
debug!("compare_layouts({:?}, {:?})", a, b);
@@ -281,12 +279,11 @@ fn structurally_same_type_impl<'tcx>(
Ok(a_layout == b_layout)
};
#[allow(rustc::usage_of_ty_tykind)]
let is_primitive_or_pointer =
|kind: &ty::TyKind<'_>| kind.is_primitive() || matches!(kind, RawPtr(..) | Ref(..));
|ty: Ty<'tcx>| ty.is_primitive() || matches!(ty.kind(), RawPtr(..) | Ref(..));
ensure_sufficient_stack(|| {
match (a_kind, b_kind) {
match (a.kind(), b.kind()) {
(Adt(a_def, _), Adt(b_def, _)) => {
// We can immediately rule out these types as structurally same if
// their layouts differ.
@@ -382,17 +379,21 @@ fn structurally_same_type_impl<'tcx>(
// An Adt and a primitive or pointer type. This can be FFI-safe if non-null
// enum layout optimisation is being applied.
(Adt(..), other_kind) | (other_kind, Adt(..))
if is_primitive_or_pointer(other_kind) =>
{
let (primitive, adt) =
if is_primitive_or_pointer(a.kind()) { (a, b) } else { (b, a) };
if let Some(ty) = types::repr_nullable_ptr(tcx, param_env, adt, ckind) {
ty == primitive
(Adt(..), _) if is_primitive_or_pointer(b) => {
if let Some(ty) = types::repr_nullable_ptr(tcx, param_env, a, ckind) {
ty == b
} else {
compare_layouts(a, b).unwrap_or(false)
}
}
(_, Adt(..)) if is_primitive_or_pointer(a) => {
if let Some(ty) = types::repr_nullable_ptr(tcx, param_env, b, ckind) {
ty == a
} else {
compare_layouts(a, b).unwrap_or(false)
}
}
// Otherwise, just compare the layouts. This may fail to lint for some
// incompatible types, but at the very least, will stop reads into
// uninitialised memory.
@@ -45,8 +45,7 @@ impl<'tcx> LateLintPass<'tcx> for MultipleSupertraitUpcastable {
let direct_super_traits_iter = cx
.tcx
.explicit_super_predicates_of(def_id)
.predicates
.into_iter()
.iter_identity_copied()
.filter_map(|(pred, _)| pred.as_trait_clause());
if direct_super_traits_iter.count() > 1 {
cx.emit_span_lint(
@@ -4771,7 +4771,7 @@ declare_lint! {
/// version of Rust this will be fixed and therefore dependencies relying
/// on the non-spec-compliant C ABI will stop functioning.
pub WASM_C_ABI,
Warn,
Deny,
"detects dependencies that are incompatible with the Wasm C ABI",
@future_incompatible = FutureIncompatibleInfo {
reason: FutureIncompatibilityReason::FutureReleaseErrorReportInDeps,
@@ -3,6 +3,7 @@
#![allow(rustc::potential_query_instability)]
#![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
#![doc(rust_logo)]
#![feature(control_flow_enum)]
#![feature(coroutines)]
#![feature(decl_macro)]
#![feature(error_iter)]
@@ -35,7 +36,9 @@ pub mod locator;
pub use creader::{load_symbol_from_dylib, DylibError};
pub use fs::{emit_wrapper_file, METADATA_FILENAME};
pub use native_libs::find_native_static_library;
pub use native_libs::{
find_native_static_library, try_find_native_static_library, walk_native_lib_search_dirs,
};
pub use rmeta::{encode_metadata, rendered_const, EncodedMetadata, METADATA_HEADER};
rustc_fluent_macro::fluent_messages! { "../messages.ftl" }
@@ -1,4 +1,5 @@
use std::path::PathBuf;
use std::ops::ControlFlow;
use std::path::{Path, PathBuf};
use rustc_ast::{NestedMetaItem, CRATE_NODE_ID};
use rustc_attr as attr;
@@ -16,10 +17,68 @@ use rustc_session::Session;
use rustc_span::def_id::{DefId, LOCAL_CRATE};
use rustc_span::symbol::{sym, Symbol};
use rustc_target::spec::abi::Abi;
use rustc_target::spec::LinkSelfContainedComponents;
use crate::{errors, fluent_generated};
pub fn find_native_static_library(name: &str, verbatim: bool, sess: &Session) -> PathBuf {
pub fn walk_native_lib_search_dirs<R>(
sess: &Session,
self_contained_components: LinkSelfContainedComponents,
apple_sdk_root: Option<&Path>,
mut f: impl FnMut(&Path, bool /*is_framework*/) -> ControlFlow<R>,
) -> ControlFlow<R> {
// Library search paths explicitly supplied by user (`-L` on the command line).
for search_path in sess.target_filesearch(PathKind::Native).cli_search_paths() {
f(&search_path.dir, false)?;
}
for search_path in sess.target_filesearch(PathKind::Framework).cli_search_paths() {
// Frameworks are looked up strictly in framework-specific paths.
if search_path.kind != PathKind::All {
f(&search_path.dir, true)?;
}
}
// The toolchain ships some native library components and self-contained linking was enabled.
// Add the self-contained library directory to search paths.
if self_contained_components.intersects(
LinkSelfContainedComponents::LIBC
| LinkSelfContainedComponents::UNWIND
| LinkSelfContainedComponents::MINGW,
) {
f(&sess.target_tlib_path.dir.join("self-contained"), false)?;
}
// Toolchains for some targets may ship `libunwind.a`, but place it into the main sysroot
// library directory instead of the self-contained directories.
// Sanitizer libraries have the same issue and are also linked by name on Apple targets.
// The targets here should be in sync with `copy_third_party_objects` in bootstrap.
// FIXME: implement `-Clink-self-contained=+/-unwind,+/-sanitizers`, move the shipped libunwind
// and sanitizers to self-contained directory, and stop adding this search path.
if sess.target.vendor == "fortanix"
|| sess.target.os == "linux"
|| sess.target.os == "fuchsia"
|| sess.target.is_like_osx && !sess.opts.unstable_opts.sanitizer.is_empty()
{
f(&sess.target_tlib_path.dir, false)?;
}
// Mac Catalyst uses the macOS SDK, but to link to iOS-specific frameworks
// we must have the support library stubs in the library search path (#121430).
if let Some(sdk_root) = apple_sdk_root
&& sess.target.llvm_target.contains("macabi")
{
f(&sdk_root.join("System/iOSSupport/usr/lib"), false)?;
f(&sdk_root.join("System/iOSSupport/System/Library/Frameworks"), true)?;
}
ControlFlow::Continue(())
}
pub fn try_find_native_static_library(
sess: &Session,
name: &str,
verbatim: bool,
) -> Option<PathBuf> {
let formats = if verbatim {
vec![("".into(), "".into())]
} else {
@@ -30,16 +89,29 @@ pub fn find_native_static_library(name: &str, verbatim: bool, sess: &Session) ->
if os == unix { vec![os] } else { vec![os, unix] }
};
for path in sess.target_filesearch(PathKind::Native).search_paths() {
for (prefix, suffix) in &formats {
let test = path.dir.join(format!("{prefix}{name}{suffix}"));
if test.exists() {
return test;
// FIXME: Account for self-contained linking settings and Apple SDK.
walk_native_lib_search_dirs(
sess,
LinkSelfContainedComponents::empty(),
None,
|dir, is_framework| {
if !is_framework {
for (prefix, suffix) in &formats {
let test = dir.join(format!("{prefix}{name}{suffix}"));
if test.exists() {
return ControlFlow::Break(test);
}
}
}
}
}
ControlFlow::Continue(())
},
)
.break_value()
}
sess.dcx().emit_fatal(errors::MissingNativeLibrary::new(name, verbatim));
pub fn find_native_static_library(name: &str, verbatim: bool, sess: &Session) -> PathBuf {
try_find_native_static_library(sess, name, verbatim)
.unwrap_or_else(|| sess.dcx().emit_fatal(errors::MissingNativeLibrary::new(name, verbatim)))
}
fn find_bundled_library(
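Because the callback returns `ControlFlow`, a caller can either visit every directory (always returning `Continue`, as `add_library_search_dirs` does above) or stop at the first hit with `Break` (as `try_find_native_static_library` does). A hypothetical caller that merely collects the non-framework directories might look like this (sketch only; `sess` is assumed to be in scope):

let mut dirs: Vec<PathBuf> = Vec::new();
walk_native_lib_search_dirs(
    sess,
    LinkSelfContainedComponents::empty(),
    None,
    |dir, is_framework| {
        if !is_framework {
            dirs.push(dir.to_path_buf());
        }
        // Never break out early; `()` is only there to satisfy the signature.
        ControlFlow::<()>::Continue(())
    },
);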
@@ -1070,34 +1070,6 @@ impl<'a> CrateMetadataRef<'a> {
)
}
fn get_explicit_item_bounds<'tcx>(
self,
index: DefIndex,
tcx: TyCtxt<'tcx>,
) -> ty::EarlyBinder<'tcx, &'tcx [(ty::Clause<'tcx>, Span)]> {
let lazy = self.root.tables.explicit_item_bounds.get(self, index);
let output = if lazy.is_default() {
&mut []
} else {
tcx.arena.alloc_from_iter(lazy.decode((self, tcx)))
};
ty::EarlyBinder::bind(&*output)
}
fn get_explicit_item_super_predicates<'tcx>(
self,
index: DefIndex,
tcx: TyCtxt<'tcx>,
) -> ty::EarlyBinder<'tcx, &'tcx [(ty::Clause<'tcx>, Span)]> {
let lazy = self.root.tables.explicit_item_super_predicates.get(self, index);
let output = if lazy.is_default() {
&mut []
} else {
tcx.arena.alloc_from_iter(lazy.decode((self, tcx)))
};
ty::EarlyBinder::bind(&*output)
}
fn get_variant(
self,
kind: DefKind,
@@ -1323,10 +1295,6 @@ impl<'a> CrateMetadataRef<'a> {
self.root.tables.optimized_mir.get(self, id).is_some()
}
fn cross_crate_inlinable(self, id: DefIndex) -> bool {
self.root.tables.cross_crate_inlinable.get(self, id)
}
fn get_fn_has_self_parameter(self, id: DefIndex, sess: &'a Session) -> bool {
self.root
.tables
@@ -32,13 +32,20 @@ trait ProcessQueryValue<'tcx, T> {
fn process_decoded(self, _tcx: TyCtxt<'tcx>, _err: impl Fn() -> !) -> T;
}
impl<T> ProcessQueryValue<'_, Option<T>> for Option<T> {
impl<T> ProcessQueryValue<'_, T> for T {
#[inline(always)]
fn process_decoded(self, _tcx: TyCtxt<'_>, _err: impl Fn() -> !) -> Option<T> {
fn process_decoded(self, _tcx: TyCtxt<'_>, _err: impl Fn() -> !) -> T {
self
}
}
impl<'tcx, T> ProcessQueryValue<'tcx, ty::EarlyBinder<'tcx, T>> for T {
#[inline(always)]
fn process_decoded(self, _tcx: TyCtxt<'_>, _err: impl Fn() -> !) -> ty::EarlyBinder<'tcx, T> {
ty::EarlyBinder::bind(self)
}
}
impl<T> ProcessQueryValue<'_, T> for Option<T> {
#[inline(always)]
fn process_decoded(self, _tcx: TyCtxt<'_>, err: impl Fn() -> !) -> T {
@@ -69,6 +76,24 @@ impl<'a, 'tcx, T: Copy + Decodable<DecodeContext<'a, 'tcx>>> ProcessQueryValue<'
}
}
impl<'a, 'tcx, T: Copy + Decodable<DecodeContext<'a, 'tcx>>>
ProcessQueryValue<'tcx, ty::EarlyBinder<'tcx, &'tcx [T]>>
for Option<DecodeIterator<'a, 'tcx, T>>
{
#[inline(always)]
fn process_decoded(
self,
tcx: TyCtxt<'tcx>,
_err: impl Fn() -> !,
) -> ty::EarlyBinder<'tcx, &'tcx [T]> {
ty::EarlyBinder::bind(if let Some(iter) = self {
tcx.arena.alloc_from_iter(iter)
} else {
&[]
})
}
}
impl<'a, 'tcx, T: Copy + Decodable<DecodeContext<'a, 'tcx>>>
ProcessQueryValue<'tcx, Option<&'tcx [T]>> for Option<DecodeIterator<'a, 'tcx, T>>
{
@@ -103,7 +128,12 @@ macro_rules! provide_one {
provide_one! {
$tcx, $def_id, $other, $cdata, $name => {
let lazy = $cdata.root.tables.$name.get($cdata, $def_id.index);
if lazy.is_default() { &[] } else { $tcx.arena.alloc_from_iter(lazy.decode(($cdata, $tcx))) }
let value = if lazy.is_default() {
&[] as &[_]
} else {
$tcx.arena.alloc_from_iter(lazy.decode(($cdata, $tcx)))
};
value.process_decoded($tcx, || panic!("{:?} does not have a {:?}", $def_id, stringify!($name)))
}
}
};
@@ -212,15 +242,15 @@ impl IntoArgs for (CrateNum, SimplifiedType) {
}
provide! { tcx, def_id, other, cdata,
explicit_item_bounds => { cdata.get_explicit_item_bounds(def_id.index, tcx) }
explicit_item_super_predicates => { cdata.get_explicit_item_super_predicates(def_id.index, tcx) }
explicit_item_bounds => { table_defaulted_array }
explicit_item_super_predicates => { table_defaulted_array }
explicit_predicates_of => { table }
generics_of => { table }
inferred_outlives_of => { table_defaulted_array }
explicit_super_predicates_of => { table }
explicit_implied_predicates_of => { table }
type_of => { table }
type_alias_is_lazy => { cdata.root.tables.type_alias_is_lazy.get(cdata, def_id.index) }
type_alias_is_lazy => { table_direct }
variances_of => { table }
fn_sig => { table }
codegen_fn_attrs => { table }
@@ -241,7 +271,7 @@ provide! { tcx, def_id, other, cdata,
lookup_default_body_stability => { table }
lookup_deprecation_entry => { table }
params_in_repr => { table }
unused_generic_params => { cdata.root.tables.unused_generic_params.get(cdata, def_id.index) }
unused_generic_params => { table_direct }
def_kind => { cdata.def_kind(def_id.index) }
impl_parent => { table }
defaultness => { table_direct }
@@ -287,9 +317,7 @@ provide! { tcx, def_id, other, cdata,
.process_decoded(tcx, || panic!("{def_id:?} does not have trait_impl_trait_tys")))
}
associated_type_for_effects => {
table
}
associated_type_for_effects => { table }
associated_types_for_impl_traits_in_associated_fn => { table_defaulted_array }
visibility => { cdata.get_visibility(def_id.index) }
@@ -310,7 +338,7 @@ provide! { tcx, def_id, other, cdata,
item_attrs => { tcx.arena.alloc_from_iter(cdata.get_item_attrs(def_id.index, tcx.sess)) }
is_mir_available => { cdata.is_item_mir_available(def_id.index) }
is_ctfe_mir_available => { cdata.is_ctfe_mir_available(def_id.index) }
cross_crate_inlinable => { cdata.cross_crate_inlinable(def_id.index) }
cross_crate_inlinable => { table_direct }
dylib_dependency_formats => { cdata.get_dylib_dependency_formats(tcx) }
is_private_dep => { cdata.private_dep }
@@ -1446,8 +1446,10 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
}
if let DefKind::Trait = def_kind {
record!(self.tables.trait_def[def_id] <- self.tcx.trait_def(def_id));
record!(self.tables.explicit_super_predicates_of[def_id] <- self.tcx.explicit_super_predicates_of(def_id));
record!(self.tables.explicit_implied_predicates_of[def_id] <- self.tcx.explicit_implied_predicates_of(def_id));
record_array!(self.tables.explicit_super_predicates_of[def_id] <-
self.tcx.explicit_super_predicates_of(def_id).skip_binder());
record_array!(self.tables.explicit_implied_predicates_of[def_id] <-
self.tcx.explicit_implied_predicates_of(def_id).skip_binder());
let module_children = self.tcx.module_children_local(local_id);
record_array!(self.tables.module_children_non_reexports[def_id] <-
@@ -1455,8 +1457,10 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
}
if let DefKind::TraitAlias = def_kind {
record!(self.tables.trait_def[def_id] <- self.tcx.trait_def(def_id));
record!(self.tables.explicit_super_predicates_of[def_id] <- self.tcx.explicit_super_predicates_of(def_id));
record!(self.tables.explicit_implied_predicates_of[def_id] <- self.tcx.explicit_implied_predicates_of(def_id));
record_array!(self.tables.explicit_super_predicates_of[def_id] <-
self.tcx.explicit_super_predicates_of(def_id).skip_binder());
record_array!(self.tables.explicit_implied_predicates_of[def_id] <-
self.tcx.explicit_implied_predicates_of(def_id).skip_binder());
}
if let DefKind::Trait | DefKind::Impl { .. } = def_kind {
let associated_item_def_ids = self.tcx.associated_item_def_ids(def_id);
@@ -419,10 +419,10 @@ define_tables! {
lookup_deprecation_entry: Table<DefIndex, LazyValue<attr::Deprecation>>,
explicit_predicates_of: Table<DefIndex, LazyValue<ty::GenericPredicates<'static>>>,
generics_of: Table<DefIndex, LazyValue<ty::Generics>>,
explicit_super_predicates_of: Table<DefIndex, LazyValue<ty::GenericPredicates<'static>>>,
explicit_super_predicates_of: Table<DefIndex, LazyArray<(ty::Clause<'static>, Span)>>,
// As an optimization, we only store this for trait aliases,
// since it's identical to explicit_super_predicates_of for traits.
explicit_implied_predicates_of: Table<DefIndex, LazyValue<ty::GenericPredicates<'static>>>,
explicit_implied_predicates_of: Table<DefIndex, LazyArray<(ty::Clause<'static>, Span)>>,
type_of: Table<DefIndex, LazyValue<ty::EarlyBinder<'static, Ty<'static>>>>,
variances_of: Table<DefIndex, LazyArray<ty::Variance>>,
fn_sig: Table<DefIndex, LazyValue<ty::EarlyBinder<'static, ty::PolyFnSig<'static>>>>,
@@ -651,7 +651,7 @@ rustc_queries! {
/// is a subset of the full list of predicates. We store these in a separate map
/// because we must evaluate them even during type conversion, often before the full
/// predicates are available (note that super-predicates must not be cyclic).
query explicit_super_predicates_of(key: DefId) -> ty::GenericPredicates<'tcx> {
query explicit_super_predicates_of(key: DefId) -> ty::EarlyBinder<'tcx, &'tcx [(ty::Clause<'tcx>, Span)]> {
desc { |tcx| "computing the super predicates of `{}`", tcx.def_path_str(key) }
cache_on_disk_if { key.is_local() }
separate_provide_extern
@@ -662,7 +662,7 @@ rustc_queries! {
/// of the trait. For regular traits, this includes all super-predicates and their
/// associated type bounds. For trait aliases, currently, this includes all of the
/// predicates of the trait alias.
query explicit_implied_predicates_of(key: DefId) -> ty::GenericPredicates<'tcx> {
query explicit_implied_predicates_of(key: DefId) -> ty::EarlyBinder<'tcx, &'tcx [(ty::Clause<'tcx>, Span)]> {
desc { |tcx| "computing the implied predicates of `{}`", tcx.def_path_str(key) }
cache_on_disk_if { key.is_local() }
separate_provide_extern
@@ -671,7 +671,9 @@ rustc_queries! {
/// The Ident is the name of an associated type.The query returns only the subset
/// of supertraits that define the given associated type. This is used to avoid
/// cycles in resolving type-dependent associated item paths like `T::Item`.
query explicit_supertraits_containing_assoc_item(key: (DefId, rustc_span::symbol::Ident)) -> ty::GenericPredicates<'tcx> {
query explicit_supertraits_containing_assoc_item(
key: (DefId, rustc_span::symbol::Ident)
) -> ty::EarlyBinder<'tcx, &'tcx [(ty::Clause<'tcx>, Span)]> {
desc { |tcx| "computing the super traits of `{}` with associated type name `{}`",
tcx.def_path_str(key.0),
key.1
@@ -680,7 +682,9 @@ rustc_queries! {
/// To avoid cycles within the predicates of a single item we compute
/// per-type-parameter predicates for resolving `T::AssocTy`.
query type_param_predicates(key: (LocalDefId, LocalDefId, rustc_span::symbol::Ident)) -> ty::GenericPredicates<'tcx> {
query type_param_predicates(
key: (LocalDefId, LocalDefId, rustc_span::symbol::Ident)
) -> ty::EarlyBinder<'tcx, &'tcx [(ty::Clause<'tcx>, Span)]> {
desc { |tcx| "computing the bounds for type parameter `{}`", tcx.hir().ty_param_name(key.1) }
}
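With these queries now returning `ty::EarlyBinder<'tcx, &'tcx [(ty::Clause<'tcx>, Span)]>` instead of `ty::GenericPredicates`, callers in this rollup stop reading a `.predicates` field and instead pick one of the `EarlyBinder` accessors. Schematically (a sketch, not compilable on its own; `tcx`, `def_id`, and `args` are assumed to be in scope):

// Inspect the clauses without instantiating the binder.
for (clause, span) in tcx.explicit_super_predicates_of(def_id).skip_binder() {
    // e.g. filter with `clause.as_trait_clause()`
}

// Iterate with identity substitutions applied.
for (clause, span) in tcx.explicit_super_predicates_of(def_id).iter_identity_copied() {
    // ...
}

// Instantiate with concrete generic arguments first.
for (clause, span) in
    tcx.explicit_super_predicates_of(def_id).iter_instantiated_copied(tcx, args)
{
    // ...
}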
@@ -349,16 +349,14 @@ impl<'tcx> Interner for TyCtxt<'tcx> {
self,
def_id: DefId,
) -> ty::EarlyBinder<'tcx, impl IntoIterator<Item = (ty::Clause<'tcx>, Span)>> {
ty::EarlyBinder::bind(self.explicit_super_predicates_of(def_id).instantiate_identity(self))
self.explicit_super_predicates_of(def_id).map_bound(|preds| preds.into_iter().copied())
}
fn explicit_implied_predicates_of(
self,
def_id: DefId,
) -> ty::EarlyBinder<'tcx, impl IntoIterator<Item = (ty::Clause<'tcx>, Span)>> {
ty::EarlyBinder::bind(
self.explicit_implied_predicates_of(def_id).instantiate_identity(self),
)
self.explicit_implied_predicates_of(def_id).map_bound(|preds| preds.into_iter().copied())
}
fn has_target_features(self, def_id: DefId) -> bool {
@@ -1000,7 +1000,7 @@ impl<'tcx> Ty<'tcx> {
#[inline]
pub fn is_primitive(self) -> bool {
self.kind().is_primitive()
matches!(self.kind(), Bool | Char | Int(_) | Uint(_) | Float(_))
}
#[inline]
@@ -382,7 +382,7 @@ impl<'a, 'tcx> ConstAnalysis<'a, 'tcx> {
place: PlaceIndex,
mut operand: OpTy<'tcx>,
projection: &[PlaceElem<'tcx>],
) -> Option<!> {
) {
for &(mut proj_elem) in projection {
if let PlaceElem::Index(index) = proj_elem {
if let FlatSet::Elem(index) = state.get(index.into(), &self.map)
@@ -391,10 +391,14 @@ impl<'a, 'tcx> ConstAnalysis<'a, 'tcx> {
{
proj_elem = PlaceElem::ConstantIndex { offset, min_length, from_end: false };
} else {
return None;
return;
}
}
operand = self.ecx.project(&operand, proj_elem).ok()?;
operand = if let Ok(operand) = self.ecx.project(&operand, proj_elem) {
operand
} else {
return;
}
}
self.map.for_each_projection_value(
@@ -426,8 +430,6 @@ impl<'a, 'tcx> ConstAnalysis<'a, 'tcx> {
}
},
);
None
}
fn binary_op(
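The MIR-transform hunks here and below (jump threading and const propagation) all apply the same mechanical rewrite: helpers that returned `Option<!>` purely so `?` could provide an early exit now return `()` and use `let ... else { return }`. A minimal standalone illustration of the pattern (nightly-only, since `Option<!>` needs the `never_type` feature that rustc itself uses; the function names are made up):

#![feature(never_type)]

// Before: `?` on the Option gives the early return; the function can never
// actually produce a `!` value, so it always ends with `None`.
fn print_old(input: Option<u32>) -> Option<!> {
    let value = input?;
    println!("{value}");
    None
}

// After: a plain `()` return, with let-else supplying the early exit.
fn print_new(input: Option<u32>) {
    let Some(value) = input else { return };
    println!("{value}");
}

fn main() {
    let _ = print_old(Some(1));
    print_new(Some(2));
    let _ = print_old(None);
    print_new(None);
}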
@@ -191,26 +191,26 @@ impl<'tcx, 'a> TOFinder<'tcx, 'a> {
/// Recursion entry point to find threading opportunities.
#[instrument(level = "trace", skip(self))]
fn start_from_switch(&mut self, bb: BasicBlock) -> Option<!> {
fn start_from_switch(&mut self, bb: BasicBlock) {
let bbdata = &self.body[bb];
if bbdata.is_cleanup || self.loop_headers.contains(bb) {
return None;
return;
}
let (discr, targets) = bbdata.terminator().kind.as_switch()?;
let discr = discr.place()?;
let Some((discr, targets)) = bbdata.terminator().kind.as_switch() else { return };
let Some(discr) = discr.place() else { return };
debug!(?discr, ?bb);
let discr_ty = discr.ty(self.body, self.tcx).ty;
let discr_layout = self.ecx.layout_of(discr_ty).ok()?;
let Ok(discr_layout) = self.ecx.layout_of(discr_ty) else { return };
let discr = self.map.find(discr.as_ref())?;
let Some(discr) = self.map.find(discr.as_ref()) else { return };
debug!(?discr);
let cost = CostChecker::new(self.tcx, self.param_env, None, self.body);
let mut state = State::new_reachable();
let conds = if let Some((value, then, else_)) = targets.as_static_if() {
let value = ScalarInt::try_from_uint(value, discr_layout.size)?;
let Some(value) = ScalarInt::try_from_uint(value, discr_layout.size) else { return };
self.arena.alloc_from_iter([
Condition { value, polarity: Polarity::Eq, target: then },
Condition { value, polarity: Polarity::Ne, target: else_ },
@@ -225,7 +225,6 @@ impl<'tcx, 'a> TOFinder<'tcx, 'a> {
state.insert_value_idx(discr, conds, self.map);
self.find_opportunity(bb, state, cost, 0);
None
}
/// Recursively walk statements backwards from this bb's terminator to find threading
@@ -364,18 +363,17 @@ impl<'tcx, 'a> TOFinder<'tcx, 'a> {
lhs: PlaceIndex,
rhs: ImmTy<'tcx>,
state: &mut State<ConditionSet<'a>>,
) -> Option<!> {
) {
let register_opportunity = |c: Condition| {
debug!(?bb, ?c.target, "register");
self.opportunities.push(ThreadingOpportunity { chain: vec![bb], target: c.target })
};
let conditions = state.try_get_idx(lhs, self.map)?;
if let Immediate::Scalar(Scalar::Int(int)) = *rhs {
if let Some(conditions) = state.try_get_idx(lhs, self.map)
&& let Immediate::Scalar(Scalar::Int(int)) = *rhs
{
conditions.iter_matches(int).for_each(register_opportunity);
}
None
}
/// If we expect `lhs ?= A`, we have an opportunity if we assume `constant == A`.
@@ -428,22 +426,23 @@ impl<'tcx, 'a> TOFinder<'tcx, 'a> {
lhs: PlaceIndex,
rhs: &Operand<'tcx>,
state: &mut State<ConditionSet<'a>>,
) -> Option<!> {
) {
match rhs {
// If we expect `lhs ?= A`, we have an opportunity if we assume `constant == A`.
Operand::Constant(constant) => {
let constant =
self.ecx.eval_mir_constant(&constant.const_, constant.span, None).ok()?;
let Ok(constant) =
self.ecx.eval_mir_constant(&constant.const_, constant.span, None)
else {
return;
};
self.process_constant(bb, lhs, constant, state);
}
// Transfer the conditions on the copied rhs.
Operand::Move(rhs) | Operand::Copy(rhs) => {
let rhs = self.map.find(rhs.as_ref())?;
let Some(rhs) = self.map.find(rhs.as_ref()) else { return };
state.insert_place_idx(rhs, lhs, self.map);
}
}
None
}
#[instrument(level = "trace", skip(self))]
@@ -453,16 +452,14 @@ impl<'tcx, 'a> TOFinder<'tcx, 'a> {
lhs_place: &Place<'tcx>,
rhs: &Rvalue<'tcx>,
state: &mut State<ConditionSet<'a>>,
) -> Option<!> {
let lhs = self.map.find(lhs_place.as_ref())?;
) {
let Some(lhs) = self.map.find(lhs_place.as_ref()) else { return };
match rhs {
Rvalue::Use(operand) => self.process_operand(bb, lhs, operand, state)?,
Rvalue::Use(operand) => self.process_operand(bb, lhs, operand, state),
// Transfer the conditions on the copy rhs.
Rvalue::CopyForDeref(rhs) => {
self.process_operand(bb, lhs, &Operand::Copy(*rhs), state)?
}
Rvalue::CopyForDeref(rhs) => self.process_operand(bb, lhs, &Operand::Copy(*rhs), state),
Rvalue::Discriminant(rhs) => {
let rhs = self.map.find_discr(rhs.as_ref())?;
let Some(rhs) = self.map.find_discr(rhs.as_ref()) else { return };
state.insert_place_idx(rhs, lhs, self.map);
}
// If we expect `lhs ?= A`, we have an opportunity if we assume `constant == A`.
@@ -470,7 +467,7 @@ impl<'tcx, 'a> TOFinder<'tcx, 'a> {
let agg_ty = lhs_place.ty(self.body, self.tcx).ty;
let lhs = match kind {
// Do not support unions.
AggregateKind::Adt(.., Some(_)) => return None,
AggregateKind::Adt(.., Some(_)) => return,
AggregateKind::Adt(_, variant_index, ..) if agg_ty.is_enum() => {
if let Some(discr_target) = self.map.apply(lhs, TrackElem::Discriminant)
&& let Ok(discr_value) =
@@ -478,7 +475,11 @@ impl<'tcx, 'a> TOFinder<'tcx, 'a> {
{
self.process_immediate(bb, discr_target, discr_value, state);
}
self.map.apply(lhs, TrackElem::Variant(*variant_index))?
if let Some(idx) = self.map.apply(lhs, TrackElem::Variant(*variant_index)) {
idx
} else {
return;
}
}
_ => lhs,
};
@@ -490,8 +491,8 @@ impl<'tcx, 'a> TOFinder<'tcx, 'a> {
}
// Transfer the conditions on the copy rhs, after inversing polarity.
Rvalue::UnaryOp(UnOp::Not, Operand::Move(place) | Operand::Copy(place)) => {
let conditions = state.try_get_idx(lhs, self.map)?;
let place = self.map.find(place.as_ref())?;
let Some(conditions) = state.try_get_idx(lhs, self.map) else { return };
let Some(place) = self.map.find(place.as_ref()) else { return };
let conds = conditions.map(self.arena, Condition::inv);
state.insert_value_idx(place, conds, self.map);
}
@@ -502,21 +503,25 @@ impl<'tcx, 'a> TOFinder<'tcx, 'a> {
box (Operand::Move(place) | Operand::Copy(place), Operand::Constant(value))
| box (Operand::Constant(value), Operand::Move(place) | Operand::Copy(place)),
) => {
let conditions = state.try_get_idx(lhs, self.map)?;
let place = self.map.find(place.as_ref())?;
let Some(conditions) = state.try_get_idx(lhs, self.map) else { return };
let Some(place) = self.map.find(place.as_ref()) else { return };
let equals = match op {
BinOp::Eq => ScalarInt::TRUE,
BinOp::Ne => ScalarInt::FALSE,
_ => return None,
_ => return,
};
if value.const_.ty().is_floating_point() {
// Floating point equality does not follow bit-patterns.
// -0.0 and NaN both have special rules for equality,
// and therefore we cannot use integer comparisons for them.
// Avoid handling them, though this could be extended in the future.
return None;
return;
}
let value = value.const_.normalize(self.tcx, self.param_env).try_to_scalar_int()?;
let Some(value) =
value.const_.normalize(self.tcx, self.param_env).try_to_scalar_int()
else {
return;
};
let conds = conditions.map(self.arena, |c| Condition {
value,
polarity: if c.matches(equals) { Polarity::Eq } else { Polarity::Ne },
@@ -527,8 +532,6 @@ impl<'tcx, 'a> TOFinder<'tcx, 'a> {
_ => {}
}
None
}
#[instrument(level = "trace", skip(self))]
@@ -537,7 +540,7 @@ impl<'tcx, 'a> TOFinder<'tcx, 'a> {
bb: BasicBlock,
stmt: &Statement<'tcx>,
state: &mut State<ConditionSet<'a>>,
) -> Option<!> {
) {
let register_opportunity = |c: Condition| {
debug!(?bb, ?c.target, "register");
self.opportunities.push(ThreadingOpportunity { chain: vec![bb], target: c.target })
@@ -550,12 +553,12 @@ impl<'tcx, 'a> TOFinder<'tcx, 'a> {
// If we expect `discriminant(place) ?= A`,
// we have an opportunity if `variant_index ?= A`.
StatementKind::SetDiscriminant { box place, variant_index } => {
let discr_target = self.map.find_discr(place.as_ref())?;
let Some(discr_target) = self.map.find_discr(place.as_ref()) else { return };
let enum_ty = place.ty(self.body, self.tcx).ty;
// `SetDiscriminant` may be a no-op if the assigned variant is the untagged variant
// of a niche encoding. If we cannot ensure that we write to the discriminant, do
// nothing.
let enum_layout = self.ecx.layout_of(enum_ty).ok()?;
let Ok(enum_layout) = self.ecx.layout_of(enum_ty) else { return };
let writes_discriminant = match enum_layout.variants {
Variants::Single { index } => {
assert_eq!(index, *variant_index);
@@ -568,24 +571,25 @@ impl<'tcx, 'a> TOFinder<'tcx, 'a> {
} => *variant_index != untagged_variant,
};
if writes_discriminant {
let discr = self.ecx.discriminant_for_variant(enum_ty, *variant_index).ok()?;
self.process_immediate(bb, discr_target, discr, state)?;
let Ok(discr) = self.ecx.discriminant_for_variant(enum_ty, *variant_index)
else {
return;
};
self.process_immediate(bb, discr_target, discr, state);
}
}
// If we expect `lhs ?= true`, we have an opportunity if we assume `lhs == true`.
StatementKind::Intrinsic(box NonDivergingIntrinsic::Assume(
Operand::Copy(place) | Operand::Move(place),
)) => {
let conditions = state.try_get(place.as_ref(), self.map)?;
let Some(conditions) = state.try_get(place.as_ref(), self.map) else { return };
conditions.iter_matches(ScalarInt::TRUE).for_each(register_opportunity);
}
StatementKind::Assign(box (lhs_place, rhs)) => {
self.process_assign(bb, lhs_place, rhs, state)?;
self.process_assign(bb, lhs_place, rhs, state);
}
_ => {}
}
None
}
#[instrument(level = "trace", skip(self, state, cost))]
@@ -638,17 +642,17 @@ impl<'tcx, 'a> TOFinder<'tcx, 'a> {
targets: &SwitchTargets,
target_bb: BasicBlock,
state: &mut State<ConditionSet<'a>>,
) -> Option<!> {
) {
debug_assert_ne!(target_bb, START_BLOCK);
debug_assert_eq!(self.body.basic_blocks.predecessors()[target_bb].len(), 1);
let discr = discr.place()?;
let Some(discr) = discr.place() else { return };
let discr_ty = discr.ty(self.body, self.tcx).ty;
let discr_layout = self.ecx.layout_of(discr_ty).ok()?;
let conditions = state.try_get(discr.as_ref(), self.map)?;
let Ok(discr_layout) = self.ecx.layout_of(discr_ty) else { return };
let Some(conditions) = state.try_get(discr.as_ref(), self.map) else { return };
if let Some((value, _)) = targets.iter().find(|&(_, target)| target == target_bb) {
let value = ScalarInt::try_from_uint(value, discr_layout.size)?;
let Some(value) = ScalarInt::try_from_uint(value, discr_layout.size) else { return };
debug_assert_eq!(targets.iter().filter(|&(_, target)| target == target_bb).count(), 1);
// We are inside `target_bb`. Since we have a single predecessor, we know we passed
@@ -662,7 +666,7 @@ impl<'tcx, 'a> TOFinder<'tcx, 'a> {
} else if let Some((value, _, else_bb)) = targets.as_static_if()
&& target_bb == else_bb
{
let value = ScalarInt::try_from_uint(value, discr_layout.size)?;
let Some(value) = ScalarInt::try_from_uint(value, discr_layout.size) else { return };
// We only know that `discr != value`. That's much weaker information than
// the equality we had in the previous arm. All we can conclude is that
@@ -675,8 +679,6 @@ impl<'tcx, 'a> TOFinder<'tcx, 'a> {
}
}
}
None
}
}
@@ -469,12 +469,12 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
msg: &AssertKind<Operand<'tcx>>,
cond: &Operand<'tcx>,
location: Location,
) -> Option<!> {
let value = &self.eval_operand(cond)?;
) {
let Some(value) = &self.eval_operand(cond) else { return };
trace!("assertion on {:?} should be {:?}", value, expected);
let expected = Scalar::from_bool(expected);
let value_const = self.use_ecx(|this| this.ecx.read_scalar(value))?;
let Some(value_const) = self.use_ecx(|this| this.ecx.read_scalar(value)) else { return };
if expected != value_const {
// Poison all places this operand references so that further code
@@ -516,14 +516,12 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
AssertKind::BoundsCheck { len, index }
}
// Remaining overflow errors are already covered by checks on the binary operators.
AssertKind::Overflow(..) | AssertKind::OverflowNeg(_) => return None,
AssertKind::Overflow(..) | AssertKind::OverflowNeg(_) => return,
// Need proper const propagator for these.
_ => return None,
_ => return,
};
self.report_assert_as_lint(location, AssertLintKind::UnconditionalPanic, msg);
}
None
}
fn ensure_not_propagated(&self, local: Local) {
@ -86,25 +86,7 @@ impl<'a> Parser<'a> {
|
||||
token::Interpolated(nt) => may_be_ident(nt),
|
||||
_ => false,
|
||||
},
|
||||
NonterminalKind::Pat(pat_kind) => match &token.kind {
|
||||
// box, ref, mut, and other identifiers (can stricten)
|
||||
token::Ident(..) | token::NtIdent(..) |
|
||||
token::OpenDelim(Delimiter::Parenthesis) | // tuple pattern
|
||||
token::OpenDelim(Delimiter::Bracket) | // slice pattern
|
||||
token::BinOp(token::And) | // reference
|
||||
token::BinOp(token::Minus) | // negative literal
|
||||
token::AndAnd | // double reference
|
||||
token::Literal(_) | // literal
|
||||
token::DotDot | // range pattern (future compat)
|
||||
token::DotDotDot | // range pattern (future compat)
|
||||
token::PathSep | // path
|
||||
token::Lt | // path (UFCS constant)
|
||||
token::BinOp(token::Shl) => true, // path (double UFCS)
|
||||
// leading vert `|` or-pattern
|
||||
token::BinOp(token::Or) => matches!(pat_kind, PatWithOr),
|
||||
token::Interpolated(nt) => may_be_ident(nt),
|
||||
_ => false,
|
||||
},
|
||||
NonterminalKind::Pat(pat_kind) => token.can_begin_pattern(pat_kind),
|
||||
NonterminalKind::Lifetime => match &token.kind {
|
||||
token::Lifetime(_) | token::NtLifetime(..) => true,
|
||||
_ => false,
|
||||
|
@ -444,7 +444,11 @@ impl<'a> Parser<'a> {

let mut lo = self.token.span;

if self.token.is_keyword(kw::Let) && self.look_ahead(1, |tok| tok.can_begin_pattern()) {
if self.token.is_keyword(kw::Let)
&& self.look_ahead(1, |tok| {
tok.can_begin_pattern(token::NtPatKind::PatParam { inferred: false })
})
{
self.bump();
self.dcx().emit_err(RemoveLet { span: lo });
lo = self.token.span;

@ -378,7 +378,10 @@ impl<'a> Parser<'a> {
if self.may_recover()
&& prev_token_before_parsing == token::PathSep
&& (style == PathStyle::Expr && self.token.can_begin_expr()
|| style == PathStyle::Pat && self.token.can_begin_pattern())
|| style == PathStyle::Pat
&& self.token.can_begin_pattern(token::NtPatKind::PatParam {
inferred: false,
}))
{
snapshot = Some(self.create_snapshot_for_diagnostic());
}

@ -185,12 +185,11 @@ fn predicates_reference_self(
) -> SmallVec<[Span; 1]> {
let trait_ref = ty::Binder::dummy(ty::TraitRef::identity(tcx, trait_def_id));
let predicates = if supertraits_only {
tcx.explicit_super_predicates_of(trait_def_id)
tcx.explicit_super_predicates_of(trait_def_id).skip_binder()
} else {
tcx.predicates_of(trait_def_id)
tcx.predicates_of(trait_def_id).predicates
};
predicates
.predicates
.iter()
.map(|&(predicate, sp)| (predicate.instantiate_supertrait(tcx, trait_ref), sp))
.filter_map(|(clause, sp)| {

@ -266,9 +265,8 @@ fn super_predicates_have_non_lifetime_binders(
return SmallVec::new();
}
tcx.explicit_super_predicates_of(trait_def_id)
.predicates
.iter()
.filter_map(|(pred, span)| pred.has_non_region_bound_vars().then_some(*span))
.iter_identity_copied()
.filter_map(|(pred, span)| pred.has_non_region_bound_vars().then_some(span))
.collect()
}

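These hunks, and the similar ones below, adjust call sites to the new return shape of `explicit_super_predicates_of`: instead of reaching into a `.predicates` field, callers take `skip_binder()` or iterate with `iter_identity_copied()` / `iter_instantiated_copied(...)`. A simplified model of such a wrapper (not the actual rustc `EarlyBinder` API) showing why both access styles exist:

    // Hypothetical stand-in for an early-binder wrapper around (clause, span) pairs.
    struct Bound<T>(Vec<T>);

    impl<T: Copy> Bound<T> {
        // Look at the bound data without instantiating generic args.
        fn skip_binder(&self) -> &[T] {
            &self.0
        }
        // Iterate the bound data, copied out, with identity substitutions.
        fn iter_identity_copied(&self) -> impl Iterator<Item = T> + '_ {
            self.0.iter().copied()
        }
    }

    fn main() {
        let preds = Bound(vec![(1u32, "span-a"), (2u32, "span-b")]);
        assert_eq!(preds.skip_binder().len(), 2);
        assert_eq!(preds.iter_identity_copied().count(), 2);
    }
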
@ -600,21 +600,19 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {

// Check supertraits hold. This is so that their associated type bounds
// will be checked in the code below.
for super_trait in tcx
for (supertrait, _) in tcx
.explicit_super_predicates_of(trait_predicate.def_id())
.instantiate(tcx, trait_predicate.trait_ref.args)
.predicates
.into_iter()
.iter_instantiated_copied(tcx, trait_predicate.trait_ref.args)
{
let normalized_super_trait = normalize_with_depth_to(
let normalized_supertrait = normalize_with_depth_to(
self,
obligation.param_env,
obligation.cause.clone(),
obligation.recursion_depth + 1,
super_trait,
supertrait,
&mut nested,
);
nested.push(obligation.with(tcx, normalized_super_trait));
nested.push(obligation.with(tcx, normalized_supertrait));
}

let assoc_types: Vec<_> = tcx

@ -131,7 +131,7 @@ impl<'tcx> TraitAliasExpander<'tcx> {
let predicates = tcx.explicit_super_predicates_of(trait_ref.def_id());
debug!(?predicates);

let items = predicates.predicates.iter().rev().filter_map(|(pred, span)| {
let items = predicates.skip_binder().iter().rev().filter_map(|(pred, span)| {
pred.instantiate_supertrait(tcx, trait_ref)
.as_trait_clause()
.map(|trait_ref| item.clone_and_push(trait_ref.map_bound(|t| t.trait_ref), *span))

@ -120,8 +120,7 @@ fn prepare_vtable_segments_inner<'tcx, T>(

let mut direct_super_traits_iter = tcx
.explicit_super_predicates_of(inner_most_trait_ref.def_id())
.predicates
.into_iter()
.iter_identity_copied()
.filter_map(move |(pred, _)| {
pred.instantiate_supertrait(tcx, inner_most_trait_ref).as_trait_clause()
});

@ -254,13 +254,6 @@ pub enum TyKind<I: Interner> {
Error(I::ErrorGuaranteed),
}

impl<I: Interner> TyKind<I> {
#[inline]
pub fn is_primitive(&self) -> bool {
matches!(self, Bool | Char | Int(_) | Uint(_) | Float(_))
}
}

// This is manually implemented because a derive would require `I: Debug`
impl<I: Interner> fmt::Debug for TyKind<I> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {

@ -1,6 +1,3 @@
// Disabling on android for the time being
// See https://github.com/rust-lang/rust/issues/73535#event-3477699747
#![cfg(not(target_os = "android"))]
// Disabling in Miri as these would take too long.
#![cfg(not(miri))]
#![feature(btree_extract_if)]

@ -723,7 +723,6 @@ fn test_reserve_exact() {

#[test]
#[cfg_attr(miri, ignore)] // Miri does not support signalling OOM
#[cfg_attr(target_os = "android", ignore)] // Android used in CI has a broken dlmalloc
fn test_try_with_capacity() {
let string = String::try_with_capacity(1000).unwrap();
assert_eq!(0, string.len());
@ -734,7 +733,6 @@ fn test_try_with_capacity() {

#[test]
#[cfg_attr(miri, ignore)] // Miri does not support signalling OOM
#[cfg_attr(target_os = "android", ignore)] // Android used in CI has a broken dlmalloc
fn test_try_reserve() {
// These are the interesting cases:
// * exactly isize::MAX should never trigger a CapacityOverflow (can be OOM)
@ -803,7 +801,6 @@ fn test_try_reserve() {

#[test]
#[cfg_attr(miri, ignore)] // Miri does not support signalling OOM
#[cfg_attr(target_os = "android", ignore)] // Android used in CI has a broken dlmalloc
fn test_try_reserve_exact() {
// This is exactly the same as test_try_reserve with the method changed.
// See that test for comments.

@ -1695,7 +1695,6 @@ fn test_reserve_exact() {

#[test]
#[cfg_attr(miri, ignore)] // Miri does not support signalling OOM
#[cfg_attr(target_os = "android", ignore)] // Android used in CI has a broken dlmalloc
fn test_try_with_capacity() {
let mut vec: Vec<u32> = Vec::try_with_capacity(5).unwrap();
assert_eq!(0, vec.len());
@ -1707,7 +1706,6 @@ fn test_try_with_capacity() {

#[test]
#[cfg_attr(miri, ignore)] // Miri does not support signalling OOM
#[cfg_attr(target_os = "android", ignore)] // Android used in CI has a broken dlmalloc
fn test_try_reserve() {
// These are the interesting cases:
// * exactly isize::MAX should never trigger a CapacityOverflow (can be OOM)
@ -1803,7 +1801,6 @@ fn test_try_reserve() {

#[test]
#[cfg_attr(miri, ignore)] // Miri does not support signalling OOM
#[cfg_attr(target_os = "android", ignore)] // Android used in CI has a broken dlmalloc
fn test_try_reserve_exact() {
// This is exactly the same as test_try_reserve with the method changed.
// See that test for comments.

@ -1185,7 +1185,6 @@ fn test_reserve_exact_2() {

#[test]
#[cfg_attr(miri, ignore)] // Miri does not support signalling OOM
#[cfg_attr(target_os = "android", ignore)] // Android used in CI has a broken dlmalloc
fn test_try_with_capacity() {
let vec: VecDeque<u32> = VecDeque::try_with_capacity(5).unwrap();
assert_eq!(0, vec.len());
@ -1196,7 +1195,6 @@ fn test_try_with_capacity() {

#[test]
#[cfg_attr(miri, ignore)] // Miri does not support signalling OOM
#[cfg_attr(target_os = "android", ignore)] // Android used in CI has a broken dlmalloc
fn test_try_reserve() {
// These are the interesting cases:
// * exactly isize::MAX should never trigger a CapacityOverflow (can be OOM)
@ -1292,7 +1290,6 @@ fn test_try_reserve() {

#[test]
#[cfg_attr(miri, ignore)] // Miri does not support signalling OOM
#[cfg_attr(target_os = "android", ignore)] // Android used in CI has a broken dlmalloc
fn test_try_reserve_exact() {
// This is exactly the same as test_try_reserve with the method changed.
// See that test for comments.

@ -1 +1 @@
Subproject commit 72265bea210891ae47bbe6d4f17b493ef0606619
Subproject commit 230570f2dac80a601f5c0b30da00cc9480bd35eb

@ -1,7 +1,4 @@
//! This tests the `Integer::{ilog,log2,log10}` methods. These tests are in a
//! separate file because there's both a large number of them, and not all tests
//! can be run on Android. This is because in Android `ilog2` uses an imprecise
//! approximation: https://github.com/rust-lang/rust/blob/4825e12fc9c79954aa0fe18f5521efa6c19c7539/src/libstd/sys/unix/android.rs#L27-L53
//! Tests for the `Integer::{ilog,log2,log10}` methods.

#[test]
fn checked_ilog() {
@ -48,6 +45,10 @@ fn checked_ilog2() {
assert_eq!(0i8.checked_ilog2(), None);
assert_eq!(0i16.checked_ilog2(), None);

assert_eq!(8192u16.checked_ilog2(), Some((8192f32).log2() as u32));
assert_eq!(32768u16.checked_ilog2(), Some((32768f32).log2() as u32));
assert_eq!(8192i16.checked_ilog2(), Some((8192f32).log2() as u32));

for i in 1..=u8::MAX {
assert_eq!(i.checked_ilog2(), Some((i as f32).log2() as u32), "checking {i}");
}
@ -77,15 +78,6 @@ fn checked_ilog2() {
}
}

// Validate cases that fail on Android's imprecise float ilog2 implementation.
#[test]
#[cfg(not(target_os = "android"))]
fn checked_ilog2_not_android() {
assert_eq!(8192u16.checked_ilog2(), Some((8192f32).log2() as u32));
assert_eq!(32768u16.checked_ilog2(), Some((32768f32).log2() as u32));
assert_eq!(8192i16.checked_ilog2(), Some((8192f32).log2() as u32));
}

#[test]
fn checked_ilog10() {
assert_eq!(0u8.checked_ilog10(), None);

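For reference, the constants in the merged assertions are exact powers of two, so the expected results are unambiguous; a tiny standalone check of the same values:

    fn main() {
        // 8192 = 2^13 and 32768 = 2^15.
        assert_eq!(8192u16.checked_ilog2(), Some(13));
        assert_eq!(32768u16.checked_ilog2(), Some(15));
        assert_eq!(0u16.checked_ilog2(), None);
    }
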
@ -94,7 +94,9 @@ impl<R: Read> BufReader<R> {
pub fn with_capacity(capacity: usize, inner: R) -> BufReader<R> {
BufReader { inner, buf: Buffer::with_capacity(capacity) }
}
}

impl<R: Read + ?Sized> BufReader<R> {
/// Attempt to look ahead `n` bytes.
///
/// `n` must be less than `capacity`.

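The method is moved into its own impl block bounded by `R: Read + ?Sized`, so it stays callable after the inner reader has been type-erased. A minimal sketch of why the relaxed bound matters, using a hypothetical `Wrapper` type rather than `BufReader` itself:

    use std::io::Read;

    // Hypothetical wrapper; only the `?Sized` bound mirrors the change.
    struct Wrapper<R: ?Sized> {
        inner: R,
    }

    impl<R: Read + ?Sized> Wrapper<R> {
        // Callable even when `R` is `dyn Read`.
        fn read_one(&mut self) -> std::io::Result<u8> {
            let mut byte = [0u8; 1];
            self.inner.read_exact(&mut byte)?;
            Ok(byte[0])
        }
    }

    fn main() -> std::io::Result<()> {
        // Unsize the inner reader: Box<Wrapper<&[u8]>> -> Box<Wrapper<dyn Read>>.
        let mut erased: Box<Wrapper<dyn Read>> = Box::new(Wrapper { inner: &b"hi"[..] });
        println!("first byte: {}", erased.read_one()?);
        Ok(())
    }
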
@ -136,36 +136,37 @@ impl Thread {
}

pub fn sleep(dur: Duration) {
let nanos = dur.as_nanos();
assert!(nanos <= u64::MAX as u128);
let mut nanos = dur.as_nanos();
while nanos > 0 {
const USERDATA: wasi::Userdata = 0x0123_45678;

const USERDATA: wasi::Userdata = 0x0123_45678;
let clock = wasi::SubscriptionClock {
id: wasi::CLOCKID_MONOTONIC,
timeout: u64::try_from(nanos).unwrap_or(u64::MAX),
precision: 0,
flags: 0,
};
nanos -= u128::from(clock.timeout);

let clock = wasi::SubscriptionClock {
id: wasi::CLOCKID_MONOTONIC,
timeout: nanos as u64,
precision: 0,
flags: 0,
};

let in_ = wasi::Subscription {
userdata: USERDATA,
u: wasi::SubscriptionU { tag: 0, u: wasi::SubscriptionUU { clock } },
};
unsafe {
let mut event: wasi::Event = mem::zeroed();
let res = wasi::poll_oneoff(&in_, &mut event, 1);
match (res, event) {
(
Ok(1),
wasi::Event {
userdata: USERDATA,
error: wasi::ERRNO_SUCCESS,
type_: wasi::EVENTTYPE_CLOCK,
..
},
) => {}
_ => panic!("thread::sleep(): unexpected result of poll_oneoff"),
let in_ = wasi::Subscription {
userdata: USERDATA,
u: wasi::SubscriptionU { tag: 0, u: wasi::SubscriptionUU { clock } },
};
unsafe {
let mut event: wasi::Event = mem::zeroed();
let res = wasi::poll_oneoff(&in_, &mut event, 1);
match (res, event) {
(
Ok(1),
wasi::Event {
userdata: USERDATA,
error: wasi::ERRNO_SUCCESS,
type_: wasi::EVENTTYPE_CLOCK,
..
},
) => {}
_ => panic!("thread::sleep(): unexpected result of poll_oneoff"),
}
}
}
}

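The old implementation asserted that the whole duration fit in a `u64` of nanoseconds, so `thread::sleep(Duration::MAX)` would abort; the new loop sleeps in `u64`-sized chunks. A standalone sketch of just the chunking arithmetic, with no WASI calls:

    fn sleep_chunks(dur: std::time::Duration) -> Vec<u64> {
        let mut nanos = dur.as_nanos();
        let mut chunks = Vec::new();
        while nanos > 0 {
            // Clamp each chunk to what fits in a u64 timeout.
            let timeout = u64::try_from(nanos).unwrap_or(u64::MAX);
            chunks.push(timeout);
            nanos -= u128::from(timeout);
        }
        chunks
    }

    fn main() {
        // Duration::MAX does not fit in one u64 of nanoseconds, so it needs several chunks.
        assert!(sleep_chunks(std::time::Duration::MAX).len() > 1);
        assert_eq!(sleep_chunks(std::time::Duration::from_secs(1)), vec![1_000_000_000]);
    }
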
@ -14,9 +14,8 @@ use crate::core::build_steps::synthetic_targets::MirOptPanicAbortSyntheticTarget
use crate::core::build_steps::tool::{self, SourceType, Tool};
use crate::core::build_steps::toolstate::ToolState;
use crate::core::build_steps::{compile, dist, llvm};
use crate::core::builder;
use crate::core::builder::{
crate_description, Builder, Compiler, Kind, RunConfig, ShouldRun, Step,
self, crate_description, Alias, Builder, Compiler, Kind, RunConfig, ShouldRun, Step,
};
use crate::core::config::flags::{get_completion, Subcommand};
use crate::core::config::TargetSelection;
@ -2435,18 +2434,14 @@ impl Step for CrateLibrustc {
const ONLY_HOSTS: bool = true;

fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
run.crate_or_deps("rustc-main")
run.crate_or_deps("rustc-main").path("compiler")
}

fn make_run(run: RunConfig<'_>) {
let builder = run.builder;
let host = run.build_triple();
let compiler = builder.compiler_for(builder.top_stage, host, host);
let crates = run
.paths
.iter()
.map(|p| builder.crate_paths[&p.assert_single_path().path].clone())
.collect();
let crates = run.make_run_crates(Alias::Compiler);

builder.ensure(CrateLibrustc { compiler, target: run.target, crates });
}

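In effect, `CrateLibrustc` now also matches the bare `compiler` path and resolves its crate list through the `Compiler` alias, so an invocation like `./x.py test compiler` should run the compiler crates' unit tests without naming each crate individually.
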
@ -47,7 +47,7 @@ KIND=PATH` where `KIND` may be one of:
directory.
- `native` — Only search for native libraries in this directory.
- `framework` — Only search for macOS frameworks in this directory.
- `all` — Search for all library kinds in this directory. This is the default
- `all` — Search for all library kinds in this directory, except frameworks. This is the default
if `KIND` is not specified.

<a id="option-l-link-lib"></a>

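This documents that the default `all` kind does not cover macOS frameworks; an explicit kind still narrows the search further, for example an invocation like `rustc main.rs -L native=/path/to/libs -l static=native` (hypothetical paths) searches that directory only for native libraries.
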
@ -14,7 +14,6 @@
use rustc_data_structures::fx::FxIndexMap;
use rustc_data_structures::unord::UnordSet;
use rustc_hir::def_id::DefId;
use rustc_middle::ty;
use thin_vec::ThinVec;

use crate::clean;
@ -113,18 +112,9 @@ fn trait_is_same_or_supertrait(cx: &DocContext<'_>, child: DefId, trait_: DefId)
return true;
}
let predicates = cx.tcx.explicit_super_predicates_of(child);
debug_assert!(cx.tcx.generics_of(child).has_self);
let self_ty = cx.tcx.types.self_param;
predicates
.predicates
.iter()
.filter_map(|(pred, _)| {
if let ty::ClauseKind::Trait(pred) = pred.kind().skip_binder() {
if pred.trait_ref.self_ty() == self_ty { Some(pred.def_id()) } else { None }
} else {
None
}
})
.iter_identity_copied()
.filter_map(|(pred, _)| Some(pred.as_trait_clause()?.def_id()))
.any(|did| trait_is_same_or_supertrait(cx, did, trait_))
}

File diff suppressed because it is too large
@ -246,7 +246,7 @@ fn collect_supertrait_bounds<'tcx>(cx: &LateContext<'tcx>, bounds: GenericBounds
&& let [.., path] = poly_trait.trait_ref.path.segments
&& poly_trait.bound_generic_params.is_empty()
&& let Some(trait_def_id) = path.res.opt_def_id()
&& let predicates = cx.tcx.explicit_super_predicates_of(trait_def_id).predicates
&& let predicates = cx.tcx.explicit_super_predicates_of(trait_def_id).skip_binder()
// If the trait has no supertrait, there is no need to collect anything from that bound
&& !predicates.is_empty()
{

@ -25,8 +25,7 @@ fn is_subtrait_of_any(cx: &LateContext<'_>, ty: Ty<'_>) -> bool {
|| cx
.tcx
.explicit_super_predicates_of(tr.def_id)
.predicates
.iter()
.iter_identity_copied()
.any(|(clause, _)| {
matches!(clause.kind().skip_binder(), ty::ClauseKind::Trait(super_tr)
if cx.tcx.is_diagnostic_item(sym::Any, super_tr.def_id()))

@ -91,7 +91,7 @@ fn path_to_sized_bound(cx: &LateContext<'_>, trait_bound: &PolyTraitRef<'_>) ->
return true;
}

for &(predicate, _) in cx.tcx.explicit_super_predicates_of(trait_def_id).predicates {
for (predicate, _) in cx.tcx.explicit_super_predicates_of(trait_def_id).iter_identity_copied() {
if let ClauseKind::Trait(trait_predicate) = predicate.kind().skip_binder()
&& trait_predicate.polarity == PredicatePolarity::Positive
&& !path.contains(&trait_predicate.def_id())

@ -630,14 +630,11 @@ pub trait EvalContextExt<'tcx>: crate::MiriInterpCxExt<'tcx> {
self.ecx
}

fn aggregate_field_order(memory_index: &IndexVec<FieldIdx, u32>, idx: usize) -> usize {
// We need to do an *inverse* lookup: find the field that has position `idx` in memory order.
for (src_field, &mem_pos) in memory_index.iter_enumerated() {
if mem_pos as usize == idx {
return src_field.as_usize();
}
}
panic!("invalid `memory_index`, could not find {}-th field in memory order", idx);
fn aggregate_field_iter(
memory_index: &IndexVec<FieldIdx, u32>,
) -> impl Iterator<Item = FieldIdx> + 'static {
let inverse_memory_index = memory_index.invert_bijective_mapping();
inverse_memory_index.into_iter()
}

// Hook to detect `UnsafeCell`.

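Instead of a linear scan for every memory position, the field-to-memory-position mapping is inverted once and then iterated. A standalone sketch of that inversion using plain slices rather than Miri's `IndexVec`:

    fn invert_bijective_mapping(memory_index: &[u32]) -> Vec<usize> {
        // inverse[mem_pos] = index of the field that occupies that memory position.
        let mut inverse = vec![usize::MAX; memory_index.len()];
        for (field, &mem_pos) in memory_index.iter().enumerate() {
            inverse[mem_pos as usize] = field;
        }
        inverse
    }

    fn main() {
        // Field 0 lives at memory position 2, field 1 at position 0, field 2 at position 1.
        assert_eq!(invert_bijective_mapping(&[2, 0, 1]), vec![1, 2, 0]);
    }
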
@ -427,7 +427,6 @@ function loadSearchJS(doc_folder, resource_suffix) {
return list[descIndex];
},
loadedDescShard: function(crate, shard, data) {
//console.log(this.descShards);
this.descShards.get(crate)[shard].resolve(data.split("\n"));
},
};
@ -436,15 +435,15 @@ function loadSearchJS(doc_folder, resource_suffix) {
const searchJs = fs.readdirSync(staticFiles).find(f => f.match(/search.*\.js$/));
const searchModule = require(path.join(staticFiles, searchJs));
searchModule.initSearch(searchIndex.searchIndex);

const docSearch = searchModule.docSearch;
return {
doSearch: function(queryStr, filterCrate, currentCrate) {
return searchModule.execQuery(searchModule.parseQuery(queryStr),
return docSearch.execQuery(searchModule.parseQuery(queryStr),
filterCrate, currentCrate);
},
getCorrections: function(queryStr, filterCrate, currentCrate) {
const parsedQuery = searchModule.parseQuery(queryStr);
searchModule.execQuery(parsedQuery, filterCrate, currentCrate);
docSearch.execQuery(parsedQuery, filterCrate, currentCrate);
return parsedQuery.correction;
},
parseQuery: searchModule.parseQuery,

tests/run-make/native-lib-alt-naming/native.rs (new file, 2 lines)
@ -0,0 +1,2 @@
#[no_mangle]
pub extern "C" fn native_lib_alt_naming() {}

tests/run-make/native-lib-alt-naming/rmake.rs (new file, 15 lines)
@ -0,0 +1,15 @@
// On MSVC the alternative naming format for static libraries (`libfoo.a`) is accepted in addition
// to the default format (`foo.lib`).

//@ only-msvc

use run_make_support::rustc;

fn main() {
// Prepare the native library.
rustc().input("native.rs").crate_type("staticlib").output("libnative.a").run();

// Try to link to it from both a rlib and a bin.
rustc().input("rust.rs").crate_type("rlib").arg("-lstatic=native").run();
rustc().input("rust.rs").crate_type("bin").arg("-lstatic=native").run();
}

tests/run-make/native-lib-alt-naming/rust.rs (new file, 1 line)
@ -0,0 +1 @@
pub fn main() {}

tests/rustdoc-json/traits/self.rs (new file, 58 lines)
@ -0,0 +1,58 @@
// ignore-tidy-linelength

pub struct Foo;

// Check that Self is represented uniformly between inherent impls, trait impls,
// and trait definitions, even though it uses both SelfTyParam and SelfTyAlias
// internally.
//
// Each assertion matches 3 times, and should be the same each time.

impl Foo {
//@ ismany '$.index[*][?(@.name=="by_ref")].inner.function.decl.inputs[0][0]' '"self"' '"self"' '"self"'
//@ ismany '$.index[*][?(@.name=="by_ref")].inner.function.decl.inputs[0][1].borrowed_ref.type.generic' '"Self"' '"Self"' '"Self"'
//@ ismany '$.index[*][?(@.name=="by_ref")].inner.function.decl.inputs[0][1].borrowed_ref.lifetime' null null null
//@ ismany '$.index[*][?(@.name=="by_ref")].inner.function.decl.inputs[0][1].borrowed_ref.mutable' false false false
pub fn by_ref(&self) {}

//@ ismany '$.index[*][?(@.name=="by_exclusive_ref")].inner.function.decl.inputs[0][0]' '"self"' '"self"' '"self"'
//@ ismany '$.index[*][?(@.name=="by_exclusive_ref")].inner.function.decl.inputs[0][1].borrowed_ref.type.generic' '"Self"' '"Self"' '"Self"'
//@ ismany '$.index[*][?(@.name=="by_exclusive_ref")].inner.function.decl.inputs[0][1].borrowed_ref.lifetime' null null null
//@ ismany '$.index[*][?(@.name=="by_exclusive_ref")].inner.function.decl.inputs[0][1].borrowed_ref.mutable' true true true
pub fn by_exclusive_ref(&mut self) {}

//@ ismany '$.index[*][?(@.name=="by_value")].inner.function.decl.inputs[0][0]' '"self"' '"self"' '"self"'
//@ ismany '$.index[*][?(@.name=="by_value")].inner.function.decl.inputs[0][1].generic' '"Self"' '"Self"' '"Self"'
pub fn by_value(self) {}

//@ ismany '$.index[*][?(@.name=="with_lifetime")].inner.function.decl.inputs[0][0]' '"self"' '"self"' '"self"'
//@ ismany '$.index[*][?(@.name=="with_lifetime")].inner.function.decl.inputs[0][1].borrowed_ref.type.generic' '"Self"' '"Self"' '"Self"'
//@ ismany '$.index[*][?(@.name=="with_lifetime")].inner.function.decl.inputs[0][1].borrowed_ref.lifetime' \"\'a\" \"\'a\" \"\'a\"
//@ ismany '$.index[*][?(@.name=="with_lifetime")].inner.function.decl.inputs[0][1].borrowed_ref.mutable' false false false
pub fn with_lifetime<'a>(&'a self) {}

//@ ismany '$.index[*][?(@.name=="build")].inner.function.decl.output.generic' '"Self"' '"Self"' '"Self"'
pub fn build() -> Self {
Self
}
}

pub struct Bar;

pub trait SelfParams {
fn by_ref(&self);
fn by_exclusive_ref(&mut self);
fn by_value(self);
fn with_lifetime<'a>(&'a self);
fn build() -> Self;
}

impl SelfParams for Bar {
fn by_ref(&self) {}
fn by_exclusive_ref(&mut self) {}
fn by_value(self) {}
fn with_lifetime<'a>(&'a self) {}
fn build() -> Self {
Self
}
}

tests/ui/pattern/patterns-dont-match-nt-statement.rs (new file, 19 lines)
@ -0,0 +1,19 @@
//@ check-pass

// Make sure that a `stmt` nonterminal does not eagerly match against
// a `pat`, since this will always cause a parse error...

macro_rules! m {
($pat:pat) => {};
($stmt:stmt) => {};
}

macro_rules! m2 {
($stmt:stmt) => {
m! { $stmt }
};
}

m2! { let x = 1 }

fn main() {}