mirror of
https://github.com/rust-lang/rust.git
synced 2024-11-22 14:55:26 +00:00
Auto merge of #130016 - matthiaskrgr:rollup-fopistw, r=matthiaskrgr
Rollup of 6 pull requests

Successful merges:

 - #129021 (Check WF of source type's signature on fn pointer cast)
 - #129781 (Make `./x.py <cmd> compiler/<crate>` aware of the crate's features)
 - #129963 (Inaccurate `{Path,OsStr}::to_string_lossy()` documentation)
 - #129969 (Make `Ty::boxed_ty` return an `Option`)
 - #129995 (Remove wasm32-wasip2's tier 2 status from release notes)
 - #130013 (coverage: Count await when the Future is immediately ready)

r? `@ghost`
`@rustbot` modify labels: rollup
commit a3af2085cc
@@ -34,7 +34,6 @@ Compiler
 - [Add Tier 3 `std` Xtensa targets:](https://github.com/rust-lang/rust/pull/126380/) `xtensa-esp32-espidf`, `xtensa-esp32s2-espidf`, `xtensa-esp32s3-espidf`
 - [Add Tier 3 i686 Redox OS target:](https://github.com/rust-lang/rust/pull/126192/) `i686-unknown-redox`
 - [Promote `arm64ec-pc-windows-msvc` to Tier 2.](https://github.com/rust-lang/rust/pull/126039/)
-- [Promote `wasm32-wasip2` to Tier 2.](https://github.com/rust-lang/rust/pull/126967/)
 - [Promote `loongarch64-unknown-linux-musl` to Tier 2 with host tools.](https://github.com/rust-lang/rust/pull/126298/)
 - [Enable full tools and profiler for LoongArch Linux targets.](https://github.com/rust-lang/rust/pull/127078/)
 - [Unconditionally warn on usage of `wasm32-wasi`.](https://github.com/rust-lang/rust/pull/126662/) (see compatibility note below)
@@ -662,9 +662,10 @@ impl<'tcx> MirBorrowckCtxt<'_, '_, '_, 'tcx> {
 // `&dyn Trait`
 ty::Ref(_, ty, _) if ty.is_trait() => true,
 // `Box<dyn Trait>`
-_ if ty.is_box() && ty.boxed_ty().is_trait() => {
+_ if ty.boxed_ty().is_some_and(Ty::is_trait) => {
 true
 }

 // `dyn Trait`
 _ if ty.is_trait() => true,
 // Anything else.
@@ -345,9 +345,9 @@ impl<'infcx, 'tcx> MirBorrowckCtxt<'_, '_, 'infcx, 'tcx> {
 variant_index: Option<VariantIdx>,
 including_tuple_field: IncludingTupleField,
 ) -> Option<String> {
-if ty.is_box() {
+if let Some(boxed_ty) = ty.boxed_ty() {
 // If the type is a box, the field is described from the boxed type
-self.describe_field_from_ty(ty.boxed_ty(), field, variant_index, including_tuple_field)
+self.describe_field_from_ty(boxed_ty, field, variant_index, including_tuple_field)
 } else {
 match *ty.kind() {
 ty::Adt(def, _) => {
@@ -1979,19 +1979,76 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {

 match cast_kind {
 CastKind::PointerCoercion(PointerCoercion::ReifyFnPointer) => {
-let fn_sig = op.ty(body, tcx).fn_sig(tcx);
+let src_sig = op.ty(body, tcx).fn_sig(tcx);

+// HACK: This shouldn't be necessary... We can remove this when we actually
+// get binders with where clauses, then elaborate implied bounds into that
+// binder, and implement a higher-ranked subtyping algorithm that actually
+// respects these implied bounds.
+//
+// This protects against the case where we are casting from a higher-ranked
+// fn item to a non-higher-ranked fn pointer, where the cast throws away
+// implied bounds that would've needed to be checked at the call site. This
+// only works when we're casting to a non-higher-ranked fn ptr, since
+// placeholders in the target signature could have untracked implied
+// bounds, resulting in incorrect errors.
+//
+// We check that this signature is WF before subtyping the signature with
+// the target fn sig.
+if src_sig.has_bound_regions()
+&& let ty::FnPtr(target_fn_tys, target_hdr) = *ty.kind()
+&& let target_sig = target_fn_tys.with(target_hdr)
+&& let Some(target_sig) = target_sig.no_bound_vars()
+{
+let src_sig = self.infcx.instantiate_binder_with_fresh_vars(
+span,
+BoundRegionConversionTime::HigherRankedType,
+src_sig,
+);
+let src_ty = Ty::new_fn_ptr(self.tcx(), ty::Binder::dummy(src_sig));
+self.prove_predicate(
+ty::ClauseKind::WellFormed(src_ty.into()),
+location.to_locations(),
+ConstraintCategory::Cast { unsize_to: None },
+);
+
+let src_ty = self.normalize(src_ty, location);
+if let Err(terr) = self.sub_types(
+src_ty,
+*ty,
+location.to_locations(),
+ConstraintCategory::Cast { unsize_to: None },
+) {
+span_mirbug!(
+self,
+rvalue,
+"equating {:?} with {:?} yields {:?}",
+target_sig,
+src_sig,
+terr
+);
+};
+}
+
+let src_ty = Ty::new_fn_ptr(tcx, src_sig);
+// HACK: We want to assert that the signature of the source fn is
+// well-formed, because we don't enforce that via the WF of FnDef
+// types normally. This should be removed when we improve the tracking
+// of implied bounds of fn signatures.
+self.prove_predicate(
+ty::ClauseKind::WellFormed(src_ty.into()),
+location.to_locations(),
+ConstraintCategory::Cast { unsize_to: None },
+);
+
 // The type that we see in the fcx is like
 // `foo::<'a, 'b>`, where `foo` is the path to a
 // function definition. When we extract the
 // signature, it comes from the `fn_sig` query,
 // and hence may contain unnormalized results.
-let fn_sig = self.normalize(fn_sig, location);
+let src_ty = self.normalize(src_ty, location);

-let ty_fn_ptr_from = Ty::new_fn_ptr(tcx, fn_sig);
-
 if let Err(terr) = self.sub_types(
-ty_fn_ptr_from,
+src_ty,
 *ty,
 location.to_locations(),
 ConstraintCategory::Cast { unsize_to: None },

@@ -2000,7 +2057,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
 self,
 rvalue,
 "equating {:?} with {:?} yields {:?}",
-ty_fn_ptr_from,
+src_ty,
 ty,
 terr
 );
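The two WF checks added above reject casts that used to launder implied bounds through a function-pointer type. As a concrete illustration (condensed from the regression tests added at the end of this rollup; this is test code, not compiler code), the pattern now rejected looks roughly like this:

static UNIT: &'static &'static () = &&();

fn foo<'a, 'b, T>(_: &'a &'b (), v: &'b T) -> &'a T { v }

fn bad<'a, T>(x: &'a T) -> &'static T {
    // The fn item `foo` carries the implied bound `'b: 'a` from `&'a &'b ()`.
    // Casting it to a non-higher-ranked fn pointer used to drop that bound;
    // with this change the cast's source signature must be well-formed, so
    // this line now fails with "lifetime may not live long enough".
    let f: fn(_, &'a T) -> &'static T = foo;
    f(UNIT, x)
}

fn main() {}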
@@ -456,7 +456,7 @@ pub(crate) fn type_di_node<'ll, 'tcx>(cx: &CodegenCx<'ll, 'tcx>, t: Ty<'tcx>) ->
 if def.is_box()
 && args.get(1).map_or(true, |arg| cx.layout_of(arg.expect_ty()).is_1zst()) =>
 {
-build_pointer_or_reference_di_node(cx, t, t.boxed_ty(), unique_type_id)
+build_pointer_or_reference_di_node(cx, t, t.expect_boxed_ty(), unique_type_id)
 }
 ty::FnDef(..) | ty::FnPtr(..) => build_subroutine_type_di_node(cx, unique_type_id),
 ty::Closure(..) => build_closure_env_di_node(cx, unique_type_id),
@@ -189,7 +189,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
 ty::Ref(_, ty, _) => *ty,
 ty::RawPtr(ty, _) => *ty,
 // We only accept `Box` with the default allocator.
-_ if ty.is_box_global(*self.tcx) => ty.boxed_ty(),
+_ if ty.is_box_global(*self.tcx) => ty.expect_boxed_ty(),
 _ => return Ok(None),
 }))
 };
@@ -63,8 +63,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
 // Instead, the problem is that the array-into_iter hack will no longer
 // apply in Rust 2021.
 (ARRAY_INTO_ITER, "2021")
-} else if self_ty.is_box()
-&& self_ty.boxed_ty().is_slice()
+} else if self_ty.boxed_ty().is_some_and(Ty::is_slice)
 && !span.at_least_rust_2024()
 {
 // In this case, it wasn't really a prelude addition that was the problem.
@@ -1485,8 +1485,7 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> {

 // Some trait methods are excluded for boxed slices before 2024.
 // (`boxed_slice.into_iter()` wants a slice iterator for compatibility.)
-if self_ty.is_box()
-&& self_ty.boxed_ty().is_slice()
+if self_ty.boxed_ty().is_some_and(Ty::is_slice)
 && !method_name.span.at_least_rust_2024()
 {
 let trait_def = self.tcx.trait_def(poly_trait_ref.def_id());
@@ -94,12 +94,9 @@ impl<'tcx> LateLintPass<'tcx> for ShadowedIntoIter {
 fn is_ref_to_array(ty: Ty<'_>) -> bool {
 if let ty::Ref(_, pointee_ty, _) = *ty.kind() { pointee_ty.is_array() } else { false }
 }
-fn is_boxed_slice(ty: Ty<'_>) -> bool {
-ty.is_box() && ty.boxed_ty().is_slice()
-}
 fn is_ref_to_boxed_slice(ty: Ty<'_>) -> bool {
 if let ty::Ref(_, pointee_ty, _) = *ty.kind() {
-is_boxed_slice(pointee_ty)
+pointee_ty.boxed_ty().is_some_and(Ty::is_slice)
 } else {
 false
 }

@@ -119,7 +116,7 @@ impl<'tcx> LateLintPass<'tcx> for ShadowedIntoIter {
 .iter()
 .copied()
 .take_while(|ty| !is_ref_to_boxed_slice(*ty))
-.position(|ty| is_boxed_slice(ty))
+.position(|ty| ty.boxed_ty().is_some_and(Ty::is_slice))
 {
 (BOXED_SLICE_INTO_ITER, "Box<[T]>", "2024", idx == 0)
 } else {
@@ -1304,8 +1304,10 @@ impl<'a, 'tcx> ImproperCTypesVisitor<'a, 'tcx> {

 match *ty.kind() {
 ty::Adt(def, args) => {
-if def.is_box() && matches!(self.mode, CItemKind::Definition) {
-if ty.boxed_ty().is_sized(tcx, self.cx.param_env) {
+if let Some(boxed) = ty.boxed_ty()
+&& matches!(self.mode, CItemKind::Definition)
+{
+if boxed.is_sized(tcx, self.cx.param_env) {
 return FfiSafe;
 } else {
 return FfiUnsafe {
@@ -283,9 +283,8 @@ impl<'tcx> LateLintPass<'tcx> for UnusedResults {
 }

 match *ty.kind() {
-ty::Adt(..) if ty.is_box() => {
-let boxed_ty = ty.boxed_ty();
-is_ty_must_use(cx, boxed_ty, expr, span)
+ty::Adt(..) if let Some(boxed) = ty.boxed_ty() => {
+is_ty_must_use(cx, boxed, expr, span)
 .map(|inner| MustUsePath::Boxed(Box::new(inner)))
 }
 ty::Adt(def, args) if cx.tcx.is_lang_item(def.did(), LangItem::Pin) => {
@@ -1075,11 +1075,13 @@ where
 // the raw pointer, so size and align are set to the boxed type, but `pointee.safe`
 // will still be `None`.
 if let Some(ref mut pointee) = result {
-if offset.bytes() == 0 && this.ty.is_box() {
+if offset.bytes() == 0
+&& let Some(boxed_ty) = this.ty.boxed_ty()
+{
 debug_assert!(pointee.safe.is_none());
 let optimize = tcx.sess.opts.optimize != OptLevel::No;
 pointee.safe = Some(PointerKind::Box {
-unpin: optimize && this.ty.boxed_ty().is_unpin(tcx, cx.param_env()),
+unpin: optimize && boxed_ty.is_unpin(tcx, cx.param_env()),
 global: this.ty.is_box_global(tcx),
 });
 }
@@ -1170,14 +1170,19 @@ impl<'tcx> Ty<'tcx> {
 }
 }

-/// Panics if called on any type other than `Box<T>`.
-pub fn boxed_ty(self) -> Ty<'tcx> {
+pub fn boxed_ty(self) -> Option<Ty<'tcx>> {
 match self.kind() {
-Adt(def, args) if def.is_box() => args.type_at(0),
-_ => bug!("`boxed_ty` is called on non-box type {:?}", self),
+Adt(def, args) if def.is_box() => Some(args.type_at(0)),
+_ => None,
 }
 }

+/// Panics if called on any type other than `Box<T>`.
+pub fn expect_boxed_ty(self) -> Ty<'tcx> {
+self.boxed_ty()
+.unwrap_or_else(|| bug!("`expect_boxed_ty` is called on non-box type {:?}", self))
+}
+
 /// A scalar type is one that denotes an atomic datum, with no sub-components.
 /// (A RawPtr is scalar because it represents a non-managed pointer, so its
 /// contents are abstract to rustc.)
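The migration pattern repeated throughout this rollup follows directly from the new API above: `boxed_ty` is now fallible and `expect_boxed_ty` keeps the old panicking behaviour. A minimal standalone sketch of the same shape (plain Rust, not rustc internals; the toy `Ty` enum below is invented purely for illustration):

#[derive(Debug, Clone, Copy, PartialEq)]
enum Ty {
    Box(&'static str),
    Other,
}

impl Ty {
    // Fallible accessor, like the new `Ty::boxed_ty`.
    fn boxed_ty(self) -> Option<&'static str> {
        match self {
            Ty::Box(inner) => Some(inner),
            Ty::Other => None,
        }
    }

    // Panicking wrapper built on top of it, like the new `Ty::expect_boxed_ty`.
    fn expect_boxed_ty(self) -> &'static str {
        self.boxed_ty()
            .unwrap_or_else(|| panic!("`expect_boxed_ty` is called on non-box type {:?}", self))
    }
}

fn main() {
    let ty = Ty::Box("str");
    // Call sites that wrote `ty.is_box() && <predicate on ty.boxed_ty()>` become:
    assert!(ty.boxed_ty().is_some_and(|inner| inner == "str"));
    // Call sites that have already established the type is a box use the panicking form:
    assert_eq!(ty.expect_boxed_ty(), "str");
}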
@@ -1323,7 +1328,7 @@ impl<'tcx> Ty<'tcx> {
 /// Some types -- notably unsafe ptrs -- can only be dereferenced explicitly.
 pub fn builtin_deref(self, explicit: bool) -> Option<Ty<'tcx>> {
 match *self.kind() {
-Adt(def, _) if def.is_box() => Some(self.boxed_ty()),
+_ if let Some(boxed) = self.boxed_ty() => Some(boxed),
 Ref(_, ty, _) => Some(ty),
 RawPtr(ty, _) if explicit => Some(ty),
 _ => None,
@@ -1628,7 +1628,7 @@ impl<'tcx> ExplicitSelf<'tcx> {
 _ if is_self_ty(self_arg_ty) => ByValue,
 ty::Ref(region, ty, mutbl) if is_self_ty(ty) => ByReference(region, mutbl),
 ty::RawPtr(ty, mutbl) if is_self_ty(ty) => ByRawPointer(mutbl),
-ty::Adt(def, _) if def.is_box() && is_self_ty(self_arg_ty.boxed_ty()) => ByBox,
+_ if self_arg_ty.boxed_ty().is_some_and(is_self_ty) => ByBox,
 _ => Other,
 }
 }
@@ -3,7 +3,7 @@ use std::collections::VecDeque;
 use rustc_data_structures::captures::Captures;
 use rustc_data_structures::fx::FxHashSet;
 use rustc_middle::mir;
-use rustc_span::Span;
+use rustc_span::{DesugaringKind, ExpnKind, MacroKind, Span};
 use tracing::{debug, debug_span, instrument};

 use crate::coverage::graph::{BasicCoverageBlock, CoverageGraph};
@@ -25,7 +25,7 @@ pub(super) fn extract_refined_covspans(

 // First, perform the passes that need macro information.
 covspans.sort_by(|a, b| basic_coverage_blocks.cmp_in_dominator_order(a.bcb, b.bcb));
-remove_unwanted_macro_spans(&mut covspans);
+remove_unwanted_expansion_spans(&mut covspans);
 split_visible_macro_spans(&mut covspans);

 // We no longer need the extra information in `SpanFromMir`, so convert to `Covspan`.
@@ -76,18 +76,24 @@ pub(super) fn extract_refined_covspans(
 /// invocation, which is unhelpful. Keeping only the first such span seems to
 /// give better mappings, so remove the others.
 ///
+/// Similarly, `await` expands to a branch on the discriminant of `Poll`, which
+/// leads to incorrect coverage if the `Future` is immediately ready (#98712).
+///
 /// (The input spans should be sorted in BCB dominator order, so that the
 /// retained "first" span is likely to dominate the others.)
-fn remove_unwanted_macro_spans(covspans: &mut Vec<SpanFromMir>) {
-let mut seen_macro_spans = FxHashSet::default();
-covspans.retain(|covspan| {
-// Ignore (retain) non-macro-expansion spans.
-if covspan.visible_macro.is_none() {
-return true;
-}
-
-// Retain only the first macro-expanded covspan with this span.
-seen_macro_spans.insert(covspan.span)
+fn remove_unwanted_expansion_spans(covspans: &mut Vec<SpanFromMir>) {
+let mut deduplicated_spans = FxHashSet::default();
+
+covspans.retain(|covspan| {
+match covspan.expn_kind {
+// Retain only the first await-related or macro-expanded covspan with this span.
+Some(ExpnKind::Desugaring(kind)) if kind == DesugaringKind::Await => {
+deduplicated_spans.insert(covspan.span)
+}
+Some(ExpnKind::Macro(MacroKind::Bang, _)) => deduplicated_spans.insert(covspan.span),
+// Ignore (retain) other spans.
+_ => true,
+}
 });
 }

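The new await-desugaring arm above is what makes coverage correct when a future is already ready at its first poll. A condensed version of the `tests/coverage/await_ready.rs` test added later in this rollup (the no-op-waker executor that drives it is omitted here; see the full test below):

// The future returned by `ready()` completes on its first poll, so the
// `.await` never suspends; with this change its span is still counted
// exactly once instead of producing duplicate or missing mappings.
async fn ready() -> u8 { 1 }

async fn await_ready() -> u8 {
    ready().await
}

fn main() {
    // Constructing the future is enough for this illustration; the real test
    // polls it to completion with a no-op waker executor.
    let _future = await_ready();
}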
@@ -99,7 +105,9 @@ fn split_visible_macro_spans(covspans: &mut Vec<SpanFromMir>) {
 let mut extra_spans = vec![];

 covspans.retain(|covspan| {
-let Some(visible_macro) = covspan.visible_macro else { return true };
+let Some(ExpnKind::Macro(MacroKind::Bang, visible_macro)) = covspan.expn_kind else {
+return true;
+};

 let split_len = visible_macro.as_str().len() as u32 + 1;
 let (before, after) = covspan.span.split_at(split_len);
@@ -111,8 +119,8 @@ fn split_visible_macro_spans(covspans: &mut Vec<SpanFromMir>) {
 return true;
 }

-extra_spans.push(SpanFromMir::new(before, covspan.visible_macro, covspan.bcb));
-extra_spans.push(SpanFromMir::new(after, covspan.visible_macro, covspan.bcb));
+extra_spans.push(SpanFromMir::new(before, covspan.expn_kind.clone(), covspan.bcb));
+extra_spans.push(SpanFromMir::new(after, covspan.expn_kind.clone(), covspan.bcb));
 false // Discard the original covspan that we just split.
 });

@@ -3,13 +3,13 @@ use rustc_middle::mir::coverage::CoverageKind;
 use rustc_middle::mir::{
 self, FakeReadCause, Statement, StatementKind, Terminator, TerminatorKind,
 };
-use rustc_span::{Span, Symbol};
+use rustc_span::{ExpnKind, Span};

 use crate::coverage::graph::{
 BasicCoverageBlock, BasicCoverageBlockData, CoverageGraph, START_BCB,
 };
 use crate::coverage::spans::Covspan;
-use crate::coverage::unexpand::unexpand_into_body_span_with_visible_macro;
+use crate::coverage::unexpand::unexpand_into_body_span_with_expn_kind;
 use crate::coverage::ExtractedHirInfo;

 pub(crate) struct ExtractedCovspans {
@@ -60,7 +60,7 @@ fn bcb_to_initial_coverage_spans<'a, 'tcx>(
 let data = &mir_body[bb];

 let unexpand = move |expn_span| {
-unexpand_into_body_span_with_visible_macro(expn_span, body_span)
+unexpand_into_body_span_with_expn_kind(expn_span, body_span)
 // Discard any spans that fill the entire body, because they tend
 // to represent compiler-inserted code, e.g. implicitly returning `()`.
 .filter(|(span, _)| !span.source_equal(body_span))

@@ -68,9 +68,9 @@ fn bcb_to_initial_coverage_spans<'a, 'tcx>(

 let mut extract_statement_span = |statement| {
 let expn_span = filtered_statement_span(statement)?;
-let (span, visible_macro) = unexpand(expn_span)?;
+let (span, expn_kind) = unexpand(expn_span)?;

-initial_covspans.push(SpanFromMir::new(span, visible_macro, bcb));
+initial_covspans.push(SpanFromMir::new(span, expn_kind, bcb));
 Some(())
 };
 for statement in data.statements.iter() {

@@ -79,9 +79,9 @@ fn bcb_to_initial_coverage_spans<'a, 'tcx>(

 let mut extract_terminator_span = |terminator| {
 let expn_span = filtered_terminator_span(terminator)?;
-let (span, visible_macro) = unexpand(expn_span)?;
+let (span, expn_kind) = unexpand(expn_span)?;

-initial_covspans.push(SpanFromMir::new(span, visible_macro, bcb));
+initial_covspans.push(SpanFromMir::new(span, expn_kind, bcb));
 Some(())
 };
 extract_terminator_span(data.terminator());
@@ -214,7 +214,7 @@ pub(crate) struct SpanFromMir {
 /// With the exception of `fn_sig_span`, this should always be contained
 /// within `body_span`.
 pub(crate) span: Span,
-pub(crate) visible_macro: Option<Symbol>,
+pub(crate) expn_kind: Option<ExpnKind>,
 pub(crate) bcb: BasicCoverageBlock,
 }

@@ -223,12 +223,12 @@ impl SpanFromMir {
 Self::new(fn_sig_span, None, START_BCB)
 }

-pub(crate) fn new(span: Span, visible_macro: Option<Symbol>, bcb: BasicCoverageBlock) -> Self {
-Self { span, visible_macro, bcb }
+pub(crate) fn new(span: Span, expn_kind: Option<ExpnKind>, bcb: BasicCoverageBlock) -> Self {
+Self { span, expn_kind, bcb }
 }

 pub(crate) fn into_covspan(self) -> Covspan {
-let Self { span, visible_macro: _, bcb } = self;
+let Self { span, expn_kind: _, bcb } = self;
 Covspan { span, bcb }
 }
 }
@@ -1,4 +1,4 @@
-use rustc_span::{ExpnKind, MacroKind, Span, Symbol};
+use rustc_span::{ExpnKind, Span};

 /// Walks through the expansion ancestors of `original_span` to find a span that
 /// is contained in `body_span` and has the same [syntax context] as `body_span`.

@@ -13,20 +13,15 @@ pub(crate) fn unexpand_into_body_span(original_span: Span, body_span: Span) -> O
 ///
 /// If the returned span represents a bang-macro invocation (e.g. `foo!(..)`),
 /// the returned symbol will be the name of that macro (e.g. `foo`).
-pub(crate) fn unexpand_into_body_span_with_visible_macro(
+pub(crate) fn unexpand_into_body_span_with_expn_kind(
 original_span: Span,
 body_span: Span,
-) -> Option<(Span, Option<Symbol>)> {
+) -> Option<(Span, Option<ExpnKind>)> {
 let (span, prev) = unexpand_into_body_span_with_prev(original_span, body_span)?;

-let visible_macro = prev
-.map(|prev| match prev.ctxt().outer_expn_data().kind {
-ExpnKind::Macro(MacroKind::Bang, name) => Some(name),
-_ => None,
-})
-.flatten();
+let expn_kind = prev.map(|prev| prev.ctxt().outer_expn_data().kind);

-Some((span, visible_macro))
+Some((span, expn_kind))
 }

 /// Walks through the expansion ancestors of `original_span` to find a span that
@@ -62,11 +62,13 @@ impl<'tcx, 'a> MutVisitor<'tcx> for ElaborateBoxDerefVisitor<'tcx, 'a> {
 let base_ty = self.local_decls[place.local].ty;

 // Derefer ensures that derefs are always the first projection
-if place.projection.first() == Some(&PlaceElem::Deref) && base_ty.is_box() {
+if let Some(PlaceElem::Deref) = place.projection.first()
+&& let Some(boxed_ty) = base_ty.boxed_ty()
+{
 let source_info = self.local_decls[place.local].source_info;

 let (unique_ty, nonnull_ty, ptr_ty) =
-build_ptr_tys(tcx, base_ty.boxed_ty(), self.unique_did, self.nonnull_did);
+build_ptr_tys(tcx, boxed_ty, self.unique_did, self.nonnull_did);

 let ptr_local = self.patch.new_temp(ptr_ty, source_info.span);

@@ -120,13 +122,15 @@ impl<'tcx> crate::MirPass<'tcx> for ElaborateBoxDerefs {
 for (base, elem) in place.iter_projections() {
 let base_ty = base.ty(&body.local_decls, tcx).ty;

-if elem == PlaceElem::Deref && base_ty.is_box() {
+if let PlaceElem::Deref = elem
+&& let Some(boxed_ty) = base_ty.boxed_ty()
+{
 // Clone the projections before us, since now we need to mutate them.
 let new_projections =
 new_projections.get_or_insert_with(|| base.projection.to_vec());

 let (unique_ty, nonnull_ty, ptr_ty) =
-build_ptr_tys(tcx, base_ty.boxed_ty(), unique_did, nonnull_did);
+build_ptr_tys(tcx, boxed_ty, unique_did, nonnull_did);

 new_projections.extend_from_slice(&build_projection(
 unique_ty, nonnull_ty, ptr_ty,
@@ -1041,8 +1041,11 @@ fn find_vtable_types_for_unsizing<'tcx>(
 match (source_ty.kind(), target_ty.kind()) {
 (&ty::Ref(_, a, _), &ty::Ref(_, b, _) | &ty::RawPtr(b, _))
 | (&ty::RawPtr(a, _), &ty::RawPtr(b, _)) => ptr_vtable(a, b),
-(&ty::Adt(def_a, _), &ty::Adt(def_b, _)) if def_a.is_box() && def_b.is_box() => {
-ptr_vtable(source_ty.boxed_ty(), target_ty.boxed_ty())
+(_, _)
+if let Some(source_boxed) = source_ty.boxed_ty()
+&& let Some(target_boxed) = target_ty.boxed_ty() =>
+{
+ptr_vtable(source_boxed, target_boxed)
 }

 // T as dyn* Trait
@@ -1,5 +1,7 @@
 // tidy-alphabetical-start
 #![feature(array_windows)]
+#![feature(if_let_guard)]
+#![feature(let_chains)]
 #![warn(unreachable_pub)]
 // tidy-alphabetical-end

@@ -348,8 +348,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> {
 }
 }
 if let Some(ty::error::ExpectedFound { found, .. }) = exp_found
-&& ty.is_box()
-&& ty.boxed_ty() == found
+&& ty.boxed_ty() == Some(found)
 && let Ok(snippet) = self.tcx.sess.source_map().span_to_snippet(span)
 {
 err.span_suggestion(
@@ -852,7 +852,7 @@ impl OsStr {

 /// Converts an `OsStr` to a <code>[Cow]<[str]></code>.
 ///
-/// Any non-Unicode sequences are replaced with
+/// Any non-UTF-8 sequences are replaced with
 /// [`U+FFFD REPLACEMENT CHARACTER`][U+FFFD].
 ///
 /// [U+FFFD]: crate::char::REPLACEMENT_CHARACTER
@@ -2200,7 +2200,7 @@ impl Path {

 /// Converts a `Path` to a [`Cow<str>`].
 ///
-/// Any non-Unicode sequences are replaced with
+/// Any non-UTF-8 sequences are replaced with
 /// [`U+FFFD REPLACEMENT CHARACTER`][U+FFFD].
 ///
 /// [U+FFFD]: super::char::REPLACEMENT_CHARACTER
@@ -228,7 +228,7 @@ impl Step for Rustc {
 self.override_build_kind.unwrap_or(builder.kind),
 );

-rustc_cargo(builder, &mut cargo, target, &compiler);
+rustc_cargo(builder, &mut cargo, target, &compiler, &self.crates);

 // For ./x.py clippy, don't run with --all-targets because
 // linting tests and benchmarks can produce very noisy results
@@ -197,7 +197,7 @@ impl Step for Rustc {
 Kind::Clippy,
 );

-rustc_cargo(builder, &mut cargo, target, &compiler);
+rustc_cargo(builder, &mut cargo, target, &compiler, &self.crates);

 // Explicitly pass -p for all compiler crates -- this will force cargo
 // to also lint the tests/benches/examples for these crates, rather
@@ -988,7 +988,7 @@ impl Step for Rustc {
 Kind::Build,
 );

-rustc_cargo(builder, &mut cargo, target, &compiler);
+rustc_cargo(builder, &mut cargo, target, &compiler, &self.crates);

 // NB: all RUSTFLAGS should be added to `rustc_cargo()` so they will be
 // consistently applied by check/doc/test modes too.
@@ -1047,10 +1047,11 @@ pub fn rustc_cargo(
 cargo: &mut Cargo,
 target: TargetSelection,
 compiler: &Compiler,
+crates: &[String],
 ) {
 cargo
 .arg("--features")
-.arg(builder.rustc_features(builder.kind, target))
+.arg(builder.rustc_features(builder.kind, target, crates))
 .arg("--manifest-path")
 .arg(builder.src.join("compiler/rustc/Cargo.toml"));

@@ -826,7 +826,7 @@ impl Step for Rustc {
 // see https://github.com/rust-lang/rust/pull/122066#issuecomment-1983049222
 // cargo.rustdocflag("--generate-link-to-definition");

-compile::rustc_cargo(builder, &mut cargo, target, &compiler);
+compile::rustc_cargo(builder, &mut cargo, target, &compiler, &self.crates);
 cargo.arg("-Zskip-rustdoc-fingerprint");

 // Only include compiler crates, no dependencies of those, such as `libc`.
@@ -2690,7 +2690,7 @@ impl Step for Crate {
 }
 }
 Mode::Rustc => {
-compile::rustc_cargo(builder, &mut cargo, target, &compiler);
+compile::rustc_cargo(builder, &mut cargo, target, &compiler, &self.crates);
 }
 _ => panic!("can only test libraries"),
 };
@@ -1,3 +1,4 @@
+use std::collections::BTreeMap;
 use std::path::PathBuf;

 use serde_derive::Deserialize;

@@ -21,6 +22,7 @@ struct Package {
 manifest_path: String,
 dependencies: Vec<Dependency>,
 targets: Vec<Target>,
+features: BTreeMap<String, Vec<String>>,
 }

 /// For more information, see the output of
@@ -51,7 +53,13 @@ pub fn build(build: &mut Build) {
 .map(|dep| dep.name)
 .collect();
 let has_lib = package.targets.iter().any(|t| t.kind.iter().any(|k| k == "lib"));
-let krate = Crate { name: name.clone(), deps, path, has_lib };
+let krate = Crate {
+name: name.clone(),
+deps,
+path,
+has_lib,
+features: package.features.keys().cloned().collect(),
+};
 let relative_path = krate.local_path(build);
 build.crates.insert(name.clone(), krate);
 let existing_path = build.crate_paths.insert(relative_path, name);
@@ -183,6 +183,7 @@ struct Crate {
 deps: HashSet<String>,
 path: PathBuf,
 has_lib: bool,
+features: Vec<String>,
 }

 impl Crate {
@@ -672,16 +673,24 @@ impl Build {
 }

 /// Gets the space-separated set of activated features for the compiler.
-fn rustc_features(&self, kind: Kind, target: TargetSelection) -> String {
+fn rustc_features(&self, kind: Kind, target: TargetSelection, crates: &[String]) -> String {
+let possible_features_by_crates: HashSet<_> = crates
+.iter()
+.flat_map(|krate| &self.crates[krate].features)
+.map(std::ops::Deref::deref)
+.collect();
+let check = |feature: &str| -> bool {
+crates.is_empty() || possible_features_by_crates.contains(feature)
+};
 let mut features = vec![];
-if self.config.jemalloc {
+if self.config.jemalloc && check("jemalloc") {
 features.push("jemalloc");
 }
-if self.config.llvm_enabled(target) || kind == Kind::Check {
+if (self.config.llvm_enabled(target) || kind == Kind::Check) && check("llvm") {
 features.push("llvm");
 }
 // keep in sync with `bootstrap/compile.rs:rustc_cargo_env`
-if self.config.rustc_parallel {
+if self.config.rustc_parallel && check("rustc_use_parallel_compiler") {
 features.push("rustc_use_parallel_compiler");
 }
 if self.config.rust_randomize_layout {

@@ -693,7 +702,7 @@ impl Build {
 // which is everything (including debug/trace/etc.)
 // if its unset, if debug_assertions is on, then debug_logging will also be on
 // as well as tracing *ignoring* this feature when debug_assertions is on
-if !self.config.rust_debug_logging && check("max_level_info") {
 features.push("max_level_info");
 }

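To make the effect of the new `check` closure concrete, here is a standalone sketch of the gating rule (plain Rust, not bootstrap code; the function, data, and feature names below are illustrative stand-ins): a feature stays active only if no crates were requested on the command line, or if at least one requested crate actually declares that feature.

use std::collections::HashSet;

fn rustc_features(requested_crates: &[(&str, &[&str])], jemalloc: bool, llvm: bool) -> String {
    // Union of the features declared by the crates named on the command line,
    // mirroring `possible_features_by_crates` in the diff above.
    let possible: HashSet<&str> =
        requested_crates.iter().flat_map(|(_, feats)| feats.iter().copied()).collect();
    let check = |feature: &str| requested_crates.is_empty() || possible.contains(feature);

    let mut features = vec![];
    if jemalloc && check("jemalloc") {
        features.push("jemalloc");
    }
    if llvm && check("llvm") {
        features.push("llvm");
    }
    features.join(" ")
}

fn main() {
    // A `./x.py check compiler/<crate>`-style invocation: only features the
    // named crate declares survive the filter.
    let crates = [("rustc_middle", &["rustc_use_parallel_compiler"][..])];
    assert_eq!(rustc_features(&crates, true, true), "");
    // No crates named: behave as before and activate everything configured.
    assert_eq!(rustc_features(&[], true, true), "jemalloc llvm");
}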
@@ -50,7 +50,7 @@ declare_clippy_lint! {
 }

 fn is_non_trait_box(ty: Ty<'_>) -> bool {
-ty.is_box() && !ty.boxed_ty().is_trait()
+ty.boxed_ty().is_some_and(|boxed| !boxed.is_trait())
 }

 struct EscapeDelegate<'a, 'tcx> {
@@ -191,8 +191,8 @@ impl<'a, 'tcx> Delegate<'tcx> for EscapeDelegate<'a, 'tcx> {
 impl<'a, 'tcx> EscapeDelegate<'a, 'tcx> {
 fn is_large_box(&self, ty: Ty<'tcx>) -> bool {
 // Large types need to be boxed to avoid stack overflows.
-if ty.is_box() {
-self.cx.layout_of(ty.boxed_ty()).map_or(0, |l| l.size.bytes()) > self.too_large_for_stack
+if let Some(boxed_ty) = ty.boxed_ty() {
+self.cx.layout_of(boxed_ty).map_or(0, |l| l.size.bytes()) > self.too_large_for_stack
 } else {
 false
 }
@@ -5187,8 +5187,8 @@ impl SelfKind {
 fn matches_value<'a>(cx: &LateContext<'a>, parent_ty: Ty<'a>, ty: Ty<'a>) -> bool {
 if ty == parent_ty {
 true
-} else if ty.is_box() {
-ty.boxed_ty() == parent_ty
+} else if let Some(boxed_ty) = ty.boxed_ty() {
+boxed_ty == parent_ty
 } else if is_type_diagnostic_item(cx, ty, sym::Rc) || is_type_diagnostic_item(cx, ty, sym::Arc) {
 if let ty::Adt(_, args) = ty.kind() {
 args.types().next().map_or(false, |t| t == parent_ty)
@@ -16,7 +16,7 @@ pub(super) fn derefs_to_slice<'tcx>(
 fn may_slice<'a>(cx: &LateContext<'a>, ty: Ty<'a>) -> bool {
 match ty.kind() {
 ty::Slice(_) => true,
-ty::Adt(def, _) if def.is_box() => may_slice(cx, ty.boxed_ty()),
+ty::Adt(..) if let Some(boxed) = ty.boxed_ty() => may_slice(cx, boxed),
 ty::Adt(..) => is_type_diagnostic_item(cx, ty, sym::Vec),
 ty::Array(_, size) => size.try_eval_target_usize(cx.tcx, cx.param_env).is_some(),
 ty::Ref(_, inner, _) => may_slice(cx, *inner),

@@ -33,7 +33,7 @@ pub(super) fn derefs_to_slice<'tcx>(
 } else {
 match ty.kind() {
 ty::Slice(_) => Some(expr),
-ty::Adt(def, _) if def.is_box() && may_slice(cx, ty.boxed_ty()) => Some(expr),
+_ if ty.boxed_ty().is_some_and(|boxed| may_slice(cx, boxed)) => Some(expr),
 ty::Ref(_, inner, _) => {
 if may_slice(cx, *inner) {
 Some(expr)
@@ -75,11 +75,9 @@ impl UnnecessaryBoxReturns {
 .instantiate_bound_regions_with_erased(cx.tcx.fn_sig(def_id).skip_binder())
 .output();

-if !return_ty.is_box() {
+let Some(boxed_ty) = return_ty.boxed_ty() else {
 return;
-}
+};

-let boxed_ty = return_ty.boxed_ty();
-
 // It's sometimes useful to return Box<T> if T is unsized, so don't lint those.
 // Also, don't lint if we know that T is very large, in which case returning
@@ -704,8 +704,8 @@ pub fn expr_sig<'tcx>(cx: &LateContext<'tcx>, expr: &Expr<'_>) -> Option<ExprFnS

 /// If the type is function like, get the signature for it.
 pub fn ty_sig<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>) -> Option<ExprFnSig<'tcx>> {
-if ty.is_box() {
-return ty_sig(cx, ty.boxed_ty());
+if let Some(boxed_ty) = ty.boxed_ty() {
+return ty_sig(cx, boxed_ty);
 }
 match *ty.kind() {
 ty::Closure(id, subs) => {
@@ -92,20 +92,18 @@ Number of file 0 mappings: 1
 - Code(Counter(0)) at (prev + 25, 1) to (start + 0, 23)

 Function name: async::g::{closure#0} (unused)
-Raw bytes (69): 0x[01, 01, 00, 0d, 00, 19, 17, 01, 0c, 00, 02, 09, 00, 0a, 00, 00, 0e, 00, 11, 00, 00, 12, 00, 17, 00, 00, 1b, 00, 1c, 00, 00, 20, 00, 22, 00, 01, 09, 00, 0a, 00, 00, 0e, 00, 11, 00, 00, 12, 00, 17, 00, 00, 1b, 00, 1c, 00, 00, 20, 00, 22, 00, 01, 0e, 00, 10, 00, 02, 01, 00, 02]
+Raw bytes (59): 0x[01, 01, 00, 0b, 00, 19, 17, 01, 0c, 00, 02, 09, 00, 0a, 00, 00, 0e, 00, 17, 00, 00, 1b, 00, 1c, 00, 00, 20, 00, 22, 00, 01, 09, 00, 0a, 00, 00, 0e, 00, 17, 00, 00, 1b, 00, 1c, 00, 00, 20, 00, 22, 00, 01, 0e, 00, 10, 00, 02, 01, 00, 02]
 Number of files: 1
 - file 0 => global file 1
 Number of expressions: 0
-Number of file 0 mappings: 13
+Number of file 0 mappings: 11
 - Code(Zero) at (prev + 25, 23) to (start + 1, 12)
 - Code(Zero) at (prev + 2, 9) to (start + 0, 10)
-- Code(Zero) at (prev + 0, 14) to (start + 0, 17)
-- Code(Zero) at (prev + 0, 18) to (start + 0, 23)
+- Code(Zero) at (prev + 0, 14) to (start + 0, 23)
 - Code(Zero) at (prev + 0, 27) to (start + 0, 28)
 - Code(Zero) at (prev + 0, 32) to (start + 0, 34)
 - Code(Zero) at (prev + 1, 9) to (start + 0, 10)
-- Code(Zero) at (prev + 0, 14) to (start + 0, 17)
-- Code(Zero) at (prev + 0, 18) to (start + 0, 23)
+- Code(Zero) at (prev + 0, 14) to (start + 0, 23)
 - Code(Zero) at (prev + 0, 27) to (start + 0, 28)
 - Code(Zero) at (prev + 0, 32) to (start + 0, 34)
 - Code(Zero) at (prev + 1, 14) to (start + 0, 16)

@@ -120,15 +118,14 @@ Number of file 0 mappings: 1
 - Code(Counter(0)) at (prev + 33, 1) to (start + 0, 22)

 Function name: async::h::{closure#0} (unused)
-Raw bytes (44): 0x[01, 01, 00, 08, 00, 21, 16, 03, 0c, 00, 04, 09, 00, 0a, 00, 00, 0e, 00, 13, 00, 00, 14, 00, 19, 00, 00, 1a, 00, 1b, 00, 00, 20, 00, 22, 00, 01, 0e, 00, 10, 00, 02, 01, 00, 02]
+Raw bytes (39): 0x[01, 01, 00, 07, 00, 21, 16, 03, 0c, 00, 04, 09, 00, 0a, 00, 00, 0e, 00, 19, 00, 00, 1a, 00, 1b, 00, 00, 20, 00, 22, 00, 01, 0e, 00, 10, 00, 02, 01, 00, 02]
 Number of files: 1
 - file 0 => global file 1
 Number of expressions: 0
-Number of file 0 mappings: 8
+Number of file 0 mappings: 7
 - Code(Zero) at (prev + 33, 22) to (start + 3, 12)
 - Code(Zero) at (prev + 4, 9) to (start + 0, 10)
-- Code(Zero) at (prev + 0, 14) to (start + 0, 19)
-- Code(Zero) at (prev + 0, 20) to (start + 0, 25)
+- Code(Zero) at (prev + 0, 14) to (start + 0, 25)
 - Code(Zero) at (prev + 0, 26) to (start + 0, 27)
 - Code(Zero) at (prev + 0, 32) to (start + 0, 34)
 - Code(Zero) at (prev + 1, 14) to (start + 0, 16)

@@ -143,28 +140,25 @@ Number of file 0 mappings: 1
 - Code(Counter(0)) at (prev + 42, 1) to (start + 0, 19)

 Function name: async::i::{closure#0}
-Raw bytes (78): 0x[01, 01, 02, 07, 21, 19, 1d, 0e, 01, 2a, 13, 04, 0c, 0d, 05, 09, 00, 0a, 01, 00, 0e, 00, 12, 05, 00, 13, 00, 18, 09, 00, 1c, 00, 21, 0d, 00, 27, 00, 2a, 15, 00, 2b, 00, 30, 1d, 01, 09, 00, 0a, 11, 00, 0e, 00, 11, 25, 00, 12, 00, 17, 29, 00, 1b, 00, 20, 1d, 00, 24, 00, 26, 21, 01, 0e, 00, 10, 03, 02, 01, 00, 02]
+Raw bytes (63): 0x[01, 01, 02, 07, 19, 11, 15, 0b, 01, 2a, 13, 04, 0c, 09, 05, 09, 00, 0a, 01, 00, 0e, 00, 18, 05, 00, 1c, 00, 21, 09, 00, 27, 00, 30, 15, 01, 09, 00, 0a, 0d, 00, 0e, 00, 17, 1d, 00, 1b, 00, 20, 15, 00, 24, 00, 26, 19, 01, 0e, 00, 10, 03, 02, 01, 00, 02]
 Number of files: 1
 - file 0 => global file 1
 Number of expressions: 2
-- expression 0 operands: lhs = Expression(1, Add), rhs = Counter(8)
-- expression 1 operands: lhs = Counter(6), rhs = Counter(7)
-Number of file 0 mappings: 14
+- expression 0 operands: lhs = Expression(1, Add), rhs = Counter(6)
+- expression 1 operands: lhs = Counter(4), rhs = Counter(5)
+Number of file 0 mappings: 11
 - Code(Counter(0)) at (prev + 42, 19) to (start + 4, 12)
-- Code(Counter(3)) at (prev + 5, 9) to (start + 0, 10)
-- Code(Counter(0)) at (prev + 0, 14) to (start + 0, 18)
-- Code(Counter(1)) at (prev + 0, 19) to (start + 0, 24)
-- Code(Counter(2)) at (prev + 0, 28) to (start + 0, 33)
-- Code(Counter(3)) at (prev + 0, 39) to (start + 0, 42)
-- Code(Counter(5)) at (prev + 0, 43) to (start + 0, 48)
-- Code(Counter(7)) at (prev + 1, 9) to (start + 0, 10)
-- Code(Counter(4)) at (prev + 0, 14) to (start + 0, 17)
-- Code(Counter(9)) at (prev + 0, 18) to (start + 0, 23)
-- Code(Counter(10)) at (prev + 0, 27) to (start + 0, 32)
-- Code(Counter(7)) at (prev + 0, 36) to (start + 0, 38)
-- Code(Counter(8)) at (prev + 1, 14) to (start + 0, 16)
+- Code(Counter(2)) at (prev + 5, 9) to (start + 0, 10)
+- Code(Counter(0)) at (prev + 0, 14) to (start + 0, 24)
+- Code(Counter(1)) at (prev + 0, 28) to (start + 0, 33)
+- Code(Counter(2)) at (prev + 0, 39) to (start + 0, 48)
+- Code(Counter(5)) at (prev + 1, 9) to (start + 0, 10)
+- Code(Counter(3)) at (prev + 0, 14) to (start + 0, 23)
+- Code(Counter(7)) at (prev + 0, 27) to (start + 0, 32)
+- Code(Counter(5)) at (prev + 0, 36) to (start + 0, 38)
+- Code(Counter(6)) at (prev + 1, 14) to (start + 0, 16)
 - Code(Expression(0, Add)) at (prev + 2, 1) to (start + 0, 2)
-= ((c6 + c7) + c8)
+= ((c4 + c5) + c6)

 Function name: async::j
 Raw bytes (58): 0x[01, 01, 02, 07, 0d, 05, 09, 0a, 01, 35, 01, 00, 0d, 01, 0b, 0b, 00, 0c, 05, 01, 09, 00, 0a, 01, 00, 0e, 00, 1b, 05, 00, 1f, 00, 27, 09, 01, 09, 00, 0a, 11, 00, 0e, 00, 1a, 09, 00, 1e, 00, 20, 0d, 01, 0e, 00, 10, 03, 02, 01, 00, 02]

@@ -45,9 +45,9 @@
 LL| 1| // executed asynchronously.
 LL| 1| match x {
 LL| 1| y if c(x).await == y + 1 => { d().await; }
-^0 ^0 ^0 ^0
+^0 ^0
 LL| 1| y if f().await == y + 1 => (),
-^0 ^0 ^0
+^0 ^0
 LL| 1| _ => (),
 LL| | }
 LL| 1|}
tests/coverage/await_ready.cov-map (new file, 25 lines)
@@ -0,0 +1,25 @@
+Function name: await_ready::await_ready
+Raw bytes (9): 0x[01, 01, 00, 01, 01, 0a, 01, 00, 1e]
+Number of files: 1
+- file 0 => global file 1
+Number of expressions: 0
+Number of file 0 mappings: 1
+- Code(Counter(0)) at (prev + 10, 1) to (start + 0, 30)
+
+Function name: await_ready::await_ready::{closure#0}
+Raw bytes (14): 0x[01, 01, 00, 02, 01, 0a, 1e, 03, 0f, 05, 04, 01, 00, 02]
+Number of files: 1
+- file 0 => global file 1
+Number of expressions: 0
+Number of file 0 mappings: 2
+- Code(Counter(0)) at (prev + 10, 30) to (start + 3, 15)
+- Code(Counter(1)) at (prev + 4, 1) to (start + 0, 2)
+
+Function name: await_ready::main
+Raw bytes (9): 0x[01, 01, 00, 01, 01, 10, 01, 03, 02]
+Number of files: 1
+- file 0 => global file 1
+Number of expressions: 0
+Number of file 0 mappings: 1
+- Code(Counter(0)) at (prev + 16, 1) to (start + 3, 2)
+
tests/coverage/await_ready.coverage (new file, 38 lines)
@@ -0,0 +1,38 @@
+LL| |#![feature(coverage_attribute)]
+LL| |#![feature(custom_inner_attributes)] // for #![rustfmt::skip]
+LL| |#![feature(noop_waker)]
+LL| |#![rustfmt::skip]
+LL| |//@ edition: 2021
+LL| |
+LL| |#[coverage(off)]
+LL| |async fn ready() -> u8 { 1 }
+LL| |
+LL| 1|async fn await_ready() -> u8 {
+LL| 1| // await should be covered even if the function never yields
+LL| 1| ready()
+LL| 1| .await
+LL| 1|}
+LL| |
+LL| 1|fn main() {
+LL| 1| let mut future = Box::pin(await_ready());
+LL| 1| executor::block_on(future.as_mut());
+LL| 1|}
+LL| |
+LL| |mod executor {
+LL| | use core::future::Future;
+LL| | use core::pin::pin;
+LL| | use core::task::{Context, Poll, Waker};
+LL| |
+LL| | #[coverage(off)]
+LL| | pub fn block_on<F: Future>(mut future: F) -> F::Output {
+LL| | let mut future = pin!(future);
+LL| | let mut context = Context::from_waker(Waker::noop());
+LL| |
+LL| | loop {
+LL| | if let Poll::Ready(val) = future.as_mut().poll(&mut context) {
+LL| | break val;
+LL| | }
+LL| | }
+LL| | }
+LL| |}
+
tests/coverage/await_ready.rs (new file, 37 lines)
@@ -0,0 +1,37 @@
+#![feature(coverage_attribute)]
+#![feature(custom_inner_attributes)] // for #![rustfmt::skip]
+#![feature(noop_waker)]
+#![rustfmt::skip]
+//@ edition: 2021
+
+#[coverage(off)]
+async fn ready() -> u8 { 1 }
+
+async fn await_ready() -> u8 {
+    // await should be covered even if the function never yields
+    ready()
+        .await
+}
+
+fn main() {
+    let mut future = Box::pin(await_ready());
+    executor::block_on(future.as_mut());
+}
+
+mod executor {
+    use core::future::Future;
+    use core::pin::pin;
+    use core::task::{Context, Poll, Waker};
+
+    #[coverage(off)]
+    pub fn block_on<F: Future>(mut future: F) -> F::Output {
+        let mut future = pin!(future);
+        let mut context = Context::from_waker(Waker::noop());
+
+        loop {
+            if let Poll::Ready(val) = future.as_mut().poll(&mut context) {
+                break val;
+            }
+        }
+    }
+}
@@ -0,0 +1,13 @@
+//@ check-pass
+//@ known-bug: #25860
+
+static UNIT: &'static &'static () = &&();
+
+fn foo<'a, 'b, T>(_: &'a &'b (), v: &'b T, _: &()) -> &'a T { v }
+
+fn bad<'a, T>(x: &'a T) -> &'static T {
+    let f: fn(_, &'a T, &()) -> &'static T = foo;
+    f(UNIT, x, &())
+}
+
+fn main() {}
@@ -0,0 +1,13 @@
+// Regression test for #129021.
+
+static UNIT: &'static &'static () = &&();
+
+fn foo<'a: 'a, 'b: 'b, T>(_: &'a &'b (), v: &'b T) -> &'a T { v }
+
+fn bad<'a, T>(x: &'a T) -> &'static T {
+    let f: fn(_, &'a T) -> &'static T = foo;
+    //~^ ERROR lifetime may not live long enough
+    f(UNIT, x)
+}
+
+fn main() {}
@@ -0,0 +1,10 @@
+error: lifetime may not live long enough
+  --> $DIR/implied-bounds-on-nested-references-plus-variance-early-bound.rs:8:12
+   |
+LL | fn bad<'a, T>(x: &'a T) -> &'static T {
+   |        -- lifetime `'a` defined here
+LL |     let f: fn(_, &'a T) -> &'static T = foo;
+   |            ^^^^^^^^^^^^^^^^^^^^^^^^^^ type annotation requires that `'a` must outlive `'static`
+
+error: aborting due to 1 previous error
+
@@ -0,0 +1,19 @@
+// Regression test for #129021.
+
+trait ToArg<T> {
+    type Arg;
+}
+impl<T, U> ToArg<T> for U {
+    type Arg = T;
+}
+
+fn extend_inner<'a, 'b>(x: &'a str) -> <&'b &'a () as ToArg<&'b str>>::Arg { x }
+fn extend<'a, 'b>(x: &'a str) -> &'b str {
+    (extend_inner as fn(_) -> _)(x)
+    //~^ ERROR lifetime may not live long enough
+}
+
+fn main() {
+    let y = extend(&String::from("Hello World"));
+    println!("{}", y);
+}
@@ -0,0 +1,14 @@
+error: lifetime may not live long enough
+  --> $DIR/implied-bounds-on-nested-references-plus-variance-unnormalized.rs:12:5
+   |
+LL | fn extend<'a, 'b>(x: &'a str) -> &'b str {
+   |           --  -- lifetime `'b` defined here
+   |           |
+   |           lifetime `'a` defined here
+LL |     (extend_inner as fn(_) -> _)(x)
+   |     ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ function was supposed to return data with lifetime `'b` but it is returning data with lifetime `'a`
+   |
+   = help: consider adding the following bound: `'a: 'b`
+
+error: aborting due to 1 previous error
+
@@ -1,8 +1,4 @@
-//@ check-pass
-//@ known-bug: #25860
+// Regression test for #129021.

-// Should fail. The combination of variance and implied bounds for nested
-// references allows us to infer a longer lifetime than we can prove.
-
 static UNIT: &'static &'static () = &&();

@@ -10,6 +6,7 @@ fn foo<'a, 'b, T>(_: &'a &'b (), v: &'b T) -> &'a T { v }

 fn bad<'a, T>(x: &'a T) -> &'static T {
 let f: fn(_, &'a T) -> &'static T = foo;
+//~^ ERROR lifetime may not live long enough
 f(UNIT, x)
 }

@@ -0,0 +1,10 @@
+error: lifetime may not live long enough
+  --> $DIR/implied-bounds-on-nested-references-plus-variance.rs:8:12
+   |
+LL | fn bad<'a, T>(x: &'a T) -> &'static T {
+   |        -- lifetime `'a` defined here
+LL |     let f: fn(_, &'a T) -> &'static T = foo;
+   |            ^^^^^^^^^^^^^^^^^^^^^^^^^^ type annotation requires that `'a` must outlive `'static`
+
+error: aborting due to 1 previous error
+