Auto merge of #123029 - matthiaskrgr:rollup-6qsevhx, r=matthiaskrgr

Rollup of 7 pull requests

Successful merges:

 - #122858 (Tweak `parse_dot_suffix_expr`)
 - #122982 (Add more comments to the bootstrap code that handles `tests/coverage`)
 - #122990 (Clarify transmute example)
 - #122995 (Clean up unnecessary headers/flags in coverage mir-opt tests)
 - #123003 (CFI: Handle dyn with no principal)
 - #123005 (CFI: Support complex receivers)
 - #123020 (Temporarily remove nnethercote from the review rotation.)

r? `@ghost`
`@rustbot` modify labels: rollup
bors 2024-03-25 12:00:21 +00:00
commit af98101ed8
16 changed files with 278 additions and 160 deletions

View File

@@ -4634,6 +4634,7 @@ dependencies = [
"rustc_session",
"rustc_span",
"rustc_target",
"rustc_trait_selection",
"tracing",
"twox-hash",
]

View File

@@ -16,7 +16,6 @@ use core::mem;
use core::ops::ControlFlow;
use rustc_ast::ptr::P;
use rustc_ast::token::{self, Delimiter, Token, TokenKind};
use rustc_ast::tokenstream::Spacing;
use rustc_ast::util::case::Case;
use rustc_ast::util::classify;
use rustc_ast::util::parser::{prec_let_scrutinee_needs_par, AssocOp, Fixity};
@@ -999,13 +998,57 @@ impl<'a> Parser<'a> {
}
pub fn parse_dot_suffix_expr(&mut self, lo: Span, base: P<Expr>) -> PResult<'a, P<Expr>> {
// At this point we've consumed something like `expr.` and `self.token` holds the token
// after the dot.
match self.token.uninterpolate().kind {
token::Ident(..) => self.parse_dot_suffix(base, lo),
token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) => {
Ok(self.parse_expr_tuple_field_access(lo, base, symbol, suffix, None))
let ident_span = self.token.span;
self.bump();
Ok(self.mk_expr_tuple_field_access(lo, ident_span, base, symbol, suffix))
}
token::Literal(token::Lit { kind: token::Float, symbol, suffix }) => {
Ok(self.parse_expr_tuple_field_access_float(lo, base, symbol, suffix))
Ok(match self.break_up_float(symbol, self.token.span) {
// 1e2
DestructuredFloat::Single(sym, _sp) => {
// `foo.1e2`: a single complete dot access, fully consumed. We end up with
// the `1e2` token in `self.prev_token` and the following token in
// `self.token`.
let ident_span = self.token.span;
self.bump();
self.mk_expr_tuple_field_access(lo, ident_span, base, sym, suffix)
}
// 1.
DestructuredFloat::TrailingDot(sym, ident_span, dot_span) => {
// `foo.1.`: a single complete dot access and the start of another.
// We end up with the `sym` (`1`) token in `self.prev_token` and a dot in
// `self.token`.
assert!(suffix.is_none());
self.token = Token::new(token::Ident(sym, IdentIsRaw::No), ident_span);
self.bump_with((Token::new(token::Dot, dot_span), self.token_spacing));
self.mk_expr_tuple_field_access(lo, ident_span, base, sym, None)
}
// 1.2 | 1.2e3
DestructuredFloat::MiddleDot(
sym1,
ident1_span,
_dot_span,
sym2,
ident2_span,
) => {
// `foo.1.2` (or `foo.1.2e3`): two complete dot accesses. We end up with
// the `sym2` (`2` or `2e3`) token in `self.prev_token` and the following
// token in `self.token`.
let next_token2 =
Token::new(token::Ident(sym2, IdentIsRaw::No), ident2_span);
self.bump_with((next_token2, self.token_spacing));
self.bump();
let base1 =
self.mk_expr_tuple_field_access(lo, ident1_span, base, sym1, None);
self.mk_expr_tuple_field_access(lo, ident2_span, base1, sym2, suffix)
}
DestructuredFloat::Error => base,
})
}
_ => {
self.error_unexpected_after_dot();
@@ -1119,41 +1162,6 @@ impl<'a> Parser<'a> {
}
}
fn parse_expr_tuple_field_access_float(
&mut self,
lo: Span,
base: P<Expr>,
float: Symbol,
suffix: Option<Symbol>,
) -> P<Expr> {
match self.break_up_float(float, self.token.span) {
// 1e2
DestructuredFloat::Single(sym, _sp) => {
self.parse_expr_tuple_field_access(lo, base, sym, suffix, None)
}
// 1.
DestructuredFloat::TrailingDot(sym, ident_span, dot_span) => {
assert!(suffix.is_none());
self.token = Token::new(token::Ident(sym, IdentIsRaw::No), ident_span);
let next_token = (Token::new(token::Dot, dot_span), self.token_spacing);
self.parse_expr_tuple_field_access(lo, base, sym, None, Some(next_token))
}
// 1.2 | 1.2e3
DestructuredFloat::MiddleDot(symbol1, ident1_span, dot_span, symbol2, ident2_span) => {
self.token = Token::new(token::Ident(symbol1, IdentIsRaw::No), ident1_span);
// This needs to be `Spacing::Alone` to prevent regressions.
// See issue #76399 and PR #76285 for more details
let next_token1 = (Token::new(token::Dot, dot_span), Spacing::Alone);
let base1 =
self.parse_expr_tuple_field_access(lo, base, symbol1, None, Some(next_token1));
let next_token2 = Token::new(token::Ident(symbol2, IdentIsRaw::No), ident2_span);
self.bump_with((next_token2, self.token_spacing)); // `.`
self.parse_expr_tuple_field_access(lo, base1, symbol2, suffix, None)
}
DestructuredFloat::Error => base,
}
}
/// Parse the field access used in offset_of, matched by `$(e:expr)+`.
/// Currently returns a list of idents. However, it should be possible in
/// future to also do array indices, which might be arbitrary expressions.
@@ -1255,24 +1263,18 @@ impl<'a> Parser<'a> {
Ok(fields.into_iter().collect())
}
fn parse_expr_tuple_field_access(
fn mk_expr_tuple_field_access(
&mut self,
lo: Span,
ident_span: Span,
base: P<Expr>,
field: Symbol,
suffix: Option<Symbol>,
next_token: Option<(Token, Spacing)>,
) -> P<Expr> {
match next_token {
Some(next_token) => self.bump_with(next_token),
None => self.bump(),
}
let span = self.prev_token.span;
let field = ExprKind::Field(base, Ident::new(field, span));
if let Some(suffix) = suffix {
self.expect_no_tuple_index_suffix(span, suffix);
self.expect_no_tuple_index_suffix(ident_span, suffix);
}
self.mk_expr(lo.to(span), field)
self.mk_expr(lo.to(ident_span), ExprKind::Field(base, Ident::new(field, ident_span)))
}
/// Parse a function call expression, `expr(...)`.
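For context on the change above, here is a minimal, self-contained sketch (not part of the diff; the tuple values are invented) of why the parser has to destructure float-looking tokens: in `x.1.1`, the lexer produces a single float literal `1.1`, but the expression is really two tuple-field accesses, which is the `DestructuredFloat::MiddleDot` case that now calls `mk_expr_tuple_field_access` twice.

```rust
fn main() {
    // Invented nested tuple, just to exercise the syntax.
    let x = (0, (10, 20));

    // `x.1e2` would be the `Single` case (a float with no dot) and `x.1.`
    // the `TrailingDot` case; the access below is the `MiddleDot` case:
    // the lexer hands the parser the float token `1.1`, which gets split
    // into the two field accesses `.1` and `.1`.
    assert_eq!(x.1.1, 20);

    // Written with parentheses, the same access is two ordinary field
    // expressions, which is what the parser builds internally.
    assert_eq!((x.1).1, 20);
}
```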

View File

@@ -15,6 +15,7 @@ rustc_middle = { path = "../rustc_middle" }
rustc_session = { path = "../rustc_session" }
rustc_span = { path = "../rustc_span" }
rustc_target = { path = "../rustc_target" }
rustc_trait_selection = { path = "../rustc_trait_selection" }
tracing = "0.1"
twox-hash = "1.6.3"
# tidy-alphabetical-end

View File

@@ -90,6 +90,7 @@
#![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
#![doc(rust_logo)]
#![feature(rustdoc_internals)]
#![feature(let_chains)]
#![allow(internal_features)]
#[macro_use]

View File

@@ -11,6 +11,7 @@ use rustc_data_structures::base_n;
use rustc_data_structures::fx::FxHashMap;
use rustc_hir as hir;
use rustc_middle::ty::layout::IntegerExt;
use rustc_middle::ty::TypeVisitableExt;
use rustc_middle::ty::{
self, Const, ExistentialPredicate, FloatTy, FnSig, Instance, IntTy, List, Region, RegionKind,
TermKind, Ty, TyCtxt, UintTy,
@@ -21,7 +22,9 @@ use rustc_span::sym;
use rustc_target::abi::call::{Conv, FnAbi, PassMode};
use rustc_target::abi::Integer;
use rustc_target::spec::abi::Abi;
use rustc_trait_selection::traits;
use std::fmt::Write as _;
use std::iter;
use crate::typeid::TypeIdOptions;
@@ -747,9 +750,8 @@ fn transform_predicates<'tcx>(
tcx: TyCtxt<'tcx>,
predicates: &List<ty::PolyExistentialPredicate<'tcx>>,
) -> &'tcx List<ty::PolyExistentialPredicate<'tcx>> {
let predicates: Vec<ty::PolyExistentialPredicate<'tcx>> = predicates
.iter()
.filter_map(|predicate| match predicate.skip_binder() {
tcx.mk_poly_existential_predicates_from_iter(predicates.iter().filter_map(|predicate| {
match predicate.skip_binder() {
ty::ExistentialPredicate::Trait(trait_ref) => {
let trait_ref = ty::TraitRef::identity(tcx, trait_ref.def_id);
Some(ty::Binder::dummy(ty::ExistentialPredicate::Trait(
@@ -758,9 +760,8 @@ }
}
ty::ExistentialPredicate::Projection(..) => None,
ty::ExistentialPredicate::AutoTrait(..) => Some(predicate),
})
.collect();
tcx.mk_poly_existential_predicates(&predicates)
}
}))
}
/// Transforms args for being encoded and used in the substitution dictionary.
@@ -1115,51 +1116,46 @@ pub fn typeid_for_instance<'tcx>(
instance.args = strip_receiver_auto(tcx, instance.args)
}
if let Some(impl_id) = tcx.impl_of_method(instance.def_id())
&& let Some(trait_ref) = tcx.impl_trait_ref(impl_id)
{
let impl_method = tcx.associated_item(instance.def_id());
let method_id = impl_method
.trait_item_def_id
.expect("Part of a trait implementation, but not linked to the def_id?");
let trait_method = tcx.associated_item(method_id);
if traits::is_vtable_safe_method(tcx, trait_ref.skip_binder().def_id, trait_method) {
// Trait methods will have a Self polymorphic parameter, whereas the concretized
// implementation will not. We need to walk back to the more general trait method.
let trait_ref = tcx.instantiate_and_normalize_erasing_regions(
instance.args,
ty::ParamEnv::reveal_all(),
trait_ref,
);
let invoke_ty = trait_object_ty(tcx, ty::Binder::dummy(trait_ref));
// At the call site, any call to this concrete function through a vtable will be
// `Virtual(method_id, idx)` with appropriate arguments for the method. Since we have the
// original method id, and we've recovered the trait arguments, we can make the callee
// instance we're computing the alias set for match the caller instance.
//
// Right now, our code ignores the vtable index everywhere, so we use 0 as a placeholder.
// If we ever *do* start encoding the vtable index, we will need to generate an alias set
// based on which vtables we are putting this method into, as there will be more than one
// index value when supertraits are involved.
instance.def = ty::InstanceDef::Virtual(method_id, 0);
let abstract_trait_args =
tcx.mk_args_trait(invoke_ty, trait_ref.args.into_iter().skip(1));
instance.args = instance.args.rebase_onto(tcx, impl_id, abstract_trait_args);
}
}
let fn_abi = tcx
.fn_abi_of_instance(tcx.param_env(instance.def_id()).and((instance, ty::List::empty())))
.unwrap_or_else(|instance| {
bug!("typeid_for_instance: couldn't get fn_abi of instance {:?}", instance)
});
// If this instance is a method and self is a reference, get the impl it belongs to
let impl_def_id = tcx.impl_of_method(instance.def_id());
if impl_def_id.is_some() && !fn_abi.args.is_empty() && fn_abi.args[0].layout.ty.is_ref() {
// If this impl is not an inherent impl, get the trait it implements
if let Some(trait_ref) = tcx.impl_trait_ref(impl_def_id.unwrap()) {
// Transform the concrete self into a reference to a trait object
let existential_predicate = trait_ref.map_bound(|trait_ref| {
ty::ExistentialPredicate::Trait(ty::ExistentialTraitRef::erase_self_ty(
tcx, trait_ref,
))
});
let existential_predicates = tcx.mk_poly_existential_predicates(&[ty::Binder::dummy(
existential_predicate.skip_binder(),
)]);
// Is the concrete self mutable?
let self_ty = if fn_abi.args[0].layout.ty.is_mutable_ptr() {
Ty::new_mut_ref(
tcx,
tcx.lifetimes.re_erased,
Ty::new_dynamic(tcx, existential_predicates, tcx.lifetimes.re_erased, ty::Dyn),
)
} else {
Ty::new_imm_ref(
tcx,
tcx.lifetimes.re_erased,
Ty::new_dynamic(tcx, existential_predicates, tcx.lifetimes.re_erased, ty::Dyn),
)
};
// Replace the concrete self in an fn_abi clone by the reference to a trait object
let mut fn_abi = fn_abi.clone();
// HACK(rcvalle): It is okay to not replace or update the entire ArgAbi here because the
// other fields are never used.
fn_abi.args[0].layout.ty = self_ty;
return typeid_for_fnabi(tcx, &fn_abi, options);
}
}
typeid_for_fnabi(tcx, fn_abi, options)
}
@@ -1171,14 +1167,50 @@ fn strip_receiver_auto<'tcx>(
let ty::Dynamic(preds, lifetime, kind) = ty.kind() else {
bug!("Tried to strip auto traits from non-dynamic type {ty}");
};
let filtered_preds =
if preds.principal().is_some() {
let new_rcvr = if preds.principal().is_some() {
let filtered_preds =
tcx.mk_poly_existential_predicates_from_iter(preds.into_iter().filter(|pred| {
!matches!(pred.skip_binder(), ty::ExistentialPredicate::AutoTrait(..))
}))
} else {
ty::List::empty()
};
let new_rcvr = Ty::new_dynamic(tcx, filtered_preds, *lifetime, *kind);
}));
Ty::new_dynamic(tcx, filtered_preds, *lifetime, *kind)
} else {
// If there's no principal type, re-encode the receiver as a unit, since we don't know
// anything about it. This technically discards the knowledge that it was a type that was
// made into a trait object at some point, but that's not much information.
tcx.types.unit
};
tcx.mk_args_trait(new_rcvr, args.into_iter().skip(1))
}
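As an aside (not from the diff), the `else` branch above handles trait objects that consist only of auto-trait bounds: once auto traits are stripped nothing remains, so the receiver is re-encoded as `()`. A minimal example of such a principal-less `dyn` type:

```rust
// `dyn Send + Sync` has no principal trait, only auto traits, yet dropping
// the box still goes through a vtable entry, so CFI needs *some* encoding
// for the receiver even in this case.
fn main() {
    let b: Box<dyn Send + Sync> = Box::new(5_u8);
    drop(b);
}
```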
fn trait_object_ty<'tcx>(tcx: TyCtxt<'tcx>, poly_trait_ref: ty::PolyTraitRef<'tcx>) -> Ty<'tcx> {
assert!(!poly_trait_ref.has_non_region_param());
let principal_pred = poly_trait_ref.map_bound(|trait_ref| {
ty::ExistentialPredicate::Trait(ty::ExistentialTraitRef::erase_self_ty(tcx, trait_ref))
});
let mut assoc_preds: Vec<_> = traits::supertraits(tcx, poly_trait_ref)
.flat_map(|super_poly_trait_ref| {
tcx.associated_items(super_poly_trait_ref.def_id())
.in_definition_order()
.filter(|item| item.kind == ty::AssocKind::Type)
.map(move |assoc_ty| {
super_poly_trait_ref.map_bound(|super_trait_ref| {
let alias_ty = ty::AliasTy::new(tcx, assoc_ty.def_id, super_trait_ref.args);
let resolved = tcx.normalize_erasing_regions(
ty::ParamEnv::reveal_all(),
alias_ty.to_ty(tcx),
);
ty::ExistentialPredicate::Projection(ty::ExistentialProjection {
def_id: assoc_ty.def_id,
args: ty::ExistentialTraitRef::erase_self_ty(tcx, super_trait_ref).args,
term: resolved.into(),
})
})
})
})
.collect();
assoc_preds.sort_by(|a, b| a.skip_binder().stable_cmp(tcx, &b.skip_binder()));
let preds = tcx.mk_poly_existential_predicates_from_iter(
iter::once(principal_pred).chain(assoc_preds.into_iter()),
);
Ty::new_dynamic(tcx, preds, tcx.lifetimes.re_erased, ty::Dyn)
}
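A minimal sketch (assumed trait and type names, not taken from the diff) of the call shape that `typeid_for_instance` is aligning here: an indirect call through `&dyn Trait` is checked against a type id derived from the abstract trait method, so the concrete impl method has to be relabeled to match it.

```rust
trait Trait {
    fn method(&self) -> u32;
}

struct Concrete;

impl Trait for Concrete {
    fn method(&self) -> u32 {
        42
    }
}

fn call_dyn(x: &dyn Trait) -> u32 {
    // Under `-Zsanitizer=cfi`, this virtual call site is checked against the
    // callee's CFI type id, which is computed from the `&dyn Trait` receiver
    // rather than from `&Concrete`.
    x.method()
}

fn main() {
    assert_eq!(call_dyn(&Concrete), 42);
}
```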

View File

@@ -1356,7 +1356,7 @@ extern "rust-intrinsic" {
/// let v_clone = v_orig.clone();
///
/// // This is the suggested, safe way.
/// // It does copy the entire vector, though, into a new array.
/// // It may copy the entire vector into a new one, though it also may not.
/// let v_collected = v_clone.into_iter()
/// .map(Some)
/// .collect::<Vec<Option<&i32>>>();
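For reference, a runnable version of the safe pattern recommended above (variable names borrowed from the doc example):

```rust
fn main() {
    let v_orig: Vec<&i32> = vec![&1, &2, &3];
    // Rebuild the vector instead of transmuting it. This may or may not end
    // up reallocating; the point is that it is guaranteed to be sound.
    let v_collected: Vec<Option<&i32>> = v_orig.into_iter().map(Some).collect();
    assert_eq!(v_collected, vec![Some(&1), Some(&2), Some(&3)]);
}
```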

View File

@@ -1289,9 +1289,9 @@ macro_rules! test_definitions {
/// Adapted from [`test_definitions`].
macro_rules! coverage_test_alias {
($name:ident {
alias_and_mode: $alias_and_mode:expr,
default: $default:expr,
only_hosts: $only_hosts:expr $(,)?
alias_and_mode: $alias_and_mode:expr, // &'static str
default: $default:expr, // bool
only_hosts: $only_hosts:expr $(,)? // bool
}) => {
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct $name {
@@ -1309,6 +1309,8 @@ macro_rules! coverage_test_alias {
const ONLY_HOSTS: bool = $only_hosts;
fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
// Register the mode name as a command-line alias.
// This allows `x test coverage-map` and `x test coverage-run`.
run.alias($alias_and_mode)
}
@@ -1319,8 +1321,7 @@ }
}
fn run(self, builder: &Builder<'_>) {
Coverage { compiler: self.compiler, target: self.target }
.run_unified_suite(builder, Self::MODE)
Coverage::run_coverage_tests(builder, self.compiler, self.target, Self::MODE);
}
}
};
@@ -1449,11 +1450,20 @@ host_test!(RunMakeFullDeps {
default_test!(Assembly { path: "tests/assembly", mode: "assembly", suite: "assembly" });
/// Custom test step that is responsible for running the coverage tests
/// in multiple different modes.
/// Coverage tests are a bit more complicated than other test suites, because
/// we want to run the same set of test files in multiple different modes,
/// in a way that's convenient and flexible when invoked manually.
///
/// Each individual mode also has its own alias that will run the tests in
/// just that mode.
/// This combined step runs the specified tests (or all of `tests/coverage`)
/// in both "coverage-map" and "coverage-run" modes.
///
/// Used by:
/// - `x test coverage`
/// - `x test tests/coverage`
/// - `x test tests/coverage/trivial.rs` (etc)
///
/// (Each individual mode also has its own step that will run the tests in
/// just that mode.)
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Coverage {
pub compiler: Compiler,
@@ -1464,24 +1474,41 @@ impl Coverage {
const PATH: &'static str = "tests/coverage";
const SUITE: &'static str = "coverage";
fn run_unified_suite(&self, builder: &Builder<'_>, mode: &'static str) {
/// Runs the coverage test suite (or a user-specified subset) in one mode.
///
/// This same function is used by the multi-mode step ([`Coverage`]) and by
/// the single-mode steps ([`CoverageMap`] and [`CoverageRun`]), to help
/// ensure that they all behave consistently with each other, regardless of
/// how the coverage tests have been invoked.
fn run_coverage_tests(
builder: &Builder<'_>,
compiler: Compiler,
target: TargetSelection,
mode: &'static str,
) {
// Like many other test steps, we delegate to a `Compiletest` step to
// actually run the tests. (See `test_definitions!`.)
builder.ensure(Compiletest {
compiler: self.compiler,
target: self.target,
compiler,
target,
mode,
suite: Self::SUITE,
path: Self::PATH,
compare_mode: None,
})
});
}
}
impl Step for Coverage {
type Output = ();
// We rely on the individual CoverageMap/CoverageRun steps to run themselves.
const DEFAULT: bool = false;
// When manually invoked, try to run as much as possible.
// Compiletest will automatically skip the "coverage-run" tests if necessary.
const ONLY_HOSTS: bool = false;
fn should_run(run: ShouldRun<'_>) -> ShouldRun<'_> {
// Take responsibility for command-line paths within `tests/coverage`.
run.suite_path(Self::PATH)
}
@@ -1492,20 +1519,26 @@ impl Step for Coverage {
}
fn run(self, builder: &Builder<'_>) {
self.run_unified_suite(builder, CoverageMap::MODE);
self.run_unified_suite(builder, CoverageRun::MODE);
// Run the specified coverage tests (possibly all of them) in both modes.
Self::run_coverage_tests(builder, self.compiler, self.target, CoverageMap::MODE);
Self::run_coverage_tests(builder, self.compiler, self.target, CoverageRun::MODE);
}
}
// Aliases for running the coverage tests in only one mode.
// Runs `tests/coverage` in "coverage-map" mode only.
// Used by `x test` and `x test coverage-map`.
coverage_test_alias!(CoverageMap {
alias_and_mode: "coverage-map",
default: true,
only_hosts: false,
});
// Runs `tests/coverage` in "coverage-run" mode only.
// Used by `x test` and `x test coverage-run`.
coverage_test_alias!(CoverageRun {
alias_and_mode: "coverage-run",
default: true,
// Compiletest knows how to automatically skip these tests when cross-compiling,
// but skipping the whole step here makes it clearer that they haven't run at all.
only_hosts: true,
});
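A stripped-down sketch (invented names, none of bootstrap's real types) of the step layout described above: one shared helper runs the suite in a given mode, the combined `coverage` step calls it for both modes, and each single-mode alias calls it once.

```rust
#[derive(Clone, Copy, Debug)]
enum CoverageMode {
    Map, // corresponds to "coverage-map"
    Run, // corresponds to "coverage-run"
}

// Stand-in for the shared `run_coverage_tests` helper; in bootstrap this
// delegates to a `Compiletest` step for `tests/coverage`.
fn run_coverage_tests(mode: CoverageMode) {
    println!("running tests/coverage in {mode:?} mode");
}

fn main() {
    // `x test coverage` (the combined step): same test files, both modes.
    for mode in [CoverageMode::Map, CoverageMode::Run] {
        run_coverage_tests(mode);
    }

    // `x test coverage-run` (a single-mode alias): one mode only.
    run_coverage_tests(CoverageMode::Run);
}
```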

View File

@@ -4,7 +4,7 @@
fn bar() -> bool {
let mut _0: bool;
+ coverage Code(Counter(0)) => /the/src/instrument_coverage.rs:21:1 - 23:2;
+ coverage Code(Counter(0)) => $DIR/instrument_coverage.rs:19:1 - 21:2;
+
bb0: {
+ Coverage::CounterIncrement(0);

View File

@@ -9,11 +9,11 @@
+ coverage ExpressionId(0) => Expression { lhs: Counter(0), op: Add, rhs: Counter(1) };
+ coverage ExpressionId(1) => Expression { lhs: Expression(0), op: Subtract, rhs: Counter(1) };
+ coverage Code(Counter(0)) => /the/src/instrument_coverage.rs:12:1 - 12:11;
+ coverage Code(Expression(0)) => /the/src/instrument_coverage.rs:13:5 - 14:17;
+ coverage Code(Expression(1)) => /the/src/instrument_coverage.rs:15:13 - 15:18;
+ coverage Code(Counter(1)) => /the/src/instrument_coverage.rs:16:10 - 16:11;
+ coverage Code(Expression(1)) => /the/src/instrument_coverage.rs:18:1 - 18:2;
+ coverage Code(Counter(0)) => $DIR/instrument_coverage.rs:10:1 - 10:11;
+ coverage Code(Expression(0)) => $DIR/instrument_coverage.rs:11:5 - 12:17;
+ coverage Code(Expression(1)) => $DIR/instrument_coverage.rs:13:13 - 13:18;
+ coverage Code(Counter(1)) => $DIR/instrument_coverage.rs:14:10 - 14:11;
+ coverage Code(Expression(1)) => $DIR/instrument_coverage.rs:16:1 - 16:2;
+
bb0: {
+ Coverage::CounterIncrement(0);

View File

@@ -1,11 +1,9 @@
// skip-filecheck
// Test that `-C instrument-coverage` injects Coverage statements. The Coverage Counter statements
// are later converted into LLVM instrprof.increment intrinsics, during codegen.
// Test that `-C instrument-coverage` injects Coverage statements.
// The Coverage::CounterIncrement statements are later converted into LLVM
// instrprof.increment intrinsics, during codegen.
//@ unit-test: InstrumentCoverage
//@ needs-profiler-support
//@ ignore-windows
//@ compile-flags: -C instrument-coverage --remap-path-prefix={{src-base}}=/the/src
//@ compile-flags: -Cinstrument-coverage -Zno-profiler-runtime
// EMIT_MIR instrument_coverage.main.InstrumentCoverage.diff
// EMIT_MIR instrument_coverage.bar.InstrumentCoverage.diff
@@ -22,17 +20,9 @@ fn bar() -> bool {
true
}
// Note that the MIR with injected coverage intrinsics includes references to source locations,
// including the source file absolute path. Typically, MIR pretty print output with file
// references are safe because the file prefixes are substituted with `$DIR`, but in this case
// the file references are encoded as function arguments, with an `Operand` type representation
// (`Slice` `Allocation` interned byte array) that cannot be normalized by simple substitution.
//
// The first workaround is to use the `SourceMap`-supported `--remap-path-prefix` option; however,
// the implementation of the `--remap-path-prefix` option currently joins the new prefix and the
// remaining source path with an OS-specific path separator (`\` on Windows). This difference still
// shows up in the byte array representation of the path, causing Windows tests to fail to match
// blessed results baselined with a `/` path separator.
//
// Since this `mir-opt` test does not have any significant platform dependencies, other than the
// path separator differences, the final workaround is to disable testing on Windows.
// CHECK: coverage ExpressionId({{[0-9]+}}) =>
// CHECK-DAG: coverage Code(Counter({{[0-9]+}})) =>
// CHECK-DAG: coverage Code(Expression({{[0-9]+}})) =>
// CHECK: bb0:
// CHECK-DAG: Coverage::ExpressionUsed({{[0-9]+}})
// CHECK-DAG: Coverage::CounterIncrement({{[0-9]+}})
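For orientation (not part of the diff), the instrumented source in these mir-opt tests is roughly the function below; compiled with `-Cinstrument-coverage`, the InstrumentCoverage MIR pass injects the `Coverage::CounterIncrement` / `Coverage::ExpressionUsed` statements that the CHECK lines above match, and codegen later lowers them to `llvm.instrprof.increment` calls.

```rust
// Simplified from the test file: a trivial function whose single basic block
// receives a `Coverage::CounterIncrement(0)` statement under
// `-Cinstrument-coverage`.
fn bar() -> bool {
    true
}

fn main() {
    assert!(bar());
}
```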

View File

@@ -5,15 +5,15 @@
let mut _0: ();
let mut _1: bool;
coverage branch { true: BlockMarkerId(0), false: BlockMarkerId(1) } => /the/src/instrument_coverage_cleanup.rs:15:8: 15:36 (#0)
coverage branch { true: BlockMarkerId(0), false: BlockMarkerId(1) } => $DIR/instrument_coverage_cleanup.rs:14:8: 14:36 (#0)
coverage ExpressionId(0) => Expression { lhs: Counter(0), op: Subtract, rhs: Counter(1) };
coverage ExpressionId(1) => Expression { lhs: Counter(1), op: Add, rhs: Expression(0) };
coverage Code(Counter(0)) => /the/src/instrument_coverage_cleanup.rs:14:1 - 15:36;
coverage Code(Expression(0)) => /the/src/instrument_coverage_cleanup.rs:15:37 - 15:39;
coverage Code(Counter(1)) => /the/src/instrument_coverage_cleanup.rs:15:39 - 15:40;
coverage Code(Expression(1)) => /the/src/instrument_coverage_cleanup.rs:16:1 - 16:2;
coverage Branch { true_term: Expression(0), false_term: Counter(1) } => /the/src/instrument_coverage_cleanup.rs:15:8 - 15:36;
coverage Code(Counter(0)) => $DIR/instrument_coverage_cleanup.rs:13:1 - 14:36;
coverage Code(Expression(0)) => $DIR/instrument_coverage_cleanup.rs:14:37 - 14:39;
coverage Code(Counter(1)) => $DIR/instrument_coverage_cleanup.rs:14:39 - 14:40;
coverage Code(Expression(1)) => $DIR/instrument_coverage_cleanup.rs:15:1 - 15:2;
coverage Branch { true_term: Expression(0), false_term: Counter(1) } => $DIR/instrument_coverage_cleanup.rs:14:8 - 14:36;
bb0: {
Coverage::CounterIncrement(0);

View File

@@ -5,15 +5,15 @@
let mut _0: ();
let mut _1: bool;
coverage branch { true: BlockMarkerId(0), false: BlockMarkerId(1) } => /the/src/instrument_coverage_cleanup.rs:15:8: 15:36 (#0)
coverage branch { true: BlockMarkerId(0), false: BlockMarkerId(1) } => $DIR/instrument_coverage_cleanup.rs:14:8: 14:36 (#0)
+ coverage ExpressionId(0) => Expression { lhs: Counter(0), op: Subtract, rhs: Counter(1) };
+ coverage ExpressionId(1) => Expression { lhs: Counter(1), op: Add, rhs: Expression(0) };
+ coverage Code(Counter(0)) => /the/src/instrument_coverage_cleanup.rs:14:1 - 15:36;
+ coverage Code(Expression(0)) => /the/src/instrument_coverage_cleanup.rs:15:37 - 15:39;
+ coverage Code(Counter(1)) => /the/src/instrument_coverage_cleanup.rs:15:39 - 15:40;
+ coverage Code(Expression(1)) => /the/src/instrument_coverage_cleanup.rs:16:1 - 16:2;
+ coverage Branch { true_term: Expression(0), false_term: Counter(1) } => /the/src/instrument_coverage_cleanup.rs:15:8 - 15:36;
+ coverage Code(Counter(0)) => $DIR/instrument_coverage_cleanup.rs:13:1 - 14:36;
+ coverage Code(Expression(0)) => $DIR/instrument_coverage_cleanup.rs:14:37 - 14:39;
+ coverage Code(Counter(1)) => $DIR/instrument_coverage_cleanup.rs:14:39 - 14:40;
+ coverage Code(Expression(1)) => $DIR/instrument_coverage_cleanup.rs:15:1 - 15:2;
+ coverage Branch { true_term: Expression(0), false_term: Counter(1) } => $DIR/instrument_coverage_cleanup.rs:14:8 - 14:36;
+
bb0: {
+ Coverage::CounterIncrement(0);

View File

@@ -7,7 +7,6 @@
//@ unit-test: InstrumentCoverage
//@ compile-flags: -Cinstrument-coverage -Zcoverage-options=branch -Zno-profiler-runtime
//@ compile-flags: --remap-path-prefix={{src-base}}=/the/src
// EMIT_MIR instrument_coverage_cleanup.main.InstrumentCoverage.diff
// EMIT_MIR instrument_coverage_cleanup.main.CleanupPostBorrowck.diff

View File

@@ -0,0 +1,42 @@
// Check that more complex receivers work:
// * Arc<dyn Foo>, as used for custom receivers
// * &dyn Bar<T=Baz> for type constraints
//@ needs-sanitizer-cfi
// FIXME(#122848) Remove only-linux once OSX CFI binaries work
//@ only-linux
//@ compile-flags: --crate-type=bin -Cprefer-dynamic=off -Clto -Zsanitizer=cfi
//@ compile-flags: -C target-feature=-crt-static -C codegen-units=1 -C opt-level=0
//@ run-pass
use std::sync::Arc;
trait Foo {
fn foo(self: Arc<Self>);
}
struct FooImpl;
impl Foo for FooImpl {
fn foo(self: Arc<Self>) {}
}
trait Bar {
type T;
fn bar(&self) -> Self::T;
}
struct BarImpl;
impl Bar for BarImpl {
type T = i32;
fn bar(&self) -> Self::T { 7 }
}
fn main() {
let foo: Arc<dyn Foo> = Arc::new(FooImpl);
foo.foo();
let bar: &dyn Bar<T=i32> = &BarImpl;
assert_eq!(bar.bar(), 7);
}

View File

@@ -0,0 +1,21 @@
// Check that dropping a trait object without a principal trait succeeds
//@ needs-sanitizer-cfi
// FIXME(#122848) Remove only-linux once OSX CFI binaries work
//@ only-linux
//@ compile-flags: --crate-type=bin -Cprefer-dynamic=off -Clto -Zsanitizer=cfi
//@ compile-flags: -C target-feature=-crt-static -C codegen-units=1 -C opt-level=0
// FIXME(#118761) Should be run-pass once the labels on drop are compatible.
// This test is being landed ahead of that to test that the compiler doesn't ICE while labeling the
// callsite for a drop, but the vtable doesn't have the correct label yet.
//@ build-pass
struct CustomDrop;
impl Drop for CustomDrop {
fn drop(&mut self) {}
}
fn main() {
let _ = Box::new(CustomDrop) as Box<dyn Send>;
}

View File

@@ -775,7 +775,6 @@ compiler-team = [
compiler-team-contributors = [
"@TaKO8Ki",
"@Nadrieril",
"@nnethercote",
"@fmease",
"@fee1-dead",
]
@@ -831,17 +830,14 @@ parser = [
"@compiler-errors",
"@davidtwco",
"@estebank",
"@nnethercote",
"@petrochenkov",
"@spastorino",
]
lexer = [
"@nnethercote",
"@petrochenkov",
"@estebank",
]
arena = [
"@nnethercote",
"@spastorino",
]
mir = [