Mirror of https://github.com/rust-lang/rust.git (synced 2024-11-22 23:04:33 +00:00)

Auto merge of #75037 - richkadel:llvm-coverage-map-gen-5.2, r=wesleywiser

Completes support for coverage in external crates

Follow-up to #74959: the prior PR corrected for errors encountered when trying to generate the coverage map on source code inlined from external crates (including macros and generics) by avoiding adding external DefIds to the coverage map. This made it possible to generate a coverage report that includes external crates, but the external crate coverage was incomplete (it did not include coverage for the DefIds that were eliminated).

The root issue was that the coverage map converted Span locations to source file and location data using the SourceMap for the current crate, and this did not work for spans from external crates (compiled with a different SourceMap). The solution is to convert the Spans to filename and location during MIR generation instead, so precompiled external crates already have the correct source code locations embedded in their MIR when they are imported into another crate.

@wesleywiser FYI
r? @tmandry

This commit is contained in: commit dab2ae0404
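The core of the change is doing the Span-to-location conversion while MIR is being built, so the file name and 1-based line/column numbers travel with the crate as plain literals. Below is a rough standalone model of that conversion, not the compiler's code: the real implementation uses the SourceMap via the `code_region` helper shown later in this diff, and `lookup_line_col` here is a made-up stand-in; the file path is taken from the test diff further down.

// Rough standalone model: turn a byte offset into (line, col) up front, so the
// values can be embedded as plain literals the way the MIR pass now does.
// `lookup_line_col` is a hypothetical stand-in for SourceMap::lookup_char_pos.
fn lookup_line_col(src: &str, byte_pos: usize) -> (u32, u32) {
    let mut line = 1;
    let mut col = 1;
    for (i, ch) in src.char_indices() {
        if i >= byte_pos {
            break;
        }
        if ch == '\n' {
            line += 1;
            col = 1;
        } else {
            col += 1;
        }
    }
    (line, col)
}

fn main() {
    let file_name = "/the/src/instrument_coverage.rs"; // path taken from the test diff below
    let src = "fn bar() -> bool {\n    true\n}\n";
    let (start_line, start_col) = lookup_line_col(src, 0);
    let (end_line, end_col) = lookup_line_col(src, src.len() - 1);
    // These literals, not raw byte positions, are what end up embedded in MIR.
    println!("{}:{}:{} - {}:{}", file_name, start_line, start_col, end_line, end_col);
}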
@@ -1950,15 +1950,20 @@ extern "rust-intrinsic" {
    pub fn ptr_offset_from<T>(ptr: *const T, base: *const T) -> isize;

    /// Internal placeholder for injecting code coverage counters when the "instrument-coverage"
    /// option is enabled. The placeholder is replaced with `llvm.instrprof.increment` during code
    /// generation.
    /// option is enabled. The source code region information is extracted prior to code generation,
    /// and added to the "coverage map", which is injected into the generated code as additional
    /// data. This intrinsic then triggers the generation of LLVM intrinsic call
    /// `instrprof.increment`, using the remaining args (`function_source_hash` and `index`).
    #[cfg(not(bootstrap))]
    #[lang = "count_code_region"]
    pub fn count_code_region(
        function_source_hash: u64,
        index: u32,
        start_byte_pos: u32,
        end_byte_pos: u32,
        file_name: &'static str,
        start_line: u32,
        start_col: u32,
        end_line: u32,
        end_col: u32,
    );

    /// Internal marker for code coverage expressions, injected into the MIR when the
@@ -1973,8 +1978,11 @@ extern "rust-intrinsic" {
        index: u32,
        left_index: u32,
        right_index: u32,
        start_byte_pos: u32,
        end_byte_pos: u32,
        file_name: &'static str,
        start_line: u32,
        start_col: u32,
        end_line: u32,
        end_col: u32,
    );

    /// This marker identifies a code region and two other counters or counter expressions
@@ -1986,14 +1994,24 @@ extern "rust-intrinsic" {
        index: u32,
        left_index: u32,
        right_index: u32,
        start_byte_pos: u32,
        end_byte_pos: u32,
        file_name: &'static str,
        start_line: u32,
        start_col: u32,
        end_line: u32,
        end_col: u32,
    );

    /// This marker identifies a code region to be added to the "coverage map" to indicate source
    /// code that can never be reached.
    /// (See `coverage_counter_add` for more information.)
    pub fn coverage_unreachable(start_byte_pos: u32, end_byte_pos: u32);
    #[cfg(not(bootstrap))]
    pub fn coverage_unreachable(
        file_name: &'static str,
        start_line: u32,
        start_col: u32,
        end_line: u32,
        end_col: u32,
    );

    /// See documentation of `<*const T>::guaranteed_eq` for details.
    #[rustc_const_unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
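With the new signatures, each injected intrinsic call carries its source region as literal arguments instead of raw byte positions. As an illustration only, and not the real intrinsic (which codegen replaces with `llvm.instrprof.increment`), a stand-in that merely returns the data each call now carries could look like this; the constant values mirror the blessed MIR output for `bar()` later in this commit:

// Illustrative stand-in only: shows the data shape, not the compiler behavior.
#[derive(Debug)]
struct Region {
    file_name: &'static str,
    start_line: u32,
    start_col: u32,
    end_line: u32,
    end_col: u32,
}

fn count_code_region(
    function_source_hash: u64,
    index: u32,
    file_name: &'static str,
    start_line: u32,
    start_col: u32,
    end_line: u32,
    end_col: u32,
) -> (u64, u32, Region) {
    (function_source_hash, index, Region { file_name, start_line, start_col, end_line, end_col })
}

fn main() {
    // Values mirror the MIR dump for `bar()` further below.
    let (hash, id, region) =
        count_code_region(10208505205182607101, 0, "/the/src/instrument_coverage.rs", 19, 18, 21, 2);
    println!("counter {} (hash {:#x}) covers {:?}", id, hash, region);
}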
@@ -92,7 +92,7 @@ impl CoverageMapGenerator {
    fn write_coverage_mappings(
        &mut self,
        expressions: Vec<CounterExpression>,
        counter_regions: impl Iterator<Item = (Counter, &'a Region)>,
        counter_regions: impl Iterator<Item = (Counter, &'tcx Region<'tcx>)>,
        coverage_mappings_buffer: &RustString,
    ) {
        let mut counter_regions = counter_regions.collect::<Vec<_>>();
@@ -102,7 +102,7 @@ impl CoverageMapGenerator {

        let mut virtual_file_mapping = Vec::new();
        let mut mapping_regions = Vec::new();
        let mut current_file_path = None;
        let mut current_file_name = None;
        let mut current_file_id = 0;

        // Convert the list of (Counter, Region) pairs to an array of `CounterMappingRegion`, sorted
@@ -112,22 +112,22 @@ impl CoverageMapGenerator {
        // `filenames` array.
        counter_regions.sort_unstable_by_key(|(_counter, region)| *region);
        for (counter, region) in counter_regions {
            let (file_path, start_line, start_col, end_line, end_col) = region.file_start_and_end();
            let same_file = current_file_path.as_ref().map_or(false, |p| p == file_path);
            let Region { file_name, start_line, start_col, end_line, end_col } = *region;
            let same_file = current_file_name.as_ref().map_or(false, |p| p == file_name);
            if !same_file {
                if current_file_path.is_some() {
                if current_file_name.is_some() {
                    current_file_id += 1;
                }
                current_file_path = Some(file_path.clone());
                let filename = CString::new(file_path.to_string_lossy().to_string())
                    .expect("null error converting filename to C string");
                debug!(" file_id: {} = '{:?}'", current_file_id, filename);
                let filenames_index = match self.filename_to_index.get(&filename) {
                current_file_name = Some(file_name.to_string());
                let c_filename =
                    CString::new(file_name).expect("null error converting filename to C string");
                debug!(" file_id: {} = '{:?}'", current_file_id, c_filename);
                let filenames_index = match self.filename_to_index.get(&c_filename) {
                    Some(index) => *index,
                    None => {
                        let index = self.filenames.len() as u32;
                        self.filenames.push(filename.clone());
                        self.filename_to_index.insert(filename.clone(), index);
                        self.filenames.push(c_filename.clone());
                        self.filename_to_index.insert(c_filename.clone(), index);
                        index
                    }
                };
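The rewritten loop above dedupes file names into a `filenames` table and relies on `Region`'s derived ordering to group rows by file. A minimal standalone sketch of that bookkeeping, with plain `String`s instead of interned `CString`s and simplified names that are not the real fields:

use std::collections::HashMap;

// Simplified stand-ins for the generator's `filenames` / `filename_to_index` fields.
fn filenames_index(
    filenames: &mut Vec<String>,
    filename_to_index: &mut HashMap<String, u32>,
    file_name: &str,
) -> u32 {
    match filename_to_index.get(file_name) {
        Some(index) => *index,
        None => {
            let index = filenames.len() as u32;
            filenames.push(file_name.to_string());
            filename_to_index.insert(file_name.to_string(), index);
            index
        }
    }
}

fn main() {
    // (file_name, start_line) pairs standing in for (Counter, Region); sorting by the
    // tuple groups regions from the same file, as the derived `Ord` on `Region` does.
    let mut regions = vec![("b.rs", 5u32), ("a.rs", 1), ("b.rs", 2), ("a.rs", 9)];
    regions.sort_unstable();

    let mut filenames = Vec::new();
    let mut filename_to_index = HashMap::new();
    for (file_name, start_line) in regions {
        let file_id = filenames_index(&mut filenames, &mut filename_to_index, file_name);
        println!("file_id {} ({}) line {}", file_id, file_name, start_line);
    }
    assert_eq!(filenames, vec!["a.rs".to_string(), "b.rs".to_string()]);
}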
@ -6,7 +6,7 @@ use crate::common::CodegenCx;
|
||||
use libc::c_uint;
|
||||
use llvm::coverageinfo::CounterMappingRegion;
|
||||
use log::debug;
|
||||
use rustc_codegen_ssa::coverageinfo::map::{CounterExpression, ExprKind, FunctionCoverage};
|
||||
use rustc_codegen_ssa::coverageinfo::map::{CounterExpression, ExprKind, FunctionCoverage, Region};
|
||||
use rustc_codegen_ssa::traits::{
|
||||
BaseTypeMethods, CoverageInfoBuilderMethods, CoverageInfoMethods, StaticMethods,
|
||||
};
|
||||
@ -49,19 +49,18 @@ impl CoverageInfoBuilderMethods<'tcx> for Builder<'a, 'll, 'tcx> {
|
||||
instance: Instance<'tcx>,
|
||||
function_source_hash: u64,
|
||||
id: u32,
|
||||
start_byte_pos: u32,
|
||||
end_byte_pos: u32,
|
||||
region: Region<'tcx>,
|
||||
) {
|
||||
debug!(
|
||||
"adding counter to coverage_regions: instance={:?}, function_source_hash={}, id={}, \
|
||||
byte range {}..{}",
|
||||
instance, function_source_hash, id, start_byte_pos, end_byte_pos,
|
||||
at {:?}",
|
||||
instance, function_source_hash, id, region,
|
||||
);
|
||||
let mut coverage_regions = self.coverage_context().function_coverage_map.borrow_mut();
|
||||
coverage_regions
|
||||
.entry(instance)
|
||||
.or_insert_with(|| FunctionCoverage::new(self.tcx, instance))
|
||||
.add_counter(function_source_hash, id, start_byte_pos, end_byte_pos);
|
||||
.add_counter(function_source_hash, id, region);
|
||||
}
|
||||
|
||||
fn add_counter_expression_region(
|
||||
@ -71,43 +70,30 @@ impl CoverageInfoBuilderMethods<'tcx> for Builder<'a, 'll, 'tcx> {
|
||||
lhs: u32,
|
||||
op: ExprKind,
|
||||
rhs: u32,
|
||||
start_byte_pos: u32,
|
||||
end_byte_pos: u32,
|
||||
region: Region<'tcx>,
|
||||
) {
|
||||
debug!(
|
||||
"adding counter expression to coverage_regions: instance={:?}, id={}, {} {:?} {}, \
|
||||
byte range {}..{}",
|
||||
instance, id_descending_from_max, lhs, op, rhs, start_byte_pos, end_byte_pos,
|
||||
at {:?}",
|
||||
instance, id_descending_from_max, lhs, op, rhs, region,
|
||||
);
|
||||
let mut coverage_regions = self.coverage_context().function_coverage_map.borrow_mut();
|
||||
coverage_regions
|
||||
.entry(instance)
|
||||
.or_insert_with(|| FunctionCoverage::new(self.tcx, instance))
|
||||
.add_counter_expression(
|
||||
id_descending_from_max,
|
||||
lhs,
|
||||
op,
|
||||
rhs,
|
||||
start_byte_pos,
|
||||
end_byte_pos,
|
||||
);
|
||||
.add_counter_expression(id_descending_from_max, lhs, op, rhs, region);
|
||||
}
|
||||
|
||||
fn add_unreachable_region(
|
||||
&mut self,
|
||||
instance: Instance<'tcx>,
|
||||
start_byte_pos: u32,
|
||||
end_byte_pos: u32,
|
||||
) {
|
||||
fn add_unreachable_region(&mut self, instance: Instance<'tcx>, region: Region<'tcx>) {
|
||||
debug!(
|
||||
"adding unreachable code to coverage_regions: instance={:?}, byte range {}..{}",
|
||||
instance, start_byte_pos, end_byte_pos,
|
||||
"adding unreachable code to coverage_regions: instance={:?}, at {:?}",
|
||||
instance, region,
|
||||
);
|
||||
let mut coverage_regions = self.coverage_context().function_coverage_map.borrow_mut();
|
||||
coverage_regions
|
||||
.entry(instance)
|
||||
.or_insert_with(|| FunctionCoverage::new(self.tcx, instance))
|
||||
.add_unreachable_region(start_byte_pos, end_byte_pos);
|
||||
.add_unreachable_region(region);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -13,7 +13,7 @@ use rustc_ast::ast;
|
||||
use rustc_codegen_ssa::base::{compare_simd_types, to_immediate, wants_msvc_seh};
|
||||
use rustc_codegen_ssa::common::span_invalid_monomorphization_error;
|
||||
use rustc_codegen_ssa::common::{IntPredicate, TypeKind};
|
||||
use rustc_codegen_ssa::coverageinfo::ExprKind;
|
||||
use rustc_codegen_ssa::coverageinfo;
|
||||
use rustc_codegen_ssa::glue;
|
||||
use rustc_codegen_ssa::mir::operand::{OperandRef, OperandValue};
|
||||
use rustc_codegen_ssa::mir::place::PlaceRef;
|
||||
@ -93,10 +93,6 @@ impl IntrinsicCallMethods<'tcx> for Builder<'a, 'll, 'tcx> {
|
||||
let mut is_codegen_intrinsic = true;
|
||||
// Set `is_codegen_intrinsic` to `false` to bypass `codegen_intrinsic_call()`.
|
||||
|
||||
if self.tcx.sess.opts.debugging_opts.instrument_coverage {
|
||||
// If the intrinsic is from the local MIR, add the coverage information to the Codegen
|
||||
// context, to be encoded into the local crate's coverage map.
|
||||
if caller_instance.def_id().is_local() {
|
||||
// FIXME(richkadel): Make sure to add coverage analysis tests on a crate with
|
||||
// external crate dependencies, where:
|
||||
// 1. Both binary and dependent crates are compiled with `-Zinstrument-coverage`
|
||||
@ -109,49 +105,53 @@ impl IntrinsicCallMethods<'tcx> for Builder<'a, 'll, 'tcx> {
|
||||
caller_instance,
|
||||
op_to_u64(&args[FUNCTION_SOURCE_HASH]),
|
||||
op_to_u32(&args[COUNTER_ID]),
|
||||
op_to_u32(&args[START_BYTE_POS]),
|
||||
op_to_u32(&args[END_BYTE_POS]),
|
||||
coverageinfo::Region::new(
|
||||
op_to_str_slice(&args[FILE_NAME]),
|
||||
op_to_u32(&args[START_LINE]),
|
||||
op_to_u32(&args[START_COL]),
|
||||
op_to_u32(&args[END_LINE]),
|
||||
op_to_u32(&args[END_COL]),
|
||||
),
|
||||
);
|
||||
}
|
||||
sym::coverage_counter_add | sym::coverage_counter_subtract => {
|
||||
is_codegen_intrinsic = false;
|
||||
use coverage::coverage_counter_expression_args::*;
|
||||
self.add_counter_expression_region(
|
||||
caller_instance,
|
||||
op_to_u32(&args[EXPRESSION_ID]),
|
||||
op_to_u32(&args[LEFT_ID]),
|
||||
if intrinsic == sym::coverage_counter_add {
|
||||
ExprKind::Add
|
||||
coverageinfo::ExprKind::Add
|
||||
} else {
|
||||
ExprKind::Subtract
|
||||
coverageinfo::ExprKind::Subtract
|
||||
},
|
||||
op_to_u32(&args[RIGHT_ID]),
|
||||
op_to_u32(&args[START_BYTE_POS]),
|
||||
op_to_u32(&args[END_BYTE_POS]),
|
||||
coverageinfo::Region::new(
|
||||
op_to_str_slice(&args[FILE_NAME]),
|
||||
op_to_u32(&args[START_LINE]),
|
||||
op_to_u32(&args[START_COL]),
|
||||
op_to_u32(&args[END_LINE]),
|
||||
op_to_u32(&args[END_COL]),
|
||||
),
|
||||
);
|
||||
}
|
||||
sym::coverage_unreachable => {
|
||||
is_codegen_intrinsic = false;
|
||||
use coverage::coverage_unreachable_args::*;
|
||||
self.add_unreachable_region(
|
||||
caller_instance,
|
||||
op_to_u32(&args[START_BYTE_POS]),
|
||||
op_to_u32(&args[END_BYTE_POS]),
|
||||
coverageinfo::Region::new(
|
||||
op_to_str_slice(&args[FILE_NAME]),
|
||||
op_to_u32(&args[START_LINE]),
|
||||
op_to_u32(&args[START_COL]),
|
||||
op_to_u32(&args[END_LINE]),
|
||||
op_to_u32(&args[END_COL]),
|
||||
),
|
||||
);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
// Only the `count_code_region` coverage intrinsic is translated into an actual LLVM
|
||||
// intrinsic call (local or not); otherwise, set `is_codegen_intrinsic` to `false`.
|
||||
match intrinsic {
|
||||
sym::coverage_counter_add
|
||||
| sym::coverage_counter_subtract
|
||||
| sym::coverage_unreachable => {
|
||||
is_codegen_intrinsic = false;
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
is_codegen_intrinsic
|
||||
}
|
||||
|
||||
@ -215,9 +215,6 @@ impl IntrinsicCallMethods<'tcx> for Builder<'a, 'll, 'tcx> {
|
||||
self.call(llfn, &[], None)
|
||||
}
|
||||
sym::count_code_region => {
|
||||
// FIXME(richkadel): The current implementation assumes the MIR for the given
|
||||
// caller_instance represents a single function. Validate and/or correct if inlining
|
||||
// and/or monomorphization invalidates these assumptions.
|
||||
let coverageinfo = tcx.coverageinfo(caller_instance.def_id());
|
||||
let mangled_fn = tcx.symbol_name(caller_instance);
|
||||
let (mangled_fn_name, _len_val) = self.const_str(Symbol::intern(mangled_fn.name));
|
||||
@ -2243,6 +2240,10 @@ fn float_type_width(ty: Ty<'_>) -> Option<u64> {
|
||||
}
|
||||
}
|
||||
|
||||
fn op_to_str_slice<'tcx>(op: &Operand<'tcx>) -> &'tcx str {
|
||||
Operand::value_from_const(op).try_to_str_slice().expect("Value is &str")
|
||||
}
|
||||
|
||||
fn op_to_u32<'tcx>(op: &Operand<'tcx>) -> u32 {
|
||||
Operand::scalar_from_const(op).to_u32().expect("Scalar is u32")
|
||||
}
|
||||
|
@ -3,12 +3,8 @@ pub use super::ffi::*;
|
||||
use rustc_index::vec::IndexVec;
|
||||
use rustc_middle::ty::Instance;
|
||||
use rustc_middle::ty::TyCtxt;
|
||||
use rustc_span::source_map::{Pos, SourceMap};
|
||||
use rustc_span::{BytePos, FileName, Loc, RealFileName};
|
||||
|
||||
use std::cmp::{Ord, Ordering};
|
||||
use std::fmt;
|
||||
use std::path::PathBuf;
|
||||
use std::cmp::Ord;
|
||||
|
||||
rustc_index::newtype_index! {
|
||||
pub struct ExpressionOperandId {
|
||||
@ -38,127 +34,35 @@ rustc_index::newtype_index! {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct Region {
|
||||
start: Loc,
|
||||
end: Loc,
|
||||
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
|
||||
pub struct Region<'tcx> {
|
||||
pub file_name: &'tcx str,
|
||||
pub start_line: u32,
|
||||
pub start_col: u32,
|
||||
pub end_line: u32,
|
||||
pub end_col: u32,
|
||||
}
|
||||
|
||||
impl Ord for Region {
|
||||
fn cmp(&self, other: &Self) -> Ordering {
|
||||
(&self.start.file.name, &self.start.line, &self.start.col, &self.end.line, &self.end.col)
|
||||
.cmp(&(
|
||||
&other.start.file.name,
|
||||
&other.start.line,
|
||||
&other.start.col,
|
||||
&other.end.line,
|
||||
&other.end.col,
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialOrd for Region {
|
||||
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
|
||||
Some(self.cmp(other))
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq for Region {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
self.start.file.name == other.start.file.name
|
||||
&& self.start.line == other.start.line
|
||||
&& self.start.col == other.start.col
|
||||
&& self.end.line == other.end.line
|
||||
&& self.end.col == other.end.col
|
||||
}
|
||||
}
|
||||
|
||||
impl Eq for Region {}
|
||||
|
||||
impl fmt::Display for Region {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
let (file_path, start_line, start_col, end_line, end_col) = self.file_start_and_end();
|
||||
write!(f, "{:?}:{}:{} - {}:{}", file_path, start_line, start_col, end_line, end_col)
|
||||
}
|
||||
}
|
||||
|
||||
impl Region {
|
||||
pub fn new(source_map: &SourceMap, start_byte_pos: u32, end_byte_pos: u32) -> Self {
|
||||
let start = source_map.lookup_char_pos(BytePos::from_u32(start_byte_pos));
|
||||
let end = source_map.lookup_char_pos(BytePos::from_u32(end_byte_pos));
|
||||
assert_eq!(
|
||||
start.file.name, end.file.name,
|
||||
"Region start ({} -> {:?}) and end ({} -> {:?}) don't come from the same source file!",
|
||||
start_byte_pos, start, end_byte_pos, end
|
||||
);
|
||||
Self { start, end }
|
||||
}
|
||||
|
||||
pub fn file_start_and_end<'a>(&'a self) -> (&'a PathBuf, u32, u32, u32, u32) {
|
||||
let start = &self.start;
|
||||
let end = &self.end;
|
||||
match &start.file.name {
|
||||
FileName::Real(RealFileName::Named(path)) => (
|
||||
path,
|
||||
start.line as u32,
|
||||
start.col.to_u32() + 1,
|
||||
end.line as u32,
|
||||
end.col.to_u32() + 1,
|
||||
),
|
||||
_ => {
|
||||
bug!("start.file.name should be a RealFileName, but it was: {:?}", start.file.name)
|
||||
}
|
||||
}
|
||||
impl<'tcx> Region<'tcx> {
|
||||
pub fn new(
|
||||
file_name: &'tcx str,
|
||||
start_line: u32,
|
||||
start_col: u32,
|
||||
end_line: u32,
|
||||
end_col: u32,
|
||||
) -> Self {
|
||||
Self { file_name, start_line, start_col, end_line, end_col }
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct ExpressionRegion {
|
||||
pub struct ExpressionRegion<'tcx> {
|
||||
lhs: ExpressionOperandId,
|
||||
op: ExprKind,
|
||||
rhs: ExpressionOperandId,
|
||||
region: Region,
|
||||
region: Region<'tcx>,
|
||||
}
|
||||
|
||||
// FIXME(richkadel): There seems to be a problem computing the file location in
|
||||
// some cases. I need to investigate this more. When I generate and show coverage
|
||||
// for the example binary in the crates.io crate `json5format`, I had a couple of
|
||||
// notable problems:
|
||||
//
|
||||
// 1. I saw a lot of coverage spans in `llvm-cov show` highlighting regions in
|
||||
// various comments (not corresponding to rustdoc code), indicating a possible
|
||||
// problem with the byte_pos-to-source-map implementation.
|
||||
//
|
||||
// 2. And (perhaps not related) when I build the aforementioned example binary with:
|
||||
// `RUST_FLAGS="-Zinstrument-coverage" cargo build --example formatjson5`
|
||||
// and then run that binary with
|
||||
// `LLVM_PROFILE_FILE="formatjson5.profraw" ./target/debug/examples/formatjson5 \
|
||||
// some.json5` for some reason the binary generates *TWO* `.profraw` files. One
|
||||
// named `default.profraw` and the other named `formatjson5.profraw` (the expected
|
||||
// name, in this case).
|
||||
//
|
||||
// 3. I think that if I eliminate regions within a function, their region_ids,
|
||||
// referenced in expressions, will be wrong? I think the ids are implied by their
|
||||
// array position in the final coverage map output (IIRC).
|
||||
//
|
||||
// 4. I suspect a problem (if not the only problem) is the SourceMap is wrong for some
|
||||
// region start/end byte positions. Just like I couldn't get the function hash at
|
||||
// intrinsic codegen time for external crate functions, I think the SourceMap I
|
||||
// have here only applies to the local crate, and I know I have coverages that
|
||||
// reference external crates.
|
||||
//
|
||||
// I still don't know if I fixed the hash problem correctly. If external crates
|
||||
// implement the function, can't I use the coverage counters already compiled
|
||||
// into those external crates? (Maybe not for generics and/or maybe not for
|
||||
// macros... not sure. But I need to understand this better.)
|
||||
//
|
||||
// If the byte range conversion is wrong, fix it. But if it
|
||||
// is right, then it is possible for the start and end to be in different files.
|
||||
// Can I do something other than ignore coverages that span multiple files?
|
||||
//
|
||||
// If I can resolve this, remove the "Option<>" result type wrapper
|
||||
// `regions_in_file_order()` accordingly.
|
||||
|
||||
/// Collects all of the coverage regions associated with (a) injected counters, (b) counter
|
||||
/// expressions (additions or subtraction), and (c) unreachable regions (always counted as zero),
|
||||
/// for a given Function. Counters and counter expressions have non-overlapping `id`s because they
|
||||
@ -171,19 +75,17 @@ pub struct ExpressionRegion {
|
||||
/// only whitespace or comments). According to LLVM Code Coverage Mapping documentation, "A count
|
||||
/// for a gap area is only used as the line execution count if there are no other regions on a
|
||||
/// line."
|
||||
pub struct FunctionCoverage<'a> {
|
||||
source_map: &'a SourceMap,
|
||||
pub struct FunctionCoverage<'tcx> {
|
||||
source_hash: u64,
|
||||
counters: IndexVec<CounterValueReference, Option<Region>>,
|
||||
expressions: IndexVec<InjectedExpressionIndex, Option<ExpressionRegion>>,
|
||||
unreachable_regions: Vec<Region>,
|
||||
counters: IndexVec<CounterValueReference, Option<Region<'tcx>>>,
|
||||
expressions: IndexVec<InjectedExpressionIndex, Option<ExpressionRegion<'tcx>>>,
|
||||
unreachable_regions: Vec<Region<'tcx>>,
|
||||
}
|
||||
|
||||
impl<'a> FunctionCoverage<'a> {
|
||||
pub fn new<'tcx: 'a>(tcx: TyCtxt<'tcx>, instance: Instance<'tcx>) -> Self {
|
||||
impl<'tcx> FunctionCoverage<'tcx> {
|
||||
pub fn new(tcx: TyCtxt<'tcx>, instance: Instance<'tcx>) -> Self {
|
||||
let coverageinfo = tcx.coverageinfo(instance.def_id());
|
||||
Self {
|
||||
source_map: tcx.sess.source_map(),
|
||||
source_hash: 0, // will be set with the first `add_counter()`
|
||||
counters: IndexVec::from_elem_n(None, coverageinfo.num_counters as usize),
|
||||
expressions: IndexVec::from_elem_n(None, coverageinfo.num_expressions as usize),
|
||||
@ -194,20 +96,14 @@ impl<'a> FunctionCoverage<'a> {
|
||||
/// Adds a code region to be counted by an injected counter intrinsic.
|
||||
/// The source_hash (computed during coverage instrumentation) should also be provided, and
|
||||
/// should be the same for all counters in a given function.
|
||||
pub fn add_counter(
|
||||
&mut self,
|
||||
source_hash: u64,
|
||||
id: u32,
|
||||
start_byte_pos: u32,
|
||||
end_byte_pos: u32,
|
||||
) {
|
||||
pub fn add_counter(&mut self, source_hash: u64, id: u32, region: Region<'tcx>) {
|
||||
if self.source_hash == 0 {
|
||||
self.source_hash = source_hash;
|
||||
} else {
|
||||
debug_assert_eq!(source_hash, self.source_hash);
|
||||
}
|
||||
self.counters[CounterValueReference::from(id)]
|
||||
.replace(Region::new(self.source_map, start_byte_pos, end_byte_pos))
|
||||
.replace(region)
|
||||
.expect_none("add_counter called with duplicate `id`");
|
||||
}
|
||||
|
||||
@ -231,8 +127,7 @@ impl<'a> FunctionCoverage<'a> {
|
||||
lhs: u32,
|
||||
op: ExprKind,
|
||||
rhs: u32,
|
||||
start_byte_pos: u32,
|
||||
end_byte_pos: u32,
|
||||
region: Region<'tcx>,
|
||||
) {
|
||||
let expression_id = ExpressionOperandId::from(id_descending_from_max);
|
||||
let lhs = ExpressionOperandId::from(lhs);
|
||||
@ -240,18 +135,13 @@ impl<'a> FunctionCoverage<'a> {
|
||||
|
||||
let expression_index = self.expression_index(expression_id);
|
||||
self.expressions[expression_index]
|
||||
.replace(ExpressionRegion {
|
||||
lhs,
|
||||
op,
|
||||
rhs,
|
||||
region: Region::new(self.source_map, start_byte_pos, end_byte_pos),
|
||||
})
|
||||
.replace(ExpressionRegion { lhs, op, rhs, region })
|
||||
.expect_none("add_counter_expression called with duplicate `id_descending_from_max`");
|
||||
}
|
||||
|
||||
/// Add a region that will be marked as "unreachable", with a constant "zero counter".
|
||||
pub fn add_unreachable_region(&mut self, start_byte_pos: u32, end_byte_pos: u32) {
|
||||
self.unreachable_regions.push(Region::new(self.source_map, start_byte_pos, end_byte_pos));
|
||||
pub fn add_unreachable_region(&mut self, region: Region<'tcx>) {
|
||||
self.unreachable_regions.push(region)
|
||||
}
|
||||
|
||||
/// Return the source hash, generated from the HIR node structure, and used to indicate whether
|
||||
@ -264,8 +154,8 @@ impl<'a> FunctionCoverage<'a> {
|
||||
/// associated `Regions` (from which the LLVM-specific `CoverageMapGenerator` will create
|
||||
/// `CounterMappingRegion`s.
|
||||
pub fn get_expressions_and_counter_regions(
|
||||
&'a self,
|
||||
) -> (Vec<CounterExpression>, impl Iterator<Item = (Counter, &'a Region)>) {
|
||||
&'tcx self,
|
||||
) -> (Vec<CounterExpression>, impl Iterator<Item = (Counter, &'tcx Region<'tcx>)>) {
|
||||
assert!(self.source_hash != 0);
|
||||
|
||||
let counter_regions = self.counter_regions();
|
||||
@ -277,7 +167,7 @@ impl<'a> FunctionCoverage<'a> {
|
||||
(counter_expressions, counter_regions)
|
||||
}
|
||||
|
||||
fn counter_regions(&'a self) -> impl Iterator<Item = (Counter, &'a Region)> {
|
||||
fn counter_regions(&'tcx self) -> impl Iterator<Item = (Counter, &'tcx Region<'tcx>)> {
|
||||
self.counters.iter_enumerated().filter_map(|(index, entry)| {
|
||||
// Option::map() will return None to filter out missing counters. This may happen
|
||||
// if, for example, a MIR-instrumented counter is removed during an optimization.
|
||||
@ -288,8 +178,8 @@ impl<'a> FunctionCoverage<'a> {
|
||||
}
|
||||
|
||||
fn expressions_with_regions(
|
||||
&'a self,
|
||||
) -> (Vec<CounterExpression>, impl Iterator<Item = (Counter, &'a Region)>) {
|
||||
&'tcx self,
|
||||
) -> (Vec<CounterExpression>, impl Iterator<Item = (Counter, &'tcx Region<'tcx>)>) {
|
||||
let mut counter_expressions = Vec::with_capacity(self.expressions.len());
|
||||
let mut expression_regions = Vec::with_capacity(self.expressions.len());
|
||||
let mut new_indexes =
|
||||
@ -350,7 +240,7 @@ impl<'a> FunctionCoverage<'a> {
|
||||
(counter_expressions, expression_regions.into_iter())
|
||||
}
|
||||
|
||||
fn unreachable_regions(&'a self) -> impl Iterator<Item = (Counter, &'a Region)> {
|
||||
fn unreachable_regions(&'tcx self) -> impl Iterator<Item = (Counter, &'tcx Region<'tcx>)> {
|
||||
self.unreachable_regions.iter().map(|region| (Counter::zero(), region))
|
||||
}
|
||||
|
||||
|
@@ -2,3 +2,4 @@ pub mod ffi;
pub mod map;

pub use map::ExprKind;
pub use map::Region;
@@ -1,5 +1,5 @@
use super::BackendTypes;
use crate::coverageinfo::ExprKind;
use crate::coverageinfo::{ExprKind, Region};
use rustc_middle::ty::Instance;

pub trait CoverageInfoMethods: BackendTypes {
@@ -12,8 +12,7 @@ pub trait CoverageInfoBuilderMethods<'tcx>: BackendTypes {
        instance: Instance<'tcx>,
        function_source_hash: u64,
        index: u32,
        start_byte_pos: u32,
        end_byte_pos: u32,
        region: Region<'tcx>,
    );

    fn add_counter_expression_region(
@@ -23,14 +22,8 @@ pub trait CoverageInfoBuilderMethods<'tcx>: BackendTypes {
        lhs: u32,
        op: ExprKind,
        rhs: u32,
        start_byte_pos: u32,
        end_byte_pos: u32,
        region: Region<'tcx>,
    );

    fn add_unreachable_region(
        &mut self,
        instance: Instance<'tcx>,
        start_byte_pos: u32,
        end_byte_pos: u32,
    );
    fn add_unreachable_region(&mut self, instance: Instance<'tcx>, region: Region<'tcx>);
}
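After this change every region-reporting method funnels a `Region` value through to a per-function collector. A condensed standalone sketch of that shape, using simplified stand-ins rather than the real `CoverageInfoBuilderMethods` trait or `FunctionCoverage` type (the `Instance` and `ExprKind` parameters are omitted here):

// Simplified stand-ins; not the compiler's trait, just the same general shape.
#[derive(Clone, Debug)]
struct Region<'a> {
    file_name: &'a str,
    start_line: u32,
    start_col: u32,
    end_line: u32,
    end_col: u32,
}

trait CoverageInfoBuilder<'a> {
    fn add_counter_region(&mut self, function_source_hash: u64, index: u32, region: Region<'a>);
    fn add_unreachable_region(&mut self, region: Region<'a>);
}

#[derive(Default)]
struct FunctionCoverage<'a> {
    source_hash: u64,
    counters: Vec<(u32, Region<'a>)>,
    unreachable_regions: Vec<Region<'a>>,
}

impl<'a> CoverageInfoBuilder<'a> for FunctionCoverage<'a> {
    fn add_counter_region(&mut self, function_source_hash: u64, index: u32, region: Region<'a>) {
        // The first counter establishes the function's source hash.
        self.source_hash = function_source_hash;
        self.counters.push((index, region));
    }
    fn add_unreachable_region(&mut self, region: Region<'a>) {
        self.unreachable_regions.push(region);
    }
}

fn main() {
    let mut cov = FunctionCoverage::default();
    let region = Region { file_name: "lib.rs", start_line: 1, start_col: 1, end_line: 3, end_col: 2 };
    cov.add_counter_region(0x1234, 0, region.clone());
    cov.add_unreachable_region(region);
    println!(
        "{} counter(s), {} unreachable, hash {:#x}",
        cov.counters.len(),
        cov.unreachable_regions.len(),
        cov.source_hash
    );
}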
@@ -4,8 +4,11 @@
pub mod count_code_region_args {
    pub const FUNCTION_SOURCE_HASH: usize = 0;
    pub const COUNTER_ID: usize = 1;
    pub const START_BYTE_POS: usize = 2;
    pub const END_BYTE_POS: usize = 3;
    pub const FILE_NAME: usize = 2;
    pub const START_LINE: usize = 3;
    pub const START_COL: usize = 4;
    pub const END_LINE: usize = 5;
    pub const END_COL: usize = 6;
}

/// Positional arguments to `libcore::coverage_counter_add()` and
@@ -14,12 +17,18 @@ pub mod coverage_counter_expression_args {
    pub const EXPRESSION_ID: usize = 0;
    pub const LEFT_ID: usize = 1;
    pub const RIGHT_ID: usize = 2;
    pub const START_BYTE_POS: usize = 3;
    pub const END_BYTE_POS: usize = 4;
    pub const FILE_NAME: usize = 3;
    pub const START_LINE: usize = 4;
    pub const START_COL: usize = 5;
    pub const END_LINE: usize = 6;
    pub const END_COL: usize = 7;
}

/// Positional arguments to `libcore::coverage_unreachable()`
pub mod coverage_unreachable_args {
    pub const START_BYTE_POS: usize = 0;
    pub const END_BYTE_POS: usize = 1;
    pub const FILE_NAME: usize = 0;
    pub const START_LINE: usize = 1;
    pub const START_COL: usize = 2;
    pub const END_LINE: usize = 3;
    pub const END_COL: usize = 4;
}
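At codegen time these constants index the intrinsic's operand list by position (e.g. `op_to_u32(&args[START_LINE])` earlier in this commit). A toy illustration of the same indexing pattern, with a plain string array standing in for the real MIR operand slice (an assumption made purely for brevity):

// Toy illustration: positional constants naming slots in an argument list.
pub mod count_code_region_args {
    pub const FUNCTION_SOURCE_HASH: usize = 0;
    pub const COUNTER_ID: usize = 1;
    pub const FILE_NAME: usize = 2;
    pub const START_LINE: usize = 3;
    pub const START_COL: usize = 4;
    pub const END_LINE: usize = 5;
    pub const END_COL: usize = 6;
}

fn main() {
    use crate::count_code_region_args::*;
    // Values mirror the blessed MIR output shown later in this commit.
    let args = ["10208505205182607101", "0", "/the/src/instrument_coverage.rs", "19", "18", "21", "2"];
    let _hash = args[FUNCTION_SOURCE_HASH];
    println!(
        "counter {} covers {}:{}:{} - {}:{}",
        args[COUNTER_ID], args[FILE_NAME], args[START_LINE], args[START_COL], args[END_LINE], args[END_COL],
    );
}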
@@ -56,6 +56,15 @@ impl<'tcx> ConstValue<'tcx> {
        }
    }

    pub fn try_to_str_slice(&self) -> Option<&'tcx str> {
        if let ConstValue::Slice { data, start, end } = *self {
            ::std::str::from_utf8(data.inspect_with_undef_and_ptr_outside_interpreter(start..end))
                .ok()
        } else {
            None
        }
    }

    pub fn try_to_bits(&self, size: Size) -> Option<u128> {
        self.try_to_scalar()?.to_bits(size).ok()
    }
@@ -2,7 +2,7 @@
//!
//! [rustc dev guide]: https://rustc-dev-guide.rust-lang.org/mir/index.html

use crate::mir::interpret::{GlobalAlloc, Scalar};
use crate::mir::interpret::{Allocation, ConstValue, GlobalAlloc, Scalar};
use crate::mir::visit::MirVisitable;
use crate::ty::adjustment::PointerCast;
use crate::ty::fold::{TypeFoldable, TypeFolder, TypeVisitor};
@@ -1842,6 +1842,33 @@ impl<'tcx> Operand<'tcx> {
        }
    }

    /// Convenience helper to make a literal-like constant from a given `&str` slice.
    /// Since this is used to synthesize MIR, assumes `user_ty` is None.
    pub fn const_from_str(tcx: TyCtxt<'tcx>, val: &str, span: Span) -> Operand<'tcx> {
        let tcx = tcx;
        let allocation = Allocation::from_byte_aligned_bytes(val.as_bytes());
        let allocation = tcx.intern_const_alloc(allocation);
        let const_val = ConstValue::Slice { data: allocation, start: 0, end: val.len() };
        let ty = tcx.mk_imm_ref(tcx.lifetimes.re_erased, tcx.types.str_);
        Operand::Constant(box Constant {
            span,
            user_ty: None,
            literal: ty::Const::from_value(tcx, const_val, ty),
        })
    }

    /// Convenience helper to make a `ConstValue` from the given `Operand`, assuming that `Operand`
    /// wraps a constant value (such as a `&str` slice). Panics if this is not the case.
    pub fn value_from_const(operand: &Operand<'tcx>) -> ConstValue<'tcx> {
        match operand {
            Operand::Constant(constant) => match constant.literal.val.try_to_value() {
                Some(const_value) => const_value,
                _ => panic!("{:?}: ConstValue expected", constant.literal.val),
            },
            _ => panic!("{:?}: Constant expected", operand),
        }
    }

    pub fn to_copy(&self) -> Self {
        match *self {
            Operand::Copy(_) | Operand::Constant(_) => self.clone(),
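`const_from_str` and `try_to_str_slice`/`value_from_const` are the two halves of a round trip: the MIR pass packs a `&str` into a byte-backed `Slice` constant, and codegen unpacks it again. Here is a toy model of that round trip with a much-simplified `ConstValue`; the real one wraps an interned `Allocation`, not a `Vec<u8>`:

// Toy model of the round trip; `data` stands in for the interned Allocation bytes.
enum ConstValue {
    Slice { data: Vec<u8>, start: usize, end: usize },
}

fn const_from_str(val: &str) -> ConstValue {
    ConstValue::Slice { data: val.as_bytes().to_vec(), start: 0, end: val.len() }
}

fn try_to_str_slice(value: &ConstValue) -> Option<&str> {
    // Single-variant enum, so the pattern is irrefutable.
    let ConstValue::Slice { data, start, end } = value;
    std::str::from_utf8(&data[*start..*end]).ok()
}

fn main() {
    let packed = const_from_str("/the/src/instrument_coverage.rs");
    assert_eq!(try_to_str_slice(&packed), Some("/the/src/instrument_coverage.rs"));
}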
@ -5,18 +5,19 @@ use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
|
||||
use rustc_hir::lang_items;
|
||||
use rustc_middle::hir;
|
||||
use rustc_middle::ich::StableHashingContext;
|
||||
use rustc_middle::mir;
|
||||
use rustc_middle::mir::coverage::*;
|
||||
use rustc_middle::mir::interpret::Scalar;
|
||||
use rustc_middle::mir::traversal;
|
||||
use rustc_middle::mir::{
|
||||
self, traversal, BasicBlock, BasicBlockData, CoverageInfo, Operand, Place, SourceInfo,
|
||||
SourceScope, StatementKind, Terminator, TerminatorKind,
|
||||
BasicBlock, BasicBlockData, CoverageInfo, Operand, Place, SourceInfo, SourceScope,
|
||||
StatementKind, Terminator, TerminatorKind,
|
||||
};
|
||||
use rustc_middle::ty;
|
||||
use rustc_middle::ty::query::Providers;
|
||||
use rustc_middle::ty::FnDef;
|
||||
use rustc_middle::ty::TyCtxt;
|
||||
use rustc_middle::ty::{FnDef, TyCtxt};
|
||||
use rustc_span::def_id::DefId;
|
||||
use rustc_span::{Pos, Span};
|
||||
use rustc_span::{FileName, Pos, RealFileName, Span};
|
||||
|
||||
/// Inserts call to count_code_region() as a placeholder to be replaced during code generation with
|
||||
/// the intrinsic llvm.instrprof.increment.
|
||||
@ -112,6 +113,7 @@ enum Op {
|
||||
struct InjectedCall<'tcx> {
|
||||
func: Operand<'tcx>,
|
||||
args: Vec<Operand<'tcx>>,
|
||||
span: Span,
|
||||
inject_at: Span,
|
||||
}
|
||||
|
||||
@ -179,12 +181,11 @@ impl<'a, 'tcx> Instrumentor<'a, 'tcx> {
|
||||
let _ignore = mir_body;
|
||||
let id = self.next_counter();
|
||||
let function_source_hash = self.function_source_hash();
|
||||
let code_region = body_span;
|
||||
let scope = rustc_middle::mir::OUTERMOST_SOURCE_SCOPE;
|
||||
let is_cleanup = false;
|
||||
let next_block = rustc_middle::mir::START_BLOCK;
|
||||
self.inject_call(
|
||||
self.make_counter(id, function_source_hash, code_region),
|
||||
self.make_counter(id, function_source_hash, body_span),
|
||||
scope,
|
||||
is_cleanup,
|
||||
next_block,
|
||||
@ -201,14 +202,13 @@ impl<'a, 'tcx> Instrumentor<'a, 'tcx> {
|
||||
let op = if add { Op::Add } else { Op::Subtract };
|
||||
let rhs = 2;
|
||||
|
||||
let code_region = body_span;
|
||||
let scope = rustc_middle::mir::OUTERMOST_SOURCE_SCOPE;
|
||||
let is_cleanup = false;
|
||||
let next_block = rustc_middle::mir::START_BLOCK;
|
||||
|
||||
let id = self.next_expression();
|
||||
self.inject_call(
|
||||
self.make_expression(id, code_region, lhs, op, rhs),
|
||||
self.make_expression(id, body_span, lhs, op, rhs),
|
||||
scope,
|
||||
is_cleanup,
|
||||
next_block,
|
||||
@ -216,13 +216,8 @@ impl<'a, 'tcx> Instrumentor<'a, 'tcx> {
|
||||
}
|
||||
}
|
||||
|
||||
fn make_counter(
|
||||
&self,
|
||||
id: u32,
|
||||
function_source_hash: u64,
|
||||
code_region: Span,
|
||||
) -> InjectedCall<'tcx> {
|
||||
let inject_at = code_region.shrink_to_lo();
|
||||
fn make_counter(&self, id: u32, function_source_hash: u64, span: Span) -> InjectedCall<'tcx> {
|
||||
let inject_at = span.shrink_to_lo();
|
||||
|
||||
let func = function_handle(
|
||||
self.tcx,
|
||||
@ -239,24 +234,18 @@ impl<'a, 'tcx> Instrumentor<'a, 'tcx> {
|
||||
debug_assert_eq!(COUNTER_ID, args.len());
|
||||
args.push(self.const_u32(id, inject_at));
|
||||
|
||||
debug_assert_eq!(START_BYTE_POS, args.len());
|
||||
args.push(self.const_u32(code_region.lo().to_u32(), inject_at));
|
||||
|
||||
debug_assert_eq!(END_BYTE_POS, args.len());
|
||||
args.push(self.const_u32(code_region.hi().to_u32(), inject_at));
|
||||
|
||||
InjectedCall { func, args, inject_at }
|
||||
InjectedCall { func, args, span, inject_at }
|
||||
}
|
||||
|
||||
fn make_expression(
|
||||
&self,
|
||||
id: u32,
|
||||
code_region: Span,
|
||||
span: Span,
|
||||
lhs: u32,
|
||||
op: Op,
|
||||
rhs: u32,
|
||||
) -> InjectedCall<'tcx> {
|
||||
let inject_at = code_region.shrink_to_lo();
|
||||
let inject_at = span.shrink_to_lo();
|
||||
|
||||
let func = function_handle(
|
||||
self.tcx,
|
||||
@ -282,13 +271,7 @@ impl<'a, 'tcx> Instrumentor<'a, 'tcx> {
|
||||
debug_assert_eq!(RIGHT_ID, args.len());
|
||||
args.push(self.const_u32(rhs, inject_at));
|
||||
|
||||
debug_assert_eq!(START_BYTE_POS, args.len());
|
||||
args.push(self.const_u32(code_region.lo().to_u32(), inject_at));
|
||||
|
||||
debug_assert_eq!(END_BYTE_POS, args.len());
|
||||
args.push(self.const_u32(code_region.hi().to_u32(), inject_at));
|
||||
|
||||
InjectedCall { func, args, inject_at }
|
||||
InjectedCall { func, args, span, inject_at }
|
||||
}
|
||||
|
||||
fn inject_call(
|
||||
@ -298,7 +281,7 @@ impl<'a, 'tcx> Instrumentor<'a, 'tcx> {
|
||||
is_cleanup: bool,
|
||||
next_block: BasicBlock,
|
||||
) {
|
||||
let InjectedCall { func, args, inject_at } = call;
|
||||
let InjectedCall { func, mut args, span, inject_at } = call;
|
||||
debug!(
|
||||
" injecting {}call to {:?}({:?}) at: {:?}, scope: {:?}",
|
||||
if is_cleanup { "cleanup " } else { "" },
|
||||
@ -310,6 +293,14 @@ impl<'a, 'tcx> Instrumentor<'a, 'tcx> {
|
||||
|
||||
let mut patch = MirPatch::new(self.mir_body);
|
||||
|
||||
let (file_name, start_line, start_col, end_line, end_col) = self.code_region(&span);
|
||||
|
||||
args.push(self.const_str(&file_name, inject_at));
|
||||
args.push(self.const_u32(start_line, inject_at));
|
||||
args.push(self.const_u32(start_col, inject_at));
|
||||
args.push(self.const_u32(end_line, inject_at));
|
||||
args.push(self.const_u32(end_col, inject_at));
|
||||
|
||||
let temp = patch.new_temp(self.tcx.mk_unit(), inject_at);
|
||||
let new_block = patch.new_block(placeholder_block(inject_at, scope, is_cleanup));
|
||||
patch.patch_terminator(
|
||||
@ -335,6 +326,43 @@ impl<'a, 'tcx> Instrumentor<'a, 'tcx> {
|
||||
self.mir_body.basic_blocks_mut().swap(next_block, new_block);
|
||||
}
|
||||
|
||||
/// Convert the Span into its file name, start line and column, and end line and column
|
||||
fn code_region(&self, span: &Span) -> (String, u32, u32, u32, u32) {
|
||||
let source_map = self.tcx.sess.source_map();
|
||||
let start = source_map.lookup_char_pos(span.lo());
|
||||
let end = if span.hi() == span.lo() {
|
||||
start.clone()
|
||||
} else {
|
||||
let end = source_map.lookup_char_pos(span.hi());
|
||||
debug_assert_eq!(
|
||||
start.file.name,
|
||||
end.file.name,
|
||||
"Region start ({:?} -> {:?}) and end ({:?} -> {:?}) don't come from the same source file!",
|
||||
span.lo(),
|
||||
start,
|
||||
span.hi(),
|
||||
end
|
||||
);
|
||||
end
|
||||
};
|
||||
match &start.file.name {
|
||||
FileName::Real(RealFileName::Named(path)) => (
|
||||
path.to_string_lossy().to_string(),
|
||||
start.line as u32,
|
||||
start.col.to_u32() + 1,
|
||||
end.line as u32,
|
||||
end.col.to_u32() + 1,
|
||||
),
|
||||
_ => {
|
||||
bug!("start.file.name should be a RealFileName, but it was: {:?}", start.file.name)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn const_str(&self, value: &str, span: Span) -> Operand<'tcx> {
|
||||
Operand::const_from_str(self.tcx, value, span)
|
||||
}
|
||||
|
||||
fn const_u32(&self, value: u32, span: Span) -> Operand<'tcx> {
|
||||
Operand::const_from_scalar(self.tcx, self.tcx.types.u32, Scalar::from_u32(value), span)
|
||||
}
|
||||
|
@@ -379,17 +379,46 @@ pub fn check_intrinsic_type(tcx: TyCtxt<'_>, it: &hir::ForeignItem<'_>) {

        sym::nontemporal_store => (1, vec![tcx.mk_mut_ptr(param(0)), param(0)], tcx.mk_unit()),

        sym::count_code_region => {
            (0, vec![tcx.types.u64, tcx.types.u32, tcx.types.u32, tcx.types.u32], tcx.mk_unit())
        }

        sym::coverage_counter_add | sym::coverage_counter_subtract => (
        sym::count_code_region => (
            0,
            vec![tcx.types.u32, tcx.types.u32, tcx.types.u32, tcx.types.u32, tcx.types.u32],
            vec![
                tcx.types.u64,
                tcx.types.u32,
                tcx.mk_static_str(),
                tcx.types.u32,
                tcx.types.u32,
                tcx.types.u32,
                tcx.types.u32,
            ],
            tcx.mk_unit(),
        ),

        sym::coverage_unreachable => (0, vec![tcx.types.u32, tcx.types.u32], tcx.mk_unit()),
        sym::coverage_counter_add | sym::coverage_counter_subtract => (
            0,
            vec![
                tcx.types.u32,
                tcx.types.u32,
                tcx.types.u32,
                tcx.mk_static_str(),
                tcx.types.u32,
                tcx.types.u32,
                tcx.types.u32,
                tcx.types.u32,
            ],
            tcx.mk_unit(),
        ),

        sym::coverage_unreachable => (
            0,
            vec![
                tcx.mk_static_str(),
                tcx.types.u32,
                tcx.types.u32,
                tcx.types.u32,
                tcx.types.u32,
            ],
            tcx.mk_unit(),
        ),

        other => {
            struct_span_err!(
@ -2,58 +2,76 @@
|
||||
+ // MIR for `bar` after InstrumentCoverage
|
||||
|
||||
fn bar() -> bool {
|
||||
let mut _0: bool; // return place in scope 0 at $DIR/instrument_coverage.rs:18:13: 18:17
|
||||
+ let mut _1: (); // in scope 0 at $DIR/instrument_coverage.rs:18:18: 18:18
|
||||
let mut _0: bool; // return place in scope 0 at /the/src/instrument_coverage.rs:19:13: 19:17
|
||||
+ let mut _1: (); // in scope 0 at /the/src/instrument_coverage.rs:19:18: 19:18
|
||||
|
||||
bb0: {
|
||||
+ StorageLive(_1); // scope 0 at $DIR/instrument_coverage.rs:18:18: 18:18
|
||||
+ _1 = const std::intrinsics::count_code_region(const 10208505205182607101_u64, const 0_u32, const 529_u32, const 541_u32) -> bb2; // scope 0 at $DIR/instrument_coverage.rs:18:18: 18:18
|
||||
+ StorageLive(_1); // scope 0 at /the/src/instrument_coverage.rs:19:18: 19:18
|
||||
+ _1 = const std::intrinsics::count_code_region(const 10208505205182607101_u64, const 0_u32, const "/the/src/instrument_coverage.rs", const 19_u32, const 18_u32, const 21_u32, const 2_u32) -> bb2; // scope 0 at /the/src/instrument_coverage.rs:19:18: 19:18
|
||||
+ // ty::Const
|
||||
+ // + ty: unsafe extern "rust-intrinsic" fn(u64, u32, u32, u32) {std::intrinsics::count_code_region}
|
||||
+ // + ty: unsafe extern "rust-intrinsic" fn(u64, u32, &'static str, u32, u32, u32, u32) {std::intrinsics::count_code_region}
|
||||
+ // + val: Value(Scalar(<ZST>))
|
||||
+ // mir::Constant
|
||||
+ // + span: $DIR/instrument_coverage.rs:18:18: 18:18
|
||||
+ // + literal: Const { ty: unsafe extern "rust-intrinsic" fn(u64, u32, u32, u32) {std::intrinsics::count_code_region}, val: Value(Scalar(<ZST>)) }
|
||||
+ // + span: /the/src/instrument_coverage.rs:19:18: 19:18
|
||||
+ // + literal: Const { ty: unsafe extern "rust-intrinsic" fn(u64, u32, &'static str, u32, u32, u32, u32) {std::intrinsics::count_code_region}, val: Value(Scalar(<ZST>)) }
|
||||
+ // ty::Const
|
||||
+ // + ty: u64
|
||||
+ // + val: Value(Scalar(0x8dabe565aaa2aefd))
|
||||
+ // mir::Constant
|
||||
+ // + span: $DIR/instrument_coverage.rs:18:18: 18:18
|
||||
+ // + span: /the/src/instrument_coverage.rs:19:18: 19:18
|
||||
+ // + literal: Const { ty: u64, val: Value(Scalar(0x8dabe565aaa2aefd)) }
|
||||
+ // ty::Const
|
||||
+ // + ty: u32
|
||||
+ // + val: Value(Scalar(0x00000000))
|
||||
+ // mir::Constant
|
||||
+ // + span: $DIR/instrument_coverage.rs:18:18: 18:18
|
||||
+ // + span: /the/src/instrument_coverage.rs:19:18: 19:18
|
||||
+ // + literal: Const { ty: u32, val: Value(Scalar(0x00000000)) }
|
||||
+ // ty::Const
|
||||
+ // + ty: u32
|
||||
+ // + val: Value(Scalar(0x00000211))
|
||||
+ // + ty: &str
|
||||
+ // + val: Value(Slice { data: Allocation { bytes: [47, 116, 104, 101, 47, 115, 114, 99, 47, 105, 110, 115, 116, 114, 117, 109, 101, 110, 116, 95, 99, 111, 118, 101, 114, 97, 103, 101, 46, 114, 115], relocations: Relocations(SortedMap { data: [] }), init_mask: InitMask { blocks: [2147483647], len: Size { raw: 31 } }, size: Size { raw: 31 }, align: Align { pow2: 0 }, mutability: Not, extra: () }, start: 0, end: 31 })
|
||||
+ // mir::Constant
|
||||
+ // + span: $DIR/instrument_coverage.rs:18:18: 18:18
|
||||
+ // + literal: Const { ty: u32, val: Value(Scalar(0x00000211)) }
|
||||
+ // + span: /the/src/instrument_coverage.rs:19:18: 19:18
|
||||
+ // + literal: Const { ty: &str, val: Value(Slice { data: Allocation { bytes: [47, 116, 104, 101, 47, 115, 114, 99, 47, 105, 110, 115, 116, 114, 117, 109, 101, 110, 116, 95, 99, 111, 118, 101, 114, 97, 103, 101, 46, 114, 115], relocations: Relocations(SortedMap { data: [] }), init_mask: InitMask { blocks: [2147483647], len: Size { raw: 31 } }, size: Size { raw: 31 }, align: Align { pow2: 0 }, mutability: Not, extra: () }, start: 0, end: 31 }) }
|
||||
+ // ty::Const
|
||||
+ // + ty: u32
|
||||
+ // + val: Value(Scalar(0x0000021d))
|
||||
+ // + val: Value(Scalar(0x00000013))
|
||||
+ // mir::Constant
|
||||
+ // + span: $DIR/instrument_coverage.rs:18:18: 18:18
|
||||
+ // + literal: Const { ty: u32, val: Value(Scalar(0x0000021d)) }
|
||||
+ // + span: /the/src/instrument_coverage.rs:19:18: 19:18
|
||||
+ // + literal: Const { ty: u32, val: Value(Scalar(0x00000013)) }
|
||||
+ // ty::Const
|
||||
+ // + ty: u32
|
||||
+ // + val: Value(Scalar(0x00000012))
|
||||
+ // mir::Constant
|
||||
+ // + span: /the/src/instrument_coverage.rs:19:18: 19:18
|
||||
+ // + literal: Const { ty: u32, val: Value(Scalar(0x00000012)) }
|
||||
+ // ty::Const
|
||||
+ // + ty: u32
|
||||
+ // + val: Value(Scalar(0x00000015))
|
||||
+ // mir::Constant
|
||||
+ // + span: /the/src/instrument_coverage.rs:19:18: 19:18
|
||||
+ // + literal: Const { ty: u32, val: Value(Scalar(0x00000015)) }
|
||||
+ // ty::Const
|
||||
+ // + ty: u32
|
||||
+ // + val: Value(Scalar(0x00000002))
|
||||
+ // mir::Constant
|
||||
+ // + span: /the/src/instrument_coverage.rs:19:18: 19:18
|
||||
+ // + literal: Const { ty: u32, val: Value(Scalar(0x00000002)) }
|
||||
+ }
|
||||
+
|
||||
+ bb1 (cleanup): {
|
||||
+ resume; // scope 0 at $DIR/instrument_coverage.rs:18:1: 20:2
|
||||
+ resume; // scope 0 at /the/src/instrument_coverage.rs:19:1: 21:2
|
||||
+ }
|
||||
+
|
||||
+ bb2: {
|
||||
+ StorageDead(_1); // scope 0 at $DIR/instrument_coverage.rs:19:5: 19:9
|
||||
_0 = const true; // scope 0 at $DIR/instrument_coverage.rs:19:5: 19:9
|
||||
+ StorageDead(_1); // scope 0 at /the/src/instrument_coverage.rs:20:5: 20:9
|
||||
_0 = const true; // scope 0 at /the/src/instrument_coverage.rs:20:5: 20:9
|
||||
// ty::Const
|
||||
// + ty: bool
|
||||
// + val: Value(Scalar(0x01))
|
||||
// mir::Constant
|
||||
// + span: $DIR/instrument_coverage.rs:19:5: 19:9
|
||||
// + span: /the/src/instrument_coverage.rs:20:5: 20:9
|
||||
// + literal: Const { ty: bool, val: Value(Scalar(0x01)) }
|
||||
return; // scope 0 at $DIR/instrument_coverage.rs:20:2: 20:2
|
||||
return; // scope 0 at /the/src/instrument_coverage.rs:21:2: 21:2
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -2,99 +2,117 @@
|
||||
+ // MIR for `main` after InstrumentCoverage
|
||||
|
||||
fn main() -> () {
|
||||
let mut _0: (); // return place in scope 0 at $DIR/instrument_coverage.rs:9:11: 9:11
|
||||
let mut _1: (); // in scope 0 at $DIR/instrument_coverage.rs:9:1: 15:2
|
||||
let mut _2: bool; // in scope 0 at $DIR/instrument_coverage.rs:11:12: 11:17
|
||||
let mut _3: !; // in scope 0 at $DIR/instrument_coverage.rs:11:18: 13:10
|
||||
+ let mut _4: (); // in scope 0 at $DIR/instrument_coverage.rs:9:11: 9:11
|
||||
let mut _0: (); // return place in scope 0 at /the/src/instrument_coverage.rs:10:11: 10:11
|
||||
let mut _1: (); // in scope 0 at /the/src/instrument_coverage.rs:10:1: 16:2
|
||||
let mut _2: bool; // in scope 0 at /the/src/instrument_coverage.rs:12:12: 12:17
|
||||
let mut _3: !; // in scope 0 at /the/src/instrument_coverage.rs:12:18: 14:10
|
||||
+ let mut _4: (); // in scope 0 at /the/src/instrument_coverage.rs:10:11: 10:11
|
||||
|
||||
bb0: {
|
||||
- falseUnwind -> [real: bb1, cleanup: bb2]; // scope 0 at $DIR/instrument_coverage.rs:10:5: 14:6
|
||||
+ StorageLive(_4); // scope 0 at $DIR/instrument_coverage.rs:9:11: 9:11
|
||||
+ _4 = const std::intrinsics::count_code_region(const 16004455475339839479_u64, const 0_u32, const 425_u32, const 493_u32) -> bb7; // scope 0 at $DIR/instrument_coverage.rs:9:11: 9:11
|
||||
- falseUnwind -> [real: bb1, cleanup: bb2]; // scope 0 at /the/src/instrument_coverage.rs:11:5: 15:6
|
||||
+ StorageLive(_4); // scope 0 at /the/src/instrument_coverage.rs:10:11: 10:11
|
||||
+ _4 = const std::intrinsics::count_code_region(const 16004455475339839479_u64, const 0_u32, const "/the/src/instrument_coverage.rs", const 10_u32, const 11_u32, const 16_u32, const 2_u32) -> bb7; // scope 0 at /the/src/instrument_coverage.rs:10:11: 10:11
|
||||
+ // ty::Const
|
||||
+ // + ty: unsafe extern "rust-intrinsic" fn(u64, u32, u32, u32) {std::intrinsics::count_code_region}
|
||||
+ // + ty: unsafe extern "rust-intrinsic" fn(u64, u32, &'static str, u32, u32, u32, u32) {std::intrinsics::count_code_region}
|
||||
+ // + val: Value(Scalar(<ZST>))
|
||||
+ // mir::Constant
|
||||
+ // + span: $DIR/instrument_coverage.rs:9:11: 9:11
|
||||
+ // + literal: Const { ty: unsafe extern "rust-intrinsic" fn(u64, u32, u32, u32) {std::intrinsics::count_code_region}, val: Value(Scalar(<ZST>)) }
|
||||
+ // + span: /the/src/instrument_coverage.rs:10:11: 10:11
|
||||
+ // + literal: Const { ty: unsafe extern "rust-intrinsic" fn(u64, u32, &'static str, u32, u32, u32, u32) {std::intrinsics::count_code_region}, val: Value(Scalar(<ZST>)) }
|
||||
+ // ty::Const
|
||||
+ // + ty: u64
|
||||
+ // + val: Value(Scalar(0xde1b3f75a72fc7f7))
|
||||
+ // mir::Constant
|
||||
+ // + span: $DIR/instrument_coverage.rs:9:11: 9:11
|
||||
+ // + span: /the/src/instrument_coverage.rs:10:11: 10:11
|
||||
+ // + literal: Const { ty: u64, val: Value(Scalar(0xde1b3f75a72fc7f7)) }
|
||||
+ // ty::Const
|
||||
+ // + ty: u32
|
||||
+ // + val: Value(Scalar(0x00000000))
|
||||
+ // mir::Constant
|
||||
+ // + span: $DIR/instrument_coverage.rs:9:11: 9:11
|
||||
+ // + span: /the/src/instrument_coverage.rs:10:11: 10:11
|
||||
+ // + literal: Const { ty: u32, val: Value(Scalar(0x00000000)) }
|
||||
+ // ty::Const
|
||||
+ // + ty: u32
|
||||
+ // + val: Value(Scalar(0x000001a9))
|
||||
+ // + ty: &str
|
||||
+ // + val: Value(Slice { data: Allocation { bytes: [47, 116, 104, 101, 47, 115, 114, 99, 47, 105, 110, 115, 116, 114, 117, 109, 101, 110, 116, 95, 99, 111, 118, 101, 114, 97, 103, 101, 46, 114, 115], relocations: Relocations(SortedMap { data: [] }), init_mask: InitMask { blocks: [2147483647], len: Size { raw: 31 } }, size: Size { raw: 31 }, align: Align { pow2: 0 }, mutability: Not, extra: () }, start: 0, end: 31 })
|
||||
+ // mir::Constant
|
||||
+ // + span: $DIR/instrument_coverage.rs:9:11: 9:11
|
||||
+ // + literal: Const { ty: u32, val: Value(Scalar(0x000001a9)) }
|
||||
+ // + span: /the/src/instrument_coverage.rs:10:11: 10:11
|
||||
+ // + literal: Const { ty: &str, val: Value(Slice { data: Allocation { bytes: [47, 116, 104, 101, 47, 115, 114, 99, 47, 105, 110, 115, 116, 114, 117, 109, 101, 110, 116, 95, 99, 111, 118, 101, 114, 97, 103, 101, 46, 114, 115], relocations: Relocations(SortedMap { data: [] }), init_mask: InitMask { blocks: [2147483647], len: Size { raw: 31 } }, size: Size { raw: 31 }, align: Align { pow2: 0 }, mutability: Not, extra: () }, start: 0, end: 31 }) }
|
||||
+ // ty::Const
|
||||
+ // + ty: u32
|
||||
+ // + val: Value(Scalar(0x000001ed))
|
||||
+ // + val: Value(Scalar(0x0000000a))
|
||||
+ // mir::Constant
|
||||
+ // + span: $DIR/instrument_coverage.rs:9:11: 9:11
|
||||
+ // + literal: Const { ty: u32, val: Value(Scalar(0x000001ed)) }
|
||||
+ // + span: /the/src/instrument_coverage.rs:10:11: 10:11
|
||||
+ // + literal: Const { ty: u32, val: Value(Scalar(0x0000000a)) }
|
||||
+ // ty::Const
|
||||
+ // + ty: u32
|
||||
+ // + val: Value(Scalar(0x0000000b))
|
||||
+ // mir::Constant
|
||||
+ // + span: /the/src/instrument_coverage.rs:10:11: 10:11
|
||||
+ // + literal: Const { ty: u32, val: Value(Scalar(0x0000000b)) }
|
||||
+ // ty::Const
|
||||
+ // + ty: u32
|
||||
+ // + val: Value(Scalar(0x00000010))
|
||||
+ // mir::Constant
|
||||
+ // + span: /the/src/instrument_coverage.rs:10:11: 10:11
|
||||
+ // + literal: Const { ty: u32, val: Value(Scalar(0x00000010)) }
|
||||
+ // ty::Const
|
||||
+ // + ty: u32
|
||||
+ // + val: Value(Scalar(0x00000002))
|
||||
+ // mir::Constant
|
||||
+ // + span: /the/src/instrument_coverage.rs:10:11: 10:11
|
||||
+ // + literal: Const { ty: u32, val: Value(Scalar(0x00000002)) }
|
||||
}
|
||||
|
||||
bb1: {
|
||||
StorageLive(_2); // scope 0 at $DIR/instrument_coverage.rs:11:12: 11:17
|
||||
_2 = const bar() -> [return: bb3, unwind: bb2]; // scope 0 at $DIR/instrument_coverage.rs:11:12: 11:17
|
||||
StorageLive(_2); // scope 0 at /the/src/instrument_coverage.rs:12:12: 12:17
|
||||
_2 = const bar() -> [return: bb3, unwind: bb2]; // scope 0 at /the/src/instrument_coverage.rs:12:12: 12:17
|
||||
// ty::Const
|
||||
// + ty: fn() -> bool {bar}
|
||||
// + val: Value(Scalar(<ZST>))
|
||||
// mir::Constant
|
||||
// + span: $DIR/instrument_coverage.rs:11:12: 11:15
|
||||
// + span: /the/src/instrument_coverage.rs:12:12: 12:15
|
||||
// + literal: Const { ty: fn() -> bool {bar}, val: Value(Scalar(<ZST>)) }
|
||||
}
|
||||
|
||||
bb2 (cleanup): {
|
||||
resume; // scope 0 at $DIR/instrument_coverage.rs:9:1: 15:2
|
||||
resume; // scope 0 at /the/src/instrument_coverage.rs:10:1: 16:2
|
||||
}
|
||||
|
||||
bb3: {
|
||||
FakeRead(ForMatchedPlace, _2); // scope 0 at $DIR/instrument_coverage.rs:11:12: 11:17
|
||||
switchInt(_2) -> [false: bb5, otherwise: bb4]; // scope 0 at $DIR/instrument_coverage.rs:11:9: 13:10
|
||||
FakeRead(ForMatchedPlace, _2); // scope 0 at /the/src/instrument_coverage.rs:12:12: 12:17
|
||||
switchInt(_2) -> [false: bb5, otherwise: bb4]; // scope 0 at /the/src/instrument_coverage.rs:12:9: 14:10
|
||||
}
|
||||
|
||||
bb4: {
|
||||
falseEdge -> [real: bb6, imaginary: bb5]; // scope 0 at $DIR/instrument_coverage.rs:11:9: 13:10
|
||||
falseEdge -> [real: bb6, imaginary: bb5]; // scope 0 at /the/src/instrument_coverage.rs:12:9: 14:10
|
||||
}
|
||||
|
||||
bb5: {
|
||||
_1 = const (); // scope 0 at $DIR/instrument_coverage.rs:11:9: 13:10
|
||||
_1 = const (); // scope 0 at /the/src/instrument_coverage.rs:12:9: 14:10
|
||||
// ty::Const
|
||||
// + ty: ()
|
||||
// + val: Value(Scalar(<ZST>))
|
||||
// mir::Constant
|
||||
// + span: $DIR/instrument_coverage.rs:11:9: 13:10
|
||||
// + span: /the/src/instrument_coverage.rs:12:9: 14:10
|
||||
// + literal: Const { ty: (), val: Value(Scalar(<ZST>)) }
|
||||
StorageDead(_2); // scope 0 at $DIR/instrument_coverage.rs:14:5: 14:6
|
||||
goto -> bb0; // scope 0 at $DIR/instrument_coverage.rs:10:5: 14:6
|
||||
StorageDead(_2); // scope 0 at /the/src/instrument_coverage.rs:15:5: 15:6
|
||||
goto -> bb0; // scope 0 at /the/src/instrument_coverage.rs:11:5: 15:6
|
||||
}
|
||||
|
||||
bb6: {
|
||||
_0 = const (); // scope 0 at $DIR/instrument_coverage.rs:12:13: 12:18
|
||||
_0 = const (); // scope 0 at /the/src/instrument_coverage.rs:13:13: 13:18
|
||||
// ty::Const
|
||||
// + ty: ()
|
||||
// + val: Value(Scalar(<ZST>))
|
||||
// mir::Constant
|
||||
// + span: $DIR/instrument_coverage.rs:12:13: 12:18
|
||||
// + span: /the/src/instrument_coverage.rs:13:13: 13:18
|
||||
// + literal: Const { ty: (), val: Value(Scalar(<ZST>)) }
|
||||
StorageDead(_2); // scope 0 at $DIR/instrument_coverage.rs:14:5: 14:6
|
||||
return; // scope 0 at $DIR/instrument_coverage.rs:15:2: 15:2
|
||||
StorageDead(_2); // scope 0 at /the/src/instrument_coverage.rs:15:5: 15:6
|
||||
return; // scope 0 at /the/src/instrument_coverage.rs:16:2: 16:2
|
||||
+ }
|
||||
+
|
||||
+ bb7: {
|
||||
+ StorageDead(_4); // scope 0 at $DIR/instrument_coverage.rs:10:5: 14:6
|
||||
+ falseUnwind -> [real: bb1, cleanup: bb2]; // scope 0 at $DIR/instrument_coverage.rs:10:5: 14:6
|
||||
+ StorageDead(_4); // scope 0 at /the/src/instrument_coverage.rs:11:5: 15:6
|
||||
+ falseUnwind -> [real: bb1, cleanup: bb2]; // scope 0 at /the/src/instrument_coverage.rs:11:5: 15:6
|
||||
}
|
||||
}
|
||||
|
||||
|
@@ -3,7 +3,8 @@
// intrinsics, during codegen.

// needs-profiler-support
// compile-flags: -Zinstrument-coverage
// ignore-windows
// compile-flags: -Zinstrument-coverage --remap-path-prefix={{src-base}}=/the/src
// EMIT_MIR instrument_coverage.main.InstrumentCoverage.diff
// EMIT_MIR instrument_coverage.bar.InstrumentCoverage.diff
fn main() {
@@ -18,3 +19,18 @@ fn main() {
fn bar() -> bool {
    true
}

// Note that the MIR with injected coverage intrinsics includes references to source locations,
// including the source file absolute path. Typically, MIR pretty print output with file
// references are safe because the file prefixes are substituted with `$DIR`, but in this case
// the file references are encoded as function arguments, with an `Operand` type representation
// (`Slice` `Allocation` interned byte array) that cannot be normalized by simple substitution.
//
// The first workaround is to use the `SourceMap`-supported `--remap-path-prefix` option; however,
// the implementation of the `--remap-path-prefix` option currently joins the new prefix and the
// remaining source path with an OS-specific path separator (`\` on Windows). This difference still
// shows up in the byte array representation of the path, causing Windows tests to fail to match
// blessed results baselined with a `/` path separator.
//
// Since this `mir-opt` test does not have any significant platform dependencies, other than the
// path separator differences, the final workaround is to disable testing on Windows.