diff --git a/src/librustc_codegen_llvm/coverageinfo/mapgen.rs b/src/librustc_codegen_llvm/coverageinfo/mapgen.rs index 7f48b1d864c..4d9747a43f2 100644 --- a/src/librustc_codegen_llvm/coverageinfo/mapgen.rs +++ b/src/librustc_codegen_llvm/coverageinfo/mapgen.rs @@ -1,23 +1,15 @@ -use crate::llvm; - use crate::common::CodegenCx; use crate::coverageinfo; +use crate::llvm; +use llvm::coverageinfo::CounterMappingRegion; use log::debug; -use rustc_codegen_ssa::coverageinfo::map::*; -use rustc_codegen_ssa::traits::{BaseTypeMethods, ConstMethods, MiscMethods}; +use rustc_codegen_ssa::coverageinfo::map::{Counter, CounterExpression, Region}; +use rustc_codegen_ssa::traits::{BaseTypeMethods, ConstMethods}; use rustc_data_structures::fx::FxHashMap; use rustc_llvm::RustString; -use rustc_middle::ty::Instance; -use rustc_middle::{bug, mir}; -use std::collections::BTreeMap; use std::ffi::CString; -use std::path::PathBuf; - -// FIXME(richkadel): Complete all variations of generating and exporting the coverage map to LLVM. -// The current implementation is an initial foundation with basic capabilities (Counters, but not -// CounterExpressions, etc.). /// Generates and exports the Coverage Map. /// @@ -32,174 +24,123 @@ use std::path::PathBuf; /// undocumented details in Clang's implementation (that may or may not be important) were also /// replicated for Rust's Coverage Map. pub fn finalize<'ll, 'tcx>(cx: &CodegenCx<'ll, 'tcx>) { - let mut coverage_writer = CoverageMappingWriter::new(cx); - let function_coverage_map = cx.coverage_context().take_function_coverage_map(); + if function_coverage_map.is_empty() { + // This module has no functions with coverage instrumentation + return; + } + + let mut mapgen = CoverageMapGenerator::new(); // Encode coverage mappings and generate function records let mut function_records = Vec::<&'ll llvm::Value>::new(); let coverage_mappings_buffer = llvm::build_byte_buffer(|coverage_mappings_buffer| { for (instance, function_coverage) in function_coverage_map.into_iter() { - if let Some(function_record) = coverage_writer.write_function_mappings_and_record( - instance, - function_coverage, - coverage_mappings_buffer, - ) { - function_records.push(function_record); - } + debug!("Generate coverage map for: {:?}", instance); + + let mangled_function_name = cx.tcx.symbol_name(instance).to_string(); + let function_source_hash = function_coverage.source_hash(); + let (expressions, counter_regions) = + function_coverage.get_expressions_and_counter_regions(); + + let old_len = coverage_mappings_buffer.len(); + mapgen.write_coverage_mappings(expressions, counter_regions, coverage_mappings_buffer); + let mapping_data_size = coverage_mappings_buffer.len() - old_len; + debug_assert!( + mapping_data_size > 0, + "Every `FunctionCoverage` should have at least one counter" + ); + + let function_record = mapgen.make_function_record( + cx, + mangled_function_name, + function_source_hash, + mapping_data_size, + ); + function_records.push(function_record); } }); - // Encode all filenames covered in this module, ordered by `file_id` + // Encode all filenames referenced by counters/expressions in this module let filenames_buffer = llvm::build_byte_buffer(|filenames_buffer| { - coverageinfo::write_filenames_section_to_buffer( - &coverage_writer.filenames, - filenames_buffer, - ); + coverageinfo::write_filenames_section_to_buffer(&mapgen.filenames, filenames_buffer); }); - if coverage_mappings_buffer.len() > 0 { - // Generate the LLVM IR representation of the coverage map and store it in a well-known 
- // global constant. - coverage_writer.write_coverage_map( - function_records, - filenames_buffer, - coverage_mappings_buffer, - ); - } + // Generate the LLVM IR representation of the coverage map and store it in a well-known global + mapgen.save_generated_coverage_map( + cx, + function_records, + filenames_buffer, + coverage_mappings_buffer, + ); } -struct CoverageMappingWriter<'a, 'll, 'tcx> { - cx: &'a CodegenCx<'ll, 'tcx>, +struct CoverageMapGenerator { filenames: Vec, filename_to_index: FxHashMap, } -impl<'a, 'll, 'tcx> CoverageMappingWriter<'a, 'll, 'tcx> { - fn new(cx: &'a CodegenCx<'ll, 'tcx>) -> Self { - Self { cx, filenames: Vec::new(), filename_to_index: FxHashMap::::default() } +impl CoverageMapGenerator { + fn new() -> Self { + Self { filenames: Vec::new(), filename_to_index: FxHashMap::default() } } - /// For the given function, get the coverage region data, stream it to the given buffer, and - /// then generate and return a new function record. - fn write_function_mappings_and_record( + /// Using the `expressions` and `counter_regions` collected for the current function, generate + /// the `mapping_regions` and `virtual_file_mapping`, and capture any new filenames. Then use + /// LLVM APIs to encode the `virtual_file_mapping`, `expressions`, and `mapping_regions` into + /// the given `coverage_mappings` byte buffer, compliant with the LLVM Coverage Mapping format. + fn write_coverage_mappings( &mut self, - instance: Instance<'tcx>, - mut function_coverage: FunctionCoverage, - coverage_mappings_buffer: &RustString, - ) -> Option<&'ll llvm::Value> { - let cx = self.cx; - let coverageinfo: &mir::CoverageInfo = cx.tcx.coverageinfo(instance.def_id()); - debug!( - "Generate coverage map for: {:?}, num_counters: {}, num_expressions: {}", - instance, coverageinfo.num_counters, coverageinfo.num_expressions - ); - debug_assert!(coverageinfo.num_counters > 0); - - let regions_in_file_order = function_coverage.regions_in_file_order(cx.sess().source_map()); - if regions_in_file_order.len() == 0 { - return None; - } - - // Stream the coverage mapping regions for the function (`instance`) to the buffer, and - // compute the data byte size used. - let old_len = coverage_mappings_buffer.len(); - self.regions_to_mappings(regions_in_file_order, coverage_mappings_buffer); - let mapping_data_size = coverage_mappings_buffer.len() - old_len; - debug_assert!(mapping_data_size > 0); - - let mangled_function_name = cx.tcx.symbol_name(instance).to_string(); - let name_ref = coverageinfo::compute_hash(&mangled_function_name); - let function_source_hash = function_coverage.source_hash(); - - // Generate and return the function record - let name_ref_val = cx.const_u64(name_ref); - let mapping_data_size_val = cx.const_u32(mapping_data_size as u32); - let func_hash_val = cx.const_u64(function_source_hash); - Some(cx.const_struct( - &[name_ref_val, mapping_data_size_val, func_hash_val], - /*packed=*/ true, - )) - } - - /// For each coverage region, extract its coverage data from the earlier coverage analysis. - /// Use LLVM APIs to convert the data into buffered bytes compliant with the LLVM Coverage - /// Mapping format. 
- fn regions_to_mappings( - &mut self, - regions_in_file_order: BTreeMap>, + expressions: Vec, + counter_regions: impl Iterator, coverage_mappings_buffer: &RustString, ) { + let mut counter_regions = counter_regions.collect::>(); + if counter_regions.is_empty() { + return; + } + let mut virtual_file_mapping = Vec::new(); - let mut mapping_regions = coverageinfo::SmallVectorCounterMappingRegion::new(); - let mut expressions = coverageinfo::SmallVectorCounterExpression::new(); + let mut mapping_regions = Vec::new(); + let mut current_file_path = None; + let mut current_file_id = 0; - for (file_id, (file_path, file_coverage_regions)) in - regions_in_file_order.into_iter().enumerate() - { - let file_id = file_id as u32; - let filename = CString::new(file_path.to_string_lossy().to_string()) - .expect("null error converting filename to C string"); - debug!(" file_id: {} = '{:?}'", file_id, filename); - let filenames_index = match self.filename_to_index.get(&filename) { - Some(index) => *index, - None => { - let index = self.filenames.len() as u32; - self.filenames.push(filename.clone()); - self.filename_to_index.insert(filename, index); - index - } - }; - virtual_file_mapping.push(filenames_index); - - let mut mapping_indexes = vec![0 as u32; file_coverage_regions.len()]; - for (mapping_index, (region_id, _)) in file_coverage_regions.values().enumerate() { - mapping_indexes[*region_id] = mapping_index as u32; - } - - for (region_loc, (region_id, region_kind)) in file_coverage_regions.into_iter() { - let mapping_index = mapping_indexes[region_id]; - match region_kind { - CoverageKind::Counter => { - debug!( - " Counter {}, file_id: {}, region_loc: {}", - mapping_index, file_id, region_loc - ); - mapping_regions.push_from( - mapping_index, - file_id, - region_loc.start_line, - region_loc.start_col, - region_loc.end_line, - region_loc.end_col, - ); - } - CoverageKind::CounterExpression(lhs, op, rhs) => { - debug!( - " CounterExpression {} = {} {:?} {}, file_id: {}, region_loc: {:?}", - mapping_index, lhs, op, rhs, file_id, region_loc, - ); - mapping_regions.push_from( - mapping_index, - file_id, - region_loc.start_line, - region_loc.start_col, - region_loc.end_line, - region_loc.end_col, - ); - expressions.push_from(op, lhs, rhs); - } - CoverageKind::Unreachable => { - debug!( - " Unreachable region, file_id: {}, region_loc: {:?}", - file_id, region_loc, - ); - bug!("Unreachable region not expected and not yet handled!") - // FIXME(richkadel): implement and call - // mapping_regions.push_from(...) for unreachable regions - } + // Convert the list of (Counter, Region) pairs to an array of `CounterMappingRegion`, sorted + // by filename and position. Capture any new files to compute the `CounterMappingRegion`s + // `file_id` (indexing files referenced by the current function), and construct the + // function-specific `virtual_file_mapping` from `file_id` to its index in the module's + // `filenames` array. 
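+ // For example (hypothetical filenames and positions): if the sorted regions for this
+ // function reference `a.rs`, `a.rs`, and then `b.rs`, their function-local `file_id`s are
+ // 0, 0, and 1, and `virtual_file_mapping` becomes `[i, j]`, where `i` and `j` are the
+ // positions of `a.rs` and `b.rs` in the module-wide `filenames` array (reusing entries
+ // already registered by previously processed functions, when present).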
+ counter_regions.sort_unstable_by_key(|(_counter, region)| *region); + for (counter, region) in counter_regions { + let (file_path, start_line, start_col, end_line, end_col) = region.file_start_and_end(); + let same_file = current_file_path.as_ref().map_or(false, |p| p == file_path); + if !same_file { + if current_file_path.is_some() { + current_file_id += 1; } + current_file_path = Some(file_path.clone()); + let filename = CString::new(file_path.to_string_lossy().to_string()) + .expect("null error converting filename to C string"); + debug!(" file_id: {} = '{:?}'", current_file_id, filename); + let filenames_index = match self.filename_to_index.get(&filename) { + Some(index) => *index, + None => { + let index = self.filenames.len() as u32; + self.filenames.push(filename.clone()); + self.filename_to_index.insert(filename.clone(), index); + index + } + }; + virtual_file_mapping.push(filenames_index); } + mapping_regions.push(CounterMappingRegion::code_region( + counter, + current_file_id, + start_line, + start_col, + end_line, + end_col, + )); } // Encode and append the current function's coverage mapping data @@ -211,14 +152,35 @@ impl<'a, 'll, 'tcx> CoverageMappingWriter<'a, 'll, 'tcx> { ); } - fn write_coverage_map( + /// Generate and return the function record `Value` + fn make_function_record( + &mut self, + cx: &CodegenCx<'ll, 'tcx>, + mangled_function_name: String, + function_source_hash: u64, + mapping_data_size: usize, + ) -> &'ll llvm::Value { + let name_ref = coverageinfo::compute_hash(&mangled_function_name); + let name_ref_val = cx.const_u64(name_ref); + let mapping_data_size_val = cx.const_u32(mapping_data_size as u32); + let func_hash_val = cx.const_u64(function_source_hash); + cx.const_struct( + &[name_ref_val, mapping_data_size_val, func_hash_val], + /*packed=*/ true, + ) + } + + /// Combine the filenames and coverage mappings buffers, construct coverage map header and the + /// array of function records, and combine everything into the complete coverage map. Save the + /// coverage map data into the LLVM IR as a static global using a specific, well-known section + /// and name. + fn save_generated_coverage_map( self, + cx: &CodegenCx<'ll, 'tcx>, function_records: Vec<&'ll llvm::Value>, filenames_buffer: Vec, mut coverage_mappings_buffer: Vec, ) { - let cx = self.cx; - // Concatenate the encoded filenames and encoded coverage mappings, and add additional zero // bytes as-needed to ensure 8-byte alignment. let mut coverage_size = coverage_mappings_buffer.len(); diff --git a/src/librustc_codegen_llvm/coverageinfo/mod.rs b/src/librustc_codegen_llvm/coverageinfo/mod.rs index 76894bcd6c1..9d2090eae8f 100644 --- a/src/librustc_codegen_llvm/coverageinfo/mod.rs +++ b/src/librustc_codegen_llvm/coverageinfo/mod.rs @@ -4,8 +4,9 @@ use crate::builder::Builder; use crate::common::CodegenCx; use libc::c_uint; +use llvm::coverageinfo::CounterMappingRegion; use log::debug; -use rustc_codegen_ssa::coverageinfo::map::*; +use rustc_codegen_ssa::coverageinfo::map::{CounterExpression, ExprKind, FunctionCoverage}; use rustc_codegen_ssa::traits::{ BaseTypeMethods, CoverageInfoBuilderMethods, CoverageInfoMethods, StaticMethods, }; @@ -23,7 +24,7 @@ const COVMAP_VAR_ALIGN_BYTES: usize = 8; /// A context object for maintaining all state needed by the coverageinfo module. pub struct CrateCoverageContext<'tcx> { // Coverage region data for each instrumented function identified by DefId. 
- pub(crate) function_coverage_map: RefCell, FunctionCoverage>>, + pub(crate) function_coverage_map: RefCell, FunctionCoverage<'tcx>>>, } impl<'tcx> CrateCoverageContext<'tcx> { @@ -31,7 +32,7 @@ impl<'tcx> CrateCoverageContext<'tcx> { Self { function_coverage_map: Default::default() } } - pub fn take_function_coverage_map(&self) -> FxHashMap, FunctionCoverage> { + pub fn take_function_coverage_map(&self) -> FxHashMap, FunctionCoverage<'tcx>> { self.function_coverage_map.replace(FxHashMap::default()) } } @@ -47,44 +48,49 @@ impl CoverageInfoBuilderMethods<'tcx> for Builder<'a, 'll, 'tcx> { &mut self, instance: Instance<'tcx>, function_source_hash: u64, - index: u32, + id: u32, start_byte_pos: u32, end_byte_pos: u32, ) { debug!( - "adding counter to coverage_regions: instance={:?}, function_source_hash={}, index={}, byte range {}..{}", - instance, function_source_hash, index, start_byte_pos, end_byte_pos, + "adding counter to coverage_regions: instance={:?}, function_source_hash={}, id={}, \ + byte range {}..{}", + instance, function_source_hash, id, start_byte_pos, end_byte_pos, ); let mut coverage_regions = self.coverage_context().function_coverage_map.borrow_mut(); coverage_regions .entry(instance) - .or_insert_with(|| { - FunctionCoverage::with_coverageinfo(self.tcx.coverageinfo(instance.def_id())) - }) - .add_counter(function_source_hash, index, start_byte_pos, end_byte_pos); + .or_insert_with(|| FunctionCoverage::new(self.tcx, instance)) + .add_counter(function_source_hash, id, start_byte_pos, end_byte_pos); } fn add_counter_expression_region( &mut self, instance: Instance<'tcx>, - index: u32, + id_descending_from_max: u32, lhs: u32, - op: CounterOp, + op: ExprKind, rhs: u32, start_byte_pos: u32, end_byte_pos: u32, ) { debug!( - "adding counter expression to coverage_regions: instance={:?}, index={}, {} {:?} {}, byte range {}..{}", - instance, index, lhs, op, rhs, start_byte_pos, end_byte_pos, + "adding counter expression to coverage_regions: instance={:?}, id={}, {} {:?} {}, \ + byte range {}..{}", + instance, id_descending_from_max, lhs, op, rhs, start_byte_pos, end_byte_pos, ); let mut coverage_regions = self.coverage_context().function_coverage_map.borrow_mut(); coverage_regions .entry(instance) - .or_insert_with(|| { - FunctionCoverage::with_coverageinfo(self.tcx.coverageinfo(instance.def_id())) - }) - .add_counter_expression(index, lhs, op, rhs, start_byte_pos, end_byte_pos); + .or_insert_with(|| FunctionCoverage::new(self.tcx, instance)) + .add_counter_expression( + id_descending_from_max, + lhs, + op, + rhs, + start_byte_pos, + end_byte_pos, + ); } fn add_unreachable_region( @@ -100,108 +106,8 @@ impl CoverageInfoBuilderMethods<'tcx> for Builder<'a, 'll, 'tcx> { let mut coverage_regions = self.coverage_context().function_coverage_map.borrow_mut(); coverage_regions .entry(instance) - .or_insert_with(|| { - FunctionCoverage::with_coverageinfo(self.tcx.coverageinfo(instance.def_id())) - }) - .add_unreachable(start_byte_pos, end_byte_pos); - } -} - -/// This struct wraps an opaque reference to the C++ template instantiation of -/// `llvm::SmallVector`. Each `coverage::CounterExpression` object is -/// constructed from primative-typed arguments, and pushed to the `SmallVector`, in the C++ -/// implementation of `LLVMRustCoverageSmallVectorCounterExpressionAdd()` (see -/// `src/rustllvm/CoverageMappingWrapper.cpp`). 
-pub struct SmallVectorCounterExpression<'a> { - pub raw: &'a mut llvm::coverageinfo::SmallVectorCounterExpression<'a>, -} - -impl SmallVectorCounterExpression<'a> { - pub fn new() -> Self { - SmallVectorCounterExpression { - raw: unsafe { llvm::LLVMRustCoverageSmallVectorCounterExpressionCreate() }, - } - } - - pub fn as_ptr(&self) -> *const llvm::coverageinfo::SmallVectorCounterExpression<'a> { - self.raw - } - - pub fn push_from( - &mut self, - kind: rustc_codegen_ssa::coverageinfo::CounterOp, - left_index: u32, - right_index: u32, - ) { - unsafe { - llvm::LLVMRustCoverageSmallVectorCounterExpressionAdd( - &mut *(self.raw as *mut _), - kind, - left_index, - right_index, - ) - } - } -} - -impl Drop for SmallVectorCounterExpression<'a> { - fn drop(&mut self) { - unsafe { - llvm::LLVMRustCoverageSmallVectorCounterExpressionDispose(&mut *(self.raw as *mut _)); - } - } -} - -/// This struct wraps an opaque reference to the C++ template instantiation of -/// `llvm::SmallVector`. Each `coverage::CounterMappingRegion` -/// object is constructed from primative-typed arguments, and pushed to the `SmallVector`, in the -/// C++ implementation of `LLVMRustCoverageSmallVectorCounterMappingRegionAdd()` (see -/// `src/rustllvm/CoverageMappingWrapper.cpp`). -pub struct SmallVectorCounterMappingRegion<'a> { - pub raw: &'a mut llvm::coverageinfo::SmallVectorCounterMappingRegion<'a>, -} - -impl SmallVectorCounterMappingRegion<'a> { - pub fn new() -> Self { - SmallVectorCounterMappingRegion { - raw: unsafe { llvm::LLVMRustCoverageSmallVectorCounterMappingRegionCreate() }, - } - } - - pub fn as_ptr(&self) -> *const llvm::coverageinfo::SmallVectorCounterMappingRegion<'a> { - self.raw - } - - pub fn push_from( - &mut self, - index: u32, - file_id: u32, - line_start: u32, - column_start: u32, - line_end: u32, - column_end: u32, - ) { - unsafe { - llvm::LLVMRustCoverageSmallVectorCounterMappingRegionAdd( - &mut *(self.raw as *mut _), - index, - file_id, - line_start, - column_start, - line_end, - column_end, - ) - } - } -} - -impl Drop for SmallVectorCounterMappingRegion<'a> { - fn drop(&mut self) { - unsafe { - llvm::LLVMRustCoverageSmallVectorCounterMappingRegionDispose( - &mut *(self.raw as *mut _), - ); - } + .or_insert_with(|| FunctionCoverage::new(self.tcx, instance)) + .add_unreachable_region(start_byte_pos, end_byte_pos); } } @@ -218,8 +124,8 @@ pub(crate) fn write_filenames_section_to_buffer(filenames: &Vec, buffer pub(crate) fn write_mapping_to_buffer( virtual_file_mapping: Vec, - expressions: SmallVectorCounterExpression<'_>, - mapping_regions: SmallVectorCounterMappingRegion<'_>, + expressions: Vec, + mut mapping_regions: Vec, buffer: &RustString, ) { unsafe { @@ -227,7 +133,9 @@ pub(crate) fn write_mapping_to_buffer( virtual_file_mapping.as_ptr(), virtual_file_mapping.len() as c_uint, expressions.as_ptr(), - mapping_regions.as_ptr(), + expressions.len() as c_uint, + mapping_regions.as_mut_ptr(), + mapping_regions.len() as c_uint, buffer, ); } diff --git a/src/librustc_codegen_llvm/intrinsic.rs b/src/librustc_codegen_llvm/intrinsic.rs index 11b1c95c58b..236f7f69653 100644 --- a/src/librustc_codegen_llvm/intrinsic.rs +++ b/src/librustc_codegen_llvm/intrinsic.rs @@ -13,7 +13,7 @@ use rustc_ast::ast; use rustc_codegen_ssa::base::{compare_simd_types, to_immediate, wants_msvc_seh}; use rustc_codegen_ssa::common::span_invalid_monomorphization_error; use rustc_codegen_ssa::common::{IntPredicate, TypeKind}; -use rustc_codegen_ssa::coverageinfo::CounterOp; +use rustc_codegen_ssa::coverageinfo::ExprKind; use 
rustc_codegen_ssa::glue; use rustc_codegen_ssa::mir::operand::{OperandRef, OperandValue}; use rustc_codegen_ssa::mir::place::PlaceRef; @@ -101,7 +101,7 @@ impl IntrinsicCallMethods<'tcx> for Builder<'a, 'll, 'tcx> { self.add_counter_region( caller_instance, op_to_u64(&args[FUNCTION_SOURCE_HASH]), - op_to_u32(&args[COUNTER_INDEX]), + op_to_u32(&args[COUNTER_ID]), op_to_u32(&args[START_BYTE_POS]), op_to_u32(&args[END_BYTE_POS]), ); @@ -111,14 +111,14 @@ impl IntrinsicCallMethods<'tcx> for Builder<'a, 'll, 'tcx> { use coverage::coverage_counter_expression_args::*; self.add_counter_expression_region( caller_instance, - op_to_u32(&args[COUNTER_EXPRESSION_INDEX]), - op_to_u32(&args[LEFT_INDEX]), + op_to_u32(&args[EXPRESSION_ID]), + op_to_u32(&args[LEFT_ID]), if intrinsic == sym::coverage_counter_add { - CounterOp::Add + ExprKind::Add } else { - CounterOp::Subtract + ExprKind::Subtract }, - op_to_u32(&args[RIGHT_INDEX]), + op_to_u32(&args[RIGHT_ID]), op_to_u32(&args[START_BYTE_POS]), op_to_u32(&args[END_BYTE_POS]), ); @@ -219,7 +219,7 @@ impl IntrinsicCallMethods<'tcx> for Builder<'a, 'll, 'tcx> { let num_counters = self.const_u32(coverageinfo.num_counters); use coverage::count_code_region_args::*; let hash = args[FUNCTION_SOURCE_HASH].immediate(); - let index = args[COUNTER_INDEX].immediate(); + let index = args[COUNTER_ID].immediate(); debug!( "translating Rust intrinsic `count_code_region()` to LLVM intrinsic: \ instrprof.increment(fn_name={}, hash={:?}, num_counters={:?}, index={:?})", diff --git a/src/librustc_codegen_llvm/llvm/ffi.rs b/src/librustc_codegen_llvm/llvm/ffi.rs index 9784beaa079..eb7dc827f93 100644 --- a/src/librustc_codegen_llvm/llvm/ffi.rs +++ b/src/librustc_codegen_llvm/llvm/ffi.rs @@ -1,7 +1,7 @@ #![allow(non_camel_case_types)] #![allow(non_upper_case_globals)] -use super::coverageinfo::{SmallVectorCounterExpression, SmallVectorCounterMappingRegion}; +use rustc_codegen_ssa::coverageinfo::map as coverage_map; use super::debuginfo::{ DIArray, DIBasicType, DIBuilder, DICompositeType, DIDerivedType, DIDescriptor, DIEnumerator, @@ -653,13 +653,152 @@ pub type DiagnosticHandler = unsafe extern "C" fn(&DiagnosticInfo, *mut c_void); pub type InlineAsmDiagHandler = unsafe extern "C" fn(&SMDiagnostic, *const c_void, c_uint); pub mod coverageinfo { - use super::InvariantOpaque; + use super::coverage_map; + /// Aligns with [llvm::coverage::CounterMappingRegion::RegionKind](https://github.com/rust-lang/llvm-project/blob/rustc/10.0-2020-05-05/llvm/include/llvm/ProfileData/Coverage/CoverageMapping.h#L205-L221) + #[derive(Copy, Clone, Debug)] #[repr(C)] - pub struct SmallVectorCounterExpression<'a>(InvariantOpaque<'a>); + pub enum RegionKind { + /// A CodeRegion associates some code with a counter + CodeRegion = 0, + /// An ExpansionRegion represents a file expansion region that associates + /// a source range with the expansion of a virtual source file, such as + /// for a macro instantiation or #include file. + ExpansionRegion = 1, + + /// A SkippedRegion represents a source range with code that was skipped + /// by a preprocessor or similar means. + SkippedRegion = 2, + + /// A GapRegion is like a CodeRegion, but its count is only set as the + /// line execution count when its the only region in the line. 
+ GapRegion = 3, + } + + /// This struct provides LLVM's representation of a "CoverageMappingRegion", encoded into the + /// coverage map, in accordance with the + /// [LLVM Code Coverage Mapping Format](https://github.com/rust-lang/llvm-project/blob/llvmorg-8.0.0/llvm/docs/CoverageMappingFormat.rst#llvm-code-coverage-mapping-format). + /// The struct composes fields representing the `Counter` type and value(s) (injected counter + /// ID, or expression type and operands), the source file (an indirect index into a "filenames + /// array", encoded separately), and source location (start and end positions of the represented + /// code region). + /// + /// Aligns with [llvm::coverage::CounterMappingRegion](https://github.com/rust-lang/llvm-project/blob/rustc/10.0-2020-05-05/llvm/include/llvm/ProfileData/Coverage/CoverageMapping.h#L223-L226) + /// Important: The Rust struct layout (order and types of fields) must match its C++ + /// counterpart. + #[derive(Copy, Clone, Debug)] #[repr(C)] - pub struct SmallVectorCounterMappingRegion<'a>(InvariantOpaque<'a>); + pub struct CounterMappingRegion { + /// The counter type and type-dependent counter data, if any. + counter: coverage_map::Counter, + + /// An indirect reference to the source filename. In the LLVM Coverage Mapping Format, the + /// file_id is an index into a function-specific `virtual_file_mapping` array of indexes + /// that, in turn, are used to look up the filename for this region. + file_id: u32, + + /// If the `RegionKind` is an `ExpansionRegion`, the `expanded_file_id` can be used to find + /// the mapping regions created as a result of macro expansion, by checking if their file id + /// matches the expanded file id. + expanded_file_id: u32, + + /// 1-based starting line of the mapping region. + start_line: u32, + + /// 1-based starting column of the mapping region. + start_col: u32, + + /// 1-based ending line of the mapping region. + end_line: u32, + + /// 1-based ending column of the mapping region. If the high bit is set, the current + /// mapping region is a gap area. 
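+ /// (The `gap_region` constructor below encodes this by setting `end_col | (1 << 31)`.)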
+ end_col: u32, + + kind: RegionKind, + } + + impl CounterMappingRegion { + pub fn code_region( + counter: coverage_map::Counter, + file_id: u32, + start_line: u32, + start_col: u32, + end_line: u32, + end_col: u32, + ) -> Self { + Self { + counter, + file_id, + expanded_file_id: 0, + start_line, + start_col, + end_line, + end_col, + kind: RegionKind::CodeRegion, + } + } + + pub fn expansion_region( + file_id: u32, + expanded_file_id: u32, + start_line: u32, + start_col: u32, + end_line: u32, + end_col: u32, + ) -> Self { + Self { + counter: coverage_map::Counter::zero(), + file_id, + expanded_file_id, + start_line, + start_col, + end_line, + end_col, + kind: RegionKind::ExpansionRegion, + } + } + + pub fn skipped_region( + file_id: u32, + start_line: u32, + start_col: u32, + end_line: u32, + end_col: u32, + ) -> Self { + Self { + counter: coverage_map::Counter::zero(), + file_id, + expanded_file_id: 0, + start_line, + start_col, + end_line, + end_col, + kind: RegionKind::SkippedRegion, + } + } + + pub fn gap_region( + counter: coverage_map::Counter, + file_id: u32, + start_line: u32, + start_col: u32, + end_line: u32, + end_col: u32, + ) -> Self { + Self { + counter, + file_id, + expanded_file_id: 0, + start_line, + start_col, + end_line, + end_col: ((1 as u32) << 31) | end_col, + kind: RegionKind::GapRegion, + } + } + } } pub mod debuginfo { @@ -1645,33 +1784,6 @@ extern "C" { ConstraintsLen: size_t, ) -> bool; - pub fn LLVMRustCoverageSmallVectorCounterExpressionCreate() - -> &'a mut SmallVectorCounterExpression<'a>; - pub fn LLVMRustCoverageSmallVectorCounterExpressionDispose( - Container: &'a mut SmallVectorCounterExpression<'a>, - ); - pub fn LLVMRustCoverageSmallVectorCounterExpressionAdd( - Container: &mut SmallVectorCounterExpression<'a>, - Kind: rustc_codegen_ssa::coverageinfo::CounterOp, - LeftIndex: c_uint, - RightIndex: c_uint, - ); - - pub fn LLVMRustCoverageSmallVectorCounterMappingRegionCreate() - -> &'a mut SmallVectorCounterMappingRegion<'a>; - pub fn LLVMRustCoverageSmallVectorCounterMappingRegionDispose( - Container: &'a mut SmallVectorCounterMappingRegion<'a>, - ); - pub fn LLVMRustCoverageSmallVectorCounterMappingRegionAdd( - Container: &mut SmallVectorCounterMappingRegion<'a>, - Index: c_uint, - FileID: c_uint, - LineStart: c_uint, - ColumnStart: c_uint, - LineEnd: c_uint, - ColumnEnd: c_uint, - ); - #[allow(improper_ctypes)] pub fn LLVMRustCoverageWriteFilenamesSectionToBuffer( Filenames: *const *const c_char, @@ -1683,8 +1795,10 @@ extern "C" { pub fn LLVMRustCoverageWriteMappingToBuffer( VirtualFileMappingIDs: *const c_uint, NumVirtualFileMappingIDs: c_uint, - Expressions: *const SmallVectorCounterExpression<'_>, - MappingRegions: *const SmallVectorCounterMappingRegion<'_>, + Expressions: *const coverage_map::CounterExpression, + NumExpressions: c_uint, + MappingRegions: *mut coverageinfo::CounterMappingRegion, + NumMappingRegions: c_uint, BufferOut: &RustString, ); diff --git a/src/librustc_codegen_ssa/coverageinfo/ffi.rs b/src/librustc_codegen_ssa/coverageinfo/ffi.rs new file mode 100644 index 00000000000..5b04f994994 --- /dev/null +++ b/src/librustc_codegen_ssa/coverageinfo/ffi.rs @@ -0,0 +1,67 @@ +use super::map::{CounterValueReference, MappedExpressionIndex}; + +/// Aligns with [llvm::coverage::Counter::CounterKind](https://github.com/rust-lang/llvm-project/blob/rustc/10.0-2020-05-05/llvm/include/llvm/ProfileData/Coverage/CoverageMapping.h#L91) +#[derive(Copy, Clone, Debug)] +#[repr(C)] +enum CounterKind { + Zero = 0, + CounterValueReference = 1, + Expression 
= 2, +} + +/// A reference to an instance of an abstract "counter" that will yield a value in a coverage +/// report. Note that `id` has different interpretations, depending on the `kind`: +/// * For `CounterKind::Zero`, `id` is assumed to be `0` +/// * For `CounterKind::CounterValueReference`, `id` matches the `counter_id` of the injected +/// instrumentation counter (the `index` argument to the LLVM intrinsic +/// `instrprof.increment()`) +/// * For `CounterKind::Expression`, `id` is the index into the coverage map's array of +/// counter expressions. +/// Aligns with [llvm::coverage::Counter](https://github.com/rust-lang/llvm-project/blob/rustc/10.0-2020-05-05/llvm/include/llvm/ProfileData/Coverage/CoverageMapping.h#L98-L99) +/// Important: The Rust struct layout (order and types of fields) must match its C++ counterpart. +#[derive(Copy, Clone, Debug)] +#[repr(C)] +pub struct Counter { + // Important: The layout (order and types of fields) must match its C++ counterpart. + kind: CounterKind, + id: u32, +} + +impl Counter { + pub fn zero() -> Self { + Self { kind: CounterKind::Zero, id: 0 } + } + + pub fn counter_value_reference(counter_id: CounterValueReference) -> Self { + Self { kind: CounterKind::CounterValueReference, id: counter_id.into() } + } + + pub fn expression(mapped_expression_index: MappedExpressionIndex) -> Self { + Self { kind: CounterKind::Expression, id: mapped_expression_index.into() } + } +} + +/// Aligns with [llvm::coverage::CounterExpression::ExprKind](https://github.com/rust-lang/llvm-project/blob/rustc/10.0-2020-05-05/llvm/include/llvm/ProfileData/Coverage/CoverageMapping.h#L146) +#[derive(Copy, Clone, Debug)] +#[repr(C)] +pub enum ExprKind { + Subtract = 0, + Add = 1, +} + +/// Aligns with [llvm::coverage::CounterExpression](https://github.com/rust-lang/llvm-project/blob/rustc/10.0-2020-05-05/llvm/include/llvm/ProfileData/Coverage/CoverageMapping.h#L147-L148) +/// Important: The Rust struct layout (order and types of fields) must match its C++ +/// counterpart. +#[derive(Copy, Clone, Debug)] +#[repr(C)] +pub struct CounterExpression { + kind: ExprKind, + lhs: Counter, + rhs: Counter, +} + +impl CounterExpression { + pub fn new(lhs: Counter, kind: ExprKind, rhs: Counter) -> Self { + Self { kind, lhs, rhs } + } +} diff --git a/src/librustc_codegen_ssa/coverageinfo/map.rs b/src/librustc_codegen_ssa/coverageinfo/map.rs index a8ffef8bc5b..1e36c90baaf 100644 --- a/src/librustc_codegen_ssa/coverageinfo/map.rs +++ b/src/librustc_codegen_ssa/coverageinfo/map.rs @@ -1,289 +1,360 @@ -use rustc_data_structures::sync::Lrc; -use rustc_middle::mir; -use rustc_span::source_map::{Pos, SourceFile, SourceMap}; -use rustc_span::{BytePos, FileName, RealFileName}; +pub use super::ffi::*; + +use rustc_index::vec::IndexVec; +use rustc_middle::ty::Instance; +use rustc_middle::ty::TyCtxt; +use rustc_span::source_map::{Pos, SourceMap}; +use rustc_span::{BytePos, FileName, Loc, RealFileName}; use std::cmp::{Ord, Ordering}; -use std::collections::BTreeMap; use std::fmt; use std::path::PathBuf; -#[derive(Copy, Clone, Debug)] -#[repr(C)] -pub enum CounterOp { - // Note the order (and therefore the default values) is important. 
With the attribute - // `#[repr(C)]`, this enum matches the layout of the LLVM enum defined for the nested enum, - // `llvm::coverage::CounterExpression::ExprKind`, as shown in the following source snippet: - // https://github.com/rust-lang/llvm-project/blob/f208b70fbc4dee78067b3c5bd6cb92aa3ba58a1e/llvm/include/llvm/ProfileData/Coverage/CoverageMapping.h#L146 - Subtract, - Add, +rustc_index::newtype_index! { + pub struct ExpressionOperandId { + DEBUG_FORMAT = "ExpressionOperandId({})", + MAX = 0xFFFF_FFFF, + } } -#[derive(Copy, Clone, Debug)] -pub enum CoverageKind { - Counter, - CounterExpression(u32, CounterOp, u32), - Unreachable, +rustc_index::newtype_index! { + pub struct CounterValueReference { + DEBUG_FORMAT = "CounterValueReference({})", + MAX = 0xFFFF_FFFF, + } +} + +rustc_index::newtype_index! { + pub struct InjectedExpressionIndex { + DEBUG_FORMAT = "InjectedExpressionIndex({})", + MAX = 0xFFFF_FFFF, + } +} + +rustc_index::newtype_index! { + pub struct MappedExpressionIndex { + DEBUG_FORMAT = "MappedExpressionIndex({})", + MAX = 0xFFFF_FFFF, + } } #[derive(Clone, Debug)] -pub struct CoverageRegion { - pub kind: CoverageKind, - pub start_byte_pos: u32, - pub end_byte_pos: u32, +pub struct Region { + start: Loc, + end: Loc, } -impl CoverageRegion { - pub fn source_loc(&self, source_map: &SourceMap) -> Option<(Lrc, CoverageLoc)> { - let (start_file, start_line, start_col) = - lookup_file_line_col(source_map, BytePos::from_u32(self.start_byte_pos)); - let (end_file, end_line, end_col) = - lookup_file_line_col(source_map, BytePos::from_u32(self.end_byte_pos)); - let start_file_path = match &start_file.name { - FileName::Real(RealFileName::Named(path)) => path, - _ => { - bug!("start_file_path should be a RealFileName, but it was: {:?}", start_file.name) - } - }; - let end_file_path = match &end_file.name { - FileName::Real(RealFileName::Named(path)) => path, - _ => bug!("end_file_path should be a RealFileName, but it was: {:?}", end_file.name), - }; - if start_file_path == end_file_path { - Some((start_file, CoverageLoc { start_line, start_col, end_line, end_col })) - } else { - None - // FIXME(richkadel): There seems to be a problem computing the file location in - // some cases. I need to investigate this more. When I generate and show coverage - // for the example binary in the crates.io crate `json5format`, I had a couple of - // notable problems: - // - // 1. I saw a lot of coverage spans in `llvm-cov show` highlighting regions in - // various comments (not corresponding to rustdoc code), indicating a possible - // problem with the byte_pos-to-source-map implementation. - // - // 2. And (perhaps not related) when I build the aforementioned example binary with: - // `RUST_FLAGS="-Zinstrument-coverage" cargo build --example formatjson5` - // and then run that binary with - // `LLVM_PROFILE_FILE="formatjson5.profraw" ./target/debug/examples/formatjson5 \ - // some.json5` for some reason the binary generates *TWO* `.profraw` files. One - // named `default.profraw` and the other named `formatjson5.profraw` (the expected - // name, in this case). - // - // If the byte range conversion is wrong, fix it. But if it - // is right, then it is possible for the start and end to be in different files. - // Can I do something other than ignore coverages that span multiple files? - // - // If I can resolve this, remove the "Option<>" result type wrapper - // `regions_in_file_order()` accordingly. 
- } - } -} - -impl Default for CoverageRegion { - fn default() -> Self { - Self { - // The default kind (Unreachable) is a placeholder that will be overwritten before - // backend codegen. - kind: CoverageKind::Unreachable, - start_byte_pos: 0, - end_byte_pos: 0, - } - } -} - -/// A source code region used with coverage information. -#[derive(Debug, Eq, PartialEq)] -pub struct CoverageLoc { - /// The (1-based) line number of the region start. - pub start_line: u32, - /// The (1-based) column number of the region start. - pub start_col: u32, - /// The (1-based) line number of the region end. - pub end_line: u32, - /// The (1-based) column number of the region end. - pub end_col: u32, -} - -impl Ord for CoverageLoc { +impl Ord for Region { fn cmp(&self, other: &Self) -> Ordering { - (self.start_line, &self.start_col, &self.end_line, &self.end_col).cmp(&( - other.start_line, - &other.start_col, - &other.end_line, - &other.end_col, - )) + (&self.start.file.name, &self.start.line, &self.start.col, &self.end.line, &self.end.col) + .cmp(&( + &other.start.file.name, + &other.start.line, + &other.start.col, + &other.end.line, + &other.end.col, + )) } } -impl PartialOrd for CoverageLoc { +impl PartialOrd for Region { fn partial_cmp(&self, other: &Self) -> Option { Some(self.cmp(other)) } } -impl fmt::Display for CoverageLoc { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - // Customize debug format, and repeat the file name, so generated location strings are - // "clickable" in many IDEs. - write!(f, "{}:{} - {}:{}", self.start_line, self.start_col, self.end_line, self.end_col) +impl PartialEq for Region { + fn eq(&self, other: &Self) -> bool { + self.start.file.name == other.start.file.name + && self.start.line == other.start.line + && self.start.col == other.start.col + && self.end.line == other.end.line + && self.end.col == other.end.col } } -fn lookup_file_line_col(source_map: &SourceMap, byte_pos: BytePos) -> (Lrc, u32, u32) { - let found = source_map - .lookup_line(byte_pos) - .expect("should find coverage region byte position in source"); - let file = found.sf; - let line_pos = file.line_begin_pos(byte_pos); +impl Eq for Region {} - // Use 1-based indexing. 
- let line = (found.line + 1) as u32; - let col = (byte_pos - line_pos).to_u32() + 1; - - (file, line, col) +impl fmt::Display for Region { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let (file_path, start_line, start_col, end_line, end_col) = self.file_start_and_end(); + write!(f, "{:?}:{}:{} - {}:{}", file_path, start_line, start_col, end_line, end_col) + } } +impl Region { + pub fn new(source_map: &SourceMap, start_byte_pos: u32, end_byte_pos: u32) -> Self { + let start = source_map.lookup_char_pos(BytePos::from_u32(start_byte_pos)); + let end = source_map.lookup_char_pos(BytePos::from_u32(end_byte_pos)); + assert_eq!(start.file.name, end.file.name); + Self { start, end } + } + + pub fn file_start_and_end<'a>(&'a self) -> (&'a PathBuf, u32, u32, u32, u32) { + let start = &self.start; + let end = &self.end; + match &start.file.name { + FileName::Real(RealFileName::Named(path)) => ( + path, + start.line as u32, + start.col.to_u32() + 1, + end.line as u32, + end.col.to_u32() + 1, + ), + _ => { + bug!("start.file.name should be a RealFileName, but it was: {:?}", start.file.name) + } + } + } +} + +#[derive(Clone, Debug)] +pub struct ExpressionRegion { + lhs: ExpressionOperandId, + op: ExprKind, + rhs: ExpressionOperandId, + region: Region, +} + +// FIXME(richkadel): There seems to be a problem computing the file location in +// some cases. I need to investigate this more. When I generate and show coverage +// for the example binary in the crates.io crate `json5format`, I had a couple of +// notable problems: +// +// 1. I saw a lot of coverage spans in `llvm-cov show` highlighting regions in +// various comments (not corresponding to rustdoc code), indicating a possible +// problem with the byte_pos-to-source-map implementation. +// +// 2. And (perhaps not related) when I build the aforementioned example binary with: +// `RUST_FLAGS="-Zinstrument-coverage" cargo build --example formatjson5` +// and then run that binary with +// `LLVM_PROFILE_FILE="formatjson5.profraw" ./target/debug/examples/formatjson5 \ +// some.json5` for some reason the binary generates *TWO* `.profraw` files. One +// named `default.profraw` and the other named `formatjson5.profraw` (the expected +// name, in this case). +// +// 3. I think that if I eliminate regions within a function, their region_ids, +// referenced in expressions, will be wrong? I think the ids are implied by their +// array position in the final coverage map output (IIRC). +// +// 4. I suspect a problem (if not the only problem) is the SourceMap is wrong for some +// region start/end byte positions. Just like I couldn't get the function hash at +// intrinsic codegen time for external crate functions, I think the SourceMap I +// have here only applies to the local crate, and I know I have coverages that +// reference external crates. +// +// I still don't know if I fixed the hash problem correctly. If external crates +// implement the function, can't I use the coverage counters already compiled +// into those external crates? (Maybe not for generics and/or maybe not for +// macros... not sure. But I need to understand this better.) +// +// If the byte range conversion is wrong, fix it. But if it +// is right, then it is possible for the start and end to be in different files. +// Can I do something other than ignore coverages that span multiple files? +// +// If I can resolve this, remove the "Option<>" result type wrapper +// `regions_in_file_order()` accordingly. 
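The ordering supplied by `Region`'s `Ord` implementation (file name first, then start line and
column, then end line and column) is what lets the code generator group a function's counters by
file with a single sort. A minimal, self-contained sketch of the same idea, using plain tuples in
place of `Region`; the names below are illustrative and are not part of this patch:

fn sort_key(file: &str, start: (u32, u32), end: (u32, u32)) -> (String, u32, u32, u32, u32) {
    // Mirrors the comparison order above: file name, then start position, then end position.
    (file.to_string(), start.0, start.1, end.0, end.1)
}

fn main() {
    let mut regions = vec![
        ("b.rs", (3, 1), (3, 10)),
        ("a.rs", (7, 5), (9, 2)),
        ("a.rs", (2, 1), (2, 20)),
    ];
    regions.sort_unstable_by_key(|&(file, start, end)| sort_key(file, start, end));
    // Both `a.rs` regions now precede the `b.rs` region, and appear in source order.
    assert_eq!(regions[0], ("a.rs", (2, 1), (2, 20)));
    assert_eq!(regions[2], ("b.rs", (3, 1), (3, 10)));
}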
+ /// Collects all of the coverage regions associated with (a) injected counters, (b) counter /// expressions (additions or subtraction), and (c) unreachable regions (always counted as zero), -/// for a given Function. Counters and counter expressions are indexed because they can be operands -/// in an expression. This struct also stores the `function_source_hash`, computed during -/// instrumentation and forwarded with counters. +/// for a given Function. Counters and counter expressions have non-overlapping `id`s because they +/// can both be operands in an expression. This struct also stores the `function_source_hash`, +/// computed during instrumentation, and forwarded with counters. /// -/// Note, it's important to distinguish the `unreachable` region type from what LLVM's refers to as -/// a "gap region" (or "gap area"). A gap region is a code region within a counted region (either -/// counter or expression), but the line or lines in the gap region are not executable (such as -/// lines with only whitespace or comments). According to LLVM Code Coverage Mapping documentation, -/// "A count for a gap area is only used as the line execution count if there are no other regions -/// on a line." -pub struct FunctionCoverage { +/// Note, it may be important to understand LLVM's definitions of `unreachable` regions versus "gap +/// regions" (or "gap areas"). A gap region is a code region within a counted region (either counter +/// or expression), but the line or lines in the gap region are not executable (such as lines with +/// only whitespace or comments). According to LLVM Code Coverage Mapping documentation, "A count +/// for a gap area is only used as the line execution count if there are no other regions on a +/// line." +pub struct FunctionCoverage<'a> { + source_map: &'a SourceMap, source_hash: u64, - counters: Vec, - expressions: Vec, - unreachable: Vec, - translated: bool, + counters: IndexVec>, + expressions: IndexVec>, + unreachable_regions: Vec, } -impl FunctionCoverage { - pub fn with_coverageinfo<'tcx>(coverageinfo: &'tcx mir::CoverageInfo) -> Self { +impl<'a> FunctionCoverage<'a> { + pub fn new<'tcx: 'a>(tcx: TyCtxt<'tcx>, instance: Instance<'tcx>) -> Self { + let coverageinfo = tcx.coverageinfo(instance.def_id()); Self { + source_map: tcx.sess.source_map(), source_hash: 0, // will be set with the first `add_counter()` - counters: vec![CoverageRegion::default(); coverageinfo.num_counters as usize], - expressions: vec![CoverageRegion::default(); coverageinfo.num_expressions as usize], - unreachable: Vec::new(), - translated: false, + counters: IndexVec::from_elem_n(None, coverageinfo.num_counters as usize), + expressions: IndexVec::from_elem_n(None, coverageinfo.num_expressions as usize), + unreachable_regions: Vec::new(), } } - /// Adds a code region to be counted by an injected counter intrinsic. Return a counter ID - /// for the call. + /// Adds a code region to be counted by an injected counter intrinsic. + /// The source_hash (computed during coverage instrumentation) should also be provided, and + /// should be the same for all counters in a given function. 
pub fn add_counter( &mut self, source_hash: u64, - index: u32, + id: u32, start_byte_pos: u32, end_byte_pos: u32, ) { - self.source_hash = source_hash; - self.counters[index as usize] = - CoverageRegion { kind: CoverageKind::Counter, start_byte_pos, end_byte_pos }; + if self.source_hash == 0 { + self.source_hash = source_hash; + } else { + debug_assert_eq!(source_hash, self.source_hash); + } + self.counters[CounterValueReference::from(id)] + .replace(Region::new(self.source_map, start_byte_pos, end_byte_pos)) + .expect_none("add_counter called with duplicate `id`"); } + /// Both counters and "counter expressions" (or simply, "expressions") can be operands in other + /// expressions. Expression IDs start from `u32::MAX` and go down, so the range of expression + /// IDs will not overlap with the range of counter IDs. Counters and expressions can be added in + /// any order, and expressions can still be assigned contiguous (though descending) IDs, without + /// knowing what the last counter ID will be. + /// + /// When storing the expression data in the `expressions` vector in the `FunctionCoverage` + /// struct, its vector index is computed, from the given expression ID, by subtracting from + /// `u32::MAX`. + /// + /// Since the expression operands (`lhs` and `rhs`) can reference either counters or + /// expressions, an operand that references an expression also uses its original ID, descending + /// from `u32::MAX`. Theses operands are translated only during code generation, after all + /// counters and expressions have been added. pub fn add_counter_expression( &mut self, - translated_index: u32, + id_descending_from_max: u32, lhs: u32, - op: CounterOp, + op: ExprKind, rhs: u32, start_byte_pos: u32, end_byte_pos: u32, ) { - let index = u32::MAX - translated_index; - // Counter expressions start with "translated indexes", descending from `u32::MAX`, so - // the range of expression indexes is disjoint from the range of counter indexes. This way, - // both counters and expressions can be operands in other expressions. - // - // Once all counters have been added, the final "region index" for an expression is - // `counters.len() + expression_index` (where `expression_index` is its index in - // `self.expressions`), and the expression operands (`lhs` and `rhs`) can be converted to - // final "region index" references by the same conversion, after subtracting from - // `u32::MAX`. - self.expressions[index as usize] = CoverageRegion { - kind: CoverageKind::CounterExpression(lhs, op, rhs), - start_byte_pos, - end_byte_pos, - }; + let expression_id = ExpressionOperandId::from(id_descending_from_max); + let lhs = ExpressionOperandId::from(lhs); + let rhs = ExpressionOperandId::from(rhs); + + let expression_index = self.expression_index(expression_id); + self.expressions[expression_index] + .replace(ExpressionRegion { + lhs, + op, + rhs, + region: Region::new(self.source_map, start_byte_pos, end_byte_pos), + }) + .expect_none("add_counter_expression called with duplicate `id_descending_from_max`"); } - pub fn add_unreachable(&mut self, start_byte_pos: u32, end_byte_pos: u32) { - self.unreachable.push(CoverageRegion { - kind: CoverageKind::Unreachable, - start_byte_pos, - end_byte_pos, - }); + /// Add a region that will be marked as "unreachable", with a constant "zero counter". 
+ pub fn add_unreachable_region(&mut self, start_byte_pos: u32, end_byte_pos: u32) { + self.unreachable_regions.push(Region::new(self.source_map, start_byte_pos, end_byte_pos)); } + /// Return the source hash, generated from the HIR node structure, and used to indicate whether + /// or not the source code structure changed between different compilations. pub fn source_hash(&self) -> u64 { self.source_hash } - fn regions(&'a mut self) -> impl Iterator { + /// Generate an array of CounterExpressions, and an iterator over all `Counter`s and their + /// associated `Regions` (from which the LLVM-specific `CoverageMapGenerator` will create + /// `CounterMappingRegion`s. + pub fn get_expressions_and_counter_regions( + &'a self, + ) -> (Vec, impl Iterator) { assert!(self.source_hash != 0); - self.ensure_expressions_translated(); - self.counters.iter().chain(self.expressions.iter().chain(self.unreachable.iter())) + + let counter_regions = self.counter_regions(); + let (counter_expressions, expression_regions) = self.expressions_with_regions(); + let unreachable_regions = self.unreachable_regions(); + + let counter_regions = + counter_regions.chain(expression_regions.into_iter().chain(unreachable_regions)); + (counter_expressions, counter_regions) } - pub fn regions_in_file_order( - &'a mut self, - source_map: &SourceMap, - ) -> BTreeMap> { - let mut regions_in_file_order = BTreeMap::new(); - for (region_id, region) in self.regions().enumerate() { - if let Some((source_file, region_loc)) = region.source_loc(source_map) { - // FIXME(richkadel): `region.source_loc()` sometimes fails with two different - // filenames for the start and end byte position. This seems wrong, but for - // now, if encountered, the region is skipped. If resolved, convert the result - // to a non-option value so regions are never skipped. - let real_file_path = match &(*source_file).name { - FileName::Real(RealFileName::Named(path)) => path.clone(), - _ => bug!("coverage mapping expected only real, named files"), - }; - let file_coverage_regions = - regions_in_file_order.entry(real_file_path).or_insert_with(|| BTreeMap::new()); - file_coverage_regions.insert(region_loc, (region_id, region.kind)); - } - } - regions_in_file_order + fn counter_regions(&'a self) -> impl Iterator { + self.counters.iter_enumerated().filter_map(|(index, entry)| { + // Option::map() will return None to filter out missing counters. This may happen + // if, for example, a MIR-instrumented counter is removed during an optimization. + entry.as_ref().map(|region| { + (Counter::counter_value_reference(index as CounterValueReference), region) + }) + }) } - /// A one-time translation of expression operands is needed, for any operands referencing - /// other CounterExpressions. CounterExpression operands get an initial operand ID that is - /// computed by the simple translation: `u32::max - expression_index` because, when created, - /// the total number of Counters is not yet known. This function recomputes region indexes - /// for expressions so they start with the next region index after the last counter index. 
- fn ensure_expressions_translated(&mut self) { - if !self.translated { - self.translated = true; - let start = self.counters.len() as u32; - assert!( - (start as u64 + self.expressions.len() as u64) < u32::MAX as u64, - "the number of counters and counter expressions in a single function exceeds {}", - u32::MAX - ); - for region in self.expressions.iter_mut() { - match region.kind { - CoverageKind::CounterExpression(lhs, op, rhs) => { - let lhs = to_region_index(start, lhs); - let rhs = to_region_index(start, rhs); - region.kind = CoverageKind::CounterExpression(lhs, op, rhs); - } - _ => bug!("expressions must only contain CounterExpression kinds"), + fn expressions_with_regions( + &'a self, + ) -> (Vec, impl Iterator) { + let mut counter_expressions = Vec::with_capacity(self.expressions.len()); + let mut expression_regions = Vec::with_capacity(self.expressions.len()); + let mut new_indexes = + IndexVec::from_elem_n(MappedExpressionIndex::from(u32::MAX), self.expressions.len()); + // Note, the initial value shouldn't matter since every index in use in `self.expressions` + // will be set, and after that, `new_indexes` will only be accessed using those same + // indexes. + + // Note that an `ExpressionRegion`s at any given index can include other expressions as + // operands, but expression operands can only come from the subset of expressions having + // `expression_index`s lower than the referencing `ExpressionRegion`. Therefore, it is + // reasonable to look up the new index of an expression operand while the `new_indexes` + // vector is only complete up to the current `ExpressionIndex`. + let id_to_counter = + |new_indexes: &IndexVec, + id: ExpressionOperandId| { + if id.index() < self.counters.len() { + let index = CounterValueReference::from(id.index()); + self.counters + .get(index) + .unwrap() // pre-validated + .as_ref() + .map(|_| Counter::counter_value_reference(index)) + } else { + let index = self.expression_index(id); + self.expressions + .get(index) + .expect("expression id is out of range") + .as_ref() + .map(|_| Counter::expression(new_indexes[index])) } + }; + + for (original_index, expression_region) in + self.expressions.iter_enumerated().filter_map(|(original_index, entry)| { + // Option::map() will return None to filter out missing expressions. This may happen + // if, for example, a MIR-instrumented expression is removed during an optimization. + entry.as_ref().map(|region| (original_index, region)) + }) + { + let region = &expression_region.region; + let ExpressionRegion { lhs, op, rhs, .. } = *expression_region; + + if let Some(Some((lhs_counter, rhs_counter))) = + id_to_counter(&new_indexes, lhs).map(|lhs_counter| { + id_to_counter(&new_indexes, rhs).map(|rhs_counter| (lhs_counter, rhs_counter)) + }) + { + // Both operands exist. `Expression` operands exist in `self.expressions` and have + // been assigned a `new_index`. 
+ let mapped_expression_index = + MappedExpressionIndex::from(counter_expressions.len()); + counter_expressions.push(CounterExpression::new(lhs_counter, op, rhs_counter)); + new_indexes[original_index] = mapped_expression_index; + expression_regions.push((Counter::expression(mapped_expression_index), region)); } } + (counter_expressions, expression_regions.into_iter()) + } + + fn unreachable_regions(&'a self) -> impl Iterator { + self.unreachable_regions.iter().map(|region| (Counter::zero(), region)) + } + + fn expression_index( + &self, + id_descending_from_max: ExpressionOperandId, + ) -> InjectedExpressionIndex { + debug_assert!(id_descending_from_max.index() >= self.counters.len()); + InjectedExpressionIndex::from(u32::MAX - u32::from(id_descending_from_max)) } } - -fn to_region_index(start: u32, index: u32) -> u32 { - if index < start { index } else { start + (u32::MAX - index) } -} diff --git a/src/librustc_codegen_ssa/coverageinfo/mod.rs b/src/librustc_codegen_ssa/coverageinfo/mod.rs index 304f8e19da4..1f0ffd289b1 100644 --- a/src/librustc_codegen_ssa/coverageinfo/mod.rs +++ b/src/librustc_codegen_ssa/coverageinfo/mod.rs @@ -1,3 +1,4 @@ +pub mod ffi; pub mod map; -pub use map::CounterOp; +pub use map::ExprKind; diff --git a/src/librustc_codegen_ssa/lib.rs b/src/librustc_codegen_ssa/lib.rs index bdd73c08313..85260d30a3d 100644 --- a/src/librustc_codegen_ssa/lib.rs +++ b/src/librustc_codegen_ssa/lib.rs @@ -1,5 +1,6 @@ #![doc(html_root_url = "https://doc.rust-lang.org/nightly/")] #![feature(bool_to_option)] +#![feature(option_expect_none)] #![feature(box_patterns)] #![feature(try_blocks)] #![feature(in_band_lifetimes)] @@ -7,6 +8,8 @@ #![feature(or_patterns)] #![feature(trusted_len)] #![feature(associated_type_bounds)] +#![feature(const_fn)] // for rustc_index::newtype_index +#![feature(const_panic)] // for rustc_index::newtype_index #![recursion_limit = "256"] //! This crate contains codegen code that is used by all codegen backends (LLVM and others). 
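To make the expression re-indexing in `expressions_with_regions` (above) more concrete, here is a
small, self-contained sketch of the same idea. It is simplified (no `Option` slots for expressions
removed by MIR optimizations, and plain enums instead of the compiler's newtype indexes), and every
name in it is illustrative rather than code from this patch:

#[derive(Clone, Copy, Debug, PartialEq)]
enum Operand {
    Counter(u32),            // counter ID, counting up from 0
    InjectedExpression(u32), // expression ID assigned during instrumentation, counting down from u32::MAX
}

#[derive(Clone, Copy, Debug, PartialEq)]
enum MappedOperand {
    Counter(u32),
    Expression(u32), // index into the packed expression array produced by `pack`
}

fn rewrite(new_indexes: &[u32], operand: Operand) -> MappedOperand {
    match operand {
        Operand::Counter(id) => MappedOperand::Counter(id),
        // Assumes the referenced expression was injected (and therefore packed) earlier,
        // matching the invariant described in the comments above.
        Operand::InjectedExpression(id) => {
            MappedOperand::Expression(new_indexes[(u32::MAX - id) as usize])
        }
    }
}

// `expressions` holds (injected ID, lhs, rhs) triples in injection order.
fn pack(expressions: &[(u32, Operand, Operand)]) -> Vec<(MappedOperand, MappedOperand)> {
    let mut new_indexes = vec![u32::MAX; expressions.len()]; // injected index -> packed index
    let mut packed = Vec::with_capacity(expressions.len());
    for &(injected_id, lhs, rhs) in expressions {
        let (lhs, rhs) = (rewrite(&new_indexes, lhs), rewrite(&new_indexes, rhs));
        new_indexes[(u32::MAX - injected_id) as usize] = packed.len() as u32;
        packed.push((lhs, rhs));
    }
    packed
}

fn main() {
    // Expression u32::MAX adds counters 0 and 1; expression u32::MAX - 1 then adds counter 2
    // to the first expression's result.
    let packed = pack(&[
        (u32::MAX, Operand::Counter(0), Operand::Counter(1)),
        (u32::MAX - 1, Operand::InjectedExpression(u32::MAX), Operand::Counter(2)),
    ]);
    assert_eq!(packed[1], (MappedOperand::Expression(0), MappedOperand::Counter(2)));
}

The important part is the `new_indexes` table: it translates an injected, descending expression ID
into the expression's final position in the packed array, which is how operands that reference
other expressions are rewritten before the expressions are handed to LLVM.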
diff --git a/src/librustc_codegen_ssa/traits/coverageinfo.rs b/src/librustc_codegen_ssa/traits/coverageinfo.rs index 1b9faa42484..db1d86c974e 100644 --- a/src/librustc_codegen_ssa/traits/coverageinfo.rs +++ b/src/librustc_codegen_ssa/traits/coverageinfo.rs @@ -1,5 +1,5 @@ use super::BackendTypes; -use crate::coverageinfo::CounterOp; +use crate::coverageinfo::ExprKind; use rustc_middle::ty::Instance; pub trait CoverageInfoMethods: BackendTypes { @@ -21,7 +21,7 @@ pub trait CoverageInfoBuilderMethods<'tcx>: BackendTypes { instance: Instance<'tcx>, index: u32, lhs: u32, - op: CounterOp, + op: ExprKind, rhs: u32, start_byte_pos: u32, end_byte_pos: u32, diff --git a/src/librustc_middle/mir/coverage/mod.rs b/src/librustc_middle/mir/coverage/mod.rs index 1e06dadfa24..82365ef6a73 100644 --- a/src/librustc_middle/mir/coverage/mod.rs +++ b/src/librustc_middle/mir/coverage/mod.rs @@ -3,7 +3,7 @@ /// Positional arguments to `libcore::count_code_region()` pub mod count_code_region_args { pub const FUNCTION_SOURCE_HASH: usize = 0; - pub const COUNTER_INDEX: usize = 1; + pub const COUNTER_ID: usize = 1; pub const START_BYTE_POS: usize = 2; pub const END_BYTE_POS: usize = 3; } @@ -11,9 +11,9 @@ pub mod count_code_region_args { /// Positional arguments to `libcore::coverage_counter_add()` and /// `libcore::coverage_counter_subtract()` pub mod coverage_counter_expression_args { - pub const COUNTER_EXPRESSION_INDEX: usize = 0; - pub const LEFT_INDEX: usize = 1; - pub const RIGHT_INDEX: usize = 2; + pub const EXPRESSION_ID: usize = 0; + pub const LEFT_ID: usize = 1; + pub const RIGHT_ID: usize = 2; pub const START_BYTE_POS: usize = 3; pub const END_BYTE_POS: usize = 4; } diff --git a/src/librustc_mir/transform/instrument_coverage.rs b/src/librustc_mir/transform/instrument_coverage.rs index 9933a975e4d..fe63a67fdbb 100644 --- a/src/librustc_mir/transform/instrument_coverage.rs +++ b/src/librustc_mir/transform/instrument_coverage.rs @@ -7,10 +7,9 @@ use rustc_middle::hir; use rustc_middle::ich::StableHashingContext; use rustc_middle::mir::coverage::*; use rustc_middle::mir::interpret::Scalar; -use rustc_middle::mir::CoverageInfo; use rustc_middle::mir::{ - self, traversal, BasicBlock, BasicBlockData, Operand, Place, SourceInfo, StatementKind, - Terminator, TerminatorKind, START_BLOCK, + self, traversal, BasicBlock, BasicBlockData, CoverageInfo, Operand, Place, SourceInfo, + SourceScope, StatementKind, Terminator, TerminatorKind, }; use rustc_middle::ty; use rustc_middle::ty::query::Providers; @@ -41,14 +40,14 @@ fn coverageinfo_from_mir<'tcx>(tcx: TyCtxt<'tcx>, mir_def_id: DefId) -> Coverage tcx.require_lang_item(lang_items::CoverageCounterSubtractFnLangItem, None); // The `num_counters` argument to `llvm.instrprof.increment` is the number of injected - // counters, with each counter having an index from `0..num_counters-1`. MIR optimization + // counters, with each counter having a counter ID from `0..num_counters-1`. MIR optimization // may split and duplicate some BasicBlock sequences. Simply counting the calls may not - // not work; but computing the num_counters by adding `1` to the highest index (for a given + // work; but computing the num_counters by adding `1` to the highest counter_id (for a given // instrumented function) is valid. // // `num_expressions` is the number of counter expressions added to the MIR body. Both // `num_counters` and `num_expressions` are used to initialize new vectors, during backend - // code generate, to lookup counters and expressions by their simple u32 indexes. 
+ // code generate, to lookup counters and expressions by simple u32 indexes. let mut num_counters: u32 = 0; let mut num_expressions: u32 = 0; for terminator in @@ -57,27 +56,26 @@ fn coverageinfo_from_mir<'tcx>(tcx: TyCtxt<'tcx>, mir_def_id: DefId) -> Coverage if let TerminatorKind::Call { func: Operand::Constant(func), args, .. } = &terminator.kind { match func.literal.ty.kind { FnDef(id, _) if id == count_code_region_fn => { - let index_arg = - args.get(count_code_region_args::COUNTER_INDEX).expect("arg found"); - let counter_index = mir::Operand::scalar_from_const(index_arg) + let counter_id_arg = + args.get(count_code_region_args::COUNTER_ID).expect("arg found"); + let counter_id = mir::Operand::scalar_from_const(counter_id_arg) .to_u32() - .expect("index arg is u32"); - num_counters = std::cmp::max(num_counters, counter_index + 1); + .expect("counter_id arg is u32"); + num_counters = std::cmp::max(num_counters, counter_id + 1); } FnDef(id, _) if id == coverage_counter_add_fn || id == coverage_counter_subtract_fn => { - let index_arg = args - .get(coverage_counter_expression_args::COUNTER_EXPRESSION_INDEX) + let expression_id_arg = args + .get(coverage_counter_expression_args::EXPRESSION_ID) .expect("arg found"); - let translated_index = mir::Operand::scalar_from_const(index_arg) + let id_descending_from_max = mir::Operand::scalar_from_const(expression_id_arg) .to_u32() - .expect("index arg is u32"); - // Counter expressions start with "translated indexes", descending from - // `u32::MAX`, so the range of expression indexes is disjoint from the range of - // counter indexes. This way, both counters and expressions can be operands in - // other expressions. - let expression_index = u32::MAX - translated_index; + .expect("expression_id arg is u32"); + // Counter expressions are initially assigned IDs descending from `u32::MAX`, so + // the range of expression IDs is disjoint from the range of counter IDs. This + // way, both counters and expressions can be operands in other expressions. + let expression_index = u32::MAX - id_descending_from_max; num_expressions = std::cmp::max(num_expressions, expression_index + 1); } _ => {} @@ -97,12 +95,10 @@ fn call_terminators(data: &'tcx BasicBlockData<'tcx>) -> Option<&'tcx Terminator impl<'tcx> MirPass<'tcx> for InstrumentCoverage { fn run_pass(&self, tcx: TyCtxt<'tcx>, src: MirSource<'tcx>, mir_body: &mut mir::Body<'tcx>) { - if tcx.sess.opts.debugging_opts.instrument_coverage { - // If the InstrumentCoverage pass is called on promoted MIRs, skip them. - // See: https://github.com/rust-lang/rust/pull/73011#discussion_r438317601 - if src.promoted.is_none() { - Instrumentor::new(tcx, src, mir_body).inject_counters(); - } + // If the InstrumentCoverage pass is called on promoted MIRs, skip them. + // See: https://github.com/rust-lang/rust/pull/73011#discussion_r438317601 + if src.promoted.is_none() { + Instrumentor::new(tcx, src, mir_body).inject_counters(); } } } @@ -113,6 +109,12 @@ enum Op { Subtract, } +struct InjectedCall<'tcx> { + func: Operand<'tcx>, + args: Vec>, + inject_at: Span, +} + struct Instrumentor<'a, 'tcx> { tcx: TyCtxt<'tcx>, mir_def_id: DefId, @@ -147,11 +149,8 @@ impl<'a, 'tcx> Instrumentor<'a, 'tcx> { } /// Expression IDs start from u32::MAX and go down because a CounterExpression can reference - /// (add or subtract counts) of both Counter regions and CounterExpression regions. The indexes - /// of each type of region must be contiguous, but also must be unique across both sets. 
- /// The expression IDs are eventually translated into region indexes (starting after the last - /// counter index, for the given function), during backend code generation, by the helper method - /// `rustc_codegen_ssa::coverageinfo::map::FunctionCoverage::translate_expressions()`. + /// (add or subtract counts) of both Counter regions and CounterExpression regions. The counter + /// expression operand IDs must be unique across both types. fn next_expression(&mut self) -> u32 { assert!(self.num_counters < u32::MAX - self.num_expressions); let next = u32::MAX - self.num_expressions; @@ -171,17 +170,25 @@ impl<'a, 'tcx> Instrumentor<'a, 'tcx> { } fn inject_counters(&mut self) { + let mir_body = &self.mir_body; let body_span = self.hir_body.value.span; - debug!( - "instrumenting {:?}, span: {}", - self.mir_def_id, - self.tcx.sess.source_map().span_to_string(body_span) - ); + debug!("instrumenting {:?}, span: {:?}", self.mir_def_id, body_span); // FIXME(richkadel): As a first step, counters are only injected at the top of each // function. The complete solution will inject counters at each conditional code branch. - let next_block = START_BLOCK; - self.inject_counter(body_span, next_block); + let _ignore = mir_body; + let id = self.next_counter(); + let function_source_hash = self.function_source_hash(); + let code_region = body_span; + let scope = rustc_middle::mir::OUTERMOST_SOURCE_SCOPE; + let is_cleanup = false; + let next_block = rustc_middle::mir::START_BLOCK; + self.inject_call( + self.make_counter(id, function_source_hash, code_region), + scope, + is_cleanup, + next_block, + ); // FIXME(richkadel): The next step to implement source based coverage analysis will be // instrumenting branches within functions, and some regions will be counted by "counter @@ -190,57 +197,68 @@ impl<'a, 'tcx> Instrumentor<'a, 'tcx> { let fake_use = false; if fake_use { let add = false; - if add { - self.inject_counter_expression(body_span, next_block, 1, Op::Add, 2); - } else { - self.inject_counter_expression(body_span, next_block, 1, Op::Subtract, 2); - } + let lhs = 1; + let op = if add { Op::Add } else { Op::Subtract }; + let rhs = 2; + + let code_region = body_span; + let scope = rustc_middle::mir::OUTERMOST_SOURCE_SCOPE; + let is_cleanup = false; + let next_block = rustc_middle::mir::START_BLOCK; + + let id = self.next_expression(); + self.inject_call( + self.make_expression(id, code_region, lhs, op, rhs), + scope, + is_cleanup, + next_block, + ); } } - fn inject_counter(&mut self, code_region: Span, next_block: BasicBlock) -> u32 { - let counter_id = self.next_counter(); - let function_source_hash = self.function_source_hash(); - let injection_point = code_region.shrink_to_lo(); + fn make_counter( + &self, + id: u32, + function_source_hash: u64, + code_region: Span, + ) -> InjectedCall<'tcx> { + let inject_at = code_region.shrink_to_lo(); - let count_code_region_fn = function_handle( + let func = function_handle( self.tcx, self.tcx.require_lang_item(lang_items::CountCodeRegionFnLangItem, None), - injection_point, + inject_at, ); let mut args = Vec::new(); use count_code_region_args::*; debug_assert_eq!(FUNCTION_SOURCE_HASH, args.len()); - args.push(self.const_u64(function_source_hash, injection_point)); + args.push(self.const_u64(function_source_hash, inject_at)); - debug_assert_eq!(COUNTER_INDEX, args.len()); - args.push(self.const_u32(counter_id, injection_point)); + debug_assert_eq!(COUNTER_ID, args.len()); + args.push(self.const_u32(id, inject_at)); debug_assert_eq!(START_BYTE_POS, args.len()); - 
args.push(self.const_u32(code_region.lo().to_u32(), injection_point)); + args.push(self.const_u32(code_region.lo().to_u32(), inject_at)); debug_assert_eq!(END_BYTE_POS, args.len()); - args.push(self.const_u32(code_region.hi().to_u32(), injection_point)); + args.push(self.const_u32(code_region.hi().to_u32(), inject_at)); - self.inject_call(count_code_region_fn, args, injection_point, next_block); - - counter_id + InjectedCall { func, args, inject_at } } - fn inject_counter_expression( - &mut self, + fn make_expression( + &self, + id: u32, code_region: Span, - next_block: BasicBlock, lhs: u32, op: Op, rhs: u32, - ) -> u32 { - let expression_id = self.next_expression(); - let injection_point = code_region.shrink_to_lo(); + ) -> InjectedCall<'tcx> { + let inject_at = code_region.shrink_to_lo(); - let count_code_region_fn = function_handle( + let func = function_handle( self.tcx, self.tcx.require_lang_item( match op { @@ -249,43 +267,51 @@ impl<'a, 'tcx> Instrumentor<'a, 'tcx> { }, None, ), - injection_point, + inject_at, ); let mut args = Vec::new(); use coverage_counter_expression_args::*; - debug_assert_eq!(COUNTER_EXPRESSION_INDEX, args.len()); - args.push(self.const_u32(expression_id, injection_point)); + debug_assert_eq!(EXPRESSION_ID, args.len()); + args.push(self.const_u32(id, inject_at)); - debug_assert_eq!(LEFT_INDEX, args.len()); - args.push(self.const_u32(lhs, injection_point)); + debug_assert_eq!(LEFT_ID, args.len()); + args.push(self.const_u32(lhs, inject_at)); - debug_assert_eq!(RIGHT_INDEX, args.len()); - args.push(self.const_u32(rhs, injection_point)); + debug_assert_eq!(RIGHT_ID, args.len()); + args.push(self.const_u32(rhs, inject_at)); debug_assert_eq!(START_BYTE_POS, args.len()); - args.push(self.const_u32(code_region.lo().to_u32(), injection_point)); + args.push(self.const_u32(code_region.lo().to_u32(), inject_at)); debug_assert_eq!(END_BYTE_POS, args.len()); - args.push(self.const_u32(code_region.hi().to_u32(), injection_point)); + args.push(self.const_u32(code_region.hi().to_u32(), inject_at)); - self.inject_call(count_code_region_fn, args, injection_point, next_block); - - expression_id + InjectedCall { func, args, inject_at } } fn inject_call( &mut self, - func: Operand<'tcx>, - args: Vec>, - fn_span: Span, + call: InjectedCall<'tcx>, + scope: SourceScope, + is_cleanup: bool, next_block: BasicBlock, ) { + let InjectedCall { func, args, inject_at } = call; + debug!( + " injecting {}call to {:?}({:?}) at: {:?}, scope: {:?}", + if is_cleanup { "cleanup " } else { "" }, + func, + args, + inject_at, + scope, + ); + let mut patch = MirPatch::new(self.mir_body); - let temp = patch.new_temp(self.tcx.mk_unit(), fn_span); - let new_block = patch.new_block(placeholder_block(fn_span)); + let temp = patch.new_temp(self.tcx.mk_unit(), inject_at); + let new_block = patch.new_block(placeholder_block(inject_at, scope, is_cleanup)); patch.patch_terminator( new_block, TerminatorKind::Call { @@ -295,7 +321,7 @@ impl<'a, 'tcx> Instrumentor<'a, 'tcx> { destination: Some((Place::from(temp), new_block)), cleanup: None, from_hir_call: false, - fn_span, + fn_span: inject_at, }, ); @@ -325,15 +351,15 @@ fn function_handle<'tcx>(tcx: TyCtxt<'tcx>, fn_def_id: DefId, span: Span) -> Ope Operand::function_handle(tcx, fn_def_id, substs, span) } -fn placeholder_block(span: Span) -> BasicBlockData<'tcx> { +fn placeholder_block(span: Span, scope: SourceScope, is_cleanup: bool) -> BasicBlockData<'tcx> { BasicBlockData { statements: vec![], terminator: Some(Terminator { - source_info: 
SourceInfo::outermost(span), + source_info: SourceInfo { span, scope }, // this gets overwritten by the counter Call kind: TerminatorKind::Unreachable, }), - is_cleanup: false, + is_cleanup, } } diff --git a/src/librustc_mir/transform/mod.rs b/src/librustc_mir/transform/mod.rs index 283e4b289f2..26b4a696897 100644 --- a/src/librustc_mir/transform/mod.rs +++ b/src/librustc_mir/transform/mod.rs @@ -332,21 +332,25 @@ fn mir_validated( body.required_consts = required_consts; let promote_pass = promote_consts::PromoteTemps::default(); + let promote: &[&dyn MirPass<'tcx>] = &[ + // What we need to run borrowck etc. + &promote_pass, + &simplify::SimplifyCfg::new("qualify-consts"), + ]; + + let opt_coverage: &[&dyn MirPass<'tcx>] = if tcx.sess.opts.debugging_opts.instrument_coverage { + &[&instrument_coverage::InstrumentCoverage] + } else { + &[] + }; + run_passes( tcx, &mut body, InstanceDef::Item(def.to_global()), None, MirPhase::Validated, - &[&[ - // What we need to run borrowck etc. - &promote_pass, - &simplify::SimplifyCfg::new("qualify-consts"), - // If the `instrument-coverage` option is enabled, analyze the CFG, identify each - // conditional branch, construct a coverage map to be passed to LLVM, and inject counters - // where needed. - &instrument_coverage::InstrumentCoverage, - ]], + &[promote, opt_coverage], ); let promoted = promote_pass.promoted_fragments.into_inner(); diff --git a/src/librustc_session/options.rs b/src/librustc_session/options.rs index 502102fa178..80164840334 100644 --- a/src/librustc_session/options.rs +++ b/src/librustc_session/options.rs @@ -883,7 +883,7 @@ options! {DebuggingOptions, DebuggingSetter, basic_debugging_options, "instrument the generated code to support LLVM source-based code coverage \ reports (note, the compiler build config must include `profiler = true`, \ and is mutually exclusive with `-C profile-generate`/`-C profile-use`); \ - implies `-C link-dead-code` (unless explicitly disabled)` and + implies `-C link-dead-code` (unless explicitly disabled)` and \ `-Z symbol-mangling-version=v0`; and disables/overrides some optimization \ options (default: no)"), instrument_mcount: bool = (false, parse_bool, [TRACKED], diff --git a/src/rustllvm/CoverageMappingWrapper.cpp b/src/rustllvm/CoverageMappingWrapper.cpp index c6c4cdb5562..7c8481540aa 100644 --- a/src/rustllvm/CoverageMappingWrapper.cpp +++ b/src/rustllvm/CoverageMappingWrapper.cpp @@ -8,60 +8,6 @@ using namespace llvm; -extern "C" SmallVectorTemplateBase - *LLVMRustCoverageSmallVectorCounterExpressionCreate() { - return new SmallVector(); -} - -extern "C" void LLVMRustCoverageSmallVectorCounterExpressionDispose( - SmallVectorTemplateBase *Vector) { - delete Vector; -} - -extern "C" void LLVMRustCoverageSmallVectorCounterExpressionAdd( - SmallVectorTemplateBase *Expressions, - coverage::CounterExpression::ExprKind Kind, - unsigned LeftIndex, - unsigned RightIndex) { - auto LHS = coverage::Counter::getCounter(LeftIndex); - auto RHS = coverage::Counter::getCounter(RightIndex); - Expressions->push_back(coverage::CounterExpression { Kind, LHS, RHS }); -} - -extern "C" SmallVectorTemplateBase - *LLVMRustCoverageSmallVectorCounterMappingRegionCreate() { - return new SmallVector(); -} - -extern "C" void LLVMRustCoverageSmallVectorCounterMappingRegionDispose( - SmallVectorTemplateBase *Vector) { - delete Vector; -} - -extern "C" void LLVMRustCoverageSmallVectorCounterMappingRegionAdd( - SmallVectorTemplateBase *MappingRegions, - unsigned Index, - unsigned FileID, - unsigned LineStart, - unsigned 
ColumnStart, - unsigned LineEnd, - unsigned ColumnEnd) { - auto Counter = coverage::Counter::getCounter(Index); - MappingRegions->push_back(coverage::CounterMappingRegion::makeRegion( - Counter, FileID, LineStart, - ColumnStart, LineEnd, ColumnEnd)); - - // FIXME(richkadel): As applicable, implement additional CounterMappingRegion types using the - // static method alternatives to `coverage::CounterMappingRegion::makeRegion`: - // - // makeExpansion(unsigned FileID, unsigned ExpandedFileID, unsigned LineStart, - // unsigned ColumnStart, unsigned LineEnd, unsigned ColumnEnd) { - // makeSkipped(unsigned FileID, unsigned LineStart, unsigned ColumnStart, - // unsigned LineEnd, unsigned ColumnEnd) { - // makeGapRegion(Counter Count, unsigned FileID, unsigned LineStart, - // unsigned ColumnStart, unsigned LineEnd, unsigned ColumnEnd) { -} - extern "C" void LLVMRustCoverageWriteFilenamesSectionToBuffer( const char* const Filenames[], size_t FilenamesLen, @@ -79,13 +25,15 @@ extern "C" void LLVMRustCoverageWriteFilenamesSectionToBuffer( extern "C" void LLVMRustCoverageWriteMappingToBuffer( const unsigned *VirtualFileMappingIDs, unsigned NumVirtualFileMappingIDs, - const SmallVectorImpl *Expressions, - SmallVectorImpl *MappingRegions, + const coverage::CounterExpression *Expressions, + unsigned NumExpressions, + coverage::CounterMappingRegion *MappingRegions, + unsigned NumMappingRegions, RustStringRef BufferOut) { auto CoverageMappingWriter = coverage::CoverageMappingWriter( - makeArrayRef(VirtualFileMappingIDs, NumVirtualFileMappingIDs), - makeArrayRef(*Expressions), - MutableArrayRef { *MappingRegions }); + makeArrayRef(VirtualFileMappingIDs, NumVirtualFileMappingIDs), + makeArrayRef(Expressions, NumExpressions), + makeMutableArrayRef(MappingRegions, NumMappingRegions)); RawRustStringOstream OS(BufferOut); CoverageMappingWriter.write(OS); }
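
For reference, the counting logic in `coverageinfo_from_mir` reduces to taking max(ID) + 1 over the injected counter calls, and folding expression IDs down from u32::MAX before doing the same. A standalone sketch, with the MIR traversal replaced by plain slices of IDs (illustrative only):

    // Standalone sketch of the arithmetic in `coverageinfo_from_mir`: the MIR
    // traversal is replaced by plain slices of the IDs found in the injected
    // calls, so only the max+1 / u32::MAX - id logic mirrors the diff.
    fn compute_coverage_info(counter_ids: &[u32], expression_ids: &[u32]) -> (u32, u32) {
        let mut num_counters: u32 = 0;
        let mut num_expressions: u32 = 0;
        for &counter_id in counter_ids {
            // One more than the highest counter ID is valid even if MIR
            // optimizations duplicated or dropped individual calls.
            num_counters = num_counters.max(counter_id + 1);
        }
        for &id_descending_from_max in expression_ids {
            // Expression IDs descend from u32::MAX, so fold them into a dense range.
            let expression_index = u32::MAX - id_descending_from_max;
            num_expressions = num_expressions.max(expression_index + 1);
        }
        (num_counters, num_expressions)
    }

    fn main() {
        let (num_counters, num_expressions) =
            compute_coverage_info(&[0, 2, 1], &[u32::MAX, u32::MAX - 1]);
        assert_eq!((num_counters, num_expressions), (3, 2));
    }
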
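The instrument_coverage refactor separates building a call (`make_counter`/`make_expression`, which return an `InjectedCall`) from applying it (`inject_call`, which patches the MIR). A toy sketch of that split; strings, tuples, and a Vec stand in for Operand, Span, and MirPatch, and none of these types are the real rustc ones:

    // Toy sketch of the make_*/inject_call split: `make_counter` only builds a
    // description of the call, and `inject_call` applies it.
    struct InjectedCall {
        func: &'static str,
        args: Vec<u32>,
        inject_at: (u32, u32), // stand-in for a Span (lo, hi)
    }

    struct Instrumentor {
        injected: Vec<InjectedCall>, // stand-in for accumulating MirPatch edits
    }

    impl Instrumentor {
        fn make_counter(&self, id: u32, code_region: (u32, u32)) -> InjectedCall {
            InjectedCall {
                func: "count_code_region",
                args: vec![id, code_region.0, code_region.1],
                inject_at: code_region,
            }
        }

        fn inject_call(&mut self, call: InjectedCall) {
            // The real pass creates a new BasicBlock via MirPatch and rewires
            // terminators; here we just record the pending call.
            self.injected.push(call);
        }
    }

    fn main() {
        let mut instrumentor = Instrumentor { injected: Vec::new() };
        let call = instrumentor.make_counter(0, (10, 42));
        instrumentor.inject_call(call);
        assert_eq!(instrumentor.injected.len(), 1);
        assert_eq!(instrumentor.injected[0].func, "count_code_region");
        assert_eq!(instrumentor.injected[0].args.len(), 3);
    }
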
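The mir_validated change moves the -Zinstrument-coverage check out of run_pass and into the pass list itself, by concatenating an always-present slice of passes with an optional one. A simplified sketch of that pattern, using a toy `Pass` trait in place of MirPass:

    // Simplified sketch of the conditional pass-list pattern in mir_validated:
    // the coverage pass appears in the list only when the flag is on, rather
    // than checking the flag inside run_pass.
    trait Pass {
        fn name(&self) -> &'static str;
    }

    struct Promote;
    struct InstrumentCoverage;

    impl Pass for Promote {
        fn name(&self) -> &'static str { "promote" }
    }

    impl Pass for InstrumentCoverage {
        fn name(&self) -> &'static str { "instrument_coverage" }
    }

    fn run_passes(groups: &[&[&dyn Pass]]) {
        for group in groups {
            for pass in *group {
                println!("running {}", pass.name());
            }
        }
    }

    fn main() {
        let instrument_coverage = false; // stand-in for -Zinstrument-coverage
        let promote: &[&dyn Pass] = &[&Promote];
        let opt_coverage: &[&dyn Pass] =
            if instrument_coverage { &[&InstrumentCoverage] } else { &[] };
        run_passes(&[promote, opt_coverage]);
    }
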
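Finally, CoverageMappingWrapper.cpp now takes plain (pointer, length) pairs instead of one SmallVector builder call per element, and rebuilds ArrayRef/MutableArrayRef on the C++ side. The sketch below mimics that calling convention entirely in Rust; `consume_expressions` and the plain u32 fields of `FfiCounterExpression` are simplified stand-ins, not the real rustc_llvm or LLVM types:

    // Sketch of the (pointer, length) convention used by the revised C ABI.
    #[repr(C)]
    #[derive(Clone, Copy, Debug)]
    struct FfiCounterExpression {
        kind: u32, // ExprKind in the real FFI struct
        lhs: u32,  // left operand ID
        rhs: u32,  // right operand ID
    }

    /// Shaped like the expression arguments of LLVMRustCoverageWriteMappingToBuffer.
    unsafe fn consume_expressions(expressions: *const FfiCounterExpression, num_expressions: u32) {
        // Equivalent of makeArrayRef(Expressions, NumExpressions) on the C++ side.
        let slice = std::slice::from_raw_parts(expressions, num_expressions as usize);
        for expression in slice {
            println!("{:?}", expression);
        }
    }

    fn main() {
        let expressions = vec![
            FfiCounterExpression { kind: 0, lhs: 1, rhs: 2 },
            FfiCounterExpression { kind: 1, lhs: 3, rhs: 0 },
        ];
        // The Rust caller passes its buffer as a raw (ptr, len) pair, which is
        // exactly what the revised C++ signature expects.
        unsafe { consume_expressions(expressions.as_ptr(), expressions.len() as u32) };
    }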