//! Types for tracking pieces of source code within a crate.
//!
//! The [`SourceMap`] tracks all the source code used within a single crate, mapping
//! from integer byte positions to the original source code location. Each bit
//! of source parsed during crate parsing (typically files, in-memory strings,
//! or various bits of macro expansion) covers a continuous range of bytes in the
//! `SourceMap` and is represented by [`SourceFile`]s. Byte positions are stored in
//! [`Span`] and used pervasively in the compiler. They are absolute positions
//! within the `SourceMap`, which upon request can be converted to line and column
//! information, source code snippets, etc.
|
|
|
pub use crate::hygiene::{ExpnData, ExpnKind};
|
|
|
|
pub use crate::*;
|
2014-11-06 08:05:53 +00:00
|
|
|
|
2017-10-19 12:32:39 +00:00
|
|
|
use rustc_data_structures::fx::FxHashMap;
|
2017-12-15 22:50:07 +00:00
|
|
|
use rustc_data_structures::stable_hasher::StableHasher;
|
2020-10-29 00:00:00 +00:00
|
|
|
use rustc_data_structures::sync::{AtomicU32, Lrc, MappedReadGuard, ReadGuard, RwLock};
|
2017-12-15 22:50:07 +00:00
|
|
|
use std::hash::Hash;
|
2017-04-27 14:12:57 +00:00
|
|
|
use std::path::{Path, PathBuf};
|
2020-02-18 17:24:36 +00:00
|
|
|
use std::sync::atomic::Ordering;
|
2021-04-03 15:59:31 +00:00
|
|
|
use std::{clone::Clone, cmp};
|
|
|
|
use std::{convert::TryFrom, unreachable};
|
2014-12-22 17:04:23 +00:00
|
|
|
|
2016-06-21 22:08:13 +00:00
|
|
|
use std::fs;
|
2018-11-16 21:22:06 +00:00
|
|
|
use std::io;
|
2020-08-14 06:05:01 +00:00
|
|
|
use tracing::debug;
|
2019-02-06 17:33:01 +00:00
|
|
|
|
2019-08-01 21:26:40 +00:00
|
|
|
#[cfg(test)]
|
|
|
|
mod tests;
|
|
|
|
|
2019-02-08 13:53:55 +00:00
|
|
|
/// Returns the span itself if it doesn't come from a macro expansion,
|
2014-04-09 20:42:25 +00:00
|
|
|
/// otherwise return the call site span up to the `enclosing_sp` by
|
2019-08-13 20:56:42 +00:00
|
|
|
/// following the `expn_data` chain.
|
2017-03-17 04:04:41 +00:00
|
|
|
pub fn original_sp(sp: Span, enclosing_sp: Span) -> Span {
|
2019-08-13 20:56:42 +00:00
|
|
|
let expn_data1 = sp.ctxt().outer_expn_data();
|
|
|
|
let expn_data2 = enclosing_sp.ctxt().outer_expn_data();
|
|
|
|
if expn_data1.is_root() || !expn_data2.is_root() && expn_data1.call_site == expn_data2.call_site
|
|
|
|
{
|
2019-08-11 00:00:05 +00:00
|
|
|
sp
|
|
|
|
} else {
|
2019-08-13 20:56:42 +00:00
|
|
|
original_sp(expn_data1.call_site, enclosing_sp)
|
2014-04-09 20:42:25 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2020-06-24 17:16:36 +00:00
|
|
|
pub mod monotonic {
|
|
|
|
use std::ops::{Deref, DerefMut};
|
|
|
|
|
|
|
|
/// A `MonotonicVec` is a `Vec` which can only be grown.
|
|
|
|
/// Once inserted, an element can never be removed or swapped,
|
|
|
|
/// guaranteeing that any indices into a `MonotonicVec` are stable
|
|
|
|
// This is declared in its own module to ensure that the private
|
|
|
|
// field is inaccessible
|
|
|
|
pub struct MonotonicVec<T>(Vec<T>);
|
|
|
|
impl<T> MonotonicVec<T> {
|
|
|
|
pub fn new(val: Vec<T>) -> MonotonicVec<T> {
|
|
|
|
MonotonicVec(val)
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn push(&mut self, val: T) {
|
|
|
|
self.0.push(val);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
impl<T> Default for MonotonicVec<T> {
|
|
|
|
fn default() -> Self {
|
|
|
|
MonotonicVec::new(vec![])
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
impl<T> Deref for MonotonicVec<T> {
|
|
|
|
type Target = Vec<T>;
|
|
|
|
fn deref(&self) -> &Self::Target {
|
|
|
|
&self.0
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
impl<T> !DerefMut for MonotonicVec<T> {}
|
|
|
|
}
|
|
|
|
|
2020-06-11 14:49:57 +00:00
|
|
|
#[derive(Clone, Encodable, Decodable, Debug, Copy, HashStable_Generic)]
|
2016-06-21 22:08:13 +00:00
|
|
|
pub struct Spanned<T> {
|
|
|
|
pub node: T,
|
|
|
|
pub span: Span,
|
2015-12-13 12:12:47 +00:00
|
|
|
}
|
|
|
|
|
2016-06-21 22:08:13 +00:00
|
|
|
pub fn respan<T>(sp: Span, t: T) -> Spanned<T> {
|
|
|
|
Spanned { node: t, span: sp }
|
2013-01-30 17:56:33 +00:00
|
|
|
}
|
|
|
|
|
2016-06-21 22:08:13 +00:00
|
|
|
pub fn dummy_spanned<T>(t: T) -> Spanned<T> {
|
|
|
|
respan(DUMMY_SP, t)
|
2015-08-26 23:46:05 +00:00
|
|
|
}
|
|
|
|
|
2015-02-11 17:29:49 +00:00
|
|
|
// _____________________________________________________________________________
// SourceFile, MultiByteChar, FileName, FileLines
//
|
|
|
|
|
2015-05-13 22:44:57 +00:00
|
|
|
/// An abstraction over the fs operations used by the Parser.
pub trait FileLoader {
    /// Query the existence of a file.
    fn file_exists(&self, path: &Path) -> bool;

    /// Read the contents of a UTF-8 file into memory.
    fn read_file(&self, path: &Path) -> io::Result<String>;
}

/// A FileLoader that uses std::fs to load real files.
pub struct RealFileLoader;

impl FileLoader for RealFileLoader {
    fn file_exists(&self, path: &Path) -> bool {
        path.exists()
    }

    fn read_file(&self, path: &Path) -> io::Result<String> {
        fs::read_to_string(path)
    }
}
|
2015-02-11 17:29:49 +00:00
|
|
|
|
2021-06-16 14:48:06 +00:00
|
|
|
/// This is a [SourceFile] identifier that is used to correlate source files between
|
|
|
|
/// subsequent compilation sessions (which is something we need to do during
|
|
|
|
/// incremental compilation).
|
|
|
|
///
|
|
|
|
/// The [StableSourceFileId] also contains the CrateNum of the crate the source
|
|
|
|
/// file was originally parsed for. This way we get two separate entries in
|
|
|
|
/// the [SourceMap] if the same file is part of both the local and an upstream
|
|
|
|
/// crate. Trying to only have one entry for both cases is problematic because
|
|
|
|
/// at the point where we discover that there's a local use of the file in
|
|
|
|
/// addition to the upstream one, we might already have made decisions based on
|
|
|
|
/// the assumption that it's an upstream file. Treating the two files as
|
|
|
|
/// different has no real downsides.
|
2020-06-11 14:49:57 +00:00
|
|
|
#[derive(Copy, Clone, PartialEq, Eq, Hash, Encodable, Decodable, Debug)]
|
2021-06-16 14:48:06 +00:00
|
|
|
pub struct StableSourceFileId {
|
|
|
|
// A hash of the source file's FileName. This is hash so that it's size
|
|
|
|
// is more predictable than if we included the actual FileName value.
|
2021-06-21 13:30:16 +00:00
|
|
|
pub file_name_hash: u64,
|
2021-06-16 14:48:06 +00:00
|
|
|
|
|
|
|
// The CrateNum of the crate this source file was originally parsed for.
|
|
|
|
// We cannot include this information in the hash because at the time
|
|
|
|
// of hashing we don't have the context to map from the CrateNum's numeric
|
|
|
|
// value to a StableCrateId.
|
2021-06-21 13:30:16 +00:00
|
|
|
pub cnum: CrateNum,
|
2021-06-16 14:48:06 +00:00
|
|
|
}
|
2017-12-15 22:50:07 +00:00
|
|
|
|
2020-05-29 18:04:03 +00:00
|
|
|
// FIXME: we need a more globally consistent approach to the problem solved by
|
|
|
|
// StableSourceFileId, perhaps built atop source_file.name_hash.
|
2018-10-29 20:26:13 +00:00
|
|
|
impl StableSourceFileId {
|
|
|
|
pub fn new(source_file: &SourceFile) -> StableSourceFileId {
|
2021-06-16 14:48:06 +00:00
|
|
|
StableSourceFileId::new_from_name(&source_file.name, source_file.cnum)
|
2018-10-30 14:10:42 +00:00
|
|
|
}
|
|
|
|
|
2021-06-16 14:48:06 +00:00
|
|
|
fn new_from_name(name: &FileName, cnum: CrateNum) -> StableSourceFileId {
|
2017-12-15 22:50:07 +00:00
|
|
|
let mut hasher = StableHasher::new();
|
2021-04-03 15:59:31 +00:00
|
|
|
name.hash(&mut hasher);
|
2021-06-16 14:48:06 +00:00
|
|
|
StableSourceFileId { file_name_hash: hasher.finish(), cnum }
|
2017-12-15 22:50:07 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2015-02-11 17:29:49 +00:00
|
|
|
// _____________________________________________________________________________
// SourceMap
//
|
|
|
|
|
2018-10-16 14:57:53 +00:00
|
|
|
#[derive(Default)]
|
2018-08-18 10:13:35 +00:00
|
|
|
pub(super) struct SourceMapFiles {
|
2020-06-24 17:16:36 +00:00
|
|
|
source_files: monotonic::MonotonicVec<Lrc<SourceFile>>,
|
2018-10-29 20:26:13 +00:00
|
|
|
stable_id_to_source_file: FxHashMap<StableSourceFileId, Lrc<SourceFile>>,
|
2018-03-14 17:11:37 +00:00
|
|
|
}
|
|
|
|
|
2018-08-18 10:13:35 +00:00
|
|
|
pub struct SourceMap {
|
2020-02-18 17:24:36 +00:00
|
|
|
/// The address space below this value is currently used by the files in the source map.
|
|
|
|
used_address_space: AtomicU32,
|
|
|
|
|
2020-10-29 00:00:00 +00:00
|
|
|
files: RwLock<SourceMapFiles>,
|
2018-07-10 19:06:26 +00:00
|
|
|
file_loader: Box<dyn FileLoader + Sync + Send>,
|
2017-04-24 17:01:19 +00:00
|
|
|
// This is used to apply the file path remapping as specified via
|
2019-09-06 02:56:45 +00:00
|
|
|
// `--remap-path-prefix` to all `SourceFile`s allocated within this `SourceMap`.
|
2017-04-24 17:01:19 +00:00
|
|
|
path_mapping: FilePathMapping,
|
2020-03-31 05:17:15 +00:00
|
|
|
|
|
|
|
/// The algorithm used for hashing the contents of each source file.
|
|
|
|
hash_kind: SourceFileHashAlgorithm,
|
2011-07-05 09:48:19 +00:00
|
|
|
}
|
|
|
|
|
2018-08-18 10:13:35 +00:00
|
|
|
impl SourceMap {
    /// Creates a `SourceMap` backed by the real filesystem, hashing file
    /// contents with MD5.
    pub fn new(path_mapping: FilePathMapping) -> SourceMap {
        Self::with_file_loader_and_hash_kind(
            Box::new(RealFileLoader),
            path_mapping,
            SourceFileHashAlgorithm::Md5,
        )
    }
|
|
|
|
|
2020-03-31 05:17:15 +00:00
|
|
|
pub fn with_file_loader_and_hash_kind(
|
2018-07-10 19:06:26 +00:00
|
|
|
file_loader: Box<dyn FileLoader + Sync + Send>,
|
2017-04-24 17:01:19 +00:00
|
|
|
path_mapping: FilePathMapping,
|
2020-03-31 05:17:15 +00:00
|
|
|
hash_kind: SourceFileHashAlgorithm,
|
2018-08-18 10:13:35 +00:00
|
|
|
) -> SourceMap {
|
2020-02-18 17:24:36 +00:00
|
|
|
SourceMap {
|
|
|
|
used_address_space: AtomicU32::new(0),
|
|
|
|
files: Default::default(),
|
|
|
|
file_loader,
|
|
|
|
path_mapping,
|
2020-03-31 05:17:15 +00:00
|
|
|
hash_kind,
|
2020-02-18 17:24:36 +00:00
|
|
|
}
|
2012-01-21 09:00:06 +00:00
|
|
|
}
|
2012-11-13 02:24:56 +00:00
|
|
|
|
2017-04-24 17:01:19 +00:00
|
|
|
pub fn path_mapping(&self) -> &FilePathMapping {
|
|
|
|
&self.path_mapping
|
|
|
|
}
|
|
|
|
|
2015-05-13 22:44:57 +00:00
|
|
|
pub fn file_exists(&self, path: &Path) -> bool {
|
|
|
|
self.file_loader.file_exists(path)
|
|
|
|
}
|
|
|
|
|
2018-08-18 10:13:52 +00:00
|
|
|
pub fn load_file(&self, path: &Path) -> io::Result<Lrc<SourceFile>> {
|
2016-03-23 03:01:37 +00:00
|
|
|
let src = self.file_loader.read_file(path)?;
|
2018-12-04 20:18:03 +00:00
|
|
|
let filename = path.to_owned().into();
|
2018-08-18 10:13:56 +00:00
|
|
|
Ok(self.new_source_file(filename, src))
|
2015-05-13 22:44:57 +00:00
|
|
|
}
|
|
|
|
|
2019-08-13 16:51:32 +00:00
|
|
|
/// Loads source file as a binary blob.
|
|
|
|
///
|
|
|
|
/// Unlike `load_file`, guarantees that no normalization like BOM-removal
|
|
|
|
/// takes place.
|
|
|
|
pub fn load_binary_file(&self, path: &Path) -> io::Result<Vec<u8>> {
|
|
|
|
// Ideally, this should use `self.file_loader`, but it can't
|
|
|
|
// deal with binary files yet.
|
|
|
|
let bytes = fs::read(path)?;
|
|
|
|
|
|
|
|
// We need to add file to the `SourceMap`, so that it is present
|
|
|
|
// in dep-info. There's also an edge case that file might be both
|
|
|
|
// loaded as a binary via `include_bytes!` and as proper `SourceFile`
|
|
|
|
// via `mod`, so we try to use real file contents and not just an
|
|
|
|
// empty string.
|
|
|
|
let text = std::str::from_utf8(&bytes).unwrap_or("").to_string();
|
|
|
|
self.new_source_file(path.to_owned().into(), text);
|
|
|
|
Ok(bytes)
|
|
|
|
}
|
|
|
|
|
2020-06-24 17:16:36 +00:00
|
|
|
// By returning a `MonotonicVec`, we ensure that consumers cannot invalidate
|
|
|
|
// any existing indices pointing into `files`.
|
2020-10-29 00:00:00 +00:00
|
|
|
pub fn files(&self) -> MappedReadGuard<'_, monotonic::MonotonicVec<Lrc<SourceFile>>> {
|
|
|
|
ReadGuard::map(self.files.borrow(), |files| &files.source_files)
|
2017-04-27 14:12:57 +00:00
|
|
|
}
|
|
|
|
|
2018-10-29 20:26:13 +00:00
|
|
|
pub fn source_file_by_stable_id(
|
|
|
|
&self,
|
|
|
|
stable_id: StableSourceFileId,
|
|
|
|
) -> Option<Lrc<SourceFile>> {
|
2020-02-29 12:14:52 +00:00
|
|
|
self.files.borrow().stable_id_to_source_file.get(&stable_id).cloned()
|
2017-10-19 12:32:39 +00:00
|
|
|
}
|
|
|
|
|
2020-02-18 17:24:36 +00:00
|
|
|
fn allocate_address_space(&self, size: usize) -> Result<usize, OffsetOverflowError> {
|
|
|
|
let size = u32::try_from(size).map_err(|_| OffsetOverflowError)?;
|
|
|
|
|
|
|
|
loop {
|
|
|
|
let current = self.used_address_space.load(Ordering::Relaxed);
|
|
|
|
let next = current
|
|
|
|
.checked_add(size)
|
|
|
|
// Add one so there is some space between files. This lets us distinguish
|
|
|
|
// positions in the `SourceMap`, even in the presence of zero-length files.
|
|
|
|
.and_then(|next| next.checked_add(1))
|
|
|
|
.ok_or(OffsetOverflowError)?;
|
|
|
|
|
|
|
|
if self
|
|
|
|
.used_address_space
|
|
|
|
.compare_exchange(current, next, Ordering::Relaxed, Ordering::Relaxed)
|
|
|
|
.is_ok()
|
|
|
|
{
|
|
|
|
return Ok(usize::try_from(current).unwrap());
|
|
|
|
}
|
2015-07-02 03:37:52 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-09-06 02:56:45 +00:00
|
|
|
/// Creates a new `SourceFile`.
|
|
|
|
/// If a file already exists in the `SourceMap` with the same ID, that file is returned
|
|
|
|
/// unmodified.
|
2018-08-18 10:13:56 +00:00
|
|
|
pub fn new_source_file(&self, filename: FileName, src: String) -> Lrc<SourceFile> {
|
2019-06-17 08:35:26 +00:00
|
|
|
self.try_new_source_file(filename, src).unwrap_or_else(|OffsetOverflowError| {
|
|
|
|
eprintln!("fatal error: rustc does not support files larger than 4GB");
|
2019-11-15 13:27:09 +00:00
|
|
|
crate::fatal_error::FatalError.raise()
|
2019-06-17 08:35:26 +00:00
|
|
|
})
|
|
|
|
}
|
|
|
|
|
|
|
|
fn try_new_source_file(
|
|
|
|
&self,
|
2021-04-03 15:59:31 +00:00
|
|
|
filename: FileName,
|
2019-06-17 08:35:26 +00:00
|
|
|
src: String,
|
|
|
|
) -> Result<Lrc<SourceFile>, OffsetOverflowError> {
|
2017-09-30 06:28:48 +00:00
|
|
|
// Note that filename may not be a valid path, eg it may be `<anon>` etc,
|
|
|
|
// but this is okay because the directory determined by `path.pop()` will
|
|
|
|
// be empty, so the working directory will be used.
|
2021-04-03 15:59:31 +00:00
|
|
|
let (filename, _) = self.path_mapping.map_filename_prefix(&filename);
|
2012-11-16 22:22:09 +00:00
|
|
|
|
2021-06-16 14:48:06 +00:00
|
|
|
let file_id = StableSourceFileId::new_from_name(&filename, LOCAL_CRATE);
|
2018-10-30 14:10:42 +00:00
|
|
|
|
2019-06-17 08:35:26 +00:00
|
|
|
let lrc_sf = match self.source_file_by_stable_id(file_id) {
|
2018-10-30 14:10:42 +00:00
|
|
|
Some(lrc_sf) => lrc_sf,
|
|
|
|
None => {
|
2020-02-18 17:24:36 +00:00
|
|
|
let start_pos = self.allocate_address_space(src.len())?;
|
|
|
|
|
2018-10-30 14:10:42 +00:00
|
|
|
let source_file = Lrc::new(SourceFile::new(
|
|
|
|
filename,
|
|
|
|
src,
|
|
|
|
Pos::from_usize(start_pos),
|
2020-03-31 05:17:15 +00:00
|
|
|
self.hash_kind,
|
2020-02-18 17:24:36 +00:00
|
|
|
));
|
2012-11-16 22:22:09 +00:00
|
|
|
|
2021-06-16 14:48:06 +00:00
|
|
|
// Let's make sure the file_id we generated above actually matches
|
|
|
|
// the ID we generate for the SourceFile we just created.
|
|
|
|
debug_assert_eq!(StableSourceFileId::new(&source_file), file_id);
|
|
|
|
|
2018-10-30 14:10:42 +00:00
|
|
|
let mut files = self.files.borrow_mut();
|
2017-10-19 12:32:39 +00:00
|
|
|
|
2018-10-30 14:10:42 +00:00
|
|
|
files.source_files.push(source_file.clone());
|
|
|
|
files.stable_id_to_source_file.insert(file_id, source_file.clone());
|
|
|
|
|
|
|
|
source_file
|
|
|
|
}
|
2019-06-17 08:35:26 +00:00
|
|
|
};
|
|
|
|
Ok(lrc_sf)
|
2012-11-16 22:22:09 +00:00
|
|
|
}
|
|
|
|
|
2019-09-06 02:56:45 +00:00
|
|
|
/// Allocates a new `SourceFile` representing a source file from an external
|
|
|
|
/// crate. The source code of such an "imported `SourceFile`" is not available,
|
2015-02-11 17:29:49 +00:00
|
|
|
/// but we still know enough to generate accurate debuginfo location
|
|
|
|
/// information for things inlined from other crates.
|
2018-09-04 15:09:49 +00:00
|
|
|
pub fn new_imported_source_file(
|
|
|
|
&self,
|
|
|
|
filename: FileName,
|
2020-03-31 05:17:15 +00:00
|
|
|
src_hash: SourceFileHash,
|
2018-09-04 15:09:49 +00:00
|
|
|
name_hash: u128,
|
|
|
|
source_len: usize,
|
2020-02-07 19:02:24 +00:00
|
|
|
cnum: CrateNum,
|
2018-09-04 15:09:49 +00:00
|
|
|
mut file_local_lines: Vec<BytePos>,
|
|
|
|
mut file_local_multibyte_chars: Vec<MultiByteChar>,
|
|
|
|
mut file_local_non_narrow_chars: Vec<NonNarrowChar>,
|
2019-10-03 00:55:31 +00:00
|
|
|
mut file_local_normalized_pos: Vec<NormalizedPos>,
|
2020-02-07 19:02:24 +00:00
|
|
|
original_start_pos: BytePos,
|
|
|
|
original_end_pos: BytePos,
|
2018-09-04 15:09:49 +00:00
|
|
|
) -> Lrc<SourceFile> {
|
2020-02-18 17:24:36 +00:00
|
|
|
let start_pos = self
|
|
|
|
.allocate_address_space(source_len)
|
|
|
|
.expect("not enough address space for imported source file");
|
2015-02-11 17:29:49 +00:00
|
|
|
|
|
|
|
let end_pos = Pos::from_usize(start_pos + source_len);
|
|
|
|
let start_pos = Pos::from_usize(start_pos);
|
|
|
|
|
2015-04-17 04:38:24 +00:00
|
|
|
for pos in &mut file_local_lines {
|
|
|
|
*pos = *pos + start_pos;
|
|
|
|
}
|
|
|
|
|
|
|
|
for mbc in &mut file_local_multibyte_chars {
|
|
|
|
mbc.pos = mbc.pos + start_pos;
|
|
|
|
}
|
2015-02-11 17:29:49 +00:00
|
|
|
|
2017-11-02 01:25:54 +00:00
|
|
|
for swc in &mut file_local_non_narrow_chars {
|
|
|
|
*swc = *swc + start_pos;
|
|
|
|
}
|
|
|
|
|
2019-10-03 00:55:31 +00:00
|
|
|
for nc in &mut file_local_normalized_pos {
|
|
|
|
nc.pos = nc.pos + start_pos;
|
|
|
|
}
|
|
|
|
|
2018-08-18 10:13:56 +00:00
|
|
|
let source_file = Lrc::new(SourceFile {
|
2015-02-11 17:29:49 +00:00
|
|
|
name: filename,
|
|
|
|
src: None,
|
2017-08-07 05:54:09 +00:00
|
|
|
src_hash,
|
2020-02-07 19:02:24 +00:00
|
|
|
external_src: Lock::new(ExternalSource::Foreign {
|
|
|
|
kind: ExternalSourceKind::AbsentOk,
|
|
|
|
original_start_pos,
|
|
|
|
original_end_pos,
|
|
|
|
}),
|
2017-08-07 05:54:09 +00:00
|
|
|
start_pos,
|
|
|
|
end_pos,
|
2018-05-23 13:59:42 +00:00
|
|
|
lines: file_local_lines,
|
|
|
|
multibyte_chars: file_local_multibyte_chars,
|
|
|
|
non_narrow_chars: file_local_non_narrow_chars,
|
2019-10-03 00:55:31 +00:00
|
|
|
normalized_pos: file_local_normalized_pos,
|
2017-12-19 14:14:41 +00:00
|
|
|
name_hash,
|
2020-02-07 19:02:24 +00:00
|
|
|
cnum,
|
2015-02-11 17:29:49 +00:00
|
|
|
});
|
|
|
|
|
2018-03-14 17:11:37 +00:00
|
|
|
let mut files = self.files.borrow_mut();
|
2015-02-11 17:29:49 +00:00
|
|
|
|
2018-10-29 20:26:13 +00:00
|
|
|
files.source_files.push(source_file.clone());
|
|
|
|
files
|
|
|
|
.stable_id_to_source_file
|
|
|
|
.insert(StableSourceFileId::new(&source_file), source_file.clone());
|
2017-10-19 12:32:39 +00:00
|
|
|
|
2018-08-18 10:13:56 +00:00
|
|
|
source_file
|
2015-02-11 17:29:49 +00:00
|
|
|
}
|
|
|
|
|
2019-09-06 02:56:45 +00:00
|
|
|
// If there is a doctest offset, applies it to the line.
|
2018-12-04 20:18:03 +00:00
|
|
|
pub fn doctest_offset_line(&self, file: &FileName, orig: usize) -> usize {
|
2020-03-20 14:03:11 +00:00
|
|
|
match file {
|
2018-12-04 20:18:03 +00:00
|
|
|
FileName::DocTest(_, offset) => {
|
2020-03-21 12:37:29 +00:00
|
|
|
if *offset < 0 {
|
2018-12-04 20:18:03 +00:00
|
|
|
orig - (-(*offset)) as usize
|
2020-03-21 12:37:29 +00:00
|
|
|
} else {
|
|
|
|
orig + *offset as usize
|
|
|
|
}
|
2018-12-04 20:18:03 +00:00
|
|
|
}
|
|
|
|
_ => orig,
|
2020-03-20 14:03:11 +00:00
|
|
|
}
|
2018-01-08 14:47:23 +00:00
|
|
|
}
|
|
|
|
|
Updates to experimental coverage counter injection
This is a combination of 18 commits.
Commit #2:
Additional examples and some small improvements.
Commit #3:
fixed mir-opt non-mir extensions and spanview title elements
Corrected a fairly recent assumption in runtest.rs that all MIR dump
files end in .mir. (It was appending .mir to the graphviz .dot and
spanview .html file names when generating blessed output files. That
also left outdated files in the baseline alongside the files with the
incorrect names, which I've now removed.)
Updated spanview HTML title elements to match their content, replacing a
hardcoded and incorrect name that was left in accidentally when
originally submitted.
Commit #4:
added more test examples
also improved Makefiles with support for non-zero exit status and to
force validation of tests unless a specific test overrides it with a
specific comment.
Commit #5:
Fixed rare issues after testing on real-world crate
Commit #6:
Addressed PR feedback, and removed temporary -Zexperimental-coverage
-Zinstrument-coverage once again supports the latest capabilities of
LLVM instrprof coverage instrumentation.
Also fixed a bug in spanview.
Commit #7:
Fix closure handling, add tests for closures and inner items
And cleaned up other tests for consistency, and to make it more clear
where spans start/end by breaking up lines.
Commit #8:
renamed "typical" test results "expected"
Now that the `llvm-cov show` tests are improved to normally expect
matching actuals, and to allow individual tests to override that
expectation.
Commit #9:
test coverage of inline generic struct function
Commit #10:
Addressed review feedback
* Removed unnecessary Unreachable filter.
* Replaced a match wildcard with remining variants.
* Added more comments to help clarify the role of successors() in the
CFG traversal
Commit #11:
refactoring based on feedback
* refactored `fn coverage_spans()`.
* changed the way I expand an empty coverage span to improve performance
* fixed a typo that I had accidently left in, in visit.rs
Commit #12:
Optimized use of SourceMap and SourceFile
Commit #13:
Fixed a regression, and synched with upstream
Some generated test file names changed due to some new change upstream.
Commit #14:
Stripping out crate disambiguators from demangled names
These can vary depending on the test platform.
Commit #15:
Ignore llvm-cov show diff on test with generics, expand IO error message
Tests with generics produce llvm-cov show results with demangled names
that can include an unstable "crate disambiguator" (hex value). The
value changes when run in the Rust CI Windows environment. I added a sed
filter to strip them out (in a prior commit), but sed also appears to
fail in the same environment. Until I can figure out a workaround, I'm
just going to ignore this specific test result. I added a FIXME to
follow up later, but it's not that critical.
I also saw an error with Windows GNU, but the IO error did not
specify a path for the directory or file that triggered the error. I
updated the error messages to provide more info for next, time but also
noticed some other tests with similar steps did not fail. Looks
spurious.
Commit #16:
Modify rust-demangler to strip disambiguators by default
Commit #17:
Remove std::process::exit from coverage tests
Due to Issue #77553, programs that call std::process::exit() do not
generate coverage results on Windows MSVC.
Commit #18:
fix: test file paths exceeding Windows max path len
2020-09-01 23:15:17 +00:00
|
|
|
/// Return the SourceFile that contains the given `BytePos`
|
|
|
|
pub fn lookup_source_file(&self, pos: BytePos) -> Lrc<SourceFile> {
|
|
|
|
let idx = self.lookup_source_file_idx(pos);
|
|
|
|
(*self.files.borrow().source_files)[idx].clone()
|
|
|
|
}
|
|
|
|
|
2019-09-06 02:56:45 +00:00
|
|
|
/// Looks up source information about a `BytePos`.
|
2013-04-17 16:15:08 +00:00
|
|
|
pub fn lookup_char_pos(&self, pos: BytePos) -> Loc {
|
Updates to experimental coverage counter injection
This is a combination of 18 commits.
Commit #2:
Additional examples and some small improvements.
Commit #3:
fixed mir-opt non-mir extensions and spanview title elements
Corrected a fairly recent assumption in runtest.rs that all MIR dump
files end in .mir. (It was appending .mir to the graphviz .dot and
spanview .html file names when generating blessed output files. That
also left outdated files in the baseline alongside the files with the
incorrect names, which I've now removed.)
Updated spanview HTML title elements to match their content, replacing a
hardcoded and incorrect name that was left in accidentally when
originally submitted.
Commit #4:
added more test examples
also improved Makefiles with support for non-zero exit status and to
force validation of tests unless a specific test overrides it with a
specific comment.
Commit #5:
Fixed rare issues after testing on real-world crate
Commit #6:
Addressed PR feedback, and removed temporary -Zexperimental-coverage
-Zinstrument-coverage once again supports the latest capabilities of
LLVM instrprof coverage instrumentation.
Also fixed a bug in spanview.
Commit #7:
Fix closure handling, add tests for closures and inner items
And cleaned up other tests for consistency, and to make it more clear
where spans start/end by breaking up lines.
Commit #8:
renamed "typical" test results "expected"
Now that the `llvm-cov show` tests are improved to normally expect
matching actuals, and to allow individual tests to override that
expectation.
Commit #9:
test coverage of inline generic struct function
Commit #10:
Addressed review feedback
* Removed unnecessary Unreachable filter.
* Replaced a match wildcard with remining variants.
* Added more comments to help clarify the role of successors() in the
CFG traversal
Commit #11:
refactoring based on feedback
* refactored `fn coverage_spans()`.
* changed the way I expand an empty coverage span to improve performance
* fixed a typo that I had accidently left in, in visit.rs
Commit #12:
Optimized use of SourceMap and SourceFile
Commit #13:
Fixed a regression, and synched with upstream
Some generated test file names changed due to some new change upstream.
Commit #14:
Stripping out crate disambiguators from demangled names
These can vary depending on the test platform.
Commit #15:
Ignore llvm-cov show diff on test with generics, expand IO error message
Tests with generics produce llvm-cov show results with demangled names
that can include an unstable "crate disambiguator" (hex value). The
value changes when run in the Rust CI Windows environment. I added a sed
filter to strip them out (in a prior commit), but sed also appears to
fail in the same environment. Until I can figure out a workaround, I'm
just going to ignore this specific test result. I added a FIXME to
follow up later, but it's not that critical.
I also saw an error with Windows GNU, but the IO error did not
specify a path for the directory or file that triggered the error. I
updated the error messages to provide more info for next, time but also
noticed some other tests with similar steps did not fail. Looks
spurious.
Commit #16:
Modify rust-demangler to strip disambiguators by default
Commit #17:
Remove std::process::exit from coverage tests
Due to Issue #77553, programs that call std::process::exit() do not
generate coverage results on Windows MSVC.
Commit #18:
fix: test file paths exceeding Windows max path len
2020-09-01 23:15:17 +00:00
|
|
|
let sf = self.lookup_source_file(pos);
|
|
|
|
let (line, col, col_display) = sf.lookup_file_pos_with_col_display(pos);
|
|
|
|
Loc { file: sf, line, col, col_display }
|
2015-06-16 18:47:09 +00:00
|
|
|
}
|
|
|
|
|
2019-09-06 02:56:45 +00:00
|
|
|
// If the corresponding `SourceFile` is empty, does not return a line number.
|
2018-08-18 10:13:52 +00:00
|
|
|
pub fn lookup_line(&self, pos: BytePos) -> Result<SourceFileAndLine, Lrc<SourceFile>> {
|
Updates to experimental coverage counter injection
This is a combination of 18 commits.
Commit #2:
Additional examples and some small improvements.
Commit #3:
fixed mir-opt non-mir extensions and spanview title elements
Corrected a fairly recent assumption in runtest.rs that all MIR dump
files end in .mir. (It was appending .mir to the graphviz .dot and
spanview .html file names when generating blessed output files. That
also left outdated files in the baseline alongside the files with the
incorrect names, which I've now removed.)
Updated spanview HTML title elements to match their content, replacing a
hardcoded and incorrect name that was left in accidentally when
originally submitted.
Commit #4:
added more test examples
also improved Makefiles with support for non-zero exit status and to
force validation of tests unless a specific test overrides it with a
specific comment.
Commit #5:
Fixed rare issues after testing on real-world crate
Commit #6:
Addressed PR feedback, and removed temporary -Zexperimental-coverage
-Zinstrument-coverage once again supports the latest capabilities of
LLVM instrprof coverage instrumentation.
Also fixed a bug in spanview.
Commit #7:
Fix closure handling, add tests for closures and inner items
And cleaned up other tests for consistency, and to make it more clear
where spans start/end by breaking up lines.
Commit #8:
renamed "typical" test results "expected"
Now that the `llvm-cov show` tests are improved to normally expect
matching actuals, and to allow individual tests to override that
expectation.
Commit #9:
test coverage of inline generic struct function
Commit #10:
Addressed review feedback
* Removed unnecessary Unreachable filter.
* Replaced a match wildcard with remining variants.
* Added more comments to help clarify the role of successors() in the
CFG traversal
Commit #11:
refactoring based on feedback
* refactored `fn coverage_spans()`.
* changed the way I expand an empty coverage span to improve performance
* fixed a typo that I had accidently left in, in visit.rs
Commit #12:
Optimized use of SourceMap and SourceFile
Commit #13:
Fixed a regression, and synched with upstream
Some generated test file names changed due to some new change upstream.
Commit #14:
Stripping out crate disambiguators from demangled names
These can vary depending on the test platform.
Commit #15:
Ignore llvm-cov show diff on test with generics, expand IO error message
Tests with generics produce llvm-cov show results with demangled names
that can include an unstable "crate disambiguator" (hex value). The
value changes when run in the Rust CI Windows environment. I added a sed
filter to strip them out (in a prior commit), but sed also appears to
fail in the same environment. Until I can figure out a workaround, I'm
just going to ignore this specific test result. I added a FIXME to
follow up later, but it's not that critical.
I also saw an error with Windows GNU, but the IO error did not
specify a path for the directory or file that triggered the error. I
updated the error messages to provide more info for next, time but also
noticed some other tests with similar steps did not fail. Looks
spurious.
Commit #16:
Modify rust-demangler to strip disambiguators by default
Commit #17:
Remove std::process::exit from coverage tests
Due to Issue #77553, programs that call std::process::exit() do not
generate coverage results on Windows MSVC.
Commit #18:
fix: test file paths exceeding Windows max path len
2020-09-01 23:15:17 +00:00
|
|
|
let f = self.lookup_source_file(pos);
|
2015-07-02 03:37:52 +00:00
|
|
|
|
2016-08-24 21:06:31 +00:00
|
|
|
match f.lookup_line(pos) {
|
2019-06-25 21:22:45 +00:00
|
|
|
Some(line) => Ok(SourceFileAndLine { sf: f, line }),
|
2016-08-24 21:06:31 +00:00
|
|
|
None => Err(f),
|
2015-07-02 03:37:52 +00:00
|
|
|
}
|
2012-11-13 02:24:56 +00:00
|
|
|
}
|
2011-07-16 06:01:10 +00:00
|
|
|
|
2021-08-26 10:46:01 +00:00
|
|
|
fn span_to_string(&self, sp: Span, filename_display_pref: FileNameDisplayPreference) -> String {
|
2021-06-09 00:00:00 +00:00
|
|
|
if self.files.borrow().source_files.is_empty() || sp.is_dummy() {
|
2014-05-25 10:17:19 +00:00
|
|
|
return "no-location".to_string();
|
2012-12-05 23:13:24 +00:00
|
|
|
}
|
|
|
|
|
2019-04-05 19:42:40 +00:00
|
|
|
let lo = self.lookup_char_pos(sp.lo());
|
|
|
|
let hi = self.lookup_char_pos(sp.hi());
|
2018-02-16 22:08:12 +00:00
|
|
|
format!(
|
|
|
|
"{}:{}:{}: {}:{}",
|
2021-08-26 10:46:01 +00:00
|
|
|
lo.file.name.display(filename_display_pref),
|
2019-09-06 02:56:45 +00:00
|
|
|
lo.line,
|
|
|
|
lo.col.to_usize() + 1,
|
|
|
|
hi.line,
|
|
|
|
hi.col.to_usize() + 1,
|
|
|
|
)
|
2012-02-10 18:28:43 +00:00
|
|
|
}
|
|
|
|
|
2021-05-03 00:14:25 +00:00
|
|
|
/// Format the span location suitable for embedding in build artifacts
|
|
|
|
pub fn span_to_embeddable_string(&self, sp: Span) -> String {
|
2021-08-26 10:46:01 +00:00
|
|
|
self.span_to_string(sp, FileNameDisplayPreference::Remapped)
|
2021-05-03 00:14:25 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
/// Format the span location to be printed in diagnostics. Must not be emitted
|
|
|
|
/// to build artifacts as this may leak local file paths. Use span_to_embeddable_string
|
|
|
|
/// for string suitable for embedding.
|
|
|
|
pub fn span_to_diagnostic_string(&self, sp: Span) -> String {
|
2021-08-26 10:46:01 +00:00
|
|
|
self.span_to_string(sp, self.path_mapping.filename_display_for_diagnostics)
|
2017-09-30 06:28:48 +00:00
|
|
|
}
|
|
|
|
|
2013-08-31 16:13:04 +00:00
|
|
|
pub fn span_to_filename(&self, sp: Span) -> FileName {
|
2017-09-30 06:28:48 +00:00
|
|
|
self.lookup_char_pos(sp.lo()).file.name.clone()
|
2012-11-13 02:24:56 +00:00
|
|
|
}
|
2011-07-05 09:48:19 +00:00
|
|
|
|
2021-08-26 10:46:01 +00:00
|
|
|
pub fn filename_for_diagnostics<'a>(&self, filename: &'a FileName) -> FileNameDisplay<'a> {
|
|
|
|
filename.display(self.path_mapping.filename_display_for_diagnostics)
|
|
|
|
}
|
|
|
|
|
2017-12-22 23:55:44 +00:00
|
|
|
pub fn is_multiline(&self, sp: Span) -> bool {
|
2021-07-01 00:00:00 +00:00
|
|
|
let lo = self.lookup_source_file_idx(sp.lo());
|
|
|
|
let hi = self.lookup_source_file_idx(sp.hi());
|
|
|
|
if lo != hi {
|
|
|
|
return true;
|
|
|
|
}
|
|
|
|
let f = (*self.files.borrow().source_files)[lo].clone();
|
|
|
|
f.lookup_line(sp.lo()) != f.lookup_line(sp.hi())
|
2017-12-22 23:55:44 +00:00
|
|
|
}
|
|
|
|
|
2021-09-20 15:24:47 +00:00
|
|
|
#[instrument(skip(self), level = "trace")]
|
2020-01-15 17:57:06 +00:00
|
|
|
pub fn is_valid_span(&self, sp: Span) -> Result<(Loc, Loc), SpanLinesError> {
|
2017-07-31 20:04:34 +00:00
|
|
|
let lo = self.lookup_char_pos(sp.lo());
|
2021-09-20 15:24:47 +00:00
|
|
|
trace!(?lo);
|
2017-07-31 20:04:34 +00:00
|
|
|
let hi = self.lookup_char_pos(sp.hi());
|
2021-09-20 15:24:47 +00:00
|
|
|
trace!(?hi);
|
2015-04-30 08:23:50 +00:00
|
|
|
if lo.file.start_pos != hi.file.start_pos {
|
|
|
|
return Err(SpanLinesError::DistinctSources(DistinctSources {
|
|
|
|
begin: (lo.file.name.clone(), lo.file.start_pos),
|
|
|
|
end: (hi.file.name.clone(), hi.file.start_pos),
|
|
|
|
}));
|
|
|
|
}
|
2020-01-15 17:57:06 +00:00
|
|
|
Ok((lo, hi))
|
|
|
|
}
|
|
|
|
|
2020-03-07 10:00:40 +00:00
|
|
|
pub fn is_line_before_span_empty(&self, sp: Span) -> bool {
|
|
|
|
match self.span_to_prev_source(sp) {
|
2021-02-04 01:54:23 +00:00
|
|
|
Ok(s) => s.rsplit_once('\n').unwrap_or(("", &s)).1.trim_start().is_empty(),
|
2020-03-07 10:00:40 +00:00
|
|
|
Err(_) => false,
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2020-01-15 17:57:06 +00:00
|
|
|
pub fn span_to_lines(&self, sp: Span) -> FileLinesResult {
|
|
|
|
debug!("span_to_lines(sp={:?})", sp);
|
|
|
|
let (lo, hi) = self.is_valid_span(sp)?;
|
2015-04-30 08:23:50 +00:00
|
|
|
assert!(hi.line >= lo.line);
|
|
|
|
|
2020-03-13 21:01:35 +00:00
|
|
|
if sp.is_dummy() {
|
|
|
|
return Ok(FileLines { file: lo.file, lines: Vec::new() });
|
|
|
|
}
|
|
|
|
|
2015-04-09 18:46:03 +00:00
|
|
|
let mut lines = Vec::with_capacity(hi.line - lo.line + 1);
|
|
|
|
|
|
|
|
// The span starts partway through the first line,
|
|
|
|
// but after that it starts from offset 0.
|
|
|
|
let mut start_col = lo.col;
|
|
|
|
|
|
|
|
// For every line but the last, it extends from `start_col`
|
|
|
|
// and to the end of the line. Be careful because the line
|
|
|
|
// numbers in Loc are 1-based, so we subtract 1 to get 0-based
|
|
|
|
// lines.
|
2020-03-13 21:01:35 +00:00
|
|
|
//
|
|
|
|
// FIXME: now that we handle DUMMY_SP up above, we should consider
|
2020-03-23 17:32:23 +00:00
|
|
|
// asserting that the line numbers here are all indeed 1-based.
|
2020-01-06 10:46:30 +00:00
|
|
|
let hi_line = hi.line.saturating_sub(1);
|
|
|
|
for line_index in lo.line.saturating_sub(1)..hi_line {
|
2021-01-11 19:45:33 +00:00
|
|
|
let line_len = lo.file.get_line(line_index).map_or(0, |s| s.chars().count());
|
2015-04-09 18:46:03 +00:00
|
|
|
lines.push(LineInfo { line_index, start_col, end_col: CharPos::from_usize(line_len) });
|
|
|
|
start_col = CharPos::from_usize(0);
|
|
|
|
}
|
|
|
|
|
|
|
|
// For the last line, it extends from `start_col` to `hi.col`:
|
2020-01-06 10:46:30 +00:00
|
|
|
lines.push(LineInfo { line_index: hi_line, start_col, end_col: hi.col });
|
2015-04-09 18:46:03 +00:00
|
|
|
|
2019-06-25 21:22:45 +00:00
|
|
|
Ok(FileLines { file: lo.file, lines })
|
2012-11-13 02:24:56 +00:00
|
|
|
}
|
2012-02-10 18:28:43 +00:00
|
|
|
|
2019-02-08 13:53:55 +00:00
|
|
|
/// Extracts the source surrounding the given `Span` using the `extract_source` function. The
|
2018-02-22 14:27:41 +00:00
|
|
|
/// extract function takes three arguments: a string slice containing the source, an index in
|
|
|
|
/// the slice for the beginning of the span and an index in the slice for the end of the span.
|
2021-01-02 18:45:11 +00:00
|
|
|
fn span_to_source<F, T>(&self, sp: Span, extract_source: F) -> Result<T, SpanSnippetError>
|
2019-08-13 18:35:49 +00:00
|
|
|
where
|
2021-01-02 18:45:11 +00:00
|
|
|
F: Fn(&str, usize, usize) -> Result<T, SpanSnippetError>,
|
2018-02-22 14:27:41 +00:00
|
|
|
{
|
2017-07-31 20:04:34 +00:00
|
|
|
let local_begin = self.lookup_byte_offset(sp.lo());
|
|
|
|
let local_end = self.lookup_byte_offset(sp.hi());
|
2013-08-04 02:14:01 +00:00
|
|
|
|
2018-10-29 20:26:13 +00:00
|
|
|
if local_begin.sf.start_pos != local_end.sf.start_pos {
|
2020-03-20 14:03:11 +00:00
|
|
|
Err(SpanSnippetError::DistinctSources(DistinctSources {
|
2018-10-29 20:26:13 +00:00
|
|
|
begin: (local_begin.sf.name.clone(), local_begin.sf.start_pos),
|
|
|
|
end: (local_end.sf.name.clone(), local_end.sf.start_pos),
|
2020-03-20 14:03:11 +00:00
|
|
|
}))
|
2013-08-04 02:14:01 +00:00
|
|
|
} else {
|
2018-10-29 20:26:13 +00:00
|
|
|
self.ensure_source_file_source_present(local_begin.sf.clone());
|
2017-06-12 19:47:39 +00:00
|
|
|
|
|
|
|
let start_index = local_begin.pos.to_usize();
|
|
|
|
let end_index = local_end.pos.to_usize();
|
2018-10-29 20:26:13 +00:00
|
|
|
let source_len = (local_begin.sf.end_pos - local_begin.sf.start_pos).to_usize();
|
2017-06-12 19:47:39 +00:00
|
|
|
|
|
|
|
if start_index > end_index || end_index > source_len {
|
2018-10-29 20:26:13 +00:00
|
|
|
return Err(SpanSnippetError::MalformedForSourcemap(MalformedSourceMapPositions {
|
|
|
|
name: local_begin.sf.name.clone(),
|
2017-08-07 05:54:09 +00:00
|
|
|
source_len,
|
2017-06-12 19:47:39 +00:00
|
|
|
begin_pos: local_begin.pos,
|
|
|
|
end_pos: local_end.pos,
|
|
|
|
}));
|
|
|
|
}
|
|
|
|
|
2018-10-29 20:26:13 +00:00
|
|
|
if let Some(ref src) = local_begin.sf.src {
|
2020-03-20 14:03:11 +00:00
|
|
|
extract_source(src, start_index, end_index)
|
2018-10-29 20:26:13 +00:00
|
|
|
} else if let Some(src) = local_begin.sf.external_src.borrow().get_source() {
|
2020-03-20 14:03:11 +00:00
|
|
|
extract_source(src, start_index, end_index)
|
2017-06-12 19:47:39 +00:00
|
|
|
} else {
|
2020-03-20 14:03:11 +00:00
|
|
|
Err(SpanSnippetError::SourceNotAvailable { filename: local_begin.sf.name.clone() })
|
2015-02-11 17:29:49 +00:00
|
|
|
}
|
2013-08-04 02:14:01 +00:00
|
|
|
}
|
2021-08-28 02:13:08 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
/// Returns whether or not this span points into a file
|
|
|
|
/// in the current crate. This may be `false` for spans
|
|
|
|
/// produced by a macro expansion, or for spans associated
|
|
|
|
/// with the definition of an item in a foreign crate
|
|
|
|
pub fn is_local_span(&self, sp: Span) -> bool {
|
|
|
|
let local_begin = self.lookup_byte_offset(sp.lo());
|
|
|
|
let local_end = self.lookup_byte_offset(sp.hi());
|
|
|
|
// This might be a weird span that covers multiple files
|
|
|
|
local_begin.sf.src.is_some() && local_end.sf.src.is_some()
|
2012-11-13 02:24:56 +00:00
|
|
|
}
|
2012-05-11 00:18:04 +00:00
|
|
|
|
2019-09-06 02:56:45 +00:00
|
|
|
/// Returns the source snippet as `String` corresponding to the given `Span`.
|
2018-02-22 14:27:41 +00:00
|
|
|
pub fn span_to_snippet(&self, sp: Span) -> Result<String, SpanSnippetError> {
|
2019-08-13 18:35:49 +00:00
|
|
|
self.span_to_source(sp, |src, start_index, end_index| {
|
|
|
|
src.get(start_index..end_index)
|
|
|
|
.map(|s| s.to_string())
|
2020-12-03 22:06:58 +00:00
|
|
|
.ok_or(SpanSnippetError::IllFormedSpan(sp))
|
2019-12-22 22:42:04 +00:00
|
|
|
})
|
2018-02-22 14:27:41 +00:00
|
|
|
}
|
|
|
|
|
2018-09-04 15:09:49 +00:00
|
|
|
pub fn span_to_margin(&self, sp: Span) -> Option<usize> {
|
2021-11-04 16:55:25 +00:00
|
|
|
Some(self.indentation_before(sp)?.len())
|
|
|
|
}
|
2021-02-04 01:54:23 +00:00
|
|
|
|
2021-11-04 16:55:25 +00:00
|
|
|
pub fn indentation_before(&self, sp: Span) -> Option<String> {
|
|
|
|
self.span_to_source(sp, |src, start_index, _| {
|
|
|
|
let before = &src[..start_index];
|
|
|
|
let last_line = before.rsplit_once('\n').map_or(before, |(_, last)| last);
|
|
|
|
Ok(last_line
|
|
|
|
.split_once(|c: char| !c.is_whitespace())
|
|
|
|
.map_or(last_line, |(indent, _)| indent)
|
|
|
|
.to_string())
|
|
|
|
})
|
|
|
|
.ok()
|
2018-09-04 15:09:49 +00:00
|
|
|
}
|
|
|
|
|
2019-09-06 02:56:45 +00:00
|
|
|
/// Returns the source snippet as `String` before the given `Span`.
|
2018-02-22 14:27:41 +00:00
|
|
|
pub fn span_to_prev_source(&self, sp: Span) -> Result<String, SpanSnippetError> {
|
2019-08-13 18:35:49 +00:00
|
|
|
self.span_to_source(sp, |src, start_index, _| {
|
2020-12-03 22:06:58 +00:00
|
|
|
src.get(..start_index).map(|s| s.to_string()).ok_or(SpanSnippetError::IllFormedSpan(sp))
|
2019-12-22 22:42:04 +00:00
|
|
|
})
|
2018-02-22 14:27:41 +00:00
|
|
|
}
|
|
|
|
|
2019-09-06 02:56:45 +00:00
|
|
|
/// Extends the given `Span` to just after the previous occurrence of `c`. Return the same span
|
2018-02-22 14:27:41 +00:00
|
|
|
/// if no character could be found or if an error occurred while retrieving the code snippet.
|
2021-01-02 18:45:11 +00:00
|
|
|
pub fn span_extend_to_prev_char(&self, sp: Span, c: char, accept_newlines: bool) -> Span {
|
2018-02-22 14:27:41 +00:00
|
|
|
if let Ok(prev_source) = self.span_to_prev_source(sp) {
|
2021-01-02 18:45:11 +00:00
|
|
|
let prev_source = prev_source.rsplit(c).next().unwrap_or("");
|
2021-02-04 01:54:23 +00:00
|
|
|
if !prev_source.is_empty() && (accept_newlines || !prev_source.contains('\n')) {
|
2018-02-22 14:27:41 +00:00
|
|
|
return sp.with_lo(BytePos(sp.lo().0 - prev_source.len() as u32));
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
sp
|
|
|
|
}
|
|
|
|
|
2019-09-06 02:56:45 +00:00
|
|
|
/// Extends the given `Span` to just after the previous occurrence of `pat` when surrounded by
|
2022-01-15 21:22:22 +00:00
|
|
|
/// whitespace. Returns None if the pattern could not be found or if an error occurred while
|
|
|
|
/// retrieving the code snippet.
|
|
|
|
pub fn span_extend_to_prev_str(
|
|
|
|
&self,
|
|
|
|
sp: Span,
|
|
|
|
pat: &str,
|
|
|
|
accept_newlines: bool,
|
|
|
|
include_whitespace: bool,
|
|
|
|
) -> Option<Span> {
|
2018-03-14 05:58:45 +00:00
|
|
|
// assure that the pattern is delimited, to avoid the following
|
|
|
|
// fn my_fn()
|
|
|
|
// ^^^^ returned span without the check
|
|
|
|
// ---------- correct span
|
2022-01-15 21:22:22 +00:00
|
|
|
let prev_source = self.span_to_prev_source(sp).ok()?;
|
2018-03-14 05:58:45 +00:00
|
|
|
for ws in &[" ", "\t", "\n"] {
|
|
|
|
let pat = pat.to_owned() + ws;
|
2022-01-15 21:22:22 +00:00
|
|
|
if let Some(pat_pos) = prev_source.rfind(&pat) {
|
|
|
|
let just_after_pat_pos = pat_pos + pat.len() - 1;
|
|
|
|
let just_after_pat_plus_ws = if include_whitespace {
|
|
|
|
just_after_pat_pos
|
|
|
|
+ prev_source[just_after_pat_pos..]
|
|
|
|
.find(|c: char| !c.is_whitespace())
|
|
|
|
.unwrap_or(0)
|
|
|
|
} else {
|
|
|
|
just_after_pat_pos
|
|
|
|
};
|
|
|
|
let len = prev_source.len() - just_after_pat_plus_ws;
|
|
|
|
let prev_source = &prev_source[just_after_pat_plus_ws..];
|
|
|
|
if accept_newlines || !prev_source.trim_start().contains('\n') {
|
|
|
|
return Some(sp.with_lo(BytePos(sp.lo().0 - len as u32)));
|
2018-03-14 05:58:45 +00:00
|
|
|
}
|
2018-02-22 14:27:41 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2022-01-15 21:22:22 +00:00
|
|
|
None
|
2018-02-22 14:27:41 +00:00
|
|
|
}
|
|
|
|
|
2021-01-02 18:45:11 +00:00
|
|
|
/// Returns the source snippet as `String` after the given `Span`.
|
|
|
|
pub fn span_to_next_source(&self, sp: Span) -> Result<String, SpanSnippetError> {
|
|
|
|
self.span_to_source(sp, |src, _, end_index| {
|
|
|
|
src.get(end_index..).map(|s| s.to_string()).ok_or(SpanSnippetError::IllFormedSpan(sp))
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
2021-10-14 18:28:25 +00:00
|
|
|
/// Extends the given `Span` while the next character matches the predicate
|
|
|
|
pub fn span_extend_while(
|
|
|
|
&self,
|
|
|
|
span: Span,
|
|
|
|
f: impl Fn(char) -> bool,
|
|
|
|
) -> Result<Span, SpanSnippetError> {
|
|
|
|
self.span_to_source(span, |s, _start, end| {
|
|
|
|
let n = s[end..].char_indices().find(|&(_, c)| !f(c)).map_or(s.len() - end, |(i, _)| i);
|
|
|
|
Ok(span.with_hi(span.hi() + BytePos(n as u32)))
|
|
|
|
})
|
|
|
|
}
|
|
|
|
|
2021-01-02 18:45:11 +00:00
|
|
|
/// Extends the given `Span` to just after the next occurrence of `c`.
|
|
|
|
pub fn span_extend_to_next_char(&self, sp: Span, c: char, accept_newlines: bool) -> Span {
|
|
|
|
if let Ok(next_source) = self.span_to_next_source(sp) {
|
|
|
|
let next_source = next_source.split(c).next().unwrap_or("");
|
2021-02-04 01:54:23 +00:00
|
|
|
if !next_source.is_empty() && (accept_newlines || !next_source.contains('\n')) {
|
2021-01-02 18:45:11 +00:00
|
|
|
return sp.with_hi(BytePos(sp.hi().0 + next_source.len() as u32));
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
sp
|
|
|
|
}
|
|
|
|
|
2019-09-06 02:56:45 +00:00
|
|
|
/// Given a `Span`, tries to get a shorter span ending before the first occurrence of `char`
|
2019-09-19 05:17:36 +00:00
|
|
|
/// `c`.
|
2018-03-18 13:05:20 +00:00
|
|
|
pub fn span_until_char(&self, sp: Span, c: char) -> Span {
|
|
|
|
match self.span_to_snippet(sp) {
|
|
|
|
Ok(snippet) => {
|
2020-03-03 00:19:00 +00:00
|
|
|
let snippet = snippet.split(c).next().unwrap_or("").trim_end();
|
2018-03-18 13:05:20 +00:00
|
|
|
if !snippet.is_empty() && !snippet.contains('\n') {
|
|
|
|
sp.with_hi(BytePos(sp.lo().0 + snippet.len() as u32))
|
|
|
|
} else {
|
|
|
|
sp
|
|
|
|
}
|
|
|
|
}
|
|
|
|
_ => sp,
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-09-06 02:56:45 +00:00
|
|
|
/// Given a `Span`, tries to get a shorter span ending just after the first occurrence of `char`
|
2018-03-18 13:05:20 +00:00
|
|
|
/// `c`.
|
|
|
|
pub fn span_through_char(&self, sp: Span, c: char) -> Span {
|
|
|
|
if let Ok(snippet) = self.span_to_snippet(sp) {
|
|
|
|
if let Some(offset) = snippet.find(c) {
|
|
|
|
return sp.with_hi(BytePos(sp.lo().0 + (offset + c.len_utf8()) as u32));
|
|
|
|
}
|
|
|
|
}
|
|
|
|
sp
|
|
|
|
}
|
|
|
|
|
2019-09-06 02:56:45 +00:00
|
|
|
/// Given a `Span`, gets a new `Span` covering the first token and all its trailing whitespace
|
|
|
|
/// or the original `Span`.
|
2018-01-23 18:54:57 +00:00
|
|
|
///
|
|
|
|
/// If `sp` points to `"let mut x"`, then a span pointing at `"let "` will be returned.
|
|
|
|
pub fn span_until_non_whitespace(&self, sp: Span) -> Span {
|
2018-03-18 13:05:20 +00:00
|
|
|
let mut whitespace_found = false;
|
|
|
|
|
|
|
|
self.span_take_while(sp, |c| {
|
|
|
|
if !whitespace_found && c.is_whitespace() {
|
|
|
|
whitespace_found = true;
|
2018-01-23 18:54:57 +00:00
|
|
|
}
|
2018-03-18 13:05:20 +00:00
|
|
|
|
2020-03-03 01:07:15 +00:00
|
|
|
!whitespace_found || c.is_whitespace()
|
2018-03-18 13:05:20 +00:00
|
|
|
})
|
2018-01-23 18:54:57 +00:00
|
|
|
}
|
|
|
|
|
2019-09-06 02:56:45 +00:00
|
|
|
/// Given a `Span`, gets a new `Span` covering the first token without its trailing whitespace
|
|
|
|
/// or the original `Span` in case of error.
|
2018-02-22 14:27:41 +00:00
|
|
|
///
|
|
|
|
/// If `sp` points to `"let mut x"`, then a span pointing at `"let"` will be returned.
|
|
|
|
pub fn span_until_whitespace(&self, sp: Span) -> Span {
|
2018-03-18 13:05:20 +00:00
|
|
|
self.span_take_while(sp, |c| !c.is_whitespace())
|
2017-10-12 06:06:45 +00:00
|
|
|
}
|
|
|
|
|
2019-09-06 02:56:45 +00:00
|
|
|
/// Given a `Span`, gets a shorter one until `predicate` yields `false`.
|
2018-03-17 18:41:46 +00:00
|
|
|
pub fn span_take_while<P>(&self, sp: Span, predicate: P) -> Span
|
|
|
|
where
|
|
|
|
P: for<'r> FnMut(&'r char) -> bool,
|
|
|
|
{
|
|
|
|
if let Ok(snippet) = self.span_to_snippet(sp) {
|
|
|
|
let offset = snippet.chars().take_while(predicate).map(|c| c.len_utf8()).sum::<usize>();
|
|
|
|
|
|
|
|
sp.with_hi(BytePos(sp.lo().0 + (offset as u32)))
|
|
|
|
} else {
|
|
|
|
sp
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2020-03-09 18:42:37 +00:00
|
|
|
/// Given a `Span`, return a span ending in the closest `{`. This is useful when you have a
|
|
|
|
/// `Span` enclosing a whole item but we need to point at only the head (usually the first
|
|
|
|
/// line) of that item.
|
|
|
|
///
|
|
|
|
/// *Only suitable for diagnostics.*
|
|
|
|
pub fn guess_head_span(&self, sp: Span) -> Span {
|
|
|
|
// FIXME: extend the AST items to have a head span, or replace callers with pointing at
|
|
|
|
// the item's ident when appropriate.
|
2017-04-11 11:40:31 +00:00
|
|
|
self.span_until_char(sp, '{')
|
|
|
|
}
|
|
|
|
|
2021-02-07 20:13:36 +00:00
|
|
|
/// Returns a new span representing just the first character of the given span.
|
2018-07-07 23:53:52 +00:00
|
|
|
pub fn start_point(&self, sp: Span) -> Span {
|
2021-02-07 20:13:36 +00:00
|
|
|
let width = {
|
|
|
|
let sp = sp.data();
|
|
|
|
let local_begin = self.lookup_byte_offset(sp.lo);
|
|
|
|
let start_index = local_begin.pos.to_usize();
|
|
|
|
let src = local_begin.sf.external_src.borrow();
|
|
|
|
|
|
|
|
let snippet = if let Some(ref src) = local_begin.sf.src {
|
|
|
|
Some(&src[start_index..])
|
|
|
|
} else if let Some(src) = src.get_source() {
|
|
|
|
Some(&src[start_index..])
|
|
|
|
} else {
|
|
|
|
None
|
|
|
|
};
|
|
|
|
|
|
|
|
match snippet {
|
|
|
|
None => 1,
|
|
|
|
Some(snippet) => match snippet.chars().next() {
|
|
|
|
None => 1,
|
|
|
|
Some(c) => c.len_utf8(),
|
|
|
|
},
|
|
|
|
}
|
|
|
|
};
|
|
|
|
|
|
|
|
sp.with_hi(BytePos(sp.lo().0 + width as u32))
|
2018-07-07 23:53:52 +00:00
|
|
|
}
|
|
|
|
|
2021-02-07 20:13:36 +00:00
|
|
|
/// Returns a new span representing just the last character of this span.
|
2018-01-14 17:29:07 +00:00
|
|
|
pub fn end_point(&self, sp: Span) -> Span {
|
2018-01-16 20:41:00 +00:00
|
|
|
let pos = sp.hi().0;
|
|
|
|
|
|
|
|
let width = self.find_width_of_character_at_span(sp, false);
|
|
|
|
let corrected_end_position = pos.checked_sub(width).unwrap_or(pos);
|
|
|
|
|
|
|
|
let end_point = BytePos(cmp::max(corrected_end_position, sp.lo().0));
|
|
|
|
sp.with_lo(end_point)
|
2018-01-14 17:29:07 +00:00
|
|
|
}
|
|
|
|
|
2019-09-06 02:56:45 +00:00
|
|
|
/// Returns a new span representing the next character after the end-point of this span.
|
2018-01-14 17:29:07 +00:00
|
|
|
pub fn next_point(&self, sp: Span) -> Span {
|
2021-01-31 20:21:28 +00:00
|
|
|
if sp.is_dummy() {
|
|
|
|
return sp;
|
|
|
|
}
|
2018-01-17 10:01:57 +00:00
|
|
|
let start_of_next_point = sp.hi().0;
|
2018-01-16 20:41:00 +00:00
|
|
|
|
2020-01-10 19:02:47 +00:00
|
|
|
let width = self.find_width_of_character_at_span(sp.shrink_to_hi(), true);
|
2018-01-17 10:01:57 +00:00
|
|
|
// If the width is 1, then the next span should point to the same `lo` and `hi`. However,
|
|
|
|
// in the case of a multibyte character, where the width != 1, the next span should
|
|
|
|
// span multiple bytes to include the whole character.
|
|
|
|
let end_of_next_point =
|
|
|
|
start_of_next_point.checked_add(width - 1).unwrap_or(start_of_next_point);
|
|
|
|
|
|
|
|
let end_of_next_point = BytePos(cmp::max(sp.lo().0 + 1, end_of_next_point));
|
2021-04-18 12:27:04 +00:00
|
|
|
Span::new(BytePos(start_of_next_point), end_of_next_point, sp.ctxt(), None)
|
2018-01-14 17:29:07 +00:00
|
|
|
}
|
|
|
|
|
2021-02-07 20:13:36 +00:00
|
|
|
/// Finds the width of the character, either before or after the end of provided span,
|
|
|
|
/// depending on the `forwards` parameter.
|
2018-01-16 20:41:00 +00:00
|
|
|
fn find_width_of_character_at_span(&self, sp: Span, forwards: bool) -> u32 {
|
2019-11-01 20:24:07 +00:00
|
|
|
let sp = sp.data();
|
|
|
|
if sp.lo == sp.hi {
|
|
|
|
debug!("find_width_of_character_at_span: early return empty span");
|
2018-01-16 20:41:00 +00:00
|
|
|
return 1;
|
|
|
|
}
|
2018-01-14 17:29:07 +00:00
|
|
|
|
2019-11-01 20:24:07 +00:00
|
|
|
let local_begin = self.lookup_byte_offset(sp.lo);
|
|
|
|
let local_end = self.lookup_byte_offset(sp.hi);
|
2018-01-27 13:30:34 +00:00
|
|
|
debug!(
|
|
|
|
"find_width_of_character_at_span: local_begin=`{:?}`, local_end=`{:?}`",
|
|
|
|
local_begin, local_end
|
|
|
|
);
|
2018-01-16 20:41:00 +00:00
|
|
|
|
2019-05-28 21:27:42 +00:00
|
|
|
if local_begin.sf.start_pos != local_end.sf.start_pos {
|
|
|
|
debug!("find_width_of_character_at_span: begin and end are in different files");
|
|
|
|
return 1;
|
|
|
|
}
|
|
|
|
|
2018-01-16 20:41:00 +00:00
|
|
|
let start_index = local_begin.pos.to_usize();
|
|
|
|
let end_index = local_end.pos.to_usize();
|
2018-01-27 13:30:34 +00:00
|
|
|
debug!(
|
|
|
|
"find_width_of_character_at_span: start_index=`{:?}`, end_index=`{:?}`",
|
|
|
|
start_index, end_index
|
|
|
|
);
|
2018-01-16 20:41:00 +00:00
|
|
|
|
|
|
|
// Disregard indexes that are at the start or end of their spans, they can't fit bigger
|
|
|
|
// characters.
|
2020-06-02 07:59:11 +00:00
|
|
|
if (!forwards && end_index == usize::MIN) || (forwards && start_index == usize::MAX) {
|
2018-01-27 13:30:34 +00:00
|
|
|
debug!("find_width_of_character_at_span: start or end of span, cannot be multibyte");
|
2018-01-16 20:41:00 +00:00
|
|
|
return 1;
|
|
|
|
}
|
|
|
|
|
2018-10-29 20:26:13 +00:00
|
|
|
let source_len = (local_begin.sf.end_pos - local_begin.sf.start_pos).to_usize();
|
2018-01-27 13:30:34 +00:00
|
|
|
debug!("find_width_of_character_at_span: source_len=`{:?}`", source_len);
|
2018-01-16 20:41:00 +00:00
|
|
|
// Ensure indexes are also not malformed.
|
|
|
|
if start_index > end_index || end_index > source_len {
|
2018-01-27 13:30:34 +00:00
|
|
|
debug!("find_width_of_character_at_span: source indexes are malformed");
|
2018-01-16 20:41:00 +00:00
|
|
|
return 1;
|
2018-01-14 17:29:07 +00:00
|
|
|
}
|
|
|
|
|
2018-10-29 20:26:13 +00:00
|
|
|
let src = local_begin.sf.external_src.borrow();
|
2018-02-12 19:28:32 +00:00
|
|
|
|
2018-01-16 20:41:00 +00:00
|
|
|
// We need to extend the snippet to the end of the src rather than to end_index so when
|
|
|
|
// searching forwards for boundaries we've got somewhere to search.
|
2018-10-29 20:26:13 +00:00
|
|
|
let snippet = if let Some(ref src) = local_begin.sf.src {
|
2021-02-07 20:13:36 +00:00
|
|
|
&src[start_index..]
|
2018-02-12 19:28:32 +00:00
|
|
|
} else if let Some(src) = src.get_source() {
|
2021-02-07 20:13:36 +00:00
|
|
|
&src[start_index..]
|
2018-01-16 20:41:00 +00:00
|
|
|
} else {
|
|
|
|
return 1;
|
|
|
|
};
|
2018-01-27 13:30:34 +00:00
|
|
|
debug!("find_width_of_character_at_span: snippet=`{:?}`", snippet);
|
|
|
|
|
2018-01-16 20:41:00 +00:00
|
|
|
let mut target = if forwards { end_index + 1 } else { end_index - 1 };
|
2018-01-27 13:30:34 +00:00
|
|
|
debug!("find_width_of_character_at_span: initial target=`{:?}`", target);
|
|
|
|
|
2018-02-25 00:41:08 +00:00
|
|
|
while !snippet.is_char_boundary(target - start_index) && target < source_len {
|
|
|
|
target = if forwards {
|
|
|
|
target + 1
|
|
|
|
} else {
|
|
|
|
match target.checked_sub(1) {
|
|
|
|
Some(target) => target,
|
|
|
|
None => {
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
};
|
2018-01-27 13:30:34 +00:00
|
|
|
debug!("find_width_of_character_at_span: target=`{:?}`", target);
|
2018-01-16 20:41:00 +00:00
|
|
|
}
|
2018-01-27 13:30:34 +00:00
|
|
|
debug!("find_width_of_character_at_span: final target=`{:?}`", target);
|
2018-01-16 20:41:00 +00:00
|
|
|
|
|
|
|
if forwards { (target - end_index) as u32 } else { (end_index - target) as u32 }
|
2018-01-14 17:29:07 +00:00
|
|
|
}
|
|
|
|
|
2018-08-18 10:13:56 +00:00
|
|
|
pub fn get_source_file(&self, filename: &FileName) -> Option<Lrc<SourceFile>> {
|
2021-01-10 00:04:48 +00:00
|
|
|
// Remap filename before lookup
|
|
|
|
let filename = self.path_mapping().map_filename_prefix(filename).0;
|
2018-10-29 20:26:13 +00:00
|
|
|
for sf in self.files.borrow().source_files.iter() {
|
2021-01-10 00:04:48 +00:00
|
|
|
if filename == sf.name {
|
2018-10-29 20:26:13 +00:00
|
|
|
return Some(sf.clone());
|
2013-12-31 00:30:33 +00:00
|
|
|
}
|
|
|
|
}
|
2016-05-24 14:08:01 +00:00
|
|
|
None
|
2012-11-13 02:24:56 +00:00
|
|
|
}
|
2012-01-25 21:22:10 +00:00
|
|
|
|
2019-09-06 02:56:45 +00:00
|
|
|
/// For a global `BytePos`, computes the local offset within the containing `SourceFile`.
|
2018-08-18 10:13:52 +00:00
|
|
|
pub fn lookup_byte_offset(&self, bpos: BytePos) -> SourceFileAndBytePos {
|
2018-08-18 10:13:56 +00:00
|
|
|
let idx = self.lookup_source_file_idx(bpos);
|
2018-10-29 20:26:13 +00:00
|
|
|
let sf = (*self.files.borrow().source_files)[idx].clone();
|
|
|
|
let offset = bpos - sf.start_pos;
|
2019-06-25 21:22:45 +00:00
|
|
|
SourceFileAndBytePos { sf, pos: offset }
|
2014-02-05 04:31:33 +00:00
|
|
|
}
|
|
|
|
|
2019-09-06 02:56:45 +00:00
|
|
|
// Returns the index of the `SourceFile` (in `self.files`) that contains `pos`.
|
2020-06-24 17:16:36 +00:00
|
|
|
// This index is guaranteed to be valid for the lifetime of this `SourceMap`,
|
|
|
|
// since `source_files` is a `MonotonicVec`
|
2018-08-18 10:13:56 +00:00
|
|
|
pub fn lookup_source_file_idx(&self, pos: BytePos) -> usize {
|
2019-10-12 14:47:17 +00:00
|
|
|
self.files
|
|
|
|
.borrow()
|
|
|
|
.source_files
|
|
|
|
.binary_search_by_key(&pos, |key| key.start_pos)
|
|
|
|
.unwrap_or_else(|p| p - 1)
|
2012-11-16 03:37:29 +00:00
|
|
|
}
|
|
|
|
|
2015-11-11 05:26:14 +00:00
|
|
|
pub fn count_lines(&self) -> usize {
|
2017-04-27 14:12:57 +00:00
|
|
|
self.files().iter().fold(0, |a, f| a + f.count_lines())
|
2015-11-11 05:26:14 +00:00
|
|
|
}
|
2018-05-21 16:06:28 +00:00
|
|
|
|
|
|
|
pub fn generate_fn_name_span(&self, span: Span) -> Option<Span> {
|
2022-01-15 21:22:22 +00:00
|
|
|
let prev_span = self.span_extend_to_prev_str(span, "fn", true, true).unwrap_or(span);
|
2020-05-16 10:29:46 +00:00
|
|
|
if let Ok(snippet) = self.span_to_snippet(prev_span) {
|
|
|
|
debug!(
|
|
|
|
"generate_fn_name_span: span={:?}, prev_span={:?}, snippet={:?}",
|
|
|
|
span, prev_span, snippet
|
|
|
|
);
|
|
|
|
|
|
|
|
if snippet.is_empty() {
|
|
|
|
return None;
|
|
|
|
};
|
|
|
|
|
|
|
|
let len = snippet
|
|
|
|
.find(|c: char| !c.is_alphanumeric() && c != '_')
|
|
|
|
.expect("no label after fn");
|
|
|
|
Some(prev_span.with_hi(BytePos(prev_span.lo().0 + len as u32)))
|
|
|
|
} else {
|
|
|
|
None
|
|
|
|
}
|
2018-05-21 16:06:28 +00:00
|
|
|
}
|
|
|
|
|
2019-09-06 02:56:45 +00:00
|
|
|
/// Takes the span of a type parameter in a function signature and try to generate a span for
|
|
|
|
/// the function name (with generics) and a new snippet for this span with the pointed type
|
2018-05-21 16:43:11 +00:00
|
|
|
/// parameter as a new local type parameter.
|
2018-05-21 16:06:28 +00:00
|
|
|
///
|
|
|
|
/// For instance:
|
|
|
|
/// ```rust,ignore (pseudo-Rust)
|
|
|
|
/// // Given span
|
|
|
|
/// fn my_function(param: T)
|
|
|
|
/// // ^ Original span
|
|
|
|
///
|
|
|
|
/// // Result
|
|
|
|
/// fn my_function(param: T)
|
|
|
|
/// // ^^^^^^^^^^^ Generated span with snippet `my_function<T>`
|
|
|
|
/// ```
|
|
|
|
///
|
|
|
|
/// Attention: The method used is very fragile since it essentially duplicates the work of the
|
|
|
|
/// parser. If you need to use this function or something similar, please consider updating the
|
2019-09-06 02:56:45 +00:00
|
|
|
/// `SourceMap` functions and this function to something more robust.
|
2018-05-21 16:06:28 +00:00
|
|
|
pub fn generate_local_type_param_snippet(&self, span: Span) -> Option<(Span, String)> {
|
|
|
|
// Try to extend the span to the previous "fn" keyword to retrieve the function
|
2019-09-06 02:56:45 +00:00
|
|
|
// signature.
|
2022-01-15 21:22:22 +00:00
|
|
|
if let Some(sugg_span) = self.span_extend_to_prev_str(span, "fn", false, true) {
|
2018-05-21 16:06:28 +00:00
|
|
|
if let Ok(snippet) = self.span_to_snippet(sugg_span) {
|
2019-09-06 02:56:45 +00:00
|
|
|
// Consume the function name.
|
2018-05-21 16:06:28 +00:00
|
|
|
let mut offset = snippet
|
|
|
|
.find(|c: char| !c.is_alphanumeric() && c != '_')
|
|
|
|
.expect("no label after fn");
|
|
|
|
|
2019-09-06 02:56:45 +00:00
|
|
|
// Consume the generics part of the function signature.
|
2018-05-21 16:06:28 +00:00
|
|
|
let mut bracket_counter = 0;
|
|
|
|
let mut last_char = None;
|
|
|
|
for c in snippet[offset..].chars() {
|
|
|
|
match c {
|
|
|
|
'<' => bracket_counter += 1,
|
|
|
|
'>' => bracket_counter -= 1,
|
|
|
|
'(' => {
|
|
|
|
if bracket_counter == 0 {
|
|
|
|
break;
|
|
|
|
}
|
2019-12-22 22:42:04 +00:00
|
|
|
}
|
2018-05-21 16:06:28 +00:00
|
|
|
_ => {}
|
|
|
|
}
|
|
|
|
offset += c.len_utf8();
|
|
|
|
last_char = Some(c);
|
|
|
|
}
|
|
|
|
|
2019-09-06 02:56:45 +00:00
|
|
|
// Adjust the suggestion span to encompass the function name with its generics.
|
2018-05-21 16:06:28 +00:00
|
|
|
let sugg_span = sugg_span.with_hi(BytePos(sugg_span.lo().0 + offset as u32));
|
|
|
|
|
|
|
|
// Prepare the new suggested snippet to append the type parameter that triggered
|
2019-09-06 02:56:45 +00:00
|
|
|
// the error in the generics of the function signature.
|
2018-05-21 16:06:28 +00:00
|
|
|
let mut new_snippet = if last_char == Some('>') {
|
|
|
|
format!("{}, ", &snippet[..(offset - '>'.len_utf8())])
|
|
|
|
} else {
|
|
|
|
format!("{}<", &snippet[..offset])
|
|
|
|
};
|
2018-10-12 14:16:00 +00:00
|
|
|
new_snippet
|
|
|
|
.push_str(&self.span_to_snippet(span).unwrap_or_else(|_| "T".to_string()));
|
2018-05-21 16:06:28 +00:00
|
|
|
new_snippet.push('>');
|
|
|
|
|
|
|
|
return Some((sugg_span, new_snippet));
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
None
|
|
|
|
}
|
2019-11-15 13:32:31 +00:00
|
|
|
pub fn ensure_source_file_source_present(&self, source_file: Lrc<SourceFile>) -> bool {
|
2021-08-16 15:29:49 +00:00
|
|
|
source_file.add_external_src(|| {
|
|
|
|
match source_file.name {
|
|
|
|
FileName::Real(ref name) if let Some(local_path) = name.local_path() => {
|
2021-04-08 23:54:51 +00:00
|
|
|
self.file_loader.read_file(local_path).ok()
|
|
|
|
}
|
2021-08-16 15:29:49 +00:00
|
|
|
_ => None,
|
2021-04-08 23:54:51 +00:00
|
|
|
}
|
2019-11-15 13:32:31 +00:00
|
|
|
})
|
2016-09-19 19:31:56 +00:00
|
|
|
}
|
2019-11-13 12:01:43 +00:00
|
|
|
|
|
|
|
pub fn is_imported(&self, sp: Span) -> bool {
|
|
|
|
let source_file_index = self.lookup_source_file_idx(sp.lo());
|
|
|
|
let source_file = &self.files()[source_file_index];
|
|
|
|
source_file.is_imported()
|
|
|
|
}
|
2021-10-14 18:28:25 +00:00
|
|
|
|
|
|
|
/// Gets the span of a statement. If the statement is a macro expansion, the
|
|
|
|
/// span in the context of the block span is found. The trailing semicolon is included
|
|
|
|
/// on a best-effort basis.
|
|
|
|
pub fn stmt_span(&self, stmt_span: Span, block_span: Span) -> Span {
|
|
|
|
if !stmt_span.from_expansion() {
|
|
|
|
return stmt_span;
|
|
|
|
}
|
|
|
|
let mac_call = original_sp(stmt_span, block_span);
|
|
|
|
self.mac_call_stmt_semi_span(mac_call).map_or(mac_call, |s| mac_call.with_hi(s.hi()))
|
|
|
|
}
|
|
|
|
|
|
|
|
/// Tries to find the span of the semicolon of a macro call statement.
|
|
|
|
/// The input must be the *call site* span of a statement from macro expansion.
|
2022-04-15 22:04:34 +00:00
|
|
|
/// ```ignore (illustrative)
|
|
|
|
/// // v output
|
|
|
|
/// mac!();
|
|
|
|
/// // ^^^^^^ input
|
|
|
|
/// ```
|
2021-10-14 18:28:25 +00:00
|
|
|
pub fn mac_call_stmt_semi_span(&self, mac_call: Span) -> Option<Span> {
|
|
|
|
let span = self.span_extend_while(mac_call, char::is_whitespace).ok()?;
|
|
|
|
let span = span.shrink_to_hi().with_hi(BytePos(span.hi().0.checked_add(1)?));
|
|
|
|
if self.span_to_snippet(span).as_deref() != Ok(";") {
|
|
|
|
return None;
|
|
|
|
}
|
|
|
|
Some(span)
|
|
|
|
}
|
2015-02-05 15:02:22 +00:00
|
|
|
}
|
|
|
|
|
2017-04-24 17:01:19 +00:00
|
|
|
/// A set of `--remap-path-prefix`-style substitutions, applied to file paths
/// before they are stored or displayed.
#[derive(Clone)]
pub struct FilePathMapping {
    // Ordered list of `(from, to)` prefix substitutions; later entries take
    // precedence (see `remap_path_prefix`, which iterates in reverse).
    mapping: Vec<(PathBuf, PathBuf)>,
    // Whether diagnostics should display the local or the remapped form of a
    // filename; derived from whether `mapping` is empty (see `new`).
    filename_display_for_diagnostics: FileNameDisplayPreference,
}
|
|
|
|
|
|
|
|
impl FilePathMapping {
    /// Creates a mapping with no substitutions; paths are displayed locally.
    pub fn empty() -> FilePathMapping {
        FilePathMapping::new(Vec::new())
    }

    /// Creates a mapping from a list of `(from, to)` prefix substitutions.
    pub fn new(mapping: Vec<(PathBuf, PathBuf)>) -> FilePathMapping {
        // With no substitutions there is nothing to hide, so diagnostics show
        // the local path; otherwise they show the remapped form.
        let filename_display_for_diagnostics = if mapping.is_empty() {
            FileNameDisplayPreference::Local
        } else {
            FileNameDisplayPreference::Remapped
        };

        FilePathMapping { mapping, filename_display_for_diagnostics }
    }

    /// Applies any path prefix substitution as defined by the mapping.
    /// The return value is the remapped path and a boolean indicating whether
    /// the path was affected by the mapping.
    pub fn map_prefix(&self, path: PathBuf) -> (PathBuf, bool) {
        // An empty path would trivially match an empty `from` prefix; treat it
        // as unmapped instead.
        if path.as_os_str().is_empty() {
            return (path, false);
        }

        return remap_path_prefix(&self.mapping, path);

        // Free function so that `#[instrument]` can skip `mapping` without
        // having to deal with `self`.
        #[instrument(level = "debug", skip(mapping))]
        fn remap_path_prefix(mapping: &[(PathBuf, PathBuf)], path: PathBuf) -> (PathBuf, bool) {
            // NOTE: We are iterating over the mapping entries from last to first
            //       because entries specified later on the command line should
            //       take precedence.
            for &(ref from, ref to) in mapping.iter().rev() {
                debug!("Trying to apply {:?} => {:?}", from, to);

                if let Ok(rest) = path.strip_prefix(from) {
                    let remapped = if rest.as_os_str().is_empty() {
                        // This is subtle, joining an empty path onto e.g. `foo/bar` will
                        // result in `foo/bar/`, that is, there'll be an additional directory
                        // separator at the end. This can lead to duplicated directory separators
                        // in remapped paths down the line.
                        // So, if we have an exact match, we just return that without a call
                        // to `Path::join()`.
                        to.clone()
                    } else {
                        to.join(rest)
                    };
                    debug!("Match - remapped {:?} => {:?}", path, remapped);

                    // First (i.e. last-specified) matching entry wins.
                    return (remapped, true);
                } else {
                    debug!("No match - prefix {:?} does not match {:?}", from, path);
                }
            }

            debug!("Path {:?} was not remapped", path);
            (path, false)
        }
    }

    /// Applies `map_prefix` to a `FileName`, wrapping a remapped local path in
    /// `RealFileName::Remapped` (keeping the original as `local_path`).
    /// Panics if the filename has already been remapped.
    fn map_filename_prefix(&self, file: &FileName) -> (FileName, bool) {
        match file {
            FileName::Real(realfile) if let RealFileName::LocalPath(local_path) = realfile => {
                let (mapped_path, mapped) = self.map_prefix(local_path.to_path_buf());
                let realfile = if mapped {
                    RealFileName::Remapped {
                        // Keep the original path around for local consumers.
                        local_path: Some(local_path.clone()),
                        virtual_name: mapped_path,
                    }
                } else {
                    realfile.clone()
                };
                (FileName::Real(realfile), mapped)
            }
            FileName::Real(_) => unreachable!("attempted to remap an already remapped filename"),
            // Virtual/synthetic filenames are never remapped.
            other => (other.clone(), false),
        }
    }

    /// Expand a relative path to an absolute path with remapping taken into account.
    /// Use this when absolute paths are required (e.g. debuginfo or crate metadata).
    ///
    /// The resulting `RealFileName` will have its `local_path` portion erased if
    /// possible (i.e. if there's also a remapped path).
    pub fn to_embeddable_absolute_path(
        &self,
        file_path: RealFileName,
        working_directory: &RealFileName,
    ) -> RealFileName {
        match file_path {
            // Anything that's already remapped we don't modify, except for erasing
            // the `local_path` portion.
            RealFileName::Remapped { local_path: _, virtual_name } => {
                RealFileName::Remapped {
                    // We do not want any local path to be exported into metadata
                    local_path: None,
                    // We use the remapped name verbatim, even if it looks like a relative
                    // path. The assumption is that the user doesn't want us to further
                    // process paths that have gone through remapping.
                    virtual_name,
                }
            }

            RealFileName::LocalPath(unmapped_file_path) => {
                // If no remapping has been applied yet, try to do so
                let (new_path, was_remapped) = self.map_prefix(unmapped_file_path);
                if was_remapped {
                    // It was remapped, so don't modify further
                    return RealFileName::Remapped { local_path: None, virtual_name: new_path };
                }

                if new_path.is_absolute() {
                    // No remapping has applied to this path and it is absolute,
                    // so the working directory cannot influence it either, so
                    // we are done.
                    return RealFileName::LocalPath(new_path);
                }

                debug_assert!(new_path.is_relative());
                let unmapped_file_path_rel = new_path;

                match working_directory {
                    RealFileName::LocalPath(unmapped_working_dir_abs) => {
                        let file_path_abs = unmapped_working_dir_abs.join(unmapped_file_path_rel);

                        // Although neither `working_directory` nor the file name were subject
                        // to path remapping, the concatenation between the two may be. Hence
                        // we need to do a remapping here.
                        let (file_path_abs, was_remapped) = self.map_prefix(file_path_abs);
                        if was_remapped {
                            RealFileName::Remapped {
                                // Erase the actual path
                                local_path: None,
                                virtual_name: file_path_abs,
                            }
                        } else {
                            // No kind of remapping applied to this path, so
                            // we leave it as it is.
                            RealFileName::LocalPath(file_path_abs)
                        }
                    }
                    RealFileName::Remapped {
                        local_path: _,
                        virtual_name: remapped_working_dir_abs,
                    } => {
                        // If working_directory has been remapped, then we emit
                        // Remapped variant as the expanded path won't be valid
                        RealFileName::Remapped {
                            local_path: None,
                            virtual_name: Path::new(remapped_working_dir_abs)
                                .join(unmapped_file_path_rel),
                        }
                    }
                }
            }
        }
    }
}
|