2019-09-06 02:56:45 +00:00
|
|
|
//! The `SourceMap` tracks all the source code used within a single crate, mapping
|
2014-12-22 17:04:23 +00:00
|
|
|
//! from integer byte positions to the original source code location. Each bit
|
|
|
|
//! of source parsed during crate parsing (typically files, in-memory strings,
|
|
|
|
//! or various bits of macro expansion) covers a contiguous range of bytes in the
|
2019-09-06 02:56:45 +00:00
|
|
|
//! `SourceMap` and is represented by a `SourceFile`. Byte positions are stored in
|
2019-09-19 05:17:36 +00:00
|
|
|
//! `Span` and used pervasively in the compiler. They are absolute positions
|
2019-09-06 02:56:45 +00:00
|
|
|
//! within the `SourceMap`, which upon request can be converted to line and column
|
2014-12-22 17:04:23 +00:00
|
|
|
//! information, source code snippets, etc.
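//!
//! A rough sketch of the typical flow (illustrative only; the file name, contents, and
//! assertions below are assumptions, not taken from real callers):
//!
//! ```rust,ignore (illustrative)
//! let sm = SourceMap::new(FilePathMapping::empty());
//! let file = sm.new_source_file(PathBuf::from("lib.rs").into(), "fn main() {}".to_string());
//! // Byte positions are absolute within the `SourceMap`, so they can be turned back
//! // into line/column information on demand.
//! let loc = sm.lookup_char_pos(file.start_pos);
//! assert_eq!((loc.line, loc.col.to_usize()), (1, 0));
//! ```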
|
2012-11-13 00:45:24 +00:00
|
|
|
|
2019-12-22 22:42:04 +00:00
|
|
|
pub use crate::hygiene::{ExpnData, ExpnKind};
|
2019-11-15 13:27:09 +00:00
|
|
|
pub use crate::*;
|
2014-11-06 08:05:53 +00:00
|
|
|
|
2017-10-19 12:32:39 +00:00
|
|
|
use rustc_data_structures::fx::FxHashMap;
|
2017-12-15 22:50:07 +00:00
|
|
|
use rustc_data_structures::stable_hasher::StableHasher;
|
2020-02-18 17:24:36 +00:00
|
|
|
use rustc_data_structures::sync::{AtomicU32, Lock, LockGuard, Lrc, MappedLockGuard};
|
2018-01-14 17:29:07 +00:00
|
|
|
use std::cmp;
|
2020-02-18 17:24:36 +00:00
|
|
|
use std::convert::TryFrom;
|
2017-12-15 22:50:07 +00:00
|
|
|
use std::hash::Hash;
|
2017-04-27 14:12:57 +00:00
|
|
|
use std::path::{Path, PathBuf};
|
2020-02-18 17:24:36 +00:00
|
|
|
use std::sync::atomic::Ordering;
|
2014-12-22 17:04:23 +00:00
|
|
|
|
2016-06-21 22:08:13 +00:00
|
|
|
use std::fs;
|
2018-11-16 21:22:06 +00:00
|
|
|
use std::io;
|
2020-08-14 06:05:01 +00:00
|
|
|
use tracing::debug;
|
2019-02-06 17:33:01 +00:00
|
|
|
|
2019-08-01 21:26:40 +00:00
|
|
|
#[cfg(test)]
|
|
|
|
mod tests;
|
|
|
|
|
2019-02-08 13:53:55 +00:00
|
|
|
/// Returns the span itself if it doesn't come from a macro expansion,
|
2014-04-09 20:42:25 +00:00
|
|
|
/// otherwise returns the call site span up to the `enclosing_sp` by
|
2019-08-13 20:56:42 +00:00
|
|
|
/// following the `expn_data` chain.
|
2017-03-17 04:04:41 +00:00
|
|
|
pub fn original_sp(sp: Span, enclosing_sp: Span) -> Span {
|
2019-08-13 20:56:42 +00:00
|
|
|
let expn_data1 = sp.ctxt().outer_expn_data();
|
|
|
|
let expn_data2 = enclosing_sp.ctxt().outer_expn_data();
|
2019-12-22 22:42:04 +00:00
|
|
|
if expn_data1.is_root() || !expn_data2.is_root() && expn_data1.call_site == expn_data2.call_site
|
|
|
|
{
|
2019-08-11 00:00:05 +00:00
|
|
|
sp
|
|
|
|
} else {
|
2019-08-13 20:56:42 +00:00
|
|
|
original_sp(expn_data1.call_site, enclosing_sp)
|
2014-04-09 20:42:25 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2020-06-24 17:16:36 +00:00
|
|
|
pub mod monotonic {
|
|
|
|
use std::ops::{Deref, DerefMut};
|
|
|
|
|
|
|
|
/// A `MonotonicVec` is a `Vec` which can only be grown.
|
|
|
|
/// Once inserted, an element can never be removed or swapped,
|
|
|
|
/// guaranteeing that any indices into a `MonotonicVec` are stable.
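///
/// A rough usage sketch (illustrative only):
///
/// ```rust,ignore (illustrative)
/// let mut v = MonotonicVec::new(vec!['a', 'b']);
/// let idx = 1;     // index into the current contents
/// v.push('c');     // growing the vector never invalidates `idx`
/// assert_eq!(v[idx], 'b');
/// ```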
|
|
|
|
// This is declared in its own module to ensure that the private
|
|
|
|
// field is inaccessible.
|
|
|
|
pub struct MonotonicVec<T>(Vec<T>);
|
|
|
|
impl<T> MonotonicVec<T> {
|
|
|
|
pub fn new(val: Vec<T>) -> MonotonicVec<T> {
|
|
|
|
MonotonicVec(val)
|
|
|
|
}
|
|
|
|
|
|
|
|
pub fn push(&mut self, val: T) {
|
|
|
|
self.0.push(val);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
impl<T> Default for MonotonicVec<T> {
|
|
|
|
fn default() -> Self {
|
|
|
|
MonotonicVec::new(vec![])
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
impl<T> Deref for MonotonicVec<T> {
|
|
|
|
type Target = Vec<T>;
|
|
|
|
fn deref(&self) -> &Self::Target {
|
|
|
|
&self.0
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
impl<T> !DerefMut for MonotonicVec<T> {}
|
|
|
|
}
|
|
|
|
|
2020-06-11 14:49:57 +00:00
|
|
|
#[derive(Clone, Encodable, Decodable, Debug, Copy, HashStable_Generic)]
|
2016-06-21 22:08:13 +00:00
|
|
|
pub struct Spanned<T> {
|
|
|
|
pub node: T,
|
|
|
|
pub span: Span,
|
2015-12-13 12:12:47 +00:00
|
|
|
}
|
|
|
|
|
2016-06-21 22:08:13 +00:00
|
|
|
pub fn respan<T>(sp: Span, t: T) -> Spanned<T> {
|
2019-12-22 22:42:04 +00:00
|
|
|
Spanned { node: t, span: sp }
|
2013-01-30 17:56:33 +00:00
|
|
|
}
|
|
|
|
|
2016-06-21 22:08:13 +00:00
|
|
|
pub fn dummy_spanned<T>(t: T) -> Spanned<T> {
|
|
|
|
respan(DUMMY_SP, t)
|
2015-08-26 23:46:05 +00:00
|
|
|
}
|
|
|
|
|
2015-02-11 17:29:49 +00:00
|
|
|
// _____________________________________________________________________________
|
2018-08-18 10:13:52 +00:00
|
|
|
// SourceFile, MultiByteChar, FileName, FileLines
|
2015-02-11 17:29:49 +00:00
|
|
|
//
|
|
|
|
|
2015-05-13 22:44:57 +00:00
|
|
|
/// An abstraction over the fs operations used by the Parser.
|
|
|
|
pub trait FileLoader {
|
|
|
|
/// Query the existence of a file.
|
|
|
|
fn file_exists(&self, path: &Path) -> bool;
|
|
|
|
|
|
|
|
/// Read the contents of a UTF-8 file into memory.
|
|
|
|
fn read_file(&self, path: &Path) -> io::Result<String>;
|
|
|
|
}
|
|
|
|
|
|
|
|
/// A `FileLoader` that uses `std::fs` to load real files.
|
|
|
|
pub struct RealFileLoader;
|
|
|
|
|
|
|
|
impl FileLoader for RealFileLoader {
|
|
|
|
fn file_exists(&self, path: &Path) -> bool {
|
|
|
|
fs::metadata(path).is_ok()
|
|
|
|
}
|
|
|
|
|
|
|
|
fn read_file(&self, path: &Path) -> io::Result<String> {
|
2018-11-16 21:22:06 +00:00
|
|
|
fs::read_to_string(path)
|
2015-05-13 22:44:57 +00:00
|
|
|
}
|
|
|
|
}
|
2015-02-11 17:29:49 +00:00
|
|
|
|
2019-09-06 02:56:45 +00:00
|
|
|
// This is a `SourceFile` identifier that is used to correlate `SourceFile`s between
|
2017-12-15 22:50:07 +00:00
|
|
|
// subsequent compilation sessions (which is something we need to do during
|
|
|
|
// incremental compilation).
|
2020-06-11 14:49:57 +00:00
|
|
|
#[derive(Copy, Clone, PartialEq, Eq, Hash, Encodable, Decodable, Debug)]
|
2018-10-29 20:26:13 +00:00
|
|
|
pub struct StableSourceFileId(u128);
|
2017-12-15 22:50:07 +00:00
|
|
|
|
2020-05-29 18:04:03 +00:00
|
|
|
// FIXME: we need a more globally consistent approach to the problem solved by
|
|
|
|
// StableSourceFileId, perhaps built atop source_file.name_hash.
|
2018-10-29 20:26:13 +00:00
|
|
|
impl StableSourceFileId {
|
|
|
|
pub fn new(source_file: &SourceFile) -> StableSourceFileId {
|
2019-12-22 22:42:04 +00:00
|
|
|
StableSourceFileId::new_from_pieces(
|
|
|
|
&source_file.name,
|
|
|
|
source_file.name_was_remapped,
|
|
|
|
source_file.unmapped_path.as_ref(),
|
|
|
|
)
|
2018-10-30 14:10:42 +00:00
|
|
|
}
|
|
|
|
|
2020-05-29 15:47:17 +00:00
|
|
|
fn new_from_pieces(
|
2019-12-22 22:42:04 +00:00
|
|
|
name: &FileName,
|
|
|
|
name_was_remapped: bool,
|
|
|
|
unmapped_path: Option<&FileName>,
|
|
|
|
) -> StableSourceFileId {
|
2017-12-15 22:50:07 +00:00
|
|
|
let mut hasher = StableHasher::new();
|
|
|
|
|
2020-05-29 18:04:03 +00:00
|
|
|
if let FileName::Real(real_name) = name {
|
|
|
|
// rust-lang/rust#70924: Use the stable (virtualized) name when
|
|
|
|
// available. (We do not want artifacts from transient file system
|
|
|
|
// paths for libstd to leak into our build artifacts.)
|
|
|
|
real_name.stable_name().hash(&mut hasher)
|
|
|
|
} else {
|
|
|
|
name.hash(&mut hasher);
|
|
|
|
}
|
2018-10-30 14:10:42 +00:00
|
|
|
name_was_remapped.hash(&mut hasher);
|
|
|
|
unmapped_path.hash(&mut hasher);
|
2017-12-15 22:50:07 +00:00
|
|
|
|
2018-10-29 20:26:13 +00:00
|
|
|
StableSourceFileId(hasher.finish())
|
2017-12-15 22:50:07 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2015-02-11 17:29:49 +00:00
|
|
|
// _____________________________________________________________________________
|
2018-08-18 10:13:35 +00:00
|
|
|
// SourceMap
|
2015-02-11 17:29:49 +00:00
|
|
|
//
|
|
|
|
|
2018-10-16 14:57:53 +00:00
|
|
|
#[derive(Default)]
|
2018-08-18 10:13:35 +00:00
|
|
|
pub(super) struct SourceMapFiles {
|
2020-06-24 17:16:36 +00:00
|
|
|
source_files: monotonic::MonotonicVec<Lrc<SourceFile>>,
|
2019-12-22 22:42:04 +00:00
|
|
|
stable_id_to_source_file: FxHashMap<StableSourceFileId, Lrc<SourceFile>>,
|
2018-03-14 17:11:37 +00:00
|
|
|
}
|
|
|
|
|
2018-08-18 10:13:35 +00:00
|
|
|
pub struct SourceMap {
|
2020-02-18 17:24:36 +00:00
|
|
|
/// The address space below this value is currently used by the files in the source map.
|
|
|
|
used_address_space: AtomicU32,
|
|
|
|
|
2019-08-08 14:58:06 +00:00
|
|
|
files: Lock<SourceMapFiles>,
|
2018-07-10 19:06:26 +00:00
|
|
|
file_loader: Box<dyn FileLoader + Sync + Send>,
|
2017-04-24 17:01:19 +00:00
|
|
|
// This is used to apply the file path remapping as specified via
|
2019-09-06 02:56:45 +00:00
|
|
|
// `--remap-path-prefix` to all `SourceFile`s allocated within this `SourceMap`.
|
2017-04-24 17:01:19 +00:00
|
|
|
path_mapping: FilePathMapping,
|
2020-03-31 05:17:15 +00:00
|
|
|
|
|
|
|
/// The algorithm used for hashing the contents of each source file.
|
|
|
|
hash_kind: SourceFileHashAlgorithm,
|
2011-07-05 09:48:19 +00:00
|
|
|
}
|
|
|
|
|
2018-08-18 10:13:35 +00:00
|
|
|
impl SourceMap {
|
|
|
|
pub fn new(path_mapping: FilePathMapping) -> SourceMap {
|
2020-03-31 05:17:15 +00:00
|
|
|
Self::with_file_loader_and_hash_kind(
|
|
|
|
Box::new(RealFileLoader),
|
2020-02-18 17:24:36 +00:00
|
|
|
path_mapping,
|
2020-03-31 05:17:15 +00:00
|
|
|
SourceFileHashAlgorithm::Md5,
|
|
|
|
)
|
2015-05-13 22:44:57 +00:00
|
|
|
}
|
|
|
|
|
2020-03-31 05:17:15 +00:00
|
|
|
pub fn with_file_loader_and_hash_kind(
|
2019-12-22 22:42:04 +00:00
|
|
|
file_loader: Box<dyn FileLoader + Sync + Send>,
|
|
|
|
path_mapping: FilePathMapping,
|
2020-03-31 05:17:15 +00:00
|
|
|
hash_kind: SourceFileHashAlgorithm,
|
2019-12-22 22:42:04 +00:00
|
|
|
) -> SourceMap {
|
2020-02-18 17:24:36 +00:00
|
|
|
SourceMap {
|
|
|
|
used_address_space: AtomicU32::new(0),
|
|
|
|
files: Default::default(),
|
|
|
|
file_loader,
|
|
|
|
path_mapping,
|
2020-03-31 05:17:15 +00:00
|
|
|
hash_kind,
|
2020-02-18 17:24:36 +00:00
|
|
|
}
|
2012-01-21 09:00:06 +00:00
|
|
|
}
|
2012-11-13 02:24:56 +00:00
|
|
|
|
2017-04-24 17:01:19 +00:00
|
|
|
pub fn path_mapping(&self) -> &FilePathMapping {
|
|
|
|
&self.path_mapping
|
|
|
|
}
|
|
|
|
|
2015-05-13 22:44:57 +00:00
|
|
|
pub fn file_exists(&self, path: &Path) -> bool {
|
|
|
|
self.file_loader.file_exists(path)
|
|
|
|
}
|
|
|
|
|
2018-08-18 10:13:52 +00:00
|
|
|
pub fn load_file(&self, path: &Path) -> io::Result<Lrc<SourceFile>> {
|
2016-03-23 03:01:37 +00:00
|
|
|
let src = self.file_loader.read_file(path)?;
|
2018-12-04 20:18:03 +00:00
|
|
|
let filename = path.to_owned().into();
|
2018-08-18 10:13:56 +00:00
|
|
|
Ok(self.new_source_file(filename, src))
|
2015-05-13 22:44:57 +00:00
|
|
|
}
|
|
|
|
|
2019-08-13 16:51:32 +00:00
|
|
|
/// Loads a source file as a binary blob.
|
|
|
|
///
|
|
|
|
/// Unlike `load_file`, guarantees that no normalization like BOM-removal
|
|
|
|
/// takes place.
|
|
|
|
pub fn load_binary_file(&self, path: &Path) -> io::Result<Vec<u8>> {
|
|
|
|
// Ideally, this should use `self.file_loader`, but it can't
|
|
|
|
// deal with binary files yet.
|
|
|
|
let bytes = fs::read(path)?;
|
|
|
|
|
|
|
|
// We need to add the file to the `SourceMap` so that it is present
|
|
|
|
// in dep-info. There's also an edge case: the file might be both
|
|
|
|
// loaded as a binary via `include_bytes!` and as a proper `SourceFile`
|
|
|
|
// via `mod`, so we try to use the real file contents and not just an
|
|
|
|
// empty string.
|
2019-12-22 22:42:04 +00:00
|
|
|
let text = std::str::from_utf8(&bytes).unwrap_or("").to_string();
|
2019-08-13 16:51:32 +00:00
|
|
|
self.new_source_file(path.to_owned().into(), text);
|
|
|
|
Ok(bytes)
|
|
|
|
}
|
|
|
|
|
2020-06-24 17:16:36 +00:00
|
|
|
// By returning a `MonotonicVec`, we ensure that consumers cannot invalidate
|
|
|
|
// any existing indices pointing into `files`.
|
|
|
|
pub fn files(&self) -> MappedLockGuard<'_, monotonic::MonotonicVec<Lrc<SourceFile>>> {
|
2018-10-29 20:26:13 +00:00
|
|
|
LockGuard::map(self.files.borrow(), |files| &mut files.source_files)
|
2017-04-27 14:12:57 +00:00
|
|
|
}
|
|
|
|
|
2019-12-22 22:42:04 +00:00
|
|
|
pub fn source_file_by_stable_id(
|
|
|
|
&self,
|
|
|
|
stable_id: StableSourceFileId,
|
|
|
|
) -> Option<Lrc<SourceFile>> {
|
2020-02-29 12:14:52 +00:00
|
|
|
self.files.borrow().stable_id_to_source_file.get(&stable_id).cloned()
|
2017-10-19 12:32:39 +00:00
|
|
|
}
|
|
|
|
|
2020-02-18 17:24:36 +00:00
|
|
|
fn allocate_address_space(&self, size: usize) -> Result<usize, OffsetOverflowError> {
|
|
|
|
let size = u32::try_from(size).map_err(|_| OffsetOverflowError)?;
|
|
|
|
|
|
|
|
loop {
|
|
|
|
let current = self.used_address_space.load(Ordering::Relaxed);
|
|
|
|
let next = current
|
|
|
|
.checked_add(size)
|
|
|
|
// Add one so there is some space between files. This lets us distinguish
|
|
|
|
// positions in the `SourceMap`, even in the presence of zero-length files.
|
|
|
|
.and_then(|next| next.checked_add(1))
|
|
|
|
.ok_or(OffsetOverflowError)?;
|
|
|
|
|
|
|
|
if self
|
|
|
|
.used_address_space
|
|
|
|
.compare_exchange(current, next, Ordering::Relaxed, Ordering::Relaxed)
|
|
|
|
.is_ok()
|
|
|
|
{
|
|
|
|
return Ok(usize::try_from(current).unwrap());
|
|
|
|
}
|
2015-07-02 03:37:52 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-09-06 02:56:45 +00:00
|
|
|
/// Creates a new `SourceFile`.
|
|
|
|
/// If a file already exists in the `SourceMap` with the same ID, that file is returned
|
|
|
|
/// unmodified.
|
2018-08-18 10:13:56 +00:00
|
|
|
pub fn new_source_file(&self, filename: FileName, src: String) -> Lrc<SourceFile> {
|
2019-12-22 22:42:04 +00:00
|
|
|
self.try_new_source_file(filename, src).unwrap_or_else(|OffsetOverflowError| {
|
|
|
|
eprintln!("fatal error: rustc does not support files larger than 4GB");
|
|
|
|
crate::fatal_error::FatalError.raise()
|
|
|
|
})
|
2019-06-17 08:35:26 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
fn try_new_source_file(
|
|
|
|
&self,
|
2020-05-29 15:31:55 +00:00
|
|
|
mut filename: FileName,
|
2019-12-22 22:42:04 +00:00
|
|
|
src: String,
|
2019-06-17 08:35:26 +00:00
|
|
|
) -> Result<Lrc<SourceFile>, OffsetOverflowError> {
|
2017-09-30 06:28:48 +00:00
|
|
|
// The path is used to determine the directory for loading submodules and
|
|
|
|
// include files, so it must be captured before remapping.
|
|
|
|
// Note that `filename` may not be a valid path, e.g. it may be `<anon>`, etc.,
|
|
|
|
// but this is okay because the directory determined by `path.pop()` will
|
|
|
|
// be empty, so the working directory will be used.
|
2017-12-14 07:09:19 +00:00
|
|
|
let unmapped_path = filename.clone();
|
|
|
|
|
2020-05-29 15:31:55 +00:00
|
|
|
let was_remapped;
|
|
|
|
if let FileName::Real(real_filename) = &mut filename {
|
|
|
|
match real_filename {
|
|
|
|
RealFileName::Named(path_to_be_remapped)
|
|
|
|
| RealFileName::Devirtualized {
|
|
|
|
local_path: path_to_be_remapped,
|
|
|
|
virtual_name: _,
|
|
|
|
} => {
|
|
|
|
let mapped = self.path_mapping.map_prefix(path_to_be_remapped.clone());
|
|
|
|
was_remapped = mapped.1;
|
|
|
|
*path_to_be_remapped = mapped.0;
|
|
|
|
}
|
2019-12-22 22:42:04 +00:00
|
|
|
}
|
2020-05-29 15:31:55 +00:00
|
|
|
} else {
|
|
|
|
was_remapped = false;
|
|
|
|
}
|
2012-11-16 22:22:09 +00:00
|
|
|
|
2019-12-22 22:42:04 +00:00
|
|
|
let file_id =
|
|
|
|
StableSourceFileId::new_from_pieces(&filename, was_remapped, Some(&unmapped_path));
|
2018-10-30 14:10:42 +00:00
|
|
|
|
2019-06-17 08:35:26 +00:00
|
|
|
let lrc_sf = match self.source_file_by_stable_id(file_id) {
|
2018-10-30 14:10:42 +00:00
|
|
|
Some(lrc_sf) => lrc_sf,
|
|
|
|
None => {
|
2020-02-18 17:24:36 +00:00
|
|
|
let start_pos = self.allocate_address_space(src.len())?;
|
|
|
|
|
2018-10-30 14:10:42 +00:00
|
|
|
let source_file = Lrc::new(SourceFile::new(
|
|
|
|
filename,
|
|
|
|
was_remapped,
|
|
|
|
unmapped_path,
|
|
|
|
src,
|
|
|
|
Pos::from_usize(start_pos),
|
2020-03-31 05:17:15 +00:00
|
|
|
self.hash_kind,
|
2020-02-18 17:24:36 +00:00
|
|
|
));
|
2012-11-16 22:22:09 +00:00
|
|
|
|
2018-10-30 14:10:42 +00:00
|
|
|
let mut files = self.files.borrow_mut();
|
2017-10-19 12:32:39 +00:00
|
|
|
|
2018-10-30 14:10:42 +00:00
|
|
|
files.source_files.push(source_file.clone());
|
|
|
|
files.stable_id_to_source_file.insert(file_id, source_file.clone());
|
|
|
|
|
|
|
|
source_file
|
|
|
|
}
|
2019-06-17 08:35:26 +00:00
|
|
|
};
|
|
|
|
Ok(lrc_sf)
|
2012-11-16 22:22:09 +00:00
|
|
|
}
|
|
|
|
|
2019-09-06 02:56:45 +00:00
|
|
|
/// Allocates a new `SourceFile` representing a source file from an external
|
|
|
|
/// crate. The source code of such an "imported `SourceFile`" is not available,
|
2015-02-11 17:29:49 +00:00
|
|
|
/// but we still know enough to generate accurate debuginfo location
|
|
|
|
/// information for things inlined from other crates.
|
2018-09-04 15:09:49 +00:00
|
|
|
pub fn new_imported_source_file(
|
|
|
|
&self,
|
|
|
|
filename: FileName,
|
|
|
|
name_was_remapped: bool,
|
2020-03-31 05:17:15 +00:00
|
|
|
src_hash: SourceFileHash,
|
2018-09-04 15:09:49 +00:00
|
|
|
name_hash: u128,
|
|
|
|
source_len: usize,
|
2020-02-07 19:02:24 +00:00
|
|
|
cnum: CrateNum,
|
2018-09-04 15:09:49 +00:00
|
|
|
mut file_local_lines: Vec<BytePos>,
|
|
|
|
mut file_local_multibyte_chars: Vec<MultiByteChar>,
|
|
|
|
mut file_local_non_narrow_chars: Vec<NonNarrowChar>,
|
2019-10-03 00:55:31 +00:00
|
|
|
mut file_local_normalized_pos: Vec<NormalizedPos>,
|
2020-02-07 19:02:24 +00:00
|
|
|
original_start_pos: BytePos,
|
|
|
|
original_end_pos: BytePos,
|
2018-09-04 15:09:49 +00:00
|
|
|
) -> Lrc<SourceFile> {
|
2020-02-18 17:24:36 +00:00
|
|
|
let start_pos = self
|
|
|
|
.allocate_address_space(source_len)
|
|
|
|
.expect("not enough address space for imported source file");
|
2015-02-11 17:29:49 +00:00
|
|
|
|
|
|
|
let end_pos = Pos::from_usize(start_pos + source_len);
|
|
|
|
let start_pos = Pos::from_usize(start_pos);
|
|
|
|
|
2015-04-17 04:38:24 +00:00
|
|
|
for pos in &mut file_local_lines {
|
|
|
|
*pos = *pos + start_pos;
|
|
|
|
}
|
|
|
|
|
|
|
|
for mbc in &mut file_local_multibyte_chars {
|
|
|
|
mbc.pos = mbc.pos + start_pos;
|
|
|
|
}
|
2015-02-11 17:29:49 +00:00
|
|
|
|
2017-11-02 01:25:54 +00:00
|
|
|
for swc in &mut file_local_non_narrow_chars {
|
|
|
|
*swc = *swc + start_pos;
|
|
|
|
}
|
|
|
|
|
2019-10-03 00:55:31 +00:00
|
|
|
for nc in &mut file_local_normalized_pos {
|
|
|
|
nc.pos = nc.pos + start_pos;
|
|
|
|
}
|
|
|
|
|
2018-08-18 10:13:56 +00:00
|
|
|
let source_file = Lrc::new(SourceFile {
|
2015-02-11 17:29:49 +00:00
|
|
|
name: filename,
|
2017-08-07 05:54:09 +00:00
|
|
|
name_was_remapped,
|
2017-10-03 09:44:58 +00:00
|
|
|
unmapped_path: None,
|
2015-02-11 17:29:49 +00:00
|
|
|
src: None,
|
2017-08-07 05:54:09 +00:00
|
|
|
src_hash,
|
2020-02-07 19:02:24 +00:00
|
|
|
external_src: Lock::new(ExternalSource::Foreign {
|
|
|
|
kind: ExternalSourceKind::AbsentOk,
|
|
|
|
original_start_pos,
|
|
|
|
original_end_pos,
|
|
|
|
}),
|
2017-08-07 05:54:09 +00:00
|
|
|
start_pos,
|
|
|
|
end_pos,
|
2018-05-23 13:59:42 +00:00
|
|
|
lines: file_local_lines,
|
|
|
|
multibyte_chars: file_local_multibyte_chars,
|
|
|
|
non_narrow_chars: file_local_non_narrow_chars,
|
2019-10-03 00:55:31 +00:00
|
|
|
normalized_pos: file_local_normalized_pos,
|
2017-12-19 14:14:41 +00:00
|
|
|
name_hash,
|
2020-02-07 19:02:24 +00:00
|
|
|
cnum,
|
2015-02-11 17:29:49 +00:00
|
|
|
});
|
|
|
|
|
2018-03-14 17:11:37 +00:00
|
|
|
let mut files = self.files.borrow_mut();
|
2015-02-11 17:29:49 +00:00
|
|
|
|
2018-10-29 20:26:13 +00:00
|
|
|
files.source_files.push(source_file.clone());
|
2019-12-22 22:42:04 +00:00
|
|
|
files
|
|
|
|
.stable_id_to_source_file
|
|
|
|
.insert(StableSourceFileId::new(&source_file), source_file.clone());
|
2017-10-19 12:32:39 +00:00
|
|
|
|
2018-08-18 10:13:56 +00:00
|
|
|
source_file
|
2015-02-11 17:29:49 +00:00
|
|
|
}
|
|
|
|
|
2014-05-22 23:57:53 +00:00
|
|
|
pub fn mk_substr_filename(&self, sp: Span) -> String {
|
2017-07-31 20:04:34 +00:00
|
|
|
let pos = self.lookup_char_pos(sp.lo());
|
2019-12-22 22:42:04 +00:00
|
|
|
format!("<{}:{}:{}>", pos.file.name, pos.line, pos.col.to_usize() + 1)
|
2011-07-05 09:48:19 +00:00
|
|
|
}
|
2012-01-25 21:22:10 +00:00
|
|
|
|
2019-09-06 02:56:45 +00:00
|
|
|
// If there is a doctest offset, applies it to the given line number.
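// For example (illustrative values): with `FileName::DocTest(_, -3)` and `orig = 10`
// this returns `7`, while for any non-doctest file `orig` is returned unchanged.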
|
2018-12-04 20:18:03 +00:00
|
|
|
pub fn doctest_offset_line(&self, file: &FileName, orig: usize) -> usize {
|
2020-03-20 14:03:11 +00:00
|
|
|
match file {
|
2018-12-04 20:18:03 +00:00
|
|
|
FileName::DocTest(_, offset) => {
|
2020-03-21 12:37:29 +00:00
|
|
|
if *offset < 0 {
|
2018-12-04 20:18:03 +00:00
|
|
|
orig - (-(*offset)) as usize
|
2020-03-21 12:37:29 +00:00
|
|
|
} else {
|
|
|
|
orig + *offset as usize
|
|
|
|
}
|
2019-12-22 22:42:04 +00:00
|
|
|
}
|
|
|
|
_ => orig,
|
2020-03-20 14:03:11 +00:00
|
|
|
}
|
2018-01-08 14:47:23 +00:00
|
|
|
}
|
|
|
|
|
2019-09-06 02:56:45 +00:00
|
|
|
/// Looks up source information about a `BytePos`.
|
2013-04-17 16:15:08 +00:00
|
|
|
pub fn lookup_char_pos(&self, pos: BytePos) -> Loc {
|
2015-06-16 18:47:09 +00:00
|
|
|
let chpos = self.bytepos_to_file_charpos(pos);
|
2015-07-02 03:37:52 +00:00
|
|
|
match self.lookup_line(pos) {
|
2018-10-29 20:26:13 +00:00
|
|
|
Ok(SourceFileAndLine { sf: f, line: a }) => {
|
2015-07-02 03:37:52 +00:00
|
|
|
let line = a + 1; // Line numbers start at 1
|
2018-05-23 13:59:42 +00:00
|
|
|
let linebpos = f.lines[a];
|
2015-07-02 03:37:52 +00:00
|
|
|
let linechpos = self.bytepos_to_file_charpos(linebpos);
|
2017-11-02 01:25:54 +00:00
|
|
|
let col = chpos - linechpos;
|
|
|
|
|
|
|
|
let col_display = {
|
2018-05-23 13:59:42 +00:00
|
|
|
let start_width_idx = f
|
|
|
|
.non_narrow_chars
|
2017-11-02 01:25:54 +00:00
|
|
|
.binary_search_by_key(&linebpos, |x| x.pos())
|
|
|
|
.unwrap_or_else(|x| x);
|
2018-05-23 13:59:42 +00:00
|
|
|
let end_width_idx = f
|
|
|
|
.non_narrow_chars
|
2017-11-02 01:25:54 +00:00
|
|
|
.binary_search_by_key(&pos, |x| x.pos())
|
|
|
|
.unwrap_or_else(|x| x);
|
|
|
|
let special_chars = end_width_idx - start_width_idx;
|
2019-12-22 22:42:04 +00:00
|
|
|
let non_narrow: usize = f.non_narrow_chars[start_width_idx..end_width_idx]
|
2020-02-29 02:05:14 +00:00
|
|
|
.iter()
|
2017-11-02 01:25:54 +00:00
|
|
|
.map(|x| x.width())
|
|
|
|
.sum();
|
|
|
|
col.0 - special_chars + non_narrow
|
|
|
|
};
|
2019-12-22 22:42:04 +00:00
|
|
|
debug!("byte pos {:?} is on the line at byte pos {:?}", pos, linebpos);
|
|
|
|
debug!("char pos {:?} is on the line at char pos {:?}", chpos, linechpos);
|
2015-07-02 03:37:52 +00:00
|
|
|
debug!("byte is on line: {}", line);
|
|
|
|
assert!(chpos >= linechpos);
|
2019-12-22 22:42:04 +00:00
|
|
|
Loc { file: f, line, col, col_display }
|
2015-07-02 03:37:52 +00:00
|
|
|
}
|
|
|
|
Err(f) => {
|
2017-11-02 01:25:54 +00:00
|
|
|
let col_display = {
|
2018-05-23 13:59:42 +00:00
|
|
|
let end_width_idx = f
|
|
|
|
.non_narrow_chars
|
2017-11-02 01:25:54 +00:00
|
|
|
.binary_search_by_key(&pos, |x| x.pos())
|
|
|
|
.unwrap_or_else(|x| x);
|
2019-12-22 22:42:04 +00:00
|
|
|
let non_narrow: usize =
|
2020-02-29 02:05:14 +00:00
|
|
|
f.non_narrow_chars[0..end_width_idx].iter().map(|x| x.width()).sum();
|
2017-11-02 01:25:54 +00:00
|
|
|
chpos.0 - end_width_idx + non_narrow
|
|
|
|
};
|
2019-12-22 22:42:04 +00:00
|
|
|
Loc { file: f, line: 0, col: chpos, col_display }
|
2015-07-02 03:37:52 +00:00
|
|
|
}
|
2015-06-16 18:47:09 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-09-06 02:56:45 +00:00
|
|
|
// If the corresponding `SourceFile` is empty, no line number is returned.
|
2018-08-18 10:13:52 +00:00
|
|
|
pub fn lookup_line(&self, pos: BytePos) -> Result<SourceFileAndLine, Lrc<SourceFile>> {
|
2018-08-18 10:13:56 +00:00
|
|
|
let idx = self.lookup_source_file_idx(pos);
|
2015-06-16 18:47:09 +00:00
|
|
|
|
2018-10-29 20:26:13 +00:00
|
|
|
let f = (*self.files.borrow().source_files)[idx].clone();
|
2015-07-02 03:37:52 +00:00
|
|
|
|
2016-08-24 21:06:31 +00:00
|
|
|
match f.lookup_line(pos) {
|
2019-06-25 21:22:45 +00:00
|
|
|
Some(line) => Ok(SourceFileAndLine { sf: f, line }),
|
2019-12-22 22:42:04 +00:00
|
|
|
None => Err(f),
|
2015-07-02 03:37:52 +00:00
|
|
|
}
|
2012-11-13 02:24:56 +00:00
|
|
|
}
|
2011-07-16 06:01:10 +00:00
|
|
|
|
2020-08-27 20:58:01 +00:00
|
|
|
/// Returns a new `Span` covering the start and end `BytePos`s of the file containing the given
|
|
|
|
/// `pos`. This can be used to quickly determine if another `BytePos` or `Span` is from the same
|
|
|
|
/// file.
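///
/// For example (illustrative; `span` and `other_span` are assumed spans created through
/// this `SourceMap`), the returned file span allows cheap same-file checks:
///
/// ```rust,ignore (illustrative)
/// let file_span = source_map.lookup_file_span(span.lo());
/// if file_span.contains(other_span) {
///     // `other_span` comes from the same `SourceFile` as `span`.
/// }
/// ```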
|
|
|
|
pub fn lookup_file_span(&self, pos: BytePos) -> Span {
|
|
|
|
let idx = self.lookup_source_file_idx(pos);
|
|
|
|
let SourceFile { start_pos, end_pos, .. } = *(*self.files.borrow().source_files)[idx];
|
|
|
|
Span::with_root_ctxt(start_pos, end_pos)
|
|
|
|
}
|
|
|
|
|
2019-09-06 02:56:45 +00:00
|
|
|
/// Returns `Some(span)`, the union of the LHS and RHS spans. The LHS must precede the RHS. If
|
|
|
|
/// there are gaps between LHS and RHS, the resulting union will cross these gaps.
|
|
|
|
/// For this to work,
|
2017-12-31 16:17:01 +00:00
|
|
|
///
|
2019-09-06 02:56:45 +00:00
|
|
|
/// * the syntax contexts of both spans must match,
|
|
|
|
/// * the LHS span needs to end on the same line the RHS span begins,
|
|
|
|
/// * the LHS span must start at or before the RHS span.
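///
/// A rough sketch (illustrative; `sp_foo` and `sp_bar` are assumed spans covering `foo`
/// and `bar` in `foo + bar`, on one line and in the same syntax context):
///
/// ```rust,ignore (illustrative)
/// let merged = source_map.merge_spans(sp_foo, sp_bar);
/// assert!(merged.is_some()); // the merged span covers `foo + bar`, gap included
/// ```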
|
2016-09-19 19:31:56 +00:00
|
|
|
pub fn merge_spans(&self, sp_lhs: Span, sp_rhs: Span) -> Option<Span> {
|
2019-09-06 02:56:45 +00:00
|
|
|
// Ensure we're at the same expansion ID.
|
2017-07-31 20:04:34 +00:00
|
|
|
if sp_lhs.ctxt() != sp_rhs.ctxt() {
|
2016-09-19 19:31:56 +00:00
|
|
|
return None;
|
|
|
|
}
|
|
|
|
|
2017-07-31 20:04:34 +00:00
|
|
|
let lhs_end = match self.lookup_line(sp_lhs.hi()) {
|
2016-09-19 19:31:56 +00:00
|
|
|
Ok(x) => x,
|
2019-12-22 22:42:04 +00:00
|
|
|
Err(_) => return None,
|
2016-09-19 19:31:56 +00:00
|
|
|
};
|
2017-07-31 20:04:34 +00:00
|
|
|
let rhs_begin = match self.lookup_line(sp_rhs.lo()) {
|
2016-09-19 19:31:56 +00:00
|
|
|
Ok(x) => x,
|
2019-12-22 22:42:04 +00:00
|
|
|
Err(_) => return None,
|
2016-09-19 19:31:56 +00:00
|
|
|
};
|
|
|
|
|
2019-09-06 02:56:45 +00:00
|
|
|
// If we must cross lines to merge, don't merge.
|
2016-09-19 19:31:56 +00:00
|
|
|
if lhs_end.line != rhs_begin.line {
|
|
|
|
return None;
|
|
|
|
}
|
|
|
|
|
2019-09-06 02:56:45 +00:00
|
|
|
// Ensure these follow the expected order and that we don't overlap.
|
2017-07-31 20:04:34 +00:00
|
|
|
if (sp_lhs.lo() <= sp_rhs.lo()) && (sp_lhs.hi() <= sp_rhs.lo()) {
|
|
|
|
Some(sp_lhs.to(sp_rhs))
|
2016-09-19 19:31:56 +00:00
|
|
|
} else {
|
|
|
|
None
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2014-06-21 10:39:03 +00:00
|
|
|
pub fn span_to_string(&self, sp: Span) -> String {
|
2018-10-29 20:26:13 +00:00
|
|
|
if self.files.borrow().source_files.is_empty() && sp.is_dummy() {
|
2014-05-25 10:17:19 +00:00
|
|
|
return "no-location".to_string();
|
2012-12-05 23:13:24 +00:00
|
|
|
}
|
|
|
|
|
2019-04-05 19:42:40 +00:00
|
|
|
let lo = self.lookup_char_pos(sp.lo());
|
|
|
|
let hi = self.lookup_char_pos(sp.hi());
|
2019-12-22 22:42:04 +00:00
|
|
|
format!(
|
|
|
|
"{}:{}:{}: {}:{}",
|
2019-09-06 02:56:45 +00:00
|
|
|
lo.file.name,
|
|
|
|
lo.line,
|
|
|
|
lo.col.to_usize() + 1,
|
|
|
|
hi.line,
|
|
|
|
hi.col.to_usize() + 1,
|
|
|
|
)
|
2012-02-10 18:28:43 +00:00
|
|
|
}
|
|
|
|
|
2013-08-31 16:13:04 +00:00
|
|
|
pub fn span_to_filename(&self, sp: Span) -> FileName {
|
2017-09-30 06:28:48 +00:00
|
|
|
self.lookup_char_pos(sp.lo()).file.name.clone()
|
|
|
|
}
|
|
|
|
|
2017-12-14 07:09:19 +00:00
|
|
|
pub fn span_to_unmapped_path(&self, sp: Span) -> FileName {
|
2019-12-22 22:42:04 +00:00
|
|
|
self.lookup_char_pos(sp.lo())
|
|
|
|
.file
|
|
|
|
.unmapped_path
|
|
|
|
.clone()
|
2019-09-06 21:38:07 +00:00
|
|
|
.expect("`SourceMap::span_to_unmapped_path` called for imported `SourceFile`?")
|
2012-11-13 02:24:56 +00:00
|
|
|
}
|
2011-07-05 09:48:19 +00:00
|
|
|
|
2017-12-22 23:55:44 +00:00
|
|
|
pub fn is_multiline(&self, sp: Span) -> bool {
|
|
|
|
let lo = self.lookup_char_pos(sp.lo());
|
|
|
|
let hi = self.lookup_char_pos(sp.hi());
|
|
|
|
lo.line != hi.line
|
|
|
|
}
|
|
|
|
|
2020-01-15 17:57:06 +00:00
|
|
|
pub fn is_valid_span(&self, sp: Span) -> Result<(Loc, Loc), SpanLinesError> {
|
2017-07-31 20:04:34 +00:00
|
|
|
let lo = self.lookup_char_pos(sp.lo());
|
2016-04-20 18:52:31 +00:00
|
|
|
debug!("span_to_lines: lo={:?}", lo);
|
2017-07-31 20:04:34 +00:00
|
|
|
let hi = self.lookup_char_pos(sp.hi());
|
2016-04-20 18:52:31 +00:00
|
|
|
debug!("span_to_lines: hi={:?}", hi);
|
2015-04-30 08:23:50 +00:00
|
|
|
if lo.file.start_pos != hi.file.start_pos {
|
|
|
|
return Err(SpanLinesError::DistinctSources(DistinctSources {
|
|
|
|
begin: (lo.file.name.clone(), lo.file.start_pos),
|
|
|
|
end: (hi.file.name.clone(), hi.file.start_pos),
|
|
|
|
}));
|
|
|
|
}
|
2020-01-15 17:57:06 +00:00
|
|
|
Ok((lo, hi))
|
|
|
|
}
|
|
|
|
|
2020-03-07 10:00:40 +00:00
|
|
|
pub fn is_line_before_span_empty(&self, sp: Span) -> bool {
|
|
|
|
match self.span_to_prev_source(sp) {
|
|
|
|
Ok(s) => s.split('\n').last().map(|l| l.trim_start().is_empty()).unwrap_or(false),
|
|
|
|
Err(_) => false,
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2020-01-15 17:57:06 +00:00
|
|
|
pub fn span_to_lines(&self, sp: Span) -> FileLinesResult {
|
|
|
|
debug!("span_to_lines(sp={:?})", sp);
|
|
|
|
let (lo, hi) = self.is_valid_span(sp)?;
|
2015-04-30 08:23:50 +00:00
|
|
|
assert!(hi.line >= lo.line);
|
|
|
|
|
2020-03-13 21:01:35 +00:00
|
|
|
if sp.is_dummy() {
|
|
|
|
return Ok(FileLines { file: lo.file, lines: Vec::new() });
|
|
|
|
}
|
|
|
|
|
2015-04-09 18:46:03 +00:00
|
|
|
let mut lines = Vec::with_capacity(hi.line - lo.line + 1);
|
|
|
|
|
|
|
|
// The span starts partway through the first line,
|
|
|
|
// but after that it starts from offset 0.
|
|
|
|
let mut start_col = lo.col;
|
|
|
|
|
|
|
|
// For every line but the last, it extends from `start_col`
|
|
|
|
// and to the end of the line. Be careful because the line
|
|
|
|
// numbers in Loc are 1-based, so we subtract 1 to get 0-based
|
|
|
|
// lines.
|
2020-03-13 21:01:35 +00:00
|
|
|
//
|
|
|
|
// FIXME: now that we handle DUMMY_SP up above, we should consider
|
2020-03-23 17:32:23 +00:00
|
|
|
// asserting that the line numbers here are all indeed 1-based.
|
2020-01-06 10:46:30 +00:00
|
|
|
let hi_line = hi.line.saturating_sub(1);
|
|
|
|
for line_index in lo.line.saturating_sub(1)..hi_line {
|
2019-12-22 22:42:04 +00:00
|
|
|
let line_len = lo.file.get_line(line_index).map(|s| s.chars().count()).unwrap_or(0);
|
|
|
|
lines.push(LineInfo { line_index, start_col, end_col: CharPos::from_usize(line_len) });
|
2015-04-09 18:46:03 +00:00
|
|
|
start_col = CharPos::from_usize(0);
|
|
|
|
}
|
|
|
|
|
|
|
|
// For the last line, it extends from `start_col` to `hi.col`:
|
2020-01-06 10:46:30 +00:00
|
|
|
lines.push(LineInfo { line_index: hi_line, start_col, end_col: hi.col });
|
2015-04-09 18:46:03 +00:00
|
|
|
|
2019-12-22 22:42:04 +00:00
|
|
|
Ok(FileLines { file: lo.file, lines })
|
2012-11-13 02:24:56 +00:00
|
|
|
}
|
2012-02-10 18:28:43 +00:00
|
|
|
|
2019-02-08 13:53:55 +00:00
|
|
|
/// Extracts the source surrounding the given `Span` using the `extract_source` function. The
|
2018-02-22 14:27:41 +00:00
|
|
|
/// extract function takes three arguments: a string slice containing the source, an index in
|
|
|
|
/// the slice for the beginning of the span and an index in the slice for the end of the span.
|
|
|
|
fn span_to_source<F>(&self, sp: Span, extract_source: F) -> Result<String, SpanSnippetError>
|
2019-12-22 22:42:04 +00:00
|
|
|
where
|
|
|
|
F: Fn(&str, usize, usize) -> Result<String, SpanSnippetError>,
|
2018-02-22 14:27:41 +00:00
|
|
|
{
|
2017-07-31 20:04:34 +00:00
|
|
|
let local_begin = self.lookup_byte_offset(sp.lo());
|
|
|
|
let local_end = self.lookup_byte_offset(sp.hi());
|
2013-08-04 02:14:01 +00:00
|
|
|
|
2018-10-29 20:26:13 +00:00
|
|
|
if local_begin.sf.start_pos != local_end.sf.start_pos {
|
2020-03-20 14:03:11 +00:00
|
|
|
Err(SpanSnippetError::DistinctSources(DistinctSources {
|
2019-12-22 22:42:04 +00:00
|
|
|
begin: (local_begin.sf.name.clone(), local_begin.sf.start_pos),
|
|
|
|
end: (local_end.sf.name.clone(), local_end.sf.start_pos),
|
2020-03-20 14:03:11 +00:00
|
|
|
}))
|
2013-08-04 02:14:01 +00:00
|
|
|
} else {
|
2018-10-29 20:26:13 +00:00
|
|
|
self.ensure_source_file_source_present(local_begin.sf.clone());
|
2017-06-12 19:47:39 +00:00
|
|
|
|
|
|
|
let start_index = local_begin.pos.to_usize();
|
|
|
|
let end_index = local_end.pos.to_usize();
|
2019-12-22 22:42:04 +00:00
|
|
|
let source_len = (local_begin.sf.end_pos - local_begin.sf.start_pos).to_usize();
|
2017-06-12 19:47:39 +00:00
|
|
|
|
|
|
|
if start_index > end_index || end_index > source_len {
|
2019-12-22 22:42:04 +00:00
|
|
|
return Err(SpanSnippetError::MalformedForSourcemap(MalformedSourceMapPositions {
|
|
|
|
name: local_begin.sf.name.clone(),
|
|
|
|
source_len,
|
|
|
|
begin_pos: local_begin.pos,
|
|
|
|
end_pos: local_end.pos,
|
|
|
|
}));
|
2017-06-12 19:47:39 +00:00
|
|
|
}
|
|
|
|
|
2018-10-29 20:26:13 +00:00
|
|
|
if let Some(ref src) = local_begin.sf.src {
|
2020-03-20 14:03:11 +00:00
|
|
|
extract_source(src, start_index, end_index)
|
2018-10-29 20:26:13 +00:00
|
|
|
} else if let Some(src) = local_begin.sf.external_src.borrow().get_source() {
|
2020-03-20 14:03:11 +00:00
|
|
|
extract_source(src, start_index, end_index)
|
2017-06-12 19:47:39 +00:00
|
|
|
} else {
|
2020-03-20 14:03:11 +00:00
|
|
|
Err(SpanSnippetError::SourceNotAvailable { filename: local_begin.sf.name.clone() })
|
2015-02-11 17:29:49 +00:00
|
|
|
}
|
2013-08-04 02:14:01 +00:00
|
|
|
}
|
2012-11-13 02:24:56 +00:00
|
|
|
}
|
2012-05-11 00:18:04 +00:00
|
|
|
|
2019-09-06 02:56:45 +00:00
|
|
|
/// Returns the source snippet as a `String` corresponding to the given `Span`.
|
2018-02-22 14:27:41 +00:00
|
|
|
pub fn span_to_snippet(&self, sp: Span) -> Result<String, SpanSnippetError> {
|
2019-12-22 22:42:04 +00:00
|
|
|
self.span_to_source(sp, |src, start_index, end_index| {
|
|
|
|
src.get(start_index..end_index)
|
|
|
|
.map(|s| s.to_string())
|
|
|
|
.ok_or_else(|| SpanSnippetError::IllFormedSpan(sp))
|
|
|
|
})
|
2018-02-22 14:27:41 +00:00
|
|
|
}
|
|
|
|
|
2018-09-04 15:09:49 +00:00
|
|
|
pub fn span_to_margin(&self, sp: Span) -> Option<usize> {
|
|
|
|
match self.span_to_prev_source(sp) {
|
|
|
|
Err(_) => None,
|
2019-12-22 22:42:04 +00:00
|
|
|
Ok(source) => source
|
|
|
|
.split('\n')
|
|
|
|
.last()
|
|
|
|
.map(|last_line| last_line.len() - last_line.trim_start().len()),
|
2018-09-04 15:09:49 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-09-06 02:56:45 +00:00
|
|
|
/// Returns the source snippet as a `String` before the given `Span`.
|
2018-02-22 14:27:41 +00:00
|
|
|
pub fn span_to_prev_source(&self, sp: Span) -> Result<String, SpanSnippetError> {
|
2019-12-22 22:42:04 +00:00
|
|
|
self.span_to_source(sp, |src, start_index, _| {
|
|
|
|
src.get(..start_index)
|
|
|
|
.map(|s| s.to_string())
|
|
|
|
.ok_or_else(|| SpanSnippetError::IllFormedSpan(sp))
|
|
|
|
})
|
2018-02-22 14:27:41 +00:00
|
|
|
}
|
|
|
|
|
2019-09-06 02:56:45 +00:00
|
|
|
/// Extends the given `Span` to just after the previous occurrence of `c`. Returns the same span
|
2018-02-22 14:27:41 +00:00
|
|
|
/// if no character could be found or if an error occurred while retrieving the code snippet.
|
|
|
|
pub fn span_extend_to_prev_char(&self, sp: Span, c: char) -> Span {
|
|
|
|
if let Ok(prev_source) = self.span_to_prev_source(sp) {
|
2020-03-03 00:19:00 +00:00
|
|
|
let prev_source = prev_source.rsplit(c).next().unwrap_or("").trim_start();
|
2018-02-22 14:27:41 +00:00
|
|
|
if !prev_source.is_empty() && !prev_source.contains('\n') {
|
|
|
|
return sp.with_lo(BytePos(sp.lo().0 - prev_source.len() as u32));
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
sp
|
|
|
|
}
|
|
|
|
|
2019-09-06 02:56:45 +00:00
|
|
|
/// Extends the given `Span` to just after the previous occurrence of `pat` when surrounded by
|
|
|
|
/// whitespace. Returns the same span if no character could be found or if an error occurred
|
2018-03-14 05:58:45 +00:00
|
|
|
/// while retrieving the code snippet.
|
|
|
|
pub fn span_extend_to_prev_str(&self, sp: Span, pat: &str, accept_newlines: bool) -> Span {
|
|
|
|
// Ensure that the pattern is delimited, to avoid the following:
|
|
|
|
// fn my_fn()
|
|
|
|
// ^^^^ returned span without the check
|
|
|
|
// ---------- correct span
|
|
|
|
for ws in &[" ", "\t", "\n"] {
|
|
|
|
let pat = pat.to_owned() + ws;
|
|
|
|
if let Ok(prev_source) = self.span_to_prev_source(sp) {
|
2020-03-03 00:19:00 +00:00
|
|
|
let prev_source = prev_source.rsplit(&pat).next().unwrap_or("").trim_start();
|
2018-03-14 05:58:45 +00:00
|
|
|
if !prev_source.is_empty() && (!prev_source.contains('\n') || accept_newlines) {
|
|
|
|
return sp.with_lo(BytePos(sp.lo().0 - prev_source.len() as u32));
|
|
|
|
}
|
2018-02-22 14:27:41 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
sp
|
|
|
|
}
|
|
|
|
|
2019-09-06 02:56:45 +00:00
|
|
|
/// Given a `Span`, tries to get a shorter span ending before the first occurrence of the character
|
2019-09-19 05:17:36 +00:00
|
|
|
/// `c`.
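///
/// For example (illustrative), if `sp` covers `"foo(bar)"`, then
/// `span_until_char(sp, '(')` returns a span covering just `"foo"`.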
|
2018-03-18 13:05:20 +00:00
|
|
|
pub fn span_until_char(&self, sp: Span, c: char) -> Span {
|
|
|
|
match self.span_to_snippet(sp) {
|
|
|
|
Ok(snippet) => {
|
2020-03-03 00:19:00 +00:00
|
|
|
let snippet = snippet.split(c).next().unwrap_or("").trim_end();
|
2018-03-18 13:05:20 +00:00
|
|
|
if !snippet.is_empty() && !snippet.contains('\n') {
|
|
|
|
sp.with_hi(BytePos(sp.lo().0 + snippet.len() as u32))
|
|
|
|
} else {
|
|
|
|
sp
|
|
|
|
}
|
|
|
|
}
|
|
|
|
_ => sp,
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-09-06 02:56:45 +00:00
|
|
|
/// Given a `Span`, tries to get a shorter span ending just after the first occurrence of the character
|
2018-03-18 13:05:20 +00:00
|
|
|
/// `c`.
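///
/// For example (illustrative), if `sp` covers `"foo(bar)"`, then
/// `span_through_char(sp, '(')` returns a span covering `"foo("`.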
|
|
|
|
pub fn span_through_char(&self, sp: Span, c: char) -> Span {
|
|
|
|
if let Ok(snippet) = self.span_to_snippet(sp) {
|
|
|
|
if let Some(offset) = snippet.find(c) {
|
|
|
|
return sp.with_hi(BytePos(sp.lo().0 + (offset + c.len_utf8()) as u32));
|
|
|
|
}
|
|
|
|
}
|
|
|
|
sp
|
|
|
|
}
|
|
|
|
|
2019-09-06 02:56:45 +00:00
|
|
|
/// Given a `Span`, gets a new `Span` covering the first token and all its trailing whitespace
|
|
|
|
/// or the original `Span` in case of error.
|
2018-01-23 18:54:57 +00:00
|
|
|
///
|
|
|
|
/// If `sp` points to `"let mut x"`, then a span pointing at `"let "` will be returned.
|
|
|
|
pub fn span_until_non_whitespace(&self, sp: Span) -> Span {
|
2018-03-18 13:05:20 +00:00
|
|
|
let mut whitespace_found = false;
|
|
|
|
|
|
|
|
self.span_take_while(sp, |c| {
|
|
|
|
if !whitespace_found && c.is_whitespace() {
|
|
|
|
whitespace_found = true;
|
2018-01-23 18:54:57 +00:00
|
|
|
}
|
2018-03-18 13:05:20 +00:00
|
|
|
|
2020-03-03 01:07:15 +00:00
|
|
|
!whitespace_found || c.is_whitespace()
|
2018-03-18 13:05:20 +00:00
|
|
|
})
|
2018-01-23 18:54:57 +00:00
|
|
|
}
|
|
|
|
|
2019-09-06 02:56:45 +00:00
|
|
|
/// Given a `Span`, gets a new `Span` covering the first token without its trailing whitespace
|
|
|
|
/// or the original `Span` in case of error.
|
2018-02-22 14:27:41 +00:00
|
|
|
///
|
|
|
|
/// If `sp` points to `"let mut x"`, then a span pointing at `"let"` will be returned.
|
|
|
|
pub fn span_until_whitespace(&self, sp: Span) -> Span {
|
2018-03-18 13:05:20 +00:00
|
|
|
self.span_take_while(sp, |c| !c.is_whitespace())
|
2017-10-12 06:06:45 +00:00
|
|
|
}
|
|
|
|
|
2019-09-06 02:56:45 +00:00
|
|
|
/// Given a `Span`, gets a shorter one until `predicate` yields `false`.
|
2018-03-17 18:41:46 +00:00
|
|
|
pub fn span_take_while<P>(&self, sp: Span, predicate: P) -> Span
|
2019-12-22 22:42:04 +00:00
|
|
|
where
|
|
|
|
P: for<'r> FnMut(&'r char) -> bool,
|
2018-03-17 18:41:46 +00:00
|
|
|
{
|
|
|
|
if let Ok(snippet) = self.span_to_snippet(sp) {
|
2019-12-22 22:42:04 +00:00
|
|
|
let offset = snippet.chars().take_while(predicate).map(|c| c.len_utf8()).sum::<usize>();
|
2018-03-17 18:41:46 +00:00
|
|
|
|
|
|
|
sp.with_hi(BytePos(sp.lo().0 + (offset as u32)))
|
|
|
|
} else {
|
|
|
|
sp
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2020-03-09 18:42:37 +00:00
|
|
|
/// Given a `Span`, returns a span ending in the closest `{`. This is useful when you have a
|
|
|
|
/// `Span` enclosing a whole item but need to point at only the head (usually the first
|
|
|
|
/// line) of that item.
|
|
|
|
///
|
|
|
|
/// *Only suitable for diagnostics.*
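///
/// For example (illustrative), for a span covering `fn foo() { body() }`, this
/// returns a span covering `fn foo()` (the text before the first `{`, with trailing
/// whitespace trimmed).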
|
|
|
|
pub fn guess_head_span(&self, sp: Span) -> Span {
|
|
|
|
// FIXME: extend the AST items to have a head span, or replace callers with pointing at
|
|
|
|
// the item's ident when appropriate.
|
2017-04-11 11:40:31 +00:00
|
|
|
self.span_until_char(sp, '{')
|
|
|
|
}
|
|
|
|
|
2019-09-06 02:56:45 +00:00
|
|
|
/// Returns a new span representing just the start point of this span.
|
2018-07-07 23:53:52 +00:00
|
|
|
pub fn start_point(&self, sp: Span) -> Span {
|
|
|
|
let pos = sp.lo().0;
|
|
|
|
let width = self.find_width_of_character_at_span(sp, false);
|
|
|
|
let corrected_start_position = pos.checked_add(width).unwrap_or(pos);
|
|
|
|
let end_point = BytePos(cmp::max(corrected_start_position, sp.lo().0));
|
|
|
|
sp.with_hi(end_point)
|
|
|
|
}
|
|
|
|
|
2019-09-06 02:56:45 +00:00
|
|
|
/// Returns a new span representing just the end point of this span.
|
2018-01-14 17:29:07 +00:00
|
|
|
pub fn end_point(&self, sp: Span) -> Span {
|
2018-01-16 20:41:00 +00:00
|
|
|
let pos = sp.hi().0;
|
|
|
|
|
|
|
|
let width = self.find_width_of_character_at_span(sp, false);
|
|
|
|
let corrected_end_position = pos.checked_sub(width).unwrap_or(pos);
|
|
|
|
|
|
|
|
let end_point = BytePos(cmp::max(corrected_end_position, sp.lo().0));
|
|
|
|
sp.with_lo(end_point)
|
2018-01-14 17:29:07 +00:00
|
|
|
}
|
|
|
|
|
2019-09-06 02:56:45 +00:00
|
|
|
/// Returns a new span representing the next character after the end-point of this span.
|
2018-01-14 17:29:07 +00:00
|
|
|
pub fn next_point(&self, sp: Span) -> Span {
|
2018-01-17 10:01:57 +00:00
|
|
|
let start_of_next_point = sp.hi().0;
|
2018-01-16 20:41:00 +00:00
|
|
|
|
2020-01-10 19:02:47 +00:00
|
|
|
let width = self.find_width_of_character_at_span(sp.shrink_to_hi(), true);
|
2018-01-17 10:01:57 +00:00
|
|
|
// If the width is 1, then the next span should point to the same `lo` and `hi`. However,
|
|
|
|
// in the case of a multibyte character, where the width != 1, the next span should
|
|
|
|
// span multiple bytes to include the whole character.
|
2019-12-22 22:42:04 +00:00
|
|
|
let end_of_next_point =
|
|
|
|
start_of_next_point.checked_add(width - 1).unwrap_or(start_of_next_point);
|
2018-01-17 10:01:57 +00:00
|
|
|
|
|
|
|
let end_of_next_point = BytePos(cmp::max(sp.lo().0 + 1, end_of_next_point));
|
|
|
|
Span::new(BytePos(start_of_next_point), end_of_next_point, sp.ctxt())
|
2018-01-14 17:29:07 +00:00
|
|
|
}
|
|
|
|
|
2018-01-16 20:41:00 +00:00
|
|
|
/// Finds the width of a character, either before or after the provided span.
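///
/// For example (illustrative): with `forwards == false` and a span ending right after
/// a two-byte character such as `é`, this returns `2`; for plain ASCII it returns `1`.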
|
|
|
|
fn find_width_of_character_at_span(&self, sp: Span, forwards: bool) -> u32 {
|
2019-11-01 20:24:07 +00:00
|
|
|
let sp = sp.data();
|
|
|
|
if sp.lo == sp.hi {
|
|
|
|
debug!("find_width_of_character_at_span: early return empty span");
|
2018-01-16 20:41:00 +00:00
|
|
|
return 1;
|
|
|
|
}
|
2018-01-14 17:29:07 +00:00
|
|
|
|
2019-11-01 20:24:07 +00:00
|
|
|
let local_begin = self.lookup_byte_offset(sp.lo);
|
|
|
|
let local_end = self.lookup_byte_offset(sp.hi);
|
2019-12-22 22:42:04 +00:00
|
|
|
debug!(
|
|
|
|
"find_width_of_character_at_span: local_begin=`{:?}`, local_end=`{:?}`",
|
|
|
|
local_begin, local_end
|
|
|
|
);
|
2018-01-16 20:41:00 +00:00
|
|
|
|
2019-05-28 21:27:42 +00:00
|
|
|
if local_begin.sf.start_pos != local_end.sf.start_pos {
|
|
|
|
debug!("find_width_of_character_at_span: begin and end are in different files");
|
|
|
|
return 1;
|
|
|
|
}
|
|
|
|
|
2018-01-16 20:41:00 +00:00
|
|
|
let start_index = local_begin.pos.to_usize();
|
|
|
|
let end_index = local_end.pos.to_usize();
|
2019-12-22 22:42:04 +00:00
|
|
|
debug!(
|
|
|
|
"find_width_of_character_at_span: start_index=`{:?}`, end_index=`{:?}`",
|
|
|
|
start_index, end_index
|
|
|
|
);
|
2018-01-16 20:41:00 +00:00
|
|
|
|
|
|
|
// Disregard indexes that are at the start or end of their spans; they can't fit bigger
|
|
|
|
// characters.
|
2020-06-02 07:59:11 +00:00
|
|
|
if (!forwards && end_index == usize::MIN) || (forwards && start_index == usize::MAX) {
|
2018-01-27 13:30:34 +00:00
|
|
|
debug!("find_width_of_character_at_span: start or end of span, cannot be multibyte");
|
2018-01-16 20:41:00 +00:00
|
|
|
return 1;
|
|
|
|
}
|
|
|
|
|
2018-10-29 20:26:13 +00:00
|
|
|
let source_len = (local_begin.sf.end_pos - local_begin.sf.start_pos).to_usize();
|
2018-01-27 13:30:34 +00:00
|
|
|
debug!("find_width_of_character_at_span: source_len=`{:?}`", source_len);
|
2018-01-16 20:41:00 +00:00
|
|
|
// Ensure indexes are also not malformed.
|
|
|
|
if start_index > end_index || end_index > source_len {
|
2018-01-27 13:30:34 +00:00
|
|
|
debug!("find_width_of_character_at_span: source indexes are malformed");
|
2018-01-16 20:41:00 +00:00
|
|
|
return 1;
|
2018-01-14 17:29:07 +00:00
|
|
|
}
|
|
|
|
|
2018-10-29 20:26:13 +00:00
|
|
|
let src = local_begin.sf.external_src.borrow();
|
2018-02-12 19:28:32 +00:00
|
|
|
|
2018-01-16 20:41:00 +00:00
|
|
|
// We need to extend the snippet to the end of the `src` rather than to `end_index`, so that when
|
|
|
|
// searching forwards for boundaries we've got somewhere to search.
|
2018-10-29 20:26:13 +00:00
|
|
|
let snippet = if let Some(ref src) = local_begin.sf.src {
|
2018-01-16 20:41:00 +00:00
|
|
|
let len = src.len();
|
2020-01-23 05:42:35 +00:00
|
|
|
&src[start_index..len]
|
2018-02-12 19:28:32 +00:00
|
|
|
} else if let Some(src) = src.get_source() {
|
2018-01-16 20:41:00 +00:00
|
|
|
let len = src.len();
|
2020-01-23 05:42:35 +00:00
|
|
|
&src[start_index..len]
|
2018-01-16 20:41:00 +00:00
|
|
|
} else {
|
|
|
|
return 1;
|
|
|
|
};
|
2018-01-27 13:30:34 +00:00
|
|
|
debug!("find_width_of_character_at_span: snippet=`{:?}`", snippet);
|
|
|
|
|
2018-01-16 20:41:00 +00:00
|
|
|
let mut target = if forwards { end_index + 1 } else { end_index - 1 };
|
2018-01-27 13:30:34 +00:00
|
|
|
debug!("find_width_of_character_at_span: initial target=`{:?}`", target);
|
|
|
|
|
2018-02-25 00:41:08 +00:00
|
|
|
while !snippet.is_char_boundary(target - start_index) && target < source_len {
|
|
|
|
target = if forwards {
|
|
|
|
target + 1
|
|
|
|
} else {
|
|
|
|
match target.checked_sub(1) {
|
|
|
|
Some(target) => target,
|
|
|
|
None => {
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
};
|
2018-01-27 13:30:34 +00:00
|
|
|
debug!("find_width_of_character_at_span: target=`{:?}`", target);
|
2018-01-16 20:41:00 +00:00
|
|
|
}
|
2018-01-27 13:30:34 +00:00
|
|
|
debug!("find_width_of_character_at_span: final target=`{:?}`", target);
|
2018-01-16 20:41:00 +00:00
|
|
|
|
2019-12-22 22:42:04 +00:00
|
|
|
if forwards { (target - end_index) as u32 } else { (end_index - target) as u32 }
|
2018-01-14 17:29:07 +00:00
|
|
|
}
|
|
|
|
|
2018-08-18 10:13:56 +00:00
|
|
|
pub fn get_source_file(&self, filename: &FileName) -> Option<Lrc<SourceFile>> {
|
2018-10-29 20:26:13 +00:00
|
|
|
for sf in self.files.borrow().source_files.iter() {
|
|
|
|
if *filename == sf.name {
|
|
|
|
return Some(sf.clone());
|
2013-12-31 00:30:33 +00:00
|
|
|
}
|
|
|
|
}
|
2016-05-24 14:08:01 +00:00
|
|
|
None
|
2012-11-13 02:24:56 +00:00
|
|
|
}
|
2012-01-25 21:22:10 +00:00
|
|
|
|
2019-09-06 02:56:45 +00:00
|
|
|
/// For a global `BytePos`, computes the local offset within the containing `SourceFile`.
|
2018-08-18 10:13:52 +00:00
|
|
|
pub fn lookup_byte_offset(&self, bpos: BytePos) -> SourceFileAndBytePos {
|
2018-08-18 10:13:56 +00:00
|
|
|
let idx = self.lookup_source_file_idx(bpos);
|
2018-10-29 20:26:13 +00:00
|
|
|
let sf = (*self.files.borrow().source_files)[idx].clone();
|
|
|
|
let offset = bpos - sf.start_pos;
|
2019-12-22 22:42:04 +00:00
|
|
|
SourceFileAndBytePos { sf, pos: offset }
|
2014-02-05 04:31:33 +00:00
|
|
|
}
|
|
|
|
|
2019-09-06 02:56:45 +00:00
|
|
|
/// Converts an absolute `BytePos` to a `CharPos` relative to the `SourceFile`.
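///
/// A rough sketch (illustrative; assumes a file starting at `BytePos(0)` whose contents
/// are `"héllo"`, where `é` occupies two bytes but counts as one char):
///
/// ```rust,ignore (illustrative)
/// // Byte 3 is the first byte after `é`, i.e. character index 2.
/// assert_eq!(source_map.bytepos_to_file_charpos(BytePos(3)), CharPos(2));
/// ```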
|
2014-02-05 04:31:33 +00:00
|
|
|
pub fn bytepos_to_file_charpos(&self, bpos: BytePos) -> CharPos {
|
2018-08-18 10:13:56 +00:00
|
|
|
let idx = self.lookup_source_file_idx(bpos);
|
2018-10-29 20:26:13 +00:00
|
|
|
let map = &(*self.files.borrow().source_files)[idx];
|
2014-02-05 04:31:33 +00:00
|
|
|
|
2019-09-06 02:56:45 +00:00
|
|
|
// The number of extra bytes due to multibyte chars in the `SourceFile`.
|
2014-02-05 04:31:33 +00:00
|
|
|
let mut total_extra_bytes = 0;
|
|
|
|
|
2018-05-23 13:59:42 +00:00
|
|
|
for mbc in map.multibyte_chars.iter() {
|
2014-12-20 08:09:35 +00:00
|
|
|
debug!("{}-byte char at {:?}", mbc.bytes, mbc.pos);
|
2014-02-05 04:31:33 +00:00
|
|
|
if mbc.pos < bpos {
|
2019-09-06 02:56:45 +00:00
|
|
|
// Every character is at least one byte, so we only
|
2014-02-05 04:31:33 +00:00
|
|
|
// count the actual extra bytes.
|
2018-06-26 13:37:09 +00:00
|
|
|
total_extra_bytes += mbc.bytes as u32 - 1;
|
2014-02-05 04:31:33 +00:00
|
|
|
// We should never see a byte position in the middle of a
|
2019-09-06 02:56:45 +00:00
|
|
|
// character.
|
2018-06-26 13:37:09 +00:00
|
|
|
assert!(bpos.to_u32() >= mbc.pos.to_u32() + mbc.bytes as u32);
|
2014-02-05 04:31:33 +00:00
|
|
|
} else {
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2018-05-29 15:49:35 +00:00
|
|
|
assert!(map.start_pos.to_u32() + total_extra_bytes <= bpos.to_u32());
|
|
|
|
CharPos(bpos.to_usize() - map.start_pos.to_usize() - total_extra_bytes as usize)
|
2014-02-05 04:31:33 +00:00
|
|
|
}
|
|
|
|
|
2019-09-06 02:56:45 +00:00
|
|
|
// Returns the index of the `SourceFile` (in `self.files`) that contains `pos`.
|
2020-06-24 17:16:36 +00:00
|
|
|
// This index is guaranteed to be valid for the lifetime of this `SourceMap`,
|
|
|
|
// since `source_files` is a `MonotonicVec`.
|
2018-08-18 10:13:56 +00:00
|
|
|
pub fn lookup_source_file_idx(&self, pos: BytePos) -> usize {
|
2019-12-22 22:42:04 +00:00
|
|
|
self.files
|
|
|
|
.borrow()
|
|
|
|
.source_files
|
|
|
|
.binary_search_by_key(&pos, |key| key.start_pos)
|
2019-10-12 14:47:17 +00:00
|
|
|
.unwrap_or_else(|p| p - 1)
|
2012-11-16 03:37:29 +00:00
|
|
|
}
|
|
|
|
|
2015-11-11 05:26:14 +00:00
|
|
|
pub fn count_lines(&self) -> usize {
|
2017-04-27 14:12:57 +00:00
|
|
|
self.files().iter().fold(0, |a, f| a + f.count_lines())
|
2015-11-11 05:26:14 +00:00
|
|
|
}
|
2018-05-21 16:06:28 +00:00
|
|
|
|
|
|
|
pub fn generate_fn_name_span(&self, span: Span) -> Option<Span> {
|
|
|
|
let prev_span = self.span_extend_to_prev_str(span, "fn", true);
|
2020-05-16 10:29:46 +00:00
|
|
|
if let Ok(snippet) = self.span_to_snippet(prev_span) {
|
|
|
|
debug!(
|
|
|
|
"generate_fn_name_span: span={:?}, prev_span={:?}, snippet={:?}",
|
|
|
|
span, prev_span, snippet
|
|
|
|
);
|
|
|
|
|
|
|
|
if snippet.is_empty() {
|
|
|
|
return None;
|
|
|
|
};
|
|
|
|
|
|
|
|
let len = snippet
|
|
|
|
.find(|c: char| !c.is_alphanumeric() && c != '_')
|
|
|
|
.expect("no label after fn");
|
|
|
|
Some(prev_span.with_hi(BytePos(prev_span.lo().0 + len as u32)))
|
|
|
|
} else {
|
|
|
|
None
|
|
|
|
}
|
2018-05-21 16:06:28 +00:00
|
|
|
}
|
|
|
|
|
2019-09-06 02:56:45 +00:00
|
|
|
/// Takes the span of a type parameter in a function signature and tries to generate a span for
|
|
|
|
/// the function name (with generics) and a new snippet for this span, with the pointed-at type
|
2018-05-21 16:43:11 +00:00
|
|
|
/// parameter as a new local type parameter.
|
2018-05-21 16:06:28 +00:00
|
|
|
///
|
|
|
|
/// For instance:
|
|
|
|
/// ```rust,ignore (pseudo-Rust)
|
|
|
|
/// // Given span
|
|
|
|
/// fn my_function(param: T)
|
|
|
|
/// // ^ Original span
|
|
|
|
///
|
|
|
|
/// // Result
|
|
|
|
/// fn my_function(param: T)
|
|
|
|
/// // ^^^^^^^^^^^ Generated span with snippet `my_function<T>`
|
|
|
|
/// ```
|
|
|
|
///
|
|
|
|
/// Attention: The method used is very fragile since it essentially duplicates the work of the
|
|
|
|
/// parser. If you need to use this function or something similar, please consider updating the
|
2019-09-06 02:56:45 +00:00
|
|
|
/// `SourceMap` functions and this function to something more robust.
|
2018-05-21 16:06:28 +00:00
|
|
|
pub fn generate_local_type_param_snippet(&self, span: Span) -> Option<(Span, String)> {
|
|
|
|
// Try to extend the span to the previous "fn" keyword to retrieve the function
|
2019-09-06 02:56:45 +00:00
|
|
|
// signature.
|
2018-05-21 16:06:28 +00:00
|
|
|
let sugg_span = self.span_extend_to_prev_str(span, "fn", false);
|
|
|
|
if sugg_span != span {
|
|
|
|
if let Ok(snippet) = self.span_to_snippet(sugg_span) {
|
2019-09-06 02:56:45 +00:00
|
|
|
// Consume the function name.
|
2019-12-22 22:42:04 +00:00
|
|
|
let mut offset = snippet
|
|
|
|
.find(|c: char| !c.is_alphanumeric() && c != '_')
|
2018-05-21 16:06:28 +00:00
|
|
|
.expect("no label after fn");
|
|
|
|
|
2019-09-06 02:56:45 +00:00
|
|
|
// Consume the generics part of the function signature.
|
2018-05-21 16:06:28 +00:00
|
|
|
let mut bracket_counter = 0;
|
|
|
|
let mut last_char = None;
|
|
|
|
for c in snippet[offset..].chars() {
|
|
|
|
match c {
|
|
|
|
'<' => bracket_counter += 1,
|
|
|
|
'>' => bracket_counter -= 1,
|
2019-12-22 22:42:04 +00:00
|
|
|
'(' => {
|
|
|
|
if bracket_counter == 0 {
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
}
|
2018-05-21 16:06:28 +00:00
|
|
|
_ => {}
|
|
|
|
}
|
|
|
|
offset += c.len_utf8();
|
|
|
|
last_char = Some(c);
|
|
|
|
}
|
|
|
|
|
2019-09-06 02:56:45 +00:00
|
|
|
// Adjust the suggestion span to encompass the function name with its generics.
|
2018-05-21 16:06:28 +00:00
|
|
|
let sugg_span = sugg_span.with_hi(BytePos(sugg_span.lo().0 + offset as u32));
|
|
|
|
|
|
|
|
// Prepare the new suggested snippet to append the type parameter that triggered
|
2019-09-06 02:56:45 +00:00
|
|
|
// the error in the generics of the function signature.
|
2018-05-21 16:06:28 +00:00
|
|
|
let mut new_snippet = if last_char == Some('>') {
|
|
|
|
format!("{}, ", &snippet[..(offset - '>'.len_utf8())])
|
|
|
|
} else {
|
|
|
|
format!("{}<", &snippet[..offset])
|
|
|
|
};
|
2019-12-22 22:42:04 +00:00
|
|
|
new_snippet
|
|
|
|
.push_str(&self.span_to_snippet(span).unwrap_or_else(|_| "T".to_string()));
|
2018-05-21 16:06:28 +00:00
|
|
|
new_snippet.push('>');
|
|
|
|
|
|
|
|
return Some((sugg_span, new_snippet));
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
None
|
|
|
|
}
|
2019-11-15 13:32:31 +00:00
|
|
|
pub fn ensure_source_file_source_present(&self, source_file: Lrc<SourceFile>) -> bool {
|
2019-12-22 22:42:04 +00:00
|
|
|
source_file.add_external_src(|| match source_file.name {
|
2020-05-29 15:31:55 +00:00
|
|
|
FileName::Real(ref name) => self.file_loader.read_file(name.local_path()).ok(),
|
2019-12-22 22:42:04 +00:00
|
|
|
_ => None,
|
|
|
|
})
|
2016-09-19 19:31:56 +00:00
|
|
|
}
|
2019-11-13 12:01:43 +00:00
|
|
|
|
|
|
|
pub fn is_imported(&self, sp: Span) -> bool {
|
|
|
|
let source_file_index = self.lookup_source_file_idx(sp.lo());
|
|
|
|
let source_file = &self.files()[source_file_index];
|
|
|
|
source_file.is_imported()
|
|
|
|
}
|
2015-02-05 15:02:22 +00:00
|
|
|
}
|
|
|
|
|
2017-04-24 17:01:19 +00:00
|
|
|
#[derive(Clone)]
|
|
|
|
pub struct FilePathMapping {
|
2017-12-14 07:09:19 +00:00
|
|
|
mapping: Vec<(PathBuf, PathBuf)>,
|
2017-04-24 17:01:19 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
impl FilePathMapping {
|
|
|
|
pub fn empty() -> FilePathMapping {
|
2019-12-22 22:42:04 +00:00
|
|
|
FilePathMapping { mapping: vec![] }
|
2017-04-24 17:01:19 +00:00
|
|
|
}
|
|
|
|
|
2017-12-14 07:09:19 +00:00
|
|
|
pub fn new(mapping: Vec<(PathBuf, PathBuf)>) -> FilePathMapping {
|
2019-12-22 22:42:04 +00:00
|
|
|
FilePathMapping { mapping }
|
2017-04-24 17:01:19 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
/// Applies any path prefix substitution as defined by the mapping.
|
|
|
|
/// The return value is the remapped path and a boolean indicating whether
|
|
|
|
/// the path was affected by the mapping.
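///
/// A rough sketch (illustrative paths, assuming a single `--remap-path-prefix`-style
/// entry mapping `/home/user` to `/remapped`):
///
/// ```rust,ignore (illustrative)
/// let mapping = FilePathMapping::new(vec![("/home/user".into(), "/remapped".into())]);
/// let (path, was_remapped) = mapping.map_prefix("/home/user/src/lib.rs".into());
/// assert_eq!(path, PathBuf::from("/remapped/src/lib.rs"));
/// assert!(was_remapped);
/// ```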
|
2017-12-14 07:09:19 +00:00
|
|
|
pub fn map_prefix(&self, path: PathBuf) -> (PathBuf, bool) {
|
2017-04-24 17:01:19 +00:00
|
|
|
// NOTE: We are iterating over the mapping entries from last to first
|
|
|
|
// because entries specified later on the command line should
|
|
|
|
// take precedence.
|
|
|
|
for &(ref from, ref to) in self.mapping.iter().rev() {
|
2017-12-14 07:09:19 +00:00
|
|
|
if let Ok(rest) = path.strip_prefix(from) {
|
|
|
|
return (to.join(rest), true);
|
2017-04-24 17:01:19 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
(path, false)
|
|
|
|
}
|
|
|
|
}
|