//! The SourceMap tracks all the source code used within a single crate, mapping
//! from integer byte positions to the original source code location. Each bit
//! of source parsed during crate parsing (typically files, in-memory strings,
//! or various bits of macro expansion) covers a contiguous range of bytes in the
//! SourceMap and is represented by a SourceFile. Byte positions are stored in
//! `spans` and used pervasively in the compiler. They are absolute positions
//! within the SourceMap, which upon request can be converted to line and column
//! information, source code snippets, etc.
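//!
//! As an illustrative sketch of the intended flow (file name and contents below are
//! hypothetical, and API details are elided), a `SourceMap` hands out `SourceFile`s
//! and turns absolute `BytePos` values back into line/column information:
//!
//! ```rust,ignore (illustrative sketch)
//! let sm = SourceMap::new(FilePathMapping::empty());
//! // Registering a source allocates a fresh, non-overlapping byte range for it.
//! let file = sm.new_source_file(PathBuf::from("example.rs").into(),
//!                               "fn main() {}\n".to_string());
//! // An absolute position inside that range resolves to a file-relative location.
//! let loc = sm.lookup_char_pos(file.start_pos + BytePos(3));
//! assert_eq!((loc.line, loc.col), (1, CharPos(3)));
//! ```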

pub use syntax_pos::*;
pub use syntax_pos::hygiene::{ExpnFormat, ExpnInfo};
pub use ExpnFormat::*;

use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::stable_hasher::StableHasher;
use rustc_data_structures::sync::{Lrc, Lock, LockGuard, MappedLockGuard};
use std::cmp;
use std::hash::Hash;
use std::path::{Path, PathBuf};
use std::env;
use std::fs;
use std::io;
use log::debug;

use errors::SourceMapper;

/// Returns the span itself if it doesn't come from a macro expansion,
/// otherwise returns the call site span up to the `enclosing_sp` by
/// following the `expn_info` chain.
pub fn original_sp(sp: Span, enclosing_sp: Span) -> Span {
    let call_site1 = sp.ctxt().outer_expn_info().map(|ei| ei.call_site);
    let call_site2 = enclosing_sp.ctxt().outer_expn_info().map(|ei| ei.call_site);
    match (call_site1, call_site2) {
        (None, _) => sp,
        (Some(call_site1), Some(call_site2)) if call_site1 == call_site2 => sp,
        (Some(call_site1), _) => original_sp(call_site1, enclosing_sp),
    }
}

#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
pub struct Spanned<T> {
    pub node: T,
    pub span: Span,
}

pub fn respan<T>(sp: Span, t: T) -> Spanned<T> {
    Spanned { node: t, span: sp }
}

pub fn dummy_spanned<T>(t: T) -> Spanned<T> {
    respan(DUMMY_SP, t)
}

// _____________________________________________________________________________
// SourceFile, MultiByteChar, FileName, FileLines
//

/// An abstraction over the fs operations used by the Parser.
pub trait FileLoader {
    /// Query the existence of a file.
    fn file_exists(&self, path: &Path) -> bool;

    /// Returns an absolute path to a file, if possible.
    fn abs_path(&self, path: &Path) -> Option<PathBuf>;

    /// Read the contents of a UTF-8 file into memory.
    fn read_file(&self, path: &Path) -> io::Result<String>;
}

/// A FileLoader that uses std::fs to load real files.
pub struct RealFileLoader;

impl FileLoader for RealFileLoader {
    fn file_exists(&self, path: &Path) -> bool {
        fs::metadata(path).is_ok()
    }

    fn abs_path(&self, path: &Path) -> Option<PathBuf> {
        if path.is_absolute() {
            Some(path.to_path_buf())
        } else {
            env::current_dir()
                .ok()
                .map(|cwd| cwd.join(path))
        }
    }

    fn read_file(&self, path: &Path) -> io::Result<String> {
        fs::read_to_string(path)
    }
}

// This is a SourceFile identifier that is used to correlate SourceFiles between
// subsequent compilation sessions (which is something we need to do during
// incremental compilation).
#[derive(Copy, Clone, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable, Debug)]
pub struct StableSourceFileId(u128);

impl StableSourceFileId {
    pub fn new(source_file: &SourceFile) -> StableSourceFileId {
        StableSourceFileId::new_from_pieces(&source_file.name,
                                            source_file.name_was_remapped,
                                            source_file.unmapped_path.as_ref())
    }

    pub fn new_from_pieces(name: &FileName,
                           name_was_remapped: bool,
                           unmapped_path: Option<&FileName>) -> StableSourceFileId {
        let mut hasher = StableHasher::new();

        name.hash(&mut hasher);
        name_was_remapped.hash(&mut hasher);
        unmapped_path.hash(&mut hasher);

        StableSourceFileId(hasher.finish())
    }
}

// _____________________________________________________________________________
// SourceMap
//

#[derive(Default)]
pub(super) struct SourceMapFiles {
    pub(super) source_files: Vec<Lrc<SourceFile>>,
    stable_id_to_source_file: FxHashMap<StableSourceFileId, Lrc<SourceFile>>
}

pub struct SourceMap {
    pub(super) files: Lock<SourceMapFiles>,
    file_loader: Box<dyn FileLoader + Sync + Send>,
    // This is used to apply the file path remapping as specified via
    // --remap-path-prefix to all SourceFiles allocated within this SourceMap.
    path_mapping: FilePathMapping,
}

impl SourceMap {
    pub fn new(path_mapping: FilePathMapping) -> SourceMap {
        SourceMap {
            files: Default::default(),
            file_loader: Box::new(RealFileLoader),
            path_mapping,
        }
    }

    pub fn with_file_loader(file_loader: Box<dyn FileLoader + Sync + Send>,
                            path_mapping: FilePathMapping)
                            -> SourceMap {
        SourceMap {
            files: Default::default(),
            file_loader,
            path_mapping,
        }
    }

    pub fn path_mapping(&self) -> &FilePathMapping {
        &self.path_mapping
    }

    pub fn file_exists(&self, path: &Path) -> bool {
        self.file_loader.file_exists(path)
    }

    pub fn load_file(&self, path: &Path) -> io::Result<Lrc<SourceFile>> {
        let src = self.file_loader.read_file(path)?;
        let filename = path.to_owned().into();
        Ok(self.new_source_file(filename, src))
    }

    pub fn files(&self) -> MappedLockGuard<'_, Vec<Lrc<SourceFile>>> {
        LockGuard::map(self.files.borrow(), |files| &mut files.source_files)
    }

    pub fn source_file_by_stable_id(&self, stable_id: StableSourceFileId)
                                    -> Option<Lrc<SourceFile>> {
        self.files.borrow().stable_id_to_source_file.get(&stable_id).map(|sf| sf.clone())
    }

    fn next_start_pos(&self) -> usize {
        match self.files.borrow().source_files.last() {
            None => 0,
            // Add one so there is some space between files. This lets us distinguish
            // positions in the source_map, even in the presence of zero-length files.
            Some(last) => last.end_pos.to_usize() + 1,
        }
    }

    /// Creates a new source_file.
    /// If a file already exists in the source_map with the same id, that file is returned
    /// unmodified.
    pub fn new_source_file(&self, filename: FileName, src: String) -> Lrc<SourceFile> {
        self.try_new_source_file(filename, src)
            .unwrap_or_else(|OffsetOverflowError| {
                eprintln!("fatal error: rustc does not support files larger than 4GB");
                errors::FatalError.raise()
            })
    }

    fn try_new_source_file(
        &self,
        filename: FileName,
        src: String
    ) -> Result<Lrc<SourceFile>, OffsetOverflowError> {
        let start_pos = self.next_start_pos();

        // The path is used to determine the directory for loading submodules and
        // include files, so it must be before remapping.
        // Note that filename may not be a valid path, e.g. it may be `<anon>` etc.,
        // but this is okay because the directory determined by `path.pop()` will
        // be empty, so the working directory will be used.
        let unmapped_path = filename.clone();

        let (filename, was_remapped) = match filename {
            FileName::Real(filename) => {
                let (filename, was_remapped) = self.path_mapping.map_prefix(filename);
                (FileName::Real(filename), was_remapped)
            },
            other => (other, false),
        };

        let file_id = StableSourceFileId::new_from_pieces(&filename,
                                                          was_remapped,
                                                          Some(&unmapped_path));

        let lrc_sf = match self.source_file_by_stable_id(file_id) {
            Some(lrc_sf) => lrc_sf,
            None => {
                let source_file = Lrc::new(SourceFile::new(
                    filename,
                    was_remapped,
                    unmapped_path,
                    src,
                    Pos::from_usize(start_pos),
                )?);

                let mut files = self.files.borrow_mut();

                files.source_files.push(source_file.clone());
                files.stable_id_to_source_file.insert(file_id, source_file.clone());

                source_file
            }
        };

        Ok(lrc_sf)
    }

    /// Allocates a new SourceFile representing a source file from an external
    /// crate. The source code of such an "imported source_file" is not available,
    /// but we still know enough to generate accurate debuginfo location
    /// information for things inlined from other crates.
    pub fn new_imported_source_file(
        &self,
        filename: FileName,
        name_was_remapped: bool,
        crate_of_origin: u32,
        src_hash: u128,
        name_hash: u128,
        source_len: usize,
        mut file_local_lines: Vec<BytePos>,
        mut file_local_multibyte_chars: Vec<MultiByteChar>,
        mut file_local_non_narrow_chars: Vec<NonNarrowChar>,
    ) -> Lrc<SourceFile> {
        let start_pos = self.next_start_pos();

        let end_pos = Pos::from_usize(start_pos + source_len);
        let start_pos = Pos::from_usize(start_pos);

        for pos in &mut file_local_lines {
            *pos = *pos + start_pos;
        }

        for mbc in &mut file_local_multibyte_chars {
            mbc.pos = mbc.pos + start_pos;
        }

        for swc in &mut file_local_non_narrow_chars {
            *swc = *swc + start_pos;
        }

        let source_file = Lrc::new(SourceFile {
            name: filename,
            name_was_remapped,
            unmapped_path: None,
            crate_of_origin,
            src: None,
            src_hash,
            external_src: Lock::new(ExternalSource::AbsentOk),
            start_pos,
            end_pos,
            lines: file_local_lines,
            multibyte_chars: file_local_multibyte_chars,
            non_narrow_chars: file_local_non_narrow_chars,
            name_hash,
        });

        let mut files = self.files.borrow_mut();

        files.source_files.push(source_file.clone());
        files.stable_id_to_source_file.insert(StableSourceFileId::new(&source_file),
                                              source_file.clone());

        source_file
    }

    pub fn mk_substr_filename(&self, sp: Span) -> String {
        let pos = self.lookup_char_pos(sp.lo());
        format!("<{}:{}:{}>",
                pos.file.name,
                pos.line,
                pos.col.to_usize() + 1)
    }

    // If there is a doctest_offset, apply it to the line
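    // (Sketch of the behaviour implemented below: a `FileName::DocTest(_, 3)` maps an
    // original line 1 to reported line 4, while a negative offset shifts lines back.)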
    pub fn doctest_offset_line(&self, file: &FileName, orig: usize) -> usize {
        return match file {
            FileName::DocTest(_, offset) => {
                return if *offset >= 0 {
                    orig + *offset as usize
                } else {
                    orig - (-(*offset)) as usize
                }
            },
            _ => orig
        }
    }

    /// Looks up source information about a `BytePos`.
    pub fn lookup_char_pos(&self, pos: BytePos) -> Loc {
        let chpos = self.bytepos_to_file_charpos(pos);
        match self.lookup_line(pos) {
            Ok(SourceFileAndLine { sf: f, line: a }) => {
                let line = a + 1; // Line numbers start at 1
                let linebpos = f.lines[a];
                let linechpos = self.bytepos_to_file_charpos(linebpos);
                let col = chpos - linechpos;
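
                // `col_display` is the column as rendered: the block below subtracts the
                // count of non-narrow characters (tabs, wide chars, etc.) on this line
                // before `pos` and adds back their display widths instead.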
                let col_display = {
                    let start_width_idx = f
                        .non_narrow_chars
                        .binary_search_by_key(&linebpos, |x| x.pos())
                        .unwrap_or_else(|x| x);
                    let end_width_idx = f
                        .non_narrow_chars
                        .binary_search_by_key(&pos, |x| x.pos())
                        .unwrap_or_else(|x| x);
                    let special_chars = end_width_idx - start_width_idx;
                    let non_narrow: usize = f
                        .non_narrow_chars[start_width_idx..end_width_idx]
                        .into_iter()
                        .map(|x| x.width())
                        .sum();
                    col.0 - special_chars + non_narrow
                };
                debug!("byte pos {:?} is on the line at byte pos {:?}",
                       pos, linebpos);
                debug!("char pos {:?} is on the line at char pos {:?}",
                       chpos, linechpos);
                debug!("byte is on line: {}", line);
                assert!(chpos >= linechpos);
                Loc {
                    file: f,
                    line,
                    col,
                    col_display,
                }
            }
            Err(f) => {
                let col_display = {
                    let end_width_idx = f
                        .non_narrow_chars
                        .binary_search_by_key(&pos, |x| x.pos())
                        .unwrap_or_else(|x| x);
                    let non_narrow: usize = f
                        .non_narrow_chars[0..end_width_idx]
                        .into_iter()
                        .map(|x| x.width())
                        .sum();
                    chpos.0 - end_width_idx + non_narrow
                };
                Loc {
                    file: f,
                    line: 0,
                    col: chpos,
                    col_display,
                }
            }
        }
    }

    // If the relevant source_file is empty, we don't return a line number.
    pub fn lookup_line(&self, pos: BytePos) -> Result<SourceFileAndLine, Lrc<SourceFile>> {
        let idx = self.lookup_source_file_idx(pos);

        let f = (*self.files.borrow().source_files)[idx].clone();

        match f.lookup_line(pos) {
            Some(line) => Ok(SourceFileAndLine { sf: f, line: line }),
            None => Err(f)
        }
    }

    /// Returns `Some(span)`, a union of the lhs and rhs span. The lhs must precede the rhs. If
    /// there are gaps between lhs and rhs, the resulting union will cross these gaps.
    /// For this to work, the spans have to be:
    ///
    /// * the ctxt of both spans must match
    /// * the lhs span needs to end on the same line the rhs span begins
    /// * the lhs span must start at or before the rhs span
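    ///
    /// An illustrative sketch, assuming a `SourceMap` `sm` whose only file starts at
    /// `BytePos(0)` and contains `"let x = 1;"` (names and offsets are hypothetical):
    ///
    /// ```rust,ignore (illustrative sketch)
    /// let lhs = Span::new(BytePos(0), BytePos(3), NO_EXPANSION); // covers "let"
    /// let rhs = Span::new(BytePos(4), BytePos(5), NO_EXPANSION); // covers "x"
    /// // The merged span covers "let x", including the gap between the two spans.
    /// assert_eq!(sm.merge_spans(lhs, rhs), Some(lhs.to(rhs)));
    /// ```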
    pub fn merge_spans(&self, sp_lhs: Span, sp_rhs: Span) -> Option<Span> {
        // make sure we're at the same expansion id
        if sp_lhs.ctxt() != sp_rhs.ctxt() {
            return None;
        }

        let lhs_end = match self.lookup_line(sp_lhs.hi()) {
            Ok(x) => x,
            Err(_) => return None
        };
        let rhs_begin = match self.lookup_line(sp_rhs.lo()) {
            Ok(x) => x,
            Err(_) => return None
        };

        // if we must cross lines to merge, don't merge
        if lhs_end.line != rhs_begin.line {
            return None;
        }

        // ensure these follow the expected order and we don't overlap
        if (sp_lhs.lo() <= sp_rhs.lo()) && (sp_lhs.hi() <= sp_rhs.lo()) {
            Some(sp_lhs.to(sp_rhs))
        } else {
            None
        }
    }

    pub fn span_to_string(&self, sp: Span) -> String {
        if self.files.borrow().source_files.is_empty() && sp.is_dummy() {
            return "no-location".to_string();
        }

        let lo = self.lookup_char_pos(sp.lo());
        let hi = self.lookup_char_pos(sp.hi());
        format!("{}:{}:{}: {}:{}",
                lo.file.name,
                lo.line,
                lo.col.to_usize() + 1,
                hi.line,
                hi.col.to_usize() + 1)
    }

    pub fn span_to_filename(&self, sp: Span) -> FileName {
        self.lookup_char_pos(sp.lo()).file.name.clone()
    }

    pub fn span_to_unmapped_path(&self, sp: Span) -> FileName {
        self.lookup_char_pos(sp.lo()).file.unmapped_path.clone()
            .expect("SourceMap::span_to_unmapped_path called for imported SourceFile?")
    }

    pub fn is_multiline(&self, sp: Span) -> bool {
        let lo = self.lookup_char_pos(sp.lo());
        let hi = self.lookup_char_pos(sp.hi());
        lo.line != hi.line
    }

    pub fn span_to_lines(&self, sp: Span) -> FileLinesResult {
        debug!("span_to_lines(sp={:?})", sp);

        if sp.lo() > sp.hi() {
            return Err(SpanLinesError::IllFormedSpan(sp));
        }

        let lo = self.lookup_char_pos(sp.lo());
        debug!("span_to_lines: lo={:?}", lo);
        let hi = self.lookup_char_pos(sp.hi());
        debug!("span_to_lines: hi={:?}", hi);

        if lo.file.start_pos != hi.file.start_pos {
            return Err(SpanLinesError::DistinctSources(DistinctSources {
                begin: (lo.file.name.clone(), lo.file.start_pos),
                end: (hi.file.name.clone(), hi.file.start_pos),
            }));
        }
        assert!(hi.line >= lo.line);

        let mut lines = Vec::with_capacity(hi.line - lo.line + 1);

        // The span starts partway through the first line,
        // but after that it starts from offset 0.
        let mut start_col = lo.col;

        // For every line but the last, it extends from `start_col`
        // and to the end of the line. Be careful because the line
        // numbers in Loc are 1-based, so we subtract 1 to get 0-based
        // lines.
        for line_index in lo.line-1 .. hi.line-1 {
            let line_len = lo.file.get_line(line_index)
                                  .map(|s| s.chars().count())
                                  .unwrap_or(0);
            lines.push(LineInfo { line_index,
                                  start_col,
                                  end_col: CharPos::from_usize(line_len) });
            start_col = CharPos::from_usize(0);
        }

        // For the last line, it extends from `start_col` to `hi.col`:
        lines.push(LineInfo { line_index: hi.line - 1,
                              start_col,
                              end_col: hi.col });

        Ok(FileLines {file: lo.file, lines: lines})
    }

    /// Extracts the source surrounding the given `Span` using the `extract_source` function. The
    /// extract function takes three arguments: a string slice containing the source, an index in
    /// the slice for the beginning of the span and an index in the slice for the end of the span.
    fn span_to_source<F>(&self, sp: Span, extract_source: F) -> Result<String, SpanSnippetError>
        where F: Fn(&str, usize, usize) -> String
    {
        if sp.lo() > sp.hi() {
            return Err(SpanSnippetError::IllFormedSpan(sp));
        }

        let local_begin = self.lookup_byte_offset(sp.lo());
        let local_end = self.lookup_byte_offset(sp.hi());

        if local_begin.sf.start_pos != local_end.sf.start_pos {
            return Err(SpanSnippetError::DistinctSources(DistinctSources {
                begin: (local_begin.sf.name.clone(),
                        local_begin.sf.start_pos),
                end: (local_end.sf.name.clone(),
                      local_end.sf.start_pos)
            }));
        } else {
            self.ensure_source_file_source_present(local_begin.sf.clone());

            let start_index = local_begin.pos.to_usize();
            let end_index = local_end.pos.to_usize();
            let source_len = (local_begin.sf.end_pos -
                              local_begin.sf.start_pos).to_usize();

            if start_index > end_index || end_index > source_len {
                return Err(SpanSnippetError::MalformedForSourcemap(
                    MalformedSourceMapPositions {
                        name: local_begin.sf.name.clone(),
                        source_len,
                        begin_pos: local_begin.pos,
                        end_pos: local_end.pos,
                    }));
            }

            if let Some(ref src) = local_begin.sf.src {
                return Ok(extract_source(src, start_index, end_index));
            } else if let Some(src) = local_begin.sf.external_src.borrow().get_source() {
                return Ok(extract_source(src, start_index, end_index));
            } else {
                return Err(SpanSnippetError::SourceNotAvailable {
                    filename: local_begin.sf.name.clone()
                });
            }
        }
    }

    /// Returns the source snippet as `String` corresponding to the given `Span`.
    pub fn span_to_snippet(&self, sp: Span) -> Result<String, SpanSnippetError> {
        self.span_to_source(sp, |src, start_index, end_index| src[start_index..end_index]
            .to_string())
    }

    pub fn span_to_margin(&self, sp: Span) -> Option<usize> {
        match self.span_to_prev_source(sp) {
            Err(_) => None,
            Ok(source) => source.split('\n').last().map(|last_line| {
                last_line.len() - last_line.trim_start().len()
            })
        }
    }

    /// Returns the source snippet as `String` before the given `Span`.
    pub fn span_to_prev_source(&self, sp: Span) -> Result<String, SpanSnippetError> {
        self.span_to_source(sp, |src, start_index, _| src[..start_index].to_string())
    }

    /// Extend the given `Span` to just after the previous occurrence of `c`. Return the same span
    /// if no character could be found or if an error occurred while retrieving the code snippet.
    pub fn span_extend_to_prev_char(&self, sp: Span, c: char) -> Span {
        if let Ok(prev_source) = self.span_to_prev_source(sp) {
            let prev_source = prev_source.rsplit(c).nth(0).unwrap_or("").trim_start();
            if !prev_source.is_empty() && !prev_source.contains('\n') {
                return sp.with_lo(BytePos(sp.lo().0 - prev_source.len() as u32));
            }
        }

        sp
    }

    /// Extend the given `Span` to just after the previous occurrence of `pat` when surrounded by
    /// whitespace. Return the same span if no character could be found or if an error occurred
    /// while retrieving the code snippet.
    pub fn span_extend_to_prev_str(&self, sp: Span, pat: &str, accept_newlines: bool) -> Span {
        // assure that the pattern is delimited, to avoid the following
        //     fn my_fn()
        //           ^^^^ returned span without the check
        //     ---------- correct span
        for ws in &[" ", "\t", "\n"] {
            let pat = pat.to_owned() + ws;
            if let Ok(prev_source) = self.span_to_prev_source(sp) {
                let prev_source = prev_source.rsplit(&pat).nth(0).unwrap_or("").trim_start();
                if !prev_source.is_empty() && (!prev_source.contains('\n') || accept_newlines) {
                    return sp.with_lo(BytePos(sp.lo().0 - prev_source.len() as u32));
                }
            }
        }

        sp
    }

    /// Given a `Span`, try to get a shorter span ending before the first occurrence of char `c`.
    pub fn span_until_char(&self, sp: Span, c: char) -> Span {
        match self.span_to_snippet(sp) {
            Ok(snippet) => {
                let snippet = snippet.split(c).nth(0).unwrap_or("").trim_end();
                if !snippet.is_empty() && !snippet.contains('\n') {
                    sp.with_hi(BytePos(sp.lo().0 + snippet.len() as u32))
                } else {
                    sp
                }
            }
            _ => sp,
        }
    }

    /// Given a `Span`, try to get a shorter span ending just after the first occurrence of char
    /// `c`.
    pub fn span_through_char(&self, sp: Span, c: char) -> Span {
        if let Ok(snippet) = self.span_to_snippet(sp) {
            if let Some(offset) = snippet.find(c) {
                return sp.with_hi(BytePos(sp.lo().0 + (offset + c.len_utf8()) as u32));
            }
        }
        sp
    }

    /// Given a `Span`, get a new `Span` covering the first token and all its trailing whitespace
    /// or the original `Span`.
    ///
    /// If `sp` points to `"let mut x"`, then a span pointing at `"let "` will be returned.
    pub fn span_until_non_whitespace(&self, sp: Span) -> Span {
        let mut whitespace_found = false;

        self.span_take_while(sp, |c| {
            if !whitespace_found && c.is_whitespace() {
                whitespace_found = true;
            }

            if whitespace_found && !c.is_whitespace() {
                false
            } else {
                true
            }
        })
    }

    /// Given a `Span`, get a new `Span` covering the first token without its trailing whitespace
    /// or the original `Span` in case of error.
    ///
    /// If `sp` points to `"let mut x"`, then a span pointing at `"let"` will be returned.
    pub fn span_until_whitespace(&self, sp: Span) -> Span {
        self.span_take_while(sp, |c| !c.is_whitespace())
    }

    /// Given a `Span`, get a shorter one until `predicate` yields false.
    pub fn span_take_while<P>(&self, sp: Span, predicate: P) -> Span
        where P: for <'r> FnMut(&'r char) -> bool
    {
        if let Ok(snippet) = self.span_to_snippet(sp) {
            let offset = snippet.chars()
                .take_while(predicate)
                .map(|c| c.len_utf8())
                .sum::<usize>();

            sp.with_hi(BytePos(sp.lo().0 + (offset as u32)))
        } else {
            sp
        }
    }

    pub fn def_span(&self, sp: Span) -> Span {
        self.span_until_char(sp, '{')
    }

    /// Returns a new span representing just the start-point of this span
    pub fn start_point(&self, sp: Span) -> Span {
        let pos = sp.lo().0;
        let width = self.find_width_of_character_at_span(sp, false);
        let corrected_start_position = pos.checked_add(width).unwrap_or(pos);
        let end_point = BytePos(cmp::max(corrected_start_position, sp.lo().0));
        sp.with_hi(end_point)
    }

    /// Returns a new span representing just the end-point of this span
    pub fn end_point(&self, sp: Span) -> Span {
        let pos = sp.hi().0;

        let width = self.find_width_of_character_at_span(sp, false);
        let corrected_end_position = pos.checked_sub(width).unwrap_or(pos);

        let end_point = BytePos(cmp::max(corrected_end_position, sp.lo().0));
        sp.with_lo(end_point)
    }

    /// Returns a new span representing the next character after the end-point of this span
    pub fn next_point(&self, sp: Span) -> Span {
        let start_of_next_point = sp.hi().0;

        let width = self.find_width_of_character_at_span(sp, true);
        // If the width is 1, then the next span should point to the same `lo` and `hi`. However,
        // in the case of a multibyte character, where the width != 1, the next span should
        // span multiple bytes to include the whole character.
        let end_of_next_point = start_of_next_point.checked_add(
            width - 1).unwrap_or(start_of_next_point);

        let end_of_next_point = BytePos(cmp::max(sp.lo().0 + 1, end_of_next_point));
        Span::new(BytePos(start_of_next_point), end_of_next_point, sp.ctxt())
    }

    /// Finds the width of a character, either before or after the provided span.
    fn find_width_of_character_at_span(&self, sp: Span, forwards: bool) -> u32 {
        // Disregard malformed spans and assume a one-byte wide character.
        if sp.lo() >= sp.hi() {
            debug!("find_width_of_character_at_span: early return malformed span");
            return 1;
        }

        let local_begin = self.lookup_byte_offset(sp.lo());
        let local_end = self.lookup_byte_offset(sp.hi());
        debug!("find_width_of_character_at_span: local_begin=`{:?}`, local_end=`{:?}`",
               local_begin, local_end);

        if local_begin.sf.start_pos != local_end.sf.start_pos {
            debug!("find_width_of_character_at_span: begin and end are in different files");
            return 1;
        }

        let start_index = local_begin.pos.to_usize();
        let end_index = local_end.pos.to_usize();
        debug!("find_width_of_character_at_span: start_index=`{:?}`, end_index=`{:?}`",
               start_index, end_index);

        // Disregard indexes that are at the start or end of their spans, they can't fit bigger
        // characters.
        if (!forwards && end_index == usize::min_value()) ||
            (forwards && start_index == usize::max_value()) {
            debug!("find_width_of_character_at_span: start or end of span, cannot be multibyte");
            return 1;
        }

        let source_len = (local_begin.sf.end_pos - local_begin.sf.start_pos).to_usize();
        debug!("find_width_of_character_at_span: source_len=`{:?}`", source_len);
        // Ensure indexes are also not malformed.
        if start_index > end_index || end_index > source_len {
            debug!("find_width_of_character_at_span: source indexes are malformed");
            return 1;
        }

        let src = local_begin.sf.external_src.borrow();

        // We need to extend the snippet to the end of the src rather than to end_index so when
        // searching forwards for boundaries we've got somewhere to search.
        let snippet = if let Some(ref src) = local_begin.sf.src {
            let len = src.len();
            (&src[start_index..len])
        } else if let Some(src) = src.get_source() {
            let len = src.len();
            (&src[start_index..len])
        } else {
            return 1;
        };
        debug!("find_width_of_character_at_span: snippet=`{:?}`", snippet);

        let mut target = if forwards { end_index + 1 } else { end_index - 1 };
        debug!("find_width_of_character_at_span: initial target=`{:?}`", target);

        while !snippet.is_char_boundary(target - start_index) && target < source_len {
            target = if forwards {
                target + 1
            } else {
                match target.checked_sub(1) {
                    Some(target) => target,
                    None => {
                        break;
                    }
                }
            };
            debug!("find_width_of_character_at_span: target=`{:?}`", target);
        }
        debug!("find_width_of_character_at_span: final target=`{:?}`", target);

        if forwards {
            (target - end_index) as u32
        } else {
            (end_index - target) as u32
        }
    }

    pub fn get_source_file(&self, filename: &FileName) -> Option<Lrc<SourceFile>> {
        for sf in self.files.borrow().source_files.iter() {
            if *filename == sf.name {
                return Some(sf.clone());
            }
        }
        None
    }

    /// For a global BytePos compute the local offset within the containing SourceFile
    pub fn lookup_byte_offset(&self, bpos: BytePos) -> SourceFileAndBytePos {
        let idx = self.lookup_source_file_idx(bpos);
        let sf = (*self.files.borrow().source_files)[idx].clone();
        let offset = bpos - sf.start_pos;
        SourceFileAndBytePos {sf: sf, pos: offset}
    }

    /// Converts an absolute BytePos to a CharPos relative to the source_file.
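    ///
    /// As a sketch of the arithmetic (mirroring test `t6` below): in a file starting at
    /// absolute position 0 whose text begins with `"fir€st"`, the three-byte `€` occupies
    /// bytes 3..6, so the absolute `BytePos(6)` maps to `CharPos(4)`; the two extra bytes
    /// of the one multibyte character before it are subtracted.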
    pub fn bytepos_to_file_charpos(&self, bpos: BytePos) -> CharPos {
        let idx = self.lookup_source_file_idx(bpos);
        let map = &(*self.files.borrow().source_files)[idx];

        // The number of extra bytes due to multibyte chars in the SourceFile
        let mut total_extra_bytes = 0;

        for mbc in map.multibyte_chars.iter() {
            debug!("{}-byte char at {:?}", mbc.bytes, mbc.pos);
            if mbc.pos < bpos {
                // every character is at least one byte, so we only
                // count the actual extra bytes.
                total_extra_bytes += mbc.bytes as u32 - 1;
                // We should never see a byte position in the middle of a
                // character
                assert!(bpos.to_u32() >= mbc.pos.to_u32() + mbc.bytes as u32);
            } else {
                break;
            }
        }

        assert!(map.start_pos.to_u32() + total_extra_bytes <= bpos.to_u32());
        CharPos(bpos.to_usize() - map.start_pos.to_usize() - total_extra_bytes as usize)
    }

    // Return the index of the source_file (in self.files) which contains pos.
    pub fn lookup_source_file_idx(&self, pos: BytePos) -> usize {
        let files = self.files.borrow();
        let files = &files.source_files;
        let count = files.len();

        // Binary search for the source_file.
        let mut a = 0;
        let mut b = count;
        while b - a > 1 {
            let m = (a + b) / 2;
            if files[m].start_pos > pos {
                b = m;
            } else {
                a = m;
            }
        }

        assert!(a < count, "position {} does not resolve to a source location", pos.to_usize());

        return a;
    }

    pub fn count_lines(&self) -> usize {
        self.files().iter().fold(0, |a, f| a + f.count_lines())
    }

    pub fn generate_fn_name_span(&self, span: Span) -> Option<Span> {
        let prev_span = self.span_extend_to_prev_str(span, "fn", true);
        self.span_to_snippet(prev_span).map(|snippet| {
            let len = snippet.find(|c: char| !c.is_alphanumeric() && c != '_')
                             .expect("no label after fn");
            prev_span.with_hi(BytePos(prev_span.lo().0 + len as u32))
        }).ok()
    }

    /// Take the span of a type parameter in a function signature and try to generate a span for
    /// the function name (with generics) and a new snippet for this span with the pointed type
    /// parameter as a new local type parameter.
    ///
    /// For instance:
    /// ```rust,ignore (pseudo-Rust)
    /// // Given span
    /// fn my_function(param: T)
    /// //             ^ Original span
    ///
    /// // Result
    /// fn my_function(param: T)
    /// // ^^^^^^^^^^^ Generated span with snippet `my_function<T>`
    /// ```
    ///
    /// Attention: The method used is very fragile since it essentially duplicates the work of the
    /// parser. If you need to use this function or something similar, please consider updating
    /// the source_map functions and this function to something more robust.
    pub fn generate_local_type_param_snippet(&self, span: Span) -> Option<(Span, String)> {
        // Try to extend the span to the previous "fn" keyword to retrieve the function
        // signature
        let sugg_span = self.span_extend_to_prev_str(span, "fn", false);
        if sugg_span != span {
            if let Ok(snippet) = self.span_to_snippet(sugg_span) {
                // Consume the function name
                let mut offset = snippet.find(|c: char| !c.is_alphanumeric() && c != '_')
                    .expect("no label after fn");

                // Consume the generics part of the function signature
                let mut bracket_counter = 0;
                let mut last_char = None;
                for c in snippet[offset..].chars() {
                    match c {
                        '<' => bracket_counter += 1,
                        '>' => bracket_counter -= 1,
                        '(' => if bracket_counter == 0 { break; }
                        _ => {}
                    }
                    offset += c.len_utf8();
                    last_char = Some(c);
                }

                // Adjust the suggestion span to encompass the function name with its generics
                let sugg_span = sugg_span.with_hi(BytePos(sugg_span.lo().0 + offset as u32));

                // Prepare the new suggested snippet to append the type parameter that triggered
                // the error in the generics of the function signature
                let mut new_snippet = if last_char == Some('>') {
                    format!("{}, ", &snippet[..(offset - '>'.len_utf8())])
                } else {
                    format!("{}<", &snippet[..offset])
                };
                new_snippet.push_str(
                    &self.span_to_snippet(span).unwrap_or_else(|_| "T".to_string()));
                new_snippet.push('>');

                return Some((sugg_span, new_snippet));
            }
        }

        None
    }
}

impl SourceMapper for SourceMap {
    fn lookup_char_pos(&self, pos: BytePos) -> Loc {
        self.lookup_char_pos(pos)
    }
    fn span_to_lines(&self, sp: Span) -> FileLinesResult {
        self.span_to_lines(sp)
    }
    fn span_to_string(&self, sp: Span) -> String {
        self.span_to_string(sp)
    }
    fn span_to_filename(&self, sp: Span) -> FileName {
        self.span_to_filename(sp)
    }
    fn merge_spans(&self, sp_lhs: Span, sp_rhs: Span) -> Option<Span> {
        self.merge_spans(sp_lhs, sp_rhs)
    }
    fn call_span_if_macro(&self, sp: Span) -> Span {
        if self.span_to_filename(sp.clone()).is_macros() {
            let v = sp.macro_backtrace();
            if let Some(use_site) = v.last() {
                return use_site.call_site;
            }
        }
        sp
    }
    fn ensure_source_file_source_present(&self, source_file: Lrc<SourceFile>) -> bool {
        source_file.add_external_src(
            || match source_file.name {
                FileName::Real(ref name) => self.file_loader.read_file(name).ok(),
                _ => None,
            }
        )
    }
    fn doctest_offset_line(&self, file: &FileName, line: usize) -> usize {
        self.doctest_offset_line(file, line)
    }
}

#[derive(Clone)]
pub struct FilePathMapping {
    mapping: Vec<(PathBuf, PathBuf)>,
}

impl FilePathMapping {
    pub fn empty() -> FilePathMapping {
        FilePathMapping {
            mapping: vec![]
        }
    }

    pub fn new(mapping: Vec<(PathBuf, PathBuf)>) -> FilePathMapping {
        FilePathMapping {
            mapping,
        }
    }

    /// Applies any path prefix substitution as defined by the mapping.
    /// The return value is the remapped path and a boolean indicating whether
    /// the path was affected by the mapping.
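    ///
    /// A sketch of the expected behaviour (the paths below are hypothetical):
    ///
    /// ```rust,ignore (illustrative sketch)
    /// let mapping = FilePathMapping::new(vec![
    ///     (PathBuf::from("/local/checkout"), PathBuf::from("/remapped")),
    /// ]);
    /// let (path, was_remapped) = mapping.map_prefix(PathBuf::from("/local/checkout/lib.rs"));
    /// assert_eq!(path, PathBuf::from("/remapped/lib.rs"));
    /// assert!(was_remapped);
    /// ```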
    pub fn map_prefix(&self, path: PathBuf) -> (PathBuf, bool) {
        // NOTE: We are iterating over the mapping entries from last to first
        //       because entries specified later on the command line should
        //       take precedence.
        for &(ref from, ref to) in self.mapping.iter().rev() {
            if let Ok(rest) = path.strip_prefix(from) {
                return (to.join(rest), true);
            }
        }

        (path, false)
    }
}

// _____________________________________________________________________________
// Tests
//

#[cfg(test)]
mod tests {
    use super::*;
    use rustc_data_structures::sync::Lrc;

    fn init_source_map() -> SourceMap {
        let sm = SourceMap::new(FilePathMapping::empty());
        sm.new_source_file(PathBuf::from("blork.rs").into(),
                           "first line.\nsecond line".to_string());
        sm.new_source_file(PathBuf::from("empty.rs").into(),
                           String::new());
        sm.new_source_file(PathBuf::from("blork2.rs").into(),
                           "first line.\nsecond line".to_string());
        sm
    }
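
    // Note on the layout produced above: `next_start_pos` leaves one byte of padding
    // after each file, so `blork.rs` starts at absolute BytePos 0, `empty.rs` at 24 and
    // `blork2.rs` at 25 — the absolute positions the tests below rely on.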

    #[test]
    fn t3() {
        // Test lookup_byte_offset
        let sm = init_source_map();

        let srcfbp1 = sm.lookup_byte_offset(BytePos(23));
        assert_eq!(srcfbp1.sf.name, PathBuf::from("blork.rs").into());
        assert_eq!(srcfbp1.pos, BytePos(23));

        let srcfbp1 = sm.lookup_byte_offset(BytePos(24));
        assert_eq!(srcfbp1.sf.name, PathBuf::from("empty.rs").into());
        assert_eq!(srcfbp1.pos, BytePos(0));

        let srcfbp2 = sm.lookup_byte_offset(BytePos(25));
        assert_eq!(srcfbp2.sf.name, PathBuf::from("blork2.rs").into());
        assert_eq!(srcfbp2.pos, BytePos(0));
    }

    #[test]
    fn t4() {
        // Test bytepos_to_file_charpos
        let sm = init_source_map();

        let cp1 = sm.bytepos_to_file_charpos(BytePos(22));
        assert_eq!(cp1, CharPos(22));

        let cp2 = sm.bytepos_to_file_charpos(BytePos(25));
        assert_eq!(cp2, CharPos(0));
    }

    #[test]
    fn t5() {
        // Test zero-length source_files.
        let sm = init_source_map();

        let loc1 = sm.lookup_char_pos(BytePos(22));
        assert_eq!(loc1.file.name, PathBuf::from("blork.rs").into());
        assert_eq!(loc1.line, 2);
        assert_eq!(loc1.col, CharPos(10));

        let loc2 = sm.lookup_char_pos(BytePos(25));
        assert_eq!(loc2.file.name, PathBuf::from("blork2.rs").into());
        assert_eq!(loc2.line, 1);
        assert_eq!(loc2.col, CharPos(0));
    }

    fn init_source_map_mbc() -> SourceMap {
        let sm = SourceMap::new(FilePathMapping::empty());
        // € is a three byte utf8 char.
        sm.new_source_file(PathBuf::from("blork.rs").into(),
                           "fir€st €€€€ line.\nsecond line".to_string());
        sm.new_source_file(PathBuf::from("blork2.rs").into(),
                           "first line€€.\n€ second line".to_string());
        sm
    }

    #[test]
    fn t6() {
        // Test bytepos_to_file_charpos in the presence of multi-byte chars
        let sm = init_source_map_mbc();

        let cp1 = sm.bytepos_to_file_charpos(BytePos(3));
        assert_eq!(cp1, CharPos(3));

        let cp2 = sm.bytepos_to_file_charpos(BytePos(6));
        assert_eq!(cp2, CharPos(4));

        let cp3 = sm.bytepos_to_file_charpos(BytePos(56));
        assert_eq!(cp3, CharPos(12));

        let cp4 = sm.bytepos_to_file_charpos(BytePos(61));
        assert_eq!(cp4, CharPos(15));
    }

    #[test]
    fn t7() {
        // Test span_to_lines for a span ending at the end of source_file
        let sm = init_source_map();
        let span = Span::new(BytePos(12), BytePos(23), NO_EXPANSION);
        let file_lines = sm.span_to_lines(span).unwrap();

        assert_eq!(file_lines.file.name, PathBuf::from("blork.rs").into());
        assert_eq!(file_lines.lines.len(), 1);
        assert_eq!(file_lines.lines[0].line_index, 1);
    }

    /// Given a string like " ~~~~~~~~~~~~ ", produces a span
    /// covering that range. The idea is that the string has the same
    /// length as the input, and we uncover the byte positions. Note
    /// that this can span lines and so on.
    fn span_from_selection(input: &str, selection: &str) -> Span {
        assert_eq!(input.len(), selection.len());
        let left_index = selection.find('~').unwrap() as u32;
        let right_index = selection.rfind('~').map(|x|x as u32).unwrap_or(left_index);
        Span::new(BytePos(left_index), BytePos(right_index + 1), NO_EXPANSION)
    }

    /// Tests span_to_snippet and span_to_lines for a span covering 3
    /// lines in the middle of a file.
    #[test]
    fn span_to_snippet_and_lines_spanning_multiple_lines() {
        let sm = SourceMap::new(FilePathMapping::empty());
        let inputtext = "aaaaa\nbbbbBB\nCCC\nDDDDDddddd\neee\n";
        let selection = "     \n    ~~\n~~~\n~~~~~     \n   \n";
        sm.new_source_file(Path::new("blork.rs").to_owned().into(), inputtext.to_string());
        let span = span_from_selection(inputtext, selection);

        // check that we are extracting the text we thought we were extracting
        assert_eq!(&sm.span_to_snippet(span).unwrap(), "BB\nCCC\nDDDDD");

        // check that span_to_lines gives us the complete result with the lines/cols we expected
        let lines = sm.span_to_lines(span).unwrap();
        let expected = vec![
            LineInfo { line_index: 1, start_col: CharPos(4), end_col: CharPos(6) },
            LineInfo { line_index: 2, start_col: CharPos(0), end_col: CharPos(3) },
            LineInfo { line_index: 3, start_col: CharPos(0), end_col: CharPos(5) }
        ];
        assert_eq!(lines.lines, expected);
    }

    #[test]
    fn t8() {
        // Test span_to_snippet for a span ending at the end of source_file
        let sm = init_source_map();
        let span = Span::new(BytePos(12), BytePos(23), NO_EXPANSION);
        let snippet = sm.span_to_snippet(span);

        assert_eq!(snippet, Ok("second line".to_string()));
    }

    #[test]
    fn t9() {
        // Test span_to_str for a span ending at the end of source_file
        let sm = init_source_map();
        let span = Span::new(BytePos(12), BytePos(23), NO_EXPANSION);
        let sstr = sm.span_to_string(span);

        assert_eq!(sstr, "blork.rs:2:1: 2:12");
    }

    /// Tests failing to merge two spans on different lines
    #[test]
    fn span_merging_fail() {
        let sm = SourceMap::new(FilePathMapping::empty());
        let inputtext  = "bbbb BB\ncc CCC\n";
        let selection1 = "     ~~\n      \n";
        let selection2 = "       \n   ~~~\n";
        sm.new_source_file(Path::new("blork.rs").to_owned().into(), inputtext.to_owned());
        let span1 = span_from_selection(inputtext, selection1);
        let span2 = span_from_selection(inputtext, selection2);

        assert!(sm.merge_spans(span1, span2).is_none());
    }

    /// Returns the span corresponding to the `n`th occurrence of
    /// `substring` in `source_text`.
    trait SourceMapExtension {
        fn span_substr(&self,
                       file: &Lrc<SourceFile>,
                       source_text: &str,
                       substring: &str,
                       n: usize)
                       -> Span;
    }

    impl SourceMapExtension for SourceMap {
        fn span_substr(&self,
                       file: &Lrc<SourceFile>,
                       source_text: &str,
                       substring: &str,
                       n: usize)
                       -> Span
        {
            println!("span_substr(file={:?}/{:?}, substring={:?}, n={})",
                     file.name, file.start_pos, substring, n);
            let mut i = 0;
            let mut hi = 0;
            loop {
                let offset = source_text[hi..].find(substring).unwrap_or_else(|| {
                    panic!("source_text `{}` does not have {} occurrences of `{}`, only {}",
                           source_text, n, substring, i);
                });
                let lo = hi + offset;
                hi = lo + substring.len();
                if i == n {
                    let span = Span::new(
                        BytePos(lo as u32 + file.start_pos.0),
                        BytePos(hi as u32 + file.start_pos.0),
                        NO_EXPANSION,
                    );
                    assert_eq!(&self.span_to_snippet(span).unwrap()[..],
                               substring);
                    return span;
                }
                i += 1;
            }
        }
    }
}