syntax_pos: NO_EXPANSION/SyntaxContext::empty() -> SyntaxContext::root()

For consistency with `ExpnId::root`.

Also introduce a helper, `Span::with_root_ctxt`, for creating spans with the `SyntaxContext::root()` context.
Vadim Petrochenkov 2019-08-11 01:44:55 +03:00
parent dfcbe75900
commit 67d6ce4206
20 changed files with 53 additions and 53 deletions
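For orientation before the per-file hunks: a minimal sketch (not part of the commit; the free function, variable names, and exact import paths are assumptions) of how the renamed constructor and the new helper are meant to be used:

```rust
use syntax_pos::hygiene::SyntaxContext;
use syntax_pos::{BytePos, Span};

// Hypothetical call site: build a span that carries no macro-expansion context.
fn root_span(lo: BytePos, hi: BytePos) -> Span {
    // What used to be `Span::new(lo, hi, NO_EXPANSION)` (or `SyntaxContext::empty()`)
    // now spells the context as `SyntaxContext::root()`.
    let explicit = Span::new(lo, hi, SyntaxContext::root());
    // The helper added by this commit wraps exactly that call.
    let via_helper = Span::with_root_ctxt(lo, hi);
    assert_eq!(explicit, via_helper);
    via_helper
}
```

The hunks below mechanically replace `SyntaxContext::empty()` with `SyntaxContext::root()` and `Span::new(lo, hi, NO_EXPANSION)` with `Span::with_root_ctxt(lo, hi)` at each call site, and remove the `NO_EXPANSION` constant at the end.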


@@ -350,7 +350,7 @@ impl<'a> HashStable<StableHashingContext<'a>> for Span {
let line_col_len = col | line | len;
std_hash::Hash::hash(&line_col_len, hasher);
-if span.ctxt == SyntaxContext::empty() {
+if span.ctxt == SyntaxContext::root() {
TAG_NO_EXPANSION.hash_stable(hcx, hasher);
} else {
TAG_EXPANSION.hash_stable(hcx, hasher);


@@ -592,7 +592,7 @@ impl<'a, 'tcx> SpecializedDecoder<Span> for CacheDecoder<'a, 'tcx> {
// `SyntaxContextData::prev_ctxt` or `SyntaxContextData::opaque`. These things
// don't seem to be used after HIR lowering, so everything should be fine
// as long as incremental compilation does not kick in before that.
-let location = || Span::new(lo, hi, SyntaxContext::empty());
+let location = || Span::with_root_ctxt(lo, hi);
let recover_from_expn_info = |this: &Self, expn_info, pos| {
let span = location().fresh_expansion(ExpnId::root(), expn_info);
this.synthetic_expansion_infos.borrow_mut().insert(pos, span.ctxt());
@@ -816,7 +816,7 @@ where
col_lo.encode(self)?;
len.encode(self)?;
-if span_data.ctxt == SyntaxContext::empty() {
+if span_data.ctxt == SyntaxContext::root() {
TAG_NO_EXPANSION_INFO.encode(self)
} else {
let (expn_id, expn_info) = span_data.ctxt.outer_expn_with_info();


@@ -43,8 +43,7 @@ use syntax_pos::{BytePos,
SourceFile,
FileName,
MultiSpan,
-Span,
-NO_EXPANSION};
+Span};
/// Indicates the confidence in the correctness of a suggestion.
///
@@ -189,7 +188,7 @@ impl CodeSuggestion {
// Find the bounding span.
let lo = substitution.parts.iter().map(|part| part.span.lo()).min().unwrap();
let hi = substitution.parts.iter().map(|part| part.span.hi()).min().unwrap();
-let bounding_span = Span::new(lo, hi, NO_EXPANSION);
+let bounding_span = Span::with_root_ctxt(lo, hi);
let lines = cm.span_to_lines(bounding_span).unwrap();
assert!(!lines.lines.is_empty());


@@ -35,7 +35,7 @@ use syntax::ext::proc_macro::BangProcMacro;
use syntax::parse::source_file_to_stream;
use syntax::parse::parser::emit_unclosed_delims;
use syntax::symbol::{Symbol, sym};
-use syntax_pos::{Span, NO_EXPANSION, FileName};
+use syntax_pos::{Span, FileName};
use rustc_data_structures::bit_set::BitSet;
macro_rules! provide {
@@ -443,7 +443,7 @@ impl cstore::CStore {
let source_name = FileName::Macros(macro_full_name);
let source_file = sess.parse_sess.source_map().new_source_file(source_name, def.body);
-let local_span = Span::new(source_file.start_pos, source_file.end_pos, NO_EXPANSION);
+let local_span = Span::with_root_ctxt(source_file.start_pos, source_file.end_pos);
let (body, mut errors) = source_file_to_stream(&sess.parse_sess, source_file, None);
emit_unclosed_delims(&mut errors, &sess.diagnostic());


@@ -32,7 +32,7 @@ use syntax::source_map;
use syntax::symbol::{Symbol, sym};
use syntax::ext::base::{MacroKind, SyntaxExtension};
use syntax::ext::hygiene::ExpnId;
-use syntax_pos::{self, Span, BytePos, Pos, DUMMY_SP, NO_EXPANSION};
+use syntax_pos::{self, Span, BytePos, Pos, DUMMY_SP};
use log::debug;
pub struct DecodeContext<'a, 'tcx> {
@@ -344,7 +344,7 @@ impl<'a, 'tcx> SpecializedDecoder<Span> for DecodeContext<'a, 'tcx> {
let hi = (hi + source_file.translated_source_file.start_pos)
- source_file.original_start_pos;
-Ok(Span::new(lo, hi, NO_EXPANSION))
+Ok(Span::with_root_ctxt(lo, hi))
}
}


@@ -1430,7 +1430,7 @@ impl<'a> Resolver<'a> {
}
let (general_span, modern_span) = if ident.name == kw::SelfUpper {
// FIXME(jseyfried) improve `Self` hygiene
-let empty_span = ident.span.with_ctxt(SyntaxContext::empty());
+let empty_span = ident.span.with_ctxt(SyntaxContext::root());
(empty_span, empty_span)
} else if ns == TypeNS {
let modern_span = ident.span.modern();


@@ -762,7 +762,7 @@ impl<'a> ExtCtxt<'a> {
}
}
pub fn backtrace(&self) -> SyntaxContext {
-SyntaxContext::empty().apply_mark(self.current_expansion.id)
+SyntaxContext::root().apply_mark(self.current_expansion.id)
}
/// Returns span for the macro which originally caused the current expansion to happen.


@@ -759,7 +759,7 @@ impl<'a> Parser<'a> {
let msg = format!("macro expansion ignores token `{}` and any following",
self.this_token_to_string());
// Avoid emitting backtrace info twice.
-let def_site_span = self.token.span.with_ctxt(SyntaxContext::empty());
+let def_site_span = self.token.span.with_ctxt(SyntaxContext::root());
let mut err = self.diagnostic().struct_span_err(def_site_span, &msg);
err.span_label(span, "caused by the macro expansion here");
let msg = format!(


@@ -365,7 +365,7 @@ impl<'a> Rustc<'a> {
let location = cx.current_expansion.id.expn_info().unwrap().call_site;
let to_span = |transparency| {
location.with_ctxt(
-SyntaxContext::empty()
+SyntaxContext::root()
.apply_mark_with_transparency(cx.current_expansion.id, transparency),
)
};


@@ -4,7 +4,7 @@ use crate::symbol::{sym, Symbol};
use crate::parse::unescape_error_reporting::{emit_unescape_error, push_escaped_char};
use errors::{FatalError, DiagnosticBuilder};
-use syntax_pos::{BytePos, Pos, Span, NO_EXPANSION};
+use syntax_pos::{BytePos, Pos, Span};
use rustc_lexer::Base;
use rustc_lexer::unescape;
@@ -84,7 +84,7 @@ impl<'a> StringReader<'a> {
fn mk_sp(&self, lo: BytePos, hi: BytePos) -> Span {
-self.override_span.unwrap_or_else(|| Span::new(lo, hi, NO_EXPANSION))
+self.override_span.unwrap_or_else(|| Span::with_root_ctxt(lo, hi))
}
/// Returns the next token, including trivia like whitespace or comments.


@@ -9,7 +9,7 @@ use crate::diagnostics::plugin::ErrorMap;
use crate::with_default_globals;
use std::io;
use std::path::PathBuf;
-use syntax_pos::{BytePos, Span, NO_EXPANSION, edition::Edition};
+use syntax_pos::{BytePos, Span, edition::Edition};
use rustc_data_structures::fx::{FxHashSet, FxHashMap};
use rustc_data_structures::sync::{Lock, Once};
@@ -61,7 +61,7 @@ fn t1() {
let tok1 = string_reader.next_token();
let tok2 = Token::new(
mk_ident("fn"),
-Span::new(BytePos(21), BytePos(23), NO_EXPANSION),
+Span::with_root_ctxt(BytePos(21), BytePos(23)),
);
assert_eq!(tok1.kind, tok2.kind);
assert_eq!(tok1.span, tok2.span);
@@ -71,7 +71,7 @@ fn t1() {
assert_eq!(string_reader.pos.clone(), BytePos(28));
let tok4 = Token::new(
mk_ident("main"),
-Span::new(BytePos(24), BytePos(28), NO_EXPANSION),
+Span::with_root_ctxt(BytePos(24), BytePos(28)),
);
assert_eq!(tok3.kind, tok4.kind);
assert_eq!(tok3.span, tok4.span);


@@ -3,7 +3,7 @@
use super::StringReader;
use errors::{Applicability, DiagnosticBuilder};
-use syntax_pos::{BytePos, Pos, Span, NO_EXPANSION, symbol::kw};
+use syntax_pos::{BytePos, Pos, Span, symbol::kw};
use crate::parse::token;
#[rustfmt::skip] // for line breaks
@@ -343,7 +343,7 @@ crate fn check_for_substitution<'a>(
None => return None,
};
-let span = Span::new(pos, pos + Pos::from_usize(ch.len_utf8()), NO_EXPANSION);
+let span = Span::with_root_ctxt(pos, pos + Pos::from_usize(ch.len_utf8()));
let (ascii_name, token) = match ASCII_ARRAY.iter().find(|&&(c, _, _)| c == ascii_char) {
Some((_ascii_char, ascii_name, token)) => (ascii_name, token),
@@ -362,10 +362,9 @@ crate fn check_for_substitution<'a>(
ascii_char, ascii_name
);
err.span_suggestion(
-Span::new(
+Span::with_root_ctxt(
pos,
pos + Pos::from_usize('“'.len_utf8() + s.len() + '”'.len_utf8()),
-NO_EXPANSION,
),
&msg,
format!("\"{}\"", s),


@@ -12,7 +12,7 @@ use crate::symbol::{kw, sym};
use crate::tests::{matches_codepattern, string_to_stream, with_error_checking_parse};
use crate::tokenstream::{DelimSpan, TokenTree, TokenStream};
use crate::with_default_globals;
-use syntax_pos::{Span, BytePos, Pos, NO_EXPANSION};
+use syntax_pos::{Span, BytePos, Pos};
use std::path::PathBuf;
@@ -27,7 +27,7 @@ fn parse_item_from_source_str(name: FileName, source: String, sess: &ParseSess)
// produce a syntax_pos::span
fn sp(a: u32, b: u32) -> Span {
-Span::new(BytePos(a), BytePos(b), NO_EXPANSION)
+Span::with_root_ctxt(BytePos(a), BytePos(b))
}
/// Parse a string, return an expr


@@ -91,7 +91,7 @@ fn t6() {
fn t7() {
// Test span_to_lines for a span ending at the end of source_file
let sm = init_source_map();
-let span = Span::new(BytePos(12), BytePos(23), NO_EXPANSION);
+let span = Span::with_root_ctxt(BytePos(12), BytePos(23));
let file_lines = sm.span_to_lines(span).unwrap();
assert_eq!(file_lines.file.name, PathBuf::from("blork.rs").into());
@@ -107,7 +107,7 @@ fn span_from_selection(input: &str, selection: &str) -> Span {
assert_eq!(input.len(), selection.len());
let left_index = selection.find('~').unwrap() as u32;
let right_index = selection.rfind('~').map(|x|x as u32).unwrap_or(left_index);
-Span::new(BytePos(left_index), BytePos(right_index + 1), NO_EXPANSION)
+Span::with_root_ctxt(BytePos(left_index), BytePos(right_index + 1))
}
/// Tests span_to_snippet and span_to_lines for a span converting 3
@@ -137,7 +137,7 @@ fn span_to_snippet_and_lines_spanning_multiple_lines() {
fn t8() {
// Test span_to_snippet for a span ending at the end of source_file
let sm = init_source_map();
-let span = Span::new(BytePos(12), BytePos(23), NO_EXPANSION);
+let span = Span::with_root_ctxt(BytePos(12), BytePos(23));
let snippet = sm.span_to_snippet(span);
assert_eq!(snippet, Ok("second line".to_string()));
@@ -147,7 +147,7 @@ fn t8() {
fn t9() {
// Test span_to_str for a span ending at the end of source_file
let sm = init_source_map();
-let span = Span::new(BytePos(12), BytePos(23), NO_EXPANSION);
+let span = Span::with_root_ctxt(BytePos(12), BytePos(23));
let sstr = sm.span_to_string(span);
assert_eq!(sstr, "blork.rs:2:1: 2:12");
@@ -198,10 +198,9 @@ impl SourceMapExtension for SourceMap {
let lo = hi + offset;
hi = lo + substring.len();
if i == n {
-let span = Span::new(
+let span = Span::with_root_ctxt(
BytePos(lo as u32 + file.start_pos.0),
BytePos(hi as u32 + file.start_pos.0),
-NO_EXPANSION,
);
assert_eq!(&self.span_to_snippet(span).unwrap()[..],
substring);


@@ -9,7 +9,7 @@ use crate::with_default_globals;
use errors::emitter::EmitterWriter;
use errors::Handler;
use rustc_data_structures::sync::Lrc;
-use syntax_pos::{BytePos, NO_EXPANSION, Span, MultiSpan};
+use syntax_pos::{BytePos, Span, MultiSpan};
use std::io;
use std::io::prelude::*;
@@ -169,7 +169,7 @@ fn make_span(file_text: &str, start: &Position, end: &Position) -> Span {
let start = make_pos(file_text, start);
let end = make_pos(file_text, end) + end.string.len(); // just after matching thing ends
assert!(start <= end);
-Span::new(BytePos(start as u32), BytePos(end as u32), NO_EXPANSION)
+Span::with_root_ctxt(BytePos(start as u32), BytePos(end as u32))
}
fn make_pos(file_text: &str, pos: &Position) -> usize {


@@ -3,14 +3,14 @@ use super::*;
use crate::ast::Name;
use crate::with_default_globals;
use crate::tests::string_to_stream;
-use syntax_pos::{Span, BytePos, NO_EXPANSION};
+use syntax_pos::{Span, BytePos};
fn string_to_ts(string: &str) -> TokenStream {
string_to_stream(string.to_owned())
}
fn sp(a: u32, b: u32) -> Span {
-Span::new(BytePos(a), BytePos(b), NO_EXPANSION)
+Span::with_root_ctxt(BytePos(a), BytePos(b))
}
#[test]


@@ -29,7 +29,7 @@ pub fn expand(
};
// Generate a bunch of new items using the AllocFnFactory
-let span = item.span.with_ctxt(SyntaxContext::empty().apply_mark(ecx.current_expansion.id));
+let span = item.span.with_ctxt(SyntaxContext::root().apply_mark(ecx.current_expansion.id));
let f = AllocFnFactory {
span,
kind: AllocatorKind::Global,


@@ -29,7 +29,7 @@ pub fn expand_test_case(
if !ecx.ecfg.should_test { return vec![]; }
-let sp = attr_sp.with_ctxt(SyntaxContext::empty().apply_mark(ecx.current_expansion.id));
+let sp = attr_sp.with_ctxt(SyntaxContext::root().apply_mark(ecx.current_expansion.id));
let mut item = anno_item.expect_item();
item = item.map(|mut item| {
item.vis = respan(item.vis.span, ast::VisibilityKind::Public);
@@ -93,7 +93,7 @@ pub fn expand_test_or_bench(
return vec![Annotatable::Item(item)];
}
-let ctxt = SyntaxContext::empty().apply_mark(cx.current_expansion.id);
+let ctxt = SyntaxContext::root().apply_mark(cx.current_expansion.id);
let (sp, attr_sp) = (item.span.with_ctxt(ctxt), attr_sp.with_ctxt(ctxt));
// Gensym "test" so we can extern crate without conflicting with any local names


@@ -246,7 +246,7 @@ impl HygieneData {
fn marks(&self, mut ctxt: SyntaxContext) -> Vec<(ExpnId, Transparency)> {
let mut marks = Vec::new();
-while ctxt != SyntaxContext::empty() {
+while ctxt != SyntaxContext::root() {
marks.push((self.outer_expn(ctxt), self.outer_transparency(ctxt)));
ctxt = self.parent_ctxt(ctxt);
}
@@ -286,14 +286,14 @@
}
let call_site_ctxt =
-self.expn_info(expn_id).map_or(SyntaxContext::empty(), |info| info.call_site.ctxt());
+self.expn_info(expn_id).map_or(SyntaxContext::root(), |info| info.call_site.ctxt());
let mut call_site_ctxt = if transparency == Transparency::SemiTransparent {
self.modern(call_site_ctxt)
} else {
self.modern_and_legacy(call_site_ctxt)
};
-if call_site_ctxt == SyntaxContext::empty() {
+if call_site_ctxt == SyntaxContext::root() {
return self.apply_mark_internal(ctxt, expn_id, transparency);
}
@@ -400,7 +400,7 @@ pub fn update_dollar_crate_names(mut get_name: impl FnMut(SyntaxContext) -> Symb
impl SyntaxContext {
#[inline]
-pub const fn empty() -> Self {
+pub const fn root() -> Self {
SyntaxContext(0)
}
@@ -615,7 +615,7 @@ impl Span {
pub fn fresh_expansion(self, parent: ExpnId, expn_info: ExpnInfo) -> Span {
HygieneData::with(|data| {
let expn_id = data.fresh_expn(parent, Some(expn_info));
-self.with_ctxt(data.apply_mark(SyntaxContext::empty(), expn_id))
+self.with_ctxt(data.apply_mark(SyntaxContext::root(), expn_id))
})
}
}
@@ -775,6 +775,6 @@ impl Encodable for SyntaxContext {
impl Decodable for SyntaxContext {
fn decode<D: Decoder>(_: &mut D) -> Result<SyntaxContext, D::Error> {
-Ok(SyntaxContext::empty()) // FIXME(jseyfried) intercrate hygiene
+Ok(SyntaxContext::root()) // FIXME(jseyfried) intercrate hygiene
}
}


@@ -291,7 +291,12 @@ impl Span {
/// Returns `true` if this span comes from a macro or desugaring.
#[inline]
pub fn from_expansion(self) -> bool {
-self.ctxt() != SyntaxContext::empty()
+self.ctxt() != SyntaxContext::root()
}
+#[inline]
+pub fn with_root_ctxt(lo: BytePos, hi: BytePos) -> Span {
+Span::new(lo, hi, SyntaxContext::root())
+}
/// Returns a new span representing an empty span at the beginning of this span
@@ -474,9 +479,9 @@ impl Span {
// Return the macro span on its own to avoid weird diagnostic output. It is preferable to
// have an incomplete span than a completely nonsensical one.
if span_data.ctxt != end_data.ctxt {
-if span_data.ctxt == SyntaxContext::empty() {
+if span_data.ctxt == SyntaxContext::root() {
return end;
-} else if end_data.ctxt == SyntaxContext::empty() {
+} else if end_data.ctxt == SyntaxContext::root() {
return self;
}
// Both spans fall within a macro.
@@ -485,7 +490,7 @@
Span::new(
cmp::min(span_data.lo, end_data.lo),
cmp::max(span_data.hi, end_data.hi),
-if span_data.ctxt == SyntaxContext::empty() { end_data.ctxt } else { span_data.ctxt },
+if span_data.ctxt == SyntaxContext::root() { end_data.ctxt } else { span_data.ctxt },
)
}
@@ -496,7 +501,7 @@
Span::new(
span.hi,
end.lo,
-if end.ctxt == SyntaxContext::empty() { end.ctxt } else { span.ctxt },
+if end.ctxt == SyntaxContext::root() { end.ctxt } else { span.ctxt },
)
}
@@ -507,7 +512,7 @@
Span::new(
span.lo,
end.lo,
-if end.ctxt == SyntaxContext::empty() { end.ctxt } else { span.ctxt },
+if end.ctxt == SyntaxContext::root() { end.ctxt } else { span.ctxt },
)
}
@@ -617,7 +622,7 @@ impl rustc_serialize::UseSpecializedDecodable for Span {
d.read_struct("Span", 2, |d| {
let lo = d.read_struct_field("lo", 0, Decodable::decode)?;
let hi = d.read_struct_field("hi", 1, Decodable::decode)?;
-Ok(Span::new(lo, hi, NO_EXPANSION))
+Ok(Span::with_root_ctxt(lo, hi))
})
}
}
@@ -761,8 +766,6 @@ impl From<Vec<Span>> for MultiSpan {
}
}
-pub const NO_EXPANSION: SyntaxContext = SyntaxContext::empty();
/// Identifies an offset of a multi-byte character in a `SourceFile`.
#[derive(Copy, Clone, RustcEncodable, RustcDecodable, Eq, PartialEq, Debug)]
pub struct MultiByteChar {