//! Machinery for hygienic macros.
//!
//! Inspired by Matthew Flatt et al., “Macros That Work Together: Compile-Time Bindings, Partial
//! Expansion, and Definition Contexts,” *Journal of Functional Programming* 22, no. 2
//! (March 1, 2012): 181–216, <https://doi.org/10.1017/S0956796812000093>.

// Hygiene data is stored in a global variable and accessed via TLS, which
// means that accesses are somewhat expensive. (`HygieneData::with`
// encapsulates a single access.) Therefore, on hot code paths it is worth
// ensuring that multiple HygieneData accesses are combined into a single
// `HygieneData::with`.
//
// This explains why `HygieneData`, `SyntaxContext` and `ExpnId` have interfaces
// with a certain amount of redundancy in them. For example,
// `SyntaxContext::outer_expn_data` combines `SyntaxContext::outer` and
// `ExpnId::expn_data` so that two `HygieneData` accesses can be performed within
// a single `HygieneData::with` call.
//
// It also explains why many functions appear in `HygieneData` and again in
// `SyntaxContext` or `ExpnId`. For example, `HygieneData::outer` and
// `SyntaxContext::outer` do the same thing, but the former is for use within a
// `HygieneData::with` call while the latter is for use outside such a call.
//
// When modifying this file it is important to understand this distinction,
// because getting it wrong can lead to nested `HygieneData::with` calls that
// trigger runtime aborts. (Fortunately these are obvious and easy to fix.)
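
// For example (an illustrative sketch, not a helper defined in this file): prefer the
// single-access form
//
//     let expn_data = ctxt.outer_expn_data();        // one `HygieneData::with` access
//
// over the equivalent but more expensive two-access form
//
//     let expn_data = ctxt.outer_expn().expn_data(); // two `HygieneData::with` accesses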

use crate::edition::Edition;
use crate::symbol::{kw, sym, Symbol};
use crate::SESSION_GLOBALS;
use crate::{BytePos, CachingSourceMapView, ExpnIdCache, SourceFile, Span, DUMMY_SP};

use crate::def_id::{CrateNum, DefId, CRATE_DEF_INDEX, LOCAL_CRATE};
use rustc_data_structures::fingerprint::Fingerprint;
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_data_structures::sync::{Lock, Lrc};
use rustc_macros::HashStable_Generic;
use rustc_serialize::{Decodable, Decoder, Encodable, Encoder};
use std::fmt;
use std::hash::Hash;
use std::thread::LocalKey;
use tracing::*;

/// A `SyntaxContext` represents a chain of pairs `(ExpnId, Transparency)` named "marks".
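///
/// # Example (an illustrative sketch, not part of the original docs)
///
/// ```ignore (illustrative)
/// // `ident` stands for any identifier token produced by a macro expansion;
/// // its chain of marks can be inspected through its span's context.
/// let marks: Vec<(ExpnId, Transparency)> = ident.span.ctxt().marks();
/// ```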
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct SyntaxContext(u32);

#[derive(Debug, Encodable, Decodable, Clone)]
pub struct SyntaxContextData {
    outer_expn: ExpnId,
    outer_transparency: Transparency,
    parent: SyntaxContext,
    /// This context, but with all transparent and semi-transparent expansions filtered away.
    opaque: SyntaxContext,
    /// This context, but with all transparent expansions filtered away.
    opaque_and_semitransparent: SyntaxContext,
    /// Name of the crate to which `$crate` with this context would resolve.
    dollar_crate_name: Symbol,
}

/// A unique ID associated with a macro invocation and expansion.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
pub struct ExpnId(u32);

/// A property of a macro expansion that determines how identifiers
/// produced by that expansion are resolved.
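///
/// # Example (an illustrative sketch, not part of the original docs)
///
/// ```ignore (illustrative)
/// // `macro_rules` expansions are semi-transparent: the local `x` below is
/// // resolved at the definition site, so the caller cannot name it.
/// macro_rules! define_x {
///     () => { let x = 1; };
/// }
/// define_x!();
/// // println!("{}", x); // ERROR: `x` from the expansion is not in scope here.
/// ```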
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Hash, Debug, Encodable, Decodable)]
#[derive(HashStable_Generic)]
pub enum Transparency {
    /// Identifier produced by a transparent expansion is always resolved at call-site.
    /// Call-site spans in procedural macros, hygiene opt-out in `macro` should use this.
    Transparent,
    /// Identifier produced by a semi-transparent expansion may be resolved
    /// either at call-site or at definition-site.
    /// If it's a local variable, label or `$crate` then it's resolved at def-site.
    /// Otherwise it's resolved at call-site.
    /// `macro_rules` macros behave like this, built-in macros currently behave like this too,
    /// but that's an implementation detail.
    SemiTransparent,
    /// Identifier produced by an opaque expansion is always resolved at definition-site.
    /// Def-site spans in procedural macros, identifiers from `macro` by default use this.
    Opaque,
}

impl ExpnId {
    pub fn fresh(expn_data: Option<ExpnData>) -> Self {
        let has_data = expn_data.is_some();
        let expn_id = HygieneData::with(|data| data.fresh_expn(expn_data));
        if has_data {
            update_disambiguator(expn_id);
        }
        expn_id
    }

    /// The ID of the theoretical expansion that generates freshly parsed, unexpanded AST.
    #[inline]
    pub fn root() -> Self {
        ExpnId(0)
    }

    #[inline]
    pub fn as_u32(self) -> u32 {
        self.0
    }

    #[inline]
    pub fn from_u32(raw: u32) -> ExpnId {
        ExpnId(raw)
    }

    #[inline]
    pub fn expn_data(self) -> ExpnData {
        HygieneData::with(|data| data.expn_data(self).clone())
    }

    #[inline]
    pub fn set_expn_data(self, mut expn_data: ExpnData) {
        HygieneData::with(|data| {
            let old_expn_data = &mut data.expn_data[self.0 as usize];
            assert!(old_expn_data.is_none(), "expansion data is reset for an expansion ID");
            assert_eq!(expn_data.orig_id, None);
            expn_data.orig_id = Some(self.as_u32());
            *old_expn_data = Some(expn_data);
        });
        update_disambiguator(self)
    }

    pub fn is_descendant_of(self, ancestor: ExpnId) -> bool {
        HygieneData::with(|data| data.is_descendant_of(self, ancestor))
    }

    /// `expn_id.outer_expn_is_descendant_of(ctxt)` is equivalent to but faster than
    /// `expn_id.is_descendant_of(ctxt.outer_expn())`.
    pub fn outer_expn_is_descendant_of(self, ctxt: SyntaxContext) -> bool {
        HygieneData::with(|data| data.is_descendant_of(self, data.outer_expn(ctxt)))
    }

    /// Returns span for the macro which originally caused this expansion to happen.
    ///
    /// Stops backtracing at include! boundary.
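    ///
    /// # Example (an illustrative sketch, not part of the original docs)
    ///
    /// ```ignore (illustrative)
    /// // For a span produced by nested expansions (say `outer!` expands to `inner!()`),
    /// // walking the call sites ends at the user-written `outer!(...)` invocation:
    /// let cause: Option<Span> = span.ctxt().outer_expn().expansion_cause();
    /// ```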
    pub fn expansion_cause(mut self) -> Option<Span> {
        let mut last_macro = None;
        loop {
            let expn_data = self.expn_data();
            // Stop going up the backtrace once include! is encountered
            if expn_data.is_root()
                || expn_data.kind == ExpnKind::Macro(MacroKind::Bang, sym::include)
            {
                break;
            }
            self = expn_data.call_site.ctxt().outer_expn();
            last_macro = Some(expn_data.call_site);
        }
        last_macro
    }
}

#[derive(Debug)]
pub struct HygieneData {
    /// Each expansion should have an associated expansion data, but sometimes there's a delay
    /// between creation of an expansion ID and obtaining its data (e.g. macros are collected
    /// first and then resolved later), so we use an `Option` here.
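    ///
    /// # Example (an illustrative sketch, not part of the original docs)
    ///
    /// ```ignore (illustrative)
    /// // An expansion ID can be handed out before its data is known...
    /// let expn_id = ExpnId::fresh(None);
    /// // ...and the data is attached later, once the macro has been resolved.
    /// expn_id.set_expn_data(expn_data);
    /// ```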
    expn_data: Vec<Option<ExpnData>>,
    syntax_context_data: Vec<SyntaxContextData>,
    syntax_context_map: FxHashMap<(SyntaxContext, ExpnId, Transparency), SyntaxContext>,
    /// Maps the `Fingerprint` of an `ExpnData` to the next disambiguator value.
    /// This is used by `update_disambiguator` to keep track of which `ExpnData`s
    /// would have collisions without a disambiguator.
    /// The keys of this map are always computed with `ExpnData.disambiguator`
    /// set to 0.
    expn_data_disambiguators: FxHashMap<Fingerprint, u32>,
}

impl HygieneData {
    crate fn new(edition: Edition) -> Self {
        let mut root_data = ExpnData::default(
            ExpnKind::Root,
            DUMMY_SP,
            edition,
            Some(DefId::local(CRATE_DEF_INDEX)),
        );
        root_data.orig_id = Some(0);

        HygieneData {
            expn_data: vec![Some(root_data)],
            syntax_context_data: vec![SyntaxContextData {
                outer_expn: ExpnId::root(),
                outer_transparency: Transparency::Opaque,
                parent: SyntaxContext(0),
                opaque: SyntaxContext(0),
                opaque_and_semitransparent: SyntaxContext(0),
                dollar_crate_name: kw::DollarCrate,
            }],
            syntax_context_map: FxHashMap::default(),
            expn_data_disambiguators: FxHashMap::default(),
        }
    }

    pub fn with<T, F: FnOnce(&mut HygieneData) -> T>(f: F) -> T {
        SESSION_GLOBALS.with(|session_globals| f(&mut *session_globals.hygiene_data.borrow_mut()))
    }

    fn fresh_expn(&mut self, mut expn_data: Option<ExpnData>) -> ExpnId {
        let raw_id = self.expn_data.len() as u32;
        if let Some(data) = expn_data.as_mut() {
            assert_eq!(data.orig_id, None);
            data.orig_id = Some(raw_id);
        }
        self.expn_data.push(expn_data);
        ExpnId(raw_id)
    }

    fn expn_data(&self, expn_id: ExpnId) -> &ExpnData {
        self.expn_data[expn_id.0 as usize].as_ref().expect("no expansion data for an expansion ID")
    }

    fn is_descendant_of(&self, mut expn_id: ExpnId, ancestor: ExpnId) -> bool {
        while expn_id != ancestor {
            if expn_id == ExpnId::root() {
                return false;
            }
            expn_id = self.expn_data(expn_id).parent;
        }
        true
    }

    fn normalize_to_macros_2_0(&self, ctxt: SyntaxContext) -> SyntaxContext {
        self.syntax_context_data[ctxt.0 as usize].opaque
    }

    fn normalize_to_macro_rules(&self, ctxt: SyntaxContext) -> SyntaxContext {
        self.syntax_context_data[ctxt.0 as usize].opaque_and_semitransparent
    }

    fn outer_expn(&self, ctxt: SyntaxContext) -> ExpnId {
        self.syntax_context_data[ctxt.0 as usize].outer_expn
    }

    fn outer_mark(&self, ctxt: SyntaxContext) -> (ExpnId, Transparency) {
        let data = &self.syntax_context_data[ctxt.0 as usize];
        (data.outer_expn, data.outer_transparency)
    }

    fn parent_ctxt(&self, ctxt: SyntaxContext) -> SyntaxContext {
        self.syntax_context_data[ctxt.0 as usize].parent
    }

    fn remove_mark(&self, ctxt: &mut SyntaxContext) -> (ExpnId, Transparency) {
        let outer_mark = self.outer_mark(*ctxt);
        *ctxt = self.parent_ctxt(*ctxt);
        outer_mark
    }

    fn marks(&self, mut ctxt: SyntaxContext) -> Vec<(ExpnId, Transparency)> {
        let mut marks = Vec::new();
        while ctxt != SyntaxContext::root() {
            debug!("marks: getting parent of {:?}", ctxt);
            marks.push(self.outer_mark(ctxt));
            ctxt = self.parent_ctxt(ctxt);
        }
        marks.reverse();
        marks
    }

    fn walk_chain(&self, mut span: Span, to: SyntaxContext) -> Span {
        debug!("walk_chain({:?}, {:?})", span, to);
        debug!("walk_chain: span ctxt = {:?}", span.ctxt());
        while span.from_expansion() && span.ctxt() != to {
            let outer_expn = self.outer_expn(span.ctxt());
            debug!("walk_chain({:?}): outer_expn={:?}", span, outer_expn);
            let expn_data = self.expn_data(outer_expn);
            debug!("walk_chain({:?}): expn_data={:?}", span, expn_data);
            span = expn_data.call_site;
        }
        span
    }

    fn adjust(&self, ctxt: &mut SyntaxContext, expn_id: ExpnId) -> Option<ExpnId> {
        let mut scope = None;
        while !self.is_descendant_of(expn_id, self.outer_expn(*ctxt)) {
            scope = Some(self.remove_mark(ctxt).0);
        }
        scope
    }

    fn apply_mark(
        &mut self,
        ctxt: SyntaxContext,
        expn_id: ExpnId,
        transparency: Transparency,
    ) -> SyntaxContext {
        assert_ne!(expn_id, ExpnId::root());
        if transparency == Transparency::Opaque {
            return self.apply_mark_internal(ctxt, expn_id, transparency);
        }

        let call_site_ctxt = self.expn_data(expn_id).call_site.ctxt();
        let mut call_site_ctxt = if transparency == Transparency::SemiTransparent {
            self.normalize_to_macros_2_0(call_site_ctxt)
        } else {
            self.normalize_to_macro_rules(call_site_ctxt)
        };

        if call_site_ctxt == SyntaxContext::root() {
            return self.apply_mark_internal(ctxt, expn_id, transparency);
        }

        // Otherwise, `expn_id` is a macros 1.0 definition and the call site is in a
        // macros 2.0 expansion, i.e., a macros 1.0 invocation is in a macros 2.0 definition.
        //
        // In this case, the tokens from the macros 1.0 definition inherit the hygiene
        // at their invocation. That is, we pretend that the macros 1.0 definition
        // was defined at its invocation (i.e., inside the macros 2.0 definition)
        // so that the macros 2.0 definition remains hygienic.
        //
        // See the example at `test/ui/hygiene/legacy_interaction.rs`.
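        //
        // An illustrative sketch of that situation (hypothetical code, not the actual test):
        //
        //     macro new_style() {
        //         macro_rules! old_style { () => { /* ... */ } }
        //         old_style!(); // macros 1.0 call site inside a macros 2.0 definition
        //     }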
        for (expn_id, transparency) in self.marks(ctxt) {
            call_site_ctxt = self.apply_mark_internal(call_site_ctxt, expn_id, transparency);
        }
        self.apply_mark_internal(call_site_ctxt, expn_id, transparency)
    }

    fn apply_mark_internal(
        &mut self,
        ctxt: SyntaxContext,
        expn_id: ExpnId,
        transparency: Transparency,
    ) -> SyntaxContext {
        let syntax_context_data = &mut self.syntax_context_data;
        let mut opaque = syntax_context_data[ctxt.0 as usize].opaque;
        let mut opaque_and_semitransparent =
            syntax_context_data[ctxt.0 as usize].opaque_and_semitransparent;

        if transparency >= Transparency::Opaque {
            let parent = opaque;
            opaque = *self
                .syntax_context_map
                .entry((parent, expn_id, transparency))
                .or_insert_with(|| {
                    let new_opaque = SyntaxContext(syntax_context_data.len() as u32);
                    syntax_context_data.push(SyntaxContextData {
                        outer_expn: expn_id,
                        outer_transparency: transparency,
                        parent,
                        opaque: new_opaque,
                        opaque_and_semitransparent: new_opaque,
                        dollar_crate_name: kw::DollarCrate,
                    });
                    new_opaque
                });
        }

        if transparency >= Transparency::SemiTransparent {
            let parent = opaque_and_semitransparent;
            opaque_and_semitransparent = *self
                .syntax_context_map
                .entry((parent, expn_id, transparency))
                .or_insert_with(|| {
                    let new_opaque_and_semitransparent =
                        SyntaxContext(syntax_context_data.len() as u32);
                    syntax_context_data.push(SyntaxContextData {
                        outer_expn: expn_id,
                        outer_transparency: transparency,
                        parent,
                        opaque,
                        opaque_and_semitransparent: new_opaque_and_semitransparent,
                        dollar_crate_name: kw::DollarCrate,
                    });
                    new_opaque_and_semitransparent
                });
        }

        let parent = ctxt;
        *self.syntax_context_map.entry((parent, expn_id, transparency)).or_insert_with(|| {
            let new_opaque_and_semitransparent_and_transparent =
                SyntaxContext(syntax_context_data.len() as u32);
            syntax_context_data.push(SyntaxContextData {
                outer_expn: expn_id,
                outer_transparency: transparency,
                parent,
                opaque,
                opaque_and_semitransparent,
                dollar_crate_name: kw::DollarCrate,
            });
            new_opaque_and_semitransparent_and_transparent
        })
    }
}

pub fn clear_syntax_context_map() {
    HygieneData::with(|data| data.syntax_context_map = FxHashMap::default());
}

pub fn walk_chain(span: Span, to: SyntaxContext) -> Span {
    HygieneData::with(|data| data.walk_chain(span, to))
}

pub fn update_dollar_crate_names(mut get_name: impl FnMut(SyntaxContext) -> Symbol) {
    // The new contexts that need updating are at the end of the list and have `$crate` as a name.
    let (len, to_update) = HygieneData::with(|data| {
        (
            data.syntax_context_data.len(),
            data.syntax_context_data
                .iter()
                .rev()
                .take_while(|scdata| scdata.dollar_crate_name == kw::DollarCrate)
                .count(),
        )
    });
    // The callback must be called from outside of the `HygieneData` lock,
    // since it will try to acquire it too.
    let range_to_update = len - to_update..len;
    let names: Vec<_> =
        range_to_update.clone().map(|idx| get_name(SyntaxContext::from_u32(idx as u32))).collect();
    HygieneData::with(|data| {
        range_to_update.zip(names.into_iter()).for_each(|(idx, name)| {
            data.syntax_context_data[idx].dollar_crate_name = name;
        })
    })
}

pub fn debug_hygiene_data(verbose: bool) -> String {
    HygieneData::with(|data| {
        if verbose {
            format!("{:#?}", data)
        } else {
            let mut s = String::from("");
            s.push_str("Expansions:");
            data.expn_data.iter().enumerate().for_each(|(id, expn_info)| {
                let expn_info = expn_info.as_ref().expect("no expansion data for an expansion ID");
                s.push_str(&format!(
                    "\n{}: parent: {:?}, call_site_ctxt: {:?}, def_site_ctxt: {:?}, kind: {:?}",
                    id,
                    expn_info.parent,
                    expn_info.call_site.ctxt(),
                    expn_info.def_site.ctxt(),
                    expn_info.kind,
                ));
            });
            s.push_str("\n\nSyntaxContexts:");
            data.syntax_context_data.iter().enumerate().for_each(|(id, ctxt)| {
                s.push_str(&format!(
                    "\n#{}: parent: {:?}, outer_mark: ({:?}, {:?})",
                    id, ctxt.parent, ctxt.outer_expn, ctxt.outer_transparency,
                ));
            });
            s
        }
    })
}

impl SyntaxContext {
    #[inline]
    pub const fn root() -> Self {
        SyntaxContext(0)
    }

    #[inline]
    crate fn as_u32(self) -> u32 {
        self.0
    }

    #[inline]
    crate fn from_u32(raw: u32) -> SyntaxContext {
        SyntaxContext(raw)
    }

    /// Extend a syntax context with a given expansion and transparency.
    crate fn apply_mark(self, expn_id: ExpnId, transparency: Transparency) -> SyntaxContext {
        HygieneData::with(|data| data.apply_mark(self, expn_id, transparency))
    }

    /// Pulls a single mark off of the syntax context. This effectively moves the
    /// context up one macro definition level. That is, if we have a nested macro
    /// definition as follows:
    ///
    /// ```rust
    /// macro_rules! f {
    ///     macro_rules! g {
    ///         ...
    ///     }
    /// }
    /// ```
    ///
    /// and we have a SyntaxContext that is referring to something declared by an invocation
    /// of g (call it g1), calling remove_mark will result in the SyntaxContext for the
    /// invocation of f that created g1.
    /// Returns the mark that was removed.
    pub fn remove_mark(&mut self) -> ExpnId {
        HygieneData::with(|data| data.remove_mark(self).0)
    }

    pub fn marks(self) -> Vec<(ExpnId, Transparency)> {
        HygieneData::with(|data| data.marks(self))
    }

    /// Adjust this context for resolution in a scope created by the given expansion.
    /// For example, consider the following three resolutions of `f`:
    ///
    /// ```rust
    /// mod foo { pub fn f() {} } // `f`'s `SyntaxContext` is empty.
    /// m!(f);
    /// macro m($f:ident) {
    ///     mod bar {
    ///         pub fn f() {} // `f`'s `SyntaxContext` has a single `ExpnId` from `m`.
    ///         pub fn $f() {} // `$f`'s `SyntaxContext` is empty.
    ///     }
    ///     foo::f(); // `f`'s `SyntaxContext` has a single `ExpnId` from `m`
    ///     //^ Since `mod foo` is outside this expansion, `adjust` removes the mark from `f`,
    ///     //| and it resolves to `::foo::f`.
    ///     bar::f(); // `f`'s `SyntaxContext` has a single `ExpnId` from `m`
    ///     //^ Since `mod bar` is not outside this expansion, `adjust` does not change `f`,
    ///     //| and it resolves to `::bar::f`.
    ///     bar::$f(); // `f`'s `SyntaxContext` is empty.
    ///     //^ Since `mod bar` is not outside this expansion, `adjust` does not change `$f`,
    ///     //| and it resolves to `::bar::$f`.
    /// }
    /// ```
    /// This returns the expansion whose definition scope we use to privacy check the resolution,
    /// or `None` if we privacy check as usual (i.e., not w.r.t. a macro definition scope).
    pub fn adjust(&mut self, expn_id: ExpnId) -> Option<ExpnId> {
        HygieneData::with(|data| data.adjust(self, expn_id))
    }

    /// Like `SyntaxContext::adjust`, but also normalizes `self` to macros 2.0.
    pub fn normalize_to_macros_2_0_and_adjust(&mut self, expn_id: ExpnId) -> Option<ExpnId> {
        HygieneData::with(|data| {
            *self = data.normalize_to_macros_2_0(*self);
            data.adjust(self, expn_id)
        })
    }

    /// Adjust this context for resolution in a scope created by the given expansion
    /// via a glob import with the given `SyntaxContext`.
    /// For example:
    ///
    /// ```rust
    /// m!(f);
    /// macro m($i:ident) {
    ///     mod foo {
    ///         pub fn f() {} // `f`'s `SyntaxContext` has a single `ExpnId` from `m`.
    ///         pub fn $i() {} // `$i`'s `SyntaxContext` is empty.
    ///     }
    ///     n(f);
    ///     macro n($j:ident) {
    ///         use foo::*;
    ///         f(); // `f`'s `SyntaxContext` has a mark from `m` and a mark from `n`
    ///         //^ `glob_adjust` removes the mark from `n`, so this resolves to `foo::f`.
    ///         $i(); // `$i`'s `SyntaxContext` has a mark from `n`
    ///         //^ `glob_adjust` removes the mark from `n`, so this resolves to `foo::$i`.
    ///         $j(); // `$j`'s `SyntaxContext` has a mark from `m`
    ///         //^ This cannot be glob-adjusted, so this is a resolution error.
    ///     }
    /// }
    /// ```
    /// This returns `None` if the context cannot be glob-adjusted.
    /// Otherwise, it returns the scope to use when privacy checking (see `adjust` for details).
    pub fn glob_adjust(&mut self, expn_id: ExpnId, glob_span: Span) -> Option<Option<ExpnId>> {
        HygieneData::with(|data| {
            let mut scope = None;
            let mut glob_ctxt = data.normalize_to_macros_2_0(glob_span.ctxt());
            while !data.is_descendant_of(expn_id, data.outer_expn(glob_ctxt)) {
                scope = Some(data.remove_mark(&mut glob_ctxt).0);
                if data.remove_mark(self).0 != scope.unwrap() {
                    return None;
                }
            }
            if data.adjust(self, expn_id).is_some() {
                return None;
            }
            Some(scope)
        })
    }

    /// Undo `glob_adjust` if possible:
    ///
    /// ```rust
    /// if let Some(privacy_checking_scope) = self.reverse_glob_adjust(expansion, glob_ctxt) {
    ///     assert!(self.glob_adjust(expansion, glob_ctxt) == Some(privacy_checking_scope));
    /// }
    /// ```
    pub fn reverse_glob_adjust(
        &mut self,
        expn_id: ExpnId,
        glob_span: Span,
    ) -> Option<Option<ExpnId>> {
        HygieneData::with(|data| {
            if data.adjust(self, expn_id).is_some() {
                return None;
            }

            let mut glob_ctxt = data.normalize_to_macros_2_0(glob_span.ctxt());
            let mut marks = Vec::new();
            while !data.is_descendant_of(expn_id, data.outer_expn(glob_ctxt)) {
                marks.push(data.remove_mark(&mut glob_ctxt));
            }

            let scope = marks.last().map(|mark| mark.0);
            while let Some((expn_id, transparency)) = marks.pop() {
                *self = data.apply_mark(*self, expn_id, transparency);
            }
            Some(scope)
        })
    }

    pub fn hygienic_eq(self, other: SyntaxContext, expn_id: ExpnId) -> bool {
        HygieneData::with(|data| {
            let mut self_normalized = data.normalize_to_macros_2_0(self);
            data.adjust(&mut self_normalized, expn_id);
            self_normalized == data.normalize_to_macros_2_0(other)
        })
    }

    #[inline]
    pub fn normalize_to_macros_2_0(self) -> SyntaxContext {
        HygieneData::with(|data| data.normalize_to_macros_2_0(self))
    }

    #[inline]
    pub fn normalize_to_macro_rules(self) -> SyntaxContext {
        HygieneData::with(|data| data.normalize_to_macro_rules(self))
    }

    #[inline]
    pub fn outer_expn(self) -> ExpnId {
        HygieneData::with(|data| data.outer_expn(self))
    }

    /// `ctxt.outer_expn_data()` is equivalent to but faster than
    /// `ctxt.outer_expn().expn_data()`.
    #[inline]
    pub fn outer_expn_data(self) -> ExpnData {
        HygieneData::with(|data| data.expn_data(data.outer_expn(self)).clone())
    }

    #[inline]
    pub fn outer_mark(self) -> (ExpnId, Transparency) {
        HygieneData::with(|data| data.outer_mark(self))
    }

    pub fn dollar_crate_name(self) -> Symbol {
        HygieneData::with(|data| data.syntax_context_data[self.0 as usize].dollar_crate_name)
    }

    pub fn edition(self) -> Edition {
        self.outer_expn_data().edition
    }
}

impl fmt::Debug for SyntaxContext {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "#{}", self.0)
    }
}

impl Span {
    /// Creates a fresh expansion with given properties.
    /// Expansions are normally created by macros, but in some cases expansions are created for
    /// other compiler-generated code to set per-span properties like allowed unstable features.
    /// The returned span belongs to the created expansion and has the new properties,
    /// but its location is inherited from the current span.
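    ///
    /// # Example (an illustrative sketch, not part of the original docs)
    ///
    /// ```ignore (illustrative)
    /// // Mark a compiler-generated span as coming from an AST pass:
    /// let expn_data =
    ///     ExpnData::default(ExpnKind::AstPass(AstPass::StdImports), span, edition, None);
    /// let marked: Span = span.fresh_expansion(expn_data);
    /// ```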
    pub fn fresh_expansion(self, expn_data: ExpnData) -> Span {
        self.fresh_expansion_with_transparency(expn_data, Transparency::Transparent)
    }

    pub fn fresh_expansion_with_transparency(
        self,
        expn_data: ExpnData,
        transparency: Transparency,
    ) -> Span {
        let expn_id = ExpnId::fresh(Some(expn_data));
        HygieneData::with(|data| {
            self.with_ctxt(data.apply_mark(SyntaxContext::root(), expn_id, transparency))
        })
    }

    /// Reuses the span but adds information like the kind of the desugaring and features that are
    /// allowed inside this span.
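    ///
    /// # Example (an illustrative sketch, not part of the original docs)
    ///
    /// ```ignore (illustrative)
    /// // Mark a span as belonging to the `?` operator desugaring:
    /// let desugared: Span = span.mark_with_reason(None, DesugaringKind::QuestionMark, edition);
    /// ```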
    pub fn mark_with_reason(
        self,
        allow_internal_unstable: Option<Lrc<[Symbol]>>,
        reason: DesugaringKind,
        edition: Edition,
    ) -> Span {
        self.fresh_expansion(ExpnData {
            allow_internal_unstable,
            ..ExpnData::default(ExpnKind::Desugaring(reason), self, edition, None)
        })
    }
}

/// A subset of properties from both macro definition and macro call available through global data.
/// Avoid using this if you have access to the original definition or call structures.
#[derive(Clone, Debug, Encodable, Decodable, HashStable_Generic)]
pub struct ExpnData {
    // --- The part unique to each expansion.
    /// The kind of this expansion - macro or compiler desugaring.
    pub kind: ExpnKind,
    /// The expansion that produced this expansion.
    pub parent: ExpnId,
    /// The location of the actual macro invocation or syntax sugar, e.g.
    /// `let x = foo!();` or `if let Some(y) = x {}`
    ///
    /// This may recursively refer to other macro invocations, e.g., if
    /// `foo!()` invoked `bar!()` internally, and there was an
    /// expression inside `bar!`; the call_site of the expression in
    /// the expansion would point to the `bar!` invocation; that
    /// call_site span would have its own ExpnData, with the call_site
    /// pointing to the `foo!` invocation.
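    ///
    /// # Example (an illustrative sketch, not part of the original docs)
    ///
    /// ```ignore (illustrative)
    /// let x = foo!(); // `call_site` of the `foo!` expansion is this statement's span;
    ///                 // if `foo!`'s output itself contains `bar!()`, spans produced by
    ///                 // `bar!` carry a `call_site` pointing at that emitted `bar!()`.
    /// ```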
pub call_site: Span,
|
2019-06-17 20:55:22 +00:00
|
|
|
|
|
|
|
|
|
// --- The part specific to the macro/desugaring definition.
|
2019-08-13 20:39:48 +00:00
|
|
|
|
// --- It may be reasonable to share this part between expansions with the same definition,
|
|
|
|
|
// --- but such sharing is known to bring some minor inconveniences without also bringing
|
|
|
|
|
// --- noticeable perf improvements (PR #62898).
|
2019-06-30 00:05:52 +00:00
|
|
|
|
/// The span of the macro definition (possibly dummy).
|
2018-06-23 18:41:39 +00:00
|
|
|
|
/// This span serves only informational purpose and is not used for resolution.
|
2019-06-30 00:05:52 +00:00
|
|
|
|
pub def_site: Span,
|
2020-05-01 20:28:15 +00:00
|
|
|
|
/// List of `#[unstable]`/feature-gated features that the macro is allowed to use
|
2019-02-03 11:55:00 +00:00
|
|
|
|
/// internally without forcing the whole crate to opt-in
|
2017-03-17 04:04:41 +00:00
|
|
|
|
/// to them.
|
2019-02-08 09:21:21 +00:00
|
|
|
|
pub allow_internal_unstable: Option<Lrc<[Symbol]>>,
|
2017-08-08 15:21:20 +00:00
|
|
|
|
/// Whether the macro is allowed to use `unsafe` internally
|
|
|
|
|
/// even if the user crate has `#![forbid(unsafe_code)]`.
|
|
|
|
|
pub allow_internal_unsafe: bool,
|
2018-06-11 11:21:36 +00:00
|
|
|
|
/// Enables the macro helper hack (`ident!(...)` -> `$crate::ident!(...)`)
|
|
|
|
|
/// for a given macro.
|
|
|
|
|
pub local_inner_macros: bool,
|
2018-04-27 23:08:16 +00:00
|
|
|
|
/// Edition of the crate in which the macro is defined.
|
|
|
|
|
pub edition: Edition,
|
2020-05-22 20:57:25 +00:00
|
|
|
|
/// The `DefId` of the macro being invoked,
|
|
|
|
|
/// if this `ExpnData` corresponds to a macro invocation
|
|
|
|
|
pub macro_def_id: Option<DefId>,
|
2020-09-27 04:09:45 +00:00
|
|
|
|
/// The crate that originally created this `ExpnData`. During
|
2020-03-17 15:45:02 +00:00
|
|
|
|
/// metadata serialization, we only encode `ExpnData`s that were
|
|
|
|
|
/// created locally - when our serialized metadata is decoded,
|
|
|
|
|
/// foreign `ExpnId`s will have their `ExpnData` looked up
|
|
|
|
|
/// from the crate specified by `Crate
|
2021-01-03 13:56:49 +00:00
|
|
|
|
krate: CrateNum,
|
2020-03-17 15:45:02 +00:00
|
|
|
|
/// The raw that this `ExpnData` had in its original crate.
|
|
|
|
|
/// An `ExpnData` can be created before being assigned an `ExpnId`,
|
|
|
|
|
/// so this might be `None` until `set_expn_data` is called
|
2020-07-22 19:11:25 +00:00
|
|
|
|
// This is used only for serialization/deserialization purposes:
|
|
|
|
|
// two `ExpnData`s that differ only in their `orig_id` should
|
|
|
|
|
// be considered equivalent.
|
|
|
|
|
#[stable_hasher(ignore)]
|
2021-01-03 13:56:49 +00:00
|
|
|
|
orig_id: Option<u32>,
|
2020-12-07 22:44:40 +00:00
|
|
|
|
|
|
|
|
|
/// Used to force two `ExpnData`s to have different `Fingerprint`s.
|
|
|
|
|
/// Due to macro expansion, it's possible to end up with two `ExpnId`s
|
|
|
|
|
/// that have identical `ExpnData`s. This violates the constract of `HashStable`
|
|
|
|
|
/// - the two `ExpnId`s are not equal, but their `Fingerprint`s are equal
|
|
|
|
|
/// (since the numerical `ExpnId` value is not considered by the `HashStable`
|
|
|
|
|
/// implementation).
|
|
|
|
|
///
|
|
|
|
|
/// The `disambiguator` field is set by `update_disambiguator` when two distinct
|
|
|
|
|
/// `ExpnId`s would end up with the same `Fingerprint`. Since `ExpnData` includes
|
|
|
|
|
/// a `krate` field, this value only needs to be unique within a single crate.
|
|
|
|
|
disambiguator: u32,
|
2017-03-17 04:04:41 +00:00
|
|
|
|
}
|
|
|
|
|
|
2020-12-07 22:44:40 +00:00
|
|
|
|
// These would require special handling of `orig_id`.
|
2020-07-22 19:11:25 +00:00
|
|
|
|
impl !PartialEq for ExpnData {}
|
2020-12-07 22:44:40 +00:00
|
|
|
|
impl !Hash for ExpnData {}
|
2020-07-22 19:11:25 +00:00
|
|
|
|
|
2019-08-13 20:56:42 +00:00
|
|
|
|
impl ExpnData {
|
2021-01-03 13:56:49 +00:00
|
|
|
|
pub fn new(
|
|
|
|
|
kind: ExpnKind,
|
|
|
|
|
parent: ExpnId,
|
|
|
|
|
call_site: Span,
|
|
|
|
|
def_site: Span,
|
|
|
|
|
allow_internal_unstable: Option<Lrc<[Symbol]>>,
|
|
|
|
|
allow_internal_unsafe: bool,
|
|
|
|
|
local_inner_macros: bool,
|
|
|
|
|
edition: Edition,
|
|
|
|
|
macro_def_id: Option<DefId>,
|
|
|
|
|
) -> ExpnData {
|
|
|
|
|
ExpnData {
|
|
|
|
|
kind,
|
|
|
|
|
parent,
|
|
|
|
|
call_site,
|
|
|
|
|
def_site,
|
|
|
|
|
allow_internal_unstable,
|
|
|
|
|
allow_internal_unsafe,
|
|
|
|
|
local_inner_macros,
|
|
|
|
|
edition,
|
|
|
|
|
macro_def_id,
|
|
|
|
|
krate: LOCAL_CRATE,
|
|
|
|
|
orig_id: None,
|
2020-12-07 22:44:40 +00:00
|
|
|
|
disambiguator: 0,
|
2021-01-03 13:56:49 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2019-08-13 20:56:42 +00:00
|
|
|
|
/// Constructs expansion data with default properties.
|
2020-05-22 20:57:25 +00:00
|
|
|
|
pub fn default(
|
|
|
|
|
kind: ExpnKind,
|
|
|
|
|
call_site: Span,
|
|
|
|
|
edition: Edition,
|
|
|
|
|
macro_def_id: Option<DefId>,
|
|
|
|
|
) -> ExpnData {
|
2019-08-13 20:56:42 +00:00
|
|
|
|
ExpnData {
|
2019-06-18 22:08:45 +00:00
|
|
|
|
kind,
|
2019-08-13 00:34:46 +00:00
|
|
|
|
parent: ExpnId::root(),
|
|
|
|
|
call_site,
|
2019-06-30 00:05:52 +00:00
|
|
|
|
def_site: DUMMY_SP,
|
2019-06-17 19:18:56 +00:00
|
|
|
|
allow_internal_unstable: None,
|
|
|
|
|
allow_internal_unsafe: false,
|
|
|
|
|
local_inner_macros: false,
|
|
|
|
|
edition,
|
2020-05-22 20:57:25 +00:00
|
|
|
|
macro_def_id,
|
2020-03-17 15:45:02 +00:00
|
|
|
|
krate: LOCAL_CRATE,
|
|
|
|
|
orig_id: None,
|
2020-12-07 22:44:40 +00:00
|
|
|
|
disambiguator: 0,
|
2019-06-17 19:18:56 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2019-12-22 22:42:04 +00:00
|
|
|
|
pub fn allow_unstable(
|
|
|
|
|
kind: ExpnKind,
|
|
|
|
|
call_site: Span,
|
|
|
|
|
edition: Edition,
|
|
|
|
|
allow_internal_unstable: Lrc<[Symbol]>,
|
2020-05-22 20:57:25 +00:00
|
|
|
|
macro_def_id: Option<DefId>,
|
2019-12-22 22:42:04 +00:00
|
|
|
|
) -> ExpnData {
|
2019-08-13 20:56:42 +00:00
|
|
|
|
ExpnData {
|
2019-07-06 18:02:45 +00:00
|
|
|
|
allow_internal_unstable: Some(allow_internal_unstable),
|
2020-05-22 20:57:25 +00:00
|
|
|
|
..ExpnData::default(kind, call_site, edition, macro_def_id)
|
2019-06-17 19:18:56 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
2019-08-11 00:00:05 +00:00
|
|
|
|
|
|
|
|
|
#[inline]
|
|
|
|
|
pub fn is_root(&self) -> bool {
|
2020-09-27 09:54:50 +00:00
|
|
|
|
matches!(self.kind, ExpnKind::Root)
|
2019-08-11 00:00:05 +00:00
|
|
|
|
}
|
2019-06-17 19:18:56 +00:00
|
|
|
|
}
|
|
|
|
|
|
2019-06-30 12:58:56 +00:00
|
|
|
|
/// Expansion kind.
|
2020-06-11 14:49:57 +00:00
|
|
|
|
#[derive(Clone, Debug, PartialEq, Encodable, Decodable, HashStable_Generic)]
|
2019-06-18 22:08:45 +00:00
|
|
|
|
pub enum ExpnKind {
|
2019-07-15 22:04:05 +00:00
|
|
|
|
/// No expansion, aka root expansion. Only `ExpnId::root()` has this kind.
|
2019-07-07 13:45:41 +00:00
|
|
|
|
Root,
|
2019-06-30 12:58:56 +00:00
|
|
|
|
/// Expansion produced by a macro.
|
|
|
|
|
Macro(MacroKind, Symbol),
|
2019-08-25 18:59:51 +00:00
|
|
|
|
/// Transform done by the compiler on the AST.
|
|
|
|
|
AstPass(AstPass),
|
2017-03-17 04:04:41 +00:00
|
|
|
|
/// Desugaring done by the compiler during HIR lowering.
|
2019-12-22 22:42:04 +00:00
|
|
|
|
Desugaring(DesugaringKind),
|
2020-09-19 16:56:32 +00:00
|
|
|
|
/// MIR inlining
|
|
|
|
|
Inlined,
|
2017-08-13 00:43:43 +00:00
|
|
|
|
}
|
|
|
|
|
|
2019-06-18 22:08:45 +00:00
|
|
|
|
impl ExpnKind {
|
2020-01-20 23:02:01 +00:00
|
|
|
|
pub fn descr(&self) -> String {
|
2018-06-23 18:41:39 +00:00
|
|
|
|
match *self {
|
2020-01-20 23:02:01 +00:00
|
|
|
|
ExpnKind::Root => kw::PathRoot.to_string(),
|
|
|
|
|
ExpnKind::Macro(macro_kind, name) => match macro_kind {
|
|
|
|
|
MacroKind::Bang => format!("{}!", name),
|
|
|
|
|
MacroKind::Attr => format!("#[{}]", name),
|
|
|
|
|
MacroKind::Derive => format!("#[derive({})]", name),
|
|
|
|
|
},
|
|
|
|
|
ExpnKind::AstPass(kind) => kind.descr().to_string(),
|
|
|
|
|
ExpnKind::Desugaring(kind) => format!("desugaring of {}", kind.descr()),
|
2020-09-19 16:56:32 +00:00
|
|
|
|
ExpnKind::Inlined => "inlined source".to_string(),
|
2018-06-23 18:41:39 +00:00
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2019-06-18 22:00:49 +00:00
|
|
|
|
/// The kind of macro invocation or definition.
|
2020-06-11 14:49:57 +00:00
|
|
|
|
#[derive(Clone, Copy, PartialEq, Eq, Encodable, Decodable, Hash, Debug)]
|
2020-03-23 14:48:59 +00:00
|
|
|
|
#[derive(HashStable_Generic)]
|
2019-06-18 22:00:49 +00:00
|
|
|
|
pub enum MacroKind {
|
|
|
|
|
/// A bang macro `foo!()`.
|
|
|
|
|
Bang,
|
|
|
|
|
/// An attribute macro `#[foo]`.
|
|
|
|
|
Attr,
|
|
|
|
|
/// A derive macro `#[derive(Foo)]`
|
|
|
|
|
Derive,
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
impl MacroKind {
|
|
|
|
|
pub fn descr(self) -> &'static str {
|
|
|
|
|
match self {
|
|
|
|
|
MacroKind::Bang => "macro",
|
|
|
|
|
MacroKind::Attr => "attribute macro",
|
|
|
|
|
MacroKind::Derive => "derive macro",
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2019-09-15 09:55:18 +00:00
|
|
|
|
pub fn descr_expected(self) -> &'static str {
|
|
|
|
|
match self {
|
|
|
|
|
MacroKind::Attr => "attribute",
|
|
|
|
|
_ => self.descr(),
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2019-06-18 22:00:49 +00:00
|
|
|
|
pub fn article(self) -> &'static str {
|
|
|
|
|
match self {
|
|
|
|
|
MacroKind::Attr => "an",
|
|
|
|
|
_ => "a",
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2019-08-25 18:59:51 +00:00
|
|
|
|
/// The kind of AST transform.
|
2020-06-11 14:49:57 +00:00
|
|
|
|
#[derive(Clone, Copy, Debug, PartialEq, Encodable, Decodable, HashStable_Generic)]
|
2019-08-25 18:59:51 +00:00
|
|
|
|
pub enum AstPass {
|
|
|
|
|
StdImports,
|
|
|
|
|
TestHarness,
|
|
|
|
|
ProcMacroHarness,
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
impl AstPass {
|
|
|
|
|
fn descr(self) -> &'static str {
|
|
|
|
|
match self {
|
|
|
|
|
AstPass::StdImports => "standard library imports",
|
|
|
|
|
AstPass::TestHarness => "test harness",
|
|
|
|
|
AstPass::ProcMacroHarness => "proc macro harness",
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2017-08-13 00:43:43 +00:00
|
|
|
|
/// The kind of compiler desugaring.
|
2020-06-11 14:49:57 +00:00
|
|
|
|
#[derive(Clone, Copy, PartialEq, Debug, Encodable, Decodable, HashStable_Generic)]
|
2019-06-18 22:08:45 +00:00
|
|
|
|
pub enum DesugaringKind {
|
2019-03-11 15:43:27 +00:00
|
|
|
|
/// We desugar `if c { i } else { e }` to `match $ExprKind::Use(c) { true => i, _ => e }`.
|
|
|
|
|
/// However, we do not want to blame `c` for unreachability but rather say that `i`
|
|
|
|
|
/// is unreachable. This desugaring kind allows us to avoid blaming `c`.
|
2019-06-20 08:29:42 +00:00
|
|
|
|
/// This also applies to `while` loops.
|
|
|
|
|
CondTemporary,
|
2017-08-13 00:43:43 +00:00
|
|
|
|
QuestionMark,
|
2018-07-22 01:47:02 +00:00
|
|
|
|
TryBlock,
|
2018-05-22 12:31:56 +00:00
|
|
|
|
/// Desugaring of an `impl Trait` in return type position
|
2019-07-31 23:41:54 +00:00
|
|
|
|
/// to an `type Foo = impl Trait;` and replacing the
|
2018-05-22 12:31:56 +00:00
|
|
|
|
/// `impl Trait` with `Foo`.
|
2019-07-31 23:41:54 +00:00
|
|
|
|
OpaqueTy,
|
2018-06-06 22:50:59 +00:00
|
|
|
|
Async,
|
2019-04-18 19:55:23 +00:00
|
|
|
|
Await,
|
2020-06-11 17:48:46 +00:00
|
|
|
|
ForLoop(ForLoopLoc),
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/// A location in the desugaring of a `for` loop
|
2020-06-11 14:49:57 +00:00
|
|
|
|
#[derive(Clone, Copy, PartialEq, Debug, Encodable, Decodable, HashStable_Generic)]
|
2020-06-11 17:48:46 +00:00
|
|
|
|
pub enum ForLoopLoc {
|
|
|
|
|
Head,
|
|
|
|
|
IntoIter,
|
2017-08-13 00:43:43 +00:00
|
|
|
|
}
|
|
|
|
|
|
2019-06-18 22:08:45 +00:00
|
|
|
|
impl DesugaringKind {
|
2019-07-07 10:02:05 +00:00
|
|
|
|
/// The description wording should combine well with "desugaring of {}".
|
|
|
|
|
fn descr(self) -> &'static str {
|
2019-06-30 12:58:56 +00:00
|
|
|
|
match self {
|
2019-07-07 10:02:05 +00:00
|
|
|
|
DesugaringKind::CondTemporary => "`if` or `while` condition",
|
|
|
|
|
DesugaringKind::Async => "`async` block or function",
|
|
|
|
|
DesugaringKind::Await => "`await` expression",
|
|
|
|
|
DesugaringKind::QuestionMark => "operator `?`",
|
|
|
|
|
DesugaringKind::TryBlock => "`try` block",
|
2019-07-31 23:41:54 +00:00
|
|
|
|
DesugaringKind::OpaqueTy => "`impl Trait`",
|
2020-06-11 17:48:46 +00:00
|
|
|
|
DesugaringKind::ForLoop(_) => "`for` loop",
|
2019-06-30 12:58:56 +00:00
|
|
|
|
}
|
2017-08-13 00:43:43 +00:00
|
|
|
|
}
|
2017-03-17 04:04:41 +00:00
|
|
|
|
}
|
|
|
|
|
|
2020-07-24 07:01:07 +00:00
|
|
|
|
#[derive(Default)]
|
|
|
|
|
pub struct HygieneEncodeContext {
|
2020-08-02 15:20:00 +00:00
|
|
|
|
/// All `SyntaxContexts` for which we have written `SyntaxContextData` into crate metadata.
|
2020-07-24 07:01:07 +00:00
|
|
|
|
/// This is `None` after we finish encoding `SyntaxContexts`, to ensure
|
|
|
|
|
/// that we don't accidentally try to encode any more `SyntaxContexts`
|
|
|
|
|
serialized_ctxts: Lock<FxHashSet<SyntaxContext>>,
|
|
|
|
|
/// The `SyntaxContexts` that we have serialized (e.g. as a result of encoding `Spans`)
|
|
|
|
|
/// in the most recent 'round' of serializnig. Serializing `SyntaxContextData`
|
|
|
|
|
/// may cause us to serialize more `SyntaxContext`s, so serialize in a loop
|
|
|
|
|
/// until we reach a fixed point.
|
|
|
|
|
latest_ctxts: Lock<FxHashSet<SyntaxContext>>,
|
|
|
|
|
|
|
|
|
|
serialized_expns: Lock<FxHashSet<ExpnId>>,
|
|
|
|
|
|
|
|
|
|
latest_expns: Lock<FxHashSet<ExpnId>>,
|
|
|
|
|
}

impl HygieneEncodeContext {
    pub fn encode<
        T,
        R,
        F: FnMut(&mut T, u32, &SyntaxContextData) -> Result<(), R>,
        G: FnMut(&mut T, u32, &ExpnData) -> Result<(), R>,
    >(
        &self,
        encoder: &mut T,
        mut encode_ctxt: F,
        mut encode_expn: G,
    ) -> Result<(), R> {
        // When we serialize a `SyntaxContextData`, we may end up serializing
        // a `SyntaxContext` that we haven't seen before, so keep looping
        // until no new `SyntaxContext`s or `ExpnId`s have been recorded.
        while !self.latest_ctxts.lock().is_empty() || !self.latest_expns.lock().is_empty() {
            debug!(
                "encode_hygiene: Serializing a round of {:?} SyntaxContextDatas: {:?}",
                self.latest_ctxts.lock().len(),
                self.latest_ctxts
            );

            // Consume the current round of `SyntaxContext`s.
            // Drop the `lock()` temporary early.
            let latest_ctxts = { std::mem::take(&mut *self.latest_ctxts.lock()) };

            // It's fine to iterate over a hash set here, because the serialization
            // of the table that we insert data into doesn't depend on insertion
            // order.
            for_all_ctxts_in(latest_ctxts.into_iter(), |(index, ctxt, data)| {
                if self.serialized_ctxts.lock().insert(ctxt) {
                    encode_ctxt(encoder, index, data)?;
                }
                Ok(())
            })?;

            let latest_expns = { std::mem::take(&mut *self.latest_expns.lock()) };

            for_all_expns_in(latest_expns.into_iter(), |index, expn, data| {
                if self.serialized_expns.lock().insert(expn) {
                    encode_expn(encoder, index, data)?;
                }
                Ok(())
            })?;
        }
        debug!("encode_hygiene: Done serializing SyntaxContextData");
        Ok(())
    }
}
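
// A hedged usage sketch of `HygieneEncodeContext::encode` (the `MetadataEncoder`
// type and its `emit_*` methods are hypothetical stand-ins, not part of this crate).
// The caller encodes spans first, which records ids in `latest_ctxts`/`latest_expns`
// via `raw_encode_syntax_context`/`raw_encode_expn_id`, and then drains them here
// until no new ids are discovered:
//
//     hygiene_ctxt.encode(
//         &mut metadata_encoder,
//         |enc: &mut MetadataEncoder, index, ctxt_data| enc.emit_syntax_context(index, ctxt_data),
//         |enc: &mut MetadataEncoder, index, expn_data| enc.emit_expn_data(index, expn_data),
//     )?;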

/// Additional information used to assist in decoding hygiene data
#[derive(Default)]
pub struct HygieneDecodeContext {
    // Maps serialized `SyntaxContext` ids to a `SyntaxContext` in the current
    // global `HygieneData`. When we deserialize a `SyntaxContext`, we need to create
    // a new id in the global `HygieneData`. This map tracks the ID we end up picking,
    // so that multiple occurrences of the same serialized id are decoded to the same
    // `SyntaxContext`.
    remapped_ctxts: Lock<Vec<Option<SyntaxContext>>>,
    // The same as `remapped_ctxts`, but for `ExpnId`s.
    remapped_expns: Lock<Vec<Option<ExpnId>>>,
}
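
// A hedged worked example of the remapping above: if another crate's metadata
// refers to its serialized `SyntaxContext` id 5, the first decode allocates a
// fresh id in this session's `HygieneData` (say `SyntaxContext(42)`) and records
// `remapped_ctxts[5] = Some(SyntaxContext(42))`; every later occurrence of
// serialized id 5 then decodes to that same `SyntaxContext(42)`.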

pub fn decode_expn_id<
    'a,
    D: Decoder,
    F: FnOnce(&mut D, u32) -> Result<ExpnData, D::Error>,
    G: FnOnce(CrateNum) -> &'a HygieneDecodeContext,
>(
    d: &mut D,
    mode: ExpnDataDecodeMode<'a, G>,
    decode_data: F,
) -> Result<ExpnId, D::Error> {
    let index = u32::decode(d)?;
    let context = match mode {
        ExpnDataDecodeMode::IncrComp(context) => context,
        ExpnDataDecodeMode::Metadata(get_context) => {
            let krate = CrateNum::decode(d)?;
            get_context(krate)
        }
    };

    // Do this after decoding, so that we decode a `CrateNum` if necessary.
    if index == ExpnId::root().as_u32() {
        debug!("decode_expn_id: deserialized root");
        return Ok(ExpnId::root());
    }

    let outer_expns = &context.remapped_expns;

    // Ensure that the `lock()` temporary is dropped early.
    {
        if let Some(expn_id) = outer_expns.lock().get(index as usize).copied().flatten() {
            return Ok(expn_id);
        }
    }

    // Don't decode the data inside `HygieneData::with`, since we need to recursively decode
    // other `ExpnId`s.
    let mut expn_data = decode_data(d, index)?;

    let expn_id = HygieneData::with(|hygiene_data| {
        let expn_id = ExpnId(hygiene_data.expn_data.len() as u32);

        // If we just deserialized an `ExpnData` owned by
        // the local crate, its `orig_id` will be stale,
        // so we need to update it to its own value.
        // This only happens when we deserialize the incremental cache,
        // since a crate will never decode its own metadata.
        if expn_data.krate == LOCAL_CRATE {
            expn_data.orig_id = Some(expn_id.0);
        }

        hygiene_data.expn_data.push(Some(expn_data));

        let mut expns = outer_expns.lock();
        let new_len = index as usize + 1;
        if expns.len() < new_len {
            expns.resize(new_len, None);
        }
        expns[index as usize] = Some(expn_id);
        drop(expns);
        expn_id
    });
    Ok(expn_id)
}
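
// A hedged usage sketch of `decode_expn_id` when reading crate metadata (the
// `cstore.hygiene_context(krate)` and `decode_expn_data_at` helpers are
// hypothetical stand-ins for whatever the caller provides):
//
//     let expn_id = decode_expn_id(
//         decoder,
//         ExpnDataDecodeMode::Metadata(|krate| cstore.hygiene_context(krate)),
//         |decoder, index| decode_expn_data_at(decoder, index),
//     )?;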

// Decodes `SyntaxContext`, using the provided `HygieneDecodeContext`
// to track which `SyntaxContext`s we have already decoded.
// The provided closure will be invoked to deserialize a `SyntaxContextData`
// if we haven't already seen the id of the `SyntaxContext` we are deserializing.
pub fn decode_syntax_context<
    D: Decoder,
    F: FnOnce(&mut D, u32) -> Result<SyntaxContextData, D::Error>,
>(
    d: &mut D,
    context: &HygieneDecodeContext,
    decode_data: F,
) -> Result<SyntaxContext, D::Error> {
    let raw_id: u32 = Decodable::decode(d)?;
    if raw_id == 0 {
        debug!("decode_syntax_context: deserialized root");
        // The root is special.
        return Ok(SyntaxContext::root());
    }

    let outer_ctxts = &context.remapped_ctxts;

    // Ensure that the `lock()` temporary is dropped early.
    {
        if let Some(ctxt) = outer_ctxts.lock().get(raw_id as usize).copied().flatten() {
            return Ok(ctxt);
        }
    }

    // Allocate and store the `SyntaxContext` id *before* calling the decoder function,
    // as the `SyntaxContextData` may reference itself.
    let new_ctxt = HygieneData::with(|hygiene_data| {
        let new_ctxt = SyntaxContext(hygiene_data.syntax_context_data.len() as u32);
        // Push a dummy `SyntaxContextData` to ensure that nobody else can get the
        // same ID as us. This will be overwritten after calling `decode_data`.
        hygiene_data.syntax_context_data.push(SyntaxContextData {
            outer_expn: ExpnId::root(),
            outer_transparency: Transparency::Transparent,
            parent: SyntaxContext::root(),
            opaque: SyntaxContext::root(),
            opaque_and_semitransparent: SyntaxContext::root(),
            dollar_crate_name: kw::Empty,
        });
        let mut ctxts = outer_ctxts.lock();
        let new_len = raw_id as usize + 1;
        if ctxts.len() < new_len {
            ctxts.resize(new_len, None);
        }
        ctxts[raw_id as usize] = Some(new_ctxt);
        drop(ctxts);
        new_ctxt
    });

    // Don't try to decode data while holding the lock, since we need to
    // be able to recursively decode a `SyntaxContext`.
    let mut ctxt_data = decode_data(d, raw_id)?;
    // Reset `dollar_crate_name` so that it will be updated by `update_dollar_crate_names`.
    // We don't care what the encoding crate set this to - we want to resolve it
    // from the perspective of the current compilation session.
    ctxt_data.dollar_crate_name = kw::DollarCrate;

    // Overwrite the dummy data with our decoded `SyntaxContextData`.
    HygieneData::with(|hygiene_data| {
        let dummy = std::mem::replace(
            &mut hygiene_data.syntax_context_data[new_ctxt.as_u32() as usize],
            ctxt_data,
        );
        // Make sure nothing weird happened while `decode_data` was running.
        assert_eq!(dummy.dollar_crate_name, kw::Empty);
    });

    Ok(new_ctxt)
}
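
// A hedged worked example of why the dummy entry above is needed: decoding the
// `SyntaxContextData` for serialized id 3 may pull in a `Span` whose context is
// serialized id 3 again (a self-reference). Because `remapped_ctxts[3]` is filled
// in *before* `decode_data` runs, the recursive `decode_syntax_context` call
// returns the freshly allocated id immediately instead of recursing forever or
// allocating a second id for the same serialized context.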

pub fn num_syntax_ctxts() -> usize {
    HygieneData::with(|data| data.syntax_context_data.len())
}

pub fn for_all_ctxts_in<E, F: FnMut((u32, SyntaxContext, &SyntaxContextData)) -> Result<(), E>>(
    ctxts: impl Iterator<Item = SyntaxContext>,
    mut f: F,
) -> Result<(), E> {
    let all_data: Vec<_> = HygieneData::with(|data| {
        ctxts.map(|ctxt| (ctxt, data.syntax_context_data[ctxt.0 as usize].clone())).collect()
    });
    for (ctxt, data) in all_data.into_iter() {
        f((ctxt.0, ctxt, &data))?;
    }
    Ok(())
}

pub fn for_all_expns_in<E, F: FnMut(u32, ExpnId, &ExpnData) -> Result<(), E>>(
    expns: impl Iterator<Item = ExpnId>,
    mut f: F,
) -> Result<(), E> {
    let all_data: Vec<_> = HygieneData::with(|data| {
        expns.map(|expn| (expn, data.expn_data[expn.0 as usize].clone())).collect()
    });
    for (expn, data) in all_data.into_iter() {
        f(expn.0, expn, &data.unwrap_or_else(|| panic!("Missing data for {:?}", expn)))?;
    }
    Ok(())
}

pub fn for_all_data<E, F: FnMut((u32, SyntaxContext, &SyntaxContextData)) -> Result<(), E>>(
    mut f: F,
) -> Result<(), E> {
    let all_data = HygieneData::with(|data| data.syntax_context_data.clone());
    for (i, data) in all_data.into_iter().enumerate() {
        f((i as u32, SyntaxContext(i as u32), &data))?;
    }
    Ok(())
}

impl<E: Encoder> Encodable<E> for ExpnId {
    default fn encode(&self, _: &mut E) -> Result<(), E::Error> {
        panic!("cannot encode `ExpnId` with `{}`", std::any::type_name::<E>());
    }
}

impl<D: Decoder> Decodable<D> for ExpnId {
    default fn decode(_: &mut D) -> Result<Self, D::Error> {
        panic!("cannot decode `ExpnId` with `{}`", std::any::type_name::<D>());
    }
}
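
// These blanket impls deliberately panic: they are `default fn`s, and serializers
// that actually understand hygiene (e.g. the crate-metadata and incremental-cache
// encoders/decoders) are presumably expected to specialize them using the
// `raw_encode_*` / `decode_*` helpers in this module.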

pub fn for_all_expn_data<E, F: FnMut(u32, &ExpnData) -> Result<(), E>>(mut f: F) -> Result<(), E> {
    let all_data = HygieneData::with(|data| data.expn_data.clone());
    for (i, data) in all_data.into_iter().enumerate() {
        f(i as u32, &data.unwrap_or_else(|| panic!("Missing ExpnData!")))?;
    }
    Ok(())
}

pub fn raw_encode_syntax_context<E: Encoder>(
    ctxt: SyntaxContext,
    context: &HygieneEncodeContext,
    e: &mut E,
) -> Result<(), E::Error> {
    if !context.serialized_ctxts.lock().contains(&ctxt) {
        context.latest_ctxts.lock().insert(ctxt);
    }
    ctxt.0.encode(e)
}

pub fn raw_encode_expn_id<E: Encoder>(
    expn: ExpnId,
    context: &HygieneEncodeContext,
    mode: ExpnDataEncodeMode,
    e: &mut E,
) -> Result<(), E::Error> {
    // Record the fact that we need to serialize the corresponding `ExpnData`.
    let needs_data = || {
        if !context.serialized_expns.lock().contains(&expn) {
            context.latest_expns.lock().insert(expn);
        }
    };

    match mode {
        ExpnDataEncodeMode::IncrComp => {
            // Always serialize the `ExpnData` in incr comp mode.
            needs_data();
            expn.0.encode(e)
        }
        ExpnDataEncodeMode::Metadata => {
            let data = expn.expn_data();
            // We only need to serialize the `ExpnData` if it comes from this crate.
            // We currently don't serialize any hygiene information for
            // proc-macro crates: see the `SpecializedEncoder<Span>` impl
            // for crate metadata.
            if data.krate == LOCAL_CRATE {
                needs_data();
            }
            data.orig_id.expect("Missing orig_id").encode(e)?;
            data.krate.encode(e)
        }
    }
}
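
// Hedged illustration of what the two modes write for an `ExpnId`: in `Metadata`
// mode we emit the pair `(orig_id, krate)`, so a downstream crate can look the
// `ExpnData` up in the owning crate's metadata; in `IncrComp` mode we emit only
// the raw index, because the full `ExpnData` table is serialized alongside it by
// the incremental cache.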

pub enum ExpnDataEncodeMode {
    IncrComp,
    Metadata,
}

pub enum ExpnDataDecodeMode<'a, F: FnOnce(CrateNum) -> &'a HygieneDecodeContext> {
    IncrComp(&'a HygieneDecodeContext),
    Metadata(F),
}

impl<'a> ExpnDataDecodeMode<'a, Box<dyn FnOnce(CrateNum) -> &'a HygieneDecodeContext>> {
    pub fn incr_comp(ctxt: &'a HygieneDecodeContext) -> Self {
        ExpnDataDecodeMode::IncrComp(ctxt)
    }
}

impl<E: Encoder> Encodable<E> for SyntaxContext {
    default fn encode(&self, _: &mut E) -> Result<(), E::Error> {
        panic!("cannot encode `SyntaxContext` with `{}`", std::any::type_name::<E>());
    }
}

impl<D: Decoder> Decodable<D> for SyntaxContext {
    default fn decode(_: &mut D) -> Result<Self, D::Error> {
        panic!("cannot decode `SyntaxContext` with `{}`", std::any::type_name::<D>());
    }
}

/// Updates the `disambiguator` field of the corresponding `ExpnData`
/// such that the `Fingerprint` of the `ExpnData` does not collide with
/// any other `ExpnId`s.
///
/// This function is called only when an `ExpnData` is first associated
/// with an `ExpnId` (when the `ExpnId` is initially constructed, or via
/// `set_expn_data`). It is *not* called for foreign `ExpnId`s deserialized
/// from another crate's metadata - since `ExpnData` includes a `krate` field,
/// collisions are only possible between `ExpnId`s within the same crate.
fn update_disambiguator(expn_id: ExpnId) {
    /// A `HashStableContext` which hashes the raw id values for `DefId`
    /// and `CrateNum`, rather than using their computed stable hash.
    ///
    /// This allows us to use the `HashStable` implementation on `ExpnId`
    /// early on in compilation, before we've constructed a `TyCtxt`.
    /// The `Fingerprint`s created by this context are not 'stable', since
    /// the raw `CrateNum` and `DefId` values for an item may change between
    /// sessions due to unrelated changes (e.g. adding/removing a different item).
    ///
    /// However, this is fine for our purposes - we only need to detect
    /// when two `ExpnData`s have the same `Fingerprint`. Since the hashes produced
    /// by this context still obey the properties of `HashStable`, we have that
    /// `hash_stable(expn1, DummyHashStableContext) == hash_stable(expn2, DummyHashStableContext)`
    /// iff `hash_stable(expn1, StableHashingContext) == hash_stable(expn2, StableHashingContext)`.
    ///
    /// This is sufficient for determining when we need to update the disambiguator.
    struct DummyHashStableContext<'a> {
        caching_source_map: CachingSourceMapView<'a>,
    }

    impl<'a> crate::HashStableContext for DummyHashStableContext<'a> {
        fn hash_def_id(&mut self, def_id: DefId, hasher: &mut StableHasher) {
            def_id.krate.as_u32().hash_stable(self, hasher);
            def_id.index.as_u32().hash_stable(self, hasher);
        }

        fn expn_id_cache() -> &'static LocalKey<ExpnIdCache> {
            // This cache is only used by `DummyHashStableContext`,
            // so we won't pollute the cache values of the normal `StableHashingContext`.
            thread_local! {
                static CACHE: ExpnIdCache = Default::default();
            }

            &CACHE
        }

        fn hash_crate_num(&mut self, krate: CrateNum, hasher: &mut StableHasher) {
            krate.as_u32().hash_stable(self, hasher);
        }

        fn hash_spans(&self) -> bool {
            true
        }

        fn span_data_to_lines_and_cols(
            &mut self,
            span: &crate::SpanData,
        ) -> Option<(Lrc<SourceFile>, usize, BytePos, usize, BytePos)> {
            self.caching_source_map.span_data_to_lines_and_cols(span)
        }
    }

    let source_map = SESSION_GLOBALS
        .with(|session_globals| session_globals.source_map.borrow().as_ref().unwrap().clone());

    let mut ctx =
        DummyHashStableContext { caching_source_map: CachingSourceMapView::new(&source_map) };

    let mut hasher = StableHasher::new();

    let expn_data = expn_id.expn_data();
    // This disambiguator should not have been set yet.
    assert_eq!(
        expn_data.disambiguator, 0,
        "Already set disambiguator for ExpnData: {:?}",
        expn_data
    );
    expn_data.hash_stable(&mut ctx, &mut hasher);
    let first_hash = hasher.finish();

    let modified = HygieneData::with(|data| {
        // If this is the first ExpnData with a given hash, then keep our
        // disambiguator at 0 (the default u32 value).
        let disambig = data.expn_data_disambiguators.entry(first_hash).or_default();
        data.expn_data[expn_id.0 as usize].as_mut().unwrap().disambiguator = *disambig;
        *disambig += 1;

        *disambig != 1
    });

    if modified {
        debug!("Set disambiguator for {:?} (hash {:?})", expn_id, first_hash);
        debug!("expn_data = {:?}", expn_id.expn_data());

        // Verify that the new disambiguator makes the hash unique.
        #[cfg(debug_assertions)]
        {
            hasher = StableHasher::new();
            expn_id.expn_data().hash_stable(&mut ctx, &mut hasher);
            let new_hash: Fingerprint = hasher.finish();

            HygieneData::with(|data| {
                assert_eq!(
                    data.expn_data_disambiguators.get(&new_hash),
                    None,
                    "Hash collision after disambiguator update!",
                );
            });
        };
    }
}
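
// A hedged worked example of the disambiguator logic in `update_disambiguator`:
// if three expansions produce identical `ExpnData` (and therefore identical
// fingerprints under `DummyHashStableContext`), the first keeps
// `disambiguator == 0`, the second is assigned 1 and the third 2, at which point
// their fingerprints no longer collide; the `debug_assertions` block re-hashes
// the updated `ExpnData` to confirm this.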