Implement token-based handling of attributes during expansion

This PR modifies the macro expansion infrastructure to handle attributes
in a fully token-based manner. As a result:

* Derive macros no longer lose spans when their input is modified
  by eager cfg-expansion. This is accomplished by performing eager
  cfg-expansion on the token stream that we pass to the derive
  proc-macro.
* Inner attributes now preserve spans in all cases, including when we
  have multiple inner attributes in a row.

This is accomplished through the following changes:

* New structs `AttrAnnotatedTokenStream` and `AttrAnnotatedTokenTree` are introduced.
  These are very similar to a normal `TokenStream` and `TokenTree`, but they also
  track the position of attributes and attribute targets within the stream.
  They are built when we collect tokens during parsing.
  An `AttrAnnotatedTokenStream` is converted to a regular `TokenStream` when
  we invoke a macro (a simplified sketch of this data model follows this list).
* Token capturing and `LazyTokenStream` are modified to work with
  `AttrAnnotatedTokenStream`. A new `ReplaceRange` type is introduced, which
  is created during the parsing of a nested AST node to make the 'outer'
  AST node aware of the attributes and attribute target stored deeper in the token stream.
* When we need to perform eager cfg-expansion (either due to `#[derive]` or
  `#[cfg_eval]`), we tokenize and reparse our target, capturing additional
  information about the locations of `#[cfg]` and `#[cfg_attr]` attributes
  at any depth within the target. This is a performance optimization,
  allowing us to perform less work in the typical case where captured
  tokens never have eager cfg-expansion run.
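
To make the new data model concrete, here is a minimal, self-contained
sketch of the idea (simplified types standing in for the real `rustc_ast`
ones; tokens and attributes are reduced to strings, so this is an
illustration, not the compiler's API). An `Attributes` node records an
attribute target's attributes alongside the target's tokens, and
flattening re-inserts outer attributes in front of the target, which is
essentially what `AttrAnnotatedTokenStream::to_tokenstream` does in the
diff below (inner attributes take a more involved path: they are spliced
into the target's trailing delimited group).

```rust
#[derive(Clone, Debug)]
enum AttrAnnotatedTokenTree {
    // An ordinary token.
    Token(String),
    // A delimited group (the delimiter kind is elided for simplicity).
    Delimited(Vec<AttrAnnotatedTokenTree>),
    // An attribute target: its attributes plus the tokens they apply to.
    Attributes {
        attrs: Vec<String>,
        target: Vec<AttrAnnotatedTokenTree>,
    },
}

// Flatten back to a plain token sequence, as done when invoking a macro.
fn flatten(trees: &[AttrAnnotatedTokenTree], out: &mut Vec<String>) {
    for tree in trees {
        match tree {
            AttrAnnotatedTokenTree::Token(t) => out.push(t.clone()),
            AttrAnnotatedTokenTree::Delimited(inner) => {
                out.push("(".into());
                flatten(inner, out);
                out.push(")".into());
            }
            AttrAnnotatedTokenTree::Attributes { attrs, target } => {
                // Outer attributes go back in front of their target.
                out.extend(attrs.iter().cloned());
                flatten(target, out);
            }
        }
    }
}
```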
Aaron Hill 2020-11-28 18:33:17 -05:00
parent 25ea6be13e
commit a93c4f05de
33 changed files with 2046 additions and 1192 deletions

View File

@ -1,20 +1,32 @@
use super::ptr::P;
use super::token::Nonterminal;
use super::tokenstream::LazyTokenStream;
use super::{Arm, ExprField, FieldDef, GenericParam, Param, PatField, Variant};
use super::{AssocItem, Expr, ForeignItem, Item, Local};
use super::{AssocItem, Expr, ForeignItem, Item, Local, MacCallStmt};
use super::{AttrItem, AttrKind, Block, Pat, Path, Ty, Visibility};
use super::{AttrVec, Attribute, Stmt, StmtKind};
use std::fmt::Debug;
/// An `AstLike` represents an AST node (or some wrapper around
/// an AST node) which stores some combination of attributes
/// and tokens.
pub trait AstLike: Sized {
pub trait AstLike: Sized + Debug {
/// This is `true` if this `AstLike` might support 'custom' (proc-macro) inner
/// attributes. Attributes like `#![cfg]` and `#![cfg_attr]` are not
/// considered 'custom' attributes
///
/// If this is `false`, then this `AstLike` definitely does
/// not support 'custom' inner attributes, which enables some optimizations
/// during token collection.
const SUPPORTS_CUSTOM_INNER_ATTRS: bool;
fn attrs(&self) -> &[Attribute];
fn visit_attrs(&mut self, f: impl FnOnce(&mut Vec<Attribute>));
fn tokens_mut(&mut self) -> Option<&mut Option<LazyTokenStream>>;
}
impl<T: AstLike + 'static> AstLike for P<T> {
const SUPPORTS_CUSTOM_INNER_ATTRS: bool = T::SUPPORTS_CUSTOM_INNER_ATTRS;
fn attrs(&self) -> &[Attribute] {
(**self).attrs()
}
@ -26,6 +38,55 @@ impl<T: AstLike + 'static> AstLike for P<T> {
}
}
impl AstLike for crate::token::Nonterminal {
const SUPPORTS_CUSTOM_INNER_ATTRS: bool = true;
fn attrs(&self) -> &[Attribute] {
match self {
Nonterminal::NtItem(item) => item.attrs(),
Nonterminal::NtStmt(stmt) => stmt.attrs(),
Nonterminal::NtExpr(expr) | Nonterminal::NtLiteral(expr) => expr.attrs(),
Nonterminal::NtPat(_)
| Nonterminal::NtTy(_)
| Nonterminal::NtMeta(_)
| Nonterminal::NtPath(_)
| Nonterminal::NtVis(_)
| Nonterminal::NtTT(_)
| Nonterminal::NtBlock(_)
| Nonterminal::NtIdent(..)
| Nonterminal::NtLifetime(_) => &[],
}
}
fn visit_attrs(&mut self, f: impl FnOnce(&mut Vec<Attribute>)) {
match self {
Nonterminal::NtItem(item) => item.visit_attrs(f),
Nonterminal::NtStmt(stmt) => stmt.visit_attrs(f),
Nonterminal::NtExpr(expr) | Nonterminal::NtLiteral(expr) => expr.visit_attrs(f),
Nonterminal::NtPat(_)
| Nonterminal::NtTy(_)
| Nonterminal::NtMeta(_)
| Nonterminal::NtPath(_)
| Nonterminal::NtVis(_)
| Nonterminal::NtTT(_)
| Nonterminal::NtBlock(_)
| Nonterminal::NtIdent(..)
| Nonterminal::NtLifetime(_) => {}
}
}
fn tokens_mut(&mut self) -> Option<&mut Option<LazyTokenStream>> {
match self {
Nonterminal::NtItem(item) => item.tokens_mut(),
Nonterminal::NtStmt(stmt) => stmt.tokens_mut(),
Nonterminal::NtExpr(expr) | Nonterminal::NtLiteral(expr) => expr.tokens_mut(),
Nonterminal::NtPat(pat) => pat.tokens_mut(),
Nonterminal::NtTy(ty) => ty.tokens_mut(),
Nonterminal::NtMeta(attr_item) => attr_item.tokens_mut(),
Nonterminal::NtPath(path) => path.tokens_mut(),
Nonterminal::NtVis(vis) => vis.tokens_mut(),
_ => panic!("Called tokens_mut on {:?}", self),
}
}
}
fn visit_attrvec(attrs: &mut AttrVec, f: impl FnOnce(&mut Vec<Attribute>)) {
crate::mut_visit::visit_clobber(attrs, |attrs| {
let mut vec = attrs.into();
@ -35,6 +96,10 @@ fn visit_attrvec(attrs: &mut AttrVec, f: impl FnOnce(&mut Vec<Attribute>)) {
}
impl AstLike for StmtKind {
// This might be a `StmtKind::Item`, which contains
// an item that supports inner attrs
const SUPPORTS_CUSTOM_INNER_ATTRS: bool = true;
fn attrs(&self) -> &[Attribute] {
match self {
StmtKind::Local(local) => local.attrs(),
@ -66,6 +131,8 @@ impl AstLike for StmtKind {
}
impl AstLike for Stmt {
const SUPPORTS_CUSTOM_INNER_ATTRS: bool = StmtKind::SUPPORTS_CUSTOM_INNER_ATTRS;
fn attrs(&self) -> &[Attribute] {
self.kind.attrs()
}
@ -79,6 +146,8 @@ impl AstLike for Stmt {
}
impl AstLike for Attribute {
const SUPPORTS_CUSTOM_INNER_ATTRS: bool = false;
fn attrs(&self) -> &[Attribute] {
&[]
}
@ -94,6 +163,8 @@ impl AstLike for Attribute {
}
impl<T: AstLike> AstLike for Option<T> {
const SUPPORTS_CUSTOM_INNER_ATTRS: bool = T::SUPPORTS_CUSTOM_INNER_ATTRS;
fn attrs(&self) -> &[Attribute] {
self.as_ref().map(|inner| inner.attrs()).unwrap_or(&[])
}
@ -127,8 +198,13 @@ impl VecOrAttrVec for AttrVec {
}
macro_rules! derive_has_tokens_and_attrs {
($($ty:path),*) => { $(
(
const SUPPORTS_CUSTOM_INNER_ATTRS: bool = $inner_attrs:literal;
$($ty:path),*
) => { $(
impl AstLike for $ty {
const SUPPORTS_CUSTOM_INNER_ATTRS: bool = $inner_attrs;
fn attrs(&self) -> &[Attribute] {
&self.attrs
}
@ -140,6 +216,7 @@ macro_rules! derive_has_tokens_and_attrs {
fn tokens_mut(&mut self) -> Option<&mut Option<LazyTokenStream>> {
Some(&mut self.tokens)
}
}
)* }
}
@ -147,6 +224,8 @@ macro_rules! derive_has_tokens_and_attrs {
macro_rules! derive_has_attrs_no_tokens {
($($ty:path),*) => { $(
impl AstLike for $ty {
const SUPPORTS_CUSTOM_INNER_ATTRS: bool = false;
fn attrs(&self) -> &[Attribute] {
&self.attrs
}
@ -165,12 +244,13 @@ macro_rules! derive_has_attrs_no_tokens {
macro_rules! derive_has_tokens_no_attrs {
($($ty:path),*) => { $(
impl AstLike for $ty {
const SUPPORTS_CUSTOM_INNER_ATTRS: bool = false;
fn attrs(&self) -> &[Attribute] {
&[]
}
fn visit_attrs(&mut self, _f: impl FnOnce(&mut Vec<Attribute>)) {}
fn tokens_mut(&mut self) -> Option<&mut Option<LazyTokenStream>> {
Some(&mut self.tokens)
}
@ -178,10 +258,18 @@ macro_rules! derive_has_tokens_no_attrs {
)* }
}
// These AST nodes support both inert and active
// attributes, so they also have tokens.
// These ast nodes support both active and inert attributes,
// so they have tokens collected to pass to proc macros
derive_has_tokens_and_attrs! {
Item, Expr, Local, AssocItem, ForeignItem
// Both `Item` and `AssocItem` can have bodies, which
// can contain inner attributes
const SUPPORTS_CUSTOM_INNER_ATTRS: bool = true;
Item, AssocItem, ForeignItem
}
derive_has_tokens_and_attrs! {
const SUPPORTS_CUSTOM_INNER_ATTRS: bool = false;
Local, MacCallStmt, Expr
}
// These ast nodes only support inert attributes, so they don't
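
For orientation, the following is a self-contained analogue of the
`AstLike` pattern shown above (stand-in types replace the real
`Attribute` and `LazyTokenStream`, and the impl is hand-written rather
than macro-generated; a sketch, not compiler code):

```rust
// Stand-ins for the real compiler types.
type Attribute = String;
type LazyTokenStream = Vec<String>;

trait AstLike: Sized {
    // `false` means the node can never carry custom (proc-macro) inner
    // attributes, enabling a fast path during token collection.
    const SUPPORTS_CUSTOM_INNER_ATTRS: bool;
    fn attrs(&self) -> &[Attribute];
    fn visit_attrs(&mut self, f: impl FnOnce(&mut Vec<Attribute>));
    fn tokens_mut(&mut self) -> Option<&mut Option<LazyTokenStream>>;
}

// A node like `Item`: it has attributes and captured tokens, and its
// body may contain custom inner attributes.
struct MyItem {
    attrs: Vec<Attribute>,
    tokens: Option<LazyTokenStream>,
}

impl AstLike for MyItem {
    const SUPPORTS_CUSTOM_INNER_ATTRS: bool = true;
    fn attrs(&self) -> &[Attribute] {
        &self.attrs
    }
    fn visit_attrs(&mut self, f: impl FnOnce(&mut Vec<Attribute>)) {
        f(&mut self.attrs)
    }
    fn tokens_mut(&mut self) -> Option<&mut Option<LazyTokenStream>> {
        Some(&mut self.tokens)
    }
}
```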

View File

@ -6,7 +6,9 @@ use crate::ast::{Lit, LitKind};
use crate::ast::{MacArgs, MacDelimiter, MetaItem, MetaItemKind, NestedMetaItem};
use crate::ast::{Path, PathSegment};
use crate::token::{self, CommentKind, Token};
use crate::tokenstream::{DelimSpan, LazyTokenStream, TokenStream, TokenTree, TreeAndSpacing};
use crate::tokenstream::{AttrAnnotatedTokenStream, AttrAnnotatedTokenTree};
use crate::tokenstream::{DelimSpan, Spacing, TokenTree, TreeAndSpacing};
use crate::tokenstream::{LazyTokenStream, TokenStream};
use rustc_index::bit_set::GrowableBitSet;
use rustc_span::source_map::BytePos;
@ -268,14 +270,18 @@ impl Attribute {
}
}
pub fn tokens(&self) -> TokenStream {
pub fn tokens(&self) -> AttrAnnotatedTokenStream {
match self.kind {
AttrKind::Normal(_, ref tokens) => tokens
.as_ref()
.unwrap_or_else(|| panic!("attribute is missing tokens: {:?}", self))
.create_token_stream(),
AttrKind::DocComment(comment_kind, data) => TokenStream::from(TokenTree::Token(
Token::new(token::DocComment(comment_kind, self.style, data), self.span),
AttrKind::DocComment(comment_kind, data) => AttrAnnotatedTokenStream::from((
AttrAnnotatedTokenTree::Token(Token::new(
token::DocComment(comment_kind, self.style, data),
self.span,
)),
Spacing::Alone,
)),
}
}

View File

@ -630,6 +630,33 @@ pub fn noop_flat_map_param<T: MutVisitor>(mut param: Param, vis: &mut T) -> Smal
smallvec![param]
}
// No `noop_` prefix because there isn't a corresponding method in `MutVisitor`.
pub fn visit_attr_annotated_tt<T: MutVisitor>(tt: &mut AttrAnnotatedTokenTree, vis: &mut T) {
match tt {
AttrAnnotatedTokenTree::Token(token) => {
visit_token(token, vis);
}
AttrAnnotatedTokenTree::Delimited(DelimSpan { open, close }, _delim, tts) => {
vis.visit_span(open);
vis.visit_span(close);
visit_attr_annotated_tts(tts, vis);
}
AttrAnnotatedTokenTree::Attributes(data) => {
for attr in &mut *data.attrs {
match &mut attr.kind {
AttrKind::Normal(_, attr_tokens) => {
visit_lazy_tts(attr_tokens, vis);
}
AttrKind::DocComment(..) => {
vis.visit_span(&mut attr.span);
}
}
}
visit_lazy_tts_opt_mut(Some(&mut data.tokens), vis);
}
}
}
// No `noop_` prefix because there isn't a corresponding method in `MutVisitor`.
pub fn visit_tt<T: MutVisitor>(tt: &mut TokenTree, vis: &mut T) {
match tt {
@ -652,16 +679,30 @@ pub fn visit_tts<T: MutVisitor>(TokenStream(tts): &mut TokenStream, vis: &mut T)
}
}
pub fn visit_lazy_tts<T: MutVisitor>(lazy_tts: &mut Option<LazyTokenStream>, vis: &mut T) {
if vis.token_visiting_enabled() {
visit_opt(lazy_tts, |lazy_tts| {
let mut tts = lazy_tts.create_token_stream();
visit_tts(&mut tts, vis);
*lazy_tts = LazyTokenStream::new(tts);
})
pub fn visit_attr_annotated_tts<T: MutVisitor>(
AttrAnnotatedTokenStream(tts): &mut AttrAnnotatedTokenStream,
vis: &mut T,
) {
if vis.token_visiting_enabled() && !tts.is_empty() {
let tts = Lrc::make_mut(tts);
visit_vec(tts, |(tree, _is_joint)| visit_attr_annotated_tt(tree, vis));
}
}
pub fn visit_lazy_tts_opt_mut<T: MutVisitor>(lazy_tts: Option<&mut LazyTokenStream>, vis: &mut T) {
if vis.token_visiting_enabled() {
if let Some(lazy_tts) = lazy_tts {
let mut tts = lazy_tts.create_token_stream();
visit_attr_annotated_tts(&mut tts, vis);
*lazy_tts = LazyTokenStream::new(tts);
}
}
}
pub fn visit_lazy_tts<T: MutVisitor>(lazy_tts: &mut Option<LazyTokenStream>, vis: &mut T) {
visit_lazy_tts_opt_mut(lazy_tts.as_mut(), vis);
}
// No `noop_` prefix because there isn't a corresponding method in `MutVisitor`.
// Applies ident visitor if it's an ident; applies other visits to interpolated nodes.
// In practice the ident part is not actually used by specific visitors right now,

View File

@ -14,6 +14,7 @@
//! ownership of the original.
use crate::token::{self, DelimToken, Token, TokenKind};
use crate::AttrVec;
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_data_structures::sync::{self, Lrc};
@ -123,11 +124,11 @@ where
}
pub trait CreateTokenStream: sync::Send + sync::Sync {
fn create_token_stream(&self) -> TokenStream;
fn create_token_stream(&self) -> AttrAnnotatedTokenStream;
}
impl CreateTokenStream for TokenStream {
fn create_token_stream(&self) -> TokenStream {
impl CreateTokenStream for AttrAnnotatedTokenStream {
fn create_token_stream(&self) -> AttrAnnotatedTokenStream {
self.clone()
}
}
@ -143,14 +144,14 @@ impl LazyTokenStream {
LazyTokenStream(Lrc::new(Box::new(inner)))
}
pub fn create_token_stream(&self) -> TokenStream {
pub fn create_token_stream(&self) -> AttrAnnotatedTokenStream {
self.0.create_token_stream()
}
}
impl fmt::Debug for LazyTokenStream {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Debug::fmt("LazyTokenStream", f)
write!(f, "LazyTokenStream({:?})", self.create_token_stream())
}
}
@ -173,6 +174,145 @@ impl<CTX> HashStable<CTX> for LazyTokenStream {
}
}
/// An `AttrAnnotatedTokenStream` is similar to a `TokenStream`, but with extra
/// information about the tokens for attribute targets. This is used
/// during expansion to perform early cfg-expansion, and to process attributes
/// during proc-macro invocations.
#[derive(Clone, Debug, Default, Encodable, Decodable)]
pub struct AttrAnnotatedTokenStream(pub Lrc<Vec<(AttrAnnotatedTokenTree, Spacing)>>);
/// Like `TokenTree`, but for `AttrAnnotatedTokenStream`
#[derive(Clone, Debug, Encodable, Decodable)]
pub enum AttrAnnotatedTokenTree {
Token(Token),
Delimited(DelimSpan, DelimToken, AttrAnnotatedTokenStream),
/// Stores the attributes for an attribute target,
/// along with the tokens for that attribute target.
/// See `AttributesData` for more information
Attributes(AttributesData),
}
impl AttrAnnotatedTokenStream {
pub fn new(tokens: Vec<(AttrAnnotatedTokenTree, Spacing)>) -> AttrAnnotatedTokenStream {
AttrAnnotatedTokenStream(Lrc::new(tokens))
}
/// Converts this `AttrAnnotatedTokenStream` to a plain `TokenStream`.
/// During conversion, `AttrAnnotatedTokenTree::Attributes` get 'flattened'
/// back to a `TokenStream` of the form `outer_attr attr_target`.
/// If there are inner attributes, they are inserted into the proper
/// place in the attribute target tokens.
pub fn to_tokenstream(&self) -> TokenStream {
let trees: Vec<_> = self
.0
.iter()
.flat_map(|tree| match &tree.0 {
AttrAnnotatedTokenTree::Token(inner) => {
smallvec![(TokenTree::Token(inner.clone()), tree.1)].into_iter()
}
AttrAnnotatedTokenTree::Delimited(span, delim, stream) => smallvec![(
TokenTree::Delimited(*span, *delim, stream.to_tokenstream()),
tree.1,
)]
.into_iter(),
AttrAnnotatedTokenTree::Attributes(data) => {
let mut outer_attrs = Vec::new();
let mut inner_attrs = Vec::new();
let attrs: Vec<_> = data.attrs.clone().into();
for attr in attrs {
match attr.style {
crate::AttrStyle::Outer => {
assert!(
inner_attrs.len() == 0,
"Found outer attribute {:?} after inner attrs {:?}",
attr,
inner_attrs
);
outer_attrs.push(attr);
}
crate::AttrStyle::Inner => {
inner_attrs.push(attr);
}
}
}
let mut target_tokens: Vec<_> = data
.tokens
.create_token_stream()
.to_tokenstream()
.0
.iter()
.cloned()
.collect();
if !inner_attrs.is_empty() {
let mut found = false;
// Check the last two trees (to account for a trailing semi)
for (tree, _) in target_tokens.iter_mut().rev().take(2) {
if let TokenTree::Delimited(span, delim, delim_tokens) = tree {
// Inner attributes are only supported on extern blocks, functions, impls,
// and modules. All of these have their inner attributes placed at
// the beginning of the rightmost outermost braced group:
// e.g. fn foo() { #![my_attr] }
//
// Therefore, we can insert them back into the right location
// without needing to do any extra position tracking.
//
// Note: Outline modules are an exception - they can
// have attributes like `#![my_attr]` at the start of a file.
// Support for custom attributes in this position is not
// properly implemented - we always synthesize fake tokens,
// so we never reach this code.
let mut builder = TokenStreamBuilder::new();
for inner_attr in &inner_attrs {
builder.push(inner_attr.tokens().to_tokenstream());
}
builder.push(delim_tokens.clone());
*tree = TokenTree::Delimited(*span, *delim, builder.build());
found = true;
break;
}
}
assert!(
found,
"Failed to find trailing delimited group in: {:?}",
target_tokens
);
}
let mut flat: SmallVec<[_; 1]> = SmallVec::new();
for attr in outer_attrs {
// FIXME: Make this more efficient
flat.extend(attr.tokens().to_tokenstream().0.clone().iter().cloned());
}
flat.extend(target_tokens);
flat.into_iter()
}
})
.collect();
TokenStream::new(trees)
}
}
/// Stores the tokens for an attribute target, along
/// with its attributes.
///
/// This is constructed during parsing when we need to capture
/// tokens.
///
/// For example, `#[cfg(FALSE)] struct Foo {}` would
/// have an `attrs` field containing the `#[cfg(FALSE)]` attr,
/// and a `tokens` field storing the (unparsed) tokens `struct Foo {}`.
#[derive(Clone, Debug, Encodable, Decodable)]
pub struct AttributesData {
/// Attributes, both outer and inner.
/// These are stored in the original order that they were parsed in.
pub attrs: AttrVec,
/// The underlying tokens for the attribute target that `attrs`
/// are applied to
pub tokens: LazyTokenStream,
}
/// A `TokenStream` is an abstract sequence of tokens, organized into [`TokenTree`]s.
///
/// The goal is for procedural macros to work with `TokenStream`s and `TokenTree`s
@ -235,6 +375,12 @@ impl TokenStream {
}
}
impl From<(AttrAnnotatedTokenTree, Spacing)> for AttrAnnotatedTokenStream {
fn from((tree, spacing): (AttrAnnotatedTokenTree, Spacing)) -> AttrAnnotatedTokenStream {
AttrAnnotatedTokenStream::new(vec![(tree, spacing)])
}
}
impl From<TokenTree> for TokenStream {
fn from(tree: TokenTree) -> TokenStream {
TokenStream::new(vec![(tree, Spacing::Alone)])
@ -457,6 +603,10 @@ impl Cursor {
}
}
pub fn index(&self) -> usize {
self.index
}
pub fn append(&mut self, new_stream: TokenStream) {
if new_stream.is_empty() {
return;

View File

@ -37,8 +37,8 @@
#![recursion_limit = "256"]
use rustc_ast::node_id::NodeMap;
use rustc_ast::token::{self, DelimToken, Nonterminal, Token};
use rustc_ast::tokenstream::{CanSynthesizeMissingTokens, DelimSpan, TokenStream, TokenTree};
use rustc_ast::token::{self, Token};
use rustc_ast::tokenstream::{CanSynthesizeMissingTokens, TokenStream, TokenTree};
use rustc_ast::visit::{self, AssocCtxt, Visitor};
use rustc_ast::walk_list;
use rustc_ast::{self as ast, *};
@ -56,7 +56,7 @@ use rustc_hir::{ConstArg, GenericArg, ParamName};
use rustc_index::vec::{Idx, IndexVec};
use rustc_session::lint::builtin::{BARE_TRAIT_OBJECTS, MISSING_ABI};
use rustc_session::lint::{BuiltinLintDiagnostics, LintBuffer};
use rustc_session::parse::ParseSess;
use rustc_session::utils::{FlattenNonterminals, NtToTokenstream};
use rustc_session::Session;
use rustc_span::hygiene::ExpnId;
use rustc_span::source_map::{respan, DesugaringKind};
@ -213,8 +213,6 @@ pub trait ResolverAstLowering {
) -> LocalDefId;
}
type NtToTokenstream = fn(&Nonterminal, &ParseSess, CanSynthesizeMissingTokens) -> TokenStream;
/// Context of `impl Trait` in code, which determines whether it is allowed in an HIR subtree,
/// and if so, what meaning it has.
#[derive(Debug)]
@ -403,42 +401,6 @@ enum AnonymousLifetimeMode {
PassThrough,
}
struct TokenStreamLowering<'a> {
parse_sess: &'a ParseSess,
synthesize_tokens: CanSynthesizeMissingTokens,
nt_to_tokenstream: NtToTokenstream,
}
impl<'a> TokenStreamLowering<'a> {
fn lower_token_stream(&mut self, tokens: TokenStream) -> TokenStream {
tokens.into_trees().flat_map(|tree| self.lower_token_tree(tree).into_trees()).collect()
}
fn lower_token_tree(&mut self, tree: TokenTree) -> TokenStream {
match tree {
TokenTree::Token(token) => self.lower_token(token),
TokenTree::Delimited(span, delim, tts) => {
TokenTree::Delimited(span, delim, self.lower_token_stream(tts)).into()
}
}
}
fn lower_token(&mut self, token: Token) -> TokenStream {
match token.kind {
token::Interpolated(nt) => {
let tts = (self.nt_to_tokenstream)(&nt, self.parse_sess, self.synthesize_tokens);
TokenTree::Delimited(
DelimSpan::from_single(token.span),
DelimToken::NoDelim,
self.lower_token_stream(tts),
)
.into()
}
_ => TokenTree::Token(token).into(),
}
}
}
impl<'a, 'hir> LoweringContext<'a, 'hir> {
fn lower_crate(mut self, c: &Crate) -> hir::Crate<'hir> {
/// Full-crate AST visitor that inserts into a fresh
@ -1037,12 +999,12 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
}
}
let tokens = TokenStreamLowering {
let tokens = FlattenNonterminals {
parse_sess: &self.sess.parse_sess,
synthesize_tokens: CanSynthesizeMissingTokens::Yes,
nt_to_tokenstream: self.nt_to_tokenstream,
}
.lower_token(token.clone());
.process_token(token.clone());
MacArgs::Eq(eq_span, unwrap_single_token(self.sess, tokens, token.span))
}
}
@ -1053,12 +1015,12 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
tokens: TokenStream,
synthesize_tokens: CanSynthesizeMissingTokens,
) -> TokenStream {
TokenStreamLowering {
FlattenNonterminals {
parse_sess: &self.sess.parse_sess,
synthesize_tokens,
nt_to_tokenstream: self.nt_to_tokenstream,
}
.lower_token_stream(tokens)
.process_token_stream(tokens)
}
/// Given an associated type constraint like one of these:

View File

@ -1,11 +1,18 @@
use crate::util::check_builtin_macro_attribute;
use rustc_ast::mut_visit::{self, MutVisitor};
use rustc_ast::ptr::P;
use rustc_ast::{self as ast, AstLike};
use rustc_ast as ast;
use rustc_ast::mut_visit::MutVisitor;
use rustc_ast::tokenstream::CanSynthesizeMissingTokens;
use rustc_ast::visit::Visitor;
use rustc_ast::{mut_visit, visit};
use rustc_ast::{AstLike, Attribute};
use rustc_expand::base::{Annotatable, ExtCtxt};
use rustc_expand::config::StripUnconfigured;
use rustc_expand::configure;
use rustc_parse::parser::ForceCollect;
use rustc_session::utils::FlattenNonterminals;
use rustc_ast::ptr::P;
use rustc_span::symbol::sym;
use rustc_span::Span;
use smallvec::SmallVec;
@ -22,74 +29,179 @@ crate fn expand(
crate fn cfg_eval(ecx: &ExtCtxt<'_>, annotatable: Annotatable) -> Vec<Annotatable> {
let mut visitor = CfgEval {
cfg: StripUnconfigured { sess: ecx.sess, features: ecx.ecfg.features, modified: false },
cfg: &mut StripUnconfigured {
sess: ecx.sess,
features: ecx.ecfg.features,
config_tokens: true,
},
};
let mut annotatable = visitor.configure_annotatable(annotatable);
if visitor.cfg.modified {
// Erase the tokens if cfg-stripping modified the item
// This will cause us to synthesize fake tokens
// when `nt_to_tokenstream` is called on this item.
if let Some(tokens) = annotatable.tokens_mut() {
*tokens = None;
}
}
let annotatable = visitor.configure_annotatable(annotatable);
vec![annotatable]
}
struct CfgEval<'a> {
cfg: StripUnconfigured<'a>,
struct CfgEval<'a, 'b> {
cfg: &'a mut StripUnconfigured<'b>,
}
impl CfgEval<'_> {
fn flat_map_annotatable(vis: &mut impl MutVisitor, annotatable: Annotatable) -> Annotatable {
// Since the item itself has already been configured by the InvocationCollector,
// we know that the fold result vector will contain exactly one element
match annotatable {
Annotatable::Item(item) => Annotatable::Item(vis.flat_map_item(item).pop().unwrap()),
Annotatable::TraitItem(item) => {
Annotatable::TraitItem(vis.flat_map_trait_item(item).pop().unwrap())
}
Annotatable::ImplItem(item) => {
Annotatable::ImplItem(vis.flat_map_impl_item(item).pop().unwrap())
}
Annotatable::ForeignItem(item) => {
Annotatable::ForeignItem(vis.flat_map_foreign_item(item).pop().unwrap())
}
Annotatable::Stmt(stmt) => {
Annotatable::Stmt(stmt.map(|stmt| vis.flat_map_stmt(stmt).pop().unwrap()))
}
Annotatable::Expr(mut expr) => Annotatable::Expr({
vis.visit_expr(&mut expr);
expr
}),
Annotatable::Arm(arm) => Annotatable::Arm(vis.flat_map_arm(arm).pop().unwrap()),
Annotatable::ExprField(field) => {
Annotatable::ExprField(vis.flat_map_expr_field(field).pop().unwrap())
}
Annotatable::PatField(fp) => {
Annotatable::PatField(vis.flat_map_pat_field(fp).pop().unwrap())
}
Annotatable::GenericParam(param) => {
Annotatable::GenericParam(vis.flat_map_generic_param(param).pop().unwrap())
}
Annotatable::Param(param) => Annotatable::Param(vis.flat_map_param(param).pop().unwrap()),
Annotatable::FieldDef(sf) => {
Annotatable::FieldDef(vis.flat_map_field_def(sf).pop().unwrap())
}
Annotatable::Variant(v) => Annotatable::Variant(vis.flat_map_variant(v).pop().unwrap()),
}
}
struct CfgFinder {
has_cfg_or_cfg_attr: bool,
}
impl CfgFinder {
fn has_cfg_or_cfg_attr(annotatable: &Annotatable) -> bool {
let mut finder = CfgFinder { has_cfg_or_cfg_attr: false };
match annotatable {
Annotatable::Item(item) => finder.visit_item(&item),
Annotatable::TraitItem(item) => finder.visit_assoc_item(&item, visit::AssocCtxt::Trait),
Annotatable::ImplItem(item) => finder.visit_assoc_item(&item, visit::AssocCtxt::Impl),
Annotatable::ForeignItem(item) => finder.visit_foreign_item(&item),
Annotatable::Stmt(stmt) => finder.visit_stmt(&stmt),
Annotatable::Expr(expr) => finder.visit_expr(&expr),
Annotatable::Arm(arm) => finder.visit_arm(&arm),
Annotatable::ExprField(field) => finder.visit_expr_field(&field),
Annotatable::PatField(field) => finder.visit_pat_field(&field),
Annotatable::GenericParam(param) => finder.visit_generic_param(&param),
Annotatable::Param(param) => finder.visit_param(&param),
Annotatable::FieldDef(field) => finder.visit_field_def(&field),
Annotatable::Variant(variant) => finder.visit_variant(&variant),
};
finder.has_cfg_or_cfg_attr
}
}
impl<'ast> visit::Visitor<'ast> for CfgFinder {
fn visit_attribute(&mut self, attr: &'ast Attribute) {
// We want short-circuiting behavior, so don't use the '|=' operator.
self.has_cfg_or_cfg_attr = self.has_cfg_or_cfg_attr
|| attr
.ident()
.map_or(false, |ident| ident.name == sym::cfg || ident.name == sym::cfg_attr);
}
}
impl CfgEval<'_, '_> {
fn configure<T: AstLike>(&mut self, node: T) -> Option<T> {
self.cfg.configure(node)
}
fn configure_annotatable(&mut self, annotatable: Annotatable) -> Annotatable {
// Since the item itself has already been configured by the InvocationCollector,
// we know that the fold result vector will contain exactly one element
match annotatable {
Annotatable::Item(item) => Annotatable::Item(self.flat_map_item(item).pop().unwrap()),
Annotatable::TraitItem(item) => {
Annotatable::TraitItem(self.flat_map_trait_item(item).pop().unwrap())
}
Annotatable::ImplItem(item) => {
Annotatable::ImplItem(self.flat_map_impl_item(item).pop().unwrap())
}
Annotatable::ForeignItem(item) => {
Annotatable::ForeignItem(self.flat_map_foreign_item(item).pop().unwrap())
}
Annotatable::Stmt(stmt) => {
Annotatable::Stmt(stmt.map(|stmt| self.flat_map_stmt(stmt).pop().unwrap()))
}
Annotatable::Expr(mut expr) => Annotatable::Expr({
self.visit_expr(&mut expr);
expr
}),
Annotatable::Arm(arm) => Annotatable::Arm(self.flat_map_arm(arm).pop().unwrap()),
Annotatable::ExprField(field) => {
Annotatable::ExprField(self.flat_map_expr_field(field).pop().unwrap())
}
Annotatable::PatField(fp) => {
Annotatable::PatField(self.flat_map_pat_field(fp).pop().unwrap())
}
Annotatable::GenericParam(param) => {
Annotatable::GenericParam(self.flat_map_generic_param(param).pop().unwrap())
}
Annotatable::Param(param) => {
Annotatable::Param(self.flat_map_param(param).pop().unwrap())
}
Annotatable::FieldDef(sf) => {
Annotatable::FieldDef(self.flat_map_field_def(sf).pop().unwrap())
}
Annotatable::Variant(v) => {
Annotatable::Variant(self.flat_map_variant(v).pop().unwrap())
}
pub fn configure_annotatable(&mut self, mut annotatable: Annotatable) -> Annotatable {
// Tokenizing and re-parsing the `Annotatable` can have a significant
// performance impact, so try to avoid it if possible
if !CfgFinder::has_cfg_or_cfg_attr(&annotatable) {
return annotatable;
}
// The majority of parsed attribute targets will never need to have early cfg-expansion
// run (e.g. they are not part of a `#[derive]` or `#[cfg_eval]` macro input).
// Therefore, we normally do not capture the necessary information about `#[cfg]`
// and `#[cfg_attr]` attributes during parsing.
//
// So when we actually *do* run early cfg-expansion, we need to tokenize
// and re-parse the attribute target, this time capturing information about
// the location of `#[cfg]` and `#[cfg_attr]` in the token stream. The tokenization
// process is lossless, so it is invisible to proc-macros.
// FIXME - get rid of this clone
let nt = annotatable.clone().into_nonterminal();
let mut orig_tokens = rustc_parse::nt_to_tokenstream(
&nt,
&self.cfg.sess.parse_sess,
CanSynthesizeMissingTokens::No,
);
// 'Flatten' all nonterminals (i.e. `TokenKind::Interpolated`)
// to `None`-delimited groups containing the corresponding tokens. This
// is normally delayed until the proc-macro server actually needs to
// provide a `TokenKind::Interpolated` to a proc-macro. We do this earlier,
// so that we can handle cases like:
//
// ```rust
// #[cfg_eval] #[cfg] $item
// ```
//
// where `$item` is `#[cfg_attr] struct Foo {}`. We want to make
// sure to evaluate *all* `#[cfg]` and `#[cfg_attr]` attributes - the simplest
// way to do this is to do a single parse of a stream without any nonterminals.
let mut flatten = FlattenNonterminals {
nt_to_tokenstream: rustc_parse::nt_to_tokenstream,
parse_sess: &self.cfg.sess.parse_sess,
synthesize_tokens: CanSynthesizeMissingTokens::No,
};
orig_tokens = flatten.process_token_stream(orig_tokens);
// Re-parse the tokens, setting the `capture_cfg` flag to save extra information
// to the captured `AttrAnnotatedTokenStream` (specifically, we capture
// `AttrAnnotatedTokenTree::AttributesData` for all occurrences of `#[cfg]` and `#[cfg_attr]`)
let mut parser =
rustc_parse::stream_to_parser(&self.cfg.sess.parse_sess, orig_tokens, None);
parser.capture_cfg = true;
annotatable = match annotatable {
Annotatable::Item(_) => {
Annotatable::Item(parser.parse_item(ForceCollect::Yes).unwrap().unwrap())
}
Annotatable::TraitItem(_) => Annotatable::TraitItem(
parser.parse_trait_item(ForceCollect::Yes).unwrap().unwrap().unwrap(),
),
Annotatable::ImplItem(_) => Annotatable::ImplItem(
parser.parse_impl_item(ForceCollect::Yes).unwrap().unwrap().unwrap(),
),
Annotatable::ForeignItem(_) => Annotatable::ForeignItem(
parser.parse_foreign_item(ForceCollect::Yes).unwrap().unwrap().unwrap(),
),
Annotatable::Stmt(_) => {
Annotatable::Stmt(P(parser.parse_stmt(ForceCollect::Yes).unwrap().unwrap()))
}
Annotatable::Expr(_) => Annotatable::Expr(parser.parse_expr_force_collect().unwrap()),
_ => unreachable!(),
};
// Now that we have our re-parsed `AttrAnnotatedTokenStream`, recursively configuring
// our attribute target will correctly configure the tokens as well.
flat_map_annotatable(self, annotatable)
}
}
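
As a usage-level illustration of what `cfg_eval` does (assuming the
unstable `cfg_eval` feature on a nightly toolchain; a sketch, not a test
from this PR):

```rust
#![feature(cfg_eval)]

// `#[cfg_eval]` eagerly evaluates `#[cfg]`/`#[cfg_attr]` inside its
// target, so any attribute macro applied after it sees the
// already-stripped tokens, with original spans intact.
#[cfg_eval]
struct S {
    #[cfg(FALSE)] // removed during eager cfg-expansion
    removed: u8,
    kept: bool,
}
```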
impl MutVisitor for CfgEval<'_> {
impl MutVisitor for CfgEval<'_, '_> {
fn visit_expr(&mut self, expr: &mut P<ast::Expr>) {
self.cfg.configure_expr(expr);
mut_visit::noop_visit_expr(expr, self);

View File

@ -3,7 +3,7 @@ use crate::module::DirOwnership;
use rustc_ast::ptr::P;
use rustc_ast::token::{self, Nonterminal};
use rustc_ast::tokenstream::{CanSynthesizeMissingTokens, LazyTokenStream, TokenStream};
use rustc_ast::tokenstream::{CanSynthesizeMissingTokens, TokenStream};
use rustc_ast::visit::{AssocCtxt, Visitor};
use rustc_ast::{self as ast, AstLike, Attribute, Item, NodeId, PatKind};
use rustc_attr::{self as attr, Deprecation, Stability};
@ -46,62 +46,6 @@ pub enum Annotatable {
Variant(ast::Variant),
}
impl AstLike for Annotatable {
fn attrs(&self) -> &[Attribute] {
match *self {
Annotatable::Item(ref item) => &item.attrs,
Annotatable::TraitItem(ref trait_item) => &trait_item.attrs,
Annotatable::ImplItem(ref impl_item) => &impl_item.attrs,
Annotatable::ForeignItem(ref foreign_item) => &foreign_item.attrs,
Annotatable::Stmt(ref stmt) => stmt.attrs(),
Annotatable::Expr(ref expr) => &expr.attrs,
Annotatable::Arm(ref arm) => &arm.attrs,
Annotatable::ExprField(ref field) => &field.attrs,
Annotatable::PatField(ref fp) => &fp.attrs,
Annotatable::GenericParam(ref gp) => &gp.attrs,
Annotatable::Param(ref p) => &p.attrs,
Annotatable::FieldDef(ref sf) => &sf.attrs,
Annotatable::Variant(ref v) => &v.attrs(),
}
}
fn visit_attrs(&mut self, f: impl FnOnce(&mut Vec<Attribute>)) {
match self {
Annotatable::Item(item) => item.visit_attrs(f),
Annotatable::TraitItem(trait_item) => trait_item.visit_attrs(f),
Annotatable::ImplItem(impl_item) => impl_item.visit_attrs(f),
Annotatable::ForeignItem(foreign_item) => foreign_item.visit_attrs(f),
Annotatable::Stmt(stmt) => stmt.visit_attrs(f),
Annotatable::Expr(expr) => expr.visit_attrs(f),
Annotatable::Arm(arm) => arm.visit_attrs(f),
Annotatable::ExprField(field) => field.visit_attrs(f),
Annotatable::PatField(fp) => fp.visit_attrs(f),
Annotatable::GenericParam(gp) => gp.visit_attrs(f),
Annotatable::Param(p) => p.visit_attrs(f),
Annotatable::FieldDef(sf) => sf.visit_attrs(f),
Annotatable::Variant(v) => v.visit_attrs(f),
}
}
fn tokens_mut(&mut self) -> Option<&mut Option<LazyTokenStream>> {
match self {
Annotatable::Item(item) => item.tokens_mut(),
Annotatable::TraitItem(trait_item) => trait_item.tokens_mut(),
Annotatable::ImplItem(impl_item) => impl_item.tokens_mut(),
Annotatable::ForeignItem(foreign_item) => foreign_item.tokens_mut(),
Annotatable::Stmt(stmt) => stmt.tokens_mut(),
Annotatable::Expr(expr) => expr.tokens_mut(),
Annotatable::Arm(arm) => arm.tokens_mut(),
Annotatable::ExprField(field) => field.tokens_mut(),
Annotatable::PatField(fp) => fp.tokens_mut(),
Annotatable::GenericParam(gp) => gp.tokens_mut(),
Annotatable::Param(p) => p.tokens_mut(),
Annotatable::FieldDef(sf) => sf.tokens_mut(),
Annotatable::Variant(v) => v.tokens_mut(),
}
}
}
impl Annotatable {
pub fn span(&self) -> Span {
match *self {
@ -121,6 +65,24 @@ impl Annotatable {
}
}
pub fn visit_attrs(&mut self, f: impl FnOnce(&mut Vec<Attribute>)) {
match self {
Annotatable::Item(item) => item.visit_attrs(f),
Annotatable::TraitItem(trait_item) => trait_item.visit_attrs(f),
Annotatable::ImplItem(impl_item) => impl_item.visit_attrs(f),
Annotatable::ForeignItem(foreign_item) => foreign_item.visit_attrs(f),
Annotatable::Stmt(stmt) => stmt.visit_attrs(f),
Annotatable::Expr(expr) => expr.visit_attrs(f),
Annotatable::Arm(arm) => arm.visit_attrs(f),
Annotatable::ExprField(field) => field.visit_attrs(f),
Annotatable::PatField(fp) => fp.visit_attrs(f),
Annotatable::GenericParam(gp) => gp.visit_attrs(f),
Annotatable::Param(p) => p.visit_attrs(f),
Annotatable::FieldDef(sf) => sf.visit_attrs(f),
Annotatable::Variant(v) => v.visit_attrs(f),
}
}
pub fn visit_with<'a, V: Visitor<'a>>(&'a self, visitor: &mut V) {
match self {
Annotatable::Item(item) => visitor.visit_item(item),
@ -139,7 +101,7 @@ impl Annotatable {
}
}
crate fn into_nonterminal(self) -> Nonterminal {
pub fn into_nonterminal(self) -> Nonterminal {
match self {
Annotatable::Item(item) => token::NtItem(item),
Annotatable::TraitItem(item) | Annotatable::ImplItem(item) => {
@ -161,10 +123,7 @@ impl Annotatable {
}
crate fn into_tokens(self, sess: &ParseSess) -> TokenStream {
// Tokens of an attribute target may be invalidated by some outer `#[derive]` performing
// "full configuration" (attributes following derives on the same item should be the most
// common case), that's why synthesizing tokens is allowed.
nt_to_tokenstream(&self.into_nonterminal(), sess, CanSynthesizeMissingTokens::Yes)
nt_to_tokenstream(&self.into_nonterminal(), sess, CanSynthesizeMissingTokens::No)
}
pub fn expect_item(self) -> P<ast::Item> {

View File

@ -2,8 +2,10 @@
use rustc_ast::ptr::P;
use rustc_ast::token::{DelimToken, Token, TokenKind};
use rustc_ast::tokenstream::{DelimSpan, LazyTokenStream, Spacing, TokenStream, TokenTree};
use rustc_ast::{self as ast, AstLike, AttrItem, Attribute, MetaItem};
use rustc_ast::tokenstream::{AttrAnnotatedTokenStream, AttrAnnotatedTokenTree};
use rustc_ast::tokenstream::{DelimSpan, Spacing};
use rustc_ast::tokenstream::{LazyTokenStream, TokenTree};
use rustc_ast::{self as ast, AstLike, AttrItem, AttrStyle, Attribute, MetaItem};
use rustc_attr as attr;
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::map_in_place::MapInPlace;
@ -23,7 +25,10 @@ use rustc_span::{Span, DUMMY_SP};
pub struct StripUnconfigured<'a> {
pub sess: &'a Session,
pub features: Option<&'a Features>,
pub modified: bool,
/// If `true`, perform cfg-stripping on attached tokens.
/// This is only used for the input to derive macros,
/// which needs eager expansion of `cfg` and `cfg_attr`
pub config_tokens: bool,
}
fn get_features(
@ -194,7 +199,7 @@ fn get_features(
// `cfg_attr`-process the crate's attributes and compute the crate's features.
pub fn features(sess: &Session, mut krate: ast::Crate) -> (ast::Crate, Features) {
let mut strip_unconfigured = StripUnconfigured { sess, features: None, modified: false };
let mut strip_unconfigured = StripUnconfigured { sess, features: None, config_tokens: false };
let unconfigured_attrs = krate.attrs.clone();
let diag = &sess.parse_sess.span_diagnostic;
@ -241,24 +246,83 @@ impl<'a> StripUnconfigured<'a> {
pub fn configure<T: AstLike>(&mut self, mut node: T) -> Option<T> {
self.process_cfg_attrs(&mut node);
if self.in_cfg(node.attrs()) {
self.try_configure_tokens(&mut node);
Some(node)
} else {
self.modified = true;
None
}
}
fn try_configure_tokens<T: AstLike>(&mut self, node: &mut T) {
if self.config_tokens {
if let Some(Some(tokens)) = node.tokens_mut() {
let attr_annotated_tokens = tokens.create_token_stream();
*tokens = LazyTokenStream::new(self.configure_tokens(&attr_annotated_tokens));
}
}
}
fn configure_krate_attrs(
&mut self,
mut attrs: Vec<ast::Attribute>,
) -> Option<Vec<ast::Attribute>> {
attrs.flat_map_in_place(|attr| self.process_cfg_attr(attr));
if self.in_cfg(&attrs) {
Some(attrs)
} else {
self.modified = true;
None
if self.in_cfg(&attrs) { Some(attrs) } else { None }
}
/// Performs cfg-expansion on `stream`, producing a new `AttrAnnotatedTokenStream`.
/// This is only used during the invocation of `derive` proc-macros,
/// which require that we cfg-expand their entire input.
/// Normal cfg-expansion operates on parsed AST nodes via the `configure` method
fn configure_tokens(&mut self, stream: &AttrAnnotatedTokenStream) -> AttrAnnotatedTokenStream {
fn can_skip(stream: &AttrAnnotatedTokenStream) -> bool {
stream.0.iter().all(|(tree, _spacing)| match tree {
AttrAnnotatedTokenTree::Attributes(_) => false,
AttrAnnotatedTokenTree::Token(_) => true,
AttrAnnotatedTokenTree::Delimited(_, _, inner) => can_skip(inner),
})
}
if can_skip(stream) {
return stream.clone();
}
let trees: Vec<_> = stream
.0
.iter()
.flat_map(|(tree, spacing)| match tree.clone() {
AttrAnnotatedTokenTree::Attributes(mut data) => {
let mut attrs: Vec<_> = std::mem::take(&mut data.attrs).into();
attrs.flat_map_in_place(|attr| self.process_cfg_attr(attr));
data.attrs = attrs.into();
if self.in_cfg(&data.attrs) {
data.tokens = LazyTokenStream::new(
self.configure_tokens(&data.tokens.create_token_stream()),
);
Some((AttrAnnotatedTokenTree::Attributes(data), *spacing)).into_iter()
} else {
None.into_iter()
}
}
AttrAnnotatedTokenTree::Delimited(sp, delim, mut inner) => {
inner = self.configure_tokens(&inner);
Some((AttrAnnotatedTokenTree::Delimited(sp, delim, inner), *spacing))
.into_iter()
}
AttrAnnotatedTokenTree::Token(token) => {
if let TokenKind::Interpolated(nt) = token.kind {
panic!(
"Nonterminal should have been flattened at {:?}: {:?}",
token.span, nt
);
} else {
Some((AttrAnnotatedTokenTree::Token(token), *spacing)).into_iter()
}
}
})
.collect();
AttrAnnotatedTokenStream::new(trees)
}
/// Parse and expand all `cfg_attr` attributes into a list of attributes
@ -285,9 +349,6 @@ impl<'a> StripUnconfigured<'a> {
return vec![attr];
}
// A `#[cfg_attr]` either gets removed, or replaced with a new attribute
self.modified = true;
let (cfg_predicate, expanded_attrs) = match self.parse_cfg_attr(&attr) {
None => return vec![],
Some(r) => r,
@ -311,7 +372,7 @@ impl<'a> StripUnconfigured<'a> {
expanded_attrs
.into_iter()
.flat_map(|(item, span)| {
let orig_tokens = attr.tokens();
let orig_tokens = attr.tokens().to_tokenstream();
// We are taking an attribute of the form `#[cfg_attr(pred, attr)]`
// and producing an attribute of the form `#[attr]`. We
@ -321,25 +382,34 @@ impl<'a> StripUnconfigured<'a> {
// Use the `#` in `#[cfg_attr(pred, attr)]` as the `#` token
// for `attr` when we expand it to `#[attr]`
let pound_token = orig_tokens.trees().next().unwrap();
if !matches!(pound_token, TokenTree::Token(Token { kind: TokenKind::Pound, .. })) {
panic!("Bad tokens for attribute {:?}", attr);
let mut orig_trees = orig_tokens.trees();
let pound_token = match orig_trees.next().unwrap() {
TokenTree::Token(token @ Token { kind: TokenKind::Pound, .. }) => token,
_ => panic!("Bad tokens for attribute {:?}", attr),
};
let pound_span = pound_token.span;
let mut trees = vec![(AttrAnnotatedTokenTree::Token(pound_token), Spacing::Alone)];
if attr.style == AttrStyle::Inner {
// For inner attributes, we do the same thing for the `!` in `#![some_attr]`
let bang_token = match orig_trees.next().unwrap() {
TokenTree::Token(token @ Token { kind: TokenKind::Not, .. }) => token,
_ => panic!("Bad tokens for attribute {:?}", attr),
};
trees.push((AttrAnnotatedTokenTree::Token(bang_token), Spacing::Alone));
}
// We don't really have a good span to use for the synthesized `[]`
// in `#[attr]`, so just use the span of the `#` token.
let bracket_group = TokenTree::Delimited(
DelimSpan::from_single(pound_token.span()),
let bracket_group = AttrAnnotatedTokenTree::Delimited(
DelimSpan::from_single(pound_span),
DelimToken::Bracket,
item.tokens
.as_ref()
.unwrap_or_else(|| panic!("Missing tokens for {:?}", item))
.create_token_stream(),
);
let tokens = Some(LazyTokenStream::new(TokenStream::new(vec![
(pound_token, Spacing::Alone),
(bracket_group, Spacing::Alone),
])));
trees.push((bracket_group, Spacing::Alone));
let tokens = Some(LazyTokenStream::new(AttrAnnotatedTokenStream::new(trees)));
self.process_cfg_attr(attr::mk_attr_from_item(item, tokens, attr.style, span))
})
.collect()
@ -457,7 +527,8 @@ impl<'a> StripUnconfigured<'a> {
self.sess.parse_sess.span_diagnostic.span_err(attr.span, msg);
}
self.process_cfg_attrs(expr)
self.process_cfg_attrs(expr);
self.try_configure_tokens(&mut *expr);
}
}
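
To see the effect of `configure_tokens` on derive input (a behavioral
sketch; the token-level representation is internal to the compiler):

```rust
// What the user wrote. The parser captures these tokens, recording the
// location of the `#[cfg]` attribute in the `AttrAnnotatedTokenStream`:
#[derive(Debug)]
struct Foo {
    #[cfg(FALSE)]
    removed: u8,
    kept: u16,
}

// After eager cfg-expansion, the token stream handed to the `Debug`
// derive corresponds to the following, with the spans of all surviving
// tokens preserved from the original source:
//
//     struct Foo {
//         kept: u16,
//     }
```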

View File

@ -12,7 +12,7 @@ use rustc_ast::ptr::P;
use rustc_ast::token;
use rustc_ast::tokenstream::TokenStream;
use rustc_ast::visit::{self, AssocCtxt, Visitor};
use rustc_ast::{AstLike, AttrItem, AttrStyle, Block, Inline, ItemKind, LitKind, MacArgs};
use rustc_ast::{AstLike, AttrItem, Block, Inline, ItemKind, LitKind, MacArgs};
use rustc_ast::{MacCallStmt, MacStmtStyle, MetaItemKind, ModKind, NestedMetaItem};
use rustc_ast::{NodeId, PatKind, Path, StmtKind, Unsafe};
use rustc_ast_pretty::pprust;
@ -611,10 +611,15 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
let invocations = {
let mut collector = InvocationCollector {
// Non-derive macro invocations cannot see the results of cfg expansion - they
// will either be removed along with the item, or invoked before the cfg/cfg_attr
// attribute is expanded. Therefore, we don't need to configure the tokens
// Derive macros *can* see the results of cfg-expansion - they are handled
// specially in `fully_expand_fragment`
cfg: StripUnconfigured {
sess: &self.cx.sess,
features: self.cx.ecfg.features,
modified: false,
config_tokens: false,
},
cx: self.cx,
invocations: Vec::new(),
@ -709,13 +714,26 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
SyntaxExtensionKind::Attr(expander) => {
self.gate_proc_macro_input(&item);
self.gate_proc_macro_attr_item(span, &item);
let tokens = match attr.style {
AttrStyle::Outer => item.into_tokens(&self.cx.sess.parse_sess),
// FIXME: Properly collect tokens for inner attributes
AttrStyle::Inner => rustc_parse::fake_token_stream(
let mut fake_tokens = false;
if let Annotatable::Item(item_inner) = &item {
if let ItemKind::Mod(_, mod_kind) = &item_inner.kind {
// FIXME: Collect tokens and use them instead of generating
// fake ones. These are unstable, so it needs to be
// fixed prior to stabilization
// Fake tokens when we are invoking an inner attribute, and:
fake_tokens = matches!(attr.style, ast::AttrStyle::Inner) &&
// We are invoking an attribute on the crate root, or an outline
// module
(item_inner.ident.name.is_empty() || !matches!(mod_kind, ast::ModKind::Loaded(_, Inline::Yes, _)));
}
}
let tokens = if fake_tokens {
rustc_parse::fake_token_stream(
&self.cx.sess.parse_sess,
&item.into_nonterminal(),
),
)
} else {
item.into_tokens(&self.cx.sess.parse_sess)
};
let attr_item = attr.unwrap_normal_item();
if let MacArgs::Eq(..) = attr_item.args {
@ -897,21 +915,21 @@ pub fn parse_ast_fragment<'a>(
}
AstFragmentKind::TraitItems => {
let mut items = SmallVec::new();
while let Some(item) = this.parse_trait_item()? {
while let Some(item) = this.parse_trait_item(ForceCollect::No)? {
items.extend(item);
}
AstFragment::TraitItems(items)
}
AstFragmentKind::ImplItems => {
let mut items = SmallVec::new();
while let Some(item) = this.parse_impl_item()? {
while let Some(item) = this.parse_impl_item(ForceCollect::No)? {
items.extend(item);
}
AstFragment::ImplItems(items)
}
AstFragmentKind::ForeignItems => {
let mut items = SmallVec::new();
while let Some(item) = this.parse_foreign_item()? {
while let Some(item) = this.parse_foreign_item(ForceCollect::No)? {
items.extend(item);
}
AstFragment::ForeignItems(items)

View File

@ -94,7 +94,7 @@ impl MultiItemModifier for ProcMacroDerive {
{
TokenTree::token(token::Interpolated(Lrc::new(item)), DUMMY_SP).into()
} else {
nt_to_tokenstream(&item, &ecx.sess.parse_sess, CanSynthesizeMissingTokens::Yes)
nt_to_tokenstream(&item, &ecx.sess.parse_sess, CanSynthesizeMissingTokens::No)
};
let server = proc_macro_server::Rustc::new(ecx);

View File

@ -1,5 +1,6 @@
//! The main parser interface.
#![feature(array_windows)]
#![feature(crate_visibility_modifier)]
#![feature(bindings_after_at)]
#![feature(iter_order_by)]
@ -9,9 +10,12 @@
#![recursion_limit = "256"]
use rustc_ast as ast;
use rustc_ast::token::{self, Nonterminal};
use rustc_ast::tokenstream::{self, CanSynthesizeMissingTokens, LazyTokenStream, TokenStream};
use rustc_ast::token::{self, Nonterminal, Token, TokenKind};
use rustc_ast::tokenstream::{self, AttributesData, CanSynthesizeMissingTokens, LazyTokenStream};
use rustc_ast::tokenstream::{AttrAnnotatedTokenStream, AttrAnnotatedTokenTree};
use rustc_ast::tokenstream::{Spacing, TokenStream};
use rustc_ast::AstLike;
use rustc_ast::Attribute;
use rustc_ast_pretty::pprust;
use rustc_data_structures::sync::Lrc;
use rustc_errors::{Diagnostic, FatalError, Level, PResult};
@ -21,8 +25,6 @@ use rustc_span::{FileName, SourceFile, Span};
use std::path::Path;
use std::str;
use tracing::debug;
pub const MACRO_ARGUMENTS: Option<&str> = Some("macro arguments");
#[macro_use]
@ -255,19 +257,23 @@ pub fn nt_to_tokenstream(
// before we fall back to the stringification.
let convert_tokens =
|tokens: Option<&LazyTokenStream>| tokens.as_ref().map(|t| t.create_token_stream());
|tokens: Option<&LazyTokenStream>| Some(tokens?.create_token_stream().to_tokenstream());
let tokens = match *nt {
Nonterminal::NtItem(ref item) => prepend_attrs(sess, &item.attrs, nt, item.tokens.as_ref()),
Nonterminal::NtItem(ref item) => prepend_attrs(&item.attrs, item.tokens.as_ref()),
Nonterminal::NtBlock(ref block) => convert_tokens(block.tokens.as_ref()),
Nonterminal::NtStmt(ref stmt) => {
let do_prepend = |tokens| prepend_attrs(sess, stmt.attrs(), nt, tokens);
if let ast::StmtKind::Empty = stmt.kind {
let tokens: TokenStream =
tokenstream::TokenTree::token(token::Semi, stmt.span).into();
do_prepend(Some(&LazyTokenStream::new(tokens)))
let tokens = AttrAnnotatedTokenStream::new(vec![(
tokenstream::AttrAnnotatedTokenTree::Token(Token::new(
TokenKind::Semi,
stmt.span,
)),
Spacing::Alone,
)]);
prepend_attrs(&stmt.attrs(), Some(&LazyTokenStream::new(tokens)))
} else {
do_prepend(stmt.tokens())
prepend_attrs(&stmt.attrs(), stmt.tokens())
}
}
Nonterminal::NtPat(ref pat) => convert_tokens(pat.tokens.as_ref()),
@ -283,10 +289,7 @@ pub fn nt_to_tokenstream(
Nonterminal::NtVis(ref vis) => convert_tokens(vis.tokens.as_ref()),
Nonterminal::NtTT(ref tt) => Some(tt.clone().into()),
Nonterminal::NtExpr(ref expr) | Nonterminal::NtLiteral(ref expr) => {
if expr.tokens.is_none() {
debug!("missing tokens for expr {:?}", expr);
}
prepend_attrs(sess, &expr.attrs, nt, expr.tokens.as_ref())
prepend_attrs(&expr.attrs, expr.tokens.as_ref())
}
};
@ -295,34 +298,30 @@ pub fn nt_to_tokenstream(
} else if matches!(synthesize_tokens, CanSynthesizeMissingTokens::Yes) {
return fake_token_stream(sess, nt);
} else {
panic!("Missing tokens for nt at {:?}: {:?}", nt.span(), pprust::nonterminal_to_string(nt));
panic!(
"Missing tokens for nt {:?} at {:?}: {:?}",
nt,
nt.span(),
pprust::nonterminal_to_string(nt)
);
}
}
fn prepend_attrs(attrs: &[Attribute], tokens: Option<&LazyTokenStream>) -> Option<TokenStream> {
let tokens = tokens?;
if attrs.is_empty() {
return Some(tokens.create_token_stream().to_tokenstream());
}
let attr_data = AttributesData { attrs: attrs.to_vec().into(), tokens: tokens.clone() };
let wrapped = AttrAnnotatedTokenStream::new(vec![(
AttrAnnotatedTokenTree::Attributes(attr_data),
Spacing::Alone,
)]);
Some(wrapped.to_tokenstream())
}
pub fn fake_token_stream(sess: &ParseSess, nt: &Nonterminal) -> TokenStream {
let source = pprust::nonterminal_to_string(nt);
let filename = FileName::macro_expansion_source_code(&source);
parse_stream_from_source_str(filename, source, sess, Some(nt.span()))
}
fn prepend_attrs(
sess: &ParseSess,
attrs: &[ast::Attribute],
nt: &Nonterminal,
tokens: Option<&tokenstream::LazyTokenStream>,
) -> Option<tokenstream::TokenStream> {
if attrs.is_empty() {
return Some(tokens?.create_token_stream());
}
let mut builder = tokenstream::TokenStreamBuilder::new();
for attr in attrs {
// FIXME: Correctly handle tokens for inner attributes.
// For now, we fall back to reparsing the original AST node
if attr.style == ast::AttrStyle::Inner {
return Some(fake_token_stream(sess, nt));
}
builder.push(attr.tokens());
}
builder.push(tokens?.create_token_stream());
Some(builder.build())
}

View File

@ -1,10 +1,11 @@
use super::{AttrWrapper, Parser, PathStyle};
use super::{AttrWrapper, Capturing, Parser, PathStyle};
use rustc_ast as ast;
use rustc_ast::attr;
use rustc_ast::token::{self, Nonterminal};
use rustc_ast_pretty::pprust;
use rustc_errors::{error_code, PResult};
use rustc_span::{sym, Span};
use std::convert::TryInto;
use tracing::debug;
@ -29,6 +30,7 @@ impl<'a> Parser<'a> {
pub(super) fn parse_outer_attributes(&mut self) -> PResult<'a, AttrWrapper> {
let mut attrs: Vec<ast::Attribute> = Vec::new();
let mut just_parsed_doc_comment = false;
let start_pos = self.token_cursor.num_next_calls;
loop {
debug!("parse_outer_attributes: self.token={:?}", self.token);
let attr = if self.check(&token::Pound) {
@ -74,7 +76,7 @@ impl<'a> Parser<'a> {
break;
}
}
Ok(AttrWrapper::new(attrs))
Ok(AttrWrapper::new(attrs.into(), start_pos))
}
/// Matches `attribute = # ! [ meta_item ]`.
@ -177,6 +179,7 @@ impl<'a> Parser<'a> {
crate fn parse_inner_attributes(&mut self) -> PResult<'a, Vec<ast::Attribute>> {
let mut attrs: Vec<ast::Attribute> = vec![];
loop {
let start_pos: u32 = self.token_cursor.num_next_calls.try_into().unwrap();
// Only try to parse if it is an inner attribute (has `!`).
let attr = if self.check(&token::Pound) && self.look_ahead(1, |t| t == &token::Not) {
Some(self.parse_attribute(InnerAttrPolicy::Permitted)?)
@ -191,6 +194,18 @@ impl<'a> Parser<'a> {
None
};
if let Some(attr) = attr {
let end_pos: u32 = self.token_cursor.num_next_calls.try_into().unwrap();
// If we are currently capturing tokens, mark the location of this inner attribute.
// If capturing ends up creating a `LazyTokenStream`, we will include
// this replace range with it, removing the inner attribute from the final
// `AttrAnnotatedTokenStream`. Inner attributes are stored in the parsed AST node.
// During macro expansion, they are selectively inserted back into the
// token stream (the first inner attribute is removed each time we invoke the
// corresponding macro).
let range = start_pos..end_pos;
if let Capturing::Yes = self.capture_state.capturing {
self.capture_state.inner_attr_ranges.insert(attr.id, (range, vec![]));
}
attrs.push(attr);
} else {
break;
@ -311,6 +326,9 @@ pub fn maybe_needs_tokens(attrs: &[ast::Attribute]) -> bool {
// One of the attributes may either itself be a macro,
// or expand to macro attributes (`cfg_attr`).
attrs.iter().any(|attr| {
if attr.is_doc_comment() {
return false;
}
attr.ident().map_or(true, |ident| {
ident.name == sym::cfg_attr || !rustc_feature::is_builtin_attr_name(ident.name)
})

View File

@ -1,12 +1,14 @@
use super::attr;
use super::{ForceCollect, Parser, TokenCursor, TrailingToken};
use rustc_ast::token::{self, Token, TokenKind};
use rustc_ast::tokenstream::{CreateTokenStream, TokenStream, TokenTree, TreeAndSpacing};
use rustc_ast::tokenstream::{DelimSpan, LazyTokenStream, Spacing};
use rustc_ast::AstLike;
use super::{Capturing, FlatToken, ForceCollect, Parser, ReplaceRange, TokenCursor, TrailingToken};
use rustc_ast::token::{self, DelimToken, Token, TokenKind};
use rustc_ast::tokenstream::{AttrAnnotatedTokenStream, AttributesData, CreateTokenStream};
use rustc_ast::tokenstream::{AttrAnnotatedTokenTree, DelimSpan, LazyTokenStream, Spacing};
use rustc_ast::{self as ast};
use rustc_ast::{AstLike, AttrVec, Attribute};
use rustc_errors::PResult;
use rustc_span::{Span, DUMMY_SP};
use rustc_span::{sym, Span, DUMMY_SP};
use std::convert::TryInto;
use std::ops::Range;
/// A wrapper type to ensure that the parser handles outer attributes correctly.
/// When we parse outer attributes, we need to ensure that we capture tokens
@ -23,23 +25,158 @@ use rustc_span::{Span, DUMMY_SP};
/// cannot directly access the `attrs` field
#[derive(Debug, Clone)]
pub struct AttrWrapper {
attrs: Vec<ast::Attribute>,
attrs: AttrVec,
// The start of the outer attributes in the token cursor.
// This allows us to create a `ReplaceRange` for the entire attribute
// target, including outer attributes.
start_pos: usize,
}
// This struct is passed around very frequently,
// so make sure it doesn't accidentally get larger
#[cfg(target_arch = "x86_64")]
rustc_data_structures::static_assert_size!(AttrWrapper, 16);
impl AttrWrapper {
pub fn empty() -> AttrWrapper {
AttrWrapper { attrs: vec![] }
pub(super) fn new(attrs: AttrVec, start_pos: usize) -> AttrWrapper {
AttrWrapper { attrs, start_pos }
}
pub fn new(attrs: Vec<ast::Attribute>) -> AttrWrapper {
AttrWrapper { attrs }
pub fn empty() -> AttrWrapper {
AttrWrapper { attrs: AttrVec::new(), start_pos: usize::MAX }
}
// FIXME: Delay span bug here?
pub(crate) fn take_for_recovery(self) -> Vec<ast::Attribute> {
pub(crate) fn take_for_recovery(self) -> AttrVec {
self.attrs
}
// FIXME: require passing an NT to prevent misuse of this method
pub(crate) fn prepend_to_nt_inner(self, attrs: &mut Vec<Attribute>) {
let mut self_attrs: Vec<_> = self.attrs.into();
std::mem::swap(attrs, &mut self_attrs);
attrs.extend(self_attrs);
}
pub fn is_empty(&self) -> bool {
self.attrs.is_empty()
}
pub fn maybe_needs_tokens(&self) -> bool {
crate::parser::attr::maybe_needs_tokens(&self.attrs)
}
}
/// Returns `true` if `attrs` contains a `cfg` or `cfg_attr` attribute
fn has_cfg_or_cfg_attr(attrs: &[Attribute]) -> bool {
// NOTE: Builtin attributes like `cfg` and `cfg_attr` cannot be renamed via imports.
// Therefore, the absence of a literal `cfg` or `cfg_attr` guarantees that
// we don't need to do any eager expansion.
attrs.iter().any(|attr| {
attr.ident().map_or(false, |ident| ident.name == sym::cfg || ident.name == sym::cfg_attr)
})
}
// Produces a `TokenStream` on-demand. Using `cursor_snapshot`
// and `num_calls`, we can reconstruct the `TokenStream` seen
// by the callback. This allows us to avoid producing a `TokenStream`
// if it is never needed - for example, a captured `macro_rules!`
// argument that is never passed to a proc macro.
// In practice token stream creation happens rarely compared to
// calls to `collect_tokens` (see some statistics in #78736),
// so we are doing as little up-front work as possible.
//
// This also makes `Parser` very cheap to clone, since
// there is no intermediate collection buffer to clone.
#[derive(Clone)]
struct LazyTokenStreamImpl {
start_token: (Token, Spacing),
cursor_snapshot: TokenCursor,
num_calls: usize,
break_last_token: bool,
replace_ranges: Box<[ReplaceRange]>,
}
#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
rustc_data_structures::static_assert_size!(LazyTokenStreamImpl, 144);
impl CreateTokenStream for LazyTokenStreamImpl {
fn create_token_stream(&self) -> AttrAnnotatedTokenStream {
// The token produced by the final call to `next` or `next_desugared`
// was not actually consumed by the callback. The combination
// of chaining the initial token and using `take` produces the desired
// result - we produce an empty `TokenStream` if no calls were made,
// and omit the final token otherwise.
let mut cursor_snapshot = self.cursor_snapshot.clone();
let tokens =
std::iter::once((FlatToken::Token(self.start_token.0.clone()), self.start_token.1))
.chain((0..self.num_calls).map(|_| {
let token = if cursor_snapshot.desugar_doc_comments {
cursor_snapshot.next_desugared()
} else {
cursor_snapshot.next()
};
(FlatToken::Token(token.0), token.1)
}))
.take(self.num_calls);
if !self.replace_ranges.is_empty() {
let mut tokens: Vec<_> = tokens.collect();
let mut replace_ranges = self.replace_ranges.clone();
replace_ranges.sort_by_key(|(range, _)| range.start);
#[cfg(debug_assertions)]
{
for [(range, tokens), (next_range, next_tokens)] in replace_ranges.array_windows() {
assert!(
range.end <= next_range.start || range.end >= next_range.end,
"Replace ranges should either be disjoint or nested: ({:?}, {:?}) ({:?}, {:?})",
range,
tokens,
next_range,
next_tokens,
);
}
}
// Process the replace ranges, starting from the highest start
// position and working our way back. If we have tokens like:
//
// `#[cfg(FALSE)] struct Foo { #[cfg(FALSE)] field: bool }`
//
// Then we will generate replace ranges for both
// the `#[cfg(FALSE)] field: bool` and the entire
// `#[cfg(FALSE)] struct Foo { #[cfg(FALSE)] field: bool }`
//
// By starting processing from the replace range with the greatest
// start position, we ensure that any replace range which encloses
// another replace range will capture the *replaced* tokens for the inner
// range, not the original tokens.
for (range, new_tokens) in replace_ranges.iter().rev() {
assert!(!range.is_empty(), "Cannot replace an empty range: {:?}", range);
// Replace ranges are only allowed to decrease the number of tokens.
assert!(
range.len() >= new_tokens.len(),
"Range {:?} has greater len than {:?}",
range,
new_tokens
);
// Replace any removed tokens with `FlatToken::Empty`.
// This keeps the total length of `tokens` constant throughout the
// replacement process, allowing us to use all of the `ReplaceRanges` entries
// without adjusting indices.
let filler = std::iter::repeat((FlatToken::Empty, Spacing::Alone))
.take(range.len() - new_tokens.len());
tokens.splice(
(range.start as usize)..(range.end as usize),
new_tokens.clone().into_iter().chain(filler),
);
}
make_token_stream(tokens.into_iter(), self.break_last_token)
} else {
make_token_stream(tokens, self.break_last_token)
}
}
}
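The splice loop above can be demonstrated in isolation. A self-contained sketch with toy types (`Tok` stands in for `(FlatToken, Spacing)`; all names are illustrative): ranges are sorted by start, applied from the greatest start position backwards, and removed tokens are padded with an `Empty` filler so the indices used by the remaining ranges stay valid:

use std::ops::Range;

#[derive(Clone, Debug, PartialEq)]
enum Tok {
    Token(char),
    Empty, // placeholder that keeps the overall length constant
}

fn apply_replace_ranges(
    mut tokens: Vec<Tok>,
    mut replace_ranges: Vec<(Range<usize>, Vec<Tok>)>,
) -> Vec<Tok> {
    replace_ranges.sort_by_key(|(range, _)| range.start);
    for (range, new_tokens) in replace_ranges.iter().rev() {
        // Replace ranges may only shrink the token count.
        assert!(range.len() >= new_tokens.len());
        let filler = std::iter::repeat(Tok::Empty).take(range.len() - new_tokens.len());
        tokens.splice(range.clone(), new_tokens.iter().cloned().chain(filler));
    }
    tokens
}

fn main() {
    // An inner range (1..2) nested in an outer range (0..4): the inner one is
    // applied first, and the `Empty` padding keeps the outer indices valid.
    let tokens: Vec<Tok> = "abcde".chars().map(Tok::Token).collect();
    let ranges = vec![(0..4, vec![Tok::Token('Y')]), (1..2, vec![Tok::Token('X')])];
    let out = apply_replace_ranges(tokens, ranges);
    assert_eq!(
        out,
        vec![Tok::Token('Y'), Tok::Empty, Tok::Empty, Tok::Empty, Tok::Token('e')]
    );
}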
impl<'a> Parser<'a> {
@ -65,106 +202,195 @@ impl<'a> Parser<'a> {
force_collect: ForceCollect,
f: impl FnOnce(&mut Self, Vec<ast::Attribute>) -> PResult<'a, (R, TrailingToken)>,
) -> PResult<'a, R> {
if matches!(force_collect, ForceCollect::No) && !attr::maybe_needs_tokens(&attrs.attrs) {
return Ok(f(self, attrs.attrs)?.0);
// We only bail out when nothing could possibly observe the collected tokens:
// 1. We cannot be force collecting tokens (since force-collecting requires tokens
// by definition)
if matches!(force_collect, ForceCollect::No)
// None of our outer attributes can require tokens (e.g. a proc-macro)
&& !attrs.maybe_needs_tokens()
// If our target supports custom inner attributes, then we cannot bail
// out early, since we may need to capture tokens for a custom inner attribute
// invocation.
&& !R::SUPPORTS_CUSTOM_INNER_ATTRS
// Never bail out early in `capture_cfg` mode, since there might be `#[cfg]`
// or `#[cfg_attr]` attributes.
&& !self.capture_cfg
{
return Ok(f(self, attrs.attrs.into())?.0);
}
let start_token = (self.token.clone(), self.token_spacing);
let cursor_snapshot = self.token_cursor.clone();
let (mut ret, trailing_token) = f(self, attrs.attrs)?;
let tokens = match ret.tokens_mut() {
Some(tokens) if tokens.is_none() => tokens,
_ => return Ok(ret),
};
let has_outer_attrs = !attrs.attrs.is_empty();
let prev_capturing = std::mem::replace(&mut self.capture_state.capturing, Capturing::Yes);
let replace_ranges_start = self.capture_state.replace_ranges.len();
// Produces a `TokenStream` on-demand. Using `cursor_snapshot`
// and `num_calls`, we can reconstruct the `TokenStream` seen
// by the callback. This allows us to avoid producing a `TokenStream`
// if it is never needed - for example, a captured `macro_rules!`
// argument that is never passed to a proc macro.
// In practice token stream creation happens rarely compared to
// calls to `collect_tokens` (see some statistics in #78736),
// so we are doing as little up-front work as possible.
//
// This also makes `Parser` very cheap to clone, since
// there is no intermediate collection buffer to clone.
#[derive(Clone)]
struct LazyTokenStreamImpl {
start_token: (Token, Spacing),
cursor_snapshot: TokenCursor,
num_calls: usize,
desugar_doc_comments: bool,
append_unglued_token: Option<TreeAndSpacing>,
let ret = f(self, attrs.attrs.into());
self.capture_state.capturing = prev_capturing;
let (mut ret, trailing) = ret?;
// When we're not in `capture_cfg` mode, bail out early if:
// 1. Our target doesn't support tokens at all (e.g. we're parsing an `NtIdent`),
// so there's nothing for us to do.
// 2. Our target already has tokens set (e.g. we've parsed something
// like `#[my_attr] $item`). The actual parsing code takes care of prepending
// any attributes to the nonterminal, so we don't need to modify the
// already captured tokens.
// Note that this check is independent of `force_collect` - if we already
// have tokens, or can't even store them, then there's never a need to
// force collection of new tokens.
if !self.capture_cfg && matches!(ret.tokens_mut(), None | Some(Some(_))) {
return Ok(ret);
}
impl CreateTokenStream for LazyTokenStreamImpl {
fn create_token_stream(&self) -> TokenStream {
if self.num_calls == 0 {
return TokenStream::new(vec![]);
}
let mut cursor_snapshot = self.cursor_snapshot.clone();
// Don't skip `None` delimiters, since we want to pass them to
// proc macros. Normally, we'll end up capturing `TokenKind::Interpolated`,
// which gets converted to a `None`-delimited group when we invoke
// a proc-macro. However, it's possible to already have a `None`-delimited
// group in the stream (such as when parsing the output of a proc-macro,
// or in certain unusual cases with cross-crate `macro_rules!` macros).
cursor_snapshot.skip_none_delims = false;
// This is very similar to the bail out check at the start of this function.
// Now that we've parsed an AST node, we have more information available.
if matches!(force_collect, ForceCollect::No)
// We now have inner attributes available, so this check is more precise
// than `attrs.maybe_needs_tokens()` at the start of the function.
// As a result, we don't need to check `R::SUPPORTS_CUSTOM_INNER_ATTRS`
&& !crate::parser::attr::maybe_needs_tokens(ret.attrs())
// Subtle: We call `has_cfg_or_cfg_attr` with the attrs from `ret`.
// This ensures that we consider inner attributes (e.g. `#![cfg]`),
// which require us to have tokens available.
// We also call `has_cfg_or_cfg_attr` at the beginning of this function,
// but we only bail out there if there's no possibility of inner attributes
// (`!R::SUPPORTS_CUSTOM_INNER_ATTRS`).
// We only care about `#[cfg]` or `#[cfg_attr]` in `capture_cfg`
// mode - during normal parsing, we don't need any special capturing
// for those attributes, since they're builtin.
&& !(self.capture_cfg && has_cfg_or_cfg_attr(ret.attrs()))
{
return Ok(ret);
}
// The token produced by the final call to `next` or `next_desugared`
// was not actually consumed by the callback.
let num_calls = self.num_calls - 1;
let mut i = 0;
let tokens =
std::iter::once(self.start_token.clone()).chain(std::iter::from_fn(|| {
if i >= num_calls {
return None;
}
let token = if self.desugar_doc_comments {
cursor_snapshot.next_desugared()
} else {
cursor_snapshot.next()
};
// When the `LazyTokenStreamImpl` was originally produced, we did *not*
// include `NoDelim` tokens in `num_calls`, since they are normally ignored
// by the parser. Therefore, we only increment our counter for other types of tokens.
if !matches!(
token.0.kind,
token::OpenDelim(token::NoDelim) | token::CloseDelim(token::NoDelim)
) {
i += 1;
}
Some(token)
}));
make_token_stream(tokens, self.append_unglued_token.clone())
let mut inner_attr_replace_ranges = Vec::new();
// Take the captured ranges for any inner attributes that we parsed.
for inner_attr in ret.attrs().iter().filter(|a| a.style == ast::AttrStyle::Inner) {
if let Some(attr_range) = self.capture_state.inner_attr_ranges.remove(&inner_attr.id) {
inner_attr_replace_ranges.push(attr_range);
} else {
self.sess
.span_diagnostic
.delay_span_bug(inner_attr.span, "Missing token range for attribute");
}
}
let mut num_calls = self.token_cursor.num_next_calls - cursor_snapshot.num_next_calls;
match trailing_token {
let replace_ranges_end = self.capture_state.replace_ranges.len();
let cursor_snapshot_next_calls = cursor_snapshot.num_next_calls;
let mut end_pos = self.token_cursor.num_next_calls;
// Capture a trailing token if requested by the callback 'f'
match trailing {
TrailingToken::None => {}
TrailingToken::Semi => {
assert_eq!(self.token.kind, token::Semi);
num_calls += 1;
end_pos += 1;
}
TrailingToken::MaybeComma => {
if self.token.kind == token::Comma {
num_calls += 1;
end_pos += 1;
}
}
}
*tokens = Some(LazyTokenStream::new(LazyTokenStreamImpl {
// If we 'broke' the last token (e.g. breaking a '>>' token into two '>' tokens),
// then extend the range of captured tokens to include it, since the parser
// was not actually bumped past it. When the `LazyTokenStream` gets converted
// into an `AttrAnnotatedTokenStream`, we will create the proper token.
if self.token_cursor.break_last_token {
assert_eq!(
trailing,
TrailingToken::None,
"Cannot set `break_last_token` and have trailing token"
);
end_pos += 1;
}
let num_calls = end_pos - cursor_snapshot_next_calls;
// If we have no attributes, then we will never need to
// use any replace ranges.
let replace_ranges: Box<[ReplaceRange]> = if ret.attrs().is_empty() && !self.capture_cfg {
Box::new([])
} else {
// Grab any replace ranges that occur *inside* the current AST node.
// We will perform the actual replacement when we convert the `LazyTokenStream`
// to an `AttrAnnotatedTokenStream`.
let start_calls: u32 = cursor_snapshot_next_calls.try_into().unwrap();
self.capture_state.replace_ranges[replace_ranges_start..replace_ranges_end]
.iter()
.cloned()
.chain(inner_attr_replace_ranges.clone().into_iter())
.map(|(range, tokens)| {
((range.start - start_calls)..(range.end - start_calls), tokens)
})
.collect()
};
let tokens = LazyTokenStream::new(LazyTokenStreamImpl {
start_token,
num_calls,
cursor_snapshot,
desugar_doc_comments: self.desugar_doc_comments,
append_unglued_token: self.token_cursor.append_unglued_token.clone(),
}));
break_last_token: self.token_cursor.break_last_token,
replace_ranges,
});
// If we support tokens at all
if let Some(target_tokens) = ret.tokens_mut() {
if let Some(target_tokens) = target_tokens {
assert!(
!self.capture_cfg,
"Encountered existing tokens with capture_cfg set: {:?}",
target_tokens
);
} else {
// Store our newly captured tokens into the AST node
*target_tokens = Some(tokens.clone());
};
}
let final_attrs = ret.attrs();
// If `capture_cfg` is set and we're inside a recursive call to
// `collect_tokens_trailing_token`, then we need to register a replace range
// if we have `#[cfg]` or `#[cfg_attr]`. This allows us to run eager cfg-expansion
// on the captured token stream.
if self.capture_cfg
&& matches!(self.capture_state.capturing, Capturing::Yes)
&& has_cfg_or_cfg_attr(&final_attrs)
{
let attr_data = AttributesData { attrs: final_attrs.to_vec().into(), tokens };
// Replace the entire AST node that we just parsed, including attributes,
// with a `FlatToken::AttrTarget`. If this AST node is inside an item
// that has `#[derive]`, then this will allow us to cfg-expand this
// AST node.
let start_pos =
if has_outer_attrs { attrs.start_pos } else { cursor_snapshot_next_calls };
let new_tokens = vec![(FlatToken::AttrTarget(attr_data), Spacing::Alone)];
assert!(
!self.token_cursor.break_last_token,
"Should not have unglued last token with cfg attr"
);
let range: Range<u32> = (start_pos.try_into().unwrap())..(end_pos.try_into().unwrap());
self.capture_state.replace_ranges.push((range, new_tokens));
self.capture_state.replace_ranges.extend(inner_attr_replace_ranges);
}
// Only clear our `replace_ranges` when we're finished capturing entirely.
if matches!(self.capture_state.capturing, Capturing::No) {
self.capture_state.replace_ranges.clear();
// We don't clear `inner_attr_ranges`, as doing so repeatedly
// had a measurable performance impact. Most inner attributes that
// we insert will get removed - when we drop the parser, we'll free
// up the memory used by any attributes that we didn't remove from the map.
}
Ok(ret)
}
}
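The two bail-out checks in `collect_tokens_trailing_token` reduce to a pair of predicates. This is a hypothetical restatement over plain booleans, mirroring only the conditions documented in the comments above, not real parser code:

struct CollectCtx {
    force_collect: bool,
    capture_cfg: bool,
    supports_custom_inner_attrs: bool,
}

// Before parsing the node: skip collection only if nothing could observe the tokens.
fn skip_before(ctx: &CollectCtx, outer_attrs_need_tokens: bool) -> bool {
    !ctx.force_collect
        && !outer_attrs_need_tokens
        && !ctx.supports_custom_inner_attrs
        && !ctx.capture_cfg
}

// After parsing the node: inner attributes are now known, so the check is more precise.
fn skip_after(ctx: &CollectCtx, any_attr_needs_tokens: bool, has_cfg_or_cfg_attr: bool) -> bool {
    !ctx.force_collect
        && !any_attr_needs_tokens
        && !(ctx.capture_cfg && has_cfg_or_cfg_attr)
}

fn main() {
    let ctx =
        CollectCtx { force_collect: false, capture_cfg: true, supports_custom_inner_attrs: false };
    // `capture_cfg` blocks the early bail-out...
    assert!(!skip_before(&ctx, false));
    // ...but with no `#[cfg]`/`#[cfg_attr]` present, the later check still bails.
    assert!(skip_after(&ctx, false, false));
}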
@ -172,43 +398,112 @@ impl<'a> Parser<'a> {
/// Converts a flattened iterator of tokens (including open and close delimiter tokens)
/// into a `TokenStream`, creating a `TokenTree::Delimited` for each matching pair
/// of open and close delims.
// FIXME(#67062): Currently, we don't parse `None`-delimited groups correctly,
// which can cause us to end up with mismatched `None` delimiters in our
// captured tokens. This function contains several hacks to work around this -
// essentially, we throw away mismatched `None` delimiters when we encounter them.
// Once we properly parse `None` delimiters, they can be captured just like any
// other tokens, and these hacks can be removed.
fn make_token_stream(
tokens: impl Iterator<Item = (Token, Spacing)>,
append_unglued_token: Option<TreeAndSpacing>,
) -> TokenStream {
mut iter: impl Iterator<Item = (FlatToken, Spacing)>,
break_last_token: bool,
) -> AttrAnnotatedTokenStream {
#[derive(Debug)]
struct FrameData {
open: Span,
inner: Vec<(TokenTree, Spacing)>,
open_delim: DelimToken,
inner: Vec<(AttrAnnotatedTokenTree, Spacing)>,
}
let mut stack = vec![FrameData { open: DUMMY_SP, inner: vec![] }];
for (token, spacing) in tokens {
let mut stack =
vec![FrameData { open: DUMMY_SP, open_delim: DelimToken::NoDelim, inner: vec![] }];
let mut token_and_spacing = iter.next();
while let Some((token, spacing)) = token_and_spacing {
match token {
Token { kind: TokenKind::OpenDelim(_), span } => {
stack.push(FrameData { open: span, inner: vec![] });
FlatToken::Token(Token { kind: TokenKind::OpenDelim(delim), span }) => {
stack.push(FrameData { open: span, open_delim: delim, inner: vec![] });
}
Token { kind: TokenKind::CloseDelim(delim), span } => {
let frame_data = stack.pop().expect("Token stack was empty!");
FlatToken::Token(Token { kind: TokenKind::CloseDelim(delim), span }) => {
// HACK: If we encounter a mismatched `None` delimiter at the top
// level, just ignore it.
if matches!(delim, DelimToken::NoDelim)
&& (stack.len() == 1
|| !matches!(stack.last_mut().unwrap().open_delim, DelimToken::NoDelim))
{
token_and_spacing = iter.next();
continue;
}
let frame_data = stack
.pop()
.unwrap_or_else(|| panic!("Token stack was empty for token: {:?}", token));
// HACK: If our current frame has a mismatched opening `None` delimiter,
// merge our current frame with the one above it. That is, transform
// `[ { < first second } third ]` into `[ { first second } third ]`
if !matches!(delim, DelimToken::NoDelim)
&& matches!(frame_data.open_delim, DelimToken::NoDelim)
{
stack.last_mut().unwrap().inner.extend(frame_data.inner);
// Process our closing delimiter again, this time at the previous
// frame in the stack
token_and_spacing = Some((token, spacing));
continue;
}
assert_eq!(
frame_data.open_delim, delim,
"Mismatched open/close delims: open={:?} close={:?}",
frame_data.open, span
);
let dspan = DelimSpan::from_pair(frame_data.open, span);
let stream = TokenStream::new(frame_data.inner);
let delimited = TokenTree::Delimited(dspan, delim, stream);
let stream = AttrAnnotatedTokenStream::new(frame_data.inner);
let delimited = AttrAnnotatedTokenTree::Delimited(dspan, delim, stream);
stack
.last_mut()
.unwrap_or_else(|| panic!("Bottom token frame is missing for tokens!"))
.unwrap_or_else(|| {
panic!("Bottom token frame is missing for token: {:?}", token)
})
.inner
.push((delimited, Spacing::Alone));
}
token => {
stack
.last_mut()
.expect("Bottom token frame is missing!")
.inner
.push((TokenTree::Token(token), spacing));
}
FlatToken::Token(token) => stack
.last_mut()
.expect("Bottom token frame is missing!")
.inner
.push((AttrAnnotatedTokenTree::Token(token), spacing)),
FlatToken::AttrTarget(data) => stack
.last_mut()
.expect("Bottom token frame is missing!")
.inner
.push((AttrAnnotatedTokenTree::Attributes(data), spacing)),
FlatToken::Empty => {}
}
token_and_spacing = iter.next();
}
// HACK: If we don't have a closing `None` delimiter for our last
// frame, merge the frame with the top-level frame. That is,
// turn `< first second` into `first second`
if stack.len() == 2 && stack[1].open_delim == DelimToken::NoDelim {
let temp_buf = stack.pop().unwrap();
stack.last_mut().unwrap().inner.extend(temp_buf.inner);
}
let mut final_buf = stack.pop().expect("Missing final buf!");
final_buf.inner.extend(append_unglued_token);
if break_last_token {
let (last_token, spacing) = final_buf.inner.pop().unwrap();
if let AttrAnnotatedTokenTree::Token(last_token) = last_token {
let unglued_first = last_token.kind.break_two_token_op().unwrap().0;
// An 'unglued' token is always two ASCII characters
let mut first_span = last_token.span.shrink_to_lo();
first_span = first_span.with_hi(first_span.lo() + rustc_span::BytePos(1));
final_buf.inner.push((
AttrAnnotatedTokenTree::Token(Token::new(unglued_first, first_span)),
spacing,
));
} else {
panic!("Unexpected last token {:?}", last_token)
}
}
assert!(stack.is_empty(), "Stack should be empty: final_buf={:?} stack={:?}", final_buf, stack);
TokenStream::new(final_buf.inner)
AttrAnnotatedTokenStream::new(final_buf.inner)
}
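A miniature of the stack discipline in `make_token_stream`, leaving out the `None`-delimiter recovery hacks (toy types, hypothetical names): an open delimiter pushes a frame, a close delimiter pops one and wraps its contents in a delimited tree on the parent frame:

#[derive(Debug, PartialEq)]
enum Flat {
    Open(char),
    Close(char), // carries the same char as its `Open` for easy matching
    Tok(char),
}

#[derive(Debug, PartialEq)]
enum Tree {
    Token(char),
    Delimited(char, Vec<Tree>),
}

fn build(tokens: impl IntoIterator<Item = Flat>) -> Vec<Tree> {
    // The bottom frame collects the top-level trees.
    let mut stack: Vec<(char, Vec<Tree>)> = vec![('\0', Vec::new())];
    for tok in tokens {
        match tok {
            Flat::Open(d) => stack.push((d, Vec::new())),
            Flat::Close(d) => {
                let (open, inner) = stack.pop().expect("token stack was empty");
                assert_eq!(open, d, "mismatched open/close delims");
                stack
                    .last_mut()
                    .expect("bottom token frame is missing")
                    .1
                    .push(Tree::Delimited(d, inner));
            }
            Flat::Tok(c) => stack.last_mut().unwrap().1.push(Tree::Token(c)),
        }
    }
    let (_, top) = stack.pop().expect("missing final buf");
    assert!(stack.is_empty(), "unclosed delimiter");
    top
}

fn main() {
    // `a { b } c` becomes [Token(a), Delimited('{', [Token(b)]), Token(c)]
    let out =
        build([Flat::Tok('a'), Flat::Open('{'), Flat::Tok('b'), Flat::Close('{'), Flat::Tok('c')]);
    assert_eq!(
        out,
        vec![Tree::Token('a'), Tree::Delimited('{', vec![Tree::Token('b')]), Tree::Token('c')]
    );
}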


@ -2581,19 +2581,17 @@ impl<'a> Parser<'a> {
attrs: AttrWrapper,
f: impl FnOnce(&mut Self, Vec<ast::Attribute>) -> PResult<'a, P<Expr>>,
) -> PResult<'a, P<Expr>> {
// FIXME - come up with a nice way to properly forward `ForceCollect` from
// the nonterminal parsing code. This approach is correct, but will cause
// us to unnecessarily capture tokens for exprs that have only builtin
// attributes. Revisit this before #![feature(stmt_expr_attributes)] is stabilized.
let force_collect = if attrs.is_empty() { ForceCollect::No } else { ForceCollect::Yes };
self.collect_tokens_trailing_token(attrs, force_collect, |this, attrs| {
self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
let res = f(this, attrs)?;
let trailing = if this.restrictions.contains(Restrictions::STMT_EXPR)
&& this.token.kind == token::Semi
{
TrailingToken::Semi
} else {
TrailingToken::None
// FIXME - pass this through from the place where we know
// we need a comma, rather than assuming that `#[attr] expr,`
// always captures a trailing comma
TrailingToken::MaybeComma
};
Ok((res, trailing))
})


@ -103,20 +103,11 @@ impl<'a> Parser<'a> {
// over when we bump the parser
if let token::Interpolated(nt) = &self.token.kind {
if let token::NtItem(item) = &**nt {
let item = item.clone();
let mut item = item.clone();
self.bump();
return self.collect_tokens_trailing_token(
attrs,
force_collect,
|this, mut attrs| {
let mut item = item;
mem::swap(&mut item.attrs, &mut attrs);
item.attrs.extend(attrs);
// Bump the parser so that we capture the token::Interpolated
this.bump();
Ok((Some(item.into_inner()), TrailingToken::None))
},
);
attrs.prepend_to_nt_inner(&mut item.attrs);
return Ok(Some(item.into_inner()));
}
};
@ -530,7 +521,7 @@ impl<'a> Parser<'a> {
generics.where_clause = self.parse_where_clause()?;
let impl_items = self.parse_item_list(attrs, |p| p.parse_impl_item())?;
let impl_items = self.parse_item_list(attrs, |p| p.parse_impl_item(ForceCollect::No))?;
let item_kind = match ty_second {
Some(ty_second) => {
@ -718,22 +709,32 @@ impl<'a> Parser<'a> {
} else {
// It's a normal trait.
tps.where_clause = self.parse_where_clause()?;
let items = self.parse_item_list(attrs, |p| p.parse_trait_item())?;
let items = self.parse_item_list(attrs, |p| p.parse_trait_item(ForceCollect::No))?;
Ok((ident, ItemKind::Trait(box TraitKind(is_auto, unsafety, tps, bounds, items))))
}
}
pub fn parse_impl_item(&mut self) -> PResult<'a, Option<Option<P<AssocItem>>>> {
self.parse_assoc_item(|_| true)
pub fn parse_impl_item(
&mut self,
force_collect: ForceCollect,
) -> PResult<'a, Option<Option<P<AssocItem>>>> {
self.parse_assoc_item(|_| true, force_collect)
}
pub fn parse_trait_item(&mut self) -> PResult<'a, Option<Option<P<AssocItem>>>> {
self.parse_assoc_item(|edition| edition >= Edition::Edition2018)
pub fn parse_trait_item(
&mut self,
force_collect: ForceCollect,
) -> PResult<'a, Option<Option<P<AssocItem>>>> {
self.parse_assoc_item(|edition| edition >= Edition::Edition2018, force_collect)
}
/// Parses associated items.
fn parse_assoc_item(&mut self, req_name: ReqName) -> PResult<'a, Option<Option<P<AssocItem>>>> {
Ok(self.parse_item_(req_name, ForceCollect::No)?.map(
fn parse_assoc_item(
&mut self,
req_name: ReqName,
force_collect: ForceCollect,
) -> PResult<'a, Option<Option<P<AssocItem>>>> {
Ok(self.parse_item_(req_name, force_collect)?.map(
|Item { attrs, id, span, vis, ident, kind, tokens }| {
let kind = match AssocItemKind::try_from(kind) {
Ok(kind) => kind,
@ -918,14 +919,17 @@ impl<'a> Parser<'a> {
unsafety: Unsafe,
) -> PResult<'a, ItemInfo> {
let abi = self.parse_abi(); // ABI?
let items = self.parse_item_list(attrs, |p| p.parse_foreign_item())?;
let items = self.parse_item_list(attrs, |p| p.parse_foreign_item(ForceCollect::No))?;
let module = ast::ForeignMod { unsafety, abi, items };
Ok((Ident::invalid(), ItemKind::ForeignMod(module)))
}
/// Parses a foreign item (one in an `extern { ... }` block).
pub fn parse_foreign_item(&mut self) -> PResult<'a, Option<Option<P<ForeignItem>>>> {
Ok(self.parse_item_(|_| true, ForceCollect::No)?.map(
pub fn parse_foreign_item(
&mut self,
force_collect: ForceCollect,
) -> PResult<'a, Option<Option<P<ForeignItem>>>> {
Ok(self.parse_item_(|_| true, force_collect)?.map(
|Item { attrs, id, span, vis, ident, kind, tokens }| {
let kind = match ForeignItemKind::try_from(kind) {
Ok(kind) => kind,


@ -19,13 +19,16 @@ pub use path::PathStyle;
use rustc_ast::ptr::P;
use rustc_ast::token::{self, DelimToken, Token, TokenKind};
use rustc_ast::tokenstream::AttributesData;
use rustc_ast::tokenstream::{self, DelimSpan, Spacing};
use rustc_ast::tokenstream::{TokenStream, TokenTree, TreeAndSpacing};
use rustc_ast::tokenstream::{TokenStream, TokenTree};
use rustc_ast::AttrId;
use rustc_ast::DUMMY_NODE_ID;
use rustc_ast::{self as ast, AnonConst, AstLike, AttrStyle, AttrVec, Const, CrateSugar, Extern};
use rustc_ast::{Async, Expr, ExprKind, MacArgs, MacDelimiter, Mutability, StrLit, Unsafe};
use rustc_ast::{Visibility, VisibilityKind};
use rustc_ast_pretty::pprust;
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::sync::Lrc;
use rustc_errors::PResult;
use rustc_errors::{struct_span_err, Applicability, DiagnosticBuilder, FatalError};
@ -34,6 +37,7 @@ use rustc_span::source_map::{Span, DUMMY_SP};
use rustc_span::symbol::{kw, sym, Ident, Symbol};
use tracing::debug;
use std::ops::Range;
use std::{cmp, mem, slice};
bitflags::bitflags! {
@ -64,6 +68,7 @@ pub enum ForceCollect {
No,
}
#[derive(Debug, Eq, PartialEq)]
pub enum TrailingToken {
None,
Semi,
@ -111,6 +116,7 @@ pub struct Parser<'a> {
pub token_spacing: Spacing,
/// The previous token.
pub prev_token: Token,
pub capture_cfg: bool,
restrictions: Restrictions,
expected_tokens: Vec<TokenType>,
// Important: This must only be advanced from `next_tok`
@ -134,6 +140,44 @@ pub struct Parser<'a> {
pub last_type_ascription: Option<(Span, bool /* likely path typo */)>,
/// If present, this `Parser` is not parsing Rust code but rather a macro call.
subparser_name: Option<&'static str>,
capture_state: CaptureState,
}
/// Indicates a range of tokens that should be replaced by
/// the tokens in the provided vector. This is used in two
/// places during token collection:
///
/// 1. During the parsing of an AST node that may have a `#[derive]`
/// attribute, we parse a nested AST node that has `#[cfg]` or `#[cfg_attr]`.
/// In this case, we use a `ReplaceRange` to replace the entire inner AST node
/// with `FlatToken::AttrTarget`, allowing us to perform eager cfg-expansion
/// on an `AttrAnnotatedTokenStream`.
///
/// 2. When we parse an inner attribute while collecting tokens. We
/// remove inner attributes from the token stream entirely, and
/// instead track them through the `attrs` field on the AST node.
/// This allows us to easily manipulate them (for example, removing
/// the first macro inner attribute to invoke a proc-macro).
/// When we create a `TokenStream`, the inner attributes get inserted
/// into the proper place in the token stream.
pub type ReplaceRange = (Range<u32>, Vec<(FlatToken, Spacing)>);
/// Controls how we capture tokens. Capturing can be expensive,
/// so we try to avoid performing capturing in cases where
/// we will never need an `AttrAnnotatedTokenStream`.
#[derive(Copy, Clone)]
pub enum Capturing {
/// We aren't performing any capturing - this is the default mode.
No,
/// We are capturing tokens
Yes,
}
#[derive(Clone)]
struct CaptureState {
capturing: Capturing,
replace_ranges: Vec<ReplaceRange>,
inner_attr_ranges: FxHashMap<AttrId, ReplaceRange>,
}
impl<'a> Drop for Parser<'a> {
@ -167,18 +211,11 @@ struct TokenCursor {
// want to capture just the first 'unglued' token.
// For example, capturing the `Vec<u8>`
// in `Option<Vec<u8>>` requires us to unglue
// the trailing `>>` token. The `append_unglued_token`
// the trailing `>>` token. The `break_last_token`
// field is used to track this token - it gets
// appended to the captured stream when
// we evaluate a `LazyTokenStream`
append_unglued_token: Option<TreeAndSpacing>,
// If `true`, skip the delimiters for `None`-delimited groups,
// and just yield the inner tokens. This is `true` during
// normal parsing, since the parser code is not currently prepared
// to handle `None` delimiters. When capturing a `TokenStream`,
// however, we want to handle `None`-delimiters, since
// proc-macros always see `None`-delimited groups.
skip_none_delims: bool,
break_last_token: bool,
}
#[derive(Clone)]
@ -191,13 +228,13 @@ struct TokenCursorFrame {
}
impl TokenCursorFrame {
fn new(span: DelimSpan, delim: DelimToken, tts: TokenStream, skip_none_delims: bool) -> Self {
fn new(span: DelimSpan, delim: DelimToken, tts: TokenStream) -> Self {
TokenCursorFrame {
delim,
span,
open_delim: delim == token::NoDelim && skip_none_delims,
open_delim: false,
tree_cursor: tts.into_trees(),
close_delim: delim == token::NoDelim && skip_none_delims,
close_delim: false,
}
}
}
@ -225,7 +262,7 @@ impl TokenCursor {
return (token, spacing);
}
TokenTree::Delimited(sp, delim, tts) => {
let frame = TokenCursorFrame::new(sp, delim, tts, self.skip_none_delims);
let frame = TokenCursorFrame::new(sp, delim, tts);
self.stack.push(mem::replace(&mut self.frame, frame));
}
}
@ -283,7 +320,6 @@ impl TokenCursor {
.cloned()
.collect::<TokenStream>()
},
self.skip_none_delims,
),
));
@ -372,26 +408,24 @@ impl<'a> Parser<'a> {
desugar_doc_comments: bool,
subparser_name: Option<&'static str>,
) -> Self {
let mut start_frame = TokenCursorFrame::new(DelimSpan::dummy(), token::NoDelim, tokens);
start_frame.open_delim = true;
start_frame.close_delim = true;
let mut parser = Parser {
sess,
token: Token::dummy(),
token_spacing: Spacing::Alone,
prev_token: Token::dummy(),
capture_cfg: false,
restrictions: Restrictions::empty(),
expected_tokens: Vec::new(),
// Skip over the delimiters for `None`-delimited groups
token_cursor: TokenCursor {
frame: TokenCursorFrame::new(
DelimSpan::dummy(),
token::NoDelim,
tokens,
/* skip_none_delims */ true,
),
frame: start_frame,
stack: Vec::new(),
num_next_calls: 0,
desugar_doc_comments,
append_unglued_token: None,
skip_none_delims: true,
break_last_token: false,
},
desugar_doc_comments,
unmatched_angle_bracket_count: 0,
@ -400,6 +434,11 @@ impl<'a> Parser<'a> {
last_unexpected_token_span: None,
last_type_ascription: None,
subparser_name,
capture_state: CaptureState {
capturing: Capturing::No,
replace_ranges: Vec::new(),
inner_attr_ranges: Default::default(),
},
};
// Make parser point to the first token.
@ -409,21 +448,29 @@ impl<'a> Parser<'a> {
}
fn next_tok(&mut self, fallback_span: Span) -> (Token, Spacing) {
let (mut next, spacing) = if self.desugar_doc_comments {
self.token_cursor.next_desugared()
} else {
self.token_cursor.next()
};
self.token_cursor.num_next_calls += 1;
// We've retrieved a token from the underlying
// cursor, so we no longer need to worry about
// an unglued token. See `break_and_eat` for more details
self.token_cursor.append_unglued_token = None;
if next.span.is_dummy() {
// Tweak the location for better diagnostics, but keep syntactic context intact.
next.span = fallback_span.with_ctxt(next.span.ctxt());
loop {
let (mut next, spacing) = if self.desugar_doc_comments {
self.token_cursor.next_desugared()
} else {
self.token_cursor.next()
};
self.token_cursor.num_next_calls += 1;
// We've retrieved a token from the underlying
// cursor, so we no longer need to worry about
// an unglued token. See `break_and_eat` for more details
self.token_cursor.break_last_token = false;
if next.span.is_dummy() {
// Tweak the location for better diagnostics, but keep syntactic context intact.
next.span = fallback_span.with_ctxt(next.span.ctxt());
}
if matches!(
next.kind,
token::OpenDelim(token::NoDelim) | token::CloseDelim(token::NoDelim)
) {
continue;
}
return (next, spacing);
}
(next, spacing)
}
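The loop above filters `NoDelim` markers out of the stream before they reach the parser. A sketch of the same skipping pattern over a toy token type (illustrative only):

#[derive(Debug, PartialEq)]
enum Tok {
    NoDelimOpen,
    NoDelimClose,
    Real(char),
}

fn next_skipping(iter: &mut impl Iterator<Item = Tok>) -> Option<Tok> {
    loop {
        match iter.next()? {
            // `NoDelim` delimiters are invisible to the parser.
            Tok::NoDelimOpen | Tok::NoDelimClose => continue,
            tok => return Some(tok),
        }
    }
}

fn main() {
    let mut toks = vec![Tok::NoDelimOpen, Tok::Real('a'), Tok::NoDelimClose].into_iter();
    assert_eq!(next_skipping(&mut toks), Some(Tok::Real('a')));
    assert_eq!(next_skipping(&mut toks), None);
}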
pub fn unexpected<T>(&mut self) -> PResult<'a, T> {
@ -621,8 +668,7 @@ impl<'a> Parser<'a> {
// If we consume any additional tokens, then this token
// is not needed (we'll capture the entire 'glued' token),
// and `next_tok` will reset `break_last_token`
self.token_cursor.append_unglued_token =
Some((TokenTree::Token(self.token.clone()), Spacing::Alone));
self.token_cursor.break_last_token = true;
// Use the spacing of the glued token as the spacing
// of the unglued second token.
self.bump_with((Token::new(second, second_span), self.token_spacing));
@ -1304,3 +1350,24 @@ pub fn emit_unclosed_delims(unclosed_delims: &mut Vec<UnmatchedBrace>, sess: &Pa
}
}
}
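A small sketch of the `break_last_token` bookkeeping, with plain byte offsets standing in for `Span`s (an assumed simplification): when a compound token like `>>` is broken and only the first `>` is consumed, the cursor is not bumped; the captured range is extended by one call and the final token is later re-synthesized as a one-byte token:

#[derive(Debug, PartialEq)]
struct Tok {
    text: &'static str,
    lo: u32, // byte offsets standing in for a `Span`
    hi: u32,
}

fn break_first(glued: &Tok) -> Tok {
    // An unglued token is always a single ASCII character.
    assert_eq!(glued.text, ">>");
    Tok { text: ">", lo: glued.lo, hi: glued.lo + 1 }
}

fn main() {
    let glued = Tok { text: ">>", lo: 10, hi: 12 };
    assert_eq!(break_first(&glued), Tok { text: ">", lo: 10, hi: 11 });
}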
/// A helper enum used when building an `AttrAnnotatedTokenStream` from
/// a `LazyTokenStream`. Both delimiter and non-delimiter tokens
/// are stored as `FlatToken::Token`. A vector of `FlatToken`s
/// is then 'parsed' to build up an `AttrAnnotatedTokenStream` with nested
/// `AttrAnnotatedTokenTree::Delimited` tokens.
#[derive(Debug, Clone)]
pub enum FlatToken {
/// A token - this holds both delimiter (e.g. '{' and '}')
/// and non-delimiter tokens
Token(Token),
/// Holds the `AttributesData` for an AST node. The
/// `AttributesData` is inserted directly into the
/// constructed `AttrAnnotatedTokenStream` as
/// an `AttrAnnotatedTokenTree::Attributes`.
AttrTarget(AttributesData),
/// A special 'empty' token that is ignored during the conversion
/// to an `AttrAnnotatedTokenStream`. This is used to simplify the
/// handling of replace ranges.
Empty,
}


@ -153,9 +153,7 @@ impl<'a> Parser<'a> {
NonterminalKind::Path => token::NtPath(
self.collect_tokens_no_attrs(|this| this.parse_path(PathStyle::Type))?,
),
NonterminalKind::Meta => {
token::NtMeta(P(self.collect_tokens_no_attrs(|this| this.parse_attr_item(false))?))
}
NonterminalKind::Meta => token::NtMeta(P(self.parse_attr_item(true)?)),
NonterminalKind::TT => token::NtTT(self.parse_token_tree()),
NonterminalKind::Vis => token::NtVis(
self.collect_tokens_no_attrs(|this| this.parse_visibility(FollowedByType::Yes))?,


@ -48,39 +48,26 @@ impl<'a> Parser<'a> {
if let token::Interpolated(nt) = &self.token.kind {
if let token::NtStmt(stmt) = &**nt {
let mut stmt = stmt.clone();
return self.collect_tokens_trailing_token(
attrs,
force_collect,
|this, mut attrs| {
stmt.visit_attrs(|stmt_attrs| {
mem::swap(stmt_attrs, &mut attrs);
stmt_attrs.extend(attrs);
});
// Make sure we capture the token::Interpolated
this.bump();
Ok((Some(stmt), TrailingToken::None))
},
);
self.bump();
stmt.visit_attrs(|stmt_attrs| {
attrs.prepend_to_nt_inner(stmt_attrs);
});
return Ok(Some(stmt));
}
}
Ok(Some(if self.token.is_keyword(kw::Let) {
self.parse_local_mk(lo, attrs, capture_semi, force_collect)?
} else if self.is_kw_followed_by_ident(kw::Mut) {
self.recover_stmt_local(
lo,
attrs.take_for_recovery().into(),
"missing keyword",
"let mut",
)?
self.recover_stmt_local(lo, attrs, "missing keyword", "let mut")?
} else if self.is_kw_followed_by_ident(kw::Auto) {
self.bump(); // `auto`
let msg = "write `let` instead of `auto` to introduce a new variable";
self.recover_stmt_local(lo, attrs.take_for_recovery().into(), msg, "let")?
self.recover_stmt_local(lo, attrs, msg, "let")?
} else if self.is_kw_followed_by_ident(sym::var) {
self.bump(); // `var`
let msg = "write `let` instead of `var` to introduce a new variable";
self.recover_stmt_local(lo, attrs.take_for_recovery().into(), msg, "let")?
self.recover_stmt_local(lo, attrs, msg, "let")?
} else if self.check_path() && !self.token.is_qpath_start() && !self.is_path_start_item() {
// We have avoided contextual keywords like `union`, items with `crate` visibility,
// or `auto trait` items. We aim to parse an arbitrary path `a::b` but not something
@ -112,7 +99,7 @@ impl<'a> Parser<'a> {
attrs: AttrWrapper,
force_collect: ForceCollect,
) -> PResult<'a, Stmt> {
self.collect_tokens_trailing_token(attrs, force_collect, |this, attrs| {
let stmt = self.collect_tokens_trailing_token(attrs, force_collect, |this, attrs| {
let path = this.parse_path(PathStyle::Expr)?;
if this.eat(&token::Not) {
@ -132,14 +119,22 @@ impl<'a> Parser<'a> {
};
let expr = this.with_res(Restrictions::STMT_EXPR, |this| {
let expr = this.parse_dot_or_call_expr_with(expr, lo, attrs)?;
this.parse_dot_or_call_expr_with(expr, lo, attrs)
})?;
// `DUMMY_SP` will get overwritten later in this function
Ok((this.mk_stmt(rustc_span::DUMMY_SP, StmtKind::Expr(expr)), TrailingToken::None))
})?;
if let StmtKind::Expr(expr) = stmt.kind {
// Perform this outside of the `collect_tokens_trailing_token` closure,
// since our outer attributes do not apply to this part of the expression
let expr = self.with_res(Restrictions::STMT_EXPR, |this| {
this.parse_assoc_expr_with(0, LhsExpr::AlreadyParsed(expr))
})?;
Ok((
this.mk_stmt(lo.to(this.prev_token.span), StmtKind::Expr(expr)),
TrailingToken::None,
))
})
Ok(self.mk_stmt(lo.to(self.prev_token.span), StmtKind::Expr(expr)))
} else {
Ok(stmt)
}
}
/// Parses a statement macro `mac!(args)` provided a `path` representing `mac`.
@ -183,7 +178,7 @@ impl<'a> Parser<'a> {
fn recover_stmt_local(
&mut self,
lo: Span,
attrs: AttrVec,
attrs: AttrWrapper,
msg: &str,
sugg: &str,
) -> PResult<'a, Stmt> {
@ -213,9 +208,15 @@ impl<'a> Parser<'a> {
})
}
fn recover_local_after_let(&mut self, lo: Span, attrs: AttrVec) -> PResult<'a, Stmt> {
let local = self.parse_local(attrs)?;
Ok(self.mk_stmt(lo.to(self.prev_token.span), StmtKind::Local(local)))
fn recover_local_after_let(&mut self, lo: Span, attrs: AttrWrapper) -> PResult<'a, Stmt> {
self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
let local = this.parse_local(attrs.into())?;
// FIXME - maybe capture semicolon in recovery?
Ok((
this.mk_stmt(lo.to(this.prev_token.span), StmtKind::Local(local)),
TrailingToken::None,
))
})
}
/// Parses a local variable declaration.


@ -1,7 +1,13 @@
use crate::parse::ParseSess;
use crate::session::Session;
use rustc_ast::token::{self, DelimToken, Nonterminal, Token};
use rustc_ast::tokenstream::CanSynthesizeMissingTokens;
use rustc_ast::tokenstream::{DelimSpan, TokenStream, TokenTree};
use rustc_data_structures::profiling::VerboseTimingGuard;
use std::path::{Path, PathBuf};
pub type NtToTokenstream = fn(&Nonterminal, &ParseSess, CanSynthesizeMissingTokens) -> TokenStream;
impl Session {
pub fn timer<'a>(&'a self, what: &'static str) -> VerboseTimingGuard<'a> {
self.prof.verbose_generic_activity(what)
@ -53,3 +59,52 @@ impl CanonicalizedPath {
&self.original
}
}
// FIXME: Find a better spot for this - it needs to be accessible from `rustc_ast_lowering`,
// and needs to access `ParseSess`.
pub struct FlattenNonterminals<'a> {
pub parse_sess: &'a ParseSess,
pub synthesize_tokens: CanSynthesizeMissingTokens,
pub nt_to_tokenstream: NtToTokenstream,
}
impl<'a> FlattenNonterminals<'a> {
pub fn process_token_stream(&mut self, tokens: TokenStream) -> TokenStream {
fn can_skip(stream: &TokenStream) -> bool {
stream.trees().all(|tree| match tree {
TokenTree::Token(token) => !matches!(token.kind, token::Interpolated(_)),
TokenTree::Delimited(_, _, inner) => can_skip(&inner),
})
}
if can_skip(&tokens) {
return tokens;
}
tokens.into_trees().flat_map(|tree| self.process_token_tree(tree).into_trees()).collect()
}
pub fn process_token_tree(&mut self, tree: TokenTree) -> TokenStream {
match tree {
TokenTree::Token(token) => self.process_token(token),
TokenTree::Delimited(span, delim, tts) => {
TokenTree::Delimited(span, delim, self.process_token_stream(tts)).into()
}
}
}
pub fn process_token(&mut self, token: Token) -> TokenStream {
match token.kind {
token::Interpolated(nt) => {
let tts = (self.nt_to_tokenstream)(&nt, self.parse_sess, self.synthesize_tokens);
TokenTree::Delimited(
DelimSpan::from_single(token.span),
DelimToken::NoDelim,
self.process_token_stream(tts),
)
.into()
}
_ => TokenTree::Token(token).into(),
}
}
}
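A self-contained sketch of the `can_skip` fast path above, over a toy tree type: a stream is returned untouched unless it transitively contains an interpolated token (the real code wraps an expanded nonterminal in a `None`-delimited group rather than splicing it in bare, as done here for brevity):

#[derive(Debug)]
enum Tree {
    Token(char),
    Interp(Vec<Tree>), // an interpolated nonterminal carrying its expansion
    Delimited(Vec<Tree>),
}

fn can_skip(stream: &[Tree]) -> bool {
    stream.iter().all(|tree| match tree {
        Tree::Token(_) => true,
        Tree::Interp(_) => false,
        Tree::Delimited(inner) => can_skip(inner),
    })
}

fn process(stream: Vec<Tree>) -> Vec<Tree> {
    if can_skip(&stream) {
        // Fast path: avoid rebuilding streams with no interpolated tokens.
        return stream;
    }
    stream
        .into_iter()
        .flat_map(|tree| match tree {
            Tree::Interp(tts) => process(tts),
            Tree::Delimited(inner) => vec![Tree::Delimited(process(inner))],
            tok => vec![tok],
        })
        .collect()
}

fn main() {
    let stream = vec![Tree::Token('a'), Tree::Interp(vec![Tree::Token('b')])];
    let flat = process(stream);
    assert!(matches!(flat[1], Tree::Token('b')));
}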


@ -80,67 +80,67 @@ PRINT-ATTR INPUT (DISPLAY): impl < T > MyTrait < T > for MyStruct < { true } > {
PRINT-ATTR INPUT (DEBUG): TokenStream [
Ident {
ident: "impl",
span: $DIR/attr-complex-fn.rs:21:1: 24:2 (#0),
span: $DIR/attr-complex-fn.rs:21:1: 21:5 (#0),
},
Punct {
ch: '<',
spacing: Alone,
span: $DIR/attr-complex-fn.rs:21:1: 24:2 (#0),
span: $DIR/attr-complex-fn.rs:21:5: 21:6 (#0),
},
Ident {
ident: "T",
span: $DIR/attr-complex-fn.rs:21:1: 24:2 (#0),
span: $DIR/attr-complex-fn.rs:21:6: 21:7 (#0),
},
Punct {
ch: '>',
spacing: Alone,
span: $DIR/attr-complex-fn.rs:21:1: 24:2 (#0),
span: $DIR/attr-complex-fn.rs:21:7: 21:8 (#0),
},
Ident {
ident: "MyTrait",
span: $DIR/attr-complex-fn.rs:21:1: 24:2 (#0),
span: $DIR/attr-complex-fn.rs:21:9: 21:16 (#0),
},
Punct {
ch: '<',
spacing: Alone,
span: $DIR/attr-complex-fn.rs:21:1: 24:2 (#0),
span: $DIR/attr-complex-fn.rs:21:16: 21:17 (#0),
},
Ident {
ident: "T",
span: $DIR/attr-complex-fn.rs:21:1: 24:2 (#0),
span: $DIR/attr-complex-fn.rs:21:17: 21:18 (#0),
},
Punct {
ch: '>',
spacing: Alone,
span: $DIR/attr-complex-fn.rs:21:1: 24:2 (#0),
span: $DIR/attr-complex-fn.rs:21:18: 21:19 (#0),
},
Ident {
ident: "for",
span: $DIR/attr-complex-fn.rs:21:1: 24:2 (#0),
span: $DIR/attr-complex-fn.rs:21:20: 21:23 (#0),
},
Ident {
ident: "MyStruct",
span: $DIR/attr-complex-fn.rs:21:1: 24:2 (#0),
span: $DIR/attr-complex-fn.rs:21:24: 21:32 (#0),
},
Punct {
ch: '<',
spacing: Alone,
span: $DIR/attr-complex-fn.rs:21:1: 24:2 (#0),
span: $DIR/attr-complex-fn.rs:21:32: 21:33 (#0),
},
Group {
delimiter: Brace,
stream: TokenStream [
Ident {
ident: "true",
span: $DIR/attr-complex-fn.rs:21:1: 24:2 (#0),
span: $DIR/attr-complex-fn.rs:21:34: 21:38 (#0),
},
],
span: $DIR/attr-complex-fn.rs:21:1: 24:2 (#0),
span: $DIR/attr-complex-fn.rs:21:33: 21:39 (#0),
},
Punct {
ch: '>',
spacing: Alone,
span: $DIR/attr-complex-fn.rs:21:1: 24:2 (#0),
span: $DIR/attr-complex-fn.rs:21:39: 21:40 (#0),
},
Group {
delimiter: Brace,
@ -148,24 +148,24 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [
Punct {
ch: '#',
spacing: Joint,
span: $DIR/attr-complex-fn.rs:21:1: 24:2 (#0),
span: $DIR/attr-complex-fn.rs:23:5: 23:6 (#0),
},
Punct {
ch: '!',
spacing: Alone,
span: $DIR/attr-complex-fn.rs:21:1: 24:2 (#0),
span: $DIR/attr-complex-fn.rs:23:6: 23:7 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "rustc_dummy",
span: $DIR/attr-complex-fn.rs:21:1: 24:2 (#0),
span: $DIR/attr-complex-fn.rs:23:8: 23:19 (#0),
},
],
span: $DIR/attr-complex-fn.rs:21:1: 24:2 (#0),
span: $DIR/attr-complex-fn.rs:23:7: 23:20 (#0),
},
],
span: $DIR/attr-complex-fn.rs:21:1: 24:2 (#0),
span: $DIR/attr-complex-fn.rs:21:41: 24:2 (#0),
},
]


@ -87,16 +87,16 @@ PRINT-DERIVE INPUT (DISPLAY): struct AttributeDerive { }
PRINT-DERIVE INPUT (DEBUG): TokenStream [
Ident {
ident: "struct",
span: $DIR/attribute-after-derive.rs:18:1: 21:2 (#0),
span: $DIR/attribute-after-derive.rs:18:1: 18:7 (#0),
},
Ident {
ident: "AttributeDerive",
span: $DIR/attribute-after-derive.rs:18:1: 21:2 (#0),
span: $DIR/attribute-after-derive.rs:18:8: 18:23 (#0),
},
Group {
delimiter: Brace,
stream: TokenStream [],
span: $DIR/attribute-after-derive.rs:18:1: 21:2 (#0),
span: $DIR/attribute-after-derive.rs:18:24: 21:2 (#0),
},
]
PRINT-DERIVE INPUT (DISPLAY): #[print_attr] struct DeriveAttribute { }
@ -104,45 +104,45 @@ PRINT-DERIVE INPUT (DEBUG): TokenStream [
Punct {
ch: '#',
spacing: Alone,
span: $DIR/attribute-after-derive.rs:25:1: 28:2 (#0),
span: $DIR/attribute-after-derive.rs:24:1: 24:2 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "print_attr",
span: $DIR/attribute-after-derive.rs:25:1: 28:2 (#0),
span: $DIR/attribute-after-derive.rs:24:3: 24:13 (#0),
},
],
span: $DIR/attribute-after-derive.rs:25:1: 28:2 (#0),
span: $DIR/attribute-after-derive.rs:24:2: 24:14 (#0),
},
Ident {
ident: "struct",
span: $DIR/attribute-after-derive.rs:25:1: 28:2 (#0),
span: $DIR/attribute-after-derive.rs:25:1: 25:7 (#0),
},
Ident {
ident: "DeriveAttribute",
span: $DIR/attribute-after-derive.rs:25:1: 28:2 (#0),
span: $DIR/attribute-after-derive.rs:25:8: 25:23 (#0),
},
Group {
delimiter: Brace,
stream: TokenStream [],
span: $DIR/attribute-after-derive.rs:25:1: 28:2 (#0),
span: $DIR/attribute-after-derive.rs:25:24: 28:2 (#0),
},
]
PRINT-ATTR INPUT (DISPLAY): struct DeriveAttribute { }
PRINT-ATTR INPUT (DEBUG): TokenStream [
Ident {
ident: "struct",
span: $DIR/attribute-after-derive.rs:25:1: 28:2 (#0),
span: $DIR/attribute-after-derive.rs:25:1: 25:7 (#0),
},
Ident {
ident: "DeriveAttribute",
span: $DIR/attribute-after-derive.rs:25:1: 28:2 (#0),
span: $DIR/attribute-after-derive.rs:25:8: 25:23 (#0),
},
Group {
delimiter: Brace,
stream: TokenStream [],
span: $DIR/attribute-after-derive.rs:25:1: 28:2 (#0),
span: $DIR/attribute-after-derive.rs:25:24: 28:2 (#0),
},
]


@ -2,251 +2,246 @@ PRINT-ATTR INPUT (DISPLAY): impl Foo <
[u8 ;
{
# ! [rustc_dummy(cursed_inner)] # ! [allow(unused)] struct Inner
{ field : [u8 ; { # ! [rustc_dummy(another_cursed_inner)] 1 }], } 0
{ field : [u8 ; { # ! [rustc_dummy(another_cursed_inner)] 1 }] } 0
}] > { # ! [rustc_dummy(evaluated_attr)] fn bar() { } }
PRINT-ATTR INPUT (DEBUG): TokenStream [
Ident {
ident: "impl",
span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
span: $DIR/cfg-eval-inner.rs:18:1: 18:5 (#0),
},
Ident {
ident: "Foo",
span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
span: $DIR/cfg-eval-inner.rs:18:6: 18:9 (#0),
},
Punct {
ch: '<',
spacing: Alone,
span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
span: $DIR/cfg-eval-inner.rs:18:9: 18:10 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "u8",
span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
span: $DIR/cfg-eval-inner.rs:18:11: 18:13 (#0),
},
Punct {
ch: ';',
spacing: Alone,
span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
span: $DIR/cfg-eval-inner.rs:18:13: 18:14 (#0),
},
Group {
delimiter: Brace,
stream: TokenStream [
Punct {
ch: '#',
spacing: Joint,
span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
spacing: Alone,
span: $DIR/cfg-eval-inner.rs:19:5: 19:6 (#0),
},
Punct {
ch: '!',
spacing: Alone,
span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
span: $DIR/cfg-eval-inner.rs:19:6: 19:7 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "rustc_dummy",
span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
span: $DIR/cfg-eval-inner.rs:19:29: 19:40 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "cursed_inner",
span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
span: $DIR/cfg-eval-inner.rs:19:41: 19:53 (#0),
},
],
span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
span: $DIR/cfg-eval-inner.rs:19:40: 19:54 (#0),
},
],
span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
span: $DIR/cfg-eval-inner.rs:19:5: 19:6 (#0),
},
Punct {
ch: '#',
spacing: Joint,
span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
span: $DIR/cfg-eval-inner.rs:20:5: 20:6 (#0),
},
Punct {
ch: '!',
spacing: Alone,
span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
span: $DIR/cfg-eval-inner.rs:20:6: 20:7 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "allow",
span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
span: $DIR/cfg-eval-inner.rs:20:8: 20:13 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "unused",
span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
span: $DIR/cfg-eval-inner.rs:20:14: 20:20 (#0),
},
],
span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
span: $DIR/cfg-eval-inner.rs:20:13: 20:21 (#0),
},
],
span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
span: $DIR/cfg-eval-inner.rs:20:7: 20:22 (#0),
},
Ident {
ident: "struct",
span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
span: $DIR/cfg-eval-inner.rs:21:5: 21:11 (#0),
},
Ident {
ident: "Inner",
span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
span: $DIR/cfg-eval-inner.rs:21:12: 21:17 (#0),
},
Group {
delimiter: Brace,
stream: TokenStream [
Ident {
ident: "field",
span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
span: $DIR/cfg-eval-inner.rs:22:9: 22:14 (#0),
},
Punct {
ch: ':',
spacing: Alone,
span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
span: $DIR/cfg-eval-inner.rs:22:14: 22:15 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "u8",
span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
span: $DIR/cfg-eval-inner.rs:22:17: 22:19 (#0),
},
Punct {
ch: ';',
spacing: Alone,
span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
span: $DIR/cfg-eval-inner.rs:22:19: 22:20 (#0),
},
Group {
delimiter: Brace,
stream: TokenStream [
Punct {
ch: '#',
spacing: Joint,
span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
spacing: Alone,
span: $DIR/cfg-eval-inner.rs:23:13: 23:14 (#0),
},
Punct {
ch: '!',
spacing: Alone,
span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
span: $DIR/cfg-eval-inner.rs:23:14: 23:15 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "rustc_dummy",
span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
span: $DIR/cfg-eval-inner.rs:23:37: 23:48 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "another_cursed_inner",
span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
span: $DIR/cfg-eval-inner.rs:23:49: 23:69 (#0),
},
],
span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
span: $DIR/cfg-eval-inner.rs:23:48: 23:70 (#0),
},
],
span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
span: $DIR/cfg-eval-inner.rs:23:13: 23:14 (#0),
},
Literal {
kind: Integer,
symbol: "1",
suffix: None,
span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
span: $DIR/cfg-eval-inner.rs:24:13: 24:14 (#0),
},
],
span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
span: $DIR/cfg-eval-inner.rs:22:21: 25:10 (#0),
},
],
span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
},
Punct {
ch: ',',
spacing: Alone,
span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
span: $DIR/cfg-eval-inner.rs:22:16: 25:11 (#0),
},
],
span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
span: $DIR/cfg-eval-inner.rs:21:18: 26:6 (#0),
},
Literal {
kind: Integer,
symbol: "0",
suffix: None,
span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
span: $DIR/cfg-eval-inner.rs:28:5: 28:6 (#0),
},
],
span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
span: $DIR/cfg-eval-inner.rs:18:15: 29:2 (#0),
},
],
span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
span: $DIR/cfg-eval-inner.rs:18:10: 29:3 (#0),
},
Punct {
ch: '>',
spacing: Alone,
span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
span: $DIR/cfg-eval-inner.rs:29:3: 29:4 (#0),
},
Group {
delimiter: Brace,
stream: TokenStream [
Punct {
ch: '#',
spacing: Joint,
span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
spacing: Alone,
span: $DIR/cfg-eval-inner.rs:32:5: 32:6 (#0),
},
Punct {
ch: '!',
spacing: Alone,
span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
span: $DIR/cfg-eval-inner.rs:32:6: 32:7 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "rustc_dummy",
span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
span: $DIR/cfg-eval-inner.rs:32:29: 32:40 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "evaluated_attr",
span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
span: $DIR/cfg-eval-inner.rs:32:41: 32:55 (#0),
},
],
span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
span: $DIR/cfg-eval-inner.rs:32:40: 32:56 (#0),
},
],
span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
span: $DIR/cfg-eval-inner.rs:32:5: 32:6 (#0),
},
Ident {
ident: "fn",
span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
span: $DIR/cfg-eval-inner.rs:34:5: 34:7 (#0),
},
Ident {
ident: "bar",
span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
span: $DIR/cfg-eval-inner.rs:34:8: 34:11 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [],
span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
span: $DIR/cfg-eval-inner.rs:34:11: 34:13 (#0),
},
Group {
delimiter: Brace,
stream: TokenStream [],
span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
span: $DIR/cfg-eval-inner.rs:34:14: 36:6 (#0),
},
],
span: $DIR/cfg-eval-inner.rs:18:1: 37:2 (#0),
span: $DIR/cfg-eval-inner.rs:29:5: 37:2 (#0),
},
]


@ -2,11 +2,11 @@ PRINT-ATTR INPUT (DISPLAY): struct S1 { #[cfg(all())] #[allow()] field_true : u8
PRINT-ATTR INPUT (DEBUG): TokenStream [
Ident {
ident: "struct",
span: $DIR/cfg-eval.rs:17:1: 24:2 (#0),
span: $DIR/cfg-eval.rs:17:1: 17:7 (#0),
},
Ident {
ident: "S1",
span: $DIR/cfg-eval.rs:17:1: 24:2 (#0),
span: $DIR/cfg-eval.rs:17:8: 17:10 (#0),
},
Group {
delimiter: Brace,
@ -14,73 +14,73 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [
Punct {
ch: '#',
spacing: Alone,
span: $DIR/cfg-eval.rs:17:1: 24:2 (#0),
span: $DIR/cfg-eval.rs:20:5: 20:6 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "cfg",
span: $DIR/cfg-eval.rs:17:1: 24:2 (#0),
span: $DIR/cfg-eval.rs:20:7: 20:10 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "all",
span: $DIR/cfg-eval.rs:17:1: 24:2 (#0),
span: $DIR/cfg-eval.rs:20:11: 20:14 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [],
span: $DIR/cfg-eval.rs:17:1: 24:2 (#0),
span: $DIR/cfg-eval.rs:20:14: 20:24 (#0),
},
],
span: $DIR/cfg-eval.rs:17:1: 24:2 (#0),
span: $DIR/cfg-eval.rs:20:10: 20:25 (#0),
},
],
span: $DIR/cfg-eval.rs:17:1: 24:2 (#0),
span: $DIR/cfg-eval.rs:20:6: 20:26 (#0),
},
Punct {
ch: '#',
spacing: Alone,
span: $DIR/cfg-eval.rs:17:1: 24:2 (#0),
span: $DIR/cfg-eval.rs:22:5: 22:6 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "allow",
span: $DIR/cfg-eval.rs:17:1: 24:2 (#0),
span: $DIR/cfg-eval.rs:22:31: 22:36 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [],
span: $DIR/cfg-eval.rs:17:1: 24:2 (#0),
span: $DIR/cfg-eval.rs:22:36: 22:38 (#0),
},
],
span: $DIR/cfg-eval.rs:17:1: 24:2 (#0),
span: $DIR/cfg-eval.rs:22:5: 22:6 (#0),
},
Ident {
ident: "field_true",
span: $DIR/cfg-eval.rs:17:1: 24:2 (#0),
span: $DIR/cfg-eval.rs:23:5: 23:15 (#0),
},
Punct {
ch: ':',
spacing: Alone,
span: $DIR/cfg-eval.rs:17:1: 24:2 (#0),
span: $DIR/cfg-eval.rs:23:15: 23:16 (#0),
},
Ident {
ident: "u8",
span: $DIR/cfg-eval.rs:17:1: 24:2 (#0),
span: $DIR/cfg-eval.rs:23:17: 23:19 (#0),
},
Punct {
ch: ',',
spacing: Alone,
span: $DIR/cfg-eval.rs:17:1: 24:2 (#0),
span: $DIR/cfg-eval.rs:23:19: 23:20 (#0),
},
],
span: $DIR/cfg-eval.rs:17:1: 24:2 (#0),
span: $DIR/cfg-eval.rs:17:11: 24:2 (#0),
},
]
PRINT-ATTR INPUT (DISPLAY): #[rustc_dummy] (#[cfg(all())] 1,)
@ -88,17 +88,17 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [
Punct {
ch: '#',
spacing: Alone,
span: $DIR/cfg-eval.rs:36:5: 36:48 (#0),
span: $DIR/cfg-eval.rs:35:39: 35:40 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "rustc_dummy",
span: $DIR/cfg-eval.rs:36:5: 36:48 (#0),
span: $DIR/cfg-eval.rs:35:62: 35:73 (#0),
},
],
span: $DIR/cfg-eval.rs:36:5: 36:48 (#0),
span: $DIR/cfg-eval.rs:35:39: 35:40 (#0),
},
Group {
delimiter: Parenthesis,
@ -106,43 +106,43 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [
Punct {
ch: '#',
spacing: Alone,
span: $DIR/cfg-eval.rs:36:5: 36:48 (#0),
span: $DIR/cfg-eval.rs:36:23: 36:24 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "cfg",
span: $DIR/cfg-eval.rs:36:5: 36:48 (#0),
span: $DIR/cfg-eval.rs:36:25: 36:28 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "all",
span: $DIR/cfg-eval.rs:36:5: 36:48 (#0),
span: $DIR/cfg-eval.rs:36:29: 36:32 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [],
span: $DIR/cfg-eval.rs:36:5: 36:48 (#0),
span: $DIR/cfg-eval.rs:36:32: 36:42 (#0),
},
],
span: $DIR/cfg-eval.rs:36:5: 36:48 (#0),
span: $DIR/cfg-eval.rs:36:28: 36:43 (#0),
},
],
span: $DIR/cfg-eval.rs:36:5: 36:48 (#0),
span: $DIR/cfg-eval.rs:36:24: 36:44 (#0),
},
Literal {
kind: Integer,
symbol: "1",
suffix: None,
span: $DIR/cfg-eval.rs:36:5: 36:48 (#0),
span: $DIR/cfg-eval.rs:36:45: 36:46 (#0),
},
Punct {
ch: ',',
spacing: Alone,
span: $DIR/cfg-eval.rs:36:5: 36:48 (#0),
span: $DIR/cfg-eval.rs:36:46: 36:47 (#0),
},
],
span: $DIR/cfg-eval.rs:36:5: 36:48 (#0),


@ -1,40 +1,40 @@
PRINT-DERIVE INPUT (DISPLAY): struct Foo
{
field :
[bool ; { #[rustc_dummy] struct Inner { other_inner_field : u8, } 0 }],
[bool ; { #[rustc_dummy] struct Inner { other_inner_field : u8, } 0 }]
}
PRINT-DERIVE INPUT (DEBUG): TokenStream [
Ident {
ident: "struct",
span: $DIR/expand-to-derive.rs:16:9: 22:10 (#4),
span: $DIR/expand-to-derive.rs:16:9: 16:15 (#4),
},
Ident {
ident: "Foo",
span: $DIR/expand-to-derive.rs:16:9: 22:10 (#4),
span: $DIR/expand-to-derive.rs:16:16: 16:19 (#4),
},
Group {
delimiter: Brace,
stream: TokenStream [
Ident {
ident: "field",
span: $DIR/expand-to-derive.rs:16:9: 22:10 (#4),
span: $DIR/expand-to-derive.rs:18:13: 18:18 (#4),
},
Punct {
ch: ':',
spacing: Alone,
span: $DIR/expand-to-derive.rs:16:9: 22:10 (#4),
span: $DIR/expand-to-derive.rs:18:18: 18:19 (#4),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "bool",
span: $DIR/expand-to-derive.rs:16:9: 22:10 (#4),
span: $DIR/expand-to-derive.rs:18:21: 18:25 (#4),
},
Punct {
ch: ';',
spacing: Alone,
span: $DIR/expand-to-derive.rs:16:9: 22:10 (#4),
span: $DIR/expand-to-derive.rs:18:25: 18:26 (#4),
},
Group {
delimiter: Brace,
@ -42,68 +42,63 @@ PRINT-DERIVE INPUT (DEBUG): TokenStream [
Punct {
ch: '#',
spacing: Alone,
span: $DIR/expand-to-derive.rs:16:9: 22:10 (#4),
span: $DIR/expand-to-derive.rs:27:5: 27:6 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "rustc_dummy",
span: $DIR/expand-to-derive.rs:16:9: 22:10 (#4),
span: $DIR/expand-to-derive.rs:27:28: 27:39 (#0),
},
],
span: $DIR/expand-to-derive.rs:16:9: 22:10 (#4),
span: $DIR/expand-to-derive.rs:27:5: 27:6 (#0),
},
Ident {
ident: "struct",
span: $DIR/expand-to-derive.rs:16:9: 22:10 (#4),
span: $DIR/expand-to-derive.rs:28:5: 28:11 (#0),
},
Ident {
ident: "Inner",
span: $DIR/expand-to-derive.rs:16:9: 22:10 (#4),
span: $DIR/expand-to-derive.rs:28:12: 28:17 (#0),
},
Group {
delimiter: Brace,
stream: TokenStream [
Ident {
ident: "other_inner_field",
span: $DIR/expand-to-derive.rs:16:9: 22:10 (#4),
span: $DIR/expand-to-derive.rs:30:9: 30:26 (#0),
},
Punct {
ch: ':',
spacing: Alone,
span: $DIR/expand-to-derive.rs:16:9: 22:10 (#4),
span: $DIR/expand-to-derive.rs:30:26: 30:27 (#0),
},
Ident {
ident: "u8",
span: $DIR/expand-to-derive.rs:16:9: 22:10 (#4),
span: $DIR/expand-to-derive.rs:30:28: 30:30 (#0),
},
Punct {
ch: ',',
spacing: Alone,
span: $DIR/expand-to-derive.rs:16:9: 22:10 (#4),
span: $DIR/expand-to-derive.rs:30:30: 30:31 (#0),
},
],
span: $DIR/expand-to-derive.rs:16:9: 22:10 (#4),
span: $DIR/expand-to-derive.rs:28:18: 31:6 (#0),
},
Literal {
kind: Integer,
symbol: "0",
suffix: None,
span: $DIR/expand-to-derive.rs:16:9: 22:10 (#4),
span: $DIR/expand-to-derive.rs:20:17: 20:18 (#4),
},
],
span: $DIR/expand-to-derive.rs:16:9: 22:10 (#4),
span: $DIR/expand-to-derive.rs:18:27: 21:14 (#4),
},
],
span: $DIR/expand-to-derive.rs:16:9: 22:10 (#4),
},
Punct {
ch: ',',
spacing: Alone,
span: $DIR/expand-to-derive.rs:16:9: 22:10 (#4),
span: $DIR/expand-to-derive.rs:18:20: 21:15 (#4),
},
],
span: $DIR/expand-to-derive.rs:16:9: 22:10 (#4),
span: $DIR/expand-to-derive.rs:16:20: 22:10 (#4),
},
]


@ -37,7 +37,7 @@ struct MyDerivePrint {
#![cfg_attr(not(FALSE), rustc_dummy(first))]
#![cfg_attr(not(FALSE), rustc_dummy(second))]
_ => {
#![cfg_attr(not(FALSE), rustc_dummy(second))]
#![cfg_attr(not(FALSE), rustc_dummy(third))]
true
}
};

File diff suppressed because it is too large


@ -1279,152 +1279,152 @@ PRINT-DERIVE INPUT (DISPLAY): #[print_helper(a)] #[allow(dead_code)] #[print_hel
[u8 ;
{
#[cfg(not(FALSE))] struct Inner ; match true
{ #[allow(warnings)] false => { } _ => { } } ; #[print_helper(c)]
{ #[allow(warnings)] false => { }, _ => { } } ; #[print_helper(c)]
#[cfg(not(FALSE))] fn kept_fn()
{ # ! [cfg(not(FALSE))] let my_val = true ; } enum TupleEnum
{ Foo(#[cfg(not(FALSE))] i32, u8), } struct
{ Foo(#[cfg(not(FALSE))] i32, u8) } struct
TupleStruct(#[cfg(not(FALSE))] i32, u8) ; 0
}], #[print_helper(d)] fourth : B,
}], #[print_helper(d)] fourth : B
}
PRINT-DERIVE INPUT (DEBUG): TokenStream [
Punct {
ch: '#',
spacing: Alone,
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:19:1: 19:2 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "print_helper",
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:19:3: 19:15 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "a",
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:19:16: 19:17 (#0),
},
],
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:19:15: 19:18 (#0),
},
],
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:19:2: 19:19 (#0),
},
Punct {
ch: '#',
spacing: Alone,
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:21:1: 21:2 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "allow",
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:21:24: 21:29 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "dead_code",
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:21:30: 21:39 (#0),
},
],
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:21:29: 21:40 (#0),
},
],
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:21:1: 21:2 (#0),
},
Punct {
ch: '#',
spacing: Alone,
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:24:1: 24:2 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "print_helper",
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:24:3: 24:15 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "b",
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:24:16: 24:17 (#0),
},
],
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:24:15: 24:18 (#0),
},
],
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:24:2: 24:19 (#0),
},
Ident {
ident: "struct",
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:25:1: 25:7 (#0),
},
Ident {
ident: "Foo",
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:25:8: 25:11 (#0),
},
Punct {
ch: '<',
spacing: Alone,
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
spacing: Joint,
span: $DIR/issue-75930-derive-cfg.rs:25:11: 25:12 (#0),
},
Ident {
ident: "B",
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:25:29: 25:30 (#0),
},
Punct {
ch: '>',
spacing: Alone,
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:25:30: 25:31 (#0),
},
Group {
delimiter: Brace,
stream: TokenStream [
Ident {
ident: "second",
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:27:40: 27:46 (#0),
},
Punct {
ch: ':',
spacing: Alone,
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:27:46: 27:47 (#0),
},
Ident {
ident: "bool",
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:27:48: 27:52 (#0),
},
Punct {
ch: ',',
spacing: Alone,
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:27:52: 27:53 (#0),
},
Ident {
ident: "third",
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:28:5: 28:10 (#0),
},
Punct {
ch: ':',
spacing: Alone,
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:28:10: 28:11 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "u8",
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:28:13: 28:15 (#0),
},
Punct {
ch: ';',
spacing: Alone,
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:28:15: 28:16 (#0),
},
Group {
delimiter: Brace,
@@ -1432,58 +1432,58 @@ PRINT-DERIVE INPUT (DEBUG): TokenStream [
Punct {
ch: '#',
spacing: Alone,
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:30:9: 30:10 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "cfg",
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:30:11: 30:14 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "not",
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:30:15: 30:18 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "FALSE",
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:30:19: 30:24 (#0),
},
],
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:30:18: 30:25 (#0),
},
],
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:30:14: 30:26 (#0),
},
],
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:30:10: 30:27 (#0),
},
Ident {
ident: "struct",
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:30:28: 30:34 (#0),
},
Ident {
ident: "Inner",
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:30:35: 30:40 (#0),
},
Punct {
ch: ';',
spacing: Alone,
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:30:40: 30:41 (#0),
},
Ident {
ident: "match",
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:32:9: 32:14 (#0),
},
Ident {
ident: "true",
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:32:15: 32:19 (#0),
},
Group {
delimiter: Brace,
@@ -1491,146 +1491,151 @@ PRINT-DERIVE INPUT (DEBUG): TokenStream [
Punct {
ch: '#',
spacing: Alone,
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:34:13: 34:14 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "allow",
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:34:36: 34:41 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "warnings",
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:34:42: 34:50 (#0),
},
],
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:34:41: 34:51 (#0),
},
],
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:34:13: 34:14 (#0),
},
Ident {
ident: "false",
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:34:54: 34:59 (#0),
},
Punct {
ch: '=',
spacing: Joint,
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:34:60: 34:62 (#0),
},
Punct {
ch: '>',
spacing: Alone,
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:34:60: 34:62 (#0),
},
Group {
delimiter: Brace,
stream: TokenStream [],
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:34:63: 34:65 (#0),
},
Punct {
ch: ',',
spacing: Alone,
span: $DIR/issue-75930-derive-cfg.rs:34:65: 34:66 (#0),
},
Ident {
ident: "_",
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:35:13: 35:14 (#0),
},
Punct {
ch: '=',
spacing: Joint,
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:35:15: 35:17 (#0),
},
Punct {
ch: '>',
spacing: Alone,
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:35:15: 35:17 (#0),
},
Group {
delimiter: Brace,
stream: TokenStream [],
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:35:18: 35:20 (#0),
},
],
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:32:20: 36:10 (#0),
},
Punct {
ch: ';',
spacing: Alone,
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:36:10: 36:11 (#0),
},
Punct {
ch: '#',
spacing: Alone,
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:43:9: 43:10 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "print_helper",
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:43:11: 43:23 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "c",
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:43:24: 43:25 (#0),
},
],
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:43:23: 43:26 (#0),
},
],
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:43:10: 43:27 (#0),
},
Punct {
ch: '#',
spacing: Alone,
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:43:28: 43:29 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "cfg",
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:43:30: 43:33 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "not",
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:43:34: 43:37 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "FALSE",
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:43:38: 43:43 (#0),
},
],
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:43:37: 43:44 (#0),
},
],
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:43:33: 43:45 (#0),
},
],
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:43:29: 43:46 (#0),
},
Ident {
ident: "fn",
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:43:47: 43:49 (#0),
},
Ident {
ident: "kept_fn",
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:43:50: 43:57 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [],
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:43:57: 43:59 (#0),
},
Group {
delimiter: Brace,
@@ -1638,82 +1643,82 @@ PRINT-DERIVE INPUT (DEBUG): TokenStream [
Punct {
ch: '#',
spacing: Joint,
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:44:13: 44:14 (#0),
},
Punct {
ch: '!',
spacing: Alone,
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:44:14: 44:15 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "cfg",
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:44:16: 44:19 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "not",
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:44:20: 44:23 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "FALSE",
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:44:24: 44:29 (#0),
},
],
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:44:23: 44:30 (#0),
},
],
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:44:19: 44:31 (#0),
},
],
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:44:15: 44:32 (#0),
},
Ident {
ident: "let",
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:45:13: 45:16 (#0),
},
Ident {
ident: "my_val",
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:45:17: 45:23 (#0),
},
Punct {
ch: '=',
spacing: Alone,
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:45:24: 45:25 (#0),
},
Ident {
ident: "true",
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:45:26: 45:30 (#0),
},
Punct {
ch: ';',
spacing: Alone,
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:45:30: 45:31 (#0),
},
],
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:43:60: 46:10 (#0),
},
Ident {
ident: "enum",
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:48:9: 48:13 (#0),
},
Ident {
ident: "TupleEnum",
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:48:14: 48:23 (#0),
},
Group {
delimiter: Brace,
stream: TokenStream [
Ident {
ident: "Foo",
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:49:13: 49:16 (#0),
},
Group {
delimiter: Parenthesis,
@@ -1721,69 +1726,64 @@ PRINT-DERIVE INPUT (DEBUG): TokenStream [
Punct {
ch: '#',
spacing: Alone,
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:52:17: 52:18 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "cfg",
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:52:19: 52:22 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "not",
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:52:23: 52:26 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "FALSE",
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:52:27: 52:32 (#0),
},
],
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:52:26: 52:33 (#0),
},
],
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:52:22: 52:34 (#0),
},
],
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:52:18: 52:35 (#0),
},
Ident {
ident: "i32",
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:52:36: 52:39 (#0),
},
Punct {
ch: ',',
spacing: Alone,
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:52:39: 52:40 (#0),
},
Ident {
ident: "u8",
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:53:39: 53:41 (#0),
},
],
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
},
Punct {
ch: ',',
spacing: Alone,
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:49:16: 54:14 (#0),
},
],
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:48:24: 55:10 (#0),
},
Ident {
ident: "struct",
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:57:9: 57:15 (#0),
},
Ident {
ident: "TupleStruct",
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:57:16: 57:27 (#0),
},
Group {
delimiter: Parenthesis,
@@ -1791,120 +1791,115 @@ PRINT-DERIVE INPUT (DEBUG): TokenStream [
Punct {
ch: '#',
spacing: Alone,
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:59:13: 59:14 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "cfg",
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:59:15: 59:18 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "not",
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:59:19: 59:22 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "FALSE",
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:59:23: 59:28 (#0),
},
],
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:59:22: 59:29 (#0),
},
],
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:59:18: 59:30 (#0),
},
],
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:59:14: 59:31 (#0),
},
Ident {
ident: "i32",
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:59:32: 59:35 (#0),
},
Punct {
ch: ',',
spacing: Alone,
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:59:35: 59:36 (#0),
},
Ident {
ident: "u8",
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:61:13: 61:15 (#0),
},
],
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:57:27: 62:10 (#0),
},
Punct {
ch: ';',
spacing: Alone,
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:62:10: 62:11 (#0),
},
Literal {
kind: Integer,
symbol: "0",
suffix: None,
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:68:9: 68:10 (#0),
},
],
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:28:17: 69:6 (#0),
},
],
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:28:12: 69:7 (#0),
},
Punct {
ch: ',',
spacing: Alone,
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:69:7: 69:8 (#0),
},
Punct {
ch: '#',
spacing: Alone,
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:70:5: 70:6 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "print_helper",
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:70:7: 70:19 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "d",
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:70:20: 70:21 (#0),
},
],
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:70:19: 70:22 (#0),
},
],
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:70:6: 70:23 (#0),
},
Ident {
ident: "fourth",
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:71:5: 71:11 (#0),
},
Punct {
ch: ':',
spacing: Alone,
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:71:11: 71:12 (#0),
},
Ident {
ident: "B",
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
},
Punct {
ch: ',',
spacing: Alone,
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:71:13: 71:14 (#0),
},
],
span: $DIR/issue-75930-derive-cfg.rs:25:1: 72:2 (#0),
span: $DIR/issue-75930-derive-cfg.rs:25:32: 72:2 (#0),
},
]
@@ -35,48 +35,48 @@ PRINT-BANG INPUT (DEBUG): TokenStream [
stream: TokenStream [
Ident {
ident: "mod",
span: $DIR/issue-78675-captured-inner-attrs.rs:27:5: 29:6 (#0),
span: $DIR/issue-78675-captured-inner-attrs.rs:27:5: 27:8 (#0),
},
Ident {
ident: "bar",
span: $DIR/issue-78675-captured-inner-attrs.rs:27:5: 29:6 (#0),
span: $DIR/issue-78675-captured-inner-attrs.rs:27:9: 27:12 (#0),
},
Group {
delimiter: Brace,
stream: TokenStream [
Punct {
ch: '#',
spacing: Joint,
span: $DIR/issue-78675-captured-inner-attrs.rs:27:5: 29:6 (#0),
spacing: Alone,
span: $DIR/issue-78675-captured-inner-attrs.rs:28:9: 28:16 (#0),
},
Punct {
ch: '!',
spacing: Alone,
span: $DIR/issue-78675-captured-inner-attrs.rs:27:5: 29:6 (#0),
span: $DIR/issue-78675-captured-inner-attrs.rs:28:9: 28:16 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "doc",
span: $DIR/issue-78675-captured-inner-attrs.rs:27:5: 29:6 (#0),
span: $DIR/issue-78675-captured-inner-attrs.rs:28:9: 28:16 (#0),
},
Punct {
ch: '=',
spacing: Alone,
span: $DIR/issue-78675-captured-inner-attrs.rs:27:5: 29:6 (#0),
span: $DIR/issue-78675-captured-inner-attrs.rs:28:9: 28:16 (#0),
},
Literal {
kind: StrRaw(0),
symbol: " Foo",
suffix: None,
span: $DIR/issue-78675-captured-inner-attrs.rs:27:5: 29:6 (#0),
span: $DIR/issue-78675-captured-inner-attrs.rs:28:9: 28:16 (#0),
},
],
span: $DIR/issue-78675-captured-inner-attrs.rs:27:5: 29:6 (#0),
span: $DIR/issue-78675-captured-inner-attrs.rs:28:9: 28:16 (#0),
},
],
span: $DIR/issue-78675-captured-inner-attrs.rs:27:5: 29:6 (#0),
span: $DIR/issue-78675-captured-inner-attrs.rs:27:13: 29:6 (#0),
},
],
span: $DIR/issue-78675-captured-inner-attrs.rs:22:13: 22:18 (#4),
@@ -5,172 +5,167 @@ PRINT-DERIVE INPUT (DISPLAY): struct Foo
{
let a = #[rustc_dummy(first)] #[rustc_dummy(second)]
{ # ! [allow(unused)] 30 } ; 0
}],
}]
}
PRINT-DERIVE INPUT (DEBUG): TokenStream [
Ident {
ident: "struct",
span: $DIR/macro-rules-derive-cfg.rs:17:9: 22:10 (#4),
span: $DIR/macro-rules-derive-cfg.rs:17:9: 17:15 (#4),
},
Ident {
ident: "Foo",
span: $DIR/macro-rules-derive-cfg.rs:17:9: 22:10 (#4),
span: $DIR/macro-rules-derive-cfg.rs:17:16: 17:19 (#4),
},
Group {
delimiter: Brace,
stream: TokenStream [
Ident {
ident: "val",
span: $DIR/macro-rules-derive-cfg.rs:17:9: 22:10 (#4),
span: $DIR/macro-rules-derive-cfg.rs:18:13: 18:16 (#4),
},
Punct {
ch: ':',
spacing: Alone,
span: $DIR/macro-rules-derive-cfg.rs:17:9: 22:10 (#4),
span: $DIR/macro-rules-derive-cfg.rs:18:16: 18:17 (#4),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "bool",
span: $DIR/macro-rules-derive-cfg.rs:17:9: 22:10 (#4),
span: $DIR/macro-rules-derive-cfg.rs:18:19: 18:23 (#4),
},
Punct {
ch: ';',
spacing: Alone,
span: $DIR/macro-rules-derive-cfg.rs:17:9: 22:10 (#4),
span: $DIR/macro-rules-derive-cfg.rs:18:23: 18:24 (#4),
},
Group {
delimiter: Brace,
stream: TokenStream [
Ident {
ident: "let",
span: $DIR/macro-rules-derive-cfg.rs:17:9: 22:10 (#4),
span: $DIR/macro-rules-derive-cfg.rs:19:17: 19:20 (#4),
},
Ident {
ident: "a",
span: $DIR/macro-rules-derive-cfg.rs:17:9: 22:10 (#4),
span: $DIR/macro-rules-derive-cfg.rs:19:21: 19:22 (#4),
},
Punct {
ch: '=',
spacing: Alone,
span: $DIR/macro-rules-derive-cfg.rs:17:9: 22:10 (#4),
span: $DIR/macro-rules-derive-cfg.rs:19:23: 19:24 (#4),
},
Punct {
ch: '#',
spacing: Alone,
span: $DIR/macro-rules-derive-cfg.rs:17:9: 22:10 (#4),
span: $DIR/macro-rules-derive-cfg.rs:19:25: 19:26 (#4),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "rustc_dummy",
span: $DIR/macro-rules-derive-cfg.rs:17:9: 22:10 (#4),
span: $DIR/macro-rules-derive-cfg.rs:19:48: 19:59 (#4),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "first",
span: $DIR/macro-rules-derive-cfg.rs:17:9: 22:10 (#4),
span: $DIR/macro-rules-derive-cfg.rs:19:60: 19:65 (#4),
},
],
span: $DIR/macro-rules-derive-cfg.rs:17:9: 22:10 (#4),
span: $DIR/macro-rules-derive-cfg.rs:19:59: 19:66 (#4),
},
],
span: $DIR/macro-rules-derive-cfg.rs:17:9: 22:10 (#4),
span: $DIR/macro-rules-derive-cfg.rs:19:25: 19:26 (#4),
},
Punct {
ch: '#',
spacing: Alone,
span: $DIR/macro-rules-derive-cfg.rs:17:9: 22:10 (#4),
span: $DIR/macro-rules-derive-cfg.rs:26:13: 26:14 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "rustc_dummy",
span: $DIR/macro-rules-derive-cfg.rs:17:9: 22:10 (#4),
span: $DIR/macro-rules-derive-cfg.rs:26:36: 26:47 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "second",
span: $DIR/macro-rules-derive-cfg.rs:17:9: 22:10 (#4),
span: $DIR/macro-rules-derive-cfg.rs:26:48: 26:54 (#0),
},
],
span: $DIR/macro-rules-derive-cfg.rs:17:9: 22:10 (#4),
span: $DIR/macro-rules-derive-cfg.rs:26:47: 26:55 (#0),
},
],
span: $DIR/macro-rules-derive-cfg.rs:17:9: 22:10 (#4),
span: $DIR/macro-rules-derive-cfg.rs:26:13: 26:14 (#0),
},
Group {
delimiter: Brace,
stream: TokenStream [
Punct {
ch: '#',
spacing: Joint,
span: $DIR/macro-rules-derive-cfg.rs:17:9: 22:10 (#4),
spacing: Alone,
span: $DIR/macro-rules-derive-cfg.rs:27:5: 27:6 (#0),
},
Punct {
ch: '!',
spacing: Alone,
span: $DIR/macro-rules-derive-cfg.rs:17:9: 22:10 (#4),
span: $DIR/macro-rules-derive-cfg.rs:27:6: 27:7 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "allow",
span: $DIR/macro-rules-derive-cfg.rs:17:9: 22:10 (#4),
span: $DIR/macro-rules-derive-cfg.rs:27:29: 27:34 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "unused",
span: $DIR/macro-rules-derive-cfg.rs:17:9: 22:10 (#4),
span: $DIR/macro-rules-derive-cfg.rs:27:35: 27:41 (#0),
},
],
span: $DIR/macro-rules-derive-cfg.rs:17:9: 22:10 (#4),
span: $DIR/macro-rules-derive-cfg.rs:27:34: 27:42 (#0),
},
],
span: $DIR/macro-rules-derive-cfg.rs:17:9: 22:10 (#4),
span: $DIR/macro-rules-derive-cfg.rs:27:5: 27:6 (#0),
},
Literal {
kind: Integer,
symbol: "30",
suffix: None,
span: $DIR/macro-rules-derive-cfg.rs:17:9: 22:10 (#4),
span: $DIR/macro-rules-derive-cfg.rs:28:5: 28:7 (#0),
},
],
span: $DIR/macro-rules-derive-cfg.rs:17:9: 22:10 (#4),
span: $DIR/macro-rules-derive-cfg.rs:26:58: 29:2 (#0),
},
Punct {
ch: ';',
spacing: Alone,
span: $DIR/macro-rules-derive-cfg.rs:17:9: 22:10 (#4),
span: $DIR/macro-rules-derive-cfg.rs:19:74: 19:75 (#4),
},
Literal {
kind: Integer,
symbol: "0",
suffix: None,
span: $DIR/macro-rules-derive-cfg.rs:17:9: 22:10 (#4),
span: $DIR/macro-rules-derive-cfg.rs:20:17: 20:18 (#4),
},
],
span: $DIR/macro-rules-derive-cfg.rs:17:9: 22:10 (#4),
span: $DIR/macro-rules-derive-cfg.rs:18:25: 21:14 (#4),
},
],
span: $DIR/macro-rules-derive-cfg.rs:17:9: 22:10 (#4),
},
Punct {
ch: ',',
spacing: Alone,
span: $DIR/macro-rules-derive-cfg.rs:17:9: 22:10 (#4),
span: $DIR/macro-rules-derive-cfg.rs:18:18: 21:15 (#4),
},
],
span: $DIR/macro-rules-derive-cfg.rs:17:9: 22:10 (#4),
span: $DIR/macro-rules-derive-cfg.rs:17:20: 22:10 (#4),
},
]
@@ -1,94 +1,81 @@
PRINT-DERIVE INPUT (DISPLAY): struct Foo
{
my_array :
[bool ; { struct Inner { non_removed_inner_field : usize, } 0 }],
}
{ my_array : [bool ; { struct Inner { non_removed_inner_field : usize } 0 }] }
PRINT-DERIVE INPUT (DEBUG): TokenStream [
Ident {
ident: "struct",
span: $DIR/nested-derive-cfg.rs:12:1: 21:2 (#0),
span: $DIR/nested-derive-cfg.rs:12:1: 12:7 (#0),
},
Ident {
ident: "Foo",
span: $DIR/nested-derive-cfg.rs:12:1: 21:2 (#0),
span: $DIR/nested-derive-cfg.rs:12:8: 12:11 (#0),
},
Group {
delimiter: Brace,
stream: TokenStream [
Ident {
ident: "my_array",
span: $DIR/nested-derive-cfg.rs:12:1: 21:2 (#0),
span: $DIR/nested-derive-cfg.rs:14:5: 14:13 (#0),
},
Punct {
ch: ':',
spacing: Alone,
span: $DIR/nested-derive-cfg.rs:12:1: 21:2 (#0),
span: $DIR/nested-derive-cfg.rs:14:13: 14:14 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "bool",
span: $DIR/nested-derive-cfg.rs:12:1: 21:2 (#0),
span: $DIR/nested-derive-cfg.rs:14:16: 14:20 (#0),
},
Punct {
ch: ';',
spacing: Alone,
span: $DIR/nested-derive-cfg.rs:12:1: 21:2 (#0),
span: $DIR/nested-derive-cfg.rs:14:20: 14:21 (#0),
},
Group {
delimiter: Brace,
stream: TokenStream [
Ident {
ident: "struct",
span: $DIR/nested-derive-cfg.rs:12:1: 21:2 (#0),
span: $DIR/nested-derive-cfg.rs:15:9: 15:15 (#0),
},
Ident {
ident: "Inner",
span: $DIR/nested-derive-cfg.rs:12:1: 21:2 (#0),
span: $DIR/nested-derive-cfg.rs:15:16: 15:21 (#0),
},
Group {
delimiter: Brace,
stream: TokenStream [
Ident {
ident: "non_removed_inner_field",
span: $DIR/nested-derive-cfg.rs:12:1: 21:2 (#0),
span: $DIR/nested-derive-cfg.rs:17:13: 17:36 (#0),
},
Punct {
ch: ':',
spacing: Alone,
span: $DIR/nested-derive-cfg.rs:12:1: 21:2 (#0),
span: $DIR/nested-derive-cfg.rs:17:36: 17:37 (#0),
},
Ident {
ident: "usize",
span: $DIR/nested-derive-cfg.rs:12:1: 21:2 (#0),
},
Punct {
ch: ',',
spacing: Alone,
span: $DIR/nested-derive-cfg.rs:12:1: 21:2 (#0),
span: $DIR/nested-derive-cfg.rs:17:38: 17:43 (#0),
},
],
span: $DIR/nested-derive-cfg.rs:12:1: 21:2 (#0),
span: $DIR/nested-derive-cfg.rs:15:22: 18:10 (#0),
},
Literal {
kind: Integer,
symbol: "0",
suffix: None,
span: $DIR/nested-derive-cfg.rs:12:1: 21:2 (#0),
span: $DIR/nested-derive-cfg.rs:19:9: 19:10 (#0),
},
],
span: $DIR/nested-derive-cfg.rs:12:1: 21:2 (#0),
span: $DIR/nested-derive-cfg.rs:14:22: 20:6 (#0),
},
],
span: $DIR/nested-derive-cfg.rs:12:1: 21:2 (#0),
},
Punct {
ch: ',',
spacing: Alone,
span: $DIR/nested-derive-cfg.rs:12:1: 21:2 (#0),
span: $DIR/nested-derive-cfg.rs:14:15: 20:7 (#0),
},
],
span: $DIR/nested-derive-cfg.rs:12:1: 21:2 (#0),
span: $DIR/nested-derive-cfg.rs:12:12: 21:2 (#0),
},
]
@@ -0,0 +1,19 @@
// check-pass
// compile-flags: -Z span-debug --error-format human
// aux-build:test-macros.rs
// edition:2018
#![feature(proc_macro_hygiene)]
#![no_std] // Don't load unnecessary hygiene information from std
extern crate std;
#[macro_use]
extern crate test_macros;
fn main() {
#[print_target_and_args(my_arg)] (
#![cfg_attr(not(FALSE), allow(unused))]
1, 2, 3
);
}
@@ -0,0 +1,79 @@
PRINT-ATTR_ARGS INPUT (DISPLAY): my_arg
PRINT-ATTR_ARGS INPUT (DEBUG): TokenStream [
Ident {
ident: "my_arg",
span: $DIR/simple-tuple.rs:15:29: 15:35 (#0),
},
]
PRINT-ATTR INPUT (DISPLAY): (# ! [allow(unused)] 1, 2, 3) ;
PRINT-ATTR INPUT (DEBUG): TokenStream [
Group {
delimiter: Parenthesis,
stream: TokenStream [
Punct {
ch: '#',
spacing: Alone,
span: $DIR/simple-tuple.rs:16:9: 16:10 (#0),
},
Punct {
ch: '!',
spacing: Alone,
span: $DIR/simple-tuple.rs:16:10: 16:11 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "allow",
span: $DIR/simple-tuple.rs:16:33: 16:38 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "unused",
span: $DIR/simple-tuple.rs:16:39: 16:45 (#0),
},
],
span: $DIR/simple-tuple.rs:16:38: 16:46 (#0),
},
],
span: $DIR/simple-tuple.rs:16:9: 16:10 (#0),
},
Literal {
kind: Integer,
symbol: "1",
suffix: None,
span: $DIR/simple-tuple.rs:17:9: 17:10 (#0),
},
Punct {
ch: ',',
spacing: Alone,
span: $DIR/simple-tuple.rs:17:10: 17:11 (#0),
},
Literal {
kind: Integer,
symbol: "2",
suffix: None,
span: $DIR/simple-tuple.rs:17:12: 17:13 (#0),
},
Punct {
ch: ',',
spacing: Alone,
span: $DIR/simple-tuple.rs:17:13: 17:14 (#0),
},
Literal {
kind: Integer,
symbol: "3",
suffix: None,
span: $DIR/simple-tuple.rs:17:15: 17:16 (#0),
},
],
span: $DIR/simple-tuple.rs:15:38: 18:6 (#0),
},
Punct {
ch: ';',
spacing: Alone,
span: $DIR/simple-tuple.rs:18:6: 18:7 (#0),
},
]
@@ -15,40 +15,40 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [
Punct {
ch: '#',
spacing: Alone,
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:17:1: 17:2 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "print_target_and_args",
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:17:3: 17:24 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "second_outer",
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:17:25: 17:37 (#0),
},
],
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:17:24: 17:38 (#0),
},
],
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:17:2: 17:39 (#0),
},
Ident {
ident: "impl",
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:18:1: 18:5 (#0),
},
Ident {
ident: "Bar",
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:18:6: 18:9 (#0),
},
Punct {
ch: '<',
spacing: Alone,
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:18:9: 18:10 (#0),
},
Group {
delimiter: Brace,
@@ -57,54 +57,54 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [
kind: Integer,
symbol: "1",
suffix: None,
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:18:11: 18:12 (#0),
},
Punct {
ch: '>',
spacing: Alone,
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:18:13: 18:14 (#0),
},
Literal {
kind: Integer,
symbol: "0",
suffix: None,
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:18:15: 18:16 (#0),
},
],
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:18:10: 18:17 (#0),
},
Punct {
ch: '>',
spacing: Alone,
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:18:17: 18:18 (#0),
},
Ident {
ident: "for",
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:18:19: 18:22 (#0),
},
Ident {
ident: "Foo",
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:18:23: 18:26 (#0),
},
Punct {
ch: '<',
spacing: Alone,
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:18:26: 18:27 (#0),
},
Group {
delimiter: Brace,
stream: TokenStream [
Ident {
ident: "true",
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:18:28: 18:32 (#0),
},
],
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:18:27: 18:33 (#0),
},
Punct {
ch: '>',
spacing: Alone,
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:18:33: 18:34 (#0),
},
Group {
delimiter: Brace,
@@ -112,72 +112,72 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [
Punct {
ch: '#',
spacing: Joint,
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:19:5: 19:6 (#0),
},
Punct {
ch: '!',
spacing: Alone,
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:19:6: 19:7 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "print_target_and_args",
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:19:8: 19:29 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "first_inner",
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:19:30: 19:41 (#0),
},
],
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:19:29: 19:42 (#0),
},
],
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:19:7: 19:43 (#0),
},
Punct {
ch: '#',
spacing: Joint,
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:20:5: 20:6 (#0),
},
Punct {
ch: '!',
spacing: Alone,
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:20:6: 20:7 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "print_target_and_args",
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:20:8: 20:29 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "second_inner",
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:20:30: 20:42 (#0),
},
],
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:20:29: 20:43 (#0),
},
],
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:20:7: 20:44 (#0),
},
],
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:18:35: 21:2 (#0),
},
]
PRINT-ATTR_ARGS INPUT (DISPLAY): second_outer
PRINT-ATTR_ARGS INPUT (DEBUG): TokenStream [
Ident {
ident: "second_outer",
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:17:25: 17:37 (#0),
},
]
PRINT-ATTR INPUT (DISPLAY): impl Bar < { 1 > 0 } > for Foo < { true } >
@@ -188,16 +188,16 @@ PRINT-ATTR INPUT (DISPLAY): impl Bar < { 1 > 0 } > for Foo < { true } >
PRINT-ATTR INPUT (DEBUG): TokenStream [
Ident {
ident: "impl",
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:18:1: 18:5 (#0),
},
Ident {
ident: "Bar",
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:18:6: 18:9 (#0),
},
Punct {
ch: '<',
spacing: Alone,
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:18:9: 18:10 (#0),
},
Group {
delimiter: Brace,
@@ -206,54 +206,54 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [
kind: Integer,
symbol: "1",
suffix: None,
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:18:11: 18:12 (#0),
},
Punct {
ch: '>',
spacing: Alone,
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:18:13: 18:14 (#0),
},
Literal {
kind: Integer,
symbol: "0",
suffix: None,
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:18:15: 18:16 (#0),
},
],
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:18:10: 18:17 (#0),
},
Punct {
ch: '>',
spacing: Alone,
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:18:17: 18:18 (#0),
},
Ident {
ident: "for",
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:18:19: 18:22 (#0),
},
Ident {
ident: "Foo",
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:18:23: 18:26 (#0),
},
Punct {
ch: '<',
spacing: Alone,
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:18:26: 18:27 (#0),
},
Group {
delimiter: Brace,
stream: TokenStream [
Ident {
ident: "true",
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:18:28: 18:32 (#0),
},
],
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:18:27: 18:33 (#0),
},
Punct {
ch: '>',
spacing: Alone,
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:18:33: 18:34 (#0),
},
Group {
delimiter: Brace,
@@ -261,72 +261,72 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [
Punct {
ch: '#',
spacing: Joint,
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:19:5: 19:6 (#0),
},
Punct {
ch: '!',
spacing: Alone,
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:19:6: 19:7 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "print_target_and_args",
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:19:8: 19:29 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "first_inner",
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:19:30: 19:41 (#0),
},
],
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:19:29: 19:42 (#0),
},
],
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:19:7: 19:43 (#0),
},
Punct {
ch: '#',
spacing: Joint,
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:20:5: 20:6 (#0),
},
Punct {
ch: '!',
spacing: Alone,
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:20:6: 20:7 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "print_target_and_args",
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:20:8: 20:29 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "second_inner",
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:20:30: 20:42 (#0),
},
],
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:20:29: 20:43 (#0),
},
],
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:20:7: 20:44 (#0),
},
],
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:18:35: 21:2 (#0),
},
]
PRINT-ATTR_ARGS INPUT (DISPLAY): first_inner
PRINT-ATTR_ARGS INPUT (DEBUG): TokenStream [
Ident {
ident: "first_inner",
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:19:30: 19:41 (#0),
},
]
PRINT-ATTR INPUT (DISPLAY): impl Bar < { 1 > 0 } > for Foo < { true } >
@@ -334,16 +334,16 @@ PRINT-ATTR INPUT (DISPLAY): impl Bar < { 1 > 0 } > for Foo < { true } >
PRINT-ATTR INPUT (DEBUG): TokenStream [
Ident {
ident: "impl",
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:18:1: 18:5 (#0),
},
Ident {
ident: "Bar",
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:18:6: 18:9 (#0),
},
Punct {
ch: '<',
spacing: Alone,
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:18:9: 18:10 (#0),
},
Group {
delimiter: Brace,
@@ -352,54 +352,54 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [
kind: Integer,
symbol: "1",
suffix: None,
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:18:11: 18:12 (#0),
},
Punct {
ch: '>',
spacing: Alone,
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:18:13: 18:14 (#0),
},
Literal {
kind: Integer,
symbol: "0",
suffix: None,
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:18:15: 18:16 (#0),
},
],
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:18:10: 18:17 (#0),
},
Punct {
ch: '>',
spacing: Alone,
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:18:17: 18:18 (#0),
},
Ident {
ident: "for",
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:18:19: 18:22 (#0),
},
Ident {
ident: "Foo",
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:18:23: 18:26 (#0),
},
Punct {
ch: '<',
spacing: Alone,
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:18:26: 18:27 (#0),
},
Group {
delimiter: Brace,
stream: TokenStream [
Ident {
ident: "true",
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:18:28: 18:32 (#0),
},
],
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:18:27: 18:33 (#0),
},
Punct {
ch: '>',
spacing: Alone,
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:18:33: 18:34 (#0),
},
Group {
delimiter: Brace,
@@ -407,58 +407,58 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [
Punct {
ch: '#',
spacing: Joint,
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:20:5: 20:6 (#0),
},
Punct {
ch: '!',
spacing: Alone,
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:20:6: 20:7 (#0),
},
Group {
delimiter: Bracket,
stream: TokenStream [
Ident {
ident: "print_target_and_args",
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:20:8: 20:29 (#0),
},
Group {
delimiter: Parenthesis,
stream: TokenStream [
Ident {
ident: "second_inner",
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:20:30: 20:42 (#0),
},
],
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:20:29: 20:43 (#0),
},
],
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:20:7: 20:44 (#0),
},
],
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:18:35: 21:2 (#0),
},
]
PRINT-ATTR_ARGS INPUT (DISPLAY): second_inner
PRINT-ATTR_ARGS INPUT (DEBUG): TokenStream [
Ident {
ident: "second_inner",
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:20:30: 20:42 (#0),
},
]
PRINT-ATTR INPUT (DISPLAY): impl Bar < { 1 > 0 } > for Foo < { true } > { }
PRINT-ATTR INPUT (DEBUG): TokenStream [
Ident {
ident: "impl",
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:18:1: 18:5 (#0),
},
Ident {
ident: "Bar",
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:18:6: 18:9 (#0),
},
Punct {
ch: '<',
spacing: Alone,
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:18:9: 18:10 (#0),
},
Group {
delimiter: Brace,
@@ -467,58 +467,58 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [
kind: Integer,
symbol: "1",
suffix: None,
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:18:11: 18:12 (#0),
},
Punct {
ch: '>',
spacing: Alone,
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:18:13: 18:14 (#0),
},
Literal {
kind: Integer,
symbol: "0",
suffix: None,
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:18:15: 18:16 (#0),
},
],
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:18:10: 18:17 (#0),
},
Punct {
ch: '>',
spacing: Alone,
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:18:17: 18:18 (#0),
},
Ident {
ident: "for",
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:18:19: 18:22 (#0),
},
Ident {
ident: "Foo",
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:18:23: 18:26 (#0),
},
Punct {
ch: '<',
spacing: Alone,
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:18:26: 18:27 (#0),
},
Group {
delimiter: Brace,
stream: TokenStream [
Ident {
ident: "true",
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:18:28: 18:32 (#0),
},
],
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:18:27: 18:33 (#0),
},
Punct {
ch: '>',
spacing: Alone,
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:18:33: 18:34 (#0),
},
Group {
delimiter: Brace,
stream: TokenStream [],
span: $DIR/weird-braces.rs:18:1: 21:2 (#0),
span: $DIR/weird-braces.rs:18:35: 21:2 (#0),
},
]
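
Aside (illustrative, not part of the diff above): the PRINT-DERIVE / PRINT-ATTR expectation files are produced by helper proc macros from the test-support crate that echo their input back in DISPLAY and DEBUG form. A minimal sketch of such a macro follows; the derive name `PrintSpans` and its exact output format are assumptions for illustration only. What the fixtures above verify is that, with token-based attribute handling, each printed span refers to the token's original source location (e.g. `weird-braces.rs:18:6: 18:9`) rather than collapsing to a single span covering the whole annotated item.

extern crate proc_macro;
use proc_macro::TokenStream;

// Hypothetical derive macro: walks its input and prints every
// top-level token together with its span, then emits no new items.
#[proc_macro_derive(PrintSpans)]
pub fn print_spans(input: TokenStream) -> TokenStream {
    for tree in input {
        // With the token-based handling introduced in this PR,
        // `tree.span()` still points at the original source location
        // even after eager cfg-expansion has modified the input.
        println!("{:?} at {:?}", tree, tree.span());
    }
    TokenStream::new()
}

Deriving this on, say, `struct Foo { field: bool }` would print one line per top-level token (`struct`, `Foo`, and the brace-delimited group), each with its own distinct span, matching the shape of the DEBUG dumps above.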