Auto merge of #97251 - petrochenkov:eqtokens, r=nnethercote

rustc_parse: Move AST -> TokenStream conversion logic to rustc_ast

In the past, falling back to reparsing pretty-printed strings was common, so some of this logic had to live in `rustc_parse`. Now that the reparsing fallback is used in only two corner cases, the logic can move to `rustc_ast`, which simplifies many things.

It also makes it possible to fix `MacArgs::inner_tokens` for `MacArgs::Eq` with non-literal expressions; that fix is done in the second commit.
r? `@nnethercote`
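As a rough, hypothetical illustration of the `inner_tokens` fix (not code from this PR): for a key-value attribute whose value is not a plain literal, e.g. `#[doc = include_str!("docs.md")]` before expansion, callers can now get the value's tokens back instead of hitting the old `unreachable!` arm.

```rust
// Hypothetical caller: recover the right-hand-side tokens of a key-value
// attribute. `args` is the `MacArgs` stored on the parsed attribute.
fn eq_value_tokens(args: &rustc_ast::MacArgs) -> rustc_ast::tokenstream::TokenStream {
    // After this PR, the `MacArgs::Eq(_, MacArgsEq::Ast(expr))` arm delegates
    // to `TokenStream::from_ast(expr)`, so non-literal expressions work too,
    // provided the expression's tokens were collected during parsing.
    args.inner_tokens()
}
```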
bors 2022-05-22 11:51:25 +00:00
commit 4bb4dc4672
14 changed files with 127 additions and 300 deletions

compiler/rustc_ast/src/ast.rs

@@ -23,8 +23,8 @@ pub use GenericArgs::*;
pub use UnsafeSource::*;
use crate::ptr::P;
use crate::token::{self, CommentKind, Delimiter, Token, TokenKind};
use crate::tokenstream::{DelimSpan, LazyTokenStream, TokenStream, TokenTree};
use crate::token::{self, CommentKind, Delimiter};
use crate::tokenstream::{DelimSpan, LazyTokenStream, TokenStream};
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_data_structures::stack::ensure_sufficient_stack;
@@ -444,8 +444,7 @@ impl Default for Generics {
pub struct WhereClause {
/// `true` if we ate a `where` token: this can happen
/// if we parsed no predicates (e.g. `struct Foo where {}`).
/// This allows us to accurately pretty-print
/// in `nt_to_tokenstream`
/// This allows us to pretty-print accurately.
pub has_where_token: bool,
pub predicates: Vec<WherePredicate>,
pub span: Span,
@@ -1571,20 +1570,7 @@ impl MacArgs {
match self {
MacArgs::Empty => TokenStream::default(),
MacArgs::Delimited(.., tokens) => tokens.clone(),
MacArgs::Eq(_, MacArgsEq::Ast(expr)) => {
// Currently only literals are allowed here. If more complex expression kinds are
// allowed in the future, then `nt_to_tokenstream` should be used to extract the
// token stream. This will require some cleverness, perhaps with a function
// pointer, because `nt_to_tokenstream` is not directly usable from this crate.
// It will also require changing the `parse_expr` call in `parse_mac_args_common`
// to `parse_expr_force_collect`.
if let ExprKind::Lit(lit) = &expr.kind {
let token = Token::new(TokenKind::Literal(lit.token), lit.span);
TokenTree::Token(token).into()
} else {
unreachable!("couldn't extract literal when getting inner tokens: {:?}", expr)
}
}
MacArgs::Eq(_, MacArgsEq::Ast(expr)) => TokenStream::from_ast(expr),
MacArgs::Eq(_, MacArgsEq::Hir(lit)) => {
unreachable!("in literal form when getting inner tokens: {:?}", lit)
}

compiler/rustc_ast/src/ast_traits.rs

@@ -108,7 +108,7 @@ macro_rules! impl_has_span {
};
}
impl_has_span!(AssocItem, Expr, ForeignItem, Item, Stmt);
impl_has_span!(AssocItem, Block, Expr, ForeignItem, Item, Pat, Path, Stmt, Ty, Visibility);
impl<T: AstDeref<Target: HasSpan>> HasSpan for T {
fn span(&self) -> Span {
@@ -116,6 +116,12 @@ impl<T: AstDeref<Target: HasSpan>> HasSpan for T {
}
}
impl HasSpan for AttrItem {
fn span(&self) -> Span {
self.span()
}
}
/// A trait for AST nodes having (or not having) collected tokens.
pub trait HasTokens {
fn tokens(&self) -> Option<&LazyTokenStream>;

compiler/rustc_ast/src/tokenstream.rs

@@ -13,7 +13,9 @@
//! and a borrowed `TokenStream` is sufficient to build an owned `TokenStream` without taking
//! ownership of the original.
use crate::token::{self, Delimiter, Token, TokenKind};
use crate::ast::StmtKind;
use crate::ast_traits::{HasAttrs, HasSpan, HasTokens};
use crate::token::{self, Delimiter, Nonterminal, Token, TokenKind};
use crate::AttrVec;
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
@@ -45,12 +47,6 @@ pub enum TokenTree {
Delimited(DelimSpan, Delimiter, TokenStream),
}
#[derive(Copy, Clone)]
pub enum CanSynthesizeMissingTokens {
Yes,
No,
}
// Ensure all fields of `TokenTree` is `Send` and `Sync`.
#[cfg(parallel_compiler)]
fn _dummy()
@@ -471,6 +467,89 @@ impl TokenStream {
.collect(),
))
}
fn opt_from_ast(node: &(impl HasAttrs + HasTokens)) -> Option<TokenStream> {
let tokens = node.tokens()?;
let attrs = node.attrs();
let attr_annotated = if attrs.is_empty() {
tokens.create_token_stream()
} else {
let attr_data = AttributesData { attrs: attrs.to_vec().into(), tokens: tokens.clone() };
AttrAnnotatedTokenStream::new(vec![(
AttrAnnotatedTokenTree::Attributes(attr_data),
Spacing::Alone,
)])
};
Some(attr_annotated.to_tokenstream())
}
pub fn from_ast(node: &(impl HasAttrs + HasSpan + HasTokens + fmt::Debug)) -> TokenStream {
TokenStream::opt_from_ast(node)
.unwrap_or_else(|| panic!("missing tokens for node at {:?}: {:?}", node.span(), node))
}
pub fn from_nonterminal_ast(nt: &Nonterminal) -> TokenStream {
match nt {
Nonterminal::NtIdent(ident, is_raw) => {
TokenTree::token(token::Ident(ident.name, *is_raw), ident.span).into()
}
Nonterminal::NtLifetime(ident) => {
TokenTree::token(token::Lifetime(ident.name), ident.span).into()
}
Nonterminal::NtItem(item) => TokenStream::from_ast(item),
Nonterminal::NtBlock(block) => TokenStream::from_ast(block),
Nonterminal::NtStmt(stmt) if let StmtKind::Empty = stmt.kind => {
// FIXME: Properly collect tokens for empty statements.
TokenTree::token(token::Semi, stmt.span).into()
}
Nonterminal::NtStmt(stmt) => TokenStream::from_ast(stmt),
Nonterminal::NtPat(pat) => TokenStream::from_ast(pat),
Nonterminal::NtTy(ty) => TokenStream::from_ast(ty),
Nonterminal::NtMeta(attr) => TokenStream::from_ast(attr),
Nonterminal::NtPath(path) => TokenStream::from_ast(path),
Nonterminal::NtVis(vis) => TokenStream::from_ast(vis),
Nonterminal::NtExpr(expr) | Nonterminal::NtLiteral(expr) => TokenStream::from_ast(expr),
}
}
fn flatten_token(token: &Token) -> TokenTree {
match &token.kind {
token::Interpolated(nt) if let token::NtIdent(ident, is_raw) = **nt => {
TokenTree::token(token::Ident(ident.name, is_raw), ident.span)
}
token::Interpolated(nt) => TokenTree::Delimited(
DelimSpan::from_single(token.span),
Delimiter::Invisible,
TokenStream::from_nonterminal_ast(&nt).flattened(),
),
_ => TokenTree::Token(token.clone()),
}
}
fn flatten_token_tree(tree: &TokenTree) -> TokenTree {
match tree {
TokenTree::Token(token) => TokenStream::flatten_token(token),
TokenTree::Delimited(span, delim, tts) => {
TokenTree::Delimited(*span, *delim, tts.flattened())
}
}
}
#[must_use]
pub fn flattened(&self) -> TokenStream {
fn can_skip(stream: &TokenStream) -> bool {
stream.trees().all(|tree| match tree {
TokenTree::Token(token) => !matches!(token.kind, token::Interpolated(_)),
TokenTree::Delimited(_, _, inner) => can_skip(inner),
})
}
if can_skip(self) {
return self.clone();
}
self.trees().map(|tree| TokenStream::flatten_token_tree(tree)).collect()
}
}
// 99.5%+ of the time we have 1 or 2 elements in this vector.

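To make the `flattened` logic above concrete, here is a minimal standalone model (simplified, hypothetical types; the real code walks rustc's `TokenStream` and wraps each interpolated nonterminal in a `Delimiter::Invisible` group built from `TokenStream::from_nonterminal_ast`):

```rust
// Simplified stand-in types for TokenTree / TokenStream.
#[derive(Clone, Debug, PartialEq)]
enum Tree {
    Token(String),
    // Models `token::Interpolated(nt)`: an already-parsed AST fragment that
    // still knows which tokens it was made from.
    Interpolated(Vec<Tree>),
    // Models a delimited group (here: the invisible delimiter).
    Delimited(Vec<Tree>),
}

// Models `TokenStream::flattened`: after this pass no `Interpolated` nodes
// remain; each is replaced by a group holding its underlying tokens.
fn flatten(trees: &[Tree]) -> Vec<Tree> {
    trees
        .iter()
        .map(|tree| match tree {
            Tree::Interpolated(inner) => Tree::Delimited(flatten(inner)),
            Tree::Delimited(inner) => Tree::Delimited(flatten(inner)),
            token => token.clone(),
        })
        .collect()
}

fn main() {
    let stream = vec![
        Tree::Token("struct".into()),
        Tree::Interpolated(vec![Tree::Token("Foo".into())]),
    ];
    assert_eq!(
        flatten(&stream),
        vec![
            Tree::Token("struct".into()),
            Tree::Delimited(vec![Tree::Token("Foo".into())]),
        ]
    );
}
```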
compiler/rustc_ast_lowering/src/item.rs

@@ -13,7 +13,6 @@ use rustc_hir::def::{DefKind, Res};
use rustc_hir::def_id::{LocalDefId, CRATE_DEF_ID};
use rustc_hir::PredicateOrigin;
use rustc_index::vec::{Idx, IndexVec};
use rustc_session::utils::NtToTokenstream;
use rustc_session::Session;
use rustc_span::source_map::DesugaringKind;
use rustc_span::symbol::{kw, sym, Ident};
@@ -27,7 +26,6 @@ use std::iter;
pub(super) struct ItemLowerer<'a, 'hir> {
pub(super) sess: &'a Session,
pub(super) resolver: &'a mut dyn ResolverAstLowering,
pub(super) nt_to_tokenstream: NtToTokenstream,
pub(super) arena: &'hir Arena<'hir>,
pub(super) ast_index: &'a IndexVec<LocalDefId, AstOwner<'a>>,
pub(super) owners: &'a mut IndexVec<LocalDefId, hir::MaybeOwner<&'hir hir::OwnerInfo<'hir>>>,
@@ -63,7 +61,6 @@ impl<'a, 'hir> ItemLowerer<'a, 'hir> {
// Pseudo-globals.
sess: &self.sess,
resolver: self.resolver,
nt_to_tokenstream: self.nt_to_tokenstream,
arena: self.arena,
// HirId handling.

compiler/rustc_ast_lowering/src/lib.rs

@@ -37,7 +37,6 @@
#![recursion_limit = "256"]
#![allow(rustc::potential_query_instability)]
use rustc_ast::tokenstream::{CanSynthesizeMissingTokens, TokenStream};
use rustc_ast::visit;
use rustc_ast::{self as ast, *};
use rustc_ast_pretty::pprust;
@@ -56,7 +55,6 @@ use rustc_hir::{ConstArg, GenericArg, ItemLocalId, ParamName, TraitCandidate};
use rustc_index::vec::{Idx, IndexVec};
use rustc_query_system::ich::StableHashingContext;
use rustc_session::parse::feature_err;
use rustc_session::utils::{FlattenNonterminals, NtToTokenstream};
use rustc_session::Session;
use rustc_span::hygiene::{ExpnId, MacroKind};
use rustc_span::source_map::DesugaringKind;
@@ -89,11 +87,6 @@ struct LoweringContext<'a, 'hir: 'a> {
resolver: &'a mut dyn ResolverAstLowering,
/// HACK(Centril): there is a cyclic dependency between the parser and lowering
/// if we don't have this function pointer. To avoid that dependency so that
/// `rustc_middle` is independent of the parser, we use dynamic dispatch here.
nt_to_tokenstream: NtToTokenstream,
/// Used to allocate HIR nodes.
arena: &'hir Arena<'hir>,
@@ -436,7 +429,6 @@ pub fn lower_crate<'a, 'hir>(
sess: &'a Session,
krate: &'a Crate,
resolver: &'a mut dyn ResolverAstLowering,
nt_to_tokenstream: NtToTokenstream,
arena: &'hir Arena<'hir>,
) -> &'hir hir::Crate<'hir> {
let _prof_timer = sess.prof.verbose_generic_activity("hir_lowering");
@@ -447,15 +439,8 @@
IndexVec::from_fn_n(|_| hir::MaybeOwner::Phantom, resolver.definitions().def_index_count());
for def_id in ast_index.indices() {
item::ItemLowerer {
sess,
resolver,
nt_to_tokenstream,
arena,
ast_index: &ast_index,
owners: &mut owners,
}
.lower_node(def_id);
item::ItemLowerer { sess, resolver, arena, ast_index: &ast_index, owners: &mut owners }
.lower_node(def_id);
}
let hir_hash = compute_hir_hash(resolver, &owners);
@@ -875,11 +860,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
// ```
//
// In both cases, we don't want to synthesize any tokens
MacArgs::Delimited(
dspan,
delim,
self.lower_token_stream(tokens.clone(), CanSynthesizeMissingTokens::No),
)
MacArgs::Delimited(dspan, delim, tokens.flattened())
}
// This is an inert key-value attribute - it will never be visible to macros
// after it gets lowered to HIR. Therefore, we can extract literals to handle
@@ -904,19 +885,6 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
}
}
fn lower_token_stream(
&self,
tokens: TokenStream,
synthesize_tokens: CanSynthesizeMissingTokens,
) -> TokenStream {
FlattenNonterminals {
parse_sess: &self.sess.parse_sess,
synthesize_tokens,
nt_to_tokenstream: self.nt_to_tokenstream,
}
.process_token_stream(tokens)
}
/// Given an associated type constraint like one of these:
///
/// ```ignore (illustrative)

compiler/rustc_ast_pretty/src/pprust/mod.rs

@@ -4,42 +4,12 @@ mod tests;
pub mod state;
pub use state::{print_crate, AnnNode, Comments, PpAnn, PrintState, State};
use rustc_ast as ast;
use rustc_ast::token::{Nonterminal, Token, TokenKind};
use rustc_ast::tokenstream::{TokenStream, TokenTree};
use rustc_ast::{self as ast, AstDeref};
use std::borrow::Cow;
pub trait AstPrettyPrint {
fn pretty_print(&self) -> String;
}
impl<T: AstDeref<Target: AstPrettyPrint>> AstPrettyPrint for T {
fn pretty_print(&self) -> String {
self.ast_deref().pretty_print()
}
}
macro_rules! impl_ast_pretty_print {
($($T:ty => $method:ident),+ $(,)?) => {
$(
impl AstPrettyPrint for $T {
fn pretty_print(&self) -> String {
State::new().$method(self)
}
}
)+
};
}
impl_ast_pretty_print! {
ast::Item => item_to_string,
ast::AssocItem => assoc_item_to_string,
ast::ForeignItem => foreign_item_to_string,
ast::Expr => expr_to_string,
ast::Stmt => stmt_to_string,
}
pub fn nonterminal_to_string(nt: &Nonterminal) -> String {
State::new().nonterminal_to_string(nt)
}

compiler/rustc_builtin_macros/src/cfg_eval.rs

@@ -3,7 +3,6 @@ use crate::util::{check_builtin_macro_attribute, warn_on_duplicate_attribute};
use rustc_ast as ast;
use rustc_ast::mut_visit::MutVisitor;
use rustc_ast::ptr::P;
use rustc_ast::tokenstream::CanSynthesizeMissingTokens;
use rustc_ast::visit::Visitor;
use rustc_ast::NodeId;
use rustc_ast::{mut_visit, visit};
@@ -13,7 +12,6 @@ use rustc_expand::config::StripUnconfigured;
use rustc_expand::configure;
use rustc_feature::Features;
use rustc_parse::parser::{ForceCollect, Parser};
use rustc_session::utils::FlattenNonterminals;
use rustc_session::Session;
use rustc_span::symbol::sym;
use rustc_span::Span;
@@ -174,8 +172,6 @@ impl CfgEval<'_, '_> {
_ => unreachable!(),
};
let mut orig_tokens = annotatable.to_tokens(&self.cfg.sess.parse_sess);
// 'Flatten' all nonterminals (i.e. `TokenKind::Interpolated`)
// to `None`-delimited groups containing the corresponding tokens. This
// is normally delayed until the proc-macro server actually needs to
@@ -189,12 +185,7 @@
// where `$item` is `#[cfg_attr] struct Foo {}`. We want to make
// sure to evaluate *all* `#[cfg]` and `#[cfg_attr]` attributes - the simplest
// way to do this is to do a single parse of a stream without any nonterminals.
let mut flatten = FlattenNonterminals {
nt_to_tokenstream: rustc_parse::nt_to_tokenstream,
parse_sess: &self.cfg.sess.parse_sess,
synthesize_tokens: CanSynthesizeMissingTokens::No,
};
orig_tokens = flatten.process_token_stream(orig_tokens);
let orig_tokens = annotatable.to_tokens().flattened();
// Re-parse the tokens, setting the `capture_cfg` flag to save extra information
// to the captured `AttrAnnotatedTokenStream` (specifically, we capture

compiler/rustc_expand/src/base.rs

@@ -4,7 +4,7 @@ use crate::module::DirOwnership;
use rustc_ast::attr::MarkedAttrs;
use rustc_ast::ptr::P;
use rustc_ast::token::{self, Nonterminal};
use rustc_ast::tokenstream::{CanSynthesizeMissingTokens, TokenStream};
use rustc_ast::tokenstream::TokenStream;
use rustc_ast::visit::{AssocCtxt, Visitor};
use rustc_ast::{self as ast, Attribute, HasAttrs, Item, NodeId, PatKind};
use rustc_attr::{self as attr, Deprecation, Stability};
@@ -13,7 +13,7 @@ use rustc_data_structures::sync::{self, Lrc};
use rustc_errors::{Applicability, DiagnosticBuilder, ErrorGuaranteed, MultiSpan, PResult};
use rustc_lint_defs::builtin::PROC_MACRO_BACK_COMPAT;
use rustc_lint_defs::BuiltinLintDiagnostics;
use rustc_parse::{self, parser, to_token_stream, MACRO_ARGUMENTS};
use rustc_parse::{self, parser, MACRO_ARGUMENTS};
use rustc_session::{parse::ParseSess, Limit, Session};
use rustc_span::def_id::{CrateNum, DefId, LocalDefId};
use rustc_span::edition::Edition;
@@ -109,20 +109,18 @@ impl Annotatable {
}
}
pub fn to_tokens(&self, sess: &ParseSess) -> TokenStream {
pub fn to_tokens(&self) -> TokenStream {
match self {
Annotatable::Item(node) => to_token_stream(node, sess, CanSynthesizeMissingTokens::No),
Annotatable::Item(node) => TokenStream::from_ast(node),
Annotatable::TraitItem(node) | Annotatable::ImplItem(node) => {
to_token_stream(node, sess, CanSynthesizeMissingTokens::No)
}
Annotatable::ForeignItem(node) => {
to_token_stream(node, sess, CanSynthesizeMissingTokens::No)
TokenStream::from_ast(node)
}
Annotatable::ForeignItem(node) => TokenStream::from_ast(node),
Annotatable::Stmt(node) => {
assert!(!matches!(node.kind, ast::StmtKind::Empty));
to_token_stream(node, sess, CanSynthesizeMissingTokens::No)
TokenStream::from_ast(node)
}
Annotatable::Expr(node) => to_token_stream(node, sess, CanSynthesizeMissingTokens::No),
Annotatable::Expr(node) => TokenStream::from_ast(node),
Annotatable::Arm(..)
| Annotatable::ExprField(..)
| Annotatable::PatField(..)

compiler/rustc_expand/src/expand.rs

@@ -679,9 +679,12 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
)
) =>
{
rustc_parse::fake_token_stream(&self.cx.sess.parse_sess, item_inner)
rustc_parse::fake_token_stream_for_item(
&self.cx.sess.parse_sess,
item_inner,
)
}
_ => item.to_tokens(&self.cx.sess.parse_sess),
_ => item.to_tokens(),
};
let attr_item = attr.unwrap_normal_item();
if let MacArgs::Eq(..) = attr_item.args {

compiler/rustc_expand/src/proc_macro.rs

@@ -96,7 +96,7 @@ impl MultiItemModifier for ProcMacroDerive {
};
TokenTree::token(token::Interpolated(Lrc::new(nt)), DUMMY_SP).into()
} else {
item.to_tokens(&ecx.sess.parse_sess)
item.to_tokens()
};
let stream = {

compiler/rustc_expand/src/proc_macro_server.rs

@@ -2,14 +2,13 @@ use crate::base::ExtCtxt;
use rustc_ast as ast;
use rustc_ast::token;
use rustc_ast::tokenstream::{self, CanSynthesizeMissingTokens};
use rustc_ast::tokenstream::{DelimSpan, Spacing::*, TokenStream, TreeAndSpacing};
use rustc_ast::tokenstream::{self, DelimSpan, Spacing::*, TokenStream, TreeAndSpacing};
use rustc_ast_pretty::pprust;
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::sync::Lrc;
use rustc_errors::{Diagnostic, MultiSpan, PResult};
use rustc_parse::lexer::nfc_normalize;
use rustc_parse::{nt_to_tokenstream, parse_stream_from_source_str};
use rustc_parse::parse_stream_from_source_str;
use rustc_session::parse::ParseSess;
use rustc_span::def_id::CrateNum;
use rustc_span::symbol::{self, kw, sym, Symbol};
@@ -179,10 +178,9 @@
TokenTree::Ident(Ident::new(rustc.sess(), ident.name, is_raw, ident.span))
}
Interpolated(nt) => {
let stream = nt_to_tokenstream(&nt, rustc.sess(), CanSynthesizeMissingTokens::No);
TokenTree::Group(Group {
delimiter: pm::Delimiter::None,
stream,
stream: TokenStream::from_nonterminal_ast(&nt),
span: DelimSpan::from_single(span),
flatten: crate::base::nt_pretty_printing_compatibility_hack(&nt, rustc.sess()),
})
@@ -454,7 +452,7 @@ impl server::TokenStream for Rustc<'_, '_> {
// NOTE: For now, limit `expand_expr` to exclusively expand to literals.
// This may be relaxed in the future.
// We don't use `nt_to_tokenstream` as the tokenstream currently cannot
// We don't use `TokenStream::from_ast` as the tokenstream currently cannot
// be recovered in the general case.
match &expr.kind {
ast::ExprKind::Lit(l) => {

compiler/rustc_interface/src/passes.rs

@@ -494,13 +494,7 @@ pub fn lower_to_hir<'res, 'tcx>(
arena: &'tcx rustc_ast_lowering::Arena<'tcx>,
) -> &'tcx Crate<'tcx> {
// Lower AST to HIR.
let hir_crate = rustc_ast_lowering::lower_crate(
sess,
&*krate,
resolver,
rustc_parse::nt_to_tokenstream,
arena,
);
let hir_crate = rustc_ast_lowering::lower_crate(sess, &*krate, resolver, arena);
// Drop AST to free memory
sess.time("drop_ast", || std::mem::drop(krate));

compiler/rustc_parse/src/lib.rs

@@ -12,20 +12,16 @@
extern crate tracing;
use rustc_ast as ast;
use rustc_ast::token::{self, Nonterminal, Token, TokenKind};
use rustc_ast::tokenstream::{self, AttributesData, CanSynthesizeMissingTokens, LazyTokenStream};
use rustc_ast::tokenstream::{AttrAnnotatedTokenStream, AttrAnnotatedTokenTree};
use rustc_ast::tokenstream::{Spacing, TokenStream};
use rustc_ast::token;
use rustc_ast::tokenstream::TokenStream;
use rustc_ast::Attribute;
use rustc_ast::{AttrItem, MetaItem};
use rustc_ast::{HasAttrs, HasSpan, HasTokens};
use rustc_ast_pretty::pprust::{self, AstPrettyPrint};
use rustc_ast_pretty::pprust;
use rustc_data_structures::sync::Lrc;
use rustc_errors::{Applicability, Diagnostic, FatalError, Level, PResult};
use rustc_session::parse::ParseSess;
use rustc_span::{FileName, SourceFile, Span};
use std::fmt;
use std::path::Path;
pub const MACRO_ARGUMENTS: Option<&str> = Some("macro arguments");
@@ -240,111 +236,10 @@ pub fn parse_in<'a, T>(
Ok(result)
}
// NOTE(Centril): The following probably shouldn't be here but it acknowledges the
// fact that architecturally, we are using parsing (read on below to understand why).
pub fn to_token_stream(
node: &(impl HasAttrs + HasSpan + HasTokens + AstPrettyPrint + fmt::Debug),
sess: &ParseSess,
synthesize_tokens: CanSynthesizeMissingTokens,
) -> TokenStream {
if let Some(tokens) = prepend_attrs(&node.attrs(), node.tokens()) {
return tokens;
} else if matches!(synthesize_tokens, CanSynthesizeMissingTokens::Yes) {
return fake_token_stream(sess, node);
} else {
panic!("Missing tokens for nt {:?} at {:?}: {:?}", node, node.span(), node.pretty_print());
}
}
pub fn nt_to_tokenstream(
nt: &Nonterminal,
sess: &ParseSess,
synthesize_tokens: CanSynthesizeMissingTokens,
) -> TokenStream {
// A `Nonterminal` is often a parsed AST item. At this point we now
// need to convert the parsed AST to an actual token stream, e.g.
// un-parse it basically.
//
// Unfortunately there's not really a great way to do that in a
// guaranteed lossless fashion right now. The fallback here is to just
// stringify the AST node and reparse it, but this loses all span
// information.
//
// As a result, some AST nodes are annotated with the token stream they
// came from. Here we attempt to extract these lossless token streams
// before we fall back to the stringification.
let convert_tokens =
|tokens: Option<&LazyTokenStream>| Some(tokens?.create_token_stream().to_tokenstream());
let tokens = match *nt {
Nonterminal::NtItem(ref item) => prepend_attrs(&item.attrs, item.tokens.as_ref()),
Nonterminal::NtBlock(ref block) => convert_tokens(block.tokens.as_ref()),
Nonterminal::NtStmt(ref stmt) if let ast::StmtKind::Empty = stmt.kind => {
let tokens = AttrAnnotatedTokenStream::new(vec![(
tokenstream::AttrAnnotatedTokenTree::Token(Token::new(
TokenKind::Semi,
stmt.span,
)),
Spacing::Alone,
)]);
prepend_attrs(&stmt.attrs(), Some(&LazyTokenStream::new(tokens)))
}
Nonterminal::NtStmt(ref stmt) => prepend_attrs(&stmt.attrs(), stmt.tokens()),
Nonterminal::NtPat(ref pat) => convert_tokens(pat.tokens.as_ref()),
Nonterminal::NtTy(ref ty) => convert_tokens(ty.tokens.as_ref()),
Nonterminal::NtIdent(ident, is_raw) => {
Some(tokenstream::TokenTree::token(token::Ident(ident.name, is_raw), ident.span).into())
}
Nonterminal::NtLifetime(ident) => {
Some(tokenstream::TokenTree::token(token::Lifetime(ident.name), ident.span).into())
}
Nonterminal::NtMeta(ref attr) => convert_tokens(attr.tokens.as_ref()),
Nonterminal::NtPath(ref path) => convert_tokens(path.tokens.as_ref()),
Nonterminal::NtVis(ref vis) => convert_tokens(vis.tokens.as_ref()),
Nonterminal::NtExpr(ref expr) | Nonterminal::NtLiteral(ref expr) => {
prepend_attrs(&expr.attrs, expr.tokens.as_ref())
}
};
if let Some(tokens) = tokens {
return tokens;
} else if matches!(synthesize_tokens, CanSynthesizeMissingTokens::Yes) {
return nt_fake_token_stream(sess, nt);
} else {
panic!(
"Missing tokens for nt {:?} at {:?}: {:?}",
nt,
nt.span(),
pprust::nonterminal_to_string(nt)
);
}
}
fn prepend_attrs(attrs: &[Attribute], tokens: Option<&LazyTokenStream>) -> Option<TokenStream> {
let tokens = tokens?;
if attrs.is_empty() {
return Some(tokens.create_token_stream().to_tokenstream());
}
let attr_data = AttributesData { attrs: attrs.to_vec().into(), tokens: tokens.clone() };
let wrapped = AttrAnnotatedTokenStream::new(vec![(
AttrAnnotatedTokenTree::Attributes(attr_data),
Spacing::Alone,
)]);
Some(wrapped.to_tokenstream())
}
pub fn fake_token_stream(sess: &ParseSess, node: &(impl AstPrettyPrint + HasSpan)) -> TokenStream {
let source = node.pretty_print();
pub fn fake_token_stream_for_item(sess: &ParseSess, item: &ast::Item) -> TokenStream {
let source = pprust::item_to_string(item);
let filename = FileName::macro_expansion_source_code(&source);
parse_stream_from_source_str(filename, source, sess, Some(node.span()))
}
fn nt_fake_token_stream(sess: &ParseSess, nt: &Nonterminal) -> TokenStream {
let source = pprust::nonterminal_to_string(nt);
let filename = FileName::macro_expansion_source_code(&source);
parse_stream_from_source_str(filename, source, sess, Some(nt.span()))
parse_stream_from_source_str(filename, source, sess, Some(item.span))
}
pub fn fake_token_stream_for_crate(sess: &ParseSess, krate: &ast::Crate) -> TokenStream {

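For reference, a sketch of how the remaining fallback relates to the lossless path (hypothetical helper, assuming the surrounding crate context): the collected tokens are preferred, and pretty-print-plus-reparse is only the corner-case escape hatch, which loses the original spans.

```rust
// Hypothetical helper: prefer the tokens collected during parsing and only
// fall back to stringify-and-reparse when no tokens were collected.
fn item_to_tokens(sess: &ParseSess, item: &ast::Item) -> TokenStream {
    if item.tokens.is_some() {
        // Lossless path: `TokenStream::from_ast` reuses the collected tokens.
        TokenStream::from_ast(item)
    } else {
        // Fallback: spans in the result point at the pretty-printed source.
        fake_token_stream_for_item(sess, item)
    }
}
```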
compiler/rustc_session/src/utils.rs

@@ -1,13 +1,7 @@
use crate::parse::ParseSess;
use crate::session::Session;
use rustc_ast::token::{self, Delimiter, Nonterminal, Token};
use rustc_ast::tokenstream::CanSynthesizeMissingTokens;
use rustc_ast::tokenstream::{DelimSpan, TokenStream, TokenTree};
use rustc_data_structures::profiling::VerboseTimingGuard;
use std::path::{Path, PathBuf};
pub type NtToTokenstream = fn(&Nonterminal, &ParseSess, CanSynthesizeMissingTokens) -> TokenStream;
impl Session {
pub fn timer<'a>(&'a self, what: &'static str) -> VerboseTimingGuard<'a> {
self.prof.verbose_generic_activity(what)
@@ -94,55 +88,3 @@ impl CanonicalizedPath {
&self.original
}
}
// FIXME: Find a better spot for this - it needs to be accessible from `rustc_ast_lowering`,
// and needs to access `ParseSess
pub struct FlattenNonterminals<'a> {
pub parse_sess: &'a ParseSess,
pub synthesize_tokens: CanSynthesizeMissingTokens,
pub nt_to_tokenstream: NtToTokenstream,
}
impl<'a> FlattenNonterminals<'a> {
pub fn process_token_stream(&mut self, tokens: TokenStream) -> TokenStream {
fn can_skip(stream: &TokenStream) -> bool {
stream.trees().all(|tree| match tree {
TokenTree::Token(token) => !matches!(token.kind, token::Interpolated(_)),
TokenTree::Delimited(_, _, inner) => can_skip(inner),
})
}
if can_skip(&tokens) {
return tokens;
}
tokens.into_trees().flat_map(|tree| self.process_token_tree(tree).into_trees()).collect()
}
pub fn process_token_tree(&mut self, tree: TokenTree) -> TokenStream {
match tree {
TokenTree::Token(token) => self.process_token(token),
TokenTree::Delimited(span, delim, tts) => {
TokenTree::Delimited(span, delim, self.process_token_stream(tts)).into()
}
}
}
pub fn process_token(&mut self, token: Token) -> TokenStream {
match token.kind {
token::Interpolated(nt) if let token::NtIdent(ident, is_raw) = *nt => {
TokenTree::Token(Token::new(token::Ident(ident.name, is_raw), ident.span)).into()
}
token::Interpolated(nt) => {
let tts = (self.nt_to_tokenstream)(&nt, self.parse_sess, self.synthesize_tokens);
TokenTree::Delimited(
DelimSpan::from_single(token.span),
Delimiter::Invisible,
self.process_token_stream(tts),
)
.into()
}
_ => TokenTree::Token(token).into(),
}
}
}