From b9b254641719ce42e9b8ec56c058f71af017a317 Mon Sep 17 00:00:00 2001 From: Aaron Hill <aa1ronham@gmail.com> Date: Sat, 26 Sep 2020 19:33:42 -0400 Subject: [PATCH] Unconditionally capture tokens for attributes. This allows us to avoid synthesizing tokens in `prepend_attrs`, since we have the original tokens available. We still need to synthesize tokens when expanding `cfg_attr`, but this is an unavoidable consequence of the syntax of `cfg_attr` - the user does not supply the `#` and `[]` tokens that a `cfg_attr` expands to. --- compiler/rustc_ast/src/ast.rs | 1 + compiler/rustc_ast/src/attr/mod.rs | 10 +- compiler/rustc_ast/src/mut_visit.rs | 2 +- compiler/rustc_ast_lowering/src/expr.rs | 8 +- compiler/rustc_ast_lowering/src/lib.rs | 5 +- .../rustc_builtin_macros/src/cmdline_attrs.rs | 2 +- compiler/rustc_expand/src/config.rs | 35 ++++- compiler/rustc_expand/src/expand.rs | 1 + compiler/rustc_middle/src/ich/impls_syntax.rs | 3 +- compiler/rustc_parse/src/lib.rs | 55 ++------ compiler/rustc_parse/src/parser/attr.rs | 129 +++++++++++------- .../rustc_parse/src/parser/nonterminal.rs | 2 +- .../ast-json/ast-json-noexpand-output.stdout | 2 +- src/test/ui/ast-json/ast-json-output.stdout | 2 +- .../proc-macro/issue-75930-derive-cfg.stdout | 16 +-- 15 files changed, 153 insertions(+), 120 deletions(-) diff --git a/compiler/rustc_ast/src/ast.rs b/compiler/rustc_ast/src/ast.rs index 9eb934c0c9e..7224b482ed7 100644 --- a/compiler/rustc_ast/src/ast.rs +++ b/compiler/rustc_ast/src/ast.rs @@ -2423,6 +2423,7 @@ pub struct Attribute { /// or the construct this attribute is contained within (inner). pub style: AttrStyle, pub span: Span, + pub tokens: Option<LazyTokenStream>, } #[derive(Clone, Encodable, Decodable, Debug)] diff --git a/compiler/rustc_ast/src/attr/mod.rs b/compiler/rustc_ast/src/attr/mod.rs index 8351be222f6..34b03382c52 100644 --- a/compiler/rustc_ast/src/attr/mod.rs +++ b/compiler/rustc_ast/src/attr/mod.rs @@ -325,7 +325,7 @@ pub fn mk_attr(style: AttrStyle, path: Path, args: MacArgs, span: Span) -> Attri } pub fn mk_attr_from_item(style: AttrStyle, item: AttrItem, span: Span) -> Attribute { - Attribute { kind: AttrKind::Normal(item), id: mk_attr_id(), style, span } + Attribute { kind: AttrKind::Normal(item), id: mk_attr_id(), style, span, tokens: None } } /// Returns an inner attribute with the given value and span. 
@@ -344,7 +344,13 @@ pub fn mk_doc_comment( data: Symbol, span: Span, ) -> Attribute { - Attribute { kind: AttrKind::DocComment(comment_kind, data), id: mk_attr_id(), style, span } + Attribute { + kind: AttrKind::DocComment(comment_kind, data), + id: mk_attr_id(), + style, + span, + tokens: None, + } } pub fn list_contains_name(items: &[NestedMetaItem], name: Symbol) -> bool { diff --git a/compiler/rustc_ast/src/mut_visit.rs b/compiler/rustc_ast/src/mut_visit.rs index 382003c834e..166d36ce424 100644 --- a/compiler/rustc_ast/src/mut_visit.rs +++ b/compiler/rustc_ast/src/mut_visit.rs @@ -577,7 +577,7 @@ pub fn noop_visit_local<T: MutVisitor>(local: &mut P<Local>, vis: &mut T) { } pub fn noop_visit_attribute<T: MutVisitor>(attr: &mut Attribute, vis: &mut T) { - let Attribute { kind, id: _, style: _, span } = attr; + let Attribute { kind, id: _, style: _, span, tokens: _ } = attr; match kind { AttrKind::Normal(AttrItem { path, args, tokens: _ }) => { vis.visit_path(path); diff --git a/compiler/rustc_ast_lowering/src/expr.rs b/compiler/rustc_ast_lowering/src/expr.rs index c49fd76a313..a6ac056b93b 100644 --- a/compiler/rustc_ast_lowering/src/expr.rs +++ b/compiler/rustc_ast_lowering/src/expr.rs @@ -210,9 +210,9 @@ impl<'hir> LoweringContext<'_, 'hir> { ex.span = e.span; } // Merge attributes into the inner expression. - let mut attrs = e.attrs.clone(); + let mut attrs: Vec<_> = e.attrs.iter().map(|a| self.lower_attr(a)).collect(); attrs.extend::<Vec<_>>(ex.attrs.into()); - ex.attrs = attrs; + ex.attrs = attrs.into(); return ex; } @@ -1471,13 +1471,15 @@ impl<'hir> LoweringContext<'_, 'hir> { hir::MatchSource::ForLoopDesugar, )); + let attrs: Vec<_> = e.attrs.iter().map(|a| self.lower_attr(a)).collect(); + // This is effectively `{ let _result = ...; _result }`. // The construct was introduced in #21984 and is necessary to make sure that // temporaries in the `head` expression are dropped and do not leak to the // surrounding scope of the `match` since the `match` is not a terminating scope. // // Also, add the attributes to the outer returned expr node. 
- self.expr_drop_temps_mut(desugared_span, match_expr, e.attrs.clone()) + self.expr_drop_temps_mut(desugared_span, match_expr, attrs.into()) } /// Desugar `ExprKind::Try` from: `<expr>?` into: diff --git a/compiler/rustc_ast_lowering/src/lib.rs b/compiler/rustc_ast_lowering/src/lib.rs index a28d022c661..361bccd7a25 100644 --- a/compiler/rustc_ast_lowering/src/lib.rs +++ b/compiler/rustc_ast_lowering/src/lib.rs @@ -972,7 +972,8 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { AttrKind::DocComment(comment_kind, data) => AttrKind::DocComment(comment_kind, data), }; - Attribute { kind, id: attr.id, style: attr.style, span: attr.span } + // Tokens aren't needed after macro expansion and parsing + Attribute { kind, id: attr.id, style: attr.style, span: attr.span, tokens: None } } fn lower_mac_args(&mut self, args: &MacArgs) -> MacArgs { @@ -1713,7 +1714,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> { pat: self.lower_pat(&l.pat), init, span: l.span, - attrs: l.attrs.clone(), + attrs: l.attrs.iter().map(|a| self.lower_attr(a)).collect::<Vec<_>>().into(), source: hir::LocalSource::Normal, }, ids, diff --git a/compiler/rustc_builtin_macros/src/cmdline_attrs.rs b/compiler/rustc_builtin_macros/src/cmdline_attrs.rs index 5ed8b69d92a..747e48ece70 100644 --- a/compiler/rustc_builtin_macros/src/cmdline_attrs.rs +++ b/compiler/rustc_builtin_macros/src/cmdline_attrs.rs @@ -15,7 +15,7 @@ pub fn inject(mut krate: ast::Crate, parse_sess: &ParseSess, attrs: &[String]) - ); let start_span = parser.token.span; - let AttrItem { path, args, tokens: _ } = match parser.parse_attr_item() { + let AttrItem { path, args, tokens: _ } = match parser.parse_attr_item(false) { Ok(ai) => ai, Err(mut err) => { err.emit(); diff --git a/compiler/rustc_expand/src/config.rs b/compiler/rustc_expand/src/config.rs index dd087ab9150..3551b92967c 100644 --- a/compiler/rustc_expand/src/config.rs +++ b/compiler/rustc_expand/src/config.rs @@ -3,10 +3,13 @@ use rustc_ast::attr::HasAttrs; use rustc_ast::mut_visit::*; use rustc_ast::ptr::P; +use rustc_ast::token::{DelimToken, Token, TokenKind}; +use rustc_ast::tokenstream::{DelimSpan, LazyTokenStreamInner, Spacing, TokenStream, TokenTree}; use rustc_ast::{self as ast, AttrItem, Attribute, MetaItem}; use rustc_attr as attr; use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::map_in_place::MapInPlace; +use rustc_data_structures::sync::Lrc; use rustc_errors::{error_code, struct_span_err, Applicability, Handler}; use rustc_feature::{Feature, Features, State as FeatureState}; use rustc_feature::{ @@ -289,7 +292,37 @@ impl<'a> StripUnconfigured<'a> { expanded_attrs .into_iter() .flat_map(|(item, span)| { - let attr = attr::mk_attr_from_item(attr.style, item, span); + let orig_tokens = + attr.tokens.as_ref().unwrap_or_else(|| panic!("Missing tokens for {:?}", attr)); + + // We are taking an attribute of the form `#[cfg_attr(pred, attr)]` + // and producing an attribute of the form `#[attr]`. We + // have captured tokens for `attr` itself, but we need to + // synthesize tokens for the wrapper `#` and `[]`, which + // we do below. + + // Use the `#` in `#[cfg_attr(pred, attr)]` as the `#` token + // for `attr` when we expand it to `#[attr]` + let pound_token = orig_tokens.into_token_stream().trees().next().unwrap(); + if !matches!(pound_token, TokenTree::Token(Token { kind: TokenKind::Pound, .. })) { + panic!("Bad tokens for attribute {:?}", attr); + } + // We don't really have a good span to use for the synthesized `[]` + // in `#[attr]`, so just use the span of the `#` token. 
+ let bracket_group = TokenTree::Delimited( + DelimSpan::from_single(pound_token.span()), + DelimToken::Bracket, + item.tokens + .clone() + .unwrap_or_else(|| panic!("Missing tokens for {:?}", item)) + .into_token_stream(), + ); + + let mut attr = attr::mk_attr_from_item(attr.style, item, span); + attr.tokens = Some(Lrc::new(LazyTokenStreamInner::Ready(TokenStream::new(vec![ + (pound_token, Spacing::Alone), + (bracket_group, Spacing::Alone), + ])))); self.process_cfg_attr(attr) }) .collect() diff --git a/compiler/rustc_expand/src/expand.rs b/compiler/rustc_expand/src/expand.rs index 0b43225a242..ce198b3a41c 100644 --- a/compiler/rustc_expand/src/expand.rs +++ b/compiler/rustc_expand/src/expand.rs @@ -1785,6 +1785,7 @@ impl<'a, 'b> MutVisitor for InvocationCollector<'a, 'b> { span: at.span, id: at.id, style: at.style, + tokens: None, }; } else { noop_visit_attribute(at, self) diff --git a/compiler/rustc_middle/src/ich/impls_syntax.rs b/compiler/rustc_middle/src/ich/impls_syntax.rs index 7aba4fc64a9..cab2ca2919f 100644 --- a/compiler/rustc_middle/src/ich/impls_syntax.rs +++ b/compiler/rustc_middle/src/ich/impls_syntax.rs @@ -40,11 +40,12 @@ impl<'ctx> rustc_ast::HashStableContext for StableHashingContext<'ctx> { debug_assert!(!attr.ident().map_or(false, |ident| self.is_ignored_attr(ident.name))); debug_assert!(!attr.is_doc_comment()); - let ast::Attribute { kind, id: _, style, span } = attr; + let ast::Attribute { kind, id: _, style, span, tokens } = attr; if let ast::AttrKind::Normal(item) = kind { item.hash_stable(self, hasher); style.hash_stable(self, hasher); span.hash_stable(self, hasher); + tokens.as_ref().expect_none("Tokens should have been removed during lowering!"); } else { unreachable!(); } diff --git a/compiler/rustc_parse/src/lib.rs b/compiler/rustc_parse/src/lib.rs index e073f571088..ba416be6b38 100644 --- a/compiler/rustc_parse/src/lib.rs +++ b/compiler/rustc_parse/src/lib.rs @@ -252,9 +252,7 @@ pub fn nt_to_tokenstream(nt: &Nonterminal, sess: &ParseSess, span: Span) -> Toke let convert_tokens = |tokens: Option<LazyTokenStream>| tokens.map(|t| t.into_token_stream()); let tokens = match *nt { - Nonterminal::NtItem(ref item) => { - prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span) - } + Nonterminal::NtItem(ref item) => prepend_attrs(&item.attrs, item.tokens.as_ref()), Nonterminal::NtBlock(ref block) => convert_tokens(block.tokens.clone()), Nonterminal::NtStmt(ref stmt) => { // FIXME: We currently only collect tokens for `:stmt` @@ -279,7 +277,7 @@ pub fn nt_to_tokenstream(nt: &Nonterminal, sess: &ParseSess, span: Span) -> Toke if expr.tokens.is_none() { debug!("missing tokens for expr {:?}", expr); } - prepend_attrs(sess, &expr.attrs, expr.tokens.as_ref(), span) + prepend_attrs(&expr.attrs, expr.tokens.as_ref()) } }; @@ -603,10 +601,8 @@ fn token_probably_equal_for_proc_macro(first: &Token, other: &Token) -> bool { } fn prepend_attrs( - sess: &ParseSess, attrs: &[ast::Attribute], tokens: Option<&tokenstream::LazyTokenStream>, - span: rustc_span::Span, ) -> Option<tokenstream::TokenStream> { let tokens = tokens?.clone().into_token_stream(); if attrs.is_empty() { @@ -619,47 +615,12 @@ fn prepend_attrs( ast::AttrStyle::Outer, "inner attributes should prevent cached tokens from existing" ); - - let source = pprust::attribute_to_string(attr); - let macro_filename = FileName::macro_expansion_source_code(&source); - - let item = match attr.kind { - ast::AttrKind::Normal(ref item) => item, - ast::AttrKind::DocComment(..) 
=> { - let stream = parse_stream_from_source_str(macro_filename, source, sess, Some(span)); - builder.push(stream); - continue; - } - }; - - // synthesize # [ $path $tokens ] manually here - let mut brackets = tokenstream::TokenStreamBuilder::new(); - - // For simple paths, push the identifier directly - if item.path.segments.len() == 1 && item.path.segments[0].args.is_none() { - let ident = item.path.segments[0].ident; - let token = token::Ident(ident.name, ident.as_str().starts_with("r#")); - brackets.push(tokenstream::TokenTree::token(token, ident.span)); - - // ... and for more complicated paths, fall back to a reparse hack that - // should eventually be removed. - } else { - let stream = parse_stream_from_source_str(macro_filename, source, sess, Some(span)); - brackets.push(stream); - } - - brackets.push(item.args.outer_tokens()); - - // The span we list here for `#` and for `[ ... ]` are both wrong in - // that it encompasses more than each token, but it hopefully is "good - // enough" for now at least. - builder.push(tokenstream::TokenTree::token(token::Pound, attr.span)); - let delim_span = tokenstream::DelimSpan::from_single(attr.span); - builder.push(tokenstream::TokenTree::Delimited( - delim_span, - token::DelimToken::Bracket, - brackets.build(), - )); + builder.push( + attr.tokens + .clone() + .unwrap_or_else(|| panic!("Attribute {:?} is missing tokens!", attr)) + .into_token_stream(), + ); } builder.push(tokens.clone()); Some(builder.build()) diff --git a/compiler/rustc_parse/src/parser/attr.rs b/compiler/rustc_parse/src/parser/attr.rs index 73439643d69..20d41d8900f 100644 --- a/compiler/rustc_parse/src/parser/attr.rs +++ b/compiler/rustc_parse/src/parser/attr.rs @@ -29,42 +29,51 @@ impl<'a> Parser<'a> { let mut attrs: Vec<ast::Attribute> = Vec::new(); let mut just_parsed_doc_comment = false; loop { - debug!("parse_outer_attributes: self.token={:?}", self.token); - if self.check(&token::Pound) { - let inner_error_reason = if just_parsed_doc_comment { - "an inner attribute is not permitted following an outer doc comment" - } else if !attrs.is_empty() { - "an inner attribute is not permitted following an outer attribute" + let (attr, tokens) = self.collect_tokens(|this| { + debug!("parse_outer_attributes: self.token={:?}", this.token); + if this.check(&token::Pound) { + let inner_error_reason = if just_parsed_doc_comment { + "an inner attribute is not permitted following an outer doc comment" + } else if !attrs.is_empty() { + "an inner attribute is not permitted following an outer attribute" + } else { + DEFAULT_UNEXPECTED_INNER_ATTR_ERR_MSG + }; + let inner_parse_policy = InnerAttrPolicy::Forbidden { + reason: inner_error_reason, + saw_doc_comment: just_parsed_doc_comment, + prev_attr_sp: attrs.last().map(|a| a.span), + }; + let attr = this.parse_attribute_with_inner_parse_policy(inner_parse_policy)?; + just_parsed_doc_comment = false; + Ok(Some(attr)) + } else if let token::DocComment(comment_kind, attr_style, data) = this.token.kind { + let attr = + attr::mk_doc_comment(comment_kind, attr_style, data, this.token.span); + if attr.style != ast::AttrStyle::Outer { + this.sess + .span_diagnostic + .struct_span_err_with_code( + this.token.span, + "expected outer doc comment", + error_code!(E0753), + ) + .note( + "inner doc comments like this (starting with \ + `//!` or `/*!`) can only appear before items", + ) + .emit(); + } + this.bump(); + just_parsed_doc_comment = true; + Ok(Some(attr)) } else { - DEFAULT_UNEXPECTED_INNER_ATTR_ERR_MSG - }; - let inner_parse_policy = 
InnerAttrPolicy::Forbidden { - reason: inner_error_reason, - saw_doc_comment: just_parsed_doc_comment, - prev_attr_sp: attrs.last().map(|a| a.span), - }; - let attr = self.parse_attribute_with_inner_parse_policy(inner_parse_policy)?; - attrs.push(attr); - just_parsed_doc_comment = false; - } else if let token::DocComment(comment_kind, attr_style, data) = self.token.kind { - let attr = attr::mk_doc_comment(comment_kind, attr_style, data, self.token.span); - if attr.style != ast::AttrStyle::Outer { - self.sess - .span_diagnostic - .struct_span_err_with_code( - self.token.span, - "expected outer doc comment", - error_code!(E0753), - ) - .note( - "inner doc comments like this (starting with \ - `//!` or `/*!`) can only appear before items", - ) - .emit(); + Ok(None) } + })?; + if let Some(mut attr) = attr { + attr.tokens = Some(tokens); attrs.push(attr); - self.bump(); - just_parsed_doc_comment = true; } else { break; } @@ -99,7 +108,7 @@ impl<'a> Parser<'a> { if self.eat(&token::Not) { ast::AttrStyle::Inner } else { ast::AttrStyle::Outer }; self.expect(&token::OpenDelim(token::Bracket))?; - let item = self.parse_attr_item()?; + let item = self.parse_attr_item(false)?; self.expect(&token::CloseDelim(token::Bracket))?; let attr_sp = lo.to(self.prev_token.span); @@ -148,7 +157,7 @@ impl<'a> Parser<'a> { /// PATH /// PATH `=` UNSUFFIXED_LIT /// The delimiters or `=` are still put into the resulting token stream. - pub fn parse_attr_item(&mut self) -> PResult<'a, ast::AttrItem> { + pub fn parse_attr_item(&mut self, capture_tokens: bool) -> PResult<'a, ast::AttrItem> { let item = match self.token.kind { token::Interpolated(ref nt) => match **nt { Nonterminal::NtMeta(ref item) => Some(item.clone().into_inner()), @@ -160,9 +169,18 @@ impl<'a> Parser<'a> { self.bump(); item } else { - let path = self.parse_path(PathStyle::Mod)?; - let args = self.parse_attr_args()?; - ast::AttrItem { path, args, tokens: None } + let do_parse = |this: &mut Self| { + let path = this.parse_path(PathStyle::Mod)?; + let args = this.parse_attr_args()?; + Ok(ast::AttrItem { path, args, tokens: None }) + }; + if capture_tokens { + let (mut item, tokens) = self.collect_tokens(do_parse)?; + item.tokens = Some(tokens); + item + } else { + do_parse(self)? + } }) } @@ -174,20 +192,29 @@ impl<'a> Parser<'a> { crate fn parse_inner_attributes(&mut self) -> PResult<'a, Vec<ast::Attribute>> { let mut attrs: Vec<ast::Attribute> = vec![]; loop { - // Only try to parse if it is an inner attribute (has `!`). - if self.check(&token::Pound) && self.look_ahead(1, |t| t == &token::Not) { - let attr = self.parse_attribute(true)?; - assert_eq!(attr.style, ast::AttrStyle::Inner); - attrs.push(attr); - } else if let token::DocComment(comment_kind, attr_style, data) = self.token.kind { - // We need to get the position of this token before we bump. - let attr = attr::mk_doc_comment(comment_kind, attr_style, data, self.token.span); - if attr.style == ast::AttrStyle::Inner { - attrs.push(attr); - self.bump(); + let (attr, tokens) = self.collect_tokens(|this| { + // Only try to parse if it is an inner attribute (has `!`). + if this.check(&token::Pound) && this.look_ahead(1, |t| t == &token::Not) { + let attr = this.parse_attribute(true)?; + assert_eq!(attr.style, ast::AttrStyle::Inner); + Ok(Some(attr)) + } else if let token::DocComment(comment_kind, attr_style, data) = this.token.kind { + // We need to get the position of this token before we bump. 
+ let attr = + attr::mk_doc_comment(comment_kind, attr_style, data, this.token.span); + if attr.style == ast::AttrStyle::Inner { + this.bump(); + Ok(Some(attr)) + } else { + Ok(None) + } } else { - break; + Ok(None) } + })?; + if let Some(mut attr) = attr { + attr.tokens = Some(tokens); + attrs.push(attr); } else { break; } @@ -220,7 +247,7 @@ impl<'a> Parser<'a> { let mut expanded_attrs = Vec::with_capacity(1); while self.token.kind != token::Eof { let lo = self.token.span; - let item = self.parse_attr_item()?; + let item = self.parse_attr_item(true)?; expanded_attrs.push((item, lo.to(self.prev_token.span))); if !self.eat(&token::Comma) { break; diff --git a/compiler/rustc_parse/src/parser/nonterminal.rs b/compiler/rustc_parse/src/parser/nonterminal.rs index 15660fd574c..121f2699baa 100644 --- a/compiler/rustc_parse/src/parser/nonterminal.rs +++ b/compiler/rustc_parse/src/parser/nonterminal.rs @@ -188,7 +188,7 @@ impl<'a> Parser<'a> { token::NtPath(path) } NonterminalKind::Meta => { - let (mut attr, tokens) = self.collect_tokens(|this| this.parse_attr_item())?; + let (mut attr, tokens) = self.collect_tokens(|this| this.parse_attr_item(false))?; // We may have eaten a nonterminal, which could already have tokens if attr.tokens.is_none() { attr.tokens = Some(tokens); diff --git a/src/test/ui/ast-json/ast-json-noexpand-output.stdout b/src/test/ui/ast-json/ast-json-noexpand-output.stdout index 3d7d476cf6c..fb311a9045b 100644 --- a/src/test/ui/ast-json/ast-json-noexpand-output.stdout +++ b/src/test/ui/ast-json/ast-json-noexpand-output.stdout @@ -1 +1 @@ -{"module":{"inner":{"lo":0,"hi":0},"unsafety":"No","items":[{"attrs":[],"id":0,"span":{"lo":0,"hi":0},"vis":{"kind":"Inherited","span":{"lo":0,"hi":0},"tokens":null},"ident":{"name":"core","span":{"lo":0,"hi":0}},"kind":{"variant":"ExternCrate","fields":[null]},"tokens":null}],"inline":true},"attrs":[{"kind":{"variant":"Normal","fields":[{"path":{"span":{"lo":0,"hi":0},"segments":[{"ident":{"name":"crate_type","span":{"lo":0,"hi":0}},"id":0,"args":null}],"tokens":null},"args":{"variant":"Eq","fields":[{"lo":0,"hi":0},{"0":[[{"variant":"Token","fields":[{"kind":{"variant":"Literal","fields":[{"kind":"Str","symbol":"lib","suffix":null}]},"span":{"lo":0,"hi":0}}]},"Alone"]]}]},"tokens":null}]},"id":null,"style":"Inner","span":{"lo":0,"hi":0}}],"span":{"lo":0,"hi":0},"proc_macros":[]} 
+{"module":{"inner":{"lo":0,"hi":0},"unsafety":"No","items":[{"attrs":[],"id":0,"span":{"lo":0,"hi":0},"vis":{"kind":"Inherited","span":{"lo":0,"hi":0},"tokens":null},"ident":{"name":"core","span":{"lo":0,"hi":0}},"kind":{"variant":"ExternCrate","fields":[null]},"tokens":null}],"inline":true},"attrs":[{"kind":{"variant":"Normal","fields":[{"path":{"span":{"lo":0,"hi":0},"segments":[{"ident":{"name":"crate_type","span":{"lo":0,"hi":0}},"id":0,"args":null}],"tokens":null},"args":{"variant":"Eq","fields":[{"lo":0,"hi":0},{"0":[[{"variant":"Token","fields":[{"kind":{"variant":"Literal","fields":[{"kind":"Str","symbol":"lib","suffix":null}]},"span":{"lo":0,"hi":0}}]},"Alone"]]}]},"tokens":null}]},"id":null,"style":"Inner","span":{"lo":0,"hi":0},"tokens":{"0":[[{"variant":"Token","fields":[{"kind":"Pound","span":{"lo":0,"hi":0}}]},"Joint"],[{"variant":"Token","fields":[{"kind":"Not","span":{"lo":0,"hi":0}}]},"Alone"],[{"variant":"Delimited","fields":[{"open":{"lo":0,"hi":0},"close":{"lo":0,"hi":0}},"Bracket",{"0":[[{"variant":"Token","fields":[{"kind":{"variant":"Ident","fields":["crate_type",false]},"span":{"lo":0,"hi":0}}]},"Alone"],[{"variant":"Token","fields":[{"kind":"Eq","span":{"lo":0,"hi":0}}]},"Alone"],[{"variant":"Token","fields":[{"kind":{"variant":"Literal","fields":[{"kind":"Str","symbol":"lib","suffix":null}]},"span":{"lo":0,"hi":0}}]},"Alone"]]}]},"Alone"]]}}],"span":{"lo":0,"hi":0},"proc_macros":[]} diff --git a/src/test/ui/ast-json/ast-json-output.stdout b/src/test/ui/ast-json/ast-json-output.stdout index a4ce6061b4c..d1e0f409948 100644 --- a/src/test/ui/ast-json/ast-json-output.stdout +++ b/src/test/ui/ast-json/ast-json-output.stdout @@ -1 +1 @@ -{"module":{"inner":{"lo":0,"hi":0},"unsafety":"No","items":[{"attrs":[{"kind":{"variant":"Normal","fields":[{"path":{"span":{"lo":0,"hi":0},"segments":[{"ident":{"name":"prelude_import","span":{"lo":0,"hi":0}},"id":0,"args":null}],"tokens":null},"args":"Empty","tokens":null}]},"id":null,"style":"Outer","span":{"lo":0,"hi":0}}],"id":0,"span":{"lo":0,"hi":0},"vis":{"kind":"Inherited","span":{"lo":0,"hi":0},"tokens":null},"ident":{"name":"","span":{"lo":0,"hi":0}},"kind":{"variant":"Use","fields":[{"prefix":{"span":{"lo":0,"hi":0},"segments":[{"ident":{"name":"{{root}}","span":{"lo":0,"hi":0}},"id":0,"args":null},{"ident":{"name":"std","span":{"lo":0,"hi":0}},"id":0,"args":null},{"ident":{"name":"prelude","span":{"lo":0,"hi":0}},"id":0,"args":null},{"ident":{"name":"v1","span":{"lo":0,"hi":0}},"id":0,"args":null}],"tokens":null},"kind":"Glob","span":{"lo":0,"hi":0}}]},"tokens":null},{"attrs":[{"kind":{"variant":"Normal","fields":[{"path":{"span":{"lo":0,"hi":0},"segments":[{"ident":{"name":"macro_use","span":{"lo":0,"hi":0}},"id":0,"args":null}],"tokens":null},"args":"Empty","tokens":null}]},"id":null,"style":"Outer","span":{"lo":0,"hi":0}}],"id":0,"span":{"lo":0,"hi":0},"vis":{"kind":"Inherited","span":{"lo":0,"hi":0},"tokens":null},"ident":{"name":"std","span":{"lo":0,"hi":0}},"kind":{"variant":"ExternCrate","fields":[null]},"tokens":null},{"attrs":[],"id":0,"span":{"lo":0,"hi":0},"vis":{"kind":"Inherited","span":{"lo":0,"hi":0},"tokens":null},"ident":{"name":"core","span":{"lo":0,"hi":0}},"kind":{"variant":"ExternCrate","fields":[null]},"tokens":null}],"inline":true},"attrs":[{"kind":{"variant":"Normal","fields":[{"path":{"span":{"lo":0,"hi":0},"segments":[{"ident":{"name":"crate_type","span":{"lo":0,"hi":0}},"id":0,"args":null}],"tokens":null},"args":{"variant":"Eq","fields":[{"lo":0,"hi":0},{"0":[[{"variant":"Token","fields":[{"kind":
{"variant":"Literal","fields":[{"kind":"Str","symbol":"lib","suffix":null}]},"span":{"lo":0,"hi":0}}]},"Alone"]]}]},"tokens":null}]},"id":null,"style":"Inner","span":{"lo":0,"hi":0}}],"span":{"lo":0,"hi":0},"proc_macros":[]} +{"module":{"inner":{"lo":0,"hi":0},"unsafety":"No","items":[{"attrs":[{"kind":{"variant":"Normal","fields":[{"path":{"span":{"lo":0,"hi":0},"segments":[{"ident":{"name":"prelude_import","span":{"lo":0,"hi":0}},"id":0,"args":null}],"tokens":null},"args":"Empty","tokens":null}]},"id":null,"style":"Outer","span":{"lo":0,"hi":0},"tokens":null}],"id":0,"span":{"lo":0,"hi":0},"vis":{"kind":"Inherited","span":{"lo":0,"hi":0},"tokens":null},"ident":{"name":"","span":{"lo":0,"hi":0}},"kind":{"variant":"Use","fields":[{"prefix":{"span":{"lo":0,"hi":0},"segments":[{"ident":{"name":"{{root}}","span":{"lo":0,"hi":0}},"id":0,"args":null},{"ident":{"name":"std","span":{"lo":0,"hi":0}},"id":0,"args":null},{"ident":{"name":"prelude","span":{"lo":0,"hi":0}},"id":0,"args":null},{"ident":{"name":"v1","span":{"lo":0,"hi":0}},"id":0,"args":null}],"tokens":null},"kind":"Glob","span":{"lo":0,"hi":0}}]},"tokens":null},{"attrs":[{"kind":{"variant":"Normal","fields":[{"path":{"span":{"lo":0,"hi":0},"segments":[{"ident":{"name":"macro_use","span":{"lo":0,"hi":0}},"id":0,"args":null}],"tokens":null},"args":"Empty","tokens":null}]},"id":null,"style":"Outer","span":{"lo":0,"hi":0},"tokens":null}],"id":0,"span":{"lo":0,"hi":0},"vis":{"kind":"Inherited","span":{"lo":0,"hi":0},"tokens":null},"ident":{"name":"std","span":{"lo":0,"hi":0}},"kind":{"variant":"ExternCrate","fields":[null]},"tokens":null},{"attrs":[],"id":0,"span":{"lo":0,"hi":0},"vis":{"kind":"Inherited","span":{"lo":0,"hi":0},"tokens":null},"ident":{"name":"core","span":{"lo":0,"hi":0}},"kind":{"variant":"ExternCrate","fields":[null]},"tokens":null}],"inline":true},"attrs":[{"kind":{"variant":"Normal","fields":[{"path":{"span":{"lo":0,"hi":0},"segments":[{"ident":{"name":"crate_type","span":{"lo":0,"hi":0}},"id":0,"args":null}],"tokens":null},"args":{"variant":"Eq","fields":[{"lo":0,"hi":0},{"0":[[{"variant":"Token","fields":[{"kind":{"variant":"Literal","fields":[{"kind":"Str","symbol":"lib","suffix":null}]},"span":{"lo":0,"hi":0}}]},"Alone"]]}]},"tokens":null}]},"id":null,"style":"Inner","span":{"lo":0,"hi":0},"tokens":{"0":[[{"variant":"Token","fields":[{"kind":"Pound","span":{"lo":0,"hi":0}}]},"Joint"],[{"variant":"Token","fields":[{"kind":"Not","span":{"lo":0,"hi":0}}]},"Alone"],[{"variant":"Delimited","fields":[{"open":{"lo":0,"hi":0},"close":{"lo":0,"hi":0}},"Bracket",{"0":[[{"variant":"Token","fields":[{"kind":{"variant":"Ident","fields":["crate_type",false]},"span":{"lo":0,"hi":0}}]},"Alone"],[{"variant":"Token","fields":[{"kind":"Eq","span":{"lo":0,"hi":0}}]},"Alone"],[{"variant":"Token","fields":[{"kind":{"variant":"Literal","fields":[{"kind":"Str","symbol":"lib","suffix":null}]},"span":{"lo":0,"hi":0}}]},"Alone"]]}]},"Alone"]]}}],"span":{"lo":0,"hi":0},"proc_macros":[]} diff --git a/src/test/ui/proc-macro/issue-75930-derive-cfg.stdout b/src/test/ui/proc-macro/issue-75930-derive-cfg.stdout index f3daa56a49c..4c0810217bf 100644 --- a/src/test/ui/proc-macro/issue-75930-derive-cfg.stdout +++ b/src/test/ui/proc-macro/issue-75930-derive-cfg.stdout @@ -26,7 +26,7 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [ Punct { ch: '#', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:17:24: 17:40 (#0), + span: $DIR/issue-75930-derive-cfg.rs:17:1: 17:2 (#0), }, Group { delimiter: Bracket, @@ -46,12 +46,12 @@ PRINT-ATTR INPUT (DEBUG): 
TokenStream [ span: $DIR/issue-75930-derive-cfg.rs:17:29: 17:40 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:17:24: 17:40 (#0), + span: $DIR/issue-75930-derive-cfg.rs:17:1: 17:2 (#0), }, Punct { ch: '#', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:19:1: 19:17 (#0), + span: $DIR/issue-75930-derive-cfg.rs:19:1: 19:2 (#0), }, Group { delimiter: Bracket, @@ -71,12 +71,12 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [ span: $DIR/issue-75930-derive-cfg.rs:19:9: 19:16 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:19:1: 19:17 (#0), + span: $DIR/issue-75930-derive-cfg.rs:19:2: 19:17 (#0), }, Punct { ch: '#', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:20:1: 20:19 (#0), + span: $DIR/issue-75930-derive-cfg.rs:20:1: 20:2 (#0), }, Group { delimiter: Bracket, @@ -96,12 +96,12 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [ span: $DIR/issue-75930-derive-cfg.rs:20:15: 20:18 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:20:1: 20:19 (#0), + span: $DIR/issue-75930-derive-cfg.rs:20:2: 20:19 (#0), }, Punct { ch: '#', spacing: Alone, - span: $DIR/issue-75930-derive-cfg.rs:16:1: 16:19 (#0), + span: $DIR/issue-75930-derive-cfg.rs:16:1: 16:2 (#0), }, Group { delimiter: Bracket, @@ -121,7 +121,7 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [ span: $DIR/issue-75930-derive-cfg.rs:16:15: 16:18 (#0), }, ], - span: $DIR/issue-75930-derive-cfg.rs:16:1: 16:19 (#0), + span: $DIR/issue-75930-derive-cfg.rs:16:2: 16:19 (#0), }, Ident { ident: "struct",