Rollup merge of #73569 - Aaron1011:fix/macro-rules-group, r=petrochenkov
Handle `macro_rules!` tokens consistently across crates

When we serialize a `macro_rules!` macro, we use a 'lowered' `TokenStream` for its body, with all `Nonterminal`s expanded in place via `nt_to_tokenstream`. This matters when an 'outer' `macro_rules!` macro expands to an 'inner' `macro_rules!` macro: the inner macro may use tokens captured from the 'outer' macro in its definition.

As a result, invoking a foreign `macro_rules!` macro could use a different body `TokenStream` than invoking the same `macro_rules!` macro from within its own crate. This difference is observable by proc macros invoked by a `macro_rules!` macro: a `None`-delimited group is seen in the same-crate case (inserted when converting `Nonterminal`s to the `proc_macro` crate's structs), but no `None`-delimited group appears in the cross-crate case.

To fix this inconsistency, we now insert `None`-delimited groups when 'lowering' a `Nonterminal` in a `macro_rules!` body, just as we do in `proc_macro_server`. Additionally, we no longer print extra spaces for `None`-delimited groups; as far as pretty-printing is concerned, they don't exist (only their contents do). This ensures that the `Display` output of a `TokenStream` does not depend on which crate a `macro_rules!` macro was invoked from.

This PR is necessary in order to patch `solana-genesis-programs` for the upcoming hygiene serialization breakage (https://github.com/rust-lang/rust/pull/72121#issuecomment-646924847). The `solana-genesis-programs` crate will need to use a proc macro to re-span certain tokens in a nested `macro_rules!`, which requires us to consistently use a `None`-delimited group. See `src/test/ui/proc-macro/nested-macro-rules.rs` for an example of the kind of nested `macro_rules!` affected by this PR.
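To make the inconsistency concrete, here is a minimal sketch of the nested-`macro_rules!` shape involved (it mirrors the `nested-macro-rules.rs` auxiliary test added below; `print_bang` stands in for any proc macro that inspects its raw input):

// In crate A: the inner macro's definition embeds `$name`, a token
// captured by the outer macro.
#[macro_export]
macro_rules! outer_macro {
    ($name:ident) => {
        #[macro_export]
        macro_rules! inner_macro {
            ($wrapper:ident) => {
                // `$name` is an interpolated token here; a proc macro
                // invoked through `$wrapper` should see it wrapped in a
                // `None`-delimited group.
                $wrapper!($name)
            }
        }
    }
}

// In crate B: before this PR, the proc macro saw a `None`-delimited group
// when the macros were expanded in the defining crate, but bare tokens
// when `inner_macro!` was deserialized from crate A.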
This commit is contained in commit ce49944171.
@@ -560,6 +560,9 @@ impl MetaItemKind
         tokens: &mut impl Iterator<Item = TokenTree>,
     ) -> Option<MetaItemKind> {
         match tokens.next() {
+            Some(TokenTree::Delimited(_, token::NoDelim, inner_tokens)) => {
+                MetaItemKind::name_value_from_tokens(&mut inner_tokens.trees())
+            }
             Some(TokenTree::Token(token)) => {
                 Lit::from_token(&token).ok().map(MetaItemKind::NameValue)
             }
@@ -619,13 +622,20 @@ impl NestedMetaItem
     where
         I: Iterator<Item = TokenTree>,
     {
-        if let Some(TokenTree::Token(token)) = tokens.peek() {
-            if let Ok(lit) = Lit::from_token(token) {
-                tokens.next();
-                return Some(NestedMetaItem::Literal(lit));
-            }
-        }
+        match tokens.peek() {
+            Some(TokenTree::Token(token)) => {
+                if let Ok(lit) = Lit::from_token(token) {
+                    tokens.next();
+                    return Some(NestedMetaItem::Literal(lit));
+                }
+            }
+            Some(TokenTree::Delimited(_, token::NoDelim, inner_tokens)) => {
+                let inner_tokens = inner_tokens.clone();
+                tokens.next();
+                return NestedMetaItem::from_tokens(&mut inner_tokens.into_trees().peekable());
+            }
+            _ => {}
+        }

         MetaItem::from_tokens(tokens).map(NestedMetaItem::MetaItem)
     }
 }
@@ -39,8 +39,8 @@ use rustc_ast::ast;
 use rustc_ast::ast::*;
 use rustc_ast::attr;
 use rustc_ast::node_id::NodeMap;
-use rustc_ast::token::{self, Nonterminal, Token};
-use rustc_ast::tokenstream::{TokenStream, TokenTree};
+use rustc_ast::token::{self, DelimToken, Nonterminal, Token};
+use rustc_ast::tokenstream::{DelimSpan, TokenStream, TokenTree};
 use rustc_ast::visit::{self, AssocCtxt, Visitor};
 use rustc_ast::walk_list;
 use rustc_ast_pretty::pprust;
@@ -1029,7 +1029,12 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
         match token.kind {
             token::Interpolated(nt) => {
                 let tts = (self.nt_to_tokenstream)(&nt, &self.sess.parse_sess, token.span);
-                self.lower_token_stream(tts)
+                TokenTree::Delimited(
+                    DelimSpan::from_single(token.span),
+                    DelimToken::NoDelim,
+                    self.lower_token_stream(tts),
+                )
+                .into()
             }
             _ => TokenTree::Token(token).into(),
         }
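For proc-macro authors affected by the now consistently present group: a `None`-delimited group is transparent to parsing, so a macro that walks raw tokens can simply splice the group's contents into the surrounding stream. A minimal sketch of such a helper (an illustration, not part of this PR; it assumes it lives in a proc-macro crate, and `flatten` is a hypothetical name):

extern crate proc_macro;
use proc_macro::{Delimiter, TokenStream, TokenTree};

// Recursively splice the contents of `None`-delimited (invisible) groups
// into the surrounding stream; real delimiters are left intact.
fn flatten(stream: TokenStream) -> TokenStream {
    stream
        .into_iter()
        .flat_map(|tree| match tree {
            TokenTree::Group(group) if group.delimiter() == Delimiter::None => {
                flatten(group.stream()).into_iter().collect::<Vec<_>>()
            }
            other => vec![other],
        })
        .collect()
}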
@@ -257,7 +257,7 @@ fn token_kind_to_string_ext(tok: &TokenKind, convert_dollar_crate: Option<Span>)
         token::CloseDelim(token::Bracket) => "]".to_string(),
         token::OpenDelim(token::Brace) => "{".to_string(),
         token::CloseDelim(token::Brace) => "}".to_string(),
-        token::OpenDelim(token::NoDelim) | token::CloseDelim(token::NoDelim) => " ".to_string(),
+        token::OpenDelim(token::NoDelim) | token::CloseDelim(token::NoDelim) => "".to_string(),
         token::Pound => "#".to_string(),
         token::Dollar => "$".to_string(),
         token::Question => "?".to_string(),
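This pretty-printer change is what keeps `Display` output identical across crates: invisible delimiters now contribute no characters at all. A sketch of the effect as seen from inside a proc macro (illustrative only; `nodelim_display_demo` is a hypothetical helper, and `proc_macro` APIs only work while an expansion is in progress):

extern crate proc_macro;
use proc_macro::{Delimiter, Group, TokenStream, TokenTree};

fn nodelim_display_demo() -> String {
    // Build a stream whose only token tree is `1 + 1` wrapped in an
    // invisible (`None`-delimited) group.
    let inner: TokenStream = "1 + 1".parse().unwrap();
    let wrapped: TokenStream =
        std::iter::once(TokenTree::Group(Group::new(Delimiter::None, inner))).collect();
    // Previously the invisible delimiters printed as stray spaces; with
    // this change the result should be exactly "1 + 1", the same as for a
    // stream with no group at all.
    wrapped.to_string()
}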
@@ -387,6 +387,7 @@ pub fn compile_declarative_macro(
     def: &ast::Item,
     edition: Edition,
 ) -> SyntaxExtension {
+    debug!("compile_declarative_macro: {:?}", def);
     let mk_syn_ext = |expander| {
         SyntaxExtension::new(
             sess,
@@ -90,7 +90,7 @@ pub(super) fn parse(
 /// # Parameters
 ///
 /// - `tree`: the tree we wish to convert.
-/// - `trees`: an iterator over trees. We may need to read more tokens from it in order to finish
+/// - `outer_trees`: an iterator over trees. We may need to read more tokens from it in order to finish
 ///   converting `tree`
 /// - `expect_matchers`: same as for `parse` (see above).
 /// - `sess`: the parsing session. Any errors will be emitted to this session.
@@ -98,7 +98,7 @@ pub(super) fn parse(
 /// unstable features or not.
 fn parse_tree(
     tree: tokenstream::TokenTree,
-    trees: &mut impl Iterator<Item = tokenstream::TokenTree>,
+    outer_trees: &mut impl Iterator<Item = tokenstream::TokenTree>,
     expect_matchers: bool,
     sess: &ParseSess,
     node_id: NodeId,
@@ -106,56 +106,72 @@ fn parse_tree(
     // Depending on what `tree` is, we could be parsing different parts of a macro
     match tree {
         // `tree` is a `$` token. Look at the next token in `trees`
-        tokenstream::TokenTree::Token(Token { kind: token::Dollar, span }) => match trees.next() {
-            // `tree` is followed by a delimited set of token trees. This indicates the beginning
-            // of a repetition sequence in the macro (e.g. `$(pat)*`).
-            Some(tokenstream::TokenTree::Delimited(span, delim, tts)) => {
-                // Must have `(` not `{` or `[`
-                if delim != token::Paren {
-                    let tok = pprust::token_kind_to_string(&token::OpenDelim(delim));
-                    let msg = format!("expected `(`, found `{}`", tok);
-                    sess.span_diagnostic.span_err(span.entire(), &msg);
-                }
-                // Parse the contents of the sequence itself
-                let sequence = parse(tts, expect_matchers, sess, node_id);
-                // Get the Kleene operator and optional separator
-                let (separator, kleene) = parse_sep_and_kleene_op(trees, span.entire(), sess);
-                // Count the number of captured "names" (i.e., named metavars)
-                let name_captures = macro_parser::count_names(&sequence);
-                TokenTree::Sequence(
-                    span,
-                    Lrc::new(SequenceRepetition {
-                        tts: sequence,
-                        separator,
-                        kleene,
-                        num_captures: name_captures,
-                    }),
-                )
-            }
-
-            // `tree` is followed by an `ident`. This could be `$meta_var` or the `$crate` special
-            // metavariable that names the crate of the invocation.
-            Some(tokenstream::TokenTree::Token(token)) if token.is_ident() => {
-                let (ident, is_raw) = token.ident().unwrap();
-                let span = ident.span.with_lo(span.lo());
-                if ident.name == kw::Crate && !is_raw {
-                    TokenTree::token(token::Ident(kw::DollarCrate, is_raw), span)
-                } else {
-                    TokenTree::MetaVar(span, ident)
-                }
-            }
-
-            // `tree` is followed by a random token. This is an error.
-            Some(tokenstream::TokenTree::Token(token)) => {
-                let msg =
-                    format!("expected identifier, found `{}`", pprust::token_to_string(&token),);
-                sess.span_diagnostic.span_err(token.span, &msg);
-                TokenTree::MetaVar(token.span, Ident::invalid())
-            }
-
-            // There are no more tokens. Just return the `$` we already have.
-            None => TokenTree::token(token::Dollar, span),
-        },
+        tokenstream::TokenTree::Token(Token { kind: token::Dollar, span }) => {
+            // FIXME: Handle `None`-delimited groups in a more systematic way
+            // during parsing.
+            let mut next = outer_trees.next();
+            let mut trees: Box<dyn Iterator<Item = tokenstream::TokenTree>>;
+            if let Some(tokenstream::TokenTree::Delimited(_, token::NoDelim, tts)) = next {
+                trees = Box::new(tts.into_trees());
+                next = trees.next();
+            } else {
+                trees = Box::new(outer_trees);
+            }
+
+            match next {
+                // `tree` is followed by a delimited set of token trees. This indicates the beginning
+                // of a repetition sequence in the macro (e.g. `$(pat)*`).
+                Some(tokenstream::TokenTree::Delimited(span, delim, tts)) => {
+                    // Must have `(` not `{` or `[`
+                    if delim != token::Paren {
+                        let tok = pprust::token_kind_to_string(&token::OpenDelim(delim));
+                        let msg = format!("expected `(`, found `{}`", tok);
+                        sess.span_diagnostic.span_err(span.entire(), &msg);
+                    }
+                    // Parse the contents of the sequence itself
+                    let sequence = parse(tts, expect_matchers, sess, node_id);
+                    // Get the Kleene operator and optional separator
+                    let (separator, kleene) =
+                        parse_sep_and_kleene_op(&mut trees, span.entire(), sess);
+                    // Count the number of captured "names" (i.e., named metavars)
+                    let name_captures = macro_parser::count_names(&sequence);
+                    TokenTree::Sequence(
+                        span,
+                        Lrc::new(SequenceRepetition {
+                            tts: sequence,
+                            separator,
+                            kleene,
+                            num_captures: name_captures,
+                        }),
+                    )
+                }
+
+                // `tree` is followed by an `ident`. This could be `$meta_var` or the `$crate` special
+                // metavariable that names the crate of the invocation.
+                Some(tokenstream::TokenTree::Token(token)) if token.is_ident() => {
+                    let (ident, is_raw) = token.ident().unwrap();
+                    let span = ident.span.with_lo(span.lo());
+                    if ident.name == kw::Crate && !is_raw {
+                        TokenTree::token(token::Ident(kw::DollarCrate, is_raw), span)
+                    } else {
+                        TokenTree::MetaVar(span, ident)
+                    }
+                }
+
+                // `tree` is followed by a random token. This is an error.
+                Some(tokenstream::TokenTree::Token(token)) => {
+                    let msg = format!(
+                        "expected identifier, found `{}`",
+                        pprust::token_to_string(&token),
+                    );
+                    sess.span_diagnostic.span_err(token.span, &msg);
+                    TokenTree::MetaVar(token.span, Ident::invalid())
+                }
+
+                // There are no more tokens. Just return the `$` we already have.
+                None => TokenTree::token(token::Dollar, span),
+            }
+        }

         // `tree` is an arbitrary token. Keep it.
         tokenstream::TokenTree::Token(token) => TokenTree::Token(token),
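The `None`-group special case above fires when a nested `macro_rules!` definition contains tokens captured by the outer macro: the captured fragment is spliced into the inner matcher as an invisible group, so the parser sees `$` followed by a `None`-delimited group rather than a bare ident. A condensed sketch of the `meta-delim` auxiliary test added below (the `$foo` parameter from the real test is dropped here for brevity):

macro_rules! produce_it {
    // `$dollar_one` captures a literal `$` token; `$my_name` captures an ident.
    ($dollar_one:tt $my_name:ident) => {
        #[macro_export]
        macro_rules! meta_delim {
            // When this inner definition is compiled, `$my_name` arrives as a
            // `None`-delimited group containing `name`, which `parse_tree`
            // now looks through.
            ($dollar_one ($dollar_one $my_name:ident)*) => {
                stringify!($dollar_one ($dollar_one $my_name)*)
            }
        }
    }
}

produce_it!($ name);
// `meta_delim!(a b c)` now expands to `stringify!(a b c)`, i.e. "a b c".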
@@ -1049,6 +1049,7 @@ impl<'tcx> TyCtxt<'tcx> {
             Some(attr) => attr,
             None => return Bound::Unbounded,
         };
+        debug!("layout_scalar_valid_range: attr={:?}", attr);
         for meta in attr.meta_item_list().expect("rustc_layout_scalar_valid_range takes args") {
             match meta.literal().expect("attribute takes lit").kind {
                 ast::LitKind::Int(a, _) => return Bound::Included(a),
src/test/ui/macros/doc-comment.rs (new file)
@@ -0,0 +1,25 @@
// check-pass
// Tests that we properly handle a nested macro expansion
// involving a `#[doc]` attribute
#![deny(missing_docs)]
//! Crate docs

macro_rules! doc_comment {
    ($x:expr, $($tt:tt)*) => {
        #[doc = $x]
        $($tt)*
    }
}

macro_rules! make_comment {
    () => {
        doc_comment!("Function docs",
            pub fn bar() {}
        );
    }
}


make_comment!();

fn main() {}
src/test/ui/proc-macro/auxiliary/meta-delim.rs (new file)
@@ -0,0 +1,12 @@
macro_rules! produce_it {
    ($dollar_one:tt $foo:ident $my_name:ident) => {
        #[macro_export]
        macro_rules! meta_delim {
            ($dollar_one ($dollar_one $my_name:ident)*) => {
                stringify!($dollar_one ($dollar_one $my_name)*)
            }
        }
    }
}

produce_it!($my_name name);
src/test/ui/proc-macro/auxiliary/nested-macro-rules.rs (new file)
@@ -0,0 +1,15 @@
pub struct FirstStruct;

#[macro_export]
macro_rules! outer_macro {
    ($name:ident) => {
        #[macro_export]
        macro_rules! inner_macro {
            ($wrapper:ident) => {
                $wrapper!($name)
            }
        }
    }
}

outer_macro!(FirstStruct);
@@ -101,6 +101,12 @@ pub fn print_bang(input: TokenStream) -> TokenStream {
     print_helper(input, "BANG")
 }

+#[proc_macro]
+pub fn print_bang_consume(input: TokenStream) -> TokenStream {
+    print_helper(input, "BANG");
+    TokenStream::new()
+}
+
 #[proc_macro_attribute]
 pub fn print_attr(_: TokenStream, input: TokenStream) -> TokenStream {
     print_helper(input, "ATTR")
@@ -1,5 +1,4 @@
 PRINT-BANG INPUT (DISPLAY): A
-PRINT-BANG RE-COLLECTED (DISPLAY): A
 PRINT-BANG INPUT (DEBUG): TokenStream [
     Group {
         delimiter: None,
src/test/ui/proc-macro/meta-delim.rs (new file)
@@ -0,0 +1,12 @@
// aux-build:meta-delim.rs
// edition:2018
// run-pass

// Tests that we can properly deserialize a macro with strange delimiters
// See https://github.com/rust-lang/rust/pull/73569#issuecomment-650860457

extern crate meta_delim;

fn main() {
    assert_eq!("a bunch of idents", meta_delim::meta_delim!(a bunch of idents));
}
src/test/ui/proc-macro/nested-macro-rules.rs (new file)
@@ -0,0 +1,20 @@
// run-pass
// aux-build:nested-macro-rules.rs
// aux-build:test-macros.rs
// compile-flags: -Z span-debug
// edition:2018

extern crate nested_macro_rules;
extern crate test_macros;

use test_macros::print_bang;

use nested_macro_rules::FirstStruct;
struct SecondStruct;

fn main() {
    nested_macro_rules::inner_macro!(print_bang);

    nested_macro_rules::outer_macro!(SecondStruct);
    inner_macro!(print_bang);
}
src/test/ui/proc-macro/nested-macro-rules.stdout (new file)
@@ -0,0 +1,26 @@
PRINT-BANG INPUT (DISPLAY): FirstStruct
PRINT-BANG INPUT (DEBUG): TokenStream [
    Group {
        delimiter: None,
        stream: TokenStream [
            Ident {
                ident: "FirstStruct",
                span: $DIR/auxiliary/nested-macro-rules.rs:15:14: 15:25 (#3),
            },
        ],
        span: $DIR/auxiliary/nested-macro-rules.rs:9:27: 9:32 (#3),
    },
]
PRINT-BANG INPUT (DISPLAY): SecondStruct
PRINT-BANG INPUT (DEBUG): TokenStream [
    Group {
        delimiter: None,
        stream: TokenStream [
            Ident {
                ident: "SecondStruct",
                span: $DIR/nested-macro-rules.rs:18:38: 18:50 (#9),
            },
        ],
        span: $DIR/auxiliary/nested-macro-rules.rs:9:27: 9:32 (#8),
    },
]
src/test/ui/proc-macro/nodelim-groups.rs (new file)
@@ -0,0 +1,19 @@
// run-pass
// aux-build:test-macros.rs
// compile-flags: -Z span-debug
// edition:2018
//
// Tests the pretty-printing behavior of inserting `NoDelim` groups

extern crate test_macros;
use test_macros::print_bang_consume;

macro_rules! expand_it {
    (($val1:expr) ($val2:expr)) => { expand_it!($val1 + $val2) };
    ($val:expr) => { print_bang_consume!("hi" $val (1 + 1)) };
}

fn main() {
    expand_it!(1 + (25) + 1);
    expand_it!(("hello".len()) ("world".len()));
}
src/test/ui/proc-macro/nodelim-groups.stdout (new file)
@@ -0,0 +1,156 @@
PRINT-BANG INPUT (DISPLAY): "hi" 1 + (25) + 1 (1 + 1)
PRINT-BANG INPUT (DEBUG): TokenStream [
    Literal {
        kind: Str,
        symbol: "hi",
        suffix: None,
        span: $DIR/nodelim-groups.rs:13:42: 13:46 (#3),
    },
    Group {
        delimiter: None,
        stream: TokenStream [
            Literal {
                kind: Integer,
                symbol: "1",
                suffix: None,
                span: $DIR/nodelim-groups.rs:17:16: 17:17 (#0),
            },
            Punct {
                ch: '+',
                spacing: Alone,
                span: $DIR/nodelim-groups.rs:17:18: 17:19 (#0),
            },
            Group {
                delimiter: Parenthesis,
                stream: TokenStream [
                    Literal {
                        kind: Integer,
                        symbol: "25",
                        suffix: None,
                        span: $DIR/nodelim-groups.rs:17:21: 17:23 (#0),
                    },
                ],
                span: $DIR/nodelim-groups.rs:17:20: 17:24 (#0),
            },
            Punct {
                ch: '+',
                spacing: Alone,
                span: $DIR/nodelim-groups.rs:17:25: 17:26 (#0),
            },
            Literal {
                kind: Integer,
                symbol: "1",
                suffix: None,
                span: $DIR/nodelim-groups.rs:17:27: 17:28 (#0),
            },
        ],
        span: $DIR/nodelim-groups.rs:13:47: 13:51 (#3),
    },
    Group {
        delimiter: Parenthesis,
        stream: TokenStream [
            Literal {
                kind: Integer,
                symbol: "1",
                suffix: None,
                span: $DIR/nodelim-groups.rs:13:53: 13:54 (#3),
            },
            Punct {
                ch: '+',
                spacing: Alone,
                span: $DIR/nodelim-groups.rs:13:55: 13:56 (#3),
            },
            Literal {
                kind: Integer,
                symbol: "1",
                suffix: None,
                span: $DIR/nodelim-groups.rs:13:57: 13:58 (#3),
            },
        ],
        span: $DIR/nodelim-groups.rs:13:52: 13:59 (#3),
    },
]
PRINT-BANG INPUT (DISPLAY): "hi" "hello".len() + "world".len() (1 + 1)
PRINT-BANG RE-COLLECTED (DISPLAY): "hi" "hello" . len() + "world" . len() (1 + 1)
PRINT-BANG INPUT (DEBUG): TokenStream [
    Literal {
        kind: Str,
        symbol: "hi",
        suffix: None,
        span: $DIR/nodelim-groups.rs:13:42: 13:46 (#8),
    },
    Group {
        delimiter: None,
        stream: TokenStream [
            Literal {
                kind: Str,
                symbol: "hello",
                suffix: None,
                span: $DIR/nodelim-groups.rs:13:47: 13:51 (#8),
            },
            Punct {
                ch: '.',
                spacing: Alone,
                span: $DIR/nodelim-groups.rs:13:47: 13:51 (#8),
            },
            Ident {
                ident: "len",
                span: $DIR/nodelim-groups.rs:13:47: 13:51 (#8),
            },
            Group {
                delimiter: Parenthesis,
                stream: TokenStream [],
                span: $DIR/nodelim-groups.rs:13:47: 13:51 (#8),
            },
            Punct {
                ch: '+',
                spacing: Alone,
                span: $DIR/nodelim-groups.rs:13:47: 13:51 (#8),
            },
            Literal {
                kind: Str,
                symbol: "world",
                suffix: None,
                span: $DIR/nodelim-groups.rs:13:47: 13:51 (#8),
            },
            Punct {
                ch: '.',
                spacing: Alone,
                span: $DIR/nodelim-groups.rs:13:47: 13:51 (#8),
            },
            Ident {
                ident: "len",
                span: $DIR/nodelim-groups.rs:13:47: 13:51 (#8),
            },
            Group {
                delimiter: Parenthesis,
                stream: TokenStream [],
                span: $DIR/nodelim-groups.rs:13:47: 13:51 (#8),
            },
        ],
        span: $DIR/nodelim-groups.rs:13:47: 13:51 (#8),
    },
    Group {
        delimiter: Parenthesis,
        stream: TokenStream [
            Literal {
                kind: Integer,
                symbol: "1",
                suffix: None,
                span: $DIR/nodelim-groups.rs:13:53: 13:54 (#8),
            },
            Punct {
                ch: '+',
                spacing: Alone,
                span: $DIR/nodelim-groups.rs:13:55: 13:56 (#8),
            },
            Literal {
                kind: Integer,
                symbol: "1",
                suffix: None,
                span: $DIR/nodelim-groups.rs:13:57: 13:58 (#8),
            },
        ],
        span: $DIR/nodelim-groups.rs:13:52: 13:59 (#8),
    },
]
src/test/ui/unsafe/ranged_ints_macro.rs (new file)
@@ -0,0 +1,16 @@
// build-pass
#![feature(rustc_attrs)]

macro_rules! apply {
    ($val:expr) => {
        #[rustc_layout_scalar_valid_range_start($val)]
        #[repr(transparent)]
        pub(crate) struct NonZero<T>(pub(crate) T);
    }
}

apply!(1);

fn main() {
    let _x = unsafe { NonZero(1) };
}