Mirror of https://github.com/rust-lang/rust.git, synced 2024-10-31 06:22:00 +00:00.
Auto merge of #127892 - tgross35:rollup-7j9wkzc, r=tgross35
Rollup of 9 pull requests

Successful merges:

- #127542 ([`macro_metavar_expr_concat`] Add support for literals)
- #127652 (Unignore cg_gcc fmt)
- #127664 (Fix precise capturing suggestion for hidden regions when we have APITs)
- #127806 (Some parser improvements)
- #127828 (Commonize `uname -m` results for `aarch64` in docker runner)
- #127845 (unix: break `stack_overflow::install_main_guard` into smaller fn)
- #127859 (ptr::metadata: avoid references to extern types)
- #127861 (Document the column numbers for the dbg! macro)
- #127875 (style-guide: Clarify version-sorting)

r? `@ghost`
`@rustbot` modify labels: rollup
This commit is contained in: commit 4bb2f27861
@@ -699,8 +699,7 @@ impl Token {
false
}

/// Would `maybe_whole_expr` in `parser.rs` return `Ok(..)`?
/// That is, is this a pre-parsed expression dropped into the token stream
/// Is this a pre-parsed expression dropped into the token stream
/// (which happens while parsing the result of macro expansion)?
pub fn is_whole_expr(&self) -> bool {
if let Interpolated(nt) = &self.kind
@@ -1 +1,3 @@
version = "Two"
use_small_heuristics = "Max"
merge_derives = false
@@ -34,7 +34,7 @@ impl Args {
"--out-path" => match args.next() {
Some(path) if !path.is_empty() => out_path = Some(path),
_ => {
return Err("Expected an argument after `--out-path`, found nothing".into())
return Err("Expected an argument after `--out-path`, found nothing".into());
}
},
"--help" => {
@@ -54,7 +54,7 @@ impl ConfigFile {
config.gcc_path = Some(value.as_str().to_string())
}
("gcc-path", _) => {
return failed_config_parsing(config_file, "Expected a string for `gcc-path`")
return failed_config_parsing(config_file, "Expected a string for `gcc-path`");
}
("download-gccjit", TomlValue::Boolean(value)) => {
config.download_gccjit = Some(*value)

@@ -63,7 +63,7 @@ impl ConfigFile {
return failed_config_parsing(
config_file,
"Expected a boolean for `download-gccjit`",
)
);
}
_ => return failed_config_parsing(config_file, &format!("Unknown key `{}`", key)),
}
@@ -73,7 +73,7 @@ impl ConfigFile {
return failed_config_parsing(
config_file,
"At least one of `gcc-path` or `download-gccjit` value must be set",
)
);
}
(Some(_), Some(true)) => {
println!(
@@ -144,7 +144,7 @@ impl ConfigInfo {
_ => {
return Err(
"Expected a value after `--target-triple`, found nothing".to_string()
)
);
}
},
"--out-dir" => match args.next() {

@@ -158,7 +158,7 @@ impl ConfigInfo {
self.config_file = Some(arg.to_string());
}
_ => {
return Err("Expected a value after `--config-file`, found nothing".to_string())
return Err("Expected a value after `--config-file`, found nothing".to_string());
}
},
"--release-sysroot" => self.sysroot_release_channel = true,

@@ -169,7 +169,7 @@ impl ConfigInfo {
self.cg_gcc_path = Some(arg.into());
}
_ => {
return Err("Expected a value after `--cg_gcc-path`, found nothing".to_string())
return Err("Expected a value after `--cg_gcc-path`, found nothing".to_string());
}
},
"--use-backend" => match args.next() {
@@ -277,7 +277,7 @@ impl ConfigInfo {
self.gcc_path = match gcc_path {
Some(path) => path,
None => {
return Err(format!("missing `gcc-path` value from `{}`", config_file.display(),))
return Err(format!("missing `gcc-path` value from `{}`", config_file.display(),));
}
};
Ok(())
@@ -109,7 +109,7 @@ impl TestArg {
test_arg.flags.extend_from_slice(&["--features".into(), feature]);
}
_ => {
return Err("Expected an argument after `--features`, found nothing".into())
return Err("Expected an argument after `--features`, found nothing".into());
}
},
"--use-system-gcc" => {
@@ -458,11 +458,7 @@ fn setup_rustc(env: &mut Env, args: &TestArg) -> Result<PathBuf, String> {
.map_err(|error| format!("Failed to retrieve cargo path: {:?}", error))
.and_then(|cargo| {
let cargo = cargo.trim().to_owned();
if cargo.is_empty() {
Err(format!("`cargo` path is empty"))
} else {
Ok(cargo)
}
if cargo.is_empty() { Err(format!("`cargo` path is empty")) } else { Ok(cargo) }
})?;
let rustc = String::from_utf8(
run_command_with_env(&[&"rustup", &toolchain, &"which", &"rustc"], rust_dir, Some(env))?

@@ -471,11 +467,7 @@ fn setup_rustc(env: &mut Env, args: &TestArg) -> Result<PathBuf, String> {
.map_err(|error| format!("Failed to retrieve rustc path: {:?}", error))
.and_then(|rustc| {
let rustc = rustc.trim().to_owned();
if rustc.is_empty() {
Err(format!("`rustc` path is empty"))
} else {
Ok(rustc)
}
if rustc.is_empty() { Err(format!("`rustc` path is empty")) } else { Ok(rustc) }
})?;
let llvm_filecheck = match run_command_with_env(
&[
@@ -175,11 +175,7 @@ pub fn cargo_install(to_install: &str) -> Result<(), String> {
pub fn get_os_name() -> Result<String, String> {
let output = run_command(&[&"uname"], None)?;
let name = std::str::from_utf8(&output.stdout).unwrap_or("").trim().to_string();
if !name.is_empty() {
Ok(name)
} else {
Err("Failed to retrieve the OS name".to_string())
}
if !name.is_empty() { Ok(name) } else { Err("Failed to retrieve the OS name".to_string()) }
}

#[derive(Default, PartialEq)]
@@ -26,11 +26,7 @@ impl<'a, 'gcc, 'tcx> AbiBuilderMethods<'tcx> for Builder<'a, 'gcc, 'tcx> {
} else {
false
};
if on_stack {
param.to_lvalue().get_address(None)
} else {
param.to_rvalue()
}
if on_stack { param.to_lvalue().get_address(None) } else { param.to_rvalue() }
}
}
@@ -858,11 +858,7 @@ fn modifier_to_gcc(
InlineAsmRegClass::AArch64(AArch64InlineAsmRegClass::reg) => modifier,
InlineAsmRegClass::AArch64(AArch64InlineAsmRegClass::vreg)
| InlineAsmRegClass::AArch64(AArch64InlineAsmRegClass::vreg_low16) => {
if modifier == Some('v') {
None
} else {
modifier
}
if modifier == Some('v') { None } else { modifier }
}
InlineAsmRegClass::AArch64(AArch64InlineAsmRegClass::preg) => {
unreachable!("clobber-only")
@@ -1043,11 +1043,7 @@ impl<'a, 'gcc, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'gcc, 'tcx> {
let llty = place.layout.scalar_pair_element_gcc_type(self, i);
let load = self.load(llty, llptr, align);
scalar_load_metadata(self, load, scalar);
if scalar.is_bool() {
self.trunc(load, self.type_i1())
} else {
load
}
if scalar.is_bool() { self.trunc(load, self.type_i1()) } else { load }
};

OperandValue::Pair(
@@ -1795,18 +1791,10 @@ impl<'a, 'gcc, 'tcx> Builder<'a, 'gcc, 'tcx> {
// This already happens today with u128::MAX = 2^128 - 1 > f32::MAX.
let int_max = |signed: bool, int_width: u64| -> u128 {
let shift_amount = 128 - int_width;
if signed {
i128::MAX as u128 >> shift_amount
} else {
u128::MAX >> shift_amount
}
if signed { i128::MAX as u128 >> shift_amount } else { u128::MAX >> shift_amount }
};
let int_min = |signed: bool, int_width: u64| -> i128 {
if signed {
i128::MIN >> (128 - int_width)
} else {
0
}
if signed { i128::MIN >> (128 - int_width) } else { 0 }
};

let compute_clamp_bounds_single = |signed: bool, int_width: u64| -> (u128, u128) {
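To make the saturation bounds above concrete, here is a small standalone sketch (not part of the cg_gcc change itself) that evaluates the same closure bodies for a couple of widths:

```rust
fn main() {
    // Same expressions as the closures in the diff above, evaluated standalone.
    let int_max = |signed: bool, int_width: u32| -> u128 {
        let shift_amount = 128 - int_width;
        if signed { i128::MAX as u128 >> shift_amount } else { u128::MAX >> shift_amount }
    };
    let int_min = |signed: bool, int_width: u32| -> i128 {
        if signed { i128::MIN >> (128 - int_width) } else { 0 }
    };

    // i8 saturates to [-128, 127], u16 to [0, 65535].
    assert_eq!((int_min(true, 8), int_max(true, 8)), (-128, 127));
    assert_eq!((int_min(false, 16), int_max(false, 16)), (0, 65_535));
}
```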
@@ -58,11 +58,7 @@ pub fn type_is_pointer(typ: Type<'_>) -> bool {

impl<'gcc, 'tcx> ConstMethods<'tcx> for CodegenCx<'gcc, 'tcx> {
fn const_null(&self, typ: Type<'gcc>) -> RValue<'gcc> {
if type_is_pointer(typ) {
self.context.new_null(typ)
} else {
self.const_int(typ, 0)
}
if type_is_pointer(typ) { self.context.new_null(typ) } else { self.const_int(typ, 0) }
}

fn const_undef(&self, typ: Type<'gcc>) -> RValue<'gcc> {
@@ -119,6 +119,8 @@ impl MetaVarExpr {
}
}

/// Indicates what is placed in a `concat` parameter. For example, literals
/// (`${concat("foo", "bar")}`) or adhoc identifiers (`${concat(foo, bar)}`).
#[derive(Debug, Decodable, Encodable, PartialEq)]
pub(crate) enum MetaVarExprConcatElem {
/// Identifier WITHOUT a preceding dollar sign, which means that this identifier should be
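Since this enum now distinguishes literal parameters from adhoc identifiers and metavariables, here is a minimal sketch of what the extended feature accepts on the user side (nightly-only, behind `macro_metavar_expr_concat`; the identifier names are made up for illustration):

```rust
#![feature(macro_metavar_expr_concat)]

macro_rules! make_const {
    ($name:ident) => {
        // An adhoc identifier, a metavariable, and a string literal,
        // concatenated into a single new identifier by `${concat(..)}`.
        const ${concat(PREFIX_, $name, "_SUFFIX")}: u8 = 1;
    };
}

make_const!(FOO);

fn main() {
    assert_eq!(PREFIX_FOO_SUFFIX, 1);
}
```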
@@ -6,9 +6,10 @@ use crate::mbe::macro_parser::{NamedMatch, NamedMatch::*};
use crate::mbe::metavar_expr::{MetaVarExprConcatElem, RAW_IDENT_ERR};
use crate::mbe::{self, KleeneOp, MetaVarExpr};
use rustc_ast::mut_visit::{self, MutVisitor};
use rustc_ast::token::IdentIsRaw;
use rustc_ast::token::{self, Delimiter, Token, TokenKind};
use rustc_ast::token::{self, Delimiter, Nonterminal, Token, TokenKind};
use rustc_ast::token::{IdentIsRaw, Lit, LitKind};
use rustc_ast::tokenstream::{DelimSpacing, DelimSpan, Spacing, TokenStream, TokenTree};
use rustc_ast::ExprKind;
use rustc_data_structures::fx::FxHashMap;
use rustc_errors::{pluralize, Diag, DiagCtxtHandle, PResult};
use rustc_parse::lexer::nfc_normalize;

@@ -17,7 +18,7 @@ use rustc_session::parse::ParseSess;
use rustc_session::parse::SymbolGallery;
use rustc_span::hygiene::{LocalExpnId, Transparency};
use rustc_span::symbol::{sym, Ident, MacroRulesNormalizedIdent};
use rustc_span::{with_metavar_spans, Span, SyntaxContext};
use rustc_span::{with_metavar_spans, Span, Symbol, SyntaxContext};
use smallvec::{smallvec, SmallVec};
use std::mem;

@@ -691,12 +692,12 @@ fn transcribe_metavar_expr<'a>(
MetaVarExpr::Concat(ref elements) => {
let mut concatenated = String::new();
for element in elements.into_iter() {
let string = match element {
MetaVarExprConcatElem::Ident(elem) => elem.to_string(),
MetaVarExprConcatElem::Literal(elem) => elem.as_str().into(),
MetaVarExprConcatElem::Var(elem) => extract_ident(dcx, *elem, interp)?,
let symbol = match element {
MetaVarExprConcatElem::Ident(elem) => elem.name,
MetaVarExprConcatElem::Literal(elem) => *elem,
MetaVarExprConcatElem::Var(elem) => extract_var_symbol(dcx, *elem, interp)?,
};
concatenated.push_str(&string);
concatenated.push_str(symbol.as_str());
}
let symbol = nfc_normalize(&concatenated);
let concatenated_span = visited_span();
@@ -750,32 +751,42 @@ fn transcribe_metavar_expr<'a>(
Ok(())
}

/// Extracts an identifier that can be originated from a `$var:ident` variable or from a token tree.
fn extract_ident<'a>(
/// Extracts an metavariable symbol that can be an identifier, a token tree or a literal.
fn extract_var_symbol<'a>(
dcx: DiagCtxtHandle<'a>,
ident: Ident,
interp: &FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,
) -> PResult<'a, String> {
) -> PResult<'a, Symbol> {
if let NamedMatch::MatchedSingle(pnr) = matched_from_ident(dcx, ident, interp)? {
if let ParseNtResult::Ident(nt_ident, is_raw) = pnr {
if let IdentIsRaw::Yes = is_raw {
return Err(dcx.struct_span_err(ident.span, RAW_IDENT_ERR));
}
return Ok(nt_ident.to_string());
return Ok(nt_ident.name);
}
if let ParseNtResult::Tt(TokenTree::Token(
Token { kind: TokenKind::Ident(token_ident, is_raw), .. },
_,
)) = pnr
{

if let ParseNtResult::Tt(TokenTree::Token(Token { kind, .. }, _)) = pnr {
if let TokenKind::Ident(symbol, is_raw) = kind {
if let IdentIsRaw::Yes = is_raw {
return Err(dcx.struct_span_err(ident.span, RAW_IDENT_ERR));
}
return Ok(token_ident.to_string());
return Ok(*symbol);
}

if let TokenKind::Literal(Lit { kind: LitKind::Str, symbol, suffix: None }) = kind {
return Ok(*symbol);
}
}
Err(dcx.struct_span_err(
ident.span,
"`${concat(..)}` currently only accepts identifiers or meta-variables as parameters",
))

if let ParseNtResult::Nt(nt) = pnr
&& let Nonterminal::NtLiteral(expr) = &**nt
&& let ExprKind::Lit(Lit { kind: LitKind::Str, symbol, suffix: None }) = &expr.kind
{
return Ok(*symbol);
}
}
Err(dcx
.struct_err("metavariables of `${concat(..)}` must be of type `ident`, `literal` or `tt`")
.with_note("currently only string literals are supported")
.with_span(ident.span))
}
@@ -225,6 +225,8 @@ infer_outlives_content = lifetime of reference outlives lifetime of borrowed content...
infer_precise_capturing_existing = add `{$new_lifetime}` to the `use<...>` bound to explicitly capture it
infer_precise_capturing_new = add a `use<...>` bound to explicitly capture `{$new_lifetime}`

infer_precise_capturing_new_but_apit = add a `use<...>` bound to explicitly capture `{$new_lifetime}` after turning all argument-position `impl Trait` into type parameters, noting that this possibly affects the API of this crate

infer_prlf_defined_with_sub = the lifetime `{$sub_symbol}` defined here...
infer_prlf_defined_without_sub = the lifetime defined here...
infer_prlf_known_limitation = this is a known limitation that will be removed in the future (see issue #100013 <https://github.com/rust-lang/rust/issues/100013> for more information)

@@ -387,6 +389,9 @@ infer_type_annotations_needed = {$source_kind ->
.label = type must be known at this point

infer_types_declared_different = these two types are declared with different lifetimes...

infer_warn_removing_apit_params = you could use a `use<...>` bound to explicitly capture `{$new_lifetime}`, but argument-position `impl Trait`s are not nameable

infer_where_copy_predicates = copy the `where` clause predicates from the trait

infer_where_remove = remove the `where` clause
@ -1269,9 +1269,13 @@ fn suggest_precise_capturing<'tcx>(
|
||||
captured_lifetime: ty::Region<'tcx>,
|
||||
diag: &mut Diag<'_>,
|
||||
) {
|
||||
let hir::OpaqueTy { bounds, .. } =
|
||||
let hir::OpaqueTy { bounds, origin, .. } =
|
||||
tcx.hir_node_by_def_id(opaque_def_id).expect_item().expect_opaque_ty();
|
||||
|
||||
let hir::OpaqueTyOrigin::FnReturn(fn_def_id) = *origin else {
|
||||
return;
|
||||
};
|
||||
|
||||
let new_lifetime = Symbol::intern(&captured_lifetime.to_string());
|
||||
|
||||
if let Some((args, span)) = bounds.iter().find_map(|bound| match bound {
|
||||
@ -1306,6 +1310,7 @@ fn suggest_precise_capturing<'tcx>(
|
||||
|
||||
let variances = tcx.variances_of(opaque_def_id);
|
||||
let mut generics = tcx.generics_of(opaque_def_id);
|
||||
let mut synthetics = vec![];
|
||||
loop {
|
||||
for param in &generics.own_params {
|
||||
if variances[param.index as usize] == ty::Bivariant {
|
||||
@ -1317,9 +1322,7 @@ fn suggest_precise_capturing<'tcx>(
|
||||
captured_lifetimes.insert(param.name);
|
||||
}
|
||||
ty::GenericParamDefKind::Type { synthetic: true, .. } => {
|
||||
// FIXME: We can't provide a good suggestion for
|
||||
// `use<...>` if we have an APIT. Bail for now.
|
||||
return;
|
||||
synthetics.push((tcx.def_span(param.def_id), param.name));
|
||||
}
|
||||
ty::GenericParamDefKind::Type { .. }
|
||||
| ty::GenericParamDefKind::Const { .. } => {
|
||||
@ -1340,6 +1343,7 @@ fn suggest_precise_capturing<'tcx>(
|
||||
return;
|
||||
}
|
||||
|
||||
if synthetics.is_empty() {
|
||||
let concatenated_bounds = captured_lifetimes
|
||||
.into_iter()
|
||||
.chain(captured_non_lifetimes)
|
||||
@ -1352,5 +1356,73 @@ fn suggest_precise_capturing<'tcx>(
|
||||
new_lifetime,
|
||||
concatenated_bounds,
|
||||
});
|
||||
} else {
|
||||
let mut next_fresh_param = || {
|
||||
["T", "U", "V", "W", "X", "Y", "A", "B", "C"]
|
||||
.into_iter()
|
||||
.map(Symbol::intern)
|
||||
.chain((0..).map(|i| Symbol::intern(&format!("T{i}"))))
|
||||
.find(|s| captured_non_lifetimes.insert(*s))
|
||||
.unwrap()
|
||||
};
|
||||
|
||||
let mut new_params = String::new();
|
||||
let mut suggs = vec![];
|
||||
let mut apit_spans = vec![];
|
||||
|
||||
for (i, (span, name)) in synthetics.into_iter().enumerate() {
|
||||
apit_spans.push(span);
|
||||
|
||||
let fresh_param = next_fresh_param();
|
||||
|
||||
// Suggest renaming.
|
||||
suggs.push((span, fresh_param.to_string()));
|
||||
|
||||
// Super jank. Turn `impl Trait` into `T: Trait`.
|
||||
//
|
||||
// This currently involves stripping the `impl` from the name of
|
||||
// the parameter, since APITs are always named after how they are
|
||||
// rendered in the AST. This sucks! But to recreate the bound list
|
||||
// from the APIT itself would be miserable, so we're stuck with
|
||||
// this for now!
|
||||
if i > 0 {
|
||||
new_params += ", ";
|
||||
}
|
||||
let name_as_bounds = name.as_str().trim_start_matches("impl").trim_start();
|
||||
new_params += fresh_param.as_str();
|
||||
new_params += ": ";
|
||||
new_params += name_as_bounds;
|
||||
}
|
||||
|
||||
let Some(generics) = tcx.hir().get_generics(fn_def_id) else {
|
||||
// This shouldn't happen, but don't ICE.
|
||||
return;
|
||||
};
|
||||
|
||||
// Add generics or concatenate to the end of the list.
|
||||
suggs.push(if let Some(params_span) = generics.span_for_param_suggestion() {
|
||||
(params_span, format!(", {new_params}"))
|
||||
} else {
|
||||
(generics.span, format!("<{new_params}>"))
|
||||
});
|
||||
|
||||
let concatenated_bounds = captured_lifetimes
|
||||
.into_iter()
|
||||
.chain(captured_non_lifetimes)
|
||||
.map(|sym| sym.to_string())
|
||||
.collect::<Vec<_>>()
|
||||
.join(", ");
|
||||
|
||||
suggs.push((
|
||||
tcx.def_span(opaque_def_id).shrink_to_hi(),
|
||||
format!(" + use<{concatenated_bounds}>"),
|
||||
));
|
||||
|
||||
diag.subdiagnostic(errors::AddPreciseCapturingAndParams {
|
||||
suggs,
|
||||
new_lifetime,
|
||||
apit_spans,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -1609,3 +1609,25 @@ pub enum AddPreciseCapturing {
post: &'static str,
},
}

pub struct AddPreciseCapturingAndParams {
pub suggs: Vec<(Span, String)>,
pub new_lifetime: Symbol,
pub apit_spans: Vec<Span>,
}

impl Subdiagnostic for AddPreciseCapturingAndParams {
fn add_to_diag_with<G: EmissionGuarantee, F: SubdiagMessageOp<G>>(
self,
diag: &mut Diag<'_, G>,
_f: &F,
) {
diag.arg("new_lifetime", self.new_lifetime);
diag.multipart_suggestion_verbose(
fluent::infer_precise_capturing_new_but_apit,
self.suggs,
Applicability::MaybeIncorrect,
);
diag.span_note(self.apit_spans, fluent::infer_warn_removing_apit_params);
}
}
@ -785,23 +785,14 @@ impl<'a> Parser<'a> {
|
||||
}
|
||||
};
|
||||
|
||||
self.parse_and_disallow_postfix_after_cast(cast_expr)
|
||||
}
|
||||
|
||||
/// Parses a postfix operators such as `.`, `?`, or index (`[]`) after a cast,
|
||||
/// then emits an error and returns the newly parsed tree.
|
||||
/// The resulting parse tree for `&x as T[0]` has a precedence of `((&x) as T)[0]`.
|
||||
fn parse_and_disallow_postfix_after_cast(
|
||||
&mut self,
|
||||
cast_expr: P<Expr>,
|
||||
) -> PResult<'a, P<Expr>> {
|
||||
if let ExprKind::Type(_, _) = cast_expr.kind {
|
||||
panic!("ExprKind::Type must not be parsed");
|
||||
}
|
||||
// Try to parse a postfix operator such as `.`, `?`, or index (`[]`)
|
||||
// after a cast. If one is present, emit an error then return a valid
|
||||
// parse tree; For something like `&x as T[0]` will be as if it was
|
||||
// written `((&x) as T)[0]`.
|
||||
|
||||
let span = cast_expr.span;
|
||||
|
||||
let with_postfix = self.parse_expr_dot_or_call_with_(cast_expr, span)?;
|
||||
let with_postfix = self.parse_expr_dot_or_call_with(AttrVec::new(), cast_expr, span)?;
|
||||
|
||||
// Check if an illegal postfix operator has been added after the cast.
|
||||
// If the resulting expression is not a cast, it is an illegal postfix operator.
|
||||
@ -885,42 +876,22 @@ impl<'a> Parser<'a> {
|
||||
self.collect_tokens_for_expr(attrs, |this, attrs| {
|
||||
let base = this.parse_expr_bottom()?;
|
||||
let span = this.interpolated_or_expr_span(&base);
|
||||
this.parse_expr_dot_or_call_with(base, span, attrs)
|
||||
this.parse_expr_dot_or_call_with(attrs, base, span)
|
||||
})
|
||||
}
|
||||
|
||||
pub(super) fn parse_expr_dot_or_call_with(
|
||||
&mut self,
|
||||
e0: P<Expr>,
|
||||
lo: Span,
|
||||
mut attrs: ast::AttrVec,
|
||||
mut e: P<Expr>,
|
||||
lo: Span,
|
||||
) -> PResult<'a, P<Expr>> {
|
||||
// Stitch the list of outer attributes onto the return value.
|
||||
// A little bit ugly, but the best way given the current code
|
||||
// structure
|
||||
let res = ensure_sufficient_stack(
|
||||
// this expr demonstrates the recursion it guards against
|
||||
|| self.parse_expr_dot_or_call_with_(e0, lo),
|
||||
);
|
||||
if attrs.is_empty() {
|
||||
res
|
||||
} else {
|
||||
res.map(|expr| {
|
||||
expr.map(|mut expr| {
|
||||
attrs.extend(expr.attrs);
|
||||
expr.attrs = attrs;
|
||||
expr
|
||||
})
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_expr_dot_or_call_with_(&mut self, mut e: P<Expr>, lo: Span) -> PResult<'a, P<Expr>> {
|
||||
let res = ensure_sufficient_stack(|| {
|
||||
loop {
|
||||
let has_question =
|
||||
if self.prev_token.kind == TokenKind::Ident(kw::Return, IdentIsRaw::No) {
|
||||
// we are using noexpect here because we don't expect a `?` directly after a `return`
|
||||
// which could be suggested otherwise
|
||||
// We are using noexpect here because we don't expect a `?` directly after
|
||||
// a `return` which could be suggested otherwise.
|
||||
self.eat_noexpect(&token::Question)
|
||||
} else {
|
||||
self.eat(&token::Question)
|
||||
@ -930,9 +901,10 @@ impl<'a> Parser<'a> {
|
||||
e = self.mk_expr(lo.to(self.prev_token.span), ExprKind::Try(e));
|
||||
continue;
|
||||
}
|
||||
let has_dot = if self.prev_token.kind == TokenKind::Ident(kw::Return, IdentIsRaw::No) {
|
||||
// we are using noexpect here because we don't expect a `.` directly after a `return`
|
||||
// which could be suggested otherwise
|
||||
let has_dot =
|
||||
if self.prev_token.kind == TokenKind::Ident(kw::Return, IdentIsRaw::No) {
|
||||
// We are using noexpect here because we don't expect a `.` directly after
|
||||
// a `return` which could be suggested otherwise.
|
||||
self.eat_noexpect(&token::Dot)
|
||||
} else if self.token.kind == TokenKind::RArrow && self.may_recover() {
|
||||
// Recovery for `expr->suffix`.
|
||||
@ -957,6 +929,21 @@ impl<'a> Parser<'a> {
|
||||
_ => return Ok(e),
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// Stitch the list of outer attributes onto the return value. A little
|
||||
// bit ugly, but the best way given the current code structure.
|
||||
if attrs.is_empty() {
|
||||
res
|
||||
} else {
|
||||
res.map(|expr| {
|
||||
expr.map(|mut expr| {
|
||||
attrs.extend(expr.attrs);
|
||||
expr.attrs = attrs;
|
||||
expr
|
||||
})
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
pub(super) fn parse_dot_suffix_expr(
|
||||
@ -1388,7 +1375,7 @@ impl<'a> Parser<'a> {
|
||||
/// Parses things like parenthesized exprs, macros, `return`, etc.
|
||||
///
|
||||
/// N.B., this does not parse outer attributes, and is private because it only works
|
||||
/// correctly if called from `parse_dot_or_call_expr()`.
|
||||
/// correctly if called from `parse_expr_dot_or_call`.
|
||||
fn parse_expr_bottom(&mut self) -> PResult<'a, P<Expr>> {
|
||||
maybe_recover_from_interpolated_ty_qpath!(self, true);
|
||||
|
||||
|
@ -128,27 +128,11 @@ impl<'a> Parser<'a> {
|
||||
Some(item.into_inner())
|
||||
});
|
||||
|
||||
let item =
|
||||
self.collect_tokens_trailing_token(attrs, force_collect, |this: &mut Self, attrs| {
|
||||
let item =
|
||||
this.parse_item_common_(attrs, mac_allowed, attrs_allowed, fn_parse_mode);
|
||||
Ok((item?, TrailingToken::None))
|
||||
})?;
|
||||
|
||||
Ok(item)
|
||||
}
|
||||
|
||||
fn parse_item_common_(
|
||||
&mut self,
|
||||
mut attrs: AttrVec,
|
||||
mac_allowed: bool,
|
||||
attrs_allowed: bool,
|
||||
fn_parse_mode: FnParseMode,
|
||||
) -> PResult<'a, Option<Item>> {
|
||||
let lo = self.token.span;
|
||||
let vis = self.parse_visibility(FollowedByType::No)?;
|
||||
let mut def = self.parse_defaultness();
|
||||
let kind = self.parse_item_kind(
|
||||
self.collect_tokens_trailing_token(attrs, force_collect, |this, mut attrs| {
|
||||
let lo = this.token.span;
|
||||
let vis = this.parse_visibility(FollowedByType::No)?;
|
||||
let mut def = this.parse_defaultness();
|
||||
let kind = this.parse_item_kind(
|
||||
&mut attrs,
|
||||
mac_allowed,
|
||||
lo,
|
||||
@ -158,26 +142,27 @@ impl<'a> Parser<'a> {
|
||||
Case::Sensitive,
|
||||
)?;
|
||||
if let Some((ident, kind)) = kind {
|
||||
self.error_on_unconsumed_default(def, &kind);
|
||||
let span = lo.to(self.prev_token.span);
|
||||
this.error_on_unconsumed_default(def, &kind);
|
||||
let span = lo.to(this.prev_token.span);
|
||||
let id = DUMMY_NODE_ID;
|
||||
let item = Item { ident, attrs, id, kind, vis, span, tokens: None };
|
||||
return Ok(Some(item));
|
||||
return Ok((Some(item), TrailingToken::None));
|
||||
}
|
||||
|
||||
// At this point, we have failed to parse an item.
|
||||
if !matches!(vis.kind, VisibilityKind::Inherited) {
|
||||
self.dcx().emit_err(errors::VisibilityNotFollowedByItem { span: vis.span, vis });
|
||||
this.dcx().emit_err(errors::VisibilityNotFollowedByItem { span: vis.span, vis });
|
||||
}
|
||||
|
||||
if let Defaultness::Default(span) = def {
|
||||
self.dcx().emit_err(errors::DefaultNotFollowedByItem { span });
|
||||
this.dcx().emit_err(errors::DefaultNotFollowedByItem { span });
|
||||
}
|
||||
|
||||
if !attrs_allowed {
|
||||
self.recover_attrs_no_item(&attrs)?;
|
||||
this.recover_attrs_no_item(&attrs)?;
|
||||
}
|
||||
Ok(None)
|
||||
Ok((None, TrailingToken::None))
|
||||
})
|
||||
}
|
||||
|
||||
/// Error in-case `default` was parsed in an in-appropriate context.
|
||||
|
@@ -101,7 +101,6 @@ pub enum TrailingToken {
MaybeComma,
}

/// Like `maybe_whole_expr`, but for things other than expressions.
#[macro_export]
macro_rules! maybe_whole {
($p:expr, $constructor:ident, |$x:ident| $e:expr) => {
@@ -392,9 +392,9 @@ impl<'a> Parser<'a> {
// Parse `?`, `.f`, `(arg0, arg1, ...)` or `[expr]` until they've all been eaten.
if let Ok(expr) = snapshot
.parse_expr_dot_or_call_with(
AttrVec::new(),
self.mk_expr(pat_span, ExprKind::Dummy), // equivalent to transforming the parsed pattern into an `Expr`
pat_span,
AttrVec::new(),
)
.map_err(|err| err.cancel())
{
@@ -164,7 +164,7 @@ impl<'a> Parser<'a> {
};

let expr = this.with_res(Restrictions::STMT_EXPR, |this| {
this.parse_expr_dot_or_call_with(expr, lo, attrs)
this.parse_expr_dot_or_call_with(attrs, expr, lo)
})?;
// `DUMMY_SP` will get overwritten later in this function
Ok((this.mk_stmt(rustc_span::DUMMY_SP, StmtKind::Expr(expr)), TrailingToken::None))

@@ -206,7 +206,7 @@ impl<'a> Parser<'a> {
// Since none of the above applied, this is an expression statement macro.
let e = self.mk_expr(lo.to(hi), ExprKind::MacCall(mac));
let e = self.maybe_recover_from_bad_qpath(e)?;
let e = self.parse_expr_dot_or_call_with(e, lo, attrs)?;
let e = self.parse_expr_dot_or_call_with(attrs, e, lo)?;
let e = self
.parse_expr_assoc_with(0, LhsExpr::Parsed { expr: e, starts_statement: false })?;
StmtKind::Expr(e)
@@ -5,6 +5,7 @@ use crate::hash::{Hash, Hasher};
use crate::intrinsics::aggregate_raw_ptr;
use crate::intrinsics::ptr_metadata;
use crate::marker::Freeze;
use crate::ptr::NonNull;

/// Provides the pointer metadata type of any pointed-to type.
///

@@ -153,7 +154,7 @@ pub const fn from_raw_parts_mut<T: ?Sized>(
/// compare equal (since identical vtables can be deduplicated within a codegen unit).
#[lang = "dyn_metadata"]
pub struct DynMetadata<Dyn: ?Sized> {
_vtable_ptr: &'static VTable,
_vtable_ptr: NonNull<VTable>,
_phantom: crate::marker::PhantomData<Dyn>,
}

@@ -166,15 +167,18 @@ extern "C" {
}

impl<Dyn: ?Sized> DynMetadata<Dyn> {
/// One of the things that rustc_middle does with this being a lang item is
/// give it `FieldsShape::Primitive`, which means that as far as codegen can
/// tell, it *is* a reference, and thus doesn't have any fields.
/// That means we can't use field access, and have to transmute it instead.
/// When `DynMetadata` appears as the metadata field of a wide pointer, the rustc_middle layout
/// computation does magic and the resulting layout is *not* a `FieldsShape::Aggregate`, instead
/// it is a `FieldsShape::Primitive`. This means that the same type can have different layout
/// depending on whether it appears as the metadata field of a wide pointer or as a stand-alone
/// type, which understandably confuses codegen and leads to ICEs when trying to project to a
/// field of `DynMetadata`. To work around that issue, we use `transmute` instead of using a
/// field projection.
#[inline]
fn vtable_ptr(self) -> *const VTable {
// SAFETY: this layout assumption is hard-coded into the compiler.
// If it's somehow not a size match, the transmute will error.
unsafe { crate::mem::transmute::<Self, &'static VTable>(self) }
unsafe { crate::mem::transmute::<Self, *const VTable>(self) }
}

/// Returns the size of the type associated with this vtable.
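As context for the `_vtable_ptr` change above, here is a small nightly-only sketch (it assumes the unstable `ptr_metadata` feature) of where `DynMetadata` shows up from the caller's point of view:

```rust
#![feature(ptr_metadata)]

use std::fmt::Debug;
use std::ptr;

fn main() {
    // For slices, the pointer metadata is just the length.
    let s: &[u8] = &[1, 2, 3];
    assert_eq!(ptr::metadata(s as *const [u8]), 3);

    // For trait objects, the metadata is a `DynMetadata` wrapping the vtable
    // pointer; size/align of the erased type can be read back from it.
    let x: &dyn Debug = &7u32;
    let meta = ptr::metadata(x as *const dyn Debug);
    assert_eq!(meta.size_of(), 4);
    assert_eq!(meta.align_of(), 4);
}
```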
@@ -230,7 +230,7 @@ macro_rules! eprintln {
/// ```rust
/// let a = 2;
/// let b = dbg!(a * 2) + 1;
/// //      ^-- prints: [src/main.rs:2] a * 2 = 4
/// //      ^-- prints: [src/main.rs:2:9] a * 2 = 4
/// assert_eq!(b, 5);
/// ```
///

@@ -281,7 +281,7 @@ macro_rules! eprintln {
/// This prints to [stderr]:
///
/// ```text,ignore
/// [src/main.rs:4] n.checked_sub(4) = None
/// [src/main.rs:2:22] n.checked_sub(4) = None
/// ```
///
/// Naive factorial implementation:

@@ -301,15 +301,15 @@ macro_rules! eprintln {
/// This prints to [stderr]:
///
/// ```text,ignore
/// [src/main.rs:3] n <= 1 = false
/// [src/main.rs:3] n <= 1 = false
/// [src/main.rs:3] n <= 1 = false
/// [src/main.rs:3] n <= 1 = true
/// [src/main.rs:4] 1 = 1
/// [src/main.rs:5] n * factorial(n - 1) = 2
/// [src/main.rs:5] n * factorial(n - 1) = 6
/// [src/main.rs:5] n * factorial(n - 1) = 24
/// [src/main.rs:11] factorial(4) = 24
/// [src/main.rs:2:8] n <= 1 = false
/// [src/main.rs:2:8] n <= 1 = false
/// [src/main.rs:2:8] n <= 1 = false
/// [src/main.rs:2:8] n <= 1 = true
/// [src/main.rs:3:9] 1 = 1
/// [src/main.rs:7:9] n * factorial(n - 1) = 2
/// [src/main.rs:7:9] n * factorial(n - 1) = 6
/// [src/main.rs:7:9] n * factorial(n - 1) = 24
/// [src/main.rs:9:1] factorial(4) = 24
/// ```
///
/// The `dbg!(..)` macro moves the input:
@ -44,6 +44,7 @@ mod imp {
|
||||
use crate::ops::Range;
|
||||
use crate::ptr;
|
||||
use crate::sync::atomic::{AtomicBool, AtomicPtr, AtomicUsize, Ordering};
|
||||
use crate::sync::OnceLock;
|
||||
use crate::sys::pal::unix::os;
|
||||
use crate::thread;
|
||||
|
||||
@ -306,9 +307,8 @@ mod imp {
|
||||
ret
|
||||
}
|
||||
|
||||
unsafe fn get_stack_start_aligned() -> Option<*mut libc::c_void> {
|
||||
let page_size = PAGE_SIZE.load(Ordering::Relaxed);
|
||||
let stackptr = get_stack_start()?;
|
||||
fn stack_start_aligned(page_size: usize) -> Option<*mut libc::c_void> {
|
||||
let stackptr = unsafe { get_stack_start()? };
|
||||
let stackaddr = stackptr.addr();
|
||||
|
||||
// Ensure stackaddr is page aligned! A parent process might
|
||||
@ -325,9 +325,28 @@ mod imp {
|
||||
})
|
||||
}
|
||||
|
||||
#[forbid(unsafe_op_in_unsafe_fn)]
|
||||
unsafe fn install_main_guard() -> Option<Range<usize>> {
|
||||
let page_size = PAGE_SIZE.load(Ordering::Relaxed);
|
||||
|
||||
unsafe {
|
||||
// this way someone on any unix-y OS can check that all these compile
|
||||
if cfg!(all(target_os = "linux", not(target_env = "musl"))) {
|
||||
install_main_guard_linux(page_size)
|
||||
} else if cfg!(all(target_os = "linux", target_env = "musl")) {
|
||||
install_main_guard_linux_musl(page_size)
|
||||
} else if cfg!(target_os = "freebsd") {
|
||||
install_main_guard_freebsd(page_size)
|
||||
} else if cfg!(any(target_os = "netbsd", target_os = "openbsd")) {
|
||||
install_main_guard_bsds(page_size)
|
||||
} else {
|
||||
install_main_guard_default(page_size)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[forbid(unsafe_op_in_unsafe_fn)]
|
||||
unsafe fn install_main_guard_linux(page_size: usize) -> Option<Range<usize>> {
|
||||
// Linux doesn't allocate the whole stack right away, and
|
||||
// the kernel has its own stack-guard mechanism to fault
|
||||
// when growing too close to an existing mapping. If we map
|
||||
@ -338,50 +357,56 @@ mod imp {
|
||||
// Instead, we'll just note where we expect rlimit to start
|
||||
// faulting, so our handler can report "stack overflow", and
|
||||
// trust that the kernel's own stack guard will work.
|
||||
let stackptr = get_stack_start_aligned()?;
|
||||
let stackptr = stack_start_aligned(page_size)?;
|
||||
let stackaddr = stackptr.addr();
|
||||
Some(stackaddr - page_size..stackaddr)
|
||||
} else if cfg!(all(target_os = "linux", target_env = "musl")) {
|
||||
}
|
||||
|
||||
#[forbid(unsafe_op_in_unsafe_fn)]
|
||||
unsafe fn install_main_guard_linux_musl(_page_size: usize) -> Option<Range<usize>> {
|
||||
// For the main thread, the musl's pthread_attr_getstack
|
||||
// returns the current stack size, rather than maximum size
|
||||
// it can eventually grow to. It cannot be used to determine
|
||||
// the position of kernel's stack guard.
|
||||
None
|
||||
} else if cfg!(target_os = "freebsd") {
|
||||
}
|
||||
|
||||
#[forbid(unsafe_op_in_unsafe_fn)]
|
||||
unsafe fn install_main_guard_freebsd(page_size: usize) -> Option<Range<usize>> {
|
||||
// FreeBSD's stack autogrows, and optionally includes a guard page
|
||||
// at the bottom. If we try to remap the bottom of the stack
|
||||
// ourselves, FreeBSD's guard page moves upwards. So we'll just use
|
||||
// the builtin guard page.
|
||||
let stackptr = get_stack_start_aligned()?;
|
||||
let stackptr = stack_start_aligned(page_size)?;
|
||||
let guardaddr = stackptr.addr();
|
||||
// Technically the number of guard pages is tunable and controlled
|
||||
// by the security.bsd.stack_guard_page sysctl.
|
||||
// By default it is 1, checking once is enough since it is
|
||||
// a boot time config value.
|
||||
static PAGES: crate::sync::OnceLock<usize> = crate::sync::OnceLock::new();
|
||||
static PAGES: OnceLock<usize> = OnceLock::new();
|
||||
|
||||
let pages = PAGES.get_or_init(|| {
|
||||
use crate::sys::weak::dlsym;
|
||||
dlsym!(fn sysctlbyname(*const libc::c_char, *mut libc::c_void, *mut libc::size_t, *const libc::c_void, libc::size_t) -> libc::c_int);
|
||||
let mut guard: usize = 0;
|
||||
let mut size = crate::mem::size_of_val(&guard);
|
||||
let oid = crate::ffi::CStr::from_bytes_with_nul(
|
||||
b"security.bsd.stack_guard_page\0",
|
||||
)
|
||||
.unwrap();
|
||||
let mut size = mem::size_of_val(&guard);
|
||||
let oid = c"security.bsd.stack_guard_page";
|
||||
match sysctlbyname.get() {
|
||||
Some(fcn) => {
|
||||
if fcn(oid.as_ptr(), core::ptr::addr_of_mut!(guard) as *mut _, core::ptr::addr_of_mut!(size) as *mut _, crate::ptr::null_mut(), 0) == 0 {
|
||||
guard
|
||||
} else {
|
||||
1
|
||||
}
|
||||
},
|
||||
Some(fcn) if unsafe {
|
||||
fcn(oid.as_ptr(),
|
||||
ptr::addr_of_mut!(guard).cast(),
|
||||
ptr::addr_of_mut!(size),
|
||||
ptr::null_mut(),
|
||||
0) == 0
|
||||
} => guard,
|
||||
_ => 1,
|
||||
}
|
||||
});
|
||||
Some(guardaddr..guardaddr + pages * page_size)
|
||||
} else if cfg!(any(target_os = "openbsd", target_os = "netbsd")) {
|
||||
}
|
||||
|
||||
#[forbid(unsafe_op_in_unsafe_fn)]
|
||||
unsafe fn install_main_guard_bsds(page_size: usize) -> Option<Range<usize>> {
|
||||
// OpenBSD stack already includes a guard page, and stack is
|
||||
// immutable.
|
||||
// NetBSD stack includes the guard page.
|
||||
@ -389,10 +414,13 @@ mod imp {
|
||||
// We'll just note where we expect rlimit to start
|
||||
// faulting, so our handler can report "stack overflow", and
|
||||
// trust that the kernel's own stack guard will work.
|
||||
let stackptr = get_stack_start_aligned()?;
|
||||
let stackptr = stack_start_aligned(page_size)?;
|
||||
let stackaddr = stackptr.addr();
|
||||
Some(stackaddr - page_size..stackaddr)
|
||||
} else {
|
||||
}
|
||||
|
||||
#[forbid(unsafe_op_in_unsafe_fn)]
|
||||
unsafe fn install_main_guard_default(page_size: usize) -> Option<Range<usize>> {
|
||||
// Reallocate the last page of the stack.
|
||||
// This ensures SIGBUS will be raised on
|
||||
// stack overflow.
|
||||
@ -401,20 +429,22 @@ mod imp {
|
||||
// than the initial mmap() used, so we mmap() here with
|
||||
// read/write permissions and only then mprotect() it to
|
||||
// no permissions at all. See issue #50313.
|
||||
let stackptr = get_stack_start_aligned()?;
|
||||
let result = mmap64(
|
||||
let stackptr = stack_start_aligned(page_size)?;
|
||||
let result = unsafe {
|
||||
mmap64(
|
||||
stackptr,
|
||||
page_size,
|
||||
PROT_READ | PROT_WRITE,
|
||||
MAP_PRIVATE | MAP_ANON | MAP_FIXED,
|
||||
-1,
|
||||
0,
|
||||
);
|
||||
)
|
||||
};
|
||||
if result != stackptr || result == MAP_FAILED {
|
||||
panic!("failed to allocate a guard page: {}", io::Error::last_os_error());
|
||||
}
|
||||
|
||||
let result = mprotect(stackptr, page_size, PROT_NONE);
|
||||
let result = unsafe { mprotect(stackptr, page_size, PROT_NONE) };
|
||||
if result != 0 {
|
||||
panic!("failed to protect the guard page: {}", io::Error::last_os_error());
|
||||
}
|
||||
@ -423,7 +453,6 @@ mod imp {
|
||||
|
||||
Some(guardaddr..guardaddr + page_size)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(any(target_os = "macos", target_os = "openbsd", target_os = "solaris"))]
|
||||
unsafe fn current_guard() -> Option<Range<usize>> {
|
||||
|
@@ -31,7 +31,6 @@ ignore = [
"library/backtrace",
"library/portable-simd",
"library/stdarch",
"compiler/rustc_codegen_gcc",
"src/doc/book",
"src/doc/edition-guide",
"src/doc/embedded-book",

@@ -50,4 +49,8 @@ ignore = [
# These are ignored by a standard cargo fmt run.
"compiler/rustc_codegen_cranelift/scripts",
"compiler/rustc_codegen_cranelift/example/gen_block_iterate.rs", # uses edition 2024
"compiler/rustc_codegen_gcc/tests",
# Code automatically generated and included.
"compiler/rustc_codegen_gcc/src/intrinsic/archs.rs",
"compiler/rustc_codegen_gcc/example",
]
@@ -27,8 +27,11 @@ do
shift
done

# MacOS reports "arm64" while Linux reports "aarch64". Commonize this.
machine="$(uname -m | sed 's/arm64/aarch64/')"

script_dir="`dirname $script`"
docker_dir="${script_dir}/host-$(uname -m)"
docker_dir="${script_dir}/host-${machine}"
ci_dir="`dirname $script_dir`"
src_dir="`dirname $ci_dir`"
root_dir="`dirname $src_dir`"

@@ -68,7 +71,7 @@ if [ -f "$docker_dir/$image/Dockerfile" ]; then

# Include the architecture in the hash key, since our Linux CI does not
# only run in x86_64 machines.
uname -m >> $hash_key
echo "$machine" >> $hash_key

# Include cache version. Can be used to manually bust the Docker cache.
echo "2" >> $hash_key

@@ -178,7 +181,7 @@ elif [ -f "$docker_dir/disabled/$image/Dockerfile" ]; then
build \
--rm \
-t rust-ci \
-f "host-$(uname -m)/$image/Dockerfile" \
-f "host-${machine}/$image/Dockerfile" \
-
else
echo Invalid image: $image

@@ -201,7 +204,7 @@ else
else
continue
fi
echo "Note: the current host architecture is $(uname -m)"
echo "Note: the current host architecture is $machine"
done

exit 1
@@ -117,8 +117,7 @@ fn baz() {}
In various cases, the default Rust style specifies to sort things. If not
otherwise specified, such sorting should be "version sorting", which ensures
that (for instance) `x8` comes before `x16` even though the character `1` comes
before the character `8`. (If not otherwise specified, version-sorting is
lexicographical.)
before the character `8`.

For the purposes of the Rust style, to compare two strings for version-sorting:

@@ -132,12 +131,13 @@ For the purposes of the Rust style, to compare two strings for version-sorting:
  these strings, treat the chunks as equal (moving on to the next chunk) but
  remember which string had more leading zeroes.
- To compare two chunks if both are not numeric, compare them by Unicode
  character lexicographically, except that `_` (underscore) sorts immediately
  after ` ` (space) but before any other character. (This treats underscore as
  a word separator, as commonly used in identifiers.)
- If the use of version sorting specifies further modifiers, such as sorting
  non-lowercase before lowercase, apply those modifiers to the lexicographic
  sort in this step.
  character lexicographically, with two exceptions:
  - `_` (underscore) sorts immediately after ` ` (space) but before any other
    character. (This treats underscore as a word separator, as commonly used in
    identifiers.)
  - Unless otherwise specified, version-sorting should sort non-lowercase
    characters (characters that can start an `UpperCamelCase` identifier)
    before lowercase characters.
- If the comparison reaches the end of the string and considers each pair of
  chunks equal:
  - If one of the numeric comparisons noted the earliest point at which one

@@ -157,7 +157,17 @@ leading zeroes.

As an example, version-sorting will sort the following strings in the order
given:
- `_ZYWX`
- `_ZYXW`
- `_abcd`
- `A2`
- `ABCD`
- `Z_YXW`
- `ZY_XW`
- `ZY_XW`
- `ZYXW`
- `ZYXW_`
- `a1`
- `abcd`
- `u_zzz`
- `u8`
- `u16`

@@ -190,11 +200,7 @@ given:
- `x86_64`
- `x86_128`
- `x87`
- `Z_YWX`
- `ZY_WX`
- `ZYW_X`
- `ZYWX`
- `ZYWX_`
- `zyxw`

### [Module-level items](items.md)
@@ -489,10 +489,8 @@ foo::{
A *group* of imports is a set of imports on the same or sequential lines. One or
more blank lines or other items (e.g., a function) separate groups of imports.

Within a group of imports, imports must be version-sorted, except that
non-lowercase characters (characters that can start an `UpperCamelCase`
identifier) must be sorted before lowercase characters. Groups of imports must
not be merged or re-ordered.
Within a group of imports, imports must be version-sorted. Groups of imports
must not be merged or re-ordered.

E.g., input:

@@ -520,9 +518,7 @@ re-ordering.
### Ordering list import

Names in a list import must be version-sorted, except that:
- `self` and `super` always come first if present,
- non-lowercase characters (characters that can start an `UpperCamelCase`
  identifier) must be sorted before lowercase characters, and
- `self` and `super` always come first if present, and
- groups and glob imports always come last if present.

This applies recursively. For example, `a::*` comes before `b::a` but `a::b`
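To make the version-sorting rules above easier to follow, here is a minimal, simplified sketch of such a comparison in Rust. It is not rustfmt's actual implementation: it ignores the leading-zero tie-break, only handles ASCII, and relies on ASCII ordering already placing non-lowercase before lowercase characters.

```rust
use std::cmp::Ordering;

// Rank `_` immediately after ' ' and before any other character.
fn char_rank(c: char) -> (u8, char) {
    match c {
        ' ' => (0, c),
        '_' => (1, c),
        _ => (2, c),
    }
}

// End index (exclusive) of the maximal digit run starting at `start`.
fn digit_run(s: &[char], start: usize) -> usize {
    let mut end = start;
    while end < s.len() && s[end].is_ascii_digit() {
        end += 1;
    }
    end
}

fn version_cmp(a: &str, b: &str) -> Ordering {
    let (a, b): (Vec<char>, Vec<char>) = (a.chars().collect(), b.chars().collect());
    let (mut i, mut j) = (0, 0);
    while i < a.len() && j < b.len() {
        if a[i].is_ascii_digit() && b[j].is_ascii_digit() {
            // Compare maximal digit chunks by numeric value, so `x8` < `x16`.
            let (ie, je) = (digit_run(&a, i), digit_run(&b, j));
            let na: u128 = a[i..ie].iter().collect::<String>().parse().unwrap();
            let nb: u128 = b[j..je].iter().collect::<String>().parse().unwrap();
            match na.cmp(&nb) {
                Ordering::Equal => (i, j) = (ie, je),
                ord => return ord,
            }
        } else {
            match char_rank(a[i]).cmp(&char_rank(b[j])) {
                Ordering::Equal => (i, j) = (i + 1, j + 1),
                ord => return ord,
            }
        }
    }
    // A string that is a prefix of the other sorts first.
    (a.len() - i).cmp(&(b.len() - j))
}

fn main() {
    let mut v = ["x87", "u8", "x86_128", "u16", "x86_64", "u_zzz", "ZYXW_", "ZYXW"];
    v.sort_by(|a, b| version_cmp(a, b));
    assert_eq!(v, ["ZYXW", "ZYXW_", "u_zzz", "u8", "u16", "x86_64", "x86_128", "x87"]);
}
```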
@@ -27,4 +27,16 @@ fn missing<'a, 'captured, 'not_captured, Captured>(x: &'a ()) -> impl Captures<'captured> {
//~^ ERROR hidden type for
}

fn no_params_yet(_: impl Sized, y: &()) -> impl Sized {
//~^ HELP add a `use<...>` bound
y
//~^ ERROR hidden type for
}

fn yes_params_yet<'a, T>(_: impl Sized, y: &'a ()) -> impl Sized {
//~^ HELP add a `use<...>` bound
y
//~^ ERROR hidden type for
}

fn main() {}
@ -62,6 +62,48 @@ help: add a `use<...>` bound to explicitly capture `'a`
|
||||
LL | fn missing<'a, 'captured, 'not_captured, Captured>(x: &'a ()) -> impl Captures<'captured> + use<'captured, 'a, Captured> {
|
||||
| ++++++++++++++++++++++++++++++
|
||||
|
||||
error: aborting due to 4 previous errors
|
||||
error[E0700]: hidden type for `impl Sized` captures lifetime that does not appear in bounds
|
||||
--> $DIR/hidden-type-suggestion.rs:32:5
|
||||
|
|
||||
LL | fn no_params_yet(_: impl Sized, y: &()) -> impl Sized {
|
||||
| --- ---------- opaque type defined here
|
||||
| |
|
||||
| hidden type `&()` captures the anonymous lifetime defined here
|
||||
LL |
|
||||
LL | y
|
||||
| ^
|
||||
|
|
||||
note: you could use a `use<...>` bound to explicitly capture `'_`, but argument-position `impl Trait`s are not nameable
|
||||
--> $DIR/hidden-type-suggestion.rs:30:21
|
||||
|
|
||||
LL | fn no_params_yet(_: impl Sized, y: &()) -> impl Sized {
|
||||
| ^^^^^^^^^^
|
||||
help: add a `use<...>` bound to explicitly capture `'_` after turning all argument-position `impl Trait` into type parameters, noting that this possibly affects the API of this crate
|
||||
|
|
||||
LL | fn no_params_yet<T: Sized>(_: T, y: &()) -> impl Sized + use<'_, T> {
|
||||
| ++++++++++ ~ ++++++++++++
|
||||
|
||||
error[E0700]: hidden type for `impl Sized` captures lifetime that does not appear in bounds
|
||||
--> $DIR/hidden-type-suggestion.rs:38:5
|
||||
|
|
||||
LL | fn yes_params_yet<'a, T>(_: impl Sized, y: &'a ()) -> impl Sized {
|
||||
| -- ---------- opaque type defined here
|
||||
| |
|
||||
| hidden type `&'a ()` captures the lifetime `'a` as defined here
|
||||
LL |
|
||||
LL | y
|
||||
| ^
|
||||
|
|
||||
note: you could use a `use<...>` bound to explicitly capture `'a`, but argument-position `impl Trait`s are not nameable
|
||||
--> $DIR/hidden-type-suggestion.rs:36:29
|
||||
|
|
||||
LL | fn yes_params_yet<'a, T>(_: impl Sized, y: &'a ()) -> impl Sized {
|
||||
| ^^^^^^^^^^
|
||||
help: add a `use<...>` bound to explicitly capture `'a` after turning all argument-position `impl Trait` into type parameters, noting that this possibly affects the API of this crate
|
||||
|
|
||||
LL | fn yes_params_yet<'a, T, U: Sized>(_: U, y: &'a ()) -> impl Sized + use<'a, T, U> {
|
||||
| ++++++++++ ~ +++++++++++++++
|
||||
|
||||
error: aborting due to 6 previous errors
|
||||
|
||||
For more information about this error, try `rustc --explain E0700`.
|
||||
|
@ -1,6 +1,6 @@
|
||||
//@ run-pass
|
||||
|
||||
#![allow(dead_code, non_camel_case_types, non_upper_case_globals)]
|
||||
#![allow(dead_code, non_camel_case_types, non_upper_case_globals, unused_variables)]
|
||||
#![feature(macro_metavar_expr_concat)]
|
||||
|
||||
macro_rules! create_things {
|
||||
@ -37,13 +37,58 @@ macro_rules! without_dollar_sign_is_an_ident {
|
||||
};
|
||||
}
|
||||
|
||||
macro_rules! literals {
|
||||
($ident:ident) => {{
|
||||
let ${concat(_a, "_b")}: () = ();
|
||||
let ${concat("_b", _a)}: () = ();
|
||||
macro_rules! combinations {
|
||||
($ident:ident, $literal:literal, $tt_ident:tt, $tt_literal:tt) => {{
|
||||
// tt ident
|
||||
let ${concat($tt_ident, b)} = ();
|
||||
let ${concat($tt_ident, _b)} = ();
|
||||
let ${concat($tt_ident, "b")} = ();
|
||||
let ${concat($tt_ident, $tt_ident)} = ();
|
||||
let ${concat($tt_ident, $tt_literal)} = ();
|
||||
let ${concat($tt_ident, $ident)} = ();
|
||||
let ${concat($tt_ident, $literal)} = ();
|
||||
// tt literal
|
||||
let ${concat($tt_literal, b)} = ();
|
||||
let ${concat($tt_literal, _b)} = ();
|
||||
let ${concat($tt_literal, "b")} = ();
|
||||
let ${concat($tt_literal, $tt_ident)} = ();
|
||||
let ${concat($tt_literal, $tt_literal)} = ();
|
||||
let ${concat($tt_literal, $ident)} = ();
|
||||
let ${concat($tt_literal, $literal)} = ();
|
||||
|
||||
let ${concat($ident, "_b")}: () = ();
|
||||
let ${concat("_b", $ident)}: () = ();
|
||||
// ident (adhoc)
|
||||
let ${concat(_b, b)} = ();
|
||||
let ${concat(_b, _b)} = ();
|
||||
let ${concat(_b, "b")} = ();
|
||||
let ${concat(_b, $tt_ident)} = ();
|
||||
let ${concat(_b, $tt_literal)} = ();
|
||||
let ${concat(_b, $ident)} = ();
|
||||
let ${concat(_b, $literal)} = ();
|
||||
// ident (param)
|
||||
let ${concat($ident, b)} = ();
|
||||
let ${concat($ident, _b)} = ();
|
||||
let ${concat($ident, "b")} = ();
|
||||
let ${concat($ident, $tt_ident)} = ();
|
||||
let ${concat($ident, $tt_literal)} = ();
|
||||
let ${concat($ident, $ident)} = ();
|
||||
let ${concat($ident, $literal)} = ();
|
||||
|
||||
// literal (adhoc)
|
||||
let ${concat("a", b)} = ();
|
||||
let ${concat("a", _b)} = ();
|
||||
let ${concat("a", "b")} = ();
|
||||
let ${concat("a", $tt_ident)} = ();
|
||||
let ${concat("a", $tt_literal)} = ();
|
||||
let ${concat("a", $ident)} = ();
|
||||
let ${concat("a", $literal)} = ();
|
||||
// literal (param)
|
||||
let ${concat($literal, b)} = ();
|
||||
let ${concat($literal, _b)} = ();
|
||||
let ${concat($literal, "b")} = ();
|
||||
let ${concat($literal, $tt_ident)} = ();
|
||||
let ${concat($literal, $tt_literal)} = ();
|
||||
let ${concat($literal, $ident)} = ();
|
||||
let ${concat($literal, $literal)} = ();
|
||||
}};
|
||||
}
|
||||
|
||||
@ -66,5 +111,5 @@ fn main() {
|
||||
assert_eq!(VARident, 1);
|
||||
assert_eq!(VAR_123, 2);
|
||||
|
||||
literals!(_hello);
|
||||
combinations!(_hello, "a", b, "b");
|
||||
}
|
||||
|
@ -20,7 +20,7 @@ macro_rules! wrong_concat_declarations {
|
||||
//~^ ERROR `concat` must have at least two elements
|
||||
|
||||
${concat($ex, aaaa)}
|
||||
//~^ ERROR `${concat(..)}` currently only accepts identifiers
|
||||
//~^ ERROR metavariables of `${concat(..)}` must be of type
|
||||
|
||||
${concat($ex, aaaa 123)}
|
||||
//~^ ERROR expected comma
|
||||
@ -98,6 +98,39 @@ macro_rules! unsupported_literals {
|
||||
}};
|
||||
}
|
||||
|
||||
macro_rules! bad_literal_string {
|
||||
($literal:literal) => {
|
||||
const ${concat(_foo, $literal)}: () = ();
|
||||
//~^ ERROR `${concat(..)}` is not generating a valid identifier
|
||||
//~| ERROR `${concat(..)}` is not generating a valid identifier
|
||||
//~| ERROR `${concat(..)}` is not generating a valid identifier
|
||||
//~| ERROR `${concat(..)}` is not generating a valid identifier
|
||||
//~| ERROR `${concat(..)}` is not generating a valid identifier
|
||||
//~| ERROR `${concat(..)}` is not generating a valid identifier
|
||||
//~| ERROR `${concat(..)}` is not generating a valid identifier
|
||||
}
|
||||
}
|
||||
|
||||
macro_rules! bad_literal_non_string {
|
||||
($literal:literal) => {
|
||||
const ${concat(_foo, $literal)}: () = ();
|
||||
//~^ ERROR metavariables of `${concat(..)}` must be of type
|
||||
//~| ERROR metavariables of `${concat(..)}` must be of type
|
||||
//~| ERROR metavariables of `${concat(..)}` must be of type
|
||||
//~| ERROR metavariables of `${concat(..)}` must be of type
|
||||
//~| ERROR metavariables of `${concat(..)}` must be of type
|
||||
}
|
||||
}
|
||||
|
||||
macro_rules! bad_tt_literal {
|
||||
($tt:tt) => {
|
||||
const ${concat(_foo, $tt)}: () = ();
|
||||
//~^ ERROR metavariables of `${concat(..)}` must be of type
|
||||
//~| ERROR metavariables of `${concat(..)}` must be of type
|
||||
//~| ERROR metavariables of `${concat(..)}` must be of type
|
||||
}
|
||||
}
|
||||
|
||||
fn main() {
|
||||
wrong_concat_declarations!(1);
|
||||
|
||||
@ -113,4 +146,23 @@ fn main() {
|
||||
unsupported_literals!(_abc);
|
||||
|
||||
empty!();
|
||||
|
||||
bad_literal_string!("\u{00BD}");
|
||||
bad_literal_string!("\x41");
|
||||
bad_literal_string!("🤷");
|
||||
bad_literal_string!("d[-_-]b");
|
||||
|
||||
bad_literal_string!("-1");
|
||||
bad_literal_string!("1.0");
|
||||
bad_literal_string!("'1'");
|
||||
|
||||
bad_literal_non_string!(1);
|
||||
bad_literal_non_string!(-1);
|
||||
bad_literal_non_string!(1.0);
|
||||
bad_literal_non_string!('1');
|
||||
bad_literal_non_string!(false);
|
||||
|
||||
bad_tt_literal!(1);
|
||||
bad_tt_literal!(1.0);
|
||||
bad_tt_literal!('1');
|
||||
}
|
||||
|
@ -64,11 +64,13 @@ error: expected identifier or string literal
|
||||
LL | let ${concat($ident, 1)}: () = ();
|
||||
| ^
|
||||
|
||||
error: `${concat(..)}` currently only accepts identifiers or meta-variables as parameters
|
||||
error: metavariables of `${concat(..)}` must be of type `ident`, `literal` or `tt`
|
||||
--> $DIR/syntax-errors.rs:22:19
|
||||
|
|
||||
LL | ${concat($ex, aaaa)}
|
||||
| ^^
|
||||
|
|
||||
= note: currently only string literals are supported
|
||||
|
||||
error: variable `foo` is not recognized in meta-variable expression
|
||||
--> $DIR/syntax-errors.rs:35:30
|
||||
@ -131,5 +133,152 @@ LL | empty!();
|
||||
|
|
||||
= note: this error originates in the macro `empty` (in Nightly builds, run with -Z macro-backtrace for more info)
|
||||
|
||||
error: aborting due to 18 previous errors
|
||||
error: `${concat(..)}` is not generating a valid identifier
|
||||
--> $DIR/syntax-errors.rs:103:16
|
||||
|
|
||||
LL | const ${concat(_foo, $literal)}: () = ();
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
...
|
||||
LL | bad_literal_string!("\u{00BD}");
|
||||
| ------------------------------- in this macro invocation
|
||||
|
|
||||
= note: this error originates in the macro `bad_literal_string` (in Nightly builds, run with -Z macro-backtrace for more info)
|
||||
|
||||
error: `${concat(..)}` is not generating a valid identifier
|
||||
--> $DIR/syntax-errors.rs:103:16
|
||||
|
|
||||
LL | const ${concat(_foo, $literal)}: () = ();
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
...
|
||||
LL | bad_literal_string!("\x41");
|
||||
| --------------------------- in this macro invocation
|
||||
|
|
||||
= note: this error originates in the macro `bad_literal_string` (in Nightly builds, run with -Z macro-backtrace for more info)
|
||||
|
||||
error: `${concat(..)}` is not generating a valid identifier
|
||||
--> $DIR/syntax-errors.rs:103:16
|
||||
|
|
||||
LL | const ${concat(_foo, $literal)}: () = ();
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
...
|
||||
LL | bad_literal_string!("🤷");
|
||||
| ------------------------- in this macro invocation
|
||||
|
|
||||
= note: this error originates in the macro `bad_literal_string` (in Nightly builds, run with -Z macro-backtrace for more info)
|
||||
|
||||
error: `${concat(..)}` is not generating a valid identifier
|
||||
--> $DIR/syntax-errors.rs:103:16
|
||||
|
|
||||
LL | const ${concat(_foo, $literal)}: () = ();
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
...
|
||||
LL | bad_literal_string!("d[-_-]b");
|
||||
| ------------------------------ in this macro invocation
|
||||
|
|
||||
= note: this error originates in the macro `bad_literal_string` (in Nightly builds, run with -Z macro-backtrace for more info)
|
||||
|
||||
error: `${concat(..)}` is not generating a valid identifier
|
||||
--> $DIR/syntax-errors.rs:103:16
|
||||
|
|
||||
LL | const ${concat(_foo, $literal)}: () = ();
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
...
|
||||
LL | bad_literal_string!("-1");
|
||||
| ------------------------- in this macro invocation
|
||||
|
|
||||
= note: this error originates in the macro `bad_literal_string` (in Nightly builds, run with -Z macro-backtrace for more info)
|
||||
|
||||
error: `${concat(..)}` is not generating a valid identifier
|
||||
--> $DIR/syntax-errors.rs:103:16
|
||||
|
|
||||
LL | const ${concat(_foo, $literal)}: () = ();
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
...
|
||||
LL | bad_literal_string!("1.0");
|
||||
| -------------------------- in this macro invocation
|
||||
|
|
||||
= note: this error originates in the macro `bad_literal_string` (in Nightly builds, run with -Z macro-backtrace for more info)
|
||||
|
||||
error: `${concat(..)}` is not generating a valid identifier
|
||||
--> $DIR/syntax-errors.rs:103:16
|
||||
|
|
||||
LL | const ${concat(_foo, $literal)}: () = ();
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
...
|
||||
LL | bad_literal_string!("'1'");
|
||||
| -------------------------- in this macro invocation
|
||||
|
|
||||
= note: this error originates in the macro `bad_literal_string` (in Nightly builds, run with -Z macro-backtrace for more info)
|
||||
|
||||
error: metavariables of `${concat(..)}` must be of type `ident`, `literal` or `tt`
|
||||
--> $DIR/syntax-errors.rs:116:31
|
||||
|
|
||||
LL | const ${concat(_foo, $literal)}: () = ();
|
||||
| ^^^^^^^
|
||||
|
|
||||
= note: currently only string literals are supported
|
||||
|
||||
error: metavariables of `${concat(..)}` must be of type `ident`, `literal` or `tt`
|
||||
--> $DIR/syntax-errors.rs:116:31
|
||||
|
|
||||
LL | const ${concat(_foo, $literal)}: () = ();
|
||||
| ^^^^^^^
|
||||
|
|
||||
= note: currently only string literals are supported
|
||||
= note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no`
|
||||
|
||||
error: metavariables of `${concat(..)}` must be of type `ident`, `literal` or `tt`
|
||||
--> $DIR/syntax-errors.rs:116:31
|
||||
|
|
||||
LL | const ${concat(_foo, $literal)}: () = ();
|
||||
| ^^^^^^^
|
||||
|
|
||||
= note: currently only string literals are supported
|
||||
= note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no`
|
||||
|
||||
error: metavariables of `${concat(..)}` must be of type `ident`, `literal` or `tt`
|
||||
--> $DIR/syntax-errors.rs:116:31
|
||||
|
|
||||
LL | const ${concat(_foo, $literal)}: () = ();
|
||||
| ^^^^^^^
|
||||
|
|
||||
= note: currently only string literals are supported
|
||||
= note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no`
|
||||
|
||||
error: metavariables of `${concat(..)}` must be of type `ident`, `literal` or `tt`
|
||||
--> $DIR/syntax-errors.rs:116:31
|
||||
|
|
||||
LL | const ${concat(_foo, $literal)}: () = ();
|
||||
| ^^^^^^^
|
||||
|
|
||||
= note: currently only string literals are supported
|
||||
= note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no`
|
||||
|
||||
error: metavariables of `${concat(..)}` must be of type `ident`, `literal` or `tt`
|
||||
--> $DIR/syntax-errors.rs:127:31
|
||||
|
|
||||
LL | const ${concat(_foo, $tt)}: () = ();
|
||||
| ^^
|
||||
|
|
||||
= note: currently only string literals are supported
|
||||
|
||||
error: metavariables of `${concat(..)}` must be of type `ident`, `literal` or `tt`
|
||||
--> $DIR/syntax-errors.rs:127:31
|
||||
|
|
||||
LL | const ${concat(_foo, $tt)}: () = ();
|
||||
| ^^
|
||||
|
|
||||
= note: currently only string literals are supported
|
||||
= note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no`
|
||||
|
||||
error: metavariables of `${concat(..)}` must be of type `ident`, `literal` or `tt`
|
||||
--> $DIR/syntax-errors.rs:127:31
|
||||
|
|
||||
LL | const ${concat(_foo, $tt)}: () = ();
|
||||
| ^^
|
||||
|
|
||||
= note: currently only string literals are supported
|
||||
= note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no`
|
||||
|
||||
error: aborting due to 33 previous errors
|
||||
|
||||
|
@ -3,12 +3,17 @@
|
||||
#![feature(macro_metavar_expr_concat)]
|
||||
|
||||
macro_rules! turn_to_page {
|
||||
($ident:ident) => {
|
||||
($ident:ident, $literal:literal, $tt:tt) => {
|
||||
const ${concat("Ḧ", $ident)}: i32 = 394;
|
||||
const ${concat("Ḧ", $literal)}: i32 = 394;
|
||||
const ${concat("Ḧ", $tt)}: i32 = 394;
|
||||
};
|
||||
}
|
||||
|
||||
fn main() {
|
||||
turn_to_page!(P);
|
||||
assert_eq!(ḦP, 394);
|
||||
turn_to_page!(P1, "Ṕ2", Ṕ);
|
||||
assert_eq!(ḦṔ, 394);
|
||||
assert_eq!(ḦP1, 394);
|
||||
assert_eq!(ḦṔ2, 394);
|
||||
|
||||
}
|
||||
|