Mirror of https://github.com/rust-lang/rust.git, synced 2024-11-21 22:34:05 +00:00
inline format!() args up to and including rustc_middle
This commit is contained in:
parent 2e0136a131
commit 23815467a2
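The change is mechanical: wherever a formatting macro (format!, panic!, write!, assert! and friends) passed a plain variable as a trailing positional argument, the variable is now captured inline in the format string, using the inline format-args capture available since Rust 1.58. A minimal, runnable sketch of the before/after shape, reusing one of the call sites touched below:

    fn main() {
        let idx = 3;
        // Before: the value is passed as a separate positional argument.
        let old = format!("invalid reference to argument at index {}", idx);
        // After: the variable is captured directly inside the braces.
        let new = format!("invalid reference to argument at index {idx}");
        assert_eq!(old, new); // both forms produce identical output
    }

Only direct identifiers can be captured this way; expressions such as method calls (for example manifest.to_str().unwrap() in the first hunk) still stay as positional arguments, which is why some call sites below are left untouched.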
@@ -18,7 +18,7 @@ fn set_windows_exe_options() {
 let mut manifest = env::current_dir().unwrap();
 manifest.push(WINDOWS_MANIFEST_FILE);
 
-println!("cargo:rerun-if-changed={}", WINDOWS_MANIFEST_FILE);
+println!("cargo:rerun-if-changed={WINDOWS_MANIFEST_FILE}");
 // Embed the Windows application manifest file.
 println!("cargo:rustc-link-arg-bin=rustc-main=/MANIFEST:EMBED");
 println!("cargo:rustc-link-arg-bin=rustc-main=/MANIFESTINPUT:{}", manifest.to_str().unwrap());
@@ -260,8 +260,7 @@ pub trait LayoutCalculator {
 }
 _ => assert!(
 start == Bound::Unbounded && end == Bound::Unbounded,
-"nonscalar layout for layout_scalar_valid_range type: {:#?}",
-st,
+"nonscalar layout for layout_scalar_valid_range type: {st:#?}",
 ),
 }
 
@@ -463,7 +462,7 @@ pub trait LayoutCalculator {
 min = 0;
 max = 0;
 }
-assert!(min <= max, "discriminant range is {}...{}", min, max);
+assert!(min <= max, "discriminant range is {min}...{max}");
 let (min_ity, signed) = discr_range_of_repr(min, max); //Integer::repr_discr(tcx, ty, &repr, min, max);
 
 let mut align = dl.aggregate_align;
@@ -537,8 +536,7 @@ pub trait LayoutCalculator {
 // space necessary to represent would have to be discarded (or layout is wrong
 // on thinking it needs 16 bits)
 panic!(
-"layout decided on a larger discriminant type ({:?}) than typeck ({:?})",
-min_ity, typeck_ity
+"layout decided on a larger discriminant type ({min_ity:?}) than typeck ({typeck_ity:?})"
 );
 // However, it is fine to make discr type however large (as an optimisation)
 // after this point – we’ll just truncate the value we load in codegen.
@@ -332,7 +332,7 @@ impl TargetDataLayout {
 16 => 1 << 15,
 32 => 1 << 31,
 64 => 1 << 47,
-bits => panic!("obj_size_bound: unknown pointer bit size {}", bits),
+bits => panic!("obj_size_bound: unknown pointer bit size {bits}"),
 }
 }
 
@@ -342,7 +342,7 @@ impl TargetDataLayout {
 16 => I16,
 32 => I32,
 64 => I64,
-bits => panic!("ptr_sized_integer: unknown pointer bit size {}", bits),
+bits => panic!("ptr_sized_integer: unknown pointer bit size {bits}"),
 }
 }
 
@@ -399,7 +399,7 @@ impl FromStr for Endian {
 match s {
 "little" => Ok(Self::Little),
 "big" => Ok(Self::Big),
-_ => Err(format!(r#"unknown endian: "{}""#, s)),
+_ => Err(format!(r#"unknown endian: "{s}""#)),
 }
 }
 }
@@ -456,7 +456,7 @@ impl Size {
 pub fn bits(self) -> u64 {
 #[cold]
 fn overflow(bytes: u64) -> ! {
-panic!("Size::bits: {} bytes in bits doesn't fit in u64", bytes)
+panic!("Size::bits: {bytes} bytes in bits doesn't fit in u64")
 }
 
 self.bytes().checked_mul(8).unwrap_or_else(|| overflow(self.bytes()))
@@ -1179,17 +1179,12 @@ impl FieldsShape {
 unreachable!("FieldsShape::offset: `Primitive`s have no fields")
 }
 FieldsShape::Union(count) => {
-assert!(
-i < count.get(),
-"tried to access field {} of union with {} fields",
-i,
-count
-);
+assert!(i < count.get(), "tried to access field {i} of union with {count} fields");
 Size::ZERO
 }
 FieldsShape::Array { stride, count } => {
 let i = u64::try_from(i).unwrap();
-assert!(i < count, "tried to access field {} of array with {} fields", i, count);
+assert!(i < count, "tried to access field {i} of array with {count} fields");
 stride * i
 }
 FieldsShape::Arbitrary { ref offsets, .. } => offsets[FieldIdx::from_usize(i)],
@@ -1294,7 +1289,7 @@ impl Abi {
 Primitive::Int(_, signed) => signed,
 _ => false,
 },
-_ => panic!("`is_signed` on non-scalar ABI {:?}", self),
+_ => panic!("`is_signed` on non-scalar ABI {self:?}"),
 }
 }
 
@@ -659,7 +659,7 @@ fn validate_generic_param_order(
 GenericParamKind::Type { .. } => (ParamKindOrd::TypeOrConst, ident.to_string()),
 GenericParamKind::Const { ty, .. } => {
 let ty = pprust::ty_to_string(ty);
-(ParamKindOrd::TypeOrConst, format!("const {}: {}", ident, ty))
+(ParamKindOrd::TypeOrConst, format!("const {ident}: {ty}"))
 }
 };
 param_idents.push((kind, ord_kind, bounds, idx, ident));
@@ -1463,15 +1463,12 @@ fn deny_equality_constraints(
 let Some(arg) = args.args.last() else {
 continue;
 };
-(
-format!(", {} = {}", assoc, ty),
-arg.span().shrink_to_hi(),
-)
+(format!(", {assoc} = {ty}"), arg.span().shrink_to_hi())
 }
 _ => continue,
 },
 None => (
-format!("<{} = {}>", assoc, ty),
+format!("<{assoc} = {ty}>"),
 trait_segment.span().shrink_to_hi(),
 ),
 };
@@ -575,7 +575,7 @@ fn expand_preparsed_asm(ecx: &mut ExtCtxt<'_>, args: AsmArgs) -> Option<ast::Inl
 || named_pos.contains_key(&idx)
 || args.reg_args.contains(idx)
 {
-let msg = format!("invalid reference to argument at index {}", idx);
+let msg = format!("invalid reference to argument at index {idx}");
 let mut err = ecx.struct_span_err(span, msg);
 err.span_label(span, "from here");
 
@@ -588,9 +588,9 @@ fn expand_preparsed_asm(ecx: &mut ExtCtxt<'_>, args: AsmArgs) -> Option<ast::Inl
 ""
 };
 let msg = match positional_args {
-0 => format!("no {}arguments were given", positional),
-1 => format!("there is 1 {}argument", positional),
-x => format!("there are {} {}arguments", x, positional),
+0 => format!("no {positional}arguments were given"),
+1 => format!("there is 1 {positional}argument"),
+x => format!("there are {x} {positional}arguments"),
 };
 err.note(msg);
 
@@ -624,7 +624,7 @@ fn expand_preparsed_asm(ecx: &mut ExtCtxt<'_>, args: AsmArgs) -> Option<ast::Inl
 match args.named_args.get(&Symbol::intern(name)) {
 Some(&idx) => Some(idx),
 None => {
-let msg = format!("there is no argument named `{}`", name);
+let msg = format!("there is no argument named `{name}`");
 let span = arg.position_span;
 ecx.struct_span_err(
 template_span
@@ -697,8 +697,7 @@ fn expand_preparsed_asm(ecx: &mut ExtCtxt<'_>, args: AsmArgs) -> Option<ast::Inl
 err.span_label(sp, msg);
 err.help(format!(
 "if this argument is intentionally unused, \
-consider using it in an asm comment: `\"/*{} */\"`",
-help_str
+consider using it in an asm comment: `\"/*{help_str} */\"`"
 ));
 err.emit();
 }
@@ -712,8 +711,7 @@ fn expand_preparsed_asm(ecx: &mut ExtCtxt<'_>, args: AsmArgs) -> Option<ast::Inl
 }
 err.help(format!(
 "if these arguments are intentionally unused, \
-consider using them in an asm comment: `\"/*{} */\"`",
-help_str
+consider using them in an asm comment: `\"/*{help_str} */\"`"
 ));
 err.emit();
 }
|
@ -144,7 +144,7 @@ fn cs_clone_simple(
|
|||||||
}
|
}
|
||||||
_ => cx.span_bug(
|
_ => cx.span_bug(
|
||||||
trait_span,
|
trait_span,
|
||||||
format!("unexpected substructure in simple `derive({})`", name),
|
format!("unexpected substructure in simple `derive({name})`"),
|
||||||
),
|
),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -178,10 +178,10 @@ fn cs_clone(
|
|||||||
vdata = &variant.data;
|
vdata = &variant.data;
|
||||||
}
|
}
|
||||||
EnumTag(..) | AllFieldlessEnum(..) => {
|
EnumTag(..) | AllFieldlessEnum(..) => {
|
||||||
cx.span_bug(trait_span, format!("enum tags in `derive({})`", name,))
|
cx.span_bug(trait_span, format!("enum tags in `derive({name})`",))
|
||||||
}
|
}
|
||||||
StaticEnum(..) | StaticStruct(..) => {
|
StaticEnum(..) | StaticStruct(..) => {
|
||||||
cx.span_bug(trait_span, format!("associated function in `derive({})`", name))
|
cx.span_bug(trait_span, format!("associated function in `derive({name})`"))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -193,7 +193,7 @@ fn cs_clone(
|
|||||||
let Some(ident) = field.name else {
|
let Some(ident) = field.name else {
|
||||||
cx.span_bug(
|
cx.span_bug(
|
||||||
trait_span,
|
trait_span,
|
||||||
format!("unnamed field in normal struct in `derive({})`", name,),
|
format!("unnamed field in normal struct in `derive({name})`",),
|
||||||
);
|
);
|
||||||
};
|
};
|
||||||
let call = subcall(cx, field);
|
let call = subcall(cx, field);
|
||||||
|
@ -204,7 +204,7 @@ where
|
|||||||
let fields = fields
|
let fields = fields
|
||||||
.iter()
|
.iter()
|
||||||
.enumerate()
|
.enumerate()
|
||||||
.map(|(i, &span)| getarg(cx, span, Symbol::intern(&format!("_field{}", i)), i))
|
.map(|(i, &span)| getarg(cx, span, Symbol::intern(&format!("_field{i}")), i))
|
||||||
.collect();
|
.collect();
|
||||||
|
|
||||||
cx.expr_call(trait_span, path_expr, fields)
|
cx.expr_call(trait_span, path_expr, fields)
|
||||||
|
@ -173,7 +173,7 @@ fn encodable_substructure(
|
|||||||
for (i, &FieldInfo { name, ref self_expr, span, .. }) in fields.iter().enumerate() {
|
for (i, &FieldInfo { name, ref self_expr, span, .. }) in fields.iter().enumerate() {
|
||||||
let name = match name {
|
let name = match name {
|
||||||
Some(id) => id.name,
|
Some(id) => id.name,
|
||||||
None => Symbol::intern(&format!("_field{}", i)),
|
None => Symbol::intern(&format!("_field{i}")),
|
||||||
};
|
};
|
||||||
let self_ref = cx.expr_addr_of(span, self_expr.clone());
|
let self_ref = cx.expr_addr_of(span, self_expr.clone());
|
||||||
let enc =
|
let enc =
|
||||||
|
@ -1166,7 +1166,7 @@ impl<'a> MethodDef<'a> {
|
|||||||
.iter()
|
.iter()
|
||||||
.enumerate()
|
.enumerate()
|
||||||
.skip(1)
|
.skip(1)
|
||||||
.map(|(arg_count, _selflike_arg)| format!("__arg{}", arg_count)),
|
.map(|(arg_count, _selflike_arg)| format!("__arg{arg_count}")),
|
||||||
)
|
)
|
||||||
.collect::<Vec<String>>();
|
.collect::<Vec<String>>();
|
||||||
|
|
||||||
@ -1181,7 +1181,7 @@ impl<'a> MethodDef<'a> {
|
|||||||
let get_tag_pieces = |cx: &ExtCtxt<'_>| {
|
let get_tag_pieces = |cx: &ExtCtxt<'_>| {
|
||||||
let tag_idents: Vec<_> = prefixes
|
let tag_idents: Vec<_> = prefixes
|
||||||
.iter()
|
.iter()
|
||||||
.map(|name| Ident::from_str_and_span(&format!("{}_tag", name), span))
|
.map(|name| Ident::from_str_and_span(&format!("{name}_tag"), span))
|
||||||
.collect();
|
.collect();
|
||||||
|
|
||||||
let mut tag_exprs: Vec<_> = tag_idents
|
let mut tag_exprs: Vec<_> = tag_idents
|
||||||
@ -1521,7 +1521,7 @@ impl<'a> TraitDef<'a> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn mk_pattern_ident(&self, prefix: &str, i: usize) -> Ident {
|
fn mk_pattern_ident(&self, prefix: &str, i: usize) -> Ident {
|
||||||
Ident::from_str_and_span(&format!("{}_{}", prefix, i), self.span)
|
Ident::from_str_and_span(&format!("{prefix}_{i}"), self.span)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn create_struct_pattern_fields(
|
fn create_struct_pattern_fields(
|
||||||
@ -1602,8 +1602,7 @@ impl<'a> TraitDef<'a> {
|
|||||||
sp,
|
sp,
|
||||||
ast::CRATE_NODE_ID,
|
ast::CRATE_NODE_ID,
|
||||||
format!(
|
format!(
|
||||||
"{} slice in a packed struct that derives a built-in trait",
|
"{ty} slice in a packed struct that derives a built-in trait"
|
||||||
ty
|
|
||||||
),
|
),
|
||||||
rustc_lint_defs::BuiltinLintDiagnostics::ByteSliceInPackedStructWithDerive
|
rustc_lint_defs::BuiltinLintDiagnostics::ByteSliceInPackedStructWithDerive
|
||||||
);
|
);
|
||||||
|
@@ -179,7 +179,7 @@ fn make_format_args(
 err.span_suggestion(
 unexpanded_fmt_span.shrink_to_lo(),
 "you might be missing a string literal to format with",
-format!("\"{}\", ", sugg_fmt),
+format!("\"{sugg_fmt}\", "),
 Applicability::MaybeIncorrect,
 );
 }
@@ -668,7 +668,7 @@ fn report_invalid_references(
 let num_args_desc = match args.explicit_args().len() {
 0 => "no arguments were given".to_string(),
 1 => "there is 1 argument".to_string(),
-n => format!("there are {} arguments", n),
+n => format!("there are {n} arguments"),
 };
 
 let mut e;
@@ -780,7 +780,7 @@ fn report_invalid_references(
 if num_placeholders == 1 {
 "is 1 argument".to_string()
 } else {
-format!("are {} arguments", num_placeholders)
+format!("are {num_placeholders} arguments")
 },
 ),
 );
@@ -811,7 +811,7 @@ fn report_invalid_references(
 };
 e = ecx.struct_span_err(
 span,
-format!("invalid reference to positional {} ({})", arg_list, num_args_desc),
+format!("invalid reference to positional {arg_list} ({num_args_desc})"),
 );
 e.note("positional arguments are zero-based");
 }
@@ -86,10 +86,7 @@ pub(crate) mod printf {
 '-' => c_left = true,
 '+' => c_plus = true,
 _ => {
-return Err(Some(format!(
-"the flag `{}` is unknown or unsupported",
-c
-)));
+return Err(Some(format!("the flag `{c}` is unknown or unsupported")));
 }
 }
 }
@@ -268,21 +265,21 @@ pub(crate) mod printf {
 impl Num {
 fn from_str(s: &str, arg: Option<&str>) -> Self {
 if let Some(arg) = arg {
-Num::Arg(arg.parse().unwrap_or_else(|_| panic!("invalid format arg `{:?}`", arg)))
+Num::Arg(arg.parse().unwrap_or_else(|_| panic!("invalid format arg `{arg:?}`")))
 } else if s == "*" {
 Num::Next
 } else {
-Num::Num(s.parse().unwrap_or_else(|_| panic!("invalid format num `{:?}`", s)))
+Num::Num(s.parse().unwrap_or_else(|_| panic!("invalid format num `{s:?}`")))
 }
 }
 
 fn translate(&self, s: &mut String) -> std::fmt::Result {
 use std::fmt::Write;
 match *self {
-Num::Num(n) => write!(s, "{}", n),
+Num::Num(n) => write!(s, "{n}"),
 Num::Arg(n) => {
 let n = n.checked_sub(1).ok_or(std::fmt::Error)?;
-write!(s, "{}$", n)
+write!(s, "{n}$")
 }
 Num::Next => write!(s, "*"),
 }
@@ -626,8 +623,8 @@ pub mod shell {
 impl Substitution<'_> {
 pub fn as_str(&self) -> String {
 match self {
-Substitution::Ordinal(n, _) => format!("${}", n),
-Substitution::Name(n, _) => format!("${}", n),
+Substitution::Ordinal(n, _) => format!("${n}"),
+Substitution::Name(n, _) => format!("${n}"),
 Substitution::Escape(_) => "$$".into(),
 }
 }
@@ -72,7 +72,7 @@ impl AllocFnFactory<'_, '_> {
 let mut abi_args = ThinVec::new();
 let mut i = 0;
 let mut mk = || {
-let name = Ident::from_str_and_span(&format!("arg{}", i), self.span);
+let name = Ident::from_str_and_span(&format!("arg{i}"), self.span);
 i += 1;
 name
 };
@@ -179,8 +179,7 @@ impl<'a> Visitor<'a> for CollectProcMacros<'a> {
 == prev_item.path.segments[0].ident.name
 {
 format!(
-"only one `#[{}]` attribute is allowed on any given function",
-path_str,
+"only one `#[{path_str}]` attribute is allowed on any given function",
 )
 } else {
 format!(
@@ -149,7 +149,7 @@ pub fn expand_include<'cx>(
 Ok(None) => {
 if self.p.token != token::Eof {
 let token = pprust::token_to_string(&self.p.token);
-let msg = format!("expected item, found `{}`", token);
+let msg = format!("expected item, found `{token}`");
 self.p.struct_span_err(self.p.token.span, msg).emit();
 }
 
@@ -71,17 +71,17 @@ pub enum TranslationBundleError {
 impl fmt::Display for TranslationBundleError {
 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 match self {
-TranslationBundleError::ReadFtl(e) => write!(f, "could not read ftl file: {}", e),
+TranslationBundleError::ReadFtl(e) => write!(f, "could not read ftl file: {e}"),
 TranslationBundleError::ParseFtl(e) => {
-write!(f, "could not parse ftl file: {}", e)
+write!(f, "could not parse ftl file: {e}")
 }
-TranslationBundleError::AddResource(e) => write!(f, "failed to add resource: {}", e),
+TranslationBundleError::AddResource(e) => write!(f, "failed to add resource: {e}"),
 TranslationBundleError::MissingLocale => write!(f, "missing locale directory"),
 TranslationBundleError::ReadLocalesDir(e) => {
-write!(f, "could not read locales dir: {}", e)
+write!(f, "could not read locales dir: {e}")
 }
 TranslationBundleError::ReadLocalesDirEntry(e) => {
-write!(f, "could not read locales dir entry: {}", e)
+write!(f, "could not read locales dir entry: {e}")
 }
 TranslationBundleError::LocaleIsNotDir => {
 write!(f, "`$sysroot/share/locales/$locale` is not a directory")
@@ -2145,7 +2145,7 @@ impl EmitterWriter {
 &mut self.dst,
 self.short_message,
 ) {
-panic!("failed to emit error: {}", e)
+panic!("failed to emit error: {e}")
 }
 }
 if !self.short_message {
@@ -2161,7 +2161,7 @@ impl EmitterWriter {
 true,
 None,
 ) {
-panic!("failed to emit error: {}", err);
+panic!("failed to emit error: {err}");
 }
 }
 for sugg in suggestions {
@@ -2180,7 +2180,7 @@ impl EmitterWriter {
 true,
 None,
 ) {
-panic!("failed to emit error: {}", e);
+panic!("failed to emit error: {e}");
 }
 }
 SuggestionStyle::HideCodeInline
@@ -2193,22 +2193,22 @@ impl EmitterWriter {
 &Level::Help,
 max_line_num_len,
 ) {
-panic!("failed to emit error: {}", e);
+panic!("failed to emit error: {e}");
 }
 }
 }
 }
 }
 }
-Err(e) => panic!("failed to emit error: {}", e),
+Err(e) => panic!("failed to emit error: {e}"),
 }
 
 let mut dst = self.dst.writable();
 match writeln!(dst) {
-Err(e) => panic!("failed to emit error: {}", e),
+Err(e) => panic!("failed to emit error: {e}"),
 _ => {
 if let Err(e) = dst.flush() {
-panic!("failed to emit error: {}", e)
+panic!("failed to emit error: {e}")
 }
 }
 }
@@ -159,7 +159,7 @@ impl Emitter for JsonEmitter {
 }
 .and_then(|_| self.dst.flush());
 if let Err(e) = result {
-panic!("failed to print diagnostics: {:?}", e);
+panic!("failed to print diagnostics: {e:?}");
 }
 }
 
@@ -172,7 +172,7 @@ impl Emitter for JsonEmitter {
 }
 .and_then(|_| self.dst.flush());
 if let Err(e) = result {
-panic!("failed to print notification: {:?}", e);
+panic!("failed to print notification: {e:?}");
 }
 }
 
@@ -194,7 +194,7 @@ impl Emitter for JsonEmitter {
 }
 .and_then(|_| self.dst.flush());
 if let Err(e) = result {
-panic!("failed to print future breakage report: {:?}", e);
+panic!("failed to print future breakage report: {e:?}");
 }
 }
 
@@ -208,7 +208,7 @@ impl Emitter for JsonEmitter {
 }
 .and_then(|_| self.dst.flush());
 if let Err(e) = result {
-panic!("failed to print unused externs: {:?}", e);
+panic!("failed to print unused externs: {e:?}");
 }
 }
 
@@ -1720,13 +1720,11 @@ impl HandlerInner {
 (count, delayed_count, as_bug) => {
 if delayed_count > 0 {
 panic!(
-"aborting after {} errors and {} delayed bugs due to `-Z treat-err-as-bug={}`",
-count, delayed_count, as_bug,
+"aborting after {count} errors and {delayed_count} delayed bugs due to `-Z treat-err-as-bug={as_bug}`",
 )
 } else {
 panic!(
-"aborting after {} errors due to `-Z treat-err-as-bug={}`",
-count, as_bug,
+"aborting after {count} errors due to `-Z treat-err-as-bug={as_bug}`",
 )
 }
 }
@@ -1862,7 +1860,7 @@ pub fn add_elided_lifetime_in_path_suggestion(
 }
 let anon_lts = vec!["'_"; n].join(", ");
 let suggestion =
-if incl_angl_brckt { format!("<{}>", anon_lts) } else { format!("{}, ", anon_lts) };
+if incl_angl_brckt { format!("<{anon_lts}>") } else { format!("{anon_lts}, ") };
 
 diag.subdiagnostic(IndicateAnonymousLifetime {
 span: insertion_span.shrink_to_hi(),
@@ -369,7 +369,7 @@ impl<'a> StripUnconfigured<'a> {
 let TokenTree::Token(pound_token @ Token { kind: TokenKind::Pound, .. }, _) =
 orig_trees.next().unwrap().clone()
 else {
-panic!("Bad tokens for attribute {:?}", attr);
+panic!("Bad tokens for attribute {attr:?}");
 };
 let pound_span = pound_token.span;
 
@@ -379,7 +379,7 @@ impl<'a> StripUnconfigured<'a> {
 let TokenTree::Token(bang_token @ Token { kind: TokenKind::Not, .. }, _) =
 orig_trees.next().unwrap().clone()
 else {
-panic!("Bad tokens for attribute {:?}", attr);
+panic!("Bad tokens for attribute {attr:?}");
 };
 trees.push(AttrTokenTree::Token(bang_token, Spacing::Alone));
 }
@@ -390,7 +390,7 @@ impl<'a> StripUnconfigured<'a> {
 Delimiter::Bracket,
 item.tokens
 .as_ref()
-.unwrap_or_else(|| panic!("Missing tokens for {:?}", item))
+.unwrap_or_else(|| panic!("Missing tokens for {item:?}"))
 .to_attr_token_stream(),
 );
 trees.push(bracket_group);
@@ -803,7 +803,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
 &self.cx.sess.parse_sess,
 sym::proc_macro_hygiene,
 span,
-format!("custom attributes cannot be applied to {}", kind),
+format!("custom attributes cannot be applied to {kind}"),
 )
 .emit();
 }
@@ -1707,7 +1707,7 @@ impl<'a, 'b> InvocationCollector<'a, 'b> {
 &UNUSED_ATTRIBUTES,
 attr.span,
 self.cx.current_expansion.lint_node_id,
-format!("unused attribute `{}`", attr_name),
+format!("unused attribute `{attr_name}`"),
 BuiltinLintDiagnostics::UnusedBuiltinAttribute {
 attr_name,
 macro_name: pprust::path_to_string(&call.path),
@@ -257,7 +257,7 @@ pub(super) fn emit_frag_parse_err(
 e.span_suggestion_verbose(
 site_span,
 "surround the macro invocation with `{}` to interpret the expansion as a statement",
-format!("{{ {}; }}", snippet),
+format!("{{ {snippet}; }}"),
 Applicability::MaybeIncorrect,
 );
 }
@@ -593,7 +593,7 @@ fn check_ops_is_prefix(
 return;
 }
 }
-buffer_lint(sess, span.into(), node_id, format!("unknown macro variable `{}`", name));
+buffer_lint(sess, span.into(), node_id, format!("unknown macro variable `{name}`"));
 }
 
 /// Returns whether `binder_ops` is a prefix of `occurrence_ops`.
@@ -626,7 +626,7 @@ fn ops_is_prefix(
 if i >= occurrence_ops.len() {
 let mut span = MultiSpan::from_span(span);
 span.push_span_label(binder.span, "expected repetition");
-let message = format!("variable '{}' is still repeating at this depth", name);
+let message = format!("variable '{name}' is still repeating at this depth");
 buffer_lint(sess, span, node_id, message);
 return;
 }
@@ -156,7 +156,7 @@ impl Display for MatcherLoc {
 MatcherLoc::MetaVarDecl { bind, kind, .. } => {
 write!(f, "meta-variable `${bind}")?;
 if let Some(kind) = kind {
-write!(f, ":{}", kind)?;
+write!(f, ":{kind}")?;
 }
 write!(f, "`")?;
 Ok(())
@@ -723,7 +723,7 @@ impl TtParser {
 .iter()
 .map(|mp| match &matcher[mp.idx] {
 MatcherLoc::MetaVarDecl { bind, kind: Some(kind), .. } => {
-format!("{} ('{}')", kind, bind)
+format!("{kind} ('{bind}')")
 }
 _ => unreachable!(),
 })
@@ -736,8 +736,8 @@ impl TtParser {
 "local ambiguity when calling macro `{}`: multiple parsing options: {}",
 self.macro_name,
 match self.next_mps.len() {
-0 => format!("built-in NTs {}.", nts),
-n => format!("built-in NTs {} or {n} other option{s}.", nts, s = pluralize!(n)),
+0 => format!("built-in NTs {nts}."),
+n => format!("built-in NTs {nts} or {n} other option{s}.", s = pluralize!(n)),
 }
 ),
 )
@@ -757,7 +757,7 @@ impl TtParser {
 match ret_val.entry(MacroRulesNormalizedIdent::new(bind)) {
 Vacant(spot) => spot.insert(res.next().unwrap()),
 Occupied(..) => {
-return Error(span, format!("duplicated bind name: {}", bind));
+return Error(span, format!("duplicated bind name: {bind}"));
 }
 };
 } else {
@@ -554,7 +554,7 @@ pub fn compile_declarative_macro(
 let (transparency, transparency_error) = attr::find_transparency(&def.attrs, macro_rules);
 match transparency_error {
 Some(TransparencyError::UnknownTransparency(value, span)) => {
-diag.span_err(span, format!("unknown macro transparency: `{}`", value));
+diag.span_err(span, format!("unknown macro transparency: `{value}`"));
 }
 Some(TransparencyError::MultipleTransparencyAttrs(old_span, new_span)) => {
 diag.span_err(vec![old_span, new_span], "multiple macro transparency attributes");
@@ -1197,7 +1197,7 @@ fn check_matcher_core<'tt>(
 may_be = may_be
 ),
 );
-err.span_label(sp, format!("not allowed after `{}` fragments", kind));
+err.span_label(sp, format!("not allowed after `{kind}` fragments"));
 
 if kind == NonterminalKind::PatWithOr
 && sess.edition.at_least_rust_2021()
@@ -1221,8 +1221,7 @@ fn check_matcher_core<'tt>(
 &[] => {}
 &[t] => {
 err.note(format!(
-"only {} is allowed after `{}` fragments",
-t, kind,
+"only {t} is allowed after `{kind}` fragments",
 ));
 }
 ts => {
@@ -1407,9 +1406,9 @@ fn is_in_follow(tok: &mbe::TokenTree, kind: NonterminalKind) -> IsInFollow {
 fn quoted_tt_to_string(tt: &mbe::TokenTree) -> String {
 match tt {
 mbe::TokenTree::Token(token) => pprust::token_to_string(&token).into(),
-mbe::TokenTree::MetaVar(_, name) => format!("${}", name),
-mbe::TokenTree::MetaVarDecl(_, name, Some(kind)) => format!("${}:{}", name, kind),
-mbe::TokenTree::MetaVarDecl(_, name, None) => format!("${}:", name),
+mbe::TokenTree::MetaVar(_, name) => format!("${name}"),
+mbe::TokenTree::MetaVarDecl(_, name, Some(kind)) => format!("${name}:{kind}"),
+mbe::TokenTree::MetaVarDecl(_, name, None) => format!("${name}:"),
 _ => panic!(
 "{}",
 "unexpected mbe::TokenTree::{Sequence or Delimited} \
@@ -194,7 +194,7 @@ fn parse_tree<'a>(
 Delimiter::Parenthesis => {}
 _ => {
 let tok = pprust::token_kind_to_string(&token::OpenDelim(delim));
-let msg = format!("expected `(` or `{{`, found `{}`", tok);
+let msg = format!("expected `(` or `{{`, found `{tok}`");
 sess.span_diagnostic.span_err(delim_span.entire(), msg);
 }
 }
@@ -95,7 +95,7 @@ impl base::AttrProcMacro for AttrProcMacro {
 |e| {
 let mut err = ecx.struct_span_err(span, "custom attribute panicked");
 if let Some(s) = e.as_str() {
-err.help(format!("message: {}", s));
+err.help(format!("message: {s}"));
 }
 err.emit()
 },
@@ -148,7 +148,7 @@ impl MultiItemModifier for DeriveProcMacro {
 Err(e) => {
 let mut err = ecx.struct_span_err(span, "proc-macro derive panicked");
 if let Some(s) = e.as_str() {
-err.help(format!("message: {}", s));
+err.help(format!("message: {s}"));
 }
 err.emit();
 return ExpandResult::Ready(vec![]);
@@ -622,7 +622,7 @@ impl server::SourceFile for Rustc<'_, '_> {
 impl server::Span for Rustc<'_, '_> {
 fn debug(&mut self, span: Self::Span) -> String {
 if self.ecx.ecfg.span_debug {
-format!("{:?}", span)
+format!("{span:?}")
 } else {
 format!("{:?} bytes({}..{})", span.ctxt(), span.lo().0, span.hi().0)
 }
@@ -123,7 +123,7 @@ where
 return Ok(());
 }
 let backtrace = std::backtrace::Backtrace::capture();
-writeln!(writer, "stack backtrace: \n{:?}", backtrace)
+writeln!(writer, "stack backtrace: \n{backtrace:?}")
 }
 }
 
@@ -43,7 +43,7 @@ fn decodable_body(
 let ty_name = s.ast().ident.to_string();
 let decode_body = match s.variants() {
 [] => {
-let message = format!("`{}` has no variants to decode", ty_name);
+let message = format!("`{ty_name}` has no variants to decode");
 quote! {
 panic!(#message)
 }
@@ -380,7 +380,7 @@ impl<'tcx> DepNodeParams<TyCtxt<'tcx>> for HirId {
 let local_id = local_id
 .as_u64()
 .try_into()
-.unwrap_or_else(|_| panic!("local id should be u32, found {:?}", local_id));
+.unwrap_or_else(|_| panic!("local id should be u32, found {local_id:?}"));
 Some(HirId { owner: OwnerId { def_id }, local_id: ItemLocalId::from_u32(local_id) })
 } else {
 None
@@ -35,7 +35,7 @@ impl rustc_query_system::dep_graph::DepKind for DepKind {
 if let Some(def_id) = node.extract_def_id(tcx) {
 write!(f, "{}", tcx.def_path_debug_str(def_id))?;
 } else if let Some(ref s) = tcx.dep_graph.dep_node_debug_str(*node) {
-write!(f, "{}", s)?;
+write!(f, "{s}")?;
 } else {
 write!(f, "{}", node.hash)?;
 }
@@ -534,7 +534,7 @@ impl<'hir> Map<'hir> {
 (m, span, hir_id)
 }
 Some(OwnerNode::Crate(item)) => (item, item.spans.inner_span, hir_id),
-node => panic!("not a module: {:?}", node),
+node => panic!("not a module: {node:?}"),
 }
 }
 
@@ -218,14 +218,12 @@ pub fn explain_lint_level_source(
 let hyphen_case_lint_name = name.replace('_', "-");
 if lint_flag_val.as_str() == name {
 err.note_once(format!(
-"requested on the command line with `{} {}`",
-flag, hyphen_case_lint_name
+"requested on the command line with `{flag} {hyphen_case_lint_name}`"
 ));
 } else {
 let hyphen_case_flag_val = lint_flag_val.as_str().replace('_', "-");
 err.note_once(format!(
-"`{} {}` implied by `{} {}`",
-flag, hyphen_case_lint_name, flag, hyphen_case_flag_val
+"`{flag} {hyphen_case_lint_name}` implied by `{flag} {hyphen_case_flag_val}`"
 ));
 }
 }
@@ -237,8 +235,7 @@ pub fn explain_lint_level_source(
 if lint_attr_name.as_str() != name {
 let level_str = level.as_str();
 err.note_once(format!(
-"`#[{}({})]` implied by `#[{}({})]`",
-level_str, name, level_str, lint_attr_name
+"`#[{level_str}({name})]` implied by `#[{level_str}({lint_attr_name})]`"
 ));
 }
 }
@@ -416,12 +413,11 @@ pub fn struct_lint_level(
 FutureIncompatibilityReason::EditionError(edition) => {
 let current_edition = sess.edition();
 format!(
-"this is accepted in the current edition (Rust {}) but is a hard error in Rust {}!",
-current_edition, edition
+"this is accepted in the current edition (Rust {current_edition}) but is a hard error in Rust {edition}!"
 )
 }
 FutureIncompatibilityReason::EditionSemanticsChange(edition) => {
-format!("this changes meaning in Rust {}", edition)
+format!("this changes meaning in Rust {edition}")
 }
 FutureIncompatibilityReason::Custom(reason) => reason.to_owned(),
 };
@@ -107,7 +107,7 @@ pub fn report_unstable(
 soft_handler: impl FnOnce(&'static Lint, Span, String),
 ) {
 let msg = match reason {
-Some(r) => format!("use of unstable library feature '{}': {}", feature, r),
+Some(r) => format!("use of unstable library feature '{feature}': {r}"),
 None => format!("use of unstable library feature '{}'", &feature),
 };
 
@@ -170,7 +170,7 @@ pub fn deprecation_suggestion(
 if let Some(suggestion) = suggestion {
 diag.span_suggestion_verbose(
 span,
-format!("replace the use of the deprecated {}", kind),
+format!("replace the use of the deprecated {kind}"),
 suggestion,
 Applicability::MachineApplicable,
 );
@@ -189,12 +189,12 @@ fn deprecation_message(
 path: &str,
 ) -> String {
 let message = if is_in_effect {
-format!("use of deprecated {} `{}`", kind, path)
+format!("use of deprecated {kind} `{path}`")
 } else {
 let since = since.as_ref().map(Symbol::as_str);
 
 if since == Some("TBD") {
-format!("use of {} `{}` that will be deprecated in a future Rust version", kind, path)
+format!("use of {kind} `{path}` that will be deprecated in a future Rust version")
 } else {
 format!(
 "use of {} `{}` that will be deprecated in future version {}",
@@ -206,7 +206,7 @@ fn deprecation_message(
 };
 
 match note {
-Some(reason) => format!("{}: {}", message, reason),
+Some(reason) => format!("{message}: {reason}"),
 None => message,
 }
 }
@@ -312,7 +312,7 @@ fn suggestion_for_allocator_api(
 return Some((
 inner_types,
 "consider wrapping the inner types in tuple".to_string(),
-format!("({})", snippet),
+format!("({snippet})"),
 Applicability::MaybeIncorrect,
 ));
 }
@@ -599,7 +599,7 @@ impl<'tcx> TyCtxt<'tcx> {
 |span, def_id| {
 // The API could be uncallable for other reasons, for example when a private module
 // was referenced.
-self.sess.delay_span_bug(span, format!("encountered unmarked API: {:?}", def_id));
+self.sess.delay_span_bug(span, format!("encountered unmarked API: {def_id:?}"));
 },
 )
 }
@@ -7,7 +7,7 @@ use rustc_middle::ty::TyCtxt;
 pub fn mir_fn_to_generic_graph<'tcx>(tcx: TyCtxt<'tcx>, body: &Body<'_>) -> Graph {
 let def_id = body.source.def_id();
 let def_name = graphviz_safe_def_name(def_id);
-let graph_name = format!("Mir_{}", def_name);
+let graph_name = format!("Mir_{def_name}");
 let dark_mode = tcx.sess.opts.unstable_opts.graphviz_dark_mode;
 
 // Nodes
@@ -48,7 +48,7 @@ fn bb_to_graph_node(block: BasicBlock, body: &Body<'_>, dark_mode: bool) -> Node
 };
 
 let style = NodeStyle { title_bg: Some(bgcolor.to_owned()), ..Default::default() };
-let mut stmts: Vec<String> = data.statements.iter().map(|x| format!("{:?}", x)).collect();
+let mut stmts: Vec<String> = data.statements.iter().map(|x| format!("{x:?}")).collect();
 
 // add the terminator to the stmts, gsgdt can print it out separately
 let mut terminator_head = String::new();
@@ -70,8 +70,8 @@ impl<
 
 writeln!(w, r#" graph [{}];"#, graph_attrs.join(" "))?;
 let content_attrs_str = content_attrs.join(" ");
-writeln!(w, r#" node [{}];"#, content_attrs_str)?;
-writeln!(w, r#" edge [{}];"#, content_attrs_str)?;
+writeln!(w, r#" node [{content_attrs_str}];"#)?;
+writeln!(w, r#" edge [{content_attrs_str}];"#)?;
 
 // Graph label
 if let Some(graph_label) = &self.graph_label {
@@ -112,7 +112,7 @@ impl<
 // (format!("{:?}", node), color)
 // };
 let color = if dark_mode { "dimgray" } else { "gray" };
-let (blk, bgcolor) = (format!("{:?}", node), color);
+let (blk, bgcolor) = (format!("{node:?}"), color);
 write!(
 w,
 r#"<tr><td bgcolor="{bgcolor}" {attrs} colspan="{colspan}">{blk}</td></tr>"#,
@@ -151,7 +151,7 @@ impl<
 } else {
 "".to_owned()
 };
-writeln!(w, r#" {} -> {} [label=<{}>];"#, src, trg, escaped_edge_label)?;
+writeln!(w, r#" {src} -> {trg} [label=<{escaped_edge_label}>];"#)?;
 }
 Ok(())
 }
@@ -163,7 +163,7 @@ impl<
 W: Write,
 {
 let escaped_label = dot::escape_html(label);
-writeln!(w, r#" label=<<br/><br/>{}<br align="left"/><br/><br/><br/>>;"#, escaped_label)
+writeln!(w, r#" label=<<br/><br/>{escaped_label}<br align="left"/><br/><br/><br/>>;"#)
 }
 
 fn node(&self, node: G::Node) -> String {
@@ -127,5 +127,5 @@ fn write_graph_label<'tcx, W: std::fmt::Write>(
 }
 
 fn escape<T: Debug>(t: &T) -> String {
-dot::escape_html(&format!("{:?}", t))
+dot::escape_html(&format!("{t:?}"))
 }
@@ -542,11 +542,7 @@ impl InitMaskMaterialized {
 debug_assert_eq!(
 result,
 find_bit_slow(self, start, end, is_init),
-"optimized implementation of find_bit is wrong for start={:?} end={:?} is_init={} init_mask={:#?}",
-start,
-end,
-is_init,
-self
+"optimized implementation of find_bit is wrong for start={start:?} end={end:?} is_init={is_init} init_mask={self:#?}"
 );
 
 result
@@ -155,7 +155,7 @@ impl<'tcx> InterpErrorInfo<'tcx> {
 }
 
 fn print_backtrace(backtrace: &Backtrace) {
-eprintln!("\n\nAn error occurred in miri:\n{}", backtrace);
+eprintln!("\n\nAn error occurred in miri:\n{backtrace}");
 }
 
 impl From<ErrorGuaranteed> for InterpErrorInfo<'_> {
@@ -176,7 +176,7 @@ impl<'tcx> GlobalId<'tcx> {
 pub fn display(self, tcx: TyCtxt<'tcx>) -> String {
 let instance_name = with_no_trimmed_paths!(tcx.def_path_str(self.instance.def.def_id()));
 if let Some(promoted) = self.promoted {
-format!("{}::{:?}", instance_name, promoted)
+format!("{instance_name}::{promoted:?}")
 } else {
 instance_name
 }
@@ -135,8 +135,8 @@ static_assert_size!(Scalar, 24);
 impl<Prov: Provenance> fmt::Debug for Scalar<Prov> {
 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 match self {
-Scalar::Ptr(ptr, _size) => write!(f, "{:?}", ptr),
-Scalar::Int(int) => write!(f, "{:?}", int),
+Scalar::Ptr(ptr, _size) => write!(f, "{ptr:?}"),
+Scalar::Int(int) => write!(f, "{int:?}"),
 }
 }
 }
@@ -144,8 +144,8 @@ impl<Prov: Provenance> fmt::Debug for Scalar<Prov> {
 impl<Prov: Provenance> fmt::Display for Scalar<Prov> {
 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 match self {
-Scalar::Ptr(ptr, _size) => write!(f, "pointer to {:?}", ptr),
-Scalar::Int(int) => write!(f, "{}", int),
+Scalar::Ptr(ptr, _size) => write!(f, "pointer to {ptr:?}"),
+Scalar::Int(int) => write!(f, "{int}"),
 }
 }
 }
@@ -153,8 +153,8 @@ impl<Prov: Provenance> fmt::Display for Scalar<Prov> {
 impl<Prov: Provenance> fmt::LowerHex for Scalar<Prov> {
 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
 match self {
-Scalar::Ptr(ptr, _size) => write!(f, "pointer to {:?}", ptr),
-Scalar::Int(int) => write!(f, "{:#x}", int),
+Scalar::Ptr(ptr, _size) => write!(f, "pointer to {ptr:?}"),
+Scalar::Int(int) => write!(f, "{int:#x}"),
 }
 }
 }
@ -619,7 +619,7 @@ impl<D: TyDecoder, T: Decodable<D>> Decodable<D> for ClearCrossCrate<T> {
|
|||||||
let val = T::decode(d);
|
let val = T::decode(d);
|
||||||
ClearCrossCrate::Set(val)
|
ClearCrossCrate::Set(val)
|
||||||
}
|
}
|
||||||
tag => panic!("Invalid tag for ClearCrossCrate: {:?}", tag),
|
tag => panic!("Invalid tag for ClearCrossCrate: {tag:?}"),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -1046,12 +1046,12 @@ pub enum VarDebugInfoContents<'tcx> {
|
|||||||
impl<'tcx> Debug for VarDebugInfoContents<'tcx> {
|
impl<'tcx> Debug for VarDebugInfoContents<'tcx> {
|
||||||
fn fmt(&self, fmt: &mut Formatter<'_>) -> fmt::Result {
|
fn fmt(&self, fmt: &mut Formatter<'_>) -> fmt::Result {
|
||||||
match self {
|
match self {
|
||||||
VarDebugInfoContents::Const(c) => write!(fmt, "{}", c),
|
VarDebugInfoContents::Const(c) => write!(fmt, "{c}"),
|
||||||
VarDebugInfoContents::Place(p) => write!(fmt, "{:?}", p),
|
VarDebugInfoContents::Place(p) => write!(fmt, "{p:?}"),
|
||||||
VarDebugInfoContents::Composite { ty, fragments } => {
|
VarDebugInfoContents::Composite { ty, fragments } => {
|
||||||
write!(fmt, "{:?}{{ ", ty)?;
|
write!(fmt, "{ty:?}{{ ")?;
|
||||||
for f in fragments.iter() {
|
for f in fragments.iter() {
|
||||||
write!(fmt, "{:?}, ", f)?;
|
write!(fmt, "{f:?}, ")?;
|
||||||
}
|
}
|
||||||
write!(fmt, "}}")
|
write!(fmt, "}}")
|
||||||
}
|
}
|
||||||
@@ -1315,55 +1315,47 @@ impl<O> AssertKind<O> {
         match self {
             BoundsCheck { ref len, ref index } => write!(
                 f,
-                "\"index out of bounds: the length is {{}} but the index is {{}}\", {:?}, {:?}",
-                len, index
+                "\"index out of bounds: the length is {{}} but the index is {{}}\", {len:?}, {index:?}"
             ),

            OverflowNeg(op) => {
-                write!(f, "\"attempt to negate `{{}}`, which would overflow\", {:?}", op)
+                write!(f, "\"attempt to negate `{{}}`, which would overflow\", {op:?}")
            }
-            DivisionByZero(op) => write!(f, "\"attempt to divide `{{}}` by zero\", {:?}", op),
+            DivisionByZero(op) => write!(f, "\"attempt to divide `{{}}` by zero\", {op:?}"),
            RemainderByZero(op) => write!(
                f,
-                "\"attempt to calculate the remainder of `{{}}` with a divisor of zero\", {:?}",
-                op
+                "\"attempt to calculate the remainder of `{{}}` with a divisor of zero\", {op:?}"
            ),
            Overflow(BinOp::Add, l, r) => write!(
                f,
-                "\"attempt to compute `{{}} + {{}}`, which would overflow\", {:?}, {:?}",
-                l, r
+                "\"attempt to compute `{{}} + {{}}`, which would overflow\", {l:?}, {r:?}"
            ),
            Overflow(BinOp::Sub, l, r) => write!(
                f,
-                "\"attempt to compute `{{}} - {{}}`, which would overflow\", {:?}, {:?}",
-                l, r
+                "\"attempt to compute `{{}} - {{}}`, which would overflow\", {l:?}, {r:?}"
            ),
            Overflow(BinOp::Mul, l, r) => write!(
                f,
-                "\"attempt to compute `{{}} * {{}}`, which would overflow\", {:?}, {:?}",
-                l, r
+                "\"attempt to compute `{{}} * {{}}`, which would overflow\", {l:?}, {r:?}"
            ),
            Overflow(BinOp::Div, l, r) => write!(
                f,
-                "\"attempt to compute `{{}} / {{}}`, which would overflow\", {:?}, {:?}",
-                l, r
+                "\"attempt to compute `{{}} / {{}}`, which would overflow\", {l:?}, {r:?}"
            ),
            Overflow(BinOp::Rem, l, r) => write!(
                f,
-                "\"attempt to compute the remainder of `{{}} % {{}}`, which would overflow\", {:?}, {:?}",
-                l, r
+                "\"attempt to compute the remainder of `{{}} % {{}}`, which would overflow\", {l:?}, {r:?}"
            ),
            Overflow(BinOp::Shr, _, r) => {
-                write!(f, "\"attempt to shift right by `{{}}`, which would overflow\", {:?}", r)
+                write!(f, "\"attempt to shift right by `{{}}`, which would overflow\", {r:?}")
            }
            Overflow(BinOp::Shl, _, r) => {
-                write!(f, "\"attempt to shift left by `{{}}`, which would overflow\", {:?}", r)
+                write!(f, "\"attempt to shift left by `{{}}`, which would overflow\", {r:?}")
            }
            MisalignedPointerDereference { required, found } => {
                write!(
                    f,
-                    "\"misaligned pointer dereference: address must be a multiple of {{}} but is {{}}\", {:?}, {:?}",
-                    required, found
+                    "\"misaligned pointer dereference: address must be a multiple of {{}} but is {{}}\", {required:?}, {found:?}"
                )
            }
            _ => write!(f, "\"{}\"", self.description()),
@@ -1459,9 +1451,9 @@ impl Debug for Statement<'_> {
     fn fmt(&self, fmt: &mut Formatter<'_>) -> fmt::Result {
         use self::StatementKind::*;
         match self.kind {
-            Assign(box (ref place, ref rv)) => write!(fmt, "{:?} = {:?}", place, rv),
+            Assign(box (ref place, ref rv)) => write!(fmt, "{place:?} = {rv:?}"),
             FakeRead(box (ref cause, ref place)) => {
-                write!(fmt, "FakeRead({:?}, {:?})", cause, place)
+                write!(fmt, "FakeRead({cause:?}, {place:?})")
             }
             Retag(ref kind, ref place) => write!(
                 fmt,
@@ -1474,20 +1466,20 @@ impl Debug for Statement<'_> {
                },
                place,
            ),
-            StorageLive(ref place) => write!(fmt, "StorageLive({:?})", place),
+            StorageLive(ref place) => write!(fmt, "StorageLive({place:?})"),
-            StorageDead(ref place) => write!(fmt, "StorageDead({:?})", place),
+            StorageDead(ref place) => write!(fmt, "StorageDead({place:?})"),
            SetDiscriminant { ref place, variant_index } => {
-                write!(fmt, "discriminant({:?}) = {:?}", place, variant_index)
+                write!(fmt, "discriminant({place:?}) = {variant_index:?}")
            }
-            Deinit(ref place) => write!(fmt, "Deinit({:?})", place),
+            Deinit(ref place) => write!(fmt, "Deinit({place:?})"),
            PlaceMention(ref place) => {
-                write!(fmt, "PlaceMention({:?})", place)
+                write!(fmt, "PlaceMention({place:?})")
            }
            AscribeUserType(box (ref place, ref c_ty), ref variance) => {
-                write!(fmt, "AscribeUserType({:?}, {:?}, {:?})", place, variance, c_ty)
+                write!(fmt, "AscribeUserType({place:?}, {variance:?}, {c_ty:?})")
            }
            Coverage(box self::Coverage { ref kind, code_region: Some(ref rgn) }) => {
-                write!(fmt, "Coverage::{:?} for {:?}", kind, rgn)
+                write!(fmt, "Coverage::{kind:?} for {rgn:?}")
            }
            Coverage(box ref coverage) => write!(fmt, "Coverage::{:?}", coverage.kind),
            Intrinsic(box ref intrinsic) => write!(fmt, "{intrinsic}"),
@@ -1767,13 +1759,13 @@ impl Debug for Place<'_> {
        for elem in self.projection.iter() {
            match elem {
                ProjectionElem::OpaqueCast(ty) => {
-                    write!(fmt, " as {})", ty)?;
+                    write!(fmt, " as {ty})")?;
                }
                ProjectionElem::Downcast(Some(name), _index) => {
-                    write!(fmt, " as {})", name)?;
+                    write!(fmt, " as {name})")?;
                }
                ProjectionElem::Downcast(None, index) => {
-                    write!(fmt, " as variant#{:?})", index)?;
+                    write!(fmt, " as variant#{index:?})")?;
                }
                ProjectionElem::Deref => {
                    write!(fmt, ")")?;
@@ -1782,25 +1774,25 @@ impl Debug for Place<'_> {
                    write!(fmt, ".{:?}: {:?})", field.index(), ty)?;
                }
                ProjectionElem::Index(ref index) => {
-                    write!(fmt, "[{:?}]", index)?;
+                    write!(fmt, "[{index:?}]")?;
                }
                ProjectionElem::ConstantIndex { offset, min_length, from_end: false } => {
-                    write!(fmt, "[{:?} of {:?}]", offset, min_length)?;
+                    write!(fmt, "[{offset:?} of {min_length:?}]")?;
                }
                ProjectionElem::ConstantIndex { offset, min_length, from_end: true } => {
-                    write!(fmt, "[-{:?} of {:?}]", offset, min_length)?;
+                    write!(fmt, "[-{offset:?} of {min_length:?}]")?;
                }
                ProjectionElem::Subslice { from, to, from_end: true } if to == 0 => {
-                    write!(fmt, "[{:?}:]", from)?;
+                    write!(fmt, "[{from:?}:]")?;
                }
                ProjectionElem::Subslice { from, to, from_end: true } if from == 0 => {
-                    write!(fmt, "[:-{:?}]", to)?;
+                    write!(fmt, "[:-{to:?}]")?;
                }
                ProjectionElem::Subslice { from, to, from_end: true } => {
-                    write!(fmt, "[{:?}:-{:?}]", from, to)?;
+                    write!(fmt, "[{from:?}:-{to:?}]")?;
                }
                ProjectionElem::Subslice { from, to, from_end: false } => {
-                    write!(fmt, "[{:?}..{:?}]", from, to)?;
+                    write!(fmt, "[{from:?}..{to:?}]")?;
                }
            }
        }
@@ -1894,9 +1886,9 @@ impl<'tcx> Debug for Operand<'tcx> {
    fn fmt(&self, fmt: &mut Formatter<'_>) -> fmt::Result {
        use self::Operand::*;
        match *self {
-            Constant(ref a) => write!(fmt, "{:?}", a),
+            Constant(ref a) => write!(fmt, "{a:?}"),
-            Copy(ref place) => write!(fmt, "{:?}", place),
+            Copy(ref place) => write!(fmt, "{place:?}"),
-            Move(ref place) => write!(fmt, "move {:?}", place),
+            Move(ref place) => write!(fmt, "move {place:?}"),
        }
    }
 }
@@ -1935,11 +1927,11 @@ impl<'tcx> Operand<'tcx> {
            let param_env_and_ty = ty::ParamEnv::empty().and(ty);
            let type_size = tcx
                .layout_of(param_env_and_ty)
-                .unwrap_or_else(|e| panic!("could not compute layout for {:?}: {:?}", ty, e))
+                .unwrap_or_else(|e| panic!("could not compute layout for {ty:?}: {e:?}"))
                .size;
            let scalar_size = match val {
                Scalar::Int(int) => int.size(),
-                _ => panic!("Invalid scalar type {:?}", val),
+                _ => panic!("Invalid scalar type {val:?}"),
            };
            scalar_size == type_size
        });
@@ -2055,26 +2047,26 @@ impl<'tcx> Debug for Rvalue<'tcx> {
        use self::Rvalue::*;

        match *self {
-            Use(ref place) => write!(fmt, "{:?}", place),
+            Use(ref place) => write!(fmt, "{place:?}"),
            Repeat(ref a, b) => {
-                write!(fmt, "[{:?}; ", a)?;
+                write!(fmt, "[{a:?}; ")?;
                pretty_print_const(b, fmt, false)?;
                write!(fmt, "]")
            }
-            Len(ref a) => write!(fmt, "Len({:?})", a),
+            Len(ref a) => write!(fmt, "Len({a:?})"),
            Cast(ref kind, ref place, ref ty) => {
-                write!(fmt, "{:?} as {:?} ({:?})", place, ty, kind)
+                write!(fmt, "{place:?} as {ty:?} ({kind:?})")
            }
-            BinaryOp(ref op, box (ref a, ref b)) => write!(fmt, "{:?}({:?}, {:?})", op, a, b),
+            BinaryOp(ref op, box (ref a, ref b)) => write!(fmt, "{op:?}({a:?}, {b:?})"),
            CheckedBinaryOp(ref op, box (ref a, ref b)) => {
-                write!(fmt, "Checked{:?}({:?}, {:?})", op, a, b)
+                write!(fmt, "Checked{op:?}({a:?}, {b:?})")
            }
-            UnaryOp(ref op, ref a) => write!(fmt, "{:?}({:?})", op, a),
+            UnaryOp(ref op, ref a) => write!(fmt, "{op:?}({a:?})"),
-            Discriminant(ref place) => write!(fmt, "discriminant({:?})", place),
+            Discriminant(ref place) => write!(fmt, "discriminant({place:?})"),
            NullaryOp(ref op, ref t) => match op {
-                NullOp::SizeOf => write!(fmt, "SizeOf({:?})", t),
+                NullOp::SizeOf => write!(fmt, "SizeOf({t:?})"),
-                NullOp::AlignOf => write!(fmt, "AlignOf({:?})", t),
+                NullOp::AlignOf => write!(fmt, "AlignOf({t:?})"),
-                NullOp::OffsetOf(fields) => write!(fmt, "OffsetOf({:?}, {:?})", t, fields),
+                NullOp::OffsetOf(fields) => write!(fmt, "OffsetOf({t:?}, {fields:?})"),
            },
            ThreadLocalRef(did) => ty::tls::with(|tcx| {
                let muta = tcx.static_mutability(did).unwrap().prefix_str();
@@ -2101,10 +2093,10 @@ impl<'tcx> Debug for Rvalue<'tcx> {
                    // Do not even print 'static
                    String::new()
                };
-                write!(fmt, "&{}{}{:?}", region, kind_str, place)
+                write!(fmt, "&{region}{kind_str}{place:?}")
            }

-            CopyForDeref(ref place) => write!(fmt, "deref_copy {:#?}", place),
+            CopyForDeref(ref place) => write!(fmt, "deref_copy {place:#?}"),

            AddressOf(mutability, ref place) => {
                let kind_str = match mutability {
@@ -2112,7 +2104,7 @@ impl<'tcx> Debug for Rvalue<'tcx> {
                    Mutability::Not => "const",
                };

-                write!(fmt, "&raw {} {:?}", kind_str, place)
+                write!(fmt, "&raw {kind_str} {place:?}")
            }

            Aggregate(ref kind, ref places) => {
@@ -2125,7 +2117,7 @@ impl<'tcx> Debug for Rvalue<'tcx> {
                };

                match **kind {
-                    AggregateKind::Array(_) => write!(fmt, "{:?}", places),
+                    AggregateKind::Array(_) => write!(fmt, "{places:?}"),

                    AggregateKind::Tuple => {
                        if places.is_empty() {
@@ -2211,7 +2203,7 @@ impl<'tcx> Debug for Rvalue<'tcx> {
            }

            ShallowInitBox(ref place, ref ty) => {
-                write!(fmt, "ShallowInitBox({:?}, {:?})", place, ty)
+                write!(fmt, "ShallowInitBox({place:?}, {ty:?})")
            }
        }
    }
@@ -2755,7 +2747,7 @@ rustc_index::newtype_index! {

 impl<'tcx> Debug for Constant<'tcx> {
    fn fmt(&self, fmt: &mut Formatter<'_>) -> fmt::Result {
-        write!(fmt, "{}", self)
+        write!(fmt, "{self}")
    }
 }

@@ -2831,7 +2823,7 @@ fn pretty_print_const_value<'tcx>(
    let ty = tcx.lift(ty).unwrap();

    if tcx.sess.verbose() {
-        fmt.write_str(&format!("ConstValue({:?}: {})", ct, ty))?;
+        fmt.write_str(&format!("ConstValue({ct:?}: {ty})"))?;
        return Ok(());
    }

@@ -2901,7 +2893,7 @@ fn pretty_print_const_value<'tcx>(
                fmt.write_str(")")?;
            }
            ty::Adt(def, _) if def.variants().is_empty() => {
-                fmt.write_str(&format!("{{unreachable(): {}}}", ty))?;
+                fmt.write_str(&format!("{{unreachable(): {ty}}}"))?;
            }
            ty::Adt(def, args) => {
                let variant_idx = contents
@@ -223,7 +223,7 @@ impl<'tcx> MonoItem<'tcx> {
 impl<'tcx> fmt::Display for MonoItem<'tcx> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match *self {
-            MonoItem::Fn(instance) => write!(f, "fn {}", instance),
+            MonoItem::Fn(instance) => write!(f, "fn {instance}"),
            MonoItem::Static(def_id) => {
                write!(f, "static {}", Instance::new(def_id, GenericArgs::empty()))
            }
@@ -534,17 +534,17 @@ impl<'tcx> CodegenUnitNameBuilder<'tcx> {
            format!("{}.{:08x}{}", tcx.crate_name(cnum), stable_crate_id, local_crate_id)
        });

-        write!(cgu_name, "{}", crate_prefix).unwrap();
+        write!(cgu_name, "{crate_prefix}").unwrap();

        // Add the components
        for component in components {
-            write!(cgu_name, "-{}", component).unwrap();
+            write!(cgu_name, "-{component}").unwrap();
        }

        if let Some(special_suffix) = special_suffix {
            // We add a dot in here so it cannot clash with anything in a regular
            // Rust identifier
-            write!(cgu_name, ".{}", special_suffix).unwrap();
+            write!(cgu_name, ".{special_suffix}").unwrap();
        }

        Symbol::intern(&cgu_name)
@@ -124,14 +124,14 @@ fn dump_matched_mir_node<'tcx, F>(
    let def_path =
        ty::print::with_forced_impl_filename_line!(tcx.def_path_str(body.source.def_id()));
    // ignore-tidy-odd-backticks the literal below is fine
-    write!(file, "// MIR for `{}", def_path)?;
+    write!(file, "// MIR for `{def_path}")?;
    match body.source.promoted {
        None => write!(file, "`")?,
-        Some(promoted) => write!(file, "::{:?}`", promoted)?,
+        Some(promoted) => write!(file, "::{promoted:?}`")?,
    }
-    writeln!(file, " {} {}", disambiguator, pass_name)?;
+    writeln!(file, " {disambiguator} {pass_name}")?;
    if let Some(ref layout) = body.generator_layout() {
-        writeln!(file, "/* generator_layout = {:#?} */", layout)?;
+        writeln!(file, "/* generator_layout = {layout:#?} */")?;
    }
    writeln!(file)?;
    extra_data(PassWhere::BeforeCFG, &mut file)?;
@@ -169,7 +169,7 @@ fn dump_file_basename<'tcx>(
 ) -> String {
    let source = body.source;
    let promotion_id = match source.promoted {
-        Some(id) => format!("-{:?}", id),
+        Some(id) => format!("-{id:?}"),
        None => String::new(),
    };

@@ -203,8 +203,7 @@ fn dump_file_basename<'tcx>(
    };

    format!(
-        "{}.{}{}{}{}.{}.{}",
-        crate_name, item_name, shim_disambiguator, promotion_id, pass_num, pass_name, disambiguator,
+        "{crate_name}.{item_name}{shim_disambiguator}{promotion_id}{pass_num}.{pass_name}.{disambiguator}",
    )
 }

@@ -215,7 +214,7 @@ fn dump_path(tcx: TyCtxt<'_>, basename: &str, extension: &str) -> PathBuf {
    let mut file_path = PathBuf::new();
    file_path.push(Path::new(&tcx.sess.opts.unstable_opts.dump_mir_dir));

-    let file_name = format!("{}.{}", basename, extension,);
+    let file_name = format!("{basename}.{extension}",);

    file_path.push(&file_name);

@@ -233,12 +232,12 @@ fn create_dump_file_with_basename(
        fs::create_dir_all(parent).map_err(|e| {
            io::Error::new(
                e.kind(),
-                format!("IO error creating MIR dump directory: {:?}; {}", parent, e),
+                format!("IO error creating MIR dump directory: {parent:?}; {e}"),
            )
        })?;
    }
    Ok(io::BufWriter::new(fs::File::create(&file_path).map_err(|e| {
-        io::Error::new(e.kind(), format!("IO error creating MIR dump file: {:?}; {}", file_path, e))
+        io::Error::new(e.kind(), format!("IO error creating MIR dump file: {file_path:?}; {e}"))
    })?))
 }

@@ -346,28 +345,24 @@ where

    // Basic block label at the top.
    let cleanup_text = if data.is_cleanup { " (cleanup)" } else { "" };
-    writeln!(w, "{}{:?}{}: {{", INDENT, block, cleanup_text)?;
+    writeln!(w, "{INDENT}{block:?}{cleanup_text}: {{")?;

    // List of statements in the middle.
    let mut current_location = Location { block, statement_index: 0 };
    for statement in &data.statements {
        extra_data(PassWhere::BeforeLocation(current_location), w)?;
-        let indented_body = format!("{0}{0}{1:?};", INDENT, statement);
+        let indented_body = format!("{INDENT}{INDENT}{statement:?};");
        if tcx.sess.opts.unstable_opts.mir_include_spans {
            writeln!(
                w,
                "{:A$} // {}{}",
                indented_body,
-                if tcx.sess.verbose() {
-                    format!("{:?}: ", current_location)
-                } else {
-                    String::new()
-                },
+                if tcx.sess.verbose() { format!("{current_location:?}: ") } else { String::new() },
                comment(tcx, statement.source_info),
                A = ALIGN,
            )?;
        } else {
-            writeln!(w, "{}", indented_body)?;
+            writeln!(w, "{indented_body}")?;
        }

        write_extra(tcx, w, |visitor| {
@@ -387,12 +382,12 @@ where
            w,
            "{:A$} // {}{}",
            indented_terminator,
-            if tcx.sess.verbose() { format!("{:?}: ", current_location) } else { String::new() },
+            if tcx.sess.verbose() { format!("{current_location:?}: ") } else { String::new() },
            comment(tcx, data.terminator().source_info),
            A = ALIGN,
        )?;
    } else {
-        writeln!(w, "{}", indented_terminator)?;
+        writeln!(w, "{indented_terminator}")?;
    }

    write_extra(tcx, w, |visitor| {
@@ -402,7 +397,7 @@ where
    extra_data(PassWhere::AfterLocation(current_location), w)?;
    extra_data(PassWhere::AfterTerminator(block), w)?;

-    writeln!(w, "{}}}", INDENT)
+    writeln!(w, "{INDENT}}}")
 }

 /// After we print the main statement, we sometimes dump extra
@@ -457,25 +452,25 @@ impl<'tcx> Visitor<'tcx> for ExtraComments<'tcx> {
                    self.tcx.sess.source_map().span_to_embeddable_string(*span)
                ));
                if let Some(user_ty) = user_ty {
-                    self.push(&format!("+ user_ty: {:?}", user_ty));
+                    self.push(&format!("+ user_ty: {user_ty:?}"));
                }

                // FIXME: this is a poor version of `pretty_print_const_value`.
                let fmt_val = |val: &ConstValue<'tcx>| match val {
                    ConstValue::ZeroSized => "<ZST>".to_string(),
-                    ConstValue::Scalar(s) => format!("Scalar({:?})", s),
+                    ConstValue::Scalar(s) => format!("Scalar({s:?})"),
                    ConstValue::Slice { .. } => "Slice(..)".to_string(),
                    ConstValue::ByRef { .. } => "ByRef(..)".to_string(),
                };

                let fmt_valtree = |valtree: &ty::ValTree<'tcx>| match valtree {
-                    ty::ValTree::Leaf(leaf) => format!("ValTree::Leaf({:?})", leaf),
+                    ty::ValTree::Leaf(leaf) => format!("ValTree::Leaf({leaf:?})"),
                    ty::ValTree::Branch(_) => "ValTree::Branch(..)".to_string(),
                };

                let val = match literal {
                    ConstantKind::Ty(ct) => match ct.kind() {
-                        ty::ConstKind::Param(p) => format!("Param({})", p),
+                        ty::ConstKind::Param(p) => format!("Param({p})"),
                        ty::ConstKind::Unevaluated(uv) => {
                            format!("Unevaluated({}, {:?})", self.tcx.def_path_str(uv.def), uv.args,)
                        }
@@ -514,20 +509,20 @@ impl<'tcx> Visitor<'tcx> for ExtraComments<'tcx> {
        match **kind {
            AggregateKind::Closure(def_id, args) => {
                self.push("closure");
-                self.push(&format!("+ def_id: {:?}", def_id));
+                self.push(&format!("+ def_id: {def_id:?}"));
-                self.push(&format!("+ args: {:#?}", args));
+                self.push(&format!("+ args: {args:#?}"));
            }

            AggregateKind::Generator(def_id, args, movability) => {
                self.push("generator");
-                self.push(&format!("+ def_id: {:?}", def_id));
+                self.push(&format!("+ def_id: {def_id:?}"));
-                self.push(&format!("+ args: {:#?}", args));
+                self.push(&format!("+ args: {args:#?}"));
-                self.push(&format!("+ movability: {:?}", movability));
+                self.push(&format!("+ movability: {movability:?}"));
            }

            AggregateKind::Adt(_, _, _, Some(user_ty), _) => {
                self.push("adt");
-                self.push(&format!("+ user_ty: {:?}", user_ty));
+                self.push(&format!("+ user_ty: {user_ty:?}"));
            }

            _ => {}
@@ -578,7 +573,7 @@ fn write_scope_tree(
                comment(tcx, var_debug_info.source_info),
            )?;
        } else {
-            writeln!(w, "{}", indented_debug_info)?;
+            writeln!(w, "{indented_debug_info}")?;
        }
    }

@@ -600,7 +595,7 @@ fn write_scope_tree(
            format!("{0:1$}let {2}{3:?}: {4:?}", INDENT, indent, mut_str, local, local_decl.ty);
        if let Some(user_ty) = &local_decl.user_ty {
            for user_ty in user_ty.projections() {
-                write!(indented_decl, " as {:?}", user_ty).unwrap();
+                write!(indented_decl, " as {user_ty:?}").unwrap();
            }
        }
        indented_decl.push(';');
@@ -617,7 +612,7 @@ fn write_scope_tree(
                comment(tcx, local_decl.source_info),
            )?;
        } else {
-            writeln!(w, "{}", indented_decl,)?;
+            writeln!(w, "{indented_decl}",)?;
        }
    }

@@ -654,10 +649,10 @@ fn write_scope_tree(
                    tcx.sess.source_map().span_to_embeddable_string(span),
                )?;
            } else {
-                writeln!(w, "{}", indented_header)?;
+                writeln!(w, "{indented_header}")?;
            }
        } else {
-            writeln!(w, "{}", indented_header)?;
+            writeln!(w, "{indented_header}")?;
        }

        write_scope_tree(tcx, body, scope_tree, w, child, depth + 1)?;
@@ -844,7 +839,7 @@ fn write_allocation_endline(w: &mut dyn std::fmt::Write, ascii: &str) -> std::fm
    for _ in 0..(BYTES_PER_LINE - ascii.chars().count()) {
        write!(w, " ")?;
    }
-    writeln!(w, " │ {}", ascii)
+    writeln!(w, " │ {ascii}")
 }

 /// Number of bytes to print per allocation hex dump line.
||||||
@ -880,7 +875,7 @@ pub fn write_allocation_bytes<'tcx, Prov: Provenance, Extra, Bytes: AllocBytes>(
|
|||||||
if num_lines > 0 {
|
if num_lines > 0 {
|
||||||
write!(w, "{}0x{:02$x} │ ", prefix, 0, pos_width)?;
|
write!(w, "{}0x{:02$x} │ ", prefix, 0, pos_width)?;
|
||||||
} else {
|
} else {
|
||||||
write!(w, "{}", prefix)?;
|
write!(w, "{prefix}")?;
|
||||||
}
|
}
|
||||||
|
|
||||||
let mut i = Size::ZERO;
|
let mut i = Size::ZERO;
|
||||||
@@ -913,10 +908,10 @@ pub fn write_allocation_bytes<'tcx, Prov: Provenance, Extra, Bytes: AllocBytes>(
            let offset = Size::from_bytes(offset);
            let provenance_width = |bytes| bytes * 3;
            let ptr = Pointer::new(prov, offset);
-            let mut target = format!("{:?}", ptr);
+            let mut target = format!("{ptr:?}");
            if target.len() > provenance_width(ptr_size.bytes_usize() - 1) {
                // This is too long, try to save some space.
-                target = format!("{:#?}", ptr);
+                target = format!("{ptr:#?}");
            }
            if ((i - line_start) + ptr_size).bytes_usize() > BYTES_PER_LINE {
                // This branch handles the situation where a provenance starts in the current line
@@ -935,10 +930,10 @@ pub fn write_allocation_bytes<'tcx, Prov: Provenance, Extra, Bytes: AllocBytes>(
                line_start =
                    write_allocation_newline(w, line_start, &ascii, pos_width, prefix)?;
                ascii.clear();
-                write!(w, "{0:─^1$}╼", target, overflow_width)?;
+                write!(w, "{target:─^overflow_width$}╼")?;
            } else {
                oversized_ptr(&mut target, remainder_width);
-                write!(w, "╾{0:─^1$}", target, remainder_width)?;
+                write!(w, "╾{target:─^remainder_width$}")?;
                line_start =
                    write_allocation_newline(w, line_start, &ascii, pos_width, prefix)?;
                write!(w, "{0:─^1$}╼", "", overflow_width)?;
@@ -955,7 +950,7 @@ pub fn write_allocation_bytes<'tcx, Prov: Provenance, Extra, Bytes: AllocBytes>(
            let provenance_width = provenance_width(ptr_size.bytes_usize() - 1);
            oversized_ptr(&mut target, provenance_width);
            ascii.push('╾');
-            write!(w, "╾{0:─^1$}╼", target, provenance_width)?;
+            write!(w, "╾{target:─^provenance_width$}╼")?;
            for _ in 0..ptr_size.bytes() - 2 {
                ascii.push('─');
            }
@@ -972,7 +967,7 @@ pub fn write_allocation_bytes<'tcx, Prov: Provenance, Extra, Bytes: AllocBytes>(
            // Format is similar to "oversized" above.
            let j = i.bytes_usize();
            let c = alloc.inspect_with_uninit_and_ptr_outside_interpreter(j..j + 1)[0];
-            write!(w, "╾{:02x}{:#?} (1 ptr byte)╼", c, prov)?;
+            write!(w, "╾{c:02x}{prov:#?} (1 ptr byte)╼")?;
            i += Size::from_bytes(1);
        } else if alloc
            .init_mask()
@@ -984,7 +979,7 @@ pub fn write_allocation_bytes<'tcx, Prov: Provenance, Extra, Bytes: AllocBytes>(
            // Checked definedness (and thus range) and provenance. This access also doesn't
            // influence interpreter execution but is only for debugging.
            let c = alloc.inspect_with_uninit_and_ptr_outside_interpreter(j..j + 1)[0];
-            write!(w, "{:02x}", c)?;
+            write!(w, "{c:02x}")?;
            if c.is_ascii_control() || c >= 0x80 {
                ascii.push('.');
            } else {
@@ -1018,7 +1013,7 @@ fn write_mir_sig(tcx: TyCtxt<'_>, body: &Body<'_>, w: &mut dyn Write) -> io::Res
        _ => tcx.is_closure(def_id),
    };
    match (kind, body.source.promoted) {
-        (_, Some(i)) => write!(w, "{:?} in ", i)?,
+        (_, Some(i)) => write!(w, "{i:?} in ")?,
        (DefKind::Const | DefKind::AssocConst, _) => write!(w, "const ")?,
        (DefKind::Static(hir::Mutability::Not), _) => write!(w, "static ")?,
        (DefKind::Static(hir::Mutability::Mut), _) => write!(w, "static mut ")?,
@@ -1051,7 +1046,7 @@ fn write_mir_sig(tcx: TyCtxt<'_>, body: &Body<'_>, w: &mut dyn Write) -> io::Res

    if let Some(yield_ty) = body.yield_ty() {
        writeln!(w)?;
-        writeln!(w, "yields {}", yield_ty)?;
+        writeln!(w, "yields {yield_ty}")?;
    }

    write!(w, " ")?;
@@ -198,7 +198,7 @@ impl Debug for GeneratorLayout<'_> {
                if fmt.alternate() {
                    write!(fmt, "{:9}({:?})", variant_name, self.0)
                } else {
-                    write!(fmt, "{}", variant_name)
+                    write!(fmt, "{variant_name}")
                }
            }
        }
@@ -159,10 +159,10 @@ where
        indent_to_initial_start_col,
        source_map.span_to_snippet(spanview_span).expect("function should have printable source")
    );
-    writeln!(w, "{}", HEADER)?;
+    writeln!(w, "{HEADER}")?;
-    writeln!(w, "<title>{}</title>", title)?;
+    writeln!(w, "<title>{title}</title>")?;
-    writeln!(w, "{}", STYLE_SECTION)?;
+    writeln!(w, "{STYLE_SECTION}")?;
-    writeln!(w, "{}", START_BODY)?;
+    writeln!(w, "{START_BODY}")?;
    write!(
        w,
        r#"<div class="code" style="counter-reset: line {}"><span class="line">{}"#,
@@ -226,7 +226,7 @@ where
        write_coverage_gap(tcx, from_pos, end_pos, w)?;
    }
    writeln!(w, r#"</span></div>"#)?;
-    writeln!(w, "{}", FOOTER)?;
+    writeln!(w, "{FOOTER}")?;
    Ok(())
 }

@@ -561,17 +561,16 @@ where
        }
        for (i, line) in html_snippet.lines().enumerate() {
            if i > 0 {
-                write!(w, "{}", NEW_LINE_SPAN)?;
+                write!(w, "{NEW_LINE_SPAN}")?;
            }
            write!(
                w,
-                r#"<span class="code{}" style="--layer: {}"{}>{}</span>"#,
-                maybe_alt_class, layer, maybe_title_attr, line
+                r#"<span class="code{maybe_alt_class}" style="--layer: {layer}"{maybe_title_attr}>{line}</span>"#
            )?;
        }
        // Check for and translate trailing newlines, because `str::lines()` ignores them
        if html_snippet.ends_with('\n') {
-            write!(w, "{}", NEW_LINE_SPAN)?;
+            write!(w, "{NEW_LINE_SPAN}")?;
        }
        if layer == 1 {
            write!(w, "</span>")?;
@@ -280,7 +280,7 @@ impl<'tcx> Debug for TerminatorKind<'tcx> {

        match (successor_count, unwind) {
            (0, None) => Ok(()),
-            (0, Some(unwind)) => write!(fmt, " -> {}", unwind),
+            (0, Some(unwind)) => write!(fmt, " -> {unwind}"),
            (1, None) => write!(fmt, " -> {:?}", self.successors().next().unwrap()),
            _ => {
                write!(fmt, " -> [")?;
@@ -307,22 +307,22 @@ impl<'tcx> TerminatorKind<'tcx> {
        use self::TerminatorKind::*;
        match self {
            Goto { .. } => write!(fmt, "goto"),
-            SwitchInt { discr, .. } => write!(fmt, "switchInt({:?})", discr),
+            SwitchInt { discr, .. } => write!(fmt, "switchInt({discr:?})"),
            Return => write!(fmt, "return"),
            GeneratorDrop => write!(fmt, "generator_drop"),
            Resume => write!(fmt, "resume"),
            Terminate => write!(fmt, "abort"),
-            Yield { value, resume_arg, .. } => write!(fmt, "{:?} = yield({:?})", resume_arg, value),
+            Yield { value, resume_arg, .. } => write!(fmt, "{resume_arg:?} = yield({value:?})"),
            Unreachable => write!(fmt, "unreachable"),
-            Drop { place, .. } => write!(fmt, "drop({:?})", place),
+            Drop { place, .. } => write!(fmt, "drop({place:?})"),
            Call { func, args, destination, .. } => {
-                write!(fmt, "{:?} = ", destination)?;
+                write!(fmt, "{destination:?} = ")?;
-                write!(fmt, "{:?}(", func)?;
+                write!(fmt, "{func:?}(")?;
                for (index, arg) in args.iter().enumerate() {
                    if index > 0 {
                        write!(fmt, ", ")?;
                    }
-                    write!(fmt, "{:?}", arg)?;
+                    write!(fmt, "{arg:?}")?;
                }
                write!(fmt, ")")
            }
@@ -331,7 +331,7 @@ impl<'tcx> TerminatorKind<'tcx> {
                if !expected {
                    write!(fmt, "!")?;
                }
-                write!(fmt, "{:?}, ", cond)?;
+                write!(fmt, "{cond:?}, ")?;
                msg.fmt_assert_args(fmt)?;
                write!(fmt, ")")
            }
@@ -344,7 +344,7 @@ impl<'tcx> TerminatorKind<'tcx> {
                let print_late = |&late| if late { "late" } else { "" };
                match op {
                    InlineAsmOperand::In { reg, value } => {
-                        write!(fmt, "in({}) {:?}", reg, value)?;
+                        write!(fmt, "in({reg}) {value:?}")?;
                    }
                    InlineAsmOperand::Out { reg, late, place: Some(place) } => {
                        write!(fmt, "{}out({}) {:?}", print_late(late), reg, place)?;
@@ -371,17 +371,17 @@ impl<'tcx> TerminatorKind<'tcx> {
                        write!(fmt, "in{}out({}) {:?} => _", print_late(late), reg, in_value)?;
                    }
                    InlineAsmOperand::Const { value } => {
-                        write!(fmt, "const {:?}", value)?;
+                        write!(fmt, "const {value:?}")?;
                    }
                    InlineAsmOperand::SymFn { value } => {
-                        write!(fmt, "sym_fn {:?}", value)?;
+                        write!(fmt, "sym_fn {value:?}")?;
                    }
                    InlineAsmOperand::SymStatic { def_id } => {
-                        write!(fmt, "sym_static {:?}", def_id)?;
+                        write!(fmt, "sym_static {def_id:?}")?;
                    }
                }
            }
-            write!(fmt, ", options({:?}))", options)
+            write!(fmt, ", options({options:?}))")
        }
    }
 }
@@ -1284,7 +1284,7 @@ rustc_queries! {
    query vtable_allocation(key: (Ty<'tcx>, Option<ty::PolyExistentialTraitRef<'tcx>>)) -> mir::interpret::AllocId {
        desc { |tcx| "vtable const allocation for <{} as {}>",
            key.0,
-            key.1.map(|trait_ref| format!("{}", trait_ref)).unwrap_or("_".to_owned())
+            key.1.map(|trait_ref| format!("{trait_ref}")).unwrap_or("_".to_owned())
        }
    }

@@ -659,7 +659,7 @@ impl<'tcx> Pat<'tcx> {

 impl<'tcx> IntoDiagnosticArg for Pat<'tcx> {
    fn into_diagnostic_arg(self) -> DiagnosticArgValue<'static> {
-        format!("{}", self).into_diagnostic_arg()
+        format!("{self}").into_diagnostic_arg()
    }
 }

@@ -789,7 +789,7 @@ impl<'tcx> fmt::Display for Pat<'tcx> {

        match self.kind {
            PatKind::Wild => write!(f, "_"),
-            PatKind::AscribeUserType { ref subpattern, .. } => write!(f, "{}: _", subpattern),
+            PatKind::AscribeUserType { ref subpattern, .. } => write!(f, "{subpattern}: _"),
            PatKind::Binding { mutability, name, mode, ref subpattern, .. } => {
                let is_mut = match mode {
                    BindingMode::ByValue => mutability == Mutability::Mut,
@@ -801,9 +801,9 @@ impl<'tcx> fmt::Display for Pat<'tcx> {
                if is_mut {
                    write!(f, "mut ")?;
                }
-                write!(f, "{}", name)?;
+                write!(f, "{name}")?;
                if let Some(ref subpattern) = *subpattern {
-                    write!(f, " @ {}", subpattern)?;
+                    write!(f, " @ {subpattern}")?;
                }
                Ok(())
            }
@@ -833,7 +833,7 @@ impl<'tcx> fmt::Display for Pat<'tcx> {
                };

                if let Some((variant, name)) = &variant_and_name {
-                    write!(f, "{}", name)?;
+                    write!(f, "{name}")?;

                    // Only for Adt we can have `S {...}`,
                    // which we handle separately here.
@@ -893,13 +893,13 @@ impl<'tcx> fmt::Display for Pat<'tcx> {
                    }
                    _ => bug!("{} is a bad Deref pattern type", self.ty),
                }
-                write!(f, "{}", subpattern)
+                write!(f, "{subpattern}")
            }
-            PatKind::Constant { value } => write!(f, "{}", value),
+            PatKind::Constant { value } => write!(f, "{value}"),
            PatKind::Range(box PatRange { lo, hi, end }) => {
-                write!(f, "{}", lo)?;
+                write!(f, "{lo}")?;
-                write!(f, "{}", end)?;
+                write!(f, "{end}")?;
-                write!(f, "{}", hi)
+                write!(f, "{hi}")
            }
            PatKind::Slice { ref prefix, ref slice, ref suffix }
            | PatKind::Array { ref prefix, ref slice, ref suffix } => {
@@ -911,7 +911,7 @@ impl<'tcx> fmt::Display for Pat<'tcx> {
                    write!(f, "{}", start_or_comma())?;
                    match slice.kind {
                        PatKind::Wild => {}
-                        _ => write!(f, "{}", slice)?,
+                        _ => write!(f, "{slice}")?,
                    }
                    write!(f, "..")?;
                }
@@ -777,49 +777,48 @@ impl ObjectSafetyViolation {
                "where clause cannot reference non-lifetime `for<...>` variables".into()
            }
            ObjectSafetyViolation::Method(name, MethodViolationCode::StaticMethod(_), _) => {
-                format!("associated function `{}` has no `self` parameter", name).into()
+                format!("associated function `{name}` has no `self` parameter").into()
            }
            ObjectSafetyViolation::Method(
                name,
                MethodViolationCode::ReferencesSelfInput(_),
                DUMMY_SP,
-            ) => format!("method `{}` references the `Self` type in its parameters", name).into(),
+            ) => format!("method `{name}` references the `Self` type in its parameters").into(),
            ObjectSafetyViolation::Method(name, MethodViolationCode::ReferencesSelfInput(_), _) => {
-                format!("method `{}` references the `Self` type in this parameter", name).into()
+                format!("method `{name}` references the `Self` type in this parameter").into()
            }
            ObjectSafetyViolation::Method(name, MethodViolationCode::ReferencesSelfOutput, _) => {
-                format!("method `{}` references the `Self` type in its return type", name).into()
+                format!("method `{name}` references the `Self` type in its return type").into()
            }
            ObjectSafetyViolation::Method(
                name,
                MethodViolationCode::ReferencesImplTraitInTrait(_),
                _,
-            ) => format!("method `{}` references an `impl Trait` type in its return type", name)
-                .into(),
+            ) => {
+                format!("method `{name}` references an `impl Trait` type in its return type").into()
+            }
            ObjectSafetyViolation::Method(name, MethodViolationCode::AsyncFn, _) => {
-                format!("method `{}` is `async`", name).into()
+                format!("method `{name}` is `async`").into()
            }
            ObjectSafetyViolation::Method(
                name,
                MethodViolationCode::WhereClauseReferencesSelf,
                _,
-            ) => {
-                format!("method `{}` references the `Self` type in its `where` clause", name).into()
-            }
+            ) => format!("method `{name}` references the `Self` type in its `where` clause").into(),
            ObjectSafetyViolation::Method(name, MethodViolationCode::Generic, _) => {
-                format!("method `{}` has generic type parameters", name).into()
+                format!("method `{name}` has generic type parameters").into()
            }
            ObjectSafetyViolation::Method(
                name,
                MethodViolationCode::UndispatchableReceiver(_),
                _,
-            ) => format!("method `{}`'s `self` parameter cannot be dispatched on", name).into(),
+            ) => format!("method `{name}`'s `self` parameter cannot be dispatched on").into(),
            ObjectSafetyViolation::AssocConst(name, DUMMY_SP) => {
-                format!("it contains associated `const` `{}`", name).into()
+                format!("it contains associated `const` `{name}`").into()
            }
            ObjectSafetyViolation::AssocConst(..) => "it contains this associated `const`".into(),
            ObjectSafetyViolation::GAT(name, _) => {
-                format!("it contains the generic associated type `{}`", name).into()
+                format!("it contains the generic associated type `{name}`").into()
            }
        }
    }
@@ -837,8 +836,7 @@ impl ObjectSafetyViolation {
                err.span_suggestion(
                    add_self_sugg.1,
                    format!(
-                        "consider turning `{}` into a method by giving it a `&self` argument",
-                        name
+                        "consider turning `{name}` into a method by giving it a `&self` argument"
                    ),
                    add_self_sugg.0.to_string(),
                    Applicability::MaybeIncorrect,
@@ -846,9 +844,8 @@ impl ObjectSafetyViolation {
                err.span_suggestion(
                    make_sized_sugg.1,
                    format!(
-                        "alternatively, consider constraining `{}` so it does not apply to \
-                         trait objects",
-                        name
+                        "alternatively, consider constraining `{name}` so it does not apply to \
+                         trait objects"
                    ),
                    make_sized_sugg.0.to_string(),
                    Applicability::MaybeIncorrect,
@@ -861,7 +858,7 @@ impl ObjectSafetyViolation {
            ) => {
                err.span_suggestion(
                    *span,
-                    format!("consider changing method `{}`'s `self` parameter to be `&self`", name),
+                    format!("consider changing method `{name}`'s `self` parameter to be `&self`"),
                    "&Self",
                    Applicability::MachineApplicable,
                );
@@ -869,7 +866,7 @@ impl ObjectSafetyViolation {
            ObjectSafetyViolation::AssocConst(name, _)
            | ObjectSafetyViolation::GAT(name, _)
            | ObjectSafetyViolation::Method(name, ..) => {
-                err.help(format!("consider moving `{}` to another trait", name));
+                err.help(format!("consider moving `{name}` to another trait"));
            }
        }
    }
@@ -68,7 +68,7 @@ impl<'a, 'b> ProofTreeFormatter<'a, 'b> {
        writeln!(self.f, "NESTED GOALS ADDED TO CALLER: [")?;
        self.nested(|this| {
            for goal in goal.returned_goals.iter() {
-                writeln!(this.f, "ADDED GOAL: {:?},", goal)?;
+                writeln!(this.f, "ADDED GOAL: {goal:?},")?;
            }
            Ok(())
        })?;
@@ -104,7 +104,7 @@ impl<'a, 'b> ProofTreeFormatter<'a, 'b> {
                writeln!(self.f, "ASSEMBLING CANDIDATES FOR DYN UPCASTING:")
            }
            CandidateKind::Candidate { name, result } => {
-                writeln!(self.f, "CANDIDATE {}: {:?}", name, result)
+                writeln!(self.f, "CANDIDATE {name}: {result:?}")
            }
        }?;

@@ -43,7 +43,7 @@ impl Graph {
    /// The parent of a given impl, which is the `DefId` of the trait when the
    /// impl is a "specialization root".
    pub fn parent(&self, child: DefId) -> DefId {
-        *self.parent.get(&child).unwrap_or_else(|| panic!("Failed to get parent for {:?}", child))
+        *self.parent.get(&child).unwrap_or_else(|| panic!("Failed to get parent for {child:?}"))
    }
 }

@@ -7,14 +7,14 @@ use std::fmt;
 impl<'tcx, N: fmt::Debug> fmt::Debug for traits::ImplSource<'tcx, N> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
-            super::ImplSource::UserDefined(v) => write!(f, "{:?}", v),
+            super::ImplSource::UserDefined(v) => write!(f, "{v:?}"),

            super::ImplSource::Builtin(source, d) => {
                write!(f, "Builtin({source:?}, {d:?})")
            }

            super::ImplSource::Param(ct, n) => {
-                write!(f, "ImplSourceParamData({:?}, {:?})", n, ct)
+                write!(f, "ImplSourceParamData({n:?}, {ct:?})")
            }
        }
    }
@@ -348,7 +348,7 @@ pub fn place_to_string_for_capture<'tcx>(tcx: TyCtxt<'tcx>, place: &HirPlace<'tc
    for (i, proj) in place.projections.iter().enumerate() {
        match proj.kind {
            HirProjectionKind::Deref => {
-                curr_string = format!("*{}", curr_string);
+                curr_string = format!("*{curr_string}");
            }
            HirProjectionKind::Field(idx, variant) => match place.ty_before_projection(i).kind() {
                ty::Adt(def, ..) => {
@@ -212,7 +212,7 @@ impl<'tcx> Const<'tcx> {
            Err(e) => {
                tcx.sess.delay_span_bug(
                    expr.span,
-                    format!("Const::from_anon_const: couldn't lit_to_const {:?}", e),
+                    format!("Const::from_anon_const: couldn't lit_to_const {e:?}"),
                );
            }
        }
@@ -267,7 +267,7 @@ impl<'tcx> Const<'tcx> {
    pub fn from_bits(tcx: TyCtxt<'tcx>, bits: u128, ty: ParamEnvAnd<'tcx, Ty<'tcx>>) -> Self {
        let size = tcx
            .layout_of(ty)
-            .unwrap_or_else(|e| panic!("could not compute layout for {:?}: {:?}", ty, e))
+            .unwrap_or_else(|e| panic!("could not compute layout for {ty:?}: {e:?}"))
            .size;
        ty::Const::new_value(
            tcx,
@@ -463,7 +463,7 @@ impl TryFrom<ScalarInt> for Double {
 impl fmt::Debug for ScalarInt {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Dispatch to LowerHex below.
-        write!(f, "0x{:x}", self)
+        write!(f, "0x{self:x}")
    }
 }

@@ -17,7 +17,7 @@ pub struct UnevaluatedConst<'tcx> {

 impl rustc_errors::IntoDiagnosticArg for UnevaluatedConst<'_> {
    fn into_diagnostic_arg(self) -> rustc_errors::DiagnosticArgValue<'static> {
-        format!("{:?}", self).into_diagnostic_arg()
+        format!("{self:?}").into_diagnostic_arg()
    }
 }

@@ -126,7 +126,7 @@ pub fn suggest_arbitrary_trait_bound<'tcx>(
    if constraint.ends_with('>') {
        constraint = format!("{}, {} = {}>", &constraint[..constraint.len() - 1], name, term);
    } else {
-        constraint.push_str(&format!("<{} = {}>", name, term));
+        constraint.push_str(&format!("<{name} = {term}>"));
    }
 }

@ -274,9 +274,9 @@ pub fn suggest_constraining_type_params<'a>(
|
|||||||
if span_to_replace.is_some() {
|
if span_to_replace.is_some() {
|
||||||
constraint.clone()
|
constraint.clone()
|
||||||
} else if bound_list_non_empty {
|
} else if bound_list_non_empty {
|
||||||
format!(" + {}", constraint)
|
format!(" + {constraint}")
|
||||||
} else {
|
} else {
|
||||||
format!(" {}", constraint)
|
format!(" {constraint}")
|
||||||
},
|
},
|
||||||
SuggestChangingConstraintsMessage::RestrictBoundFurther,
|
SuggestChangingConstraintsMessage::RestrictBoundFurther,
|
||||||
))
|
))
|
||||||
@ -337,7 +337,7 @@ pub fn suggest_constraining_type_params<'a>(
|
|||||||
generics.tail_span_for_predicate_suggestion(),
|
generics.tail_span_for_predicate_suggestion(),
|
||||||
constraints
|
constraints
|
||||||
.iter()
|
.iter()
|
||||||
.map(|&(constraint, _)| format!(", {}: {}", param_name, constraint))
|
.map(|&(constraint, _)| format!(", {param_name}: {constraint}"))
|
||||||
.collect::<String>(),
|
.collect::<String>(),
|
||||||
SuggestChangingConstraintsMessage::RestrictTypeFurther { ty: param_name },
|
SuggestChangingConstraintsMessage::RestrictTypeFurther { ty: param_name },
|
||||||
));
|
));
|
||||||
@ -358,7 +358,7 @@ pub fn suggest_constraining_type_params<'a>(
|
|||||||
// default (`<T=Foo>`), so we suggest adding `where T: Bar`.
|
// default (`<T=Foo>`), so we suggest adding `where T: Bar`.
|
||||||
suggestions.push((
|
suggestions.push((
|
||||||
generics.tail_span_for_predicate_suggestion(),
|
generics.tail_span_for_predicate_suggestion(),
|
||||||
format!(" where {}: {}", param_name, constraint),
|
format!(" where {param_name}: {constraint}"),
|
||||||
SuggestChangingConstraintsMessage::RestrictTypeFurther { ty: param_name },
|
SuggestChangingConstraintsMessage::RestrictTypeFurther { ty: param_name },
|
||||||
));
|
));
|
||||||
continue;
|
continue;
|
||||||
@ -371,7 +371,7 @@ pub fn suggest_constraining_type_params<'a>(
|
|||||||
if let Some(colon_span) = param.colon_span {
|
if let Some(colon_span) = param.colon_span {
|
||||||
suggestions.push((
|
suggestions.push((
|
||||||
colon_span.shrink_to_hi(),
|
colon_span.shrink_to_hi(),
|
||||||
format!(" {}", constraint),
|
format!(" {constraint}"),
|
||||||
SuggestChangingConstraintsMessage::RestrictType { ty: param_name },
|
SuggestChangingConstraintsMessage::RestrictType { ty: param_name },
|
||||||
));
|
));
|
||||||
continue;
|
continue;
|
||||||
@ -383,7 +383,7 @@ pub fn suggest_constraining_type_params<'a>(
|
|||||||
// - help: consider restricting this type parameter with `T: Foo`
|
// - help: consider restricting this type parameter with `T: Foo`
|
||||||
suggestions.push((
|
suggestions.push((
|
||||||
param.span.shrink_to_hi(),
|
param.span.shrink_to_hi(),
|
||||||
format!(": {}", constraint),
|
format!(": {constraint}"),
|
||||||
SuggestChangingConstraintsMessage::RestrictType { ty: param_name },
|
SuggestChangingConstraintsMessage::RestrictType { ty: param_name },
|
||||||
));
|
));
|
||||||
}
|
}
|
||||||
@ -401,10 +401,10 @@ pub fn suggest_constraining_type_params<'a>(
|
|||||||
Cow::from("consider further restricting this bound")
|
Cow::from("consider further restricting this bound")
|
||||||
}
|
}
|
||||||
SuggestChangingConstraintsMessage::RestrictType { ty } => {
|
SuggestChangingConstraintsMessage::RestrictType { ty } => {
|
||||||
Cow::from(format!("consider restricting type parameter `{}`", ty))
|
Cow::from(format!("consider restricting type parameter `{ty}`"))
|
||||||
}
|
}
|
||||||
SuggestChangingConstraintsMessage::RestrictTypeFurther { ty } => {
|
SuggestChangingConstraintsMessage::RestrictTypeFurther { ty } => {
|
||||||
Cow::from(format!("consider further restricting type parameter `{}`", ty))
|
Cow::from(format!("consider further restricting type parameter `{ty}`"))
|
||||||
}
|
}
|
||||||
SuggestChangingConstraintsMessage::RemoveMaybeUnsized => {
|
SuggestChangingConstraintsMessage::RemoveMaybeUnsized => {
|
||||||
Cow::from("consider removing the `?Sized` bound to make the type parameter `Sized`")
|
Cow::from("consider removing the `?Sized` bound to make the type parameter `Sized`")
|
||||||
|
@ -90,9 +90,9 @@ impl<'tcx> TypeError<'tcx> {
|
|||||||
// A naive approach to making sure that we're not reporting silly errors such as:
|
// A naive approach to making sure that we're not reporting silly errors such as:
|
||||||
// (expected closure, found closure).
|
// (expected closure, found closure).
|
||||||
if expected == found {
|
if expected == found {
|
||||||
format!("expected {}, found a different {}", expected, found)
|
format!("expected {expected}, found a different {found}")
|
||||||
} else {
|
} else {
|
||||||
format!("expected {}, found {}", expected, found)
|
format!("expected {expected}, found {found}")
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -131,7 +131,7 @@ impl<'tcx> TypeError<'tcx> {
|
|||||||
)
|
)
|
||||||
.into(),
|
.into(),
|
||||||
ArgCount => "incorrect number of function parameters".into(),
|
ArgCount => "incorrect number of function parameters".into(),
|
||||||
FieldMisMatch(adt, field) => format!("field type mismatch: {}.{}", adt, field).into(),
|
FieldMisMatch(adt, field) => format!("field type mismatch: {adt}.{field}").into(),
|
||||||
RegionsDoesNotOutlive(..) => "lifetime mismatch".into(),
|
RegionsDoesNotOutlive(..) => "lifetime mismatch".into(),
|
||||||
// Actually naming the region here is a bit confusing because context is lacking
|
// Actually naming the region here is a bit confusing because context is lacking
|
||||||
RegionsInsufficientlyPolymorphic(..) => {
|
RegionsInsufficientlyPolymorphic(..) => {
|
||||||
@ -164,7 +164,7 @@ impl<'tcx> TypeError<'tcx> {
|
|||||||
ty::IntVarValue::IntType(ty) => ty.name_str(),
|
ty::IntVarValue::IntType(ty) => ty.name_str(),
|
||||||
ty::IntVarValue::UintType(ty) => ty.name_str(),
|
ty::IntVarValue::UintType(ty) => ty.name_str(),
|
||||||
};
|
};
|
||||||
format!("expected `{}`, found `{}`", expected, found).into()
|
format!("expected `{expected}`, found `{found}`").into()
|
||||||
}
|
}
|
||||||
FloatMismatch(ref values) => format!(
|
FloatMismatch(ref values) => format!(
|
||||||
"expected `{}`, found `{}`",
|
"expected `{}`, found `{}`",
|
||||||
|
@ -308,13 +308,13 @@ fn fmt_instance(
|
|||||||
InstanceDef::ReifyShim(_) => write!(f, " - shim(reify)"),
|
InstanceDef::ReifyShim(_) => write!(f, " - shim(reify)"),
|
||||||
InstanceDef::ThreadLocalShim(_) => write!(f, " - shim(tls)"),
|
InstanceDef::ThreadLocalShim(_) => write!(f, " - shim(tls)"),
|
||||||
InstanceDef::Intrinsic(_) => write!(f, " - intrinsic"),
|
InstanceDef::Intrinsic(_) => write!(f, " - intrinsic"),
|
||||||
InstanceDef::Virtual(_, num) => write!(f, " - virtual#{}", num),
|
InstanceDef::Virtual(_, num) => write!(f, " - virtual#{num}"),
|
||||||
InstanceDef::FnPtrShim(_, ty) => write!(f, " - shim({})", ty),
|
InstanceDef::FnPtrShim(_, ty) => write!(f, " - shim({ty})"),
|
||||||
InstanceDef::ClosureOnceShim { .. } => write!(f, " - shim"),
|
InstanceDef::ClosureOnceShim { .. } => write!(f, " - shim"),
|
||||||
InstanceDef::DropGlue(_, None) => write!(f, " - shim(None)"),
|
InstanceDef::DropGlue(_, None) => write!(f, " - shim(None)"),
|
||||||
InstanceDef::DropGlue(_, Some(ty)) => write!(f, " - shim(Some({}))", ty),
|
InstanceDef::DropGlue(_, Some(ty)) => write!(f, " - shim(Some({ty}))"),
|
||||||
InstanceDef::CloneShim(_, ty) => write!(f, " - shim({})", ty),
|
InstanceDef::CloneShim(_, ty) => write!(f, " - shim({ty})"),
|
||||||
InstanceDef::FnPtrAddrShim(_, ty) => write!(f, " - shim({})", ty),
|
InstanceDef::FnPtrAddrShim(_, ty) => write!(f, " - shim({ty})"),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -336,9 +336,7 @@ impl<'tcx> Instance<'tcx> {
|
|||||||
pub fn new(def_id: DefId, args: GenericArgsRef<'tcx>) -> Instance<'tcx> {
|
pub fn new(def_id: DefId, args: GenericArgsRef<'tcx>) -> Instance<'tcx> {
|
||||||
assert!(
|
assert!(
|
||||||
!args.has_escaping_bound_vars(),
|
!args.has_escaping_bound_vars(),
|
||||||
"args of instance {:?} not normalized for codegen: {:?}",
|
"args of instance {def_id:?} not normalized for codegen: {args:?}"
|
||||||
def_id,
|
|
||||||
args
|
|
||||||
);
|
);
|
||||||
Instance { def: InstanceDef::Item(def_id), args }
|
Instance { def: InstanceDef::Item(def_id), args }
|
||||||
}
|
}
|
||||||
@ -425,7 +423,7 @@ impl<'tcx> Instance<'tcx> {
|
|||||||
) -> Option<Instance<'tcx>> {
|
) -> Option<Instance<'tcx>> {
|
||||||
debug!("resolve(def_id={:?}, args={:?})", def_id, args);
|
debug!("resolve(def_id={:?}, args={:?})", def_id, args);
|
||||||
// Use either `resolve_closure` or `resolve_for_vtable`
|
// Use either `resolve_closure` or `resolve_for_vtable`
|
||||||
assert!(!tcx.is_closure(def_id), "Called `resolve_for_fn_ptr` on closure: {:?}", def_id);
|
assert!(!tcx.is_closure(def_id), "Called `resolve_for_fn_ptr` on closure: {def_id:?}");
|
||||||
Instance::resolve(tcx, param_env, def_id, args).ok().flatten().map(|mut resolved| {
|
Instance::resolve(tcx, param_env, def_id, args).ok().flatten().map(|mut resolved| {
|
||||||
match resolved.def {
|
match resolved.def {
|
||||||
InstanceDef::Item(def) if resolved.def.requires_caller_location(tcx) => {
|
InstanceDef::Item(def) if resolved.def.requires_caller_location(tcx) => {
|
||||||
|
@ -249,9 +249,9 @@ impl<'tcx> LayoutError<'tcx> {
|
|||||||
impl<'tcx> fmt::Display for LayoutError<'tcx> {
|
impl<'tcx> fmt::Display for LayoutError<'tcx> {
|
||||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||||
match *self {
|
match *self {
|
||||||
LayoutError::Unknown(ty) => write!(f, "the type `{}` has an unknown layout", ty),
|
LayoutError::Unknown(ty) => write!(f, "the type `{ty}` has an unknown layout"),
|
||||||
LayoutError::SizeOverflow(ty) => {
|
LayoutError::SizeOverflow(ty) => {
|
||||||
write!(f, "values of the type `{}` are too big for the current architecture", ty)
|
write!(f, "values of the type `{ty}` are too big for the current architecture")
|
||||||
}
|
}
|
||||||
LayoutError::NormalizationFailure(t, e) => write!(
|
LayoutError::NormalizationFailure(t, e) => write!(
|
||||||
f,
|
f,
|
||||||
|
@ -958,9 +958,9 @@ pub struct Term<'tcx> {
|
|||||||
impl Debug for Term<'_> {
|
impl Debug for Term<'_> {
|
||||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||||
let data = if let Some(ty) = self.ty() {
|
let data = if let Some(ty) = self.ty() {
|
||||||
format!("Term::Ty({:?})", ty)
|
format!("Term::Ty({ty:?})")
|
||||||
} else if let Some(ct) = self.ct() {
|
} else if let Some(ct) = self.ct() {
|
||||||
format!("Term::Ct({:?})", ct)
|
format!("Term::Ct({ct:?})")
|
||||||
} else {
|
} else {
|
||||||
unreachable!()
|
unreachable!()
|
||||||
};
|
};
|
||||||
|
@ -20,8 +20,8 @@ pub enum NormalizationError<'tcx> {
|
|||||||
impl<'tcx> NormalizationError<'tcx> {
|
impl<'tcx> NormalizationError<'tcx> {
|
||||||
pub fn get_type_for_failure(&self) -> String {
|
pub fn get_type_for_failure(&self) -> String {
|
||||||
match self {
|
match self {
|
||||||
NormalizationError::Type(t) => format!("{}", t),
|
NormalizationError::Type(t) => format!("{t}"),
|
||||||
NormalizationError::Const(c) => format!("{}", c),
|
NormalizationError::Const(c) => format!("{c}"),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -124,7 +124,7 @@ impl<'tcx> TypeFolder<TyCtxt<'tcx>> for ReverseMapper<'tcx> {
|
|||||||
|
|
||||||
match self.map.get(&r.into()).map(|k| k.unpack()) {
|
match self.map.get(&r.into()).map(|k| k.unpack()) {
|
||||||
Some(GenericArgKind::Lifetime(r1)) => r1,
|
Some(GenericArgKind::Lifetime(r1)) => r1,
|
||||||
Some(u) => panic!("region mapped to unexpected kind: {:?}", u),
|
Some(u) => panic!("region mapped to unexpected kind: {u:?}"),
|
||||||
None if self.do_not_error => self.tcx.lifetimes.re_static,
|
None if self.do_not_error => self.tcx.lifetimes.re_static,
|
||||||
None => {
|
None => {
|
||||||
let e = self
|
let e = self
|
||||||
@ -134,9 +134,8 @@ impl<'tcx> TypeFolder<TyCtxt<'tcx>> for ReverseMapper<'tcx> {
|
|||||||
.span_label(
|
.span_label(
|
||||||
self.span,
|
self.span,
|
||||||
format!(
|
format!(
|
||||||
"lifetime `{}` is part of concrete type but not used in \
|
"lifetime `{r}` is part of concrete type but not used in \
|
||||||
parameter list of the `impl Trait` type alias",
|
parameter list of the `impl Trait` type alias"
|
||||||
r
|
|
||||||
),
|
),
|
||||||
)
|
)
|
||||||
.emit();
|
.emit();
|
||||||
@ -169,7 +168,7 @@ impl<'tcx> TypeFolder<TyCtxt<'tcx>> for ReverseMapper<'tcx> {
|
|||||||
// Found it in the substitution list; replace with the parameter from the
|
// Found it in the substitution list; replace with the parameter from the
|
||||||
// opaque type.
|
// opaque type.
|
||||||
Some(GenericArgKind::Type(t1)) => t1,
|
Some(GenericArgKind::Type(t1)) => t1,
|
||||||
Some(u) => panic!("type mapped to unexpected kind: {:?}", u),
|
Some(u) => panic!("type mapped to unexpected kind: {u:?}"),
|
||||||
None => {
|
None => {
|
||||||
debug!(?param, ?self.map);
|
debug!(?param, ?self.map);
|
||||||
if !self.ignore_errors {
|
if !self.ignore_errors {
|
||||||
@ -178,9 +177,8 @@ impl<'tcx> TypeFolder<TyCtxt<'tcx>> for ReverseMapper<'tcx> {
|
|||||||
.struct_span_err(
|
.struct_span_err(
|
||||||
self.span,
|
self.span,
|
||||||
format!(
|
format!(
|
||||||
"type parameter `{}` is part of concrete type but not \
|
"type parameter `{ty}` is part of concrete type but not \
|
||||||
used in parameter list for the `impl Trait` type alias",
|
used in parameter list for the `impl Trait` type alias"
|
||||||
ty
|
|
||||||
),
|
),
|
||||||
)
|
)
|
||||||
.emit();
|
.emit();
|
||||||
@ -205,7 +203,7 @@ impl<'tcx> TypeFolder<TyCtxt<'tcx>> for ReverseMapper<'tcx> {
|
|||||||
// Found it in the substitution list, replace with the parameter from the
|
// Found it in the substitution list, replace with the parameter from the
|
||||||
// opaque type.
|
// opaque type.
|
||||||
Some(GenericArgKind::Const(c1)) => c1,
|
Some(GenericArgKind::Const(c1)) => c1,
|
||||||
Some(u) => panic!("const mapped to unexpected kind: {:?}", u),
|
Some(u) => panic!("const mapped to unexpected kind: {u:?}"),
|
||||||
None => {
|
None => {
|
||||||
let guar = self
|
let guar = self
|
||||||
.tcx
|
.tcx
|
||||||
|
@ -1497,7 +1497,7 @@ pub trait PrettyPrinter<'tcx>:
|
|||||||
let data = int.assert_bits(self.tcx().data_layout.pointer_size);
|
let data = int.assert_bits(self.tcx().data_layout.pointer_size);
|
||||||
self = self.typed_value(
|
self = self.typed_value(
|
||||||
|mut this| {
|
|mut this| {
|
||||||
write!(this, "0x{:x}", data)?;
|
write!(this, "0x{data:x}")?;
|
||||||
Ok(this)
|
Ok(this)
|
||||||
},
|
},
|
||||||
|this| this.print_type(ty),
|
|this| this.print_type(ty),
|
||||||
@ -1510,7 +1510,7 @@ pub trait PrettyPrinter<'tcx>:
|
|||||||
if int.size() == Size::ZERO {
|
if int.size() == Size::ZERO {
|
||||||
write!(this, "transmute(())")?;
|
write!(this, "transmute(())")?;
|
||||||
} else {
|
} else {
|
||||||
write!(this, "transmute(0x{:x})", int)?;
|
write!(this, "transmute(0x{int:x})")?;
|
||||||
}
|
}
|
||||||
Ok(this)
|
Ok(this)
|
||||||
};
|
};
|
||||||
@ -2348,10 +2348,10 @@ impl<'tcx> FmtPrinter<'_, 'tcx> {
|
|||||||
} else {
|
} else {
|
||||||
cont
|
cont
|
||||||
};
|
};
|
||||||
let _ = write!(cx, "{}", w);
|
let _ = write!(cx, "{w}");
|
||||||
};
|
};
|
||||||
let do_continue = |cx: &mut Self, cont: Symbol| {
|
let do_continue = |cx: &mut Self, cont: Symbol| {
|
||||||
let _ = write!(cx, "{}", cont);
|
let _ = write!(cx, "{cont}");
|
||||||
};
|
};
|
||||||
|
|
||||||
define_scoped_cx!(self);
|
define_scoped_cx!(self);
|
||||||
@ -2387,7 +2387,7 @@ impl<'tcx> FmtPrinter<'_, 'tcx> {
|
|||||||
let (new_value, map) = if self.should_print_verbose() {
|
let (new_value, map) = if self.should_print_verbose() {
|
||||||
for var in value.bound_vars().iter() {
|
for var in value.bound_vars().iter() {
|
||||||
start_or_continue(&mut self, "for<", ", ");
|
start_or_continue(&mut self, "for<", ", ");
|
||||||
write!(self, "{:?}", var)?;
|
write!(self, "{var:?}")?;
|
||||||
}
|
}
|
||||||
start_or_continue(&mut self, "", "> ");
|
start_or_continue(&mut self, "", "> ");
|
||||||
(value.clone().skip_binder(), BTreeMap::default())
|
(value.clone().skip_binder(), BTreeMap::default())
|
||||||
|
@ -73,9 +73,9 @@ impl fmt::Debug for ty::BoundRegionKind {
|
|||||||
ty::BrAnon(span) => write!(f, "BrAnon({span:?})"),
|
ty::BrAnon(span) => write!(f, "BrAnon({span:?})"),
|
||||||
ty::BrNamed(did, name) => {
|
ty::BrNamed(did, name) => {
|
||||||
if did.is_crate_root() {
|
if did.is_crate_root() {
|
||||||
write!(f, "BrNamed({})", name)
|
write!(f, "BrNamed({name})")
|
||||||
} else {
|
} else {
|
||||||
write!(f, "BrNamed({:?}, {})", did, name)
|
write!(f, "BrNamed({did:?}, {name})")
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
ty::BrEnv => write!(f, "BrEnv"),
|
ty::BrEnv => write!(f, "BrEnv"),
|
||||||
@ -205,7 +205,7 @@ impl<'tcx> fmt::Debug for ty::ClauseKind<'tcx> {
|
|||||||
ty::ClauseKind::RegionOutlives(ref pair) => pair.fmt(f),
|
ty::ClauseKind::RegionOutlives(ref pair) => pair.fmt(f),
|
||||||
ty::ClauseKind::TypeOutlives(ref pair) => pair.fmt(f),
|
ty::ClauseKind::TypeOutlives(ref pair) => pair.fmt(f),
|
||||||
ty::ClauseKind::Projection(ref pair) => pair.fmt(f),
|
ty::ClauseKind::Projection(ref pair) => pair.fmt(f),
|
||||||
ty::ClauseKind::WellFormed(ref data) => write!(f, "WellFormed({:?})", data),
|
ty::ClauseKind::WellFormed(ref data) => write!(f, "WellFormed({data:?})"),
|
||||||
ty::ClauseKind::ConstEvaluatable(ct) => {
|
ty::ClauseKind::ConstEvaluatable(ct) => {
|
||||||
write!(f, "ConstEvaluatable({ct:?})")
|
write!(f, "ConstEvaluatable({ct:?})")
|
||||||
}
|
}
|
||||||
@ -220,12 +220,12 @@ impl<'tcx> fmt::Debug for ty::PredicateKind<'tcx> {
|
|||||||
ty::PredicateKind::Subtype(ref pair) => pair.fmt(f),
|
ty::PredicateKind::Subtype(ref pair) => pair.fmt(f),
|
||||||
ty::PredicateKind::Coerce(ref pair) => pair.fmt(f),
|
ty::PredicateKind::Coerce(ref pair) => pair.fmt(f),
|
||||||
ty::PredicateKind::ObjectSafe(trait_def_id) => {
|
ty::PredicateKind::ObjectSafe(trait_def_id) => {
|
||||||
write!(f, "ObjectSafe({:?})", trait_def_id)
|
write!(f, "ObjectSafe({trait_def_id:?})")
|
||||||
}
|
}
|
||||||
ty::PredicateKind::ClosureKind(closure_def_id, closure_args, kind) => {
|
ty::PredicateKind::ClosureKind(closure_def_id, closure_args, kind) => {
|
||||||
write!(f, "ClosureKind({:?}, {:?}, {:?})", closure_def_id, closure_args, kind)
|
write!(f, "ClosureKind({closure_def_id:?}, {closure_args:?}, {kind:?})")
|
||||||
}
|
}
|
||||||
ty::PredicateKind::ConstEquate(c1, c2) => write!(f, "ConstEquate({:?}, {:?})", c1, c2),
|
ty::PredicateKind::ConstEquate(c1, c2) => write!(f, "ConstEquate({c1:?}, {c2:?})"),
|
||||||
ty::PredicateKind::Ambiguous => write!(f, "Ambiguous"),
|
ty::PredicateKind::Ambiguous => write!(f, "Ambiguous"),
|
||||||
ty::PredicateKind::AliasRelate(t1, t2, dir) => {
|
ty::PredicateKind::AliasRelate(t1, t2, dir) => {
|
||||||
write!(f, "AliasRelate({t1:?}, {dir:?}, {t2:?})")
|
write!(f, "AliasRelate({t1:?}, {dir:?}, {t2:?})")
|
||||||
|
@ -57,7 +57,7 @@ impl<'tcx> fmt::Display for Discr<'tcx> {
|
|||||||
let x = self.val;
|
let x = self.val;
|
||||||
// sign extend the raw representation to be an i128
|
// sign extend the raw representation to be an i128
|
||||||
let x = size.sign_extend(x) as i128;
|
let x = size.sign_extend(x) as i128;
|
||||||
write!(fmt, "{}", x)
|
write!(fmt, "{x}")
|
||||||
}
|
}
|
||||||
_ => write!(fmt, "{}", self.val),
|
_ => write!(fmt, "{}", self.val),
|
||||||
}
|
}
|
||||||
|
@ -29,8 +29,8 @@ impl<'tcx> fmt::Debug for VtblEntry<'tcx> {
|
|||||||
VtblEntry::MetadataSize => write!(f, "MetadataSize"),
|
VtblEntry::MetadataSize => write!(f, "MetadataSize"),
|
||||||
VtblEntry::MetadataAlign => write!(f, "MetadataAlign"),
|
VtblEntry::MetadataAlign => write!(f, "MetadataAlign"),
|
||||||
VtblEntry::Vacant => write!(f, "Vacant"),
|
VtblEntry::Vacant => write!(f, "Vacant"),
|
||||||
VtblEntry::Method(instance) => write!(f, "Method({})", instance),
|
VtblEntry::Method(instance) => write!(f, "Method({instance})"),
|
||||||
VtblEntry::TraitVPtr(trait_ref) => write!(f, "TraitVPtr({})", trait_ref),
|
VtblEntry::TraitVPtr(trait_ref) => write!(f, "TraitVPtr({trait_ref})"),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -29,7 +29,7 @@ fn opt_span_bug_fmt<S: Into<MultiSpan>>(
|
|||||||
location: &Location<'_>,
|
location: &Location<'_>,
|
||||||
) -> ! {
|
) -> ! {
|
||||||
tls::with_opt(move |tcx| {
|
tls::with_opt(move |tcx| {
|
||||||
let msg = format!("{}: {}", location, args);
|
let msg = format!("{location}: {args}");
|
||||||
match (tcx, span) {
|
match (tcx, span) {
|
||||||
(Some(tcx), Some(span)) => tcx.sess.diagnostic().span_bug(span, msg),
|
(Some(tcx), Some(span)) => tcx.sess.diagnostic().span_bug(span, msg),
|
||||||
(Some(tcx), None) => tcx.sess.diagnostic().bug(msg),
|
(Some(tcx), None) => tcx.sess.diagnostic().bug(msg),
|
||||||
|
@ -17,7 +17,7 @@ pub fn to_readable_str(mut val: usize) -> String {
|
|||||||
groups.push(group.to_string());
|
groups.push(group.to_string());
|
||||||
break;
|
break;
|
||||||
} else {
|
} else {
|
||||||
groups.push(format!("{:03}", group));
|
groups.push(format!("{group:03}"));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -2,5 +2,5 @@ use proc_macro::TokenStream;
|
|||||||
|
|
||||||
#[proc_macro]
|
#[proc_macro]
|
||||||
pub fn let_bcb(item: TokenStream) -> TokenStream {
|
pub fn let_bcb(item: TokenStream) -> TokenStream {
|
||||||
format!("let bcb{} = graph::BasicCoverageBlock::from_usize({});", item, item).parse().unwrap()
|
format!("let bcb{item} = graph::BasicCoverageBlock::from_usize({item});").parse().unwrap()
|
||||||
}
|
}
|
||||||
|
@ -46,7 +46,7 @@ pub fn report_missing_open_delim(
|
|||||||
};
|
};
|
||||||
err.span_label(
|
err.span_label(
|
||||||
unmatch_brace.found_span.shrink_to_lo(),
|
unmatch_brace.found_span.shrink_to_lo(),
|
||||||
format!("missing open `{}` for this delimiter", missed_open),
|
format!("missing open `{missed_open}` for this delimiter"),
|
||||||
);
|
);
|
||||||
reported_missing_open = true;
|
reported_missing_open = true;
|
||||||
}
|
}
|
||||||
|
@ -198,7 +198,7 @@ impl<'a> TokenTreesReader<'a> {
|
|||||||
// An unexpected closing delimiter (i.e., there is no
|
// An unexpected closing delimiter (i.e., there is no
|
||||||
// matching opening delimiter).
|
// matching opening delimiter).
|
||||||
let token_str = token_to_string(&self.token);
|
let token_str = token_to_string(&self.token);
|
||||||
let msg = format!("unexpected closing delimiter: `{}`", token_str);
|
let msg = format!("unexpected closing delimiter: `{token_str}`");
|
||||||
let mut err = self.string_reader.sess.span_diagnostic.struct_span_err(self.token.span, msg);
|
let mut err = self.string_reader.sess.span_diagnostic.struct_span_err(self.token.span, msg);
|
||||||
|
|
||||||
report_suspicious_mismatch_block(
|
report_suspicious_mismatch_block(
|
||||||
|
@ -135,7 +135,7 @@ pub(crate) fn emit_unescape_error(
|
|||||||
"unknown character escape"
|
"unknown character escape"
|
||||||
};
|
};
|
||||||
let ec = escaped_char(c);
|
let ec = escaped_char(c);
|
||||||
let mut diag = handler.struct_span_err(span, format!("{}: `{}`", label, ec));
|
let mut diag = handler.struct_span_err(span, format!("{label}: `{ec}`"));
|
||||||
diag.span_label(span, label);
|
diag.span_label(span, label);
|
||||||
if c == '{' || c == '}' && matches!(mode, Mode::Str | Mode::RawStr) {
|
if c == '{' || c == '}' && matches!(mode, Mode::Str | Mode::RawStr) {
|
||||||
diag.help(
|
diag.help(
|
||||||
@ -151,7 +151,7 @@ pub(crate) fn emit_unescape_error(
|
|||||||
diag.span_suggestion(
|
diag.span_suggestion(
|
||||||
span_with_quotes,
|
span_with_quotes,
|
||||||
"if you meant to write a literal backslash (perhaps escaping in a regular expression), consider a raw string literal",
|
"if you meant to write a literal backslash (perhaps escaping in a regular expression), consider a raw string literal",
|
||||||
format!("r\"{}\"", lit),
|
format!("r\"{lit}\""),
|
||||||
Applicability::MaybeIncorrect,
|
Applicability::MaybeIncorrect,
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
@ -180,21 +180,20 @@ pub(crate) fn emit_unescape_error(
|
|||||||
Mode::RawByteStr => "raw byte string literal",
|
Mode::RawByteStr => "raw byte string literal",
|
||||||
_ => panic!("non-is_byte literal paired with NonAsciiCharInByte"),
|
_ => panic!("non-is_byte literal paired with NonAsciiCharInByte"),
|
||||||
};
|
};
|
||||||
let mut err = handler.struct_span_err(span, format!("non-ASCII character in {}", desc));
|
let mut err = handler.struct_span_err(span, format!("non-ASCII character in {desc}"));
|
||||||
let postfix = if unicode_width::UnicodeWidthChar::width(c).unwrap_or(1) == 0 {
|
let postfix = if unicode_width::UnicodeWidthChar::width(c).unwrap_or(1) == 0 {
|
||||||
format!(" but is {:?}", c)
|
format!(" but is {c:?}")
|
||||||
} else {
|
} else {
|
||||||
String::new()
|
String::new()
|
||||||
};
|
};
|
||||||
err.span_label(span, format!("must be ASCII{}", postfix));
|
err.span_label(span, format!("must be ASCII{postfix}"));
|
||||||
// Note: the \\xHH suggestions are not given for raw byte string
|
// Note: the \\xHH suggestions are not given for raw byte string
|
||||||
// literals, because they are raw and so cannot use any escapes.
|
// literals, because they are raw and so cannot use any escapes.
|
||||||
if (c as u32) <= 0xFF && mode != Mode::RawByteStr {
|
if (c as u32) <= 0xFF && mode != Mode::RawByteStr {
|
||||||
err.span_suggestion(
|
err.span_suggestion(
|
||||||
span,
|
span,
|
||||||
format!(
|
format!(
|
||||||
"if you meant to use the unicode code point for {:?}, use a \\xHH escape",
|
"if you meant to use the unicode code point for {c:?}, use a \\xHH escape"
|
||||||
c
|
|
||||||
),
|
),
|
||||||
format!("\\x{:X}", c as u32),
|
format!("\\x{:X}", c as u32),
|
||||||
Applicability::MaybeIncorrect,
|
Applicability::MaybeIncorrect,
|
||||||
@ -206,7 +205,7 @@ pub(crate) fn emit_unescape_error(
|
|||||||
utf8.push(c);
|
utf8.push(c);
|
||||||
err.span_suggestion(
|
err.span_suggestion(
|
||||||
span,
|
span,
|
||||||
format!("if you meant to use the UTF-8 encoding of {:?}, use \\xHH escapes", c),
|
format!("if you meant to use the UTF-8 encoding of {c:?}, use \\xHH escapes"),
|
||||||
utf8.as_bytes()
|
utf8.as_bytes()
|
||||||
.iter()
|
.iter()
|
||||||
.map(|b: &u8| format!("\\x{:X}", *b))
|
.map(|b: &u8| format!("\\x{:X}", *b))
|
||||||
|
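In the hunk above, calls such as format!("\\x{:X}", c as u32) and format!("\\x{:X}", *b) are left unchanged: implicit capture only works for bare identifiers, so expressions (casts, dereferences, field or method calls) must remain explicit arguments. A small sketch of that limitation, again with illustrative names rather than code from this diff:

fn main() {
    let c = 'é';
    let b: &u8 = &0xFF;
    // `c as u32` and `*b` are expressions, so they cannot be captured
    // inside the format string and stay as positional arguments.
    println!("\\x{:X}", c as u32);
    println!("\\x{:X}", *b);
    // Binding the expression to a name first allows inline capture.
    let code = c as u32;
    println!("\\x{code:X}");
}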
@ -349,7 +349,7 @@ pub(super) fn check_for_substitution(
|
|||||||
let span = Span::with_root_ctxt(pos, pos + Pos::from_usize(ch.len_utf8() * count));
|
let span = Span::with_root_ctxt(pos, pos + Pos::from_usize(ch.len_utf8() * count));
|
||||||
|
|
||||||
let Some((_, ascii_name, token)) = ASCII_ARRAY.iter().find(|&&(s, _, _)| s == ascii_str) else {
|
let Some((_, ascii_name, token)) = ASCII_ARRAY.iter().find(|&&(s, _, _)| s == ascii_str) else {
|
||||||
let msg = format!("substitution character not found for '{}'", ch);
|
let msg = format!("substitution character not found for '{ch}'");
|
||||||
reader.sess.span_diagnostic.span_bug_no_panic(span, msg);
|
reader.sess.span_diagnostic.span_bug_no_panic(span, msg);
|
||||||
return (None, None);
|
return (None, None);
|
||||||
};
|
};
|
||||||
|
@ -247,7 +247,7 @@ pub fn parse_cfg_attr(
|
|||||||
match parse_in(parse_sess, tokens.clone(), "`cfg_attr` input", |p| p.parse_cfg_attr()) {
|
match parse_in(parse_sess, tokens.clone(), "`cfg_attr` input", |p| p.parse_cfg_attr()) {
|
||||||
Ok(r) => return Some(r),
|
Ok(r) => return Some(r),
|
||||||
Err(mut e) => {
|
Err(mut e) => {
|
||||||
e.help(format!("the valid syntax is `{}`", CFG_ATTR_GRAMMAR_HELP))
|
e.help(format!("the valid syntax is `{CFG_ATTR_GRAMMAR_HELP}`"))
|
||||||
.note(CFG_ATTR_NOTE_REF)
|
.note(CFG_ATTR_NOTE_REF)
|
||||||
.emit();
|
.emit();
|
||||||
}
|
}
|
||||||
|
@ -145,13 +145,11 @@ impl ToAttrTokenStream for LazyAttrTokenStreamImpl {
|
|||||||
// another replace range will capture the *replaced* tokens for the inner
|
// another replace range will capture the *replaced* tokens for the inner
|
||||||
// range, not the original tokens.
|
// range, not the original tokens.
|
||||||
for (range, new_tokens) in replace_ranges.into_iter().rev() {
|
for (range, new_tokens) in replace_ranges.into_iter().rev() {
|
||||||
assert!(!range.is_empty(), "Cannot replace an empty range: {:?}", range);
|
assert!(!range.is_empty(), "Cannot replace an empty range: {range:?}");
|
||||||
// Replace ranges are only allowed to decrease the number of tokens.
|
// Replace ranges are only allowed to decrease the number of tokens.
|
||||||
assert!(
|
assert!(
|
||||||
range.len() >= new_tokens.len(),
|
range.len() >= new_tokens.len(),
|
||||||
"Range {:?} has greater len than {:?}",
|
"Range {range:?} has greater len than {new_tokens:?}"
|
||||||
range,
|
|
||||||
new_tokens
|
|
||||||
);
|
);
|
||||||
|
|
||||||
// Replace any removed tokens with `FlatToken::Empty`.
|
// Replace any removed tokens with `FlatToken::Empty`.
|
||||||
@ -409,22 +407,19 @@ fn make_token_stream(
|
|||||||
FlatToken::Token(Token { kind: TokenKind::CloseDelim(delim), span }) => {
|
FlatToken::Token(Token { kind: TokenKind::CloseDelim(delim), span }) => {
|
||||||
let frame_data = stack
|
let frame_data = stack
|
||||||
.pop()
|
.pop()
|
||||||
.unwrap_or_else(|| panic!("Token stack was empty for token: {:?}", token));
|
.unwrap_or_else(|| panic!("Token stack was empty for token: {token:?}"));
|
||||||
|
|
||||||
let (open_delim, open_sp) = frame_data.open_delim_sp.unwrap();
|
let (open_delim, open_sp) = frame_data.open_delim_sp.unwrap();
|
||||||
assert_eq!(
|
assert_eq!(
|
||||||
open_delim, delim,
|
open_delim, delim,
|
||||||
"Mismatched open/close delims: open={:?} close={:?}",
|
"Mismatched open/close delims: open={open_delim:?} close={span:?}"
|
||||||
open_delim, span
|
|
||||||
);
|
);
|
||||||
let dspan = DelimSpan::from_pair(open_sp, span);
|
let dspan = DelimSpan::from_pair(open_sp, span);
|
||||||
let stream = AttrTokenStream::new(frame_data.inner);
|
let stream = AttrTokenStream::new(frame_data.inner);
|
||||||
let delimited = AttrTokenTree::Delimited(dspan, delim, stream);
|
let delimited = AttrTokenTree::Delimited(dspan, delim, stream);
|
||||||
stack
|
stack
|
||||||
.last_mut()
|
.last_mut()
|
||||||
.unwrap_or_else(|| {
|
.unwrap_or_else(|| panic!("Bottom token frame is missing for token: {token:?}"))
|
||||||
panic!("Bottom token frame is missing for token: {:?}", token)
|
|
||||||
})
|
|
||||||
.inner
|
.inner
|
||||||
.push(delimited);
|
.push(delimited);
|
||||||
}
|
}
|
||||||
@ -456,7 +451,7 @@ fn make_token_stream(
|
|||||||
.inner
|
.inner
|
||||||
.push(AttrTokenTree::Token(Token::new(unglued_first, first_span), spacing));
|
.push(AttrTokenTree::Token(Token::new(unglued_first, first_span), spacing));
|
||||||
} else {
|
} else {
|
||||||
panic!("Unexpected last token {:?}", last_token)
|
panic!("Unexpected last token {last_token:?}")
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
AttrTokenStream::new(final_buf.inner)
|
AttrTokenStream::new(final_buf.inner)
|
||||||
|
@ -612,13 +612,13 @@ impl<'a> Parser<'a> {
|
|||||||
if let TokenKind::Ident(prev, _) = &self.prev_token.kind
|
if let TokenKind::Ident(prev, _) = &self.prev_token.kind
|
||||||
&& let TokenKind::Ident(cur, _) = &self.token.kind
|
&& let TokenKind::Ident(cur, _) = &self.token.kind
|
||||||
{
|
{
|
||||||
let concat = Symbol::intern(&format!("{}{}", prev, cur));
|
let concat = Symbol::intern(&format!("{prev}{cur}"));
|
||||||
let ident = Ident::new(concat, DUMMY_SP);
|
let ident = Ident::new(concat, DUMMY_SP);
|
||||||
if ident.is_used_keyword() || ident.is_reserved() || ident.is_raw_guess() {
|
if ident.is_used_keyword() || ident.is_reserved() || ident.is_raw_guess() {
|
||||||
let span = self.prev_token.span.to(self.token.span);
|
let span = self.prev_token.span.to(self.token.span);
|
||||||
err.span_suggestion_verbose(
|
err.span_suggestion_verbose(
|
||||||
span,
|
span,
|
||||||
format!("consider removing the space to spell keyword `{}`", concat),
|
format!("consider removing the space to spell keyword `{concat}`"),
|
||||||
concat,
|
concat,
|
||||||
Applicability::MachineApplicable,
|
Applicability::MachineApplicable,
|
||||||
);
|
);
|
||||||
|
@ -1052,7 +1052,7 @@ impl<'a> Parser<'a> {
|
|||||||
}
|
}
|
||||||
components.push(Punct(c));
|
components.push(Punct(c));
|
||||||
} else {
|
} else {
|
||||||
panic!("unexpected character in a float token: {:?}", c)
|
panic!("unexpected character in a float token: {c:?}")
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if !ident_like.is_empty() {
|
if !ident_like.is_empty() {
|
||||||
@ -1113,7 +1113,7 @@ impl<'a> Parser<'a> {
|
|||||||
self.error_unexpected_after_dot();
|
self.error_unexpected_after_dot();
|
||||||
DestructuredFloat::Error
|
DestructuredFloat::Error
|
||||||
}
|
}
|
||||||
_ => panic!("unexpected components in a float token: {:?}", components),
|
_ => panic!("unexpected components in a float token: {components:?}"),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -375,7 +375,7 @@ impl TokenType {
|
|||||||
fn to_string(&self) -> String {
|
fn to_string(&self) -> String {
|
||||||
match self {
|
match self {
|
||||||
TokenType::Token(t) => format!("`{}`", pprust::token_kind_to_string(t)),
|
TokenType::Token(t) => format!("`{}`", pprust::token_kind_to_string(t)),
|
||||||
TokenType::Keyword(kw) => format!("`{}`", kw),
|
TokenType::Keyword(kw) => format!("`{kw}`"),
|
||||||
TokenType::Operator => "an operator".to_string(),
|
TokenType::Operator => "an operator".to_string(),
|
||||||
TokenType::Lifetime => "lifetime".to_string(),
|
TokenType::Lifetime => "lifetime".to_string(),
|
||||||
TokenType::Ident => "identifier".to_string(),
|
TokenType::Ident => "identifier".to_string(),
|
||||||
@ -445,7 +445,7 @@ pub(super) fn token_descr(token: &Token) -> String {
|
|||||||
TokenDescription::DocComment => "doc comment",
|
TokenDescription::DocComment => "doc comment",
|
||||||
});
|
});
|
||||||
|
|
||||||
if let Some(kind) = kind { format!("{} `{}`", kind, name) } else { format!("`{}`", name) }
|
if let Some(kind) = kind { format!("{kind} `{name}`") } else { format!("`{name}`") }
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<'a> Parser<'a> {
|
impl<'a> Parser<'a> {
|
||||||
@ -929,7 +929,7 @@ impl<'a> Parser<'a> {
|
|||||||
expect_err
|
expect_err
|
||||||
.span_suggestion_short(
|
.span_suggestion_short(
|
||||||
sp,
|
sp,
|
||||||
format!("missing `{}`", token_str),
|
format!("missing `{token_str}`"),
|
||||||
token_str,
|
token_str,
|
||||||
Applicability::MaybeIncorrect,
|
Applicability::MaybeIncorrect,
|
||||||
)
|
)
|
||||||
|
@ -428,7 +428,7 @@ impl<'a> Parser<'a> {
|
|||||||
);
|
);
|
||||||
|
|
||||||
let mut err = self_.struct_span_err(self_.token.span, msg);
|
let mut err = self_.struct_span_err(self_.token.span, msg);
|
||||||
err.span_label(self_.token.span, format!("expected {}", expected));
|
err.span_label(self_.token.span, format!("expected {expected}"));
|
||||||
err
|
err
|
||||||
});
|
});
|
||||||
PatKind::Lit(self.mk_expr(lo, ExprKind::Lit(lit)))
|
PatKind::Lit(self.mk_expr(lo, ExprKind::Lit(lit)))
|
||||||
@ -664,7 +664,7 @@ impl<'a> Parser<'a> {
|
|||||||
let msg = format!("expected {}, found {}", expected, super::token_descr(&self.token));
|
let msg = format!("expected {}, found {}", expected, super::token_descr(&self.token));
|
||||||
|
|
||||||
let mut err = self.struct_span_err(self.token.span, msg);
|
let mut err = self.struct_span_err(self.token.span, msg);
|
||||||
err.span_label(self.token.span, format!("expected {}", expected));
|
err.span_label(self.token.span, format!("expected {expected}"));
|
||||||
|
|
||||||
let sp = self.sess.source_map().start_point(self.token.span);
|
let sp = self.sess.source_map().start_point(self.token.span);
|
||||||
if let Some(sp) = self.sess.ambiguous_block_expr_parse.borrow().get(&sp) {
|
if let Some(sp) = self.sess.ambiguous_block_expr_parse.borrow().get(&sp) {
|
||||||
@ -977,7 +977,7 @@ impl<'a> Parser<'a> {
|
|||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
let token_str = super::token_descr(&self.token);
|
let token_str = super::token_descr(&self.token);
|
||||||
let msg = format!("expected `}}`, found {}", token_str);
|
let msg = format!("expected `}}`, found {token_str}");
|
||||||
let mut err = self.struct_span_err(self.token.span, msg);
|
let mut err = self.struct_span_err(self.token.span, msg);
|
||||||
|
|
||||||
err.span_label(self.token.span, "expected `}`");
|
err.span_label(self.token.span, "expected `}`");
|
||||||
|
@ -679,7 +679,7 @@ impl<'a> Parser<'a> {
|
|||||||
);
|
);
|
||||||
err.span_suggestion(
|
err.span_suggestion(
|
||||||
eq.to(before_next),
|
eq.to(before_next),
|
||||||
format!("remove the `=` if `{}` is a type", ident),
|
format!("remove the `=` if `{ident}` is a type"),
|
||||||
"",
|
"",
|
||||||
Applicability::MaybeIncorrect,
|
Applicability::MaybeIncorrect,
|
||||||
)
|
)
|
||||||
|
@ -300,7 +300,7 @@ impl<'a> Parser<'a> {
|
|||||||
Ok(ty) => (None, Some(ty)),
|
Ok(ty) => (None, Some(ty)),
|
||||||
Err(mut err) => {
|
Err(mut err) => {
|
||||||
if let Ok(snip) = self.span_to_snippet(pat.span) {
|
if let Ok(snip) = self.span_to_snippet(pat.span) {
|
||||||
err.span_label(pat.span, format!("while parsing the type for `{}`", snip));
|
err.span_label(pat.span, format!("while parsing the type for `{snip}`"));
|
||||||
}
|
}
|
||||||
// we use noexpect here because we don't actually expect Eq to be here
|
// we use noexpect here because we don't actually expect Eq to be here
|
||||||
// but we are still checking for it in order to be able to handle it if
|
// but we are still checking for it in order to be able to handle it if
|
||||||
@ -502,7 +502,7 @@ impl<'a> Parser<'a> {
|
|||||||
|
|
||||||
fn error_block_no_opening_brace<T>(&mut self) -> PResult<'a, T> {
|
fn error_block_no_opening_brace<T>(&mut self) -> PResult<'a, T> {
|
||||||
let tok = super::token_descr(&self.token);
|
let tok = super::token_descr(&self.token);
|
||||||
let msg = format!("expected `{{`, found {}", tok);
|
let msg = format!("expected `{{`, found {tok}");
|
||||||
Err(self.error_block_no_opening_brace_msg(Cow::from(msg)))
|
Err(self.error_block_no_opening_brace_msg(Cow::from(msg)))
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -638,10 +638,9 @@ impl<'a> Parser<'a> {
|
|||||||
e.span_suggestion(
|
e.span_suggestion(
|
||||||
sp.with_hi(sp.lo() + BytePos(marker.len() as u32)),
|
sp.with_hi(sp.lo() + BytePos(marker.len() as u32)),
|
||||||
format!(
|
format!(
|
||||||
"add a space before `{}` to use a regular comment",
|
"add a space before `{doc_comment_marker}` to use a regular comment",
|
||||||
doc_comment_marker,
|
|
||||||
),
|
),
|
||||||
format!("{} {}", comment_marker, doc_comment_marker),
|
format!("{comment_marker} {doc_comment_marker}"),
|
||||||
Applicability::MaybeIncorrect,
|
Applicability::MaybeIncorrect,
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
@ -557,7 +557,7 @@ impl<K: DepKind> DepGraph<K> {
|
|||||||
result,
|
result,
|
||||||
prev_index,
|
prev_index,
|
||||||
hash_result,
|
hash_result,
|
||||||
|value| format!("{:?}", value),
|
|value| format!("{value:?}"),
|
||||||
);
|
);
|
||||||
|
|
||||||
#[cfg(debug_assertions)]
|
#[cfg(debug_assertions)]
|
||||||
@ -1433,7 +1433,7 @@ pub(crate) fn print_markframe_trace<K: DepKind>(
|
|||||||
let mut current = frame;
|
let mut current = frame;
|
||||||
while let Some(frame) = current {
|
while let Some(frame) = current {
|
||||||
let node = data.previous.index_to_node(frame.index);
|
let node = data.previous.index_to_node(frame.index);
|
||||||
eprintln!("#{i} {:?}", node);
|
eprintln!("#{i} {node:?}");
|
||||||
current = frame.parent;
|
current = frame.parent;
|
||||||
i += 1;
|
i += 1;
|
||||||
}
|
}
|
||||||
|
@ -1539,9 +1539,8 @@ pub(super) fn build_target_config(
|
|||||||
);
|
);
|
||||||
let (target, target_warnings) = target_result.unwrap_or_else(|e| {
|
let (target, target_warnings) = target_result.unwrap_or_else(|e| {
|
||||||
handler.early_error(format!(
|
handler.early_error(format!(
|
||||||
"Error loading target specification: {}. \
|
"Error loading target specification: {e}. \
|
||||||
Run `rustc --print target-list` for a list of built-in targets",
|
Run `rustc --print target-list` for a list of built-in targets"
|
||||||
e
|
|
||||||
))
|
))
|
||||||
});
|
});
|
||||||
for warning in target_warnings.warning_messages() {
|
for warning in target_warnings.warning_messages() {
|
||||||
@ -1978,8 +1977,7 @@ pub fn parse_crate_edition(handler: &EarlyErrorHandler, matches: &getopts::Match
|
|||||||
let is_nightly = nightly_options::match_is_nightly_build(matches);
|
let is_nightly = nightly_options::match_is_nightly_build(matches);
|
||||||
let msg = if !is_nightly {
|
let msg = if !is_nightly {
|
||||||
format!(
|
format!(
|
||||||
"the crate requires edition {}, but the latest edition supported by this Rust version is {}",
|
"the crate requires edition {edition}, but the latest edition supported by this Rust version is {LATEST_STABLE_EDITION}"
|
||||||
edition, LATEST_STABLE_EDITION
|
|
||||||
)
|
)
|
||||||
} else {
|
} else {
|
||||||
format!("edition {edition} is unstable and only available with -Z unstable-options")
|
format!("edition {edition} is unstable and only available with -Z unstable-options")
|
||||||
|
@ -330,8 +330,7 @@ fn build_options<O: Default>(
|
|||||||
match value {
|
match value {
|
||||||
None => handler.early_error(
|
None => handler.early_error(
|
||||||
format!(
|
format!(
|
||||||
"{0} option `{1}` requires {2} ({3} {1}=<value>)",
|
"{outputname} option `{key}` requires {type_desc} ({prefix} {key}=<value>)"
|
||||||
outputname, key, type_desc, prefix
|
|
||||||
),
|
),
|
||||||
),
|
),
|
||||||
Some(value) => handler.early_error(
|
Some(value) => handler.early_error(
|
||||||
|
@ -408,7 +408,7 @@ pub fn debug_bound_var<T: std::fmt::Write>(
|
|||||||
var: impl std::fmt::Debug,
|
var: impl std::fmt::Debug,
|
||||||
) -> Result<(), std::fmt::Error> {
|
) -> Result<(), std::fmt::Error> {
|
||||||
if debruijn == INNERMOST {
|
if debruijn == INNERMOST {
|
||||||
write!(fmt, "^{:?}", var)
|
write!(fmt, "^{var:?}")
|
||||||
} else {
|
} else {
|
||||||
write!(fmt, "^{}_{:?}", debruijn.index(), var)
|
write!(fmt, "^{}_{:?}", debruijn.index(), var)
|
||||||
}
|
}
|
||||||
|