Remove unnecessary &* sigil pairs in derived code.

By producing `&T` expressions for fields instead of `T`. This matches
what the existing comments (e.g. on `FieldInfo`) claim is happening, and
it's also what most of the trait-specific code needs.

The exception is `PartialEq`, which needs `T` expressions for lots of
special-case error messaging to work. So for `PartialEq` we now convert
the `&T` back to a `T`.
This commit is contained in:
Nicholas Nethercote 2022-07-07 11:09:07 +10:00
parent 277bc9641d
commit 96f09d73cd
8 changed files with 102 additions and 81 deletions

View File

@ -161,7 +161,7 @@ fn cs_clone(
let all_fields;
let fn_path = cx.std_path(&[sym::clone, sym::Clone, sym::clone]);
let subcall = |cx: &mut ExtCtxt<'_>, field: &FieldInfo| {
let args = vec![cx.expr_addr_of(field.span, field.self_expr.clone())];
let args = vec![field.self_expr.clone()];
cx.expr_call_global(field.span, fn_path.clone(), args)
};

View File

@ -63,10 +63,7 @@ pub fn cs_cmp(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>) -> Bl
let [other_expr] = &field.other_selflike_exprs[..] else {
cx.span_bug(field.span, "not exactly 2 arguments in `derive(Ord)`");
};
let args = vec![
cx.expr_addr_of(field.span, field.self_expr.clone()),
cx.expr_addr_of(field.span, other_expr.clone()),
];
let args = vec![field.self_expr.clone(), other_expr.clone()];
cx.expr_call_global(field.span, cmp_path.clone(), args)
}
CsFold::Combine(span, expr1, expr2) => {

View File

@ -2,7 +2,8 @@ use crate::deriving::generic::ty::*;
use crate::deriving::generic::*;
use crate::deriving::{path_local, path_std};
use rustc_ast::{BinOpKind, MetaItem};
use rustc_ast::ptr::P;
use rustc_ast::{BinOpKind, BorrowKind, Expr, ExprKind, MetaItem, Mutability};
use rustc_expand::base::{Annotatable, ExtCtxt};
use rustc_span::symbol::sym;
use rustc_span::Span;
@ -32,7 +33,21 @@ pub fn expand_deriving_partial_eq(
let [other_expr] = &field.other_selflike_exprs[..] else {
cx.span_bug(field.span, "not exactly 2 arguments in `derive(PartialEq)`");
};
cx.expr_binary(field.span, op, field.self_expr.clone(), other_expr.clone())
// We received `&T` arguments. Convert them to `T` by
// stripping `&` or adding `*`. This isn't necessary for
// type checking, but it results in much better error
// messages if something goes wrong.
let convert = |expr: &P<Expr>| {
if let ExprKind::AddrOf(BorrowKind::Ref, Mutability::Not, inner) =
&expr.kind
{
inner.clone()
} else {
cx.expr_deref(field.span, expr.clone())
}
};
cx.expr_binary(field.span, op, convert(&field.self_expr), convert(other_expr))
}
CsFold::Combine(span, expr1, expr2) => cx.expr_binary(span, combiner, expr1, expr2),
CsFold::Fieldless => cx.expr_bool(span, base),

View File

@ -71,10 +71,7 @@ pub fn cs_partial_cmp(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_
let [other_expr] = &field.other_selflike_exprs[..] else {
cx.span_bug(field.span, "not exactly 2 arguments in `derive(Ord)`");
};
let args = vec![
cx.expr_addr_of(field.span, field.self_expr.clone()),
cx.expr_addr_of(field.span, other_expr.clone()),
];
let args = vec![field.self_expr.clone(), other_expr.clone()];
cx.expr_call_global(field.span, partial_cmp_path.clone(), args)
}
CsFold::Combine(span, expr1, expr2) => {

View File

@ -95,9 +95,8 @@ fn show_substructure(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>
);
args.push(name);
}
// Use double indirection to make sure this works for unsized types
// Use an extra indirection to make sure this works for unsized types.
let field = cx.expr_addr_of(field.span, field.self_expr.clone());
let field = cx.expr_addr_of(field.span, field);
args.push(field);
}
let expr = cx.expr_call_global(span, fn_path_debug, args);
@ -115,9 +114,9 @@ fn show_substructure(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>
));
}
// Use double indirection to make sure this works for unsized types
let value_ref = cx.expr_addr_of(field.span, field.self_expr.clone());
value_exprs.push(cx.expr_addr_of(field.span, value_ref));
// Use an extra indirection to make sure this works for unsized types.
let field = cx.expr_addr_of(field.span, field.self_expr.clone());
value_exprs.push(field);
}
// `let names: &'static _ = &["field1", "field2"];`

View File

@ -1004,7 +1004,7 @@ impl<'a> MethodDef<'a> {
/// ```
/// #[derive(PartialEq)]
/// # struct Dummy;
/// struct A { x: i32, y: i32 }
/// struct A { x: u8, y: u8 }
///
/// // equivalent to:
/// impl PartialEq for A {
@ -1016,9 +1016,9 @@ impl<'a> MethodDef<'a> {
/// But if the struct is `repr(packed)`, we can't use something like
/// `&self.x` on a packed type (as required for e.g. `Debug` and `Hash`)
/// because that might cause an unaligned ref. So we use let-destructuring
/// instead.
/// instead. If the struct impls `Copy`:
/// ```
/// # struct A { x: i32, y: i32 }
/// # struct A { x: u8, y: u8 }
/// impl PartialEq for A {
/// fn eq(&self, other: &A) -> bool {
/// let Self { x: __self_0_0, y: __self_0_1 } = *self;
@ -1027,6 +1027,19 @@ impl<'a> MethodDef<'a> {
/// }
/// }
/// ```
/// If it doesn't impl `Copy`:
/// ```
/// # struct A { x: u8, y: u8 }
/// impl PartialEq for A {
/// fn eq(&self, other: &A) -> bool {
/// let Self { x: ref __self_0_0, y: ref __self_0_1 } = *self;
/// let Self { x: ref __self_1_0, y: ref __self_1_1 } = *other;
/// *__self_0_0 == *__self_1_0 && *__self_0_1 == *__self_1_1
/// }
/// }
/// ```
/// This latter case only works if the fields match the alignment required
/// by the `packed(N)` attribute.
fn expand_struct_method_body<'b>(
&self,
cx: &mut ExtCtxt<'_>,
@ -1058,9 +1071,9 @@ impl<'a> MethodDef<'a> {
} else {
let prefixes: Vec<_> =
(0..selflike_args.len()).map(|i| format!("__self_{}", i)).collect();
let no_deref = always_copy;
let addr_of = always_copy;
let selflike_fields =
trait_.create_struct_pattern_fields(cx, struct_def, &prefixes, no_deref);
trait_.create_struct_pattern_fields(cx, struct_def, &prefixes, addr_of);
let mut body = mk_body(cx, selflike_fields);
let struct_path = cx.path(span, vec![Ident::new(kw::SelfUpper, type_ident.span)]);
@ -1194,9 +1207,9 @@ impl<'a> MethodDef<'a> {
// A single arm has form (&VariantK, &VariantK, ...) => BodyK
// (see "Final wrinkle" note below for why.)
let no_deref = false; // because enums can't be repr(packed)
let addr_of = false; // because enums can't be repr(packed)
let fields =
trait_.create_struct_pattern_fields(cx, &variant.data, &prefixes, no_deref);
trait_.create_struct_pattern_fields(cx, &variant.data, &prefixes, addr_of);
let sp = variant.span.with_ctxt(trait_.span.ctxt());
let variant_path = cx.path(sp, vec![type_ident, variant.ident]);
@ -1512,7 +1525,7 @@ impl<'a> TraitDef<'a> {
cx: &mut ExtCtxt<'_>,
struct_def: &'a VariantData,
prefixes: &[String],
no_deref: bool,
addr_of: bool,
) -> Vec<FieldInfo> {
self.create_fields(struct_def, |i, _struct_field, sp| {
prefixes
@ -1520,7 +1533,7 @@ impl<'a> TraitDef<'a> {
.map(|prefix| {
let ident = self.mk_pattern_ident(prefix, i);
let expr = cx.expr_path(cx.path_ident(sp, ident));
if no_deref { expr } else { cx.expr_deref(sp, expr) }
if addr_of { cx.expr_addr_of(sp, expr) } else { expr }
})
.collect()
})
@ -1536,17 +1549,20 @@ impl<'a> TraitDef<'a> {
selflike_args
.iter()
.map(|selflike_arg| {
// Note: we must use `struct_field.span` rather than `span` in the
// Note: we must use `struct_field.span` rather than `sp` in the
// `unwrap_or_else` case otherwise the hygiene is wrong and we get
// "field `0` of struct `Point` is private" errors on tuple
// structs.
cx.expr(
cx.expr_addr_of(
sp,
ast::ExprKind::Field(
selflike_arg.clone(),
struct_field.ident.unwrap_or_else(|| {
Ident::from_str_and_span(&i.to_string(), struct_field.span)
}),
cx.expr(
sp,
ast::ExprKind::Field(
selflike_arg.clone(),
struct_field.ident.unwrap_or_else(|| {
Ident::from_str_and_span(&i.to_string(), struct_field.span)
}),
),
),
)
})

View File

@ -52,14 +52,13 @@ fn hash_substructure(
let [state_expr] = substr.nonselflike_args else {
cx.span_bug(trait_span, "incorrect number of arguments in `derive(Hash)`");
};
let call_hash = |span, thing_expr| {
let call_hash = |span, expr| {
let hash_path = {
let strs = cx.std_path(&[sym::hash, sym::Hash, sym::hash]);
cx.expr_path(cx.path_global(span, strs))
};
let ref_thing = cx.expr_addr_of(span, thing_expr);
let expr = cx.expr_call(span, hash_path, vec![ref_thing, state_expr.clone()]);
let expr = cx.expr_call(span, hash_path, vec![expr, state_expr.clone()]);
cx.stmt_expr(expr)
};
let mut stmts = Vec::new();
@ -67,11 +66,14 @@ fn hash_substructure(
let fields = match substr.fields {
Struct(_, fs) | EnumMatching(_, 1, .., fs) => fs,
EnumMatching(.., fs) => {
let variant_value = deriving::call_intrinsic(
cx,
let variant_value = cx.expr_addr_of(
trait_span,
sym::discriminant_value,
vec![cx.expr_self(trait_span)],
deriving::call_intrinsic(
cx,
trait_span,
sym::discriminant_value,
vec![cx.expr_self(trait_span)],
),
);
stmts.push(call_hash(trait_span, variant_value));

View File

@ -525,7 +525,7 @@ impl ::core::clone::Clone for PackedNonCopy {
#[inline]
fn clone(&self) -> PackedNonCopy {
let Self(ref __self_0_0) = *self;
PackedNonCopy(::core::clone::Clone::clone(&*__self_0_0))
PackedNonCopy(::core::clone::Clone::clone(__self_0_0))
}
}
#[automatically_derived]
@ -534,7 +534,7 @@ impl ::core::fmt::Debug for PackedNonCopy {
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
let Self(ref __self_0_0) = *self;
::core::fmt::Formatter::debug_tuple_field1_finish(f, "PackedNonCopy",
&&*__self_0_0)
&__self_0_0)
}
}
#[automatically_derived]
@ -550,7 +550,7 @@ impl ::core::default::Default for PackedNonCopy {
impl ::core::hash::Hash for PackedNonCopy {
fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) -> () {
let Self(ref __self_0_0) = *self;
::core::hash::Hash::hash(&*__self_0_0, state)
::core::hash::Hash::hash(__self_0_0, state)
}
}
impl ::core::marker::StructuralPartialEq for PackedNonCopy {}
@ -589,7 +589,7 @@ impl ::core::cmp::PartialOrd for PackedNonCopy {
-> ::core::option::Option<::core::cmp::Ordering> {
let Self(ref __self_0_0) = *self;
let Self(ref __self_1_0) = *other;
::core::cmp::PartialOrd::partial_cmp(&*__self_0_0, &*__self_1_0)
::core::cmp::PartialOrd::partial_cmp(__self_0_0, __self_1_0)
}
}
#[automatically_derived]
@ -599,7 +599,7 @@ impl ::core::cmp::Ord for PackedNonCopy {
fn cmp(&self, other: &PackedNonCopy) -> ::core::cmp::Ordering {
let Self(ref __self_0_0) = *self;
let Self(ref __self_1_0) = *other;
::core::cmp::Ord::cmp(&*__self_0_0, &*__self_1_0)
::core::cmp::Ord::cmp(__self_0_0, __self_1_0)
}
}
@ -677,7 +677,7 @@ impl ::core::clone::Clone for Enum1 {
fn clone(&self) -> Enum1 {
match self {
Enum1::Single { x: __self_0 } =>
Enum1::Single { x: ::core::clone::Clone::clone(&*__self_0) },
Enum1::Single { x: ::core::clone::Clone::clone(__self_0) },
}
}
}
@ -688,7 +688,7 @@ impl ::core::fmt::Debug for Enum1 {
match self {
Enum1::Single { x: __self_0 } =>
::core::fmt::Formatter::debug_struct_field1_finish(f,
"Single", "x", &&*__self_0),
"Single", "x", &__self_0),
}
}
}
@ -698,7 +698,7 @@ impl ::core::hash::Hash for Enum1 {
fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) -> () {
match self {
Enum1::Single { x: __self_0 } => {
::core::hash::Hash::hash(&*__self_0, state)
::core::hash::Hash::hash(__self_0, state)
}
}
}
@ -741,7 +741,7 @@ impl ::core::cmp::PartialOrd for Enum1 {
-> ::core::option::Option<::core::cmp::Ordering> {
match (self, other) {
(Enum1::Single { x: __self_0 }, Enum1::Single { x: __arg_1_0 }) =>
::core::cmp::PartialOrd::partial_cmp(&*__self_0, &*__arg_1_0),
::core::cmp::PartialOrd::partial_cmp(__self_0, __arg_1_0),
}
}
}
@ -752,7 +752,7 @@ impl ::core::cmp::Ord for Enum1 {
fn cmp(&self, other: &Enum1) -> ::core::cmp::Ordering {
match (self, other) {
(Enum1::Single { x: __self_0 }, Enum1::Single { x: __arg_1_0 }) =>
::core::cmp::Ord::cmp(&*__self_0, &*__arg_1_0),
::core::cmp::Ord::cmp(__self_0, __arg_1_0),
}
}
}
@ -963,10 +963,10 @@ impl ::core::fmt::Debug for Mixed {
Mixed::Q => ::core::fmt::Formatter::write_str(f, "Q"),
Mixed::R(__self_0) =>
::core::fmt::Formatter::debug_tuple_field1_finish(f, "R",
&&*__self_0),
&__self_0),
Mixed::S { d1: __self_0, d2: __self_1 } =>
::core::fmt::Formatter::debug_struct_field2_finish(f, "S",
"d1", &&*__self_0, "d2", &&*__self_1),
"d1", &__self_0, "d2", &__self_1),
}
}
}
@ -984,13 +984,13 @@ impl ::core::hash::Hash for Mixed {
Mixed::R(__self_0) => {
::core::hash::Hash::hash(&::core::intrinsics::discriminant_value(self),
state);
::core::hash::Hash::hash(&*__self_0, state)
::core::hash::Hash::hash(__self_0, state)
}
Mixed::S { d1: __self_0, d2: __self_1 } => {
::core::hash::Hash::hash(&::core::intrinsics::discriminant_value(self),
state);
::core::hash::Hash::hash(&*__self_0, state);
::core::hash::Hash::hash(&*__self_1, state)
::core::hash::Hash::hash(__self_0, state);
::core::hash::Hash::hash(__self_1, state)
}
_ => {
::core::hash::Hash::hash(&::core::intrinsics::discriminant_value(self),
@ -1056,16 +1056,14 @@ impl ::core::cmp::PartialOrd for Mixed {
if __self_vi == __arg_1_vi {
match (self, other) {
(Mixed::R(__self_0), Mixed::R(__arg_1_0)) =>
::core::cmp::PartialOrd::partial_cmp(&*__self_0,
&*__arg_1_0),
::core::cmp::PartialOrd::partial_cmp(__self_0, __arg_1_0),
(Mixed::S { d1: __self_0, d2: __self_1 }, Mixed::S {
d1: __arg_1_0, d2: __arg_1_1 }) =>
match ::core::cmp::PartialOrd::partial_cmp(&*__self_0,
&*__arg_1_0) {
match ::core::cmp::PartialOrd::partial_cmp(__self_0,
__arg_1_0) {
::core::option::Option::Some(::core::cmp::Ordering::Equal)
=>
::core::cmp::PartialOrd::partial_cmp(&*__self_1,
&*__arg_1_1),
::core::cmp::PartialOrd::partial_cmp(__self_1, __arg_1_1),
cmp => cmp,
},
_ =>
@ -1086,12 +1084,12 @@ impl ::core::cmp::Ord for Mixed {
if __self_vi == __arg_1_vi {
match (self, other) {
(Mixed::R(__self_0), Mixed::R(__arg_1_0)) =>
::core::cmp::Ord::cmp(&*__self_0, &*__arg_1_0),
::core::cmp::Ord::cmp(__self_0, __arg_1_0),
(Mixed::S { d1: __self_0, d2: __self_1 }, Mixed::S {
d1: __arg_1_0, d2: __arg_1_1 }) =>
match ::core::cmp::Ord::cmp(&*__self_0, &*__arg_1_0) {
match ::core::cmp::Ord::cmp(__self_0, __arg_1_0) {
::core::cmp::Ordering::Equal =>
::core::cmp::Ord::cmp(&*__self_1, &*__arg_1_1),
::core::cmp::Ord::cmp(__self_1, __arg_1_1),
cmp => cmp,
},
_ => ::core::cmp::Ordering::Equal,
@ -1110,11 +1108,11 @@ impl ::core::clone::Clone for Fielded {
fn clone(&self) -> Fielded {
match self {
Fielded::X(__self_0) =>
Fielded::X(::core::clone::Clone::clone(&*__self_0)),
Fielded::X(::core::clone::Clone::clone(__self_0)),
Fielded::Y(__self_0) =>
Fielded::Y(::core::clone::Clone::clone(&*__self_0)),
Fielded::Y(::core::clone::Clone::clone(__self_0)),
Fielded::Z(__self_0) =>
Fielded::Z(::core::clone::Clone::clone(&*__self_0)),
Fielded::Z(::core::clone::Clone::clone(__self_0)),
}
}
}
@ -1125,13 +1123,13 @@ impl ::core::fmt::Debug for Fielded {
match self {
Fielded::X(__self_0) =>
::core::fmt::Formatter::debug_tuple_field1_finish(f, "X",
&&*__self_0),
&__self_0),
Fielded::Y(__self_0) =>
::core::fmt::Formatter::debug_tuple_field1_finish(f, "Y",
&&*__self_0),
&__self_0),
Fielded::Z(__self_0) =>
::core::fmt::Formatter::debug_tuple_field1_finish(f, "Z",
&&*__self_0),
&__self_0),
}
}
}
@ -1143,17 +1141,17 @@ impl ::core::hash::Hash for Fielded {
Fielded::X(__self_0) => {
::core::hash::Hash::hash(&::core::intrinsics::discriminant_value(self),
state);
::core::hash::Hash::hash(&*__self_0, state)
::core::hash::Hash::hash(__self_0, state)
}
Fielded::Y(__self_0) => {
::core::hash::Hash::hash(&::core::intrinsics::discriminant_value(self),
state);
::core::hash::Hash::hash(&*__self_0, state)
::core::hash::Hash::hash(__self_0, state)
}
Fielded::Z(__self_0) => {
::core::hash::Hash::hash(&::core::intrinsics::discriminant_value(self),
state);
::core::hash::Hash::hash(&*__self_0, state)
::core::hash::Hash::hash(__self_0, state)
}
}
}
@ -1219,14 +1217,11 @@ impl ::core::cmp::PartialOrd for Fielded {
if __self_vi == __arg_1_vi {
match (self, other) {
(Fielded::X(__self_0), Fielded::X(__arg_1_0)) =>
::core::cmp::PartialOrd::partial_cmp(&*__self_0,
&*__arg_1_0),
::core::cmp::PartialOrd::partial_cmp(__self_0, __arg_1_0),
(Fielded::Y(__self_0), Fielded::Y(__arg_1_0)) =>
::core::cmp::PartialOrd::partial_cmp(&*__self_0,
&*__arg_1_0),
::core::cmp::PartialOrd::partial_cmp(__self_0, __arg_1_0),
(Fielded::Z(__self_0), Fielded::Z(__arg_1_0)) =>
::core::cmp::PartialOrd::partial_cmp(&*__self_0,
&*__arg_1_0),
::core::cmp::PartialOrd::partial_cmp(__self_0, __arg_1_0),
_ => unsafe { ::core::intrinsics::unreachable() }
}
} else {
@ -1244,11 +1239,11 @@ impl ::core::cmp::Ord for Fielded {
if __self_vi == __arg_1_vi {
match (self, other) {
(Fielded::X(__self_0), Fielded::X(__arg_1_0)) =>
::core::cmp::Ord::cmp(&*__self_0, &*__arg_1_0),
::core::cmp::Ord::cmp(__self_0, __arg_1_0),
(Fielded::Y(__self_0), Fielded::Y(__arg_1_0)) =>
::core::cmp::Ord::cmp(&*__self_0, &*__arg_1_0),
::core::cmp::Ord::cmp(__self_0, __arg_1_0),
(Fielded::Z(__self_0), Fielded::Z(__arg_1_0)) =>
::core::cmp::Ord::cmp(&*__self_0, &*__arg_1_0),
::core::cmp::Ord::cmp(__self_0, __arg_1_0),
_ => unsafe { ::core::intrinsics::unreachable() }
}
} else { ::core::cmp::Ord::cmp(&__self_vi, &__arg_1_vi) }