Auto merge of #98446 - nnethercote:derive-no-match-destructuring, r=scottmcm

Don't use match-destructuring for derived ops on structs.

r? `@scottmcm`
This commit is contained in:
bors 2022-07-04 01:06:54 +00:00
commit d46c728bcd
6 changed files with 394 additions and 439 deletions

View File

@ -66,7 +66,7 @@
//!
//! # "`cs`" functions
//!
//! The `cs_...` functions ("combine substructure) are designed to
//! The `cs_...` functions ("combine substructure") are designed to
//! make life easier by providing some pre-made recipes for common
//! threads; mostly calling the function being derived on all the
//! arguments and then combining them back together in some way (or
@ -429,6 +429,7 @@ impl<'a> TraitDef<'a> {
generics,
from_scratch,
use_temporaries,
is_packed,
),
ast::ItemKind::Enum(ref enum_def, ref generics) => {
// We ignore `use_temporaries` here, because
@ -448,6 +449,7 @@ impl<'a> TraitDef<'a> {
generics,
from_scratch,
use_temporaries,
is_packed,
)
} else {
cx.span_err(mitem.span, "this trait cannot be derived for unions");
@ -729,6 +731,7 @@ impl<'a> TraitDef<'a> {
generics: &Generics,
from_scratch: bool,
use_temporaries: bool,
is_packed: bool,
) -> P<ast::Item> {
let field_tys: Vec<P<ast::Ty>> =
struct_def.fields().iter().map(|field| field.ty.clone()).collect();
@ -757,6 +760,7 @@ impl<'a> TraitDef<'a> {
&self_args,
&nonself_args,
use_temporaries,
is_packed,
)
};
@ -945,6 +949,7 @@ impl<'a> MethodDef<'a> {
})
}
/// The normal case uses field access.
/// ```
/// #[derive(PartialEq)]
/// # struct Dummy;
@ -953,33 +958,21 @@ impl<'a> MethodDef<'a> {
/// // equivalent to:
/// impl PartialEq for A {
/// fn eq(&self, other: &A) -> bool {
/// match *self {
/// A {x: ref __self_0_0, y: ref __self_0_1} => {
/// match *other {
/// A {x: ref __self_1_0, y: ref __self_1_1} => {
/// __self_0_0.eq(__self_1_0) && __self_0_1.eq(__self_1_1)
/// }
/// }
/// }
/// }
/// self.x == other.x && self.y == other.y
/// }
/// }
/// ```
/// or if A is repr(packed) - note fields are matched by-value
/// instead of by-reference.
/// But if the struct is `repr(packed)`, we can't use something like
/// `&self.x` on a packed type (as required for e.g. `Debug` and `Hash`)
/// because that might cause an unaligned ref. So we use let-destructuring
/// instead.
/// ```
/// # struct A { x: i32, y: i32 }
/// impl PartialEq for A {
/// fn eq(&self, other: &A) -> bool {
/// match *self {
/// A {x: __self_0_0, y: __self_0_1} => {
/// match other {
/// A {x: __self_1_0, y: __self_1_1} => {
/// __self_0_0.eq(&__self_1_0) && __self_0_1.eq(&__self_1_1)
/// }
/// }
/// }
/// }
/// let Self { x: ref __self_0_0, y: ref __self_0_1 } = *self;
/// let Self { x: ref __self_1_0, y: ref __self_1_1 } = *other;
/// *__self_0_0 == *__self_1_0 && *__self_0_1 == *__self_1_1
/// }
/// }
/// ```
@ -992,24 +985,33 @@ impl<'a> MethodDef<'a> {
self_args: &[P<Expr>],
nonself_args: &[P<Expr>],
use_temporaries: bool,
is_packed: bool,
) -> P<Expr> {
let mut raw_fields = Vec::new(); // Vec<[fields of self], [fields of next Self arg], [etc]>
let span = trait_.span;
let mut patterns = Vec::new();
for i in 0..self_args.len() {
// We could use `type_ident` instead of `Self`, but in the case of a type parameter
// shadowing the struct name, that causes a second, unnecessary E0578 error. #97343
let struct_path = cx.path(span, vec![Ident::new(kw::SelfUpper, type_ident.span)]);
let (pat, ident_expr) = trait_.create_struct_pattern(
cx,
struct_path,
struct_def,
&format!("__self_{}", i),
ast::Mutability::Not,
use_temporaries,
);
patterns.push(pat);
raw_fields.push(ident_expr);
for (i, self_arg) in self_args.iter().enumerate() {
let ident_exprs = if !is_packed {
trait_.create_struct_field_accesses(cx, self_arg, struct_def)
} else {
// Get the pattern for the let-destructuring.
//
// We could use `type_ident` instead of `Self`, but in the case of a type parameter
// shadowing the struct name, that causes a second, unnecessary E0578 error. #97343
let struct_path = cx.path(span, vec![Ident::new(kw::SelfUpper, type_ident.span)]);
let (pat, ident_exprs) = trait_.create_struct_pattern(
cx,
struct_path,
struct_def,
&format!("__self_{}", i),
ast::Mutability::Not,
use_temporaries,
);
patterns.push(pat);
ident_exprs
};
raw_fields.push(ident_exprs);
}
// transpose raw_fields
@ -1036,7 +1038,6 @@ impl<'a> MethodDef<'a> {
cx.span_bug(span, "no `self` parameter for method in generic `derive`")
};
// body of the inner most destructuring match
let mut body = self.call_substructure_method(
cx,
trait_,
@ -1045,14 +1046,18 @@ impl<'a> MethodDef<'a> {
&Struct(struct_def, fields),
);
// make a series of nested matches, to destructure the
// structs. This is actually right-to-left, but it shouldn't
// matter.
for (arg_expr, pat) in iter::zip(self_args, patterns) {
body = cx.expr_match(span, arg_expr.clone(), vec![cx.arm(span, pat.clone(), body)])
}
if !is_packed {
body.span = span;
body
} else {
// Do the let-destructuring.
let mut stmts: Vec<_> = iter::zip(self_args, patterns)
.map(|(arg_expr, pat)| cx.stmt_let_pat(span, pat, arg_expr.clone()))
.collect();
stmts.push(cx.stmt_expr(body));
body
cx.expr_block(cx.block(span, stmts))
}
}
fn expand_static_struct_method_body(
@ -1522,8 +1527,6 @@ impl<'a> TraitDef<'a> {
paths.push(ident.with_span_pos(sp));
let val = cx.expr_path(cx.path_ident(sp, ident));
let val = if use_temporaries { val } else { cx.expr_deref(sp, val) };
let val = cx.expr(sp, ast::ExprKind::Paren(val));
ident_exprs.push((sp, struct_field.ident, val, &struct_field.attrs[..]));
}
@ -1555,6 +1558,39 @@ impl<'a> TraitDef<'a> {
(pattern, ident_exprs)
}
/// For each field of `struct_def`, builds a `(span, field ident, field-access
/// expr, field attrs)` tuple, where the expression is a direct field access
/// (`self_arg.x` / `self_arg.0`) on `self_arg` — the non-packed alternative to
/// the pattern-binding approach in `create_struct_pattern`.
fn create_struct_field_accesses(
    &self,
    cx: &mut ExtCtxt<'_>,
    mut self_arg: &P<Expr>,
    struct_def: &'a VariantData,
) -> Vec<(Span, Option<Ident>, P<Expr>, &'a [ast::Attribute])> {
    let mut ident_exprs = Vec::new();
    for (i, struct_field) in struct_def.fields().iter().enumerate() {
        // Field's span, but with the trait's hygiene context.
        let sp = struct_field.span.with_ctxt(self.span.ctxt());
        // We don't need the deref, if there is one: a field access on the
        // inner expression names the same place as one on `*inner`.
        if let ast::ExprKind::Unary(ast::UnOp::Deref, inner) = &self_arg.kind {
            self_arg = inner;
        }
        // Note: we must use `struct_field.span` rather than `span` in the
        // `unwrap_or_else` case otherwise the hygiene is wrong and we get
        // "field `0` of struct `Point` is private" errors on tuple
        // structs.
        let val = cx.expr(
            sp,
            ast::ExprKind::Field(
                self_arg.clone(),
                // Tuple-struct fields have no ident; synthesize one from the index.
                struct_field.ident.unwrap_or_else(|| {
                    Ident::from_str_and_span(&i.to_string(), struct_field.span)
                }),
            ),
        );
        ident_exprs.push((sp, struct_field.ident, val, &struct_field.attrs[..]));
    }
    ident_exprs
}
fn create_enum_variant_pattern(
&self,
cx: &mut ExtCtxt<'_>,
@ -1643,7 +1679,6 @@ where
/// fields.
/// When the `substructure` is an `EnumNonMatchingCollapsed`, the result of `enum_nonmatch_f`
/// is returned. Statics may not be folded over.
/// See `cs_op` in `partial_ord.rs` for a model example.
pub fn cs_fold1<F, B>(
use_foldl: bool,
f: F,

View File

@ -152,6 +152,19 @@ impl<'a> ExtCtxt<'a> {
ast::Stmt { id: ast::DUMMY_NODE_ID, span: expr.span, kind: ast::StmtKind::Expr(expr) }
}
/// Builds a `let <pat> = <ex>;` statement: no type annotation, no
/// attributes, no captured tokens.
pub fn stmt_let_pat(&self, sp: Span, pat: P<ast::Pat>, ex: P<ast::Expr>) -> ast::Stmt {
    let local = ast::Local {
        id: ast::DUMMY_NODE_ID,
        pat,
        ty: None,
        kind: LocalKind::Init(ex),
        span: sp,
        attrs: AttrVec::new(),
        tokens: None,
    };
    self.stmt_local(P(local), sp)
}
pub fn stmt_let(&self, sp: Span, mutbl: bool, ident: Ident, ex: P<ast::Expr>) -> ast::Stmt {
self.stmt_let_ty(sp, mutbl, ident, None, ex)
}

View File

@ -4,98 +4,78 @@
fn <impl at $DIR/combine_clone_of_primitives.rs:6:10: 6:15>::clone(_1: &MyThing<T>) -> MyThing<T> {
debug self => _1; // in scope 0 at $DIR/combine_clone_of_primitives.rs:6:10: 6:15
let mut _0: MyThing<T>; // return place in scope 0 at $DIR/combine_clone_of_primitives.rs:6:10: 6:15
let _2: &T; // in scope 0 at $DIR/combine_clone_of_primitives.rs:8:5: 8:9
let _3: &u64; // in scope 0 at $DIR/combine_clone_of_primitives.rs:9:5: 9:11
let _4: &[f32; 3]; // in scope 0 at $DIR/combine_clone_of_primitives.rs:10:5: 10:16
let mut _5: T; // in scope 0 at $DIR/combine_clone_of_primitives.rs:8:5: 8:9
let mut _6: &T; // in scope 0 at $DIR/combine_clone_of_primitives.rs:8:5: 8:9
let _7: &T; // in scope 0 at $DIR/combine_clone_of_primitives.rs:8:5: 8:9
let mut _8: u64; // in scope 0 at $DIR/combine_clone_of_primitives.rs:9:5: 9:11
let mut _9: &u64; // in scope 0 at $DIR/combine_clone_of_primitives.rs:9:5: 9:11
let _10: &u64; // in scope 0 at $DIR/combine_clone_of_primitives.rs:9:5: 9:11
let mut _11: [f32; 3]; // in scope 0 at $DIR/combine_clone_of_primitives.rs:10:5: 10:16
let mut _12: &[f32; 3]; // in scope 0 at $DIR/combine_clone_of_primitives.rs:10:5: 10:16
let _13: &[f32; 3]; // in scope 0 at $DIR/combine_clone_of_primitives.rs:10:5: 10:16
scope 1 {
debug __self_0_0 => _2; // in scope 1 at $DIR/combine_clone_of_primitives.rs:8:5: 8:9
debug __self_0_1 => _3; // in scope 1 at $DIR/combine_clone_of_primitives.rs:9:5: 9:11
debug __self_0_2 => _4; // in scope 1 at $DIR/combine_clone_of_primitives.rs:10:5: 10:16
}
let mut _2: T; // in scope 0 at $DIR/combine_clone_of_primitives.rs:8:5: 8:9
let mut _3: &T; // in scope 0 at $DIR/combine_clone_of_primitives.rs:8:5: 8:9
let _4: &T; // in scope 0 at $DIR/combine_clone_of_primitives.rs:8:5: 8:9
let mut _5: u64; // in scope 0 at $DIR/combine_clone_of_primitives.rs:9:5: 9:11
let mut _6: &u64; // in scope 0 at $DIR/combine_clone_of_primitives.rs:9:5: 9:11
let _7: &u64; // in scope 0 at $DIR/combine_clone_of_primitives.rs:9:5: 9:11
let mut _8: [f32; 3]; // in scope 0 at $DIR/combine_clone_of_primitives.rs:10:5: 10:16
let mut _9: &[f32; 3]; // in scope 0 at $DIR/combine_clone_of_primitives.rs:10:5: 10:16
let _10: &[f32; 3]; // in scope 0 at $DIR/combine_clone_of_primitives.rs:10:5: 10:16
bb0: {
StorageLive(_2); // scope 0 at $DIR/combine_clone_of_primitives.rs:8:5: 8:9
_2 = &((*_1).0: T); // scope 0 at $DIR/combine_clone_of_primitives.rs:8:5: 8:9
StorageLive(_3); // scope 0 at $DIR/combine_clone_of_primitives.rs:9:5: 9:11
_3 = &((*_1).1: u64); // scope 0 at $DIR/combine_clone_of_primitives.rs:9:5: 9:11
StorageLive(_4); // scope 0 at $DIR/combine_clone_of_primitives.rs:10:5: 10:16
_4 = &((*_1).2: [f32; 3]); // scope 0 at $DIR/combine_clone_of_primitives.rs:10:5: 10:16
StorageLive(_5); // scope 1 at $DIR/combine_clone_of_primitives.rs:8:5: 8:9
StorageLive(_6); // scope 1 at $DIR/combine_clone_of_primitives.rs:8:5: 8:9
StorageLive(_7); // scope 1 at $DIR/combine_clone_of_primitives.rs:8:5: 8:9
- _7 = &(*_2); // scope 1 at $DIR/combine_clone_of_primitives.rs:8:5: 8:9
- _6 = &(*_7); // scope 1 at $DIR/combine_clone_of_primitives.rs:8:5: 8:9
+ _7 = _2; // scope 1 at $DIR/combine_clone_of_primitives.rs:8:5: 8:9
+ _6 = _7; // scope 1 at $DIR/combine_clone_of_primitives.rs:8:5: 8:9
_5 = <T as Clone>::clone(move _6) -> bb1; // scope 1 at $DIR/combine_clone_of_primitives.rs:8:5: 8:9
StorageLive(_3); // scope 0 at $DIR/combine_clone_of_primitives.rs:8:5: 8:9
StorageLive(_4); // scope 0 at $DIR/combine_clone_of_primitives.rs:8:5: 8:9
_4 = &((*_1).0: T); // scope 0 at $DIR/combine_clone_of_primitives.rs:8:5: 8:9
- _3 = &(*_4); // scope 0 at $DIR/combine_clone_of_primitives.rs:8:5: 8:9
+ _3 = _4; // scope 0 at $DIR/combine_clone_of_primitives.rs:8:5: 8:9
_2 = <T as Clone>::clone(move _3) -> bb1; // scope 0 at $DIR/combine_clone_of_primitives.rs:8:5: 8:9
// mir::Constant
// + span: $DIR/combine_clone_of_primitives.rs:8:5: 8:9
// + literal: Const { ty: for<'r> fn(&'r T) -> T {<T as Clone>::clone}, val: Value(Scalar(<ZST>)) }
}
bb1: {
StorageDead(_6); // scope 1 at $DIR/combine_clone_of_primitives.rs:8:8: 8:9
StorageLive(_8); // scope 1 at $DIR/combine_clone_of_primitives.rs:9:5: 9:11
StorageLive(_9); // scope 1 at $DIR/combine_clone_of_primitives.rs:9:5: 9:11
StorageLive(_10); // scope 1 at $DIR/combine_clone_of_primitives.rs:9:5: 9:11
- _10 = &(*_3); // scope 1 at $DIR/combine_clone_of_primitives.rs:9:5: 9:11
- _9 = &(*_10); // scope 1 at $DIR/combine_clone_of_primitives.rs:9:5: 9:11
- _8 = <u64 as Clone>::clone(move _9) -> [return: bb2, unwind: bb4]; // scope 1 at $DIR/combine_clone_of_primitives.rs:9:5: 9:11
StorageDead(_3); // scope 0 at $DIR/combine_clone_of_primitives.rs:8:8: 8:9
StorageLive(_5); // scope 0 at $DIR/combine_clone_of_primitives.rs:9:5: 9:11
StorageLive(_6); // scope 0 at $DIR/combine_clone_of_primitives.rs:9:5: 9:11
StorageLive(_7); // scope 0 at $DIR/combine_clone_of_primitives.rs:9:5: 9:11
_7 = &((*_1).1: u64); // scope 0 at $DIR/combine_clone_of_primitives.rs:9:5: 9:11
- _6 = &(*_7); // scope 0 at $DIR/combine_clone_of_primitives.rs:9:5: 9:11
- _5 = <u64 as Clone>::clone(move _6) -> [return: bb2, unwind: bb4]; // scope 0 at $DIR/combine_clone_of_primitives.rs:9:5: 9:11
- // mir::Constant
- // + span: $DIR/combine_clone_of_primitives.rs:9:5: 9:11
- // + literal: Const { ty: for<'r> fn(&'r u64) -> u64 {<u64 as Clone>::clone}, val: Value(Scalar(<ZST>)) }
+ _10 = _3; // scope 1 at $DIR/combine_clone_of_primitives.rs:9:5: 9:11
+ _9 = _10; // scope 1 at $DIR/combine_clone_of_primitives.rs:9:5: 9:11
+ _8 = (*_9); // scope 1 at $DIR/combine_clone_of_primitives.rs:9:5: 9:11
+ goto -> bb2; // scope 1 at $DIR/combine_clone_of_primitives.rs:9:5: 9:11
+ _6 = _7; // scope 0 at $DIR/combine_clone_of_primitives.rs:9:5: 9:11
+ _5 = (*_6); // scope 0 at $DIR/combine_clone_of_primitives.rs:9:5: 9:11
+ goto -> bb2; // scope 0 at $DIR/combine_clone_of_primitives.rs:9:5: 9:11
}
bb2: {
StorageDead(_9); // scope 1 at $DIR/combine_clone_of_primitives.rs:9:10: 9:11
StorageLive(_11); // scope 1 at $DIR/combine_clone_of_primitives.rs:10:5: 10:16
StorageLive(_12); // scope 1 at $DIR/combine_clone_of_primitives.rs:10:5: 10:16
StorageLive(_13); // scope 1 at $DIR/combine_clone_of_primitives.rs:10:5: 10:16
- _13 = &(*_4); // scope 1 at $DIR/combine_clone_of_primitives.rs:10:5: 10:16
- _12 = &(*_13); // scope 1 at $DIR/combine_clone_of_primitives.rs:10:5: 10:16
- _11 = <[f32; 3] as Clone>::clone(move _12) -> [return: bb3, unwind: bb4]; // scope 1 at $DIR/combine_clone_of_primitives.rs:10:5: 10:16
StorageDead(_6); // scope 0 at $DIR/combine_clone_of_primitives.rs:9:10: 9:11
StorageLive(_8); // scope 0 at $DIR/combine_clone_of_primitives.rs:10:5: 10:16
StorageLive(_9); // scope 0 at $DIR/combine_clone_of_primitives.rs:10:5: 10:16
StorageLive(_10); // scope 0 at $DIR/combine_clone_of_primitives.rs:10:5: 10:16
_10 = &((*_1).2: [f32; 3]); // scope 0 at $DIR/combine_clone_of_primitives.rs:10:5: 10:16
- _9 = &(*_10); // scope 0 at $DIR/combine_clone_of_primitives.rs:10:5: 10:16
- _8 = <[f32; 3] as Clone>::clone(move _9) -> [return: bb3, unwind: bb4]; // scope 0 at $DIR/combine_clone_of_primitives.rs:10:5: 10:16
- // mir::Constant
- // + span: $DIR/combine_clone_of_primitives.rs:10:5: 10:16
- // + literal: Const { ty: for<'r> fn(&'r [f32; 3]) -> [f32; 3] {<[f32; 3] as Clone>::clone}, val: Value(Scalar(<ZST>)) }
+ _13 = _4; // scope 1 at $DIR/combine_clone_of_primitives.rs:10:5: 10:16
+ _12 = _13; // scope 1 at $DIR/combine_clone_of_primitives.rs:10:5: 10:16
+ _11 = (*_12); // scope 1 at $DIR/combine_clone_of_primitives.rs:10:5: 10:16
+ goto -> bb3; // scope 1 at $DIR/combine_clone_of_primitives.rs:10:5: 10:16
+ _9 = _10; // scope 0 at $DIR/combine_clone_of_primitives.rs:10:5: 10:16
+ _8 = (*_9); // scope 0 at $DIR/combine_clone_of_primitives.rs:10:5: 10:16
+ goto -> bb3; // scope 0 at $DIR/combine_clone_of_primitives.rs:10:5: 10:16
}
bb3: {
StorageDead(_12); // scope 1 at $DIR/combine_clone_of_primitives.rs:10:15: 10:16
Deinit(_0); // scope 1 at $DIR/combine_clone_of_primitives.rs:6:10: 6:15
(_0.0: T) = move _5; // scope 1 at $DIR/combine_clone_of_primitives.rs:6:10: 6:15
(_0.1: u64) = move _8; // scope 1 at $DIR/combine_clone_of_primitives.rs:6:10: 6:15
(_0.2: [f32; 3]) = move _11; // scope 1 at $DIR/combine_clone_of_primitives.rs:6:10: 6:15
StorageDead(_13); // scope 1 at $DIR/combine_clone_of_primitives.rs:6:14: 6:15
StorageDead(_11); // scope 1 at $DIR/combine_clone_of_primitives.rs:6:14: 6:15
StorageDead(_10); // scope 1 at $DIR/combine_clone_of_primitives.rs:6:14: 6:15
StorageDead(_8); // scope 1 at $DIR/combine_clone_of_primitives.rs:6:14: 6:15
StorageDead(_7); // scope 1 at $DIR/combine_clone_of_primitives.rs:6:14: 6:15
StorageDead(_5); // scope 1 at $DIR/combine_clone_of_primitives.rs:6:14: 6:15
StorageDead(_4); // scope 0 at $DIR/combine_clone_of_primitives.rs:6:14: 6:15
StorageDead(_3); // scope 0 at $DIR/combine_clone_of_primitives.rs:6:14: 6:15
StorageDead(_9); // scope 0 at $DIR/combine_clone_of_primitives.rs:10:15: 10:16
Deinit(_0); // scope 0 at $DIR/combine_clone_of_primitives.rs:6:10: 6:15
(_0.0: T) = move _2; // scope 0 at $DIR/combine_clone_of_primitives.rs:6:10: 6:15
(_0.1: u64) = move _5; // scope 0 at $DIR/combine_clone_of_primitives.rs:6:10: 6:15
(_0.2: [f32; 3]) = move _8; // scope 0 at $DIR/combine_clone_of_primitives.rs:6:10: 6:15
StorageDead(_8); // scope 0 at $DIR/combine_clone_of_primitives.rs:6:14: 6:15
StorageDead(_5); // scope 0 at $DIR/combine_clone_of_primitives.rs:6:14: 6:15
StorageDead(_2); // scope 0 at $DIR/combine_clone_of_primitives.rs:6:14: 6:15
StorageDead(_10); // scope 0 at $DIR/combine_clone_of_primitives.rs:6:14: 6:15
StorageDead(_7); // scope 0 at $DIR/combine_clone_of_primitives.rs:6:14: 6:15
StorageDead(_4); // scope 0 at $DIR/combine_clone_of_primitives.rs:6:14: 6:15
return; // scope 0 at $DIR/combine_clone_of_primitives.rs:6:15: 6:15
}
bb4 (cleanup): {
drop(_5) -> bb5; // scope 1 at $DIR/combine_clone_of_primitives.rs:6:14: 6:15
drop(_2) -> bb5; // scope 0 at $DIR/combine_clone_of_primitives.rs:6:14: 6:15
}
bb5 (cleanup): {

View File

@ -36,12 +36,12 @@
22| 2| println!("used_only_from_this_lib_crate_generic_function with {:?}", arg);
23| 2|}
------------------
| used_crate::used_only_from_this_lib_crate_generic_function::<&str>:
| used_crate::used_only_from_this_lib_crate_generic_function::<alloc::vec::Vec<i32>>:
| 21| 1|pub fn used_only_from_this_lib_crate_generic_function<T: Debug>(arg: T) {
| 22| 1| println!("used_only_from_this_lib_crate_generic_function with {:?}", arg);
| 23| 1|}
------------------
| used_crate::used_only_from_this_lib_crate_generic_function::<alloc::vec::Vec<i32>>:
| used_crate::used_only_from_this_lib_crate_generic_function::<&str>:
| 21| 1|pub fn used_only_from_this_lib_crate_generic_function<T: Debug>(arg: T) {
| 22| 1| println!("used_only_from_this_lib_crate_generic_function with {:?}", arg);
| 23| 1|}

View File

@ -28,12 +28,17 @@ struct Point {
y: u32,
}
// A long struct.
// A large struct.
#[derive(Clone, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
struct Big {
b1: u32, b2: u32, b3: u32, b4: u32, b5: u32, b6: u32, b7: u32, b8:u32,
}
// A packed tuple struct.
#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
#[repr(packed)]
struct Packed(u32);
// A C-like, fieldless enum.
#[derive(Clone, Copy, Debug, Default, Hash, PartialEq, Eq, PartialOrd, Ord)]
enum Fieldless {

View File

@ -37,7 +37,7 @@ impl ::core::marker::Copy for Empty { }
#[allow(unused_qualifications)]
impl ::core::fmt::Debug for Empty {
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
match *self { Self => ::core::fmt::Formatter::write_str(f, "Empty"), }
::core::fmt::Formatter::write_str(f, "Empty")
}
}
#[automatically_derived]
@ -49,18 +49,14 @@ impl ::core::default::Default for Empty {
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::hash::Hash for Empty {
fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) -> () {
match *self { Self => {} }
}
fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) -> () { {} }
}
impl ::core::marker::StructuralPartialEq for Empty {}
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::cmp::PartialEq for Empty {
#[inline]
fn eq(&self, other: &Empty) -> bool {
match *other { Self => match *self { Self => true, }, }
}
fn eq(&self, other: &Empty) -> bool { true }
}
impl ::core::marker::StructuralEq for Empty {}
#[automatically_derived]
@ -77,13 +73,7 @@ impl ::core::cmp::PartialOrd for Empty {
#[inline]
fn partial_cmp(&self, other: &Empty)
-> ::core::option::Option<::core::cmp::Ordering> {
match *other {
Self =>
match *self {
Self =>
::core::option::Option::Some(::core::cmp::Ordering::Equal),
},
}
::core::option::Option::Some(::core::cmp::Ordering::Equal)
}
}
#[automatically_derived]
@ -91,9 +81,7 @@ impl ::core::cmp::PartialOrd for Empty {
impl ::core::cmp::Ord for Empty {
#[inline]
fn cmp(&self, other: &Empty) -> ::core::cmp::Ordering {
match *other {
Self => match *self { Self => ::core::cmp::Ordering::Equal, },
}
::core::cmp::Ordering::Equal
}
}
@ -121,11 +109,8 @@ impl ::core::marker::Copy for Point { }
#[allow(unused_qualifications)]
impl ::core::fmt::Debug for Point {
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
match *self {
Self { x: ref __self_0_0, y: ref __self_0_1 } =>
::core::fmt::Formatter::debug_struct_field2_finish(f, "Point",
"x", &&(*__self_0_0), "y", &&(*__self_0_1)),
}
::core::fmt::Formatter::debug_struct_field2_finish(f, "Point", "x",
&&self.x, "y", &&self.y)
}
}
#[automatically_derived]
@ -143,11 +128,9 @@ impl ::core::default::Default for Point {
#[allow(unused_qualifications)]
impl ::core::hash::Hash for Point {
fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) -> () {
match *self {
Self { x: ref __self_0_0, y: ref __self_0_1 } => {
::core::hash::Hash::hash(&(*__self_0_0), state);
::core::hash::Hash::hash(&(*__self_0_1), state)
}
{
::core::hash::Hash::hash(&self.x, state);
::core::hash::Hash::hash(&self.y, state)
}
}
}
@ -157,25 +140,11 @@ impl ::core::marker::StructuralPartialEq for Point {}
impl ::core::cmp::PartialEq for Point {
#[inline]
fn eq(&self, other: &Point) -> bool {
match *other {
Self { x: ref __self_1_0, y: ref __self_1_1 } =>
match *self {
Self { x: ref __self_0_0, y: ref __self_0_1 } =>
(*__self_0_0) == (*__self_1_0) &&
(*__self_0_1) == (*__self_1_1),
},
}
self.x == other.x && self.y == other.y
}
#[inline]
fn ne(&self, other: &Point) -> bool {
match *other {
Self { x: ref __self_1_0, y: ref __self_1_1 } =>
match *self {
Self { x: ref __self_0_0, y: ref __self_0_1 } =>
(*__self_0_0) != (*__self_1_0) ||
(*__self_0_1) != (*__self_1_1),
},
}
self.x != other.x || self.y != other.y
}
}
impl ::core::marker::StructuralEq for Point {}
@ -198,24 +167,16 @@ impl ::core::cmp::PartialOrd for Point {
#[inline]
fn partial_cmp(&self, other: &Point)
-> ::core::option::Option<::core::cmp::Ordering> {
match *other {
Self { x: ref __self_1_0, y: ref __self_1_1 } =>
match *self {
Self { x: ref __self_0_0, y: ref __self_0_1 } =>
match ::core::cmp::PartialOrd::partial_cmp(&(*__self_0_0),
&(*__self_1_0)) {
::core::option::Option::Some(::core::cmp::Ordering::Equal)
=>
match ::core::cmp::PartialOrd::partial_cmp(&(*__self_0_1),
&(*__self_1_1)) {
::core::option::Option::Some(::core::cmp::Ordering::Equal)
=>
::core::option::Option::Some(::core::cmp::Ordering::Equal),
cmp => cmp,
},
cmp => cmp,
},
match ::core::cmp::PartialOrd::partial_cmp(&self.x, &other.x) {
::core::option::Option::Some(::core::cmp::Ordering::Equal) =>
match ::core::cmp::PartialOrd::partial_cmp(&self.y, &other.y)
{
::core::option::Option::Some(::core::cmp::Ordering::Equal)
=>
::core::option::Option::Some(::core::cmp::Ordering::Equal),
cmp => cmp,
},
cmp => cmp,
}
}
}
@ -224,27 +185,19 @@ impl ::core::cmp::PartialOrd for Point {
impl ::core::cmp::Ord for Point {
#[inline]
fn cmp(&self, other: &Point) -> ::core::cmp::Ordering {
match *other {
Self { x: ref __self_1_0, y: ref __self_1_1 } =>
match *self {
Self { x: ref __self_0_0, y: ref __self_0_1 } =>
match ::core::cmp::Ord::cmp(&(*__self_0_0), &(*__self_1_0))
{
::core::cmp::Ordering::Equal =>
match ::core::cmp::Ord::cmp(&(*__self_0_1), &(*__self_1_1))
{
::core::cmp::Ordering::Equal =>
::core::cmp::Ordering::Equal,
cmp => cmp,
},
cmp => cmp,
},
match ::core::cmp::Ord::cmp(&self.x, &other.x) {
::core::cmp::Ordering::Equal =>
match ::core::cmp::Ord::cmp(&self.y, &other.y) {
::core::cmp::Ordering::Equal =>
::core::cmp::Ordering::Equal,
cmp => cmp,
},
cmp => cmp,
}
}
}
// A long struct.
// A large struct.
struct Big {
b1: u32,
b2: u32,
@ -260,26 +213,15 @@ struct Big {
impl ::core::clone::Clone for Big {
#[inline]
fn clone(&self) -> Big {
match *self {
Self {
b1: ref __self_0_0,
b2: ref __self_0_1,
b3: ref __self_0_2,
b4: ref __self_0_3,
b5: ref __self_0_4,
b6: ref __self_0_5,
b7: ref __self_0_6,
b8: ref __self_0_7 } =>
Big {
b1: ::core::clone::Clone::clone(&(*__self_0_0)),
b2: ::core::clone::Clone::clone(&(*__self_0_1)),
b3: ::core::clone::Clone::clone(&(*__self_0_2)),
b4: ::core::clone::Clone::clone(&(*__self_0_3)),
b5: ::core::clone::Clone::clone(&(*__self_0_4)),
b6: ::core::clone::Clone::clone(&(*__self_0_5)),
b7: ::core::clone::Clone::clone(&(*__self_0_6)),
b8: ::core::clone::Clone::clone(&(*__self_0_7)),
},
Big {
b1: ::core::clone::Clone::clone(&self.b1),
b2: ::core::clone::Clone::clone(&self.b2),
b3: ::core::clone::Clone::clone(&self.b3),
b4: ::core::clone::Clone::clone(&self.b4),
b5: ::core::clone::Clone::clone(&self.b5),
b6: ::core::clone::Clone::clone(&self.b6),
b7: ::core::clone::Clone::clone(&self.b7),
b8: ::core::clone::Clone::clone(&self.b8),
}
}
}
@ -287,25 +229,14 @@ impl ::core::clone::Clone for Big {
#[allow(unused_qualifications)]
impl ::core::fmt::Debug for Big {
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
match *self {
Self {
b1: ref __self_0_0,
b2: ref __self_0_1,
b3: ref __self_0_2,
b4: ref __self_0_3,
b5: ref __self_0_4,
b6: ref __self_0_5,
b7: ref __self_0_6,
b8: ref __self_0_7 } => {
let names: &'static _ =
&["b1", "b2", "b3", "b4", "b5", "b6", "b7", "b8"];
let values: &[&dyn ::core::fmt::Debug] =
&[&&(*__self_0_0), &&(*__self_0_1), &&(*__self_0_2),
&&(*__self_0_3), &&(*__self_0_4), &&(*__self_0_5),
&&(*__self_0_6), &&(*__self_0_7)];
::core::fmt::Formatter::debug_struct_fields_finish(f, "Big",
names, values)
}
{
let names: &'static _ =
&["b1", "b2", "b3", "b4", "b5", "b6", "b7", "b8"];
let values: &[&dyn ::core::fmt::Debug] =
&[&&self.b1, &&self.b2, &&self.b3, &&self.b4, &&self.b5,
&&self.b6, &&self.b7, &&self.b8];
::core::fmt::Formatter::debug_struct_fields_finish(f, "Big",
names, values)
}
}
}
@ -330,25 +261,15 @@ impl ::core::default::Default for Big {
#[allow(unused_qualifications)]
impl ::core::hash::Hash for Big {
fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) -> () {
match *self {
Self {
b1: ref __self_0_0,
b2: ref __self_0_1,
b3: ref __self_0_2,
b4: ref __self_0_3,
b5: ref __self_0_4,
b6: ref __self_0_5,
b7: ref __self_0_6,
b8: ref __self_0_7 } => {
::core::hash::Hash::hash(&(*__self_0_0), state);
::core::hash::Hash::hash(&(*__self_0_1), state);
::core::hash::Hash::hash(&(*__self_0_2), state);
::core::hash::Hash::hash(&(*__self_0_3), state);
::core::hash::Hash::hash(&(*__self_0_4), state);
::core::hash::Hash::hash(&(*__self_0_5), state);
::core::hash::Hash::hash(&(*__self_0_6), state);
::core::hash::Hash::hash(&(*__self_0_7), state)
}
{
::core::hash::Hash::hash(&self.b1, state);
::core::hash::Hash::hash(&self.b2, state);
::core::hash::Hash::hash(&self.b3, state);
::core::hash::Hash::hash(&self.b4, state);
::core::hash::Hash::hash(&self.b5, state);
::core::hash::Hash::hash(&self.b6, state);
::core::hash::Hash::hash(&self.b7, state);
::core::hash::Hash::hash(&self.b8, state)
}
}
}
@ -358,69 +279,17 @@ impl ::core::marker::StructuralPartialEq for Big {}
impl ::core::cmp::PartialEq for Big {
#[inline]
fn eq(&self, other: &Big) -> bool {
match *other {
Self {
b1: ref __self_1_0,
b2: ref __self_1_1,
b3: ref __self_1_2,
b4: ref __self_1_3,
b5: ref __self_1_4,
b6: ref __self_1_5,
b7: ref __self_1_6,
b8: ref __self_1_7 } =>
match *self {
Self {
b1: ref __self_0_0,
b2: ref __self_0_1,
b3: ref __self_0_2,
b4: ref __self_0_3,
b5: ref __self_0_4,
b6: ref __self_0_5,
b7: ref __self_0_6,
b8: ref __self_0_7 } =>
(*__self_0_0) == (*__self_1_0) &&
(*__self_0_1) == (*__self_1_1) &&
(*__self_0_2) == (*__self_1_2) &&
(*__self_0_3) == (*__self_1_3) &&
(*__self_0_4) == (*__self_1_4) &&
(*__self_0_5) == (*__self_1_5) &&
(*__self_0_6) == (*__self_1_6) &&
(*__self_0_7) == (*__self_1_7),
},
}
self.b1 == other.b1 && self.b2 == other.b2 && self.b3 == other.b3 &&
self.b4 == other.b4 && self.b5 == other.b5 &&
self.b6 == other.b6 && self.b7 == other.b7 &&
self.b8 == other.b8
}
#[inline]
fn ne(&self, other: &Big) -> bool {
match *other {
Self {
b1: ref __self_1_0,
b2: ref __self_1_1,
b3: ref __self_1_2,
b4: ref __self_1_3,
b5: ref __self_1_4,
b6: ref __self_1_5,
b7: ref __self_1_6,
b8: ref __self_1_7 } =>
match *self {
Self {
b1: ref __self_0_0,
b2: ref __self_0_1,
b3: ref __self_0_2,
b4: ref __self_0_3,
b5: ref __self_0_4,
b6: ref __self_0_5,
b7: ref __self_0_6,
b8: ref __self_0_7 } =>
(*__self_0_0) != (*__self_1_0) ||
(*__self_0_1) != (*__self_1_1) ||
(*__self_0_2) != (*__self_1_2) ||
(*__self_0_3) != (*__self_1_3) ||
(*__self_0_4) != (*__self_1_4) ||
(*__self_0_5) != (*__self_1_5) ||
(*__self_0_6) != (*__self_1_6) ||
(*__self_0_7) != (*__self_1_7),
},
}
self.b1 != other.b1 || self.b2 != other.b2 || self.b3 != other.b3 ||
self.b4 != other.b4 || self.b5 != other.b5 ||
self.b6 != other.b6 || self.b7 != other.b7 ||
self.b8 != other.b8
}
}
impl ::core::marker::StructuralEq for Big {}
@ -449,63 +318,37 @@ impl ::core::cmp::PartialOrd for Big {
#[inline]
fn partial_cmp(&self, other: &Big)
-> ::core::option::Option<::core::cmp::Ordering> {
match *other {
Self {
b1: ref __self_1_0,
b2: ref __self_1_1,
b3: ref __self_1_2,
b4: ref __self_1_3,
b5: ref __self_1_4,
b6: ref __self_1_5,
b7: ref __self_1_6,
b8: ref __self_1_7 } =>
match *self {
Self {
b1: ref __self_0_0,
b2: ref __self_0_1,
b3: ref __self_0_2,
b4: ref __self_0_3,
b5: ref __self_0_4,
b6: ref __self_0_5,
b7: ref __self_0_6,
b8: ref __self_0_7 } =>
match ::core::cmp::PartialOrd::partial_cmp(&(*__self_0_0),
&(*__self_1_0)) {
match ::core::cmp::PartialOrd::partial_cmp(&self.b1, &other.b1) {
::core::option::Option::Some(::core::cmp::Ordering::Equal) =>
match ::core::cmp::PartialOrd::partial_cmp(&self.b2,
&other.b2) {
::core::option::Option::Some(::core::cmp::Ordering::Equal)
=>
match ::core::cmp::PartialOrd::partial_cmp(&self.b3,
&other.b3) {
::core::option::Option::Some(::core::cmp::Ordering::Equal)
=>
match ::core::cmp::PartialOrd::partial_cmp(&(*__self_0_1),
&(*__self_1_1)) {
match ::core::cmp::PartialOrd::partial_cmp(&self.b4,
&other.b4) {
::core::option::Option::Some(::core::cmp::Ordering::Equal)
=>
match ::core::cmp::PartialOrd::partial_cmp(&(*__self_0_2),
&(*__self_1_2)) {
match ::core::cmp::PartialOrd::partial_cmp(&self.b5,
&other.b5) {
::core::option::Option::Some(::core::cmp::Ordering::Equal)
=>
match ::core::cmp::PartialOrd::partial_cmp(&(*__self_0_3),
&(*__self_1_3)) {
match ::core::cmp::PartialOrd::partial_cmp(&self.b6,
&other.b6) {
::core::option::Option::Some(::core::cmp::Ordering::Equal)
=>
match ::core::cmp::PartialOrd::partial_cmp(&(*__self_0_4),
&(*__self_1_4)) {
match ::core::cmp::PartialOrd::partial_cmp(&self.b7,
&other.b7) {
::core::option::Option::Some(::core::cmp::Ordering::Equal)
=>
match ::core::cmp::PartialOrd::partial_cmp(&(*__self_0_5),
&(*__self_1_5)) {
match ::core::cmp::PartialOrd::partial_cmp(&self.b8,
&other.b8) {
::core::option::Option::Some(::core::cmp::Ordering::Equal)
=>
match ::core::cmp::PartialOrd::partial_cmp(&(*__self_0_6),
&(*__self_1_6)) {
::core::option::Option::Some(::core::cmp::Ordering::Equal)
=>
match ::core::cmp::PartialOrd::partial_cmp(&(*__self_0_7),
&(*__self_1_7)) {
::core::option::Option::Some(::core::cmp::Ordering::Equal)
=>
::core::option::Option::Some(::core::cmp::Ordering::Equal),
cmp => cmp,
},
cmp => cmp,
},
::core::option::Option::Some(::core::cmp::Ordering::Equal),
cmp => cmp,
},
cmp => cmp,
@ -518,7 +361,9 @@ impl ::core::cmp::PartialOrd for Big {
},
cmp => cmp,
},
cmp => cmp,
},
cmp => cmp,
}
}
}
@ -527,55 +372,23 @@ impl ::core::cmp::PartialOrd for Big {
impl ::core::cmp::Ord for Big {
#[inline]
fn cmp(&self, other: &Big) -> ::core::cmp::Ordering {
match *other {
Self {
b1: ref __self_1_0,
b2: ref __self_1_1,
b3: ref __self_1_2,
b4: ref __self_1_3,
b5: ref __self_1_4,
b6: ref __self_1_5,
b7: ref __self_1_6,
b8: ref __self_1_7 } =>
match *self {
Self {
b1: ref __self_0_0,
b2: ref __self_0_1,
b3: ref __self_0_2,
b4: ref __self_0_3,
b5: ref __self_0_4,
b6: ref __self_0_5,
b7: ref __self_0_6,
b8: ref __self_0_7 } =>
match ::core::cmp::Ord::cmp(&(*__self_0_0), &(*__self_1_0))
{
match ::core::cmp::Ord::cmp(&self.b1, &other.b1) {
::core::cmp::Ordering::Equal =>
match ::core::cmp::Ord::cmp(&self.b2, &other.b2) {
::core::cmp::Ordering::Equal =>
match ::core::cmp::Ord::cmp(&self.b3, &other.b3) {
::core::cmp::Ordering::Equal =>
match ::core::cmp::Ord::cmp(&(*__self_0_1), &(*__self_1_1))
{
match ::core::cmp::Ord::cmp(&self.b4, &other.b4) {
::core::cmp::Ordering::Equal =>
match ::core::cmp::Ord::cmp(&(*__self_0_2), &(*__self_1_2))
{
match ::core::cmp::Ord::cmp(&self.b5, &other.b5) {
::core::cmp::Ordering::Equal =>
match ::core::cmp::Ord::cmp(&(*__self_0_3), &(*__self_1_3))
{
match ::core::cmp::Ord::cmp(&self.b6, &other.b6) {
::core::cmp::Ordering::Equal =>
match ::core::cmp::Ord::cmp(&(*__self_0_4), &(*__self_1_4))
{
match ::core::cmp::Ord::cmp(&self.b7, &other.b7) {
::core::cmp::Ordering::Equal =>
match ::core::cmp::Ord::cmp(&(*__self_0_5), &(*__self_1_5))
{
match ::core::cmp::Ord::cmp(&self.b8, &other.b8) {
::core::cmp::Ordering::Equal =>
match ::core::cmp::Ord::cmp(&(*__self_0_6), &(*__self_1_6))
{
::core::cmp::Ordering::Equal =>
match ::core::cmp::Ord::cmp(&(*__self_0_7), &(*__self_1_7))
{
::core::cmp::Ordering::Equal =>
::core::cmp::Ordering::Equal,
cmp => cmp,
},
cmp => cmp,
},
::core::cmp::Ordering::Equal,
cmp => cmp,
},
cmp => cmp,
@ -588,7 +401,116 @@ impl ::core::cmp::Ord for Big {
},
cmp => cmp,
},
cmp => cmp,
},
cmp => cmp,
}
}
}
// A packed tuple struct.
#[repr(packed)]
struct Packed(u32);
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::clone::Clone for Packed {
#[inline]
fn clone(&self) -> Packed {
{ let _: ::core::clone::AssertParamIsClone<u32>; *self }
}
}
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::marker::Copy for Packed { }
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::fmt::Debug for Packed {
fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result {
{
let Self(__self_0_0) = *self;
::core::fmt::Formatter::debug_tuple_field1_finish(f, "Packed",
&&__self_0_0)
}
}
}
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::default::Default for Packed {
#[inline]
fn default() -> Packed { Packed(::core::default::Default::default()) }
}
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::hash::Hash for Packed {
fn hash<__H: ::core::hash::Hasher>(&self, state: &mut __H) -> () {
{
let Self(__self_0_0) = *self;
{ ::core::hash::Hash::hash(&__self_0_0, state) }
}
}
}
impl ::core::marker::StructuralPartialEq for Packed {}
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::cmp::PartialEq for Packed {
#[inline]
fn eq(&self, other: &Packed) -> bool {
{
let Self(__self_0_0) = *self;
let Self(__self_1_0) = *other;
__self_0_0 == __self_1_0
}
}
#[inline]
fn ne(&self, other: &Packed) -> bool {
{
let Self(__self_0_0) = *self;
let Self(__self_1_0) = *other;
__self_0_0 != __self_1_0
}
}
}
impl ::core::marker::StructuralEq for Packed {}
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::cmp::Eq for Packed {
#[inline]
#[doc(hidden)]
#[no_coverage]
fn assert_receiver_is_total_eq(&self) -> () {
{ let _: ::core::cmp::AssertParamIsEq<u32>; }
}
}
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::cmp::PartialOrd for Packed {
#[inline]
fn partial_cmp(&self, other: &Packed)
-> ::core::option::Option<::core::cmp::Ordering> {
{
let Self(__self_0_0) = *self;
let Self(__self_1_0) = *other;
match ::core::cmp::PartialOrd::partial_cmp(&__self_0_0,
&__self_1_0) {
::core::option::Option::Some(::core::cmp::Ordering::Equal) =>
::core::option::Option::Some(::core::cmp::Ordering::Equal),
cmp => cmp,
}
}
}
}
#[automatically_derived]
#[allow(unused_qualifications)]
impl ::core::cmp::Ord for Packed {
#[inline]
fn cmp(&self, other: &Packed) -> ::core::cmp::Ordering {
{
let Self(__self_0_0) = *self;
let Self(__self_1_0) = *other;
match ::core::cmp::Ord::cmp(&__self_0_0, &__self_1_0) {
::core::cmp::Ordering::Equal => ::core::cmp::Ordering::Equal,
cmp => cmp,
}
}
}
}
@ -738,10 +660,10 @@ impl ::core::fmt::Debug for Mixed {
(&Mixed::Q,) => ::core::fmt::Formatter::write_str(f, "Q"),
(&Mixed::R(ref __self_0),) =>
::core::fmt::Formatter::debug_tuple_field1_finish(f, "R",
&&(*__self_0)),
&&*__self_0),
(&Mixed::S { d1: ref __self_0, d2: ref __self_1 },) =>
::core::fmt::Formatter::debug_struct_field2_finish(f, "S",
"d1", &&(*__self_0), "d2", &&(*__self_1)),
"d1", &&*__self_0, "d2", &&*__self_1),
}
}
}
@ -759,13 +681,13 @@ impl ::core::hash::Hash for Mixed {
(&Mixed::R(ref __self_0),) => {
::core::hash::Hash::hash(&::core::intrinsics::discriminant_value(self),
state);
::core::hash::Hash::hash(&(*__self_0), state)
::core::hash::Hash::hash(&*__self_0, state)
}
(&Mixed::S { d1: ref __self_0, d2: ref __self_1 },) => {
::core::hash::Hash::hash(&::core::intrinsics::discriminant_value(self),
state);
::core::hash::Hash::hash(&(*__self_0), state);
::core::hash::Hash::hash(&(*__self_1), state)
::core::hash::Hash::hash(&*__self_0, state);
::core::hash::Hash::hash(&*__self_1, state)
}
_ => {
::core::hash::Hash::hash(&::core::intrinsics::discriminant_value(self),
@ -786,10 +708,10 @@ impl ::core::cmp::PartialEq for Mixed {
if __self_vi == __arg_1_vi {
match (&*self, &*other) {
(&Mixed::R(ref __self_0), &Mixed::R(ref __arg_1_0)) =>
(*__self_0) == (*__arg_1_0),
*__self_0 == *__arg_1_0,
(&Mixed::S { d1: ref __self_0, d2: ref __self_1 },
&Mixed::S { d1: ref __arg_1_0, d2: ref __arg_1_1 }) =>
(*__self_0) == (*__arg_1_0) && (*__self_1) == (*__arg_1_1),
*__self_0 == *__arg_1_0 && *__self_1 == *__arg_1_1,
_ => true,
}
} else { false }
@ -803,10 +725,10 @@ impl ::core::cmp::PartialEq for Mixed {
if __self_vi == __arg_1_vi {
match (&*self, &*other) {
(&Mixed::R(ref __self_0), &Mixed::R(ref __arg_1_0)) =>
(*__self_0) != (*__arg_1_0),
*__self_0 != *__arg_1_0,
(&Mixed::S { d1: ref __self_0, d2: ref __self_1 },
&Mixed::S { d1: ref __arg_1_0, d2: ref __arg_1_1 }) =>
(*__self_0) != (*__arg_1_0) || (*__self_1) != (*__arg_1_1),
*__self_0 != *__arg_1_0 || *__self_1 != *__arg_1_1,
_ => false,
}
} else { true }
@ -840,8 +762,8 @@ impl ::core::cmp::PartialOrd for Mixed {
if __self_vi == __arg_1_vi {
match (&*self, &*other) {
(&Mixed::R(ref __self_0), &Mixed::R(ref __arg_1_0)) =>
match ::core::cmp::PartialOrd::partial_cmp(&(*__self_0),
&(*__arg_1_0)) {
match ::core::cmp::PartialOrd::partial_cmp(&*__self_0,
&*__arg_1_0) {
::core::option::Option::Some(::core::cmp::Ordering::Equal)
=>
::core::option::Option::Some(::core::cmp::Ordering::Equal),
@ -849,12 +771,12 @@ impl ::core::cmp::PartialOrd for Mixed {
},
(&Mixed::S { d1: ref __self_0, d2: ref __self_1 },
&Mixed::S { d1: ref __arg_1_0, d2: ref __arg_1_1 }) =>
match ::core::cmp::PartialOrd::partial_cmp(&(*__self_0),
&(*__arg_1_0)) {
match ::core::cmp::PartialOrd::partial_cmp(&*__self_0,
&*__arg_1_0) {
::core::option::Option::Some(::core::cmp::Ordering::Equal)
=>
match ::core::cmp::PartialOrd::partial_cmp(&(*__self_1),
&(*__arg_1_1)) {
match ::core::cmp::PartialOrd::partial_cmp(&*__self_1,
&*__arg_1_1) {
::core::option::Option::Some(::core::cmp::Ordering::Equal)
=>
::core::option::Option::Some(::core::cmp::Ordering::Equal),
@ -883,16 +805,16 @@ impl ::core::cmp::Ord for Mixed {
if __self_vi == __arg_1_vi {
match (&*self, &*other) {
(&Mixed::R(ref __self_0), &Mixed::R(ref __arg_1_0)) =>
match ::core::cmp::Ord::cmp(&(*__self_0), &(*__arg_1_0)) {
match ::core::cmp::Ord::cmp(&*__self_0, &*__arg_1_0) {
::core::cmp::Ordering::Equal =>
::core::cmp::Ordering::Equal,
cmp => cmp,
},
(&Mixed::S { d1: ref __self_0, d2: ref __self_1 },
&Mixed::S { d1: ref __arg_1_0, d2: ref __arg_1_1 }) =>
match ::core::cmp::Ord::cmp(&(*__self_0), &(*__arg_1_0)) {
match ::core::cmp::Ord::cmp(&*__self_0, &*__arg_1_0) {
::core::cmp::Ordering::Equal =>
match ::core::cmp::Ord::cmp(&(*__self_1), &(*__arg_1_1)) {
match ::core::cmp::Ord::cmp(&*__self_1, &*__arg_1_1) {
::core::cmp::Ordering::Equal =>
::core::cmp::Ordering::Equal,
cmp => cmp,
@ -916,11 +838,11 @@ impl ::core::clone::Clone for Fielded {
fn clone(&self) -> Fielded {
match (&*self,) {
(&Fielded::X(ref __self_0),) =>
Fielded::X(::core::clone::Clone::clone(&(*__self_0))),
Fielded::X(::core::clone::Clone::clone(&*__self_0)),
(&Fielded::Y(ref __self_0),) =>
Fielded::Y(::core::clone::Clone::clone(&(*__self_0))),
Fielded::Y(::core::clone::Clone::clone(&*__self_0)),
(&Fielded::Z(ref __self_0),) =>
Fielded::Z(::core::clone::Clone::clone(&(*__self_0))),
Fielded::Z(::core::clone::Clone::clone(&*__self_0)),
}
}
}
@ -931,13 +853,13 @@ impl ::core::fmt::Debug for Fielded {
match (&*self,) {
(&Fielded::X(ref __self_0),) =>
::core::fmt::Formatter::debug_tuple_field1_finish(f, "X",
&&(*__self_0)),
&&*__self_0),
(&Fielded::Y(ref __self_0),) =>
::core::fmt::Formatter::debug_tuple_field1_finish(f, "Y",
&&(*__self_0)),
&&*__self_0),
(&Fielded::Z(ref __self_0),) =>
::core::fmt::Formatter::debug_tuple_field1_finish(f, "Z",
&&(*__self_0)),
&&*__self_0),
}
}
}
@ -949,17 +871,17 @@ impl ::core::hash::Hash for Fielded {
(&Fielded::X(ref __self_0),) => {
::core::hash::Hash::hash(&::core::intrinsics::discriminant_value(self),
state);
::core::hash::Hash::hash(&(*__self_0), state)
::core::hash::Hash::hash(&*__self_0, state)
}
(&Fielded::Y(ref __self_0),) => {
::core::hash::Hash::hash(&::core::intrinsics::discriminant_value(self),
state);
::core::hash::Hash::hash(&(*__self_0), state)
::core::hash::Hash::hash(&*__self_0, state)
}
(&Fielded::Z(ref __self_0),) => {
::core::hash::Hash::hash(&::core::intrinsics::discriminant_value(self),
state);
::core::hash::Hash::hash(&(*__self_0), state)
::core::hash::Hash::hash(&*__self_0, state)
}
}
}
@ -976,11 +898,11 @@ impl ::core::cmp::PartialEq for Fielded {
if __self_vi == __arg_1_vi {
match (&*self, &*other) {
(&Fielded::X(ref __self_0), &Fielded::X(ref __arg_1_0)) =>
(*__self_0) == (*__arg_1_0),
*__self_0 == *__arg_1_0,
(&Fielded::Y(ref __self_0), &Fielded::Y(ref __arg_1_0)) =>
(*__self_0) == (*__arg_1_0),
*__self_0 == *__arg_1_0,
(&Fielded::Z(ref __self_0), &Fielded::Z(ref __arg_1_0)) =>
(*__self_0) == (*__arg_1_0),
*__self_0 == *__arg_1_0,
_ => unsafe { ::core::intrinsics::unreachable() }
}
} else { false }
@ -994,11 +916,11 @@ impl ::core::cmp::PartialEq for Fielded {
if __self_vi == __arg_1_vi {
match (&*self, &*other) {
(&Fielded::X(ref __self_0), &Fielded::X(ref __arg_1_0)) =>
(*__self_0) != (*__arg_1_0),
*__self_0 != *__arg_1_0,
(&Fielded::Y(ref __self_0), &Fielded::Y(ref __arg_1_0)) =>
(*__self_0) != (*__arg_1_0),
*__self_0 != *__arg_1_0,
(&Fielded::Z(ref __self_0), &Fielded::Z(ref __arg_1_0)) =>
(*__self_0) != (*__arg_1_0),
*__self_0 != *__arg_1_0,
_ => unsafe { ::core::intrinsics::unreachable() }
}
} else { true }
@ -1032,24 +954,24 @@ impl ::core::cmp::PartialOrd for Fielded {
if __self_vi == __arg_1_vi {
match (&*self, &*other) {
(&Fielded::X(ref __self_0), &Fielded::X(ref __arg_1_0)) =>
match ::core::cmp::PartialOrd::partial_cmp(&(*__self_0),
&(*__arg_1_0)) {
match ::core::cmp::PartialOrd::partial_cmp(&*__self_0,
&*__arg_1_0) {
::core::option::Option::Some(::core::cmp::Ordering::Equal)
=>
::core::option::Option::Some(::core::cmp::Ordering::Equal),
cmp => cmp,
},
(&Fielded::Y(ref __self_0), &Fielded::Y(ref __arg_1_0)) =>
match ::core::cmp::PartialOrd::partial_cmp(&(*__self_0),
&(*__arg_1_0)) {
match ::core::cmp::PartialOrd::partial_cmp(&*__self_0,
&*__arg_1_0) {
::core::option::Option::Some(::core::cmp::Ordering::Equal)
=>
::core::option::Option::Some(::core::cmp::Ordering::Equal),
cmp => cmp,
},
(&Fielded::Z(ref __self_0), &Fielded::Z(ref __arg_1_0)) =>
match ::core::cmp::PartialOrd::partial_cmp(&(*__self_0),
&(*__arg_1_0)) {
match ::core::cmp::PartialOrd::partial_cmp(&*__self_0,
&*__arg_1_0) {
::core::option::Option::Some(::core::cmp::Ordering::Equal)
=>
::core::option::Option::Some(::core::cmp::Ordering::Equal),
@ -1075,19 +997,19 @@ impl ::core::cmp::Ord for Fielded {
if __self_vi == __arg_1_vi {
match (&*self, &*other) {
(&Fielded::X(ref __self_0), &Fielded::X(ref __arg_1_0)) =>
match ::core::cmp::Ord::cmp(&(*__self_0), &(*__arg_1_0)) {
match ::core::cmp::Ord::cmp(&*__self_0, &*__arg_1_0) {
::core::cmp::Ordering::Equal =>
::core::cmp::Ordering::Equal,
cmp => cmp,
},
(&Fielded::Y(ref __self_0), &Fielded::Y(ref __arg_1_0)) =>
match ::core::cmp::Ord::cmp(&(*__self_0), &(*__arg_1_0)) {
match ::core::cmp::Ord::cmp(&*__self_0, &*__arg_1_0) {
::core::cmp::Ordering::Equal =>
::core::cmp::Ordering::Equal,
cmp => cmp,
},
(&Fielded::Z(ref __self_0), &Fielded::Z(ref __arg_1_0)) =>
match ::core::cmp::Ord::cmp(&(*__self_0), &(*__arg_1_0)) {
match ::core::cmp::Ord::cmp(&*__self_0, &*__arg_1_0) {
::core::cmp::Ordering::Equal =>
::core::cmp::Ordering::Equal,
cmp => cmp,