Adopt let else in more places

est31 2022-02-19 00:48:49 +01:00
parent b8c56fa8c3
commit 2ef8af6619
132 changed files with 539 additions and 881 deletions
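
For context on the pattern this commit applies throughout: `let ... else` lets a refutable pattern bind directly in a `let`, with a mandatory diverging `else` block replacing the old two-arm `match`. A minimal, self-contained sketch of the before/after shape (illustrative code, not taken from the diff; it compiles on any toolchain where `let ... else` is available):

    // Before: bind via an explicit match, diverging in the "failure" arm.
    fn double_or_zero_match(input: Option<u32>) -> u32 {
        let value = match input {
            Some(value) => value,
            None => return 0,
        };
        value * 2
    }

    // After: `let ... else` binds `value` directly; the else block must
    // diverge (return, continue, break, panic, ...).
    fn double_or_zero_let_else(input: Option<u32>) -> u32 {
        let Some(value) = input else {
            return 0;
        };
        value * 2
    }

    fn main() {
        assert_eq!(double_or_zero_match(Some(21)), 42);
        assert_eq!(double_or_zero_let_else(None), 0);
    }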

View File

@ -338,9 +338,8 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
let idx2 = *o.get();
let &(ref op2, op_sp2) = &operands[idx2];
let reg2 = match op2.reg() {
Some(asm::InlineAsmRegOrRegClass::Reg(r)) => r,
_ => unreachable!(),
let Some(asm::InlineAsmRegOrRegClass::Reg(reg2)) = op2.reg() else {
unreachable!();
};
let msg = format!(

View File

@ -326,9 +326,8 @@ impl<'hir> LoweringContext<'_, 'hir> {
args: Vec<AstP<Expr>>,
legacy_args_idx: &[usize],
) -> hir::ExprKind<'hir> {
let path = match f.kind {
ExprKind::Path(None, ref mut path) => path,
_ => unreachable!(),
let ExprKind::Path(None, ref mut path) = f.kind else {
unreachable!();
};
// Split the arguments into const generics and normal arguments

View File

@ -1331,9 +1331,8 @@ impl<'hir> LoweringContext<'_, 'hir> {
// keep track of the Span info. Now, `add_implicitly_sized` in `AstConv` checks both param bounds and
// where clauses for `?Sized`.
for pred in &generics.where_clause.predicates {
let bound_pred = match *pred {
WherePredicate::BoundPredicate(ref bound_pred) => bound_pred,
_ => continue,
let WherePredicate::BoundPredicate(ref bound_pred) = *pred else {
continue;
};
let compute_is_param = || {
// Check if the where clause type is a plain type parameter.

View File

@ -482,9 +482,8 @@ impl<'a> AstValidator<'a> {
}
fn check_foreign_kind_bodyless(&self, ident: Ident, kind: &str, body: Option<Span>) {
let body = match body {
None => return,
Some(body) => body,
let Some(body) = body else {
return;
};
self.err_handler()
.struct_span_err(ident.span, &format!("incorrect `{}` inside `extern` block", kind))
@ -504,9 +503,8 @@ impl<'a> AstValidator<'a> {
/// An `fn` in `extern { ... }` cannot have a body `{ ... }`.
fn check_foreign_fn_bodyless(&self, ident: Ident, body: Option<&Block>) {
let body = match body {
None => return,
Some(body) => body,
let Some(body) = body else {
return;
};
self.err_handler()
.struct_span_err(ident.span, "incorrect function inside `extern` block")

View File

@ -57,9 +57,8 @@ impl<'a> Visitor<'a> for ShowSpanVisitor<'a> {
}
pub fn run(span_diagnostic: &rustc_errors::Handler, mode: &str, krate: &ast::Crate) {
let mode = match mode.parse().ok() {
Some(mode) => mode,
None => return,
let Ok(mode) = mode.parse() else {
return;
};
let mut v = ShowSpanVisitor { span_diagnostic, mode };
visit::walk_crate(&mut v, krate);

View File

@ -531,17 +531,14 @@ pub fn eval_condition(
return false;
}
};
let min_version = match parse_version(min_version.as_str(), false) {
Some(ver) => ver,
None => {
sess.span_diagnostic
.struct_span_warn(
*span,
"unknown version literal format, assuming it refers to a future version",
)
.emit();
return false;
}
let Some(min_version) = parse_version(min_version.as_str(), false) else {
sess.span_diagnostic
.struct_span_warn(
*span,
"unknown version literal format, assuming it refers to a future version",
)
.emit();
return false;
};
let rustc_version = parse_version(env!("CFG_RELEASE"), true).unwrap();
@ -644,9 +641,8 @@ where
break;
}
let meta = match attr.meta() {
Some(meta) => meta,
None => continue,
let Some(meta) = attr.meta() else {
continue;
};
let mut since = None;
let mut note = None;

View File

@ -2071,11 +2071,8 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
) = rvalue
{
for operand in operands {
let assigned_from = match operand {
Operand::Copy(assigned_from) | Operand::Move(assigned_from) => {
assigned_from
}
_ => continue,
let (Operand::Copy(assigned_from) | Operand::Move(assigned_from)) = operand else {
continue;
};
debug!(
"annotate_argument_and_return_for_borrow: assigned_from={:?}",
@ -2083,10 +2080,8 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
);
// Find the local from the operand.
let assigned_from_local = match assigned_from.local_or_deref_local()
{
Some(local) => local,
None => continue,
let Some(assigned_from_local) = assigned_from.local_or_deref_local() else {
continue;
};
if assigned_from_local != target {
@ -2138,10 +2133,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
);
// Find the local from the rvalue.
let assigned_from_local = match assigned_from.local_or_deref_local() {
Some(local) => local,
None => continue,
};
let Some(assigned_from_local) = assigned_from.local_or_deref_local() else { continue };
debug!(
"annotate_argument_and_return_for_borrow: \
assigned_from_local={:?}",
@ -2189,11 +2181,8 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
assigned_to, args
);
for operand in args {
let assigned_from = match operand {
Operand::Copy(assigned_from) | Operand::Move(assigned_from) => {
assigned_from
}
_ => continue,
let (Operand::Copy(assigned_from) | Operand::Move(assigned_from)) = operand else {
continue;
};
debug!(
"annotate_argument_and_return_for_borrow: assigned_from={:?}",

View File

@ -650,13 +650,10 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
// The only kind of statement that we care about is assignments...
if let StatementKind::Assign(box (place, rvalue)) = &stmt.kind {
let into = match place.local_or_deref_local() {
Some(into) => into,
None => {
// Continue at the next location.
queue.push(current_location.successor_within_block());
continue;
}
let Some(into) = place.local_or_deref_local() else {
// Continue at the next location.
queue.push(current_location.successor_within_block());
continue;
};
match rvalue {

View File

@ -444,10 +444,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
debug!("borrowed_content_source: init={:?}", init);
// We're only interested in statements that initialized a value, not the
// initializations from arguments.
let loc = match init.location {
InitLocation::Statement(stmt) => stmt,
_ => continue,
};
let InitLocation::Statement(loc) = init.location else { continue };
let bbd = &self.body[loc.block];
let is_terminator = bbd.statements.len() == loc.statement_index;
@ -787,9 +784,8 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
) -> UseSpans<'tcx> {
use self::UseSpans::*;
let stmt = match self.body[location.block].statements.get(location.statement_index) {
Some(stmt) => stmt,
None => return OtherUse(self.body.source_info(location).span),
let Some(stmt) = self.body[location.block].statements.get(location.statement_index) else {
return OtherUse(self.body.source_info(location).span);
};
debug!("move_spans: moved_place={:?} location={:?} stmt={:?}", moved_place, location, stmt);

View File

@ -188,10 +188,9 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
}
// Error with the pattern
LookupResult::Exact(_) => {
let mpi = match self.move_data.rev_lookup.find(move_from.as_ref()) {
LookupResult::Parent(Some(mpi)) => mpi,
let LookupResult::Parent(Some(mpi)) = self.move_data.rev_lookup.find(move_from.as_ref()) else {
// move_from should be a projection from match_place.
_ => unreachable!("Probably not unreachable..."),
unreachable!("Probably not unreachable...");
};
for ge in &mut *grouped_errors {
if let GroupedMoveError::MovesFromValue {

View File

@ -1914,10 +1914,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
// without going over a Deref.
let mut shortest_uninit_seen = None;
for prefix in this.prefixes(base, PrefixSet::Shallow) {
let mpi = match this.move_path_for_place(prefix) {
Some(mpi) => mpi,
None => continue,
};
let Some(mpi) = this.move_path_for_place(prefix) else { continue };
if maybe_uninits.contains(mpi) {
debug!(

View File

@ -913,9 +913,8 @@ impl<'tcx> RegionInferenceContext<'tcx> {
let TypeTest { generic_kind, lower_bound, locations, verify_bound: _ } = type_test;
let generic_ty = generic_kind.to_ty(tcx);
let subject = match self.try_promote_type_test_subject(infcx, generic_ty) {
Some(s) => s,
None => return false,
let Some(subject) = self.try_promote_type_test_subject(infcx, generic_ty) else {
return false;
};
// For each region outlived by lower_bound find a non-local,
@ -1623,15 +1622,14 @@ impl<'tcx> RegionInferenceContext<'tcx> {
// If we have some bound universal region `'a`, then the only
// elements it can contain is itself -- we don't know anything
// else about it!
let error_element = match {
let Some(error_element) = ({
self.scc_values.elements_contained_in(longer_fr_scc).find(|element| match element {
RegionElement::Location(_) => true,
RegionElement::RootUniversalRegion(_) => true,
RegionElement::PlaceholderRegion(placeholder1) => placeholder != *placeholder1,
})
} {
Some(v) => v,
None => return,
}) else {
return;
};
debug!("check_bound_universal_region: error_element = {:?}", error_element);

View File

@ -810,13 +810,12 @@ impl<'a, 'b, 'tcx> TypeVerifier<'a, 'b, 'tcx> {
ty::Adt(adt_def, substs) => (&adt_def.variants[variant_index], substs),
ty::Generator(def_id, substs, _) => {
let mut variants = substs.as_generator().state_tys(def_id, tcx);
let mut variant = match variants.nth(variant_index.into()) {
Some(v) => v,
None => bug!(
let Some(mut variant) = variants.nth(variant_index.into()) else {
bug!(
"variant_index of generator out of range: {:?}/{:?}",
variant_index,
substs.as_generator().state_tys(def_id, tcx).count()
),
);
};
return match variant.nth(field.index()) {
Some(ty) => Ok(ty),
@ -2178,35 +2177,29 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
}
CastKind::Pointer(PointerCast::MutToConstPointer) => {
let ty_from = match op.ty(body, tcx).kind() {
ty::RawPtr(ty::TypeAndMut {
ty: ty_from,
mutbl: hir::Mutability::Mut,
}) => ty_from,
_ => {
span_mirbug!(
self,
rvalue,
"unexpected base type for cast {:?}",
ty,
);
return;
}
let ty::RawPtr(ty::TypeAndMut {
ty: ty_from,
mutbl: hir::Mutability::Mut,
}) = op.ty(body, tcx).kind() else {
span_mirbug!(
self,
rvalue,
"unexpected base type for cast {:?}",
ty,
);
return;
};
let ty_to = match ty.kind() {
ty::RawPtr(ty::TypeAndMut {
ty: ty_to,
mutbl: hir::Mutability::Not,
}) => ty_to,
_ => {
span_mirbug!(
self,
rvalue,
"unexpected target type for cast {:?}",
ty,
);
return;
}
let ty::RawPtr(ty::TypeAndMut {
ty: ty_to,
mutbl: hir::Mutability::Not,
}) = ty.kind() else {
span_mirbug!(
self,
rvalue,
"unexpected target type for cast {:?}",
ty,
);
return;
};
if let Err(terr) = self.sub_types(
*ty_from,
@ -2238,17 +2231,14 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
_ => None,
};
let (ty_elem, ty_mut) = match opt_ty_elem_mut {
Some(ty_elem_mut) => ty_elem_mut,
None => {
span_mirbug!(
self,
rvalue,
"ArrayToPointer cast from unexpected type {:?}",
ty_from,
);
return;
}
let Some((ty_elem, ty_mut)) = opt_ty_elem_mut else {
span_mirbug!(
self,
rvalue,
"ArrayToPointer cast from unexpected type {:?}",
ty_from,
);
return;
};
let (ty_to, ty_to_mut) = match ty.kind() {

View File

@ -641,9 +641,8 @@ impl<'cx, 'tcx> UniversalRegionsBuilder<'cx, 'tcx> {
let (&output, tuplized_inputs) =
inputs_and_output.skip_binder().split_last().unwrap();
assert_eq!(tuplized_inputs.len(), 1, "multiple closure inputs");
let inputs = match tuplized_inputs[0].kind() {
ty::Tuple(inputs) => inputs,
_ => bug!("closure inputs not a tuple: {:?}", tuplized_inputs[0]),
let ty::Tuple(inputs) = tuplized_inputs[0].kind() else {
bug!("closure inputs not a tuple: {:?}", tuplized_inputs[0]);
};
ty::Binder::bind_with_vars(

View File

@ -44,9 +44,8 @@ impl MultiItemModifier for Expander {
template,
);
let path = match validate_input(ecx, meta_item) {
Some(path) => path,
None => return ExpandResult::Ready(Vec::new()),
let Some(path) = validate_input(ecx, meta_item) else {
return ExpandResult::Ready(Vec::new());
};
match ecx.resolver.cfg_accessible(ecx.current_expansion.id, path) {

View File

@ -9,9 +9,8 @@ pub fn expand_compile_error<'cx>(
sp: Span,
tts: TokenStream,
) -> Box<dyn base::MacResult + 'cx> {
let var = match get_single_str_from_tts(cx, sp, tts, "compile_error!") {
None => return DummyResult::any(sp),
Some(v) => v,
let Some(var) = get_single_str_from_tts(cx, sp, tts, "compile_error!") else {
return DummyResult::any(sp);
};
cx.span_err(sp, &var);

View File

@ -10,9 +10,8 @@ pub fn expand_concat(
sp: rustc_span::Span,
tts: TokenStream,
) -> Box<dyn base::MacResult + 'static> {
let es = match base::get_exprs_from_tts(cx, sp, tts) {
Some(e) => e,
None => return DummyResult::any(sp),
let Some(es) = base::get_exprs_from_tts(cx, sp, tts) else {
return DummyResult::any(sp);
};
let mut accumulator = String::new();
let mut missing_literal = vec![];

View File

@ -121,9 +121,8 @@ pub fn expand_concat_bytes(
sp: rustc_span::Span,
tts: TokenStream,
) -> Box<dyn base::MacResult + 'static> {
let es = match base::get_exprs_from_tts(cx, sp, tts) {
Some(e) => e,
None => return DummyResult::any(sp),
let Some(es) = base::get_exprs_from_tts(cx, sp, tts) else {
return DummyResult::any(sp);
};
let mut accumulator = Vec::new();
let mut missing_literals = vec![];

View File

@ -196,12 +196,11 @@ fn cs_clone(
let fields = all_fields
.iter()
.map(|field| {
let ident = match field.name {
Some(i) => i,
None => cx.span_bug(
let Some(ident) = field.name else {
cx.span_bug(
trait_span,
&format!("unnamed field in normal struct in `derive({})`", name,),
),
);
};
let call = subcall(cx, field);
cx.field_imm(field.span, ident, call)

View File

@ -83,9 +83,8 @@ pub fn cs_cmp(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>) -> P<
// }
let new = {
let other_f = match other_fs {
[o_f] => o_f,
_ => cx.span_bug(span, "not exactly 2 arguments in `derive(Ord)`"),
let [other_f] = other_fs else {
cx.span_bug(span, "not exactly 2 arguments in `derive(Ord)`");
};
let args =

View File

@ -26,9 +26,8 @@ pub fn expand_deriving_partial_eq(
base: bool,
) -> P<Expr> {
let op = |cx: &mut ExtCtxt<'_>, span: Span, self_f: P<Expr>, other_fs: &[P<Expr>]| {
let other_f = match other_fs {
[o_f] => o_f,
_ => cx.span_bug(span, "not exactly 2 arguments in `derive(PartialEq)`"),
let [other_f] = other_fs else {
cx.span_bug(span, "not exactly 2 arguments in `derive(PartialEq)`");
};
cx.expr_binary(span, op, self_f, other_f.clone())

View File

@ -86,9 +86,8 @@ pub fn cs_partial_cmp(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_
// }
let new = {
let other_f = match other_fs {
[o_f] => o_f,
_ => cx.span_bug(span, "not exactly 2 arguments in `derive(PartialOrd)`"),
let [other_f] = other_fs else {
cx.span_bug(span, "not exactly 2 arguments in `derive(PartialOrd)`");
};
let args =

View File

@ -101,9 +101,8 @@ fn default_enum_substructure(
trait_span: Span,
enum_def: &EnumDef,
) -> P<Expr> {
let default_variant = match extract_default_variant(cx, enum_def, trait_span) {
Ok(value) => value,
Err(()) => return DummyResult::raw_expr(trait_span, true),
let Ok(default_variant) = extract_default_variant(cx, enum_def, trait_span) else {
return DummyResult::raw_expr(trait_span, true);
};
// At this point, we know that there is exactly one variant with a `#[default]` attribute. The

View File

@ -48,9 +48,8 @@ pub fn expand_deriving_hash(
}
fn hash_substructure(cx: &mut ExtCtxt<'_>, trait_span: Span, substr: &Substructure<'_>) -> P<Expr> {
let state_expr = match substr.nonself_args {
[o_f] => o_f,
_ => cx.span_bug(trait_span, "incorrect number of arguments in `derive(Hash)`"),
let [state_expr] = substr.nonself_args else {
cx.span_bug(trait_span, "incorrect number of arguments in `derive(Hash)`");
};
let call_hash = |span, thing_expr| {
let hash_path = {

View File

@ -116,9 +116,8 @@ fn inject_impl_of_structural_trait(
structural_path: generic::ty::Path,
push: &mut dyn FnMut(Annotatable),
) {
let item = match *item {
Annotatable::Item(ref item) => item,
_ => unreachable!(),
let Annotatable::Item(ref item) = *item else {
unreachable!();
};
let generics = match item.kind {

View File

@ -16,9 +16,8 @@ pub fn expand_option_env<'cx>(
sp: Span,
tts: TokenStream,
) -> Box<dyn base::MacResult + 'cx> {
let var = match get_single_str_from_tts(cx, sp, tts, "option_env!") {
None => return DummyResult::any(sp),
Some(v) => v,
let Some(var) = get_single_str_from_tts(cx, sp, tts, "option_env!") else {
return DummyResult::any(sp);
};
let sp = cx.with_def_site_ctxt(sp);
@ -62,9 +61,8 @@ pub fn expand_env<'cx>(
Some(exprs) => exprs.into_iter(),
};
let var = match expr_to_string(cx, exprs.next().unwrap(), "expected string literal") {
None => return DummyResult::any(sp),
Some((v, _style)) => v,
let Some((var, _style)) = expr_to_string(cx, exprs.next().unwrap(), "expected string literal") else {
return DummyResult::any(sp);
};
let msg = match exprs.next() {
None => Symbol::intern(&format!("environment variable `{}` not defined", var)),

View File

@ -108,11 +108,9 @@ impl<'a> CollectProcMacros<'a> {
}
fn collect_custom_derive(&mut self, item: &'a ast::Item, attr: &'a ast::Attribute) {
let (trait_name, proc_attrs) =
match parse_macro_name_and_helper_attrs(self.handler, attr, "derive") {
Some(name_and_attrs) => name_and_attrs,
None => return,
};
let Some((trait_name, proc_attrs)) = parse_macro_name_and_helper_attrs(self.handler, attr, "derive") else {
return;
};
if self.in_root && item.vis.kind.is_pub() {
self.macros.push(ProcMacro::Derive(ProcMacroDerive {
@ -224,15 +222,12 @@ impl<'a> Visitor<'a> for CollectProcMacros<'a> {
}
}
let attr = match found_attr {
None => {
self.check_not_pub_in_root(&item.vis, self.source_map.guess_head_span(item.span));
let prev_in_root = mem::replace(&mut self.in_root, false);
visit::walk_item(self, item);
self.in_root = prev_in_root;
return;
}
Some(attr) => attr,
let Some(attr) = found_attr else {
self.check_not_pub_in_root(&item.vis, self.source_map.guess_head_span(item.span));
let prev_in_root = mem::replace(&mut self.in_root, false);
visit::walk_item(self, item);
self.in_root = prev_in_root;
return;
};
if !is_fn {

View File

@ -98,9 +98,8 @@ pub fn expand_include<'cx>(
tts: TokenStream,
) -> Box<dyn base::MacResult + 'cx> {
let sp = cx.with_def_site_ctxt(sp);
let file = match get_single_str_from_tts(cx, sp, tts, "include!") {
Some(f) => f,
None => return DummyResult::any(sp),
let Some(file) = get_single_str_from_tts(cx, sp, tts, "include!") else {
return DummyResult::any(sp);
};
// The file will be added to the code map by the parser
let file = match cx.resolve_path(file, sp) {
@ -169,9 +168,8 @@ pub fn expand_include_str(
tts: TokenStream,
) -> Box<dyn base::MacResult + 'static> {
let sp = cx.with_def_site_ctxt(sp);
let file = match get_single_str_from_tts(cx, sp, tts, "include_str!") {
Some(f) => f,
None => return DummyResult::any(sp),
let Some(file) = get_single_str_from_tts(cx, sp, tts, "include_str!") else {
return DummyResult::any(sp);
};
let file = match cx.resolve_path(file, sp) {
Ok(f) => f,
@ -204,9 +202,8 @@ pub fn expand_include_bytes(
tts: TokenStream,
) -> Box<dyn base::MacResult + 'static> {
let sp = cx.with_def_site_ctxt(sp);
let file = match get_single_str_from_tts(cx, sp, tts, "include_bytes!") {
Some(f) => f,
None => return DummyResult::any(sp),
let Some(file) = get_single_str_from_tts(cx, sp, tts, "include_bytes!") else {
return DummyResult::any(sp);
};
let file = match cx.resolve_path(file, sp) {
Ok(f) => f,

View File

@ -310,10 +310,7 @@ impl<'a> LlvmArchiveBuilder<'a> {
if let Some(archive) = self.src_archive() {
for child in archive.iter() {
let child = child.map_err(string_to_io_error)?;
let child_name = match child.name() {
Some(s) => s,
None => continue,
};
let Some(child_name) = child.name() else { continue };
if removals.iter().any(|r| r == child_name) {
continue;
}

View File

@ -826,20 +826,14 @@ pub(crate) unsafe fn codegen(
let input =
unsafe { slice::from_raw_parts(input_ptr as *const u8, input_len as usize) };
let input = match str::from_utf8(input) {
Ok(s) => s,
Err(_) => return 0,
};
let Ok(input) = str::from_utf8(input) else { return 0 };
let output = unsafe {
slice::from_raw_parts_mut(output_ptr as *mut u8, output_len as usize)
};
let mut cursor = io::Cursor::new(output);
let demangled = match rustc_demangle::try_demangle(input) {
Ok(d) => d,
Err(_) => return 0,
};
let Ok(demangled) = rustc_demangle::try_demangle(input) else { return 0 };
if write!(cursor, "{:#}", demangled).is_err() {
// Possible only if provided buffer is not big enough

View File

@ -138,10 +138,7 @@ pub fn compile_codegen_unit(tcx: TyCtxt<'_>, cgu_name: Symbol) -> (ModuleCodegen
}
pub fn set_link_section(llval: &Value, attrs: &CodegenFnAttrs) {
let sect = match attrs.link_section {
Some(name) => name,
None => return,
};
let Some(sect) = attrs.link_section else { return };
unsafe {
let buf = SmallCStr::new(sect.as_str());
llvm::LLVMSetSection(llval, buf.as_ptr());

View File

@ -369,10 +369,9 @@ impl<'ll> StaticMethods for CodegenCx<'ll, '_> {
unsafe {
let attrs = self.tcx.codegen_fn_attrs(def_id);
let (v, alloc) = match codegen_static_initializer(self, def_id) {
Ok(v) => v,
let Ok((v, alloc)) = codegen_static_initializer(self, def_id) else {
// Error has already been reported
Err(_) => return,
return;
};
let g = self.get_static(def_id);

View File

@ -766,18 +766,15 @@ pub fn type_metadata<'ll, 'tcx>(cx: &CodegenCx<'ll, 'tcx>, t: Ty<'tcx>) -> &'ll
if already_stored_in_typemap {
// Also make sure that we already have a `TypeMap` entry for the unique type ID.
let metadata_for_uid = match type_map.find_metadata_for_unique_id(unique_type_id) {
Some(metadata) => metadata,
None => {
bug!(
"expected type metadata for unique \
type ID '{}' to already be in \
the `debuginfo::TypeMap` but it \
was not. (Ty = {})",
type_map.get_unique_type_id_as_string(unique_type_id),
t
);
}
let Some(metadata_for_uid) = type_map.find_metadata_for_unique_id(unique_type_id) else {
bug!(
"expected type metadata for unique \
type ID '{}' to already be in \
the `debuginfo::TypeMap` but it \
was not. (Ty = {})",
type_map.get_unique_type_id_as_string(unique_type_id),
t
);
};
match type_map.find_metadata_for_type(t) {

View File

@ -88,9 +88,8 @@ impl<'ll, 'tcx> IntrinsicCallMethods<'tcx> for Builder<'_, 'll, 'tcx> {
let tcx = self.tcx;
let callee_ty = instance.ty(tcx, ty::ParamEnv::reveal_all());
let (def_id, substs) = match *callee_ty.kind() {
ty::FnDef(def_id, substs) => (def_id, substs),
_ => bug!("expected fn item type, found {}", callee_ty),
let ty::FnDef(def_id, substs) = *callee_ty.kind() else {
bug!("expected fn item type, found {}", callee_ty);
};
let sig = callee_ty.fn_sig(tcx);
@ -1000,9 +999,8 @@ fn generic_simd_intrinsic<'ll, 'tcx>(
}
})
.collect();
let indices = match indices {
Some(i) => i,
None => return Ok(bx.const_null(llret_ty)),
let Some(indices) = indices else {
return Ok(bx.const_null(llret_ty));
};
return Ok(bx.shuffle_vector(

View File

@ -339,9 +339,8 @@ impl<'tcx> LayoutLlvmExt<'tcx> for TyAndLayout<'tcx> {
_ => {}
}
let (a, b) = match self.abi {
Abi::ScalarPair(a, b) => (a, b),
_ => bug!("TyAndLayout::scalar_pair_element_llty({:?}): not applicable", self),
let Abi::ScalarPair(a, b) = self.abi else {
bug!("TyAndLayout::scalar_pair_element_llty({:?}): not applicable", self);
};
let scalar = [a, b][index];

View File

@ -674,9 +674,8 @@ fn link_natively<'a, B: ArchiveBuilder<'a>>(
loop {
i += 1;
prog = sess.time("run_linker", || exec_linker(sess, &cmd, out_filename, tmpdir));
let output = match prog {
Ok(ref output) => output,
Err(_) => break,
let Ok(ref output) = prog else {
break;
};
if output.status.success() {
break;
@ -2025,9 +2024,8 @@ fn add_local_native_libraries(
let search_path = OnceCell::new();
let mut last = (NativeLibKind::Unspecified, None);
for lib in relevant_libs {
let name = match lib.name {
Some(l) => l,
None => continue,
let Some(name) = lib.name else {
continue;
};
// Skip if this library is the same as the last.
@ -2382,9 +2380,8 @@ fn add_upstream_native_libraries(
let mut last = (NativeLibKind::Unspecified, None);
for &cnum in &codegen_results.crate_info.used_crates {
for lib in codegen_results.crate_info.native_libraries[&cnum].iter() {
let name = match lib.name {
Some(l) => l,
None => continue,
let Some(name) = lib.name else {
continue;
};
if !relevant_lib(sess, &lib) {
continue;

View File

@ -79,15 +79,14 @@ fn search_for_metadata<'a>(
bytes: &'a [u8],
section: &str,
) -> Result<&'a [u8], String> {
let file = match object::File::parse(bytes) {
Ok(f) => f,
let Ok(file) = object::File::parse(bytes) else {
// The parse above could fail for odd reasons like corruption, but for
// now we just interpret it as this target doesn't support metadata
// emission in object files so the entire byte slice itself is probably
// a metadata file. Ideally though if necessary we could at least check
// the prefix of bytes to see if it's an actual metadata object and if
// not forward the error along here.
Err(_) => return Ok(bytes),
return Ok(bytes);
};
file.section_by_name(section)
.ok_or_else(|| format!("no `{}` section in '{}'", section, path.display()))?

View File

@ -448,10 +448,7 @@ fn wasm_import_module_map(tcx: TyCtxt<'_>, cnum: CrateNum) -> FxHashMap<DefId, S
let mut ret = FxHashMap::default();
for (def_id, lib) in tcx.foreign_modules(cnum).iter() {
let module = def_id_to_native_lib.get(&def_id).and_then(|s| s.wasm_import_module);
let module = match module {
Some(s) => s,
None => continue,
};
let Some(module) = module else { continue };
ret.extend(lib.foreign_items.iter().map(|id| {
assert_eq!(id.krate, cnum);
(*id, module.to_string())

View File

@ -409,18 +409,15 @@ pub fn maybe_create_entry_wrapper<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
// listing.
let main_ret_ty = cx.tcx().erase_regions(main_ret_ty.no_bound_vars().unwrap());
let llfn = match cx.declare_c_main(llfty) {
Some(llfn) => llfn,
None => {
// FIXME: We should be smart and show a better diagnostic here.
let span = cx.tcx().def_span(rust_main_def_id);
cx.sess()
.struct_span_err(span, "entry symbol `main` declared multiple times")
.help("did you use `#[no_mangle]` on `fn main`? Use `#[start]` instead")
.emit();
cx.sess().abort_if_errors();
bug!();
}
let Some(llfn) = cx.declare_c_main(llfty) else {
// FIXME: We should be smart and show a better diagnostic here.
let span = cx.tcx().def_span(rust_main_def_id);
cx.sess()
.struct_span_err(span, "entry symbol `main` declared multiple times")
.help("did you use `#[no_mangle]` on `fn main`? Use `#[start]` instead")
.emit();
cx.sess().abort_if_errors();
bug!();
};
// `main` should respect same config for frame pointer elimination as rest of code

View File

@ -202,11 +202,8 @@ pub fn asm_const_to_str<'tcx>(
const_value: ConstValue<'tcx>,
ty_and_layout: TyAndLayout<'tcx>,
) -> String {
let scalar = match const_value {
ConstValue::Scalar(s) => s,
_ => {
span_bug!(sp, "expected Scalar for promoted asm const, but got {:#?}", const_value)
}
let ConstValue::Scalar(scalar) = const_value else {
span_bug!(sp, "expected Scalar for promoted asm const, but got {:#?}", const_value)
};
let value = scalar.assert_bits(ty_and_layout.size);
match ty_and_layout.ty.kind() {

View File

@ -67,9 +67,8 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
.map(|field| {
if let Some(prim) = field.val().try_to_scalar() {
let layout = bx.layout_of(field_ty);
let scalar = match layout.abi {
Abi::Scalar(x) => x,
_ => bug!("from_const: invalid ByVal layout: {:#?}", layout),
let Abi::Scalar(scalar) = layout.abi else {
bug!("from_const: invalid ByVal layout: {:#?}", layout);
};
bx.scalar_to_backend(prim, scalar, bx.immediate_backend_type(layout))
} else {

View File

@ -258,14 +258,8 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
let vars = vars.iter().copied().chain(fallback_var);
for var in vars {
let dbg_var = match var.dbg_var {
Some(dbg_var) => dbg_var,
None => continue,
};
let dbg_loc = match self.dbg_loc(var.source_info) {
Some(dbg_loc) => dbg_loc,
None => continue,
};
let Some(dbg_var) = var.dbg_var else { continue };
let Some(dbg_loc) = self.dbg_loc(var.source_info) else { continue };
let mut direct_offset = Size::ZERO;
// FIXME(eddyb) use smallvec here.
@ -410,10 +404,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
}
mir::VarDebugInfoContents::Const(c) => {
if let Some(dbg_var) = dbg_var {
let dbg_loc = match self.dbg_loc(var.source_info) {
Some(dbg_loc) => dbg_loc,
None => continue,
};
let Some(dbg_loc) = self.dbg_loc(var.source_info) else { continue };
if let Ok(operand) = self.eval_mir_constant_to_operand(bx, &c) {
let base = Self::spill_operand_to_stack(

View File

@ -58,9 +58,8 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
) {
let callee_ty = instance.ty(bx.tcx(), ty::ParamEnv::reveal_all());
let (def_id, substs) = match *callee_ty.kind() {
ty::FnDef(def_id, substs) => (def_id, substs),
_ => bug!("expected fn item type, found {}", callee_ty),
let ty::FnDef(def_id, substs) = *callee_ty.kind() else {
bug!("expected fn item type, found {}", callee_ty);
};
let sig = callee_ty.fn_sig(bx.tcx());
@ -338,21 +337,18 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
);
return;
}
let (_width, signed) = match int_type_width_signed(ret_ty, bx.tcx()) {
Some(pair) => pair,
None => {
span_invalid_monomorphization_error(
bx.tcx().sess,
span,
&format!(
"invalid monomorphization of `float_to_int_unchecked` \
intrinsic: expected basic integer type, \
found `{}`",
ret_ty
),
);
return;
}
let Some((_width, signed)) = int_type_width_signed(ret_ty, bx.tcx()) else {
span_invalid_monomorphization_error(
bx.tcx().sess,
span,
&format!(
"invalid monomorphization of `float_to_int_unchecked` \
intrinsic: expected basic integer type, \
found `{}`",
ret_ty
),
);
return;
};
if signed {
bx.fptosi(args[0].immediate(), llret_ty)

View File

@ -281,9 +281,8 @@ fn arg_local_refs<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
// individual LLVM function arguments.
let arg_ty = fx.monomorphize(arg_decl.ty);
let tupled_arg_tys = match arg_ty.kind() {
ty::Tuple(tys) => tys,
_ => bug!("spread argument isn't a tuple?!"),
let ty::Tuple(tupled_arg_tys) = arg_ty.kind() else {
bug!("spread argument isn't a tuple?!");
};
let place = PlaceRef::alloca(bx, bx.layout_of(arg_ty));

View File

@ -78,17 +78,15 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
let val = match val {
ConstValue::Scalar(x) => {
let scalar = match layout.abi {
Abi::Scalar(x) => x,
_ => bug!("from_const: invalid ByVal layout: {:#?}", layout),
let Abi::Scalar(scalar) = layout.abi else {
bug!("from_const: invalid ByVal layout: {:#?}", layout);
};
let llval = bx.scalar_to_backend(x, scalar, bx.immediate_backend_type(layout));
OperandValue::Immediate(llval)
}
ConstValue::Slice { data, start, end } => {
let a_scalar = match layout.abi {
Abi::ScalarPair(a, _) => a,
_ => bug!("from_const: invalid ScalarPair layout: {:#?}", layout),
let Abi::ScalarPair(a_scalar, _) = layout.abi else {
bug!("from_const: invalid ScalarPair layout: {:#?}", layout);
};
let a = Scalar::from_pointer(
Pointer::new(bx.tcx().create_memory_alloc(data), Size::from_bytes(start)),
@ -307,9 +305,8 @@ impl<'a, 'tcx, V: CodegenObject> OperandValue<V> {
bx.store_with_flags(val, dest.llval, dest.align, flags);
}
OperandValue::Pair(a, b) => {
let (a_scalar, b_scalar) = match dest.layout.abi {
Abi::ScalarPair(a, b) => (a, b),
_ => bug!("store_with_flags: invalid ScalarPair layout: {:#?}", dest.layout),
let Abi::ScalarPair(a_scalar, b_scalar) = dest.layout.abi else {
bug!("store_with_flags: invalid ScalarPair layout: {:#?}", dest.layout);
};
let ty = bx.backend_type(dest.layout);
let b_offset = a_scalar.value.size(bx).align_to(b_scalar.value.align(bx).abi);

View File

@ -10,9 +10,8 @@ pub fn binary_search_slice<'d, E, K>(data: &'d [E], key_fn: impl Fn(&E) -> K, ke
where
K: Ord,
{
let mid = match data.binary_search_by_key(key, &key_fn) {
Ok(mid) => mid,
Err(_) => return &[],
let Ok(mid) = data.binary_search_by_key(key, &key_fn) else {
return &[];
};
let size = data.len();

View File

@ -14,6 +14,7 @@
#![feature(control_flow_enum)]
#![feature(core_intrinsics)]
#![feature(extend_one)]
#![feature(let_else)]
#![feature(hash_raw_entry)]
#![feature(maybe_uninit_uninit_array)]
#![feature(min_specialization)]
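
The `#![feature(let_else)]` lines added here and in later crates reflect that `let ... else` was still an unstable feature when this commit landed in February 2022; it was stabilized later, in Rust 1.65, after which the attribute becomes unnecessary. A sketch of what a nightly crate of that era needed (illustrative code, not from the diff):

    // Crate-level opt-in, required on the nightly toolchains of early 2022;
    // on a compiler where `let ... else` is stable this attribute is dropped.
    #![feature(let_else)]

    fn head_or_zero(xs: &[i32]) -> i32 {
        let [first, ..] = xs else { return 0 };
        *first
    }

    fn main() {
        assert_eq!(head_or_zero(&[7, 8]), 7);
        assert_eq!(head_or_zero(&[]), 0);
    }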

View File

@ -190,11 +190,8 @@ impl<T: Eq + Hash> TransitiveRelation<T> {
///
/// Note that this set can, in principle, have any size.
pub fn minimal_upper_bounds(&self, a: &T, b: &T) -> Vec<&T> {
let (mut a, mut b) = match (self.index(a), self.index(b)) {
(Some(a), Some(b)) => (a, b),
(None, _) | (_, None) => {
return vec![];
}
let (Some(mut a), Some(mut b)) = (self.index(a), self.index(b)) else {
return vec![];
};
// in some cases, there are some arbitrary choices to be made;
@ -294,9 +291,8 @@ impl<T: Eq + Hash> TransitiveRelation<T> {
/// then `parents(a)` returns `[b, c]`. The `postdom_parent` function
/// would further reduce this to just `f`.
pub fn parents(&self, a: &T) -> Vec<&T> {
let a = match self.index(a) {
Some(a) => a,
None => return vec![],
let Some(a) = self.index(a) else {
return vec![];
};
// Steal the algorithm for `minimal_upper_bounds` above, but

View File

@ -6,6 +6,7 @@
#![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
#![feature(nll)]
#![feature(let_else)]
#![feature(once_cell)]
#![recursion_limit = "256"]
#![cfg_attr(not(bootstrap), allow(rustc::potential_query_instability))]
@ -203,10 +204,7 @@ fn run_compiler(
let args = args::arg_expand_all(at_args);
let diagnostic_output = emitter.map_or(DiagnosticOutput::Default, DiagnosticOutput::Raw);
let matches = match handle_options(&args) {
Some(matches) => matches,
None => return Ok(()),
};
let Some(matches) = handle_options(&args) else { return Ok(()) };
let sopts = config::build_session_options(&matches);

View File

@ -1138,9 +1138,8 @@ impl EmitterWriter {
}
fn get_multispan_max_line_num(&mut self, msp: &MultiSpan) -> usize {
let sm = match self.sm {
Some(ref sm) => sm,
None => return 0,
let Some(ref sm) = self.sm else {
return 0;
};
let mut max = 0;
@ -1590,9 +1589,8 @@ impl EmitterWriter {
level: &Level,
max_line_num_len: usize,
) -> io::Result<()> {
let sm = match self.sm {
Some(ref sm) => sm,
None => return Ok(()),
let Some(ref sm) = self.sm else {
return Ok(());
};
// Render the replacements for each suggestion

View File

@ -1299,20 +1299,16 @@ pub fn parse_macro_name_and_helper_attrs(
// Once we've located the `#[proc_macro_derive]` attribute, verify
// that it's of the form `#[proc_macro_derive(Foo)]` or
// `#[proc_macro_derive(Foo, attributes(A, ..))]`
let list = match attr.meta_item_list() {
Some(list) => list,
None => return None,
let Some(list) = attr.meta_item_list() else {
return None;
};
if list.len() != 1 && list.len() != 2 {
diag.span_err(attr.span, "attribute must have either one or two arguments");
return None;
}
let trait_attr = match list[0].meta_item() {
Some(meta_item) => meta_item,
_ => {
diag.span_err(list[0].span(), "not a meta item");
return None;
}
let Some(trait_attr) = list[0].meta_item() else {
diag.span_err(list[0].span(), "not a meta item");
return None;
};
let trait_ident = match trait_attr.ident() {
Some(trait_ident) if trait_attr.is_word() => trait_ident,
@ -1341,12 +1337,9 @@ pub fn parse_macro_name_and_helper_attrs(
})
.iter()
.filter_map(|attr| {
let attr = match attr.meta_item() {
Some(meta_item) => meta_item,
_ => {
diag.span_err(attr.span(), "not a meta item");
return None;
}
let Some(attr) = attr.meta_item() else {
diag.span_err(attr.span(), "not a meta item");
return None;
};
let ident = match attr.ident() {

View File

@ -79,9 +79,8 @@ fn get_features(
continue;
}
let list = match attr.meta_item_list() {
Some(list) => list,
None => continue,
let Some(list) = attr.meta_item_list() else {
continue;
};
for mi in list {
@ -112,9 +111,8 @@ fn get_features(
continue;
}
let list = match attr.meta_item_list() {
Some(list) => list,
None => continue,
let Some(list) = attr.meta_item_list() else {
continue;
};
let bad_input = |span| {
@ -340,10 +338,9 @@ impl<'a> StripUnconfigured<'a> {
/// is in the original source file. Gives a compiler error if the syntax of
/// the attribute is incorrect.
crate fn expand_cfg_attr(&self, attr: Attribute, recursive: bool) -> Vec<Attribute> {
let (cfg_predicate, expanded_attrs) =
match rustc_parse::parse_cfg_attr(&attr, &self.sess.parse_sess) {
None => return vec![],
Some(r) => r,
let Some((cfg_predicate, expanded_attrs)) =
rustc_parse::parse_cfg_attr(&attr, &self.sess.parse_sess) else {
return vec![];
};
// Lint on zero attributes in source.
@ -389,18 +386,16 @@ impl<'a> StripUnconfigured<'a> {
// Use the `#` in `#[cfg_attr(pred, attr)]` as the `#` token
// for `attr` when we expand it to `#[attr]`
let mut orig_trees = orig_tokens.trees();
let pound_token = match orig_trees.next().unwrap() {
TokenTree::Token(token @ Token { kind: TokenKind::Pound, .. }) => token,
_ => panic!("Bad tokens for attribute {:?}", attr),
let TokenTree::Token(pound_token @ Token { kind: TokenKind::Pound, .. }) = orig_trees.next().unwrap() else {
panic!("Bad tokens for attribute {:?}", attr);
};
let pound_span = pound_token.span;
let mut trees = vec![(AttrAnnotatedTokenTree::Token(pound_token), Spacing::Alone)];
if attr.style == AttrStyle::Inner {
// For inner attributes, we do the same thing for the `!` in `#![some_attr]`
let bang_token = match orig_trees.next().unwrap() {
TokenTree::Token(token @ Token { kind: TokenKind::Not, .. }) => token,
_ => panic!("Bad tokens for attribute {:?}", attr),
let TokenTree::Token(bang_token @ Token { kind: TokenKind::Not, .. }) = orig_trees.next().unwrap() else {
panic!("Bad tokens for attribute {:?}", attr);
};
trees.push((AttrAnnotatedTokenTree::Token(bang_token), Spacing::Alone));
}

View File

@ -641,9 +641,8 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
ExpandResult::Ready(match invoc.kind {
InvocationKind::Bang { mac, .. } => match ext {
SyntaxExtensionKind::Bang(expander) => {
let tok_result = match expander.expand(self.cx, span, mac.args.inner_tokens()) {
Err(_) => return ExpandResult::Ready(fragment_kind.dummy(span)),
Ok(ts) => ts,
let Ok(tok_result) = expander.expand(self.cx, span, mac.args.inner_tokens()) else {
return ExpandResult::Ready(fragment_kind.dummy(span));
};
self.parse_ast_fragment(tok_result, fragment_kind, &mac.path, span)
}
@ -698,9 +697,8 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
self.cx.span_err(span, "key-value macro attributes are not supported");
}
let inner_tokens = attr_item.args.inner_tokens();
let tok_result = match expander.expand(self.cx, span, inner_tokens, tokens) {
Err(_) => return ExpandResult::Ready(fragment_kind.dummy(span)),
Ok(ts) => ts,
let Ok(tok_result) = expander.expand(self.cx, span, inner_tokens, tokens) else {
return ExpandResult::Ready(fragment_kind.dummy(span));
};
self.parse_ast_fragment(tok_result, fragment_kind, &attr_item.path, span)
}

View File

@ -170,23 +170,20 @@ fn mod_file_path_from_attr(
) -> Option<PathBuf> {
// Extract path string from first `#[path = "path_string"]` attribute.
let first_path = attrs.iter().find(|at| at.has_name(sym::path))?;
let path_sym = match first_path.value_str() {
Some(s) => s,
None => {
// This check is here mainly to catch attempting to use a macro,
// such as #[path = concat!(...)]. This isn't currently supported
// because otherwise the InvocationCollector would need to defer
// loading a module until the #[path] attribute was expanded, and
// it doesn't support that (and would likely add a bit of
// complexity). Usually bad forms are checked in AstValidator (via
// `check_builtin_attribute`), but by the time that runs the macro
// is expanded, and it doesn't give an error.
validate_attr::emit_fatal_malformed_builtin_attribute(
&sess.parse_sess,
first_path,
sym::path,
);
}
let Some(path_sym) = first_path.value_str() else {
// This check is here mainly to catch attempting to use a macro,
// such as #[path = concat!(...)]. This isn't currently supported
// because otherwise the InvocationCollector would need to defer
// loading a module until the #[path] attribute was expanded, and
// it doesn't support that (and would likely add a bit of
// complexity). Usually bad forms are checked in AstValidator (via
// `check_builtin_attribute`), but by the time that runs the macro
// is expanded, and it doesn't give an error.
validate_attr::emit_fatal_malformed_builtin_attribute(
&sess.parse_sess,
first_path,
sym::path,
);
};
let path_str = path_sym.as_str();

View File

@ -596,9 +596,8 @@ impl server::Literal for Rustc<'_, '_> {
let minus_present = parser.eat(&token::BinOp(token::Minus));
let lit_span = parser.token.span.data();
let mut lit = match parser.token.kind {
token::Literal(lit) => lit,
_ => return Err(()),
let token::Literal(mut lit) = parser.token.kind else {
return Err(());
};
// Check no comment or whitespace surrounding the (possibly negative)

View File

@ -635,9 +635,8 @@ pub struct WhereBoundPredicate<'hir> {
impl<'hir> WhereBoundPredicate<'hir> {
/// Returns `true` if `param_def_id` matches the `bounded_ty` of this predicate.
pub fn is_param_bound(&self, param_def_id: DefId) -> bool {
let path = match self.bounded_ty.kind {
TyKind::Path(QPath::Resolved(None, path)) => path,
_ => return false,
let TyKind::Path(QPath::Resolved(None, path)) = self.bounded_ty.kind else {
return false;
};
match path.res {
Res::Def(DefKind::TyParam, def_id)

View File

@ -5,6 +5,7 @@
#![feature(associated_type_defaults)]
#![feature(const_btree_new)]
#![feature(crate_visibility_modifier)]
#![feature(let_else)]
#![feature(once_cell)]
#![feature(min_specialization)]
#![feature(never_type)]

View File

@ -368,9 +368,8 @@ impl<'tcx> DirtyCleanVisitor<'tcx> {
fn check_item(&mut self, item_id: LocalDefId, item_span: Span) {
let def_path_hash = self.tcx.def_path_hash(item_id.to_def_id());
for attr in self.tcx.get_attrs(item_id.to_def_id()).iter() {
let assertion = match self.assertion_maybe(item_id, attr) {
Some(a) => a,
None => continue,
let Some(assertion) = self.assertion_maybe(item_id, attr) else {
continue;
};
self.checked_attrs.insert(attr.id);
for label in assertion.clean {

View File

@ -421,9 +421,8 @@ fn copy_files(sess: &Session, target_dir: &Path, source_dir: &Path) -> Result<bo
return Err(());
};
let source_dir_iterator = match source_dir.read_dir() {
Ok(it) => it,
Err(_) => return Err(()),
let Ok(source_dir_iterator) = source_dir.read_dir() else {
return Err(());
};
let mut files_linked = 0;
@ -700,12 +699,9 @@ pub fn garbage_collect_session_directories(sess: &Session) -> io::Result<()> {
let mut lock_files = FxHashSet::default();
for dir_entry in crate_directory.read_dir()? {
let dir_entry = match dir_entry {
Ok(dir_entry) => dir_entry,
_ => {
// Ignore any errors
continue;
}
let Ok(dir_entry) = dir_entry else {
// Ignore any errors
continue;
};
let entry_name = dir_entry.file_name();
@ -740,16 +736,13 @@ pub fn garbage_collect_session_directories(sess: &Session) -> io::Result<()> {
// be some kind of leftover
for (lock_file_name, directory_name) in &lock_file_to_session_dir {
if directory_name.is_none() {
let timestamp = match extract_timestamp_from_session_dir(lock_file_name) {
Ok(timestamp) => timestamp,
Err(()) => {
debug!(
"found lock-file with malformed timestamp: {}",
crate_directory.join(&lock_file_name).display()
);
// Ignore it
continue;
}
let Ok(timestamp) = extract_timestamp_from_session_dir(lock_file_name) else {
debug!(
"found lock-file with malformed timestamp: {}",
crate_directory.join(&lock_file_name).display()
);
// Ignore it
continue;
};
let lock_file_path = crate_directory.join(&**lock_file_name);
@ -798,16 +791,13 @@ pub fn garbage_collect_session_directories(sess: &Session) -> io::Result<()> {
for (lock_file_name, directory_name) in &lock_file_to_session_dir {
debug!("garbage_collect_session_directories() - inspecting: {}", directory_name);
let timestamp = match extract_timestamp_from_session_dir(directory_name) {
Ok(timestamp) => timestamp,
Err(()) => {
debug!(
"found session-dir with malformed timestamp: {}",
crate_directory.join(directory_name).display()
);
// Ignore it
continue;
}
let Ok(timestamp) = extract_timestamp_from_session_dir(directory_name) else {
debug!(
"found session-dir with malformed timestamp: {}",
crate_directory.join(directory_name).display()
);
// Ignore it
continue;
};
if is_finalized(directory_name) {

View File

@ -136,12 +136,9 @@ impl<I: Idx> IntervalSet<I> {
pub fn contains(&self, needle: I) -> bool {
let needle = needle.index() as u32;
let last = match self.map.partition_point(|r| r.0 <= needle).checked_sub(1) {
Some(idx) => idx,
None => {
// All ranges in the map start after the new range's end
return false;
}
let Some(last) = self.map.partition_point(|r| r.0 <= needle).checked_sub(1) else {
// All ranges in the map start after the new range's end
return false;
};
let (_, prev_end) = &self.map[last];
needle <= *prev_end
@ -170,12 +167,9 @@ impl<I: Idx> IntervalSet<I> {
if start > end {
return None;
}
let last = match self.map.partition_point(|r| r.0 <= end).checked_sub(1) {
Some(idx) => idx,
None => {
// All ranges in the map start after the new range's end
return None;
}
let Some(last) = self.map.partition_point(|r| r.0 <= end).checked_sub(1) else {
// All ranges in the map start after the new range's end
return None;
};
let (_, prev_end) = &self.map[last];
if start <= *prev_end { Some(I::new(std::cmp::min(*prev_end, end) as usize)) } else { None }

View File

@ -18,16 +18,14 @@ impl<'a, 'tcx> NiceRegionError<'a, 'tcx> {
let error = self.error.as_ref()?;
debug!("try_report_mismatched_static_lifetime {:?}", error);
let (origin, sub, sup) = match error.clone() {
RegionResolutionError::ConcreteFailure(origin, sub, sup) => (origin, sub, sup),
_ => return None,
let RegionResolutionError::ConcreteFailure(origin, sub, sup) = error.clone() else {
return None;
};
if !sub.is_static() {
return None;
}
let cause = match origin {
SubregionOrigin::Subtype(box TypeTrace { ref cause, .. }) => cause,
_ => return None,
let SubregionOrigin::Subtype(box TypeTrace { ref cause, .. }) = origin else {
return None;
};
// If we added a "points at argument expression" obligation, we remove it here, we care
// about the original obligation only.
@ -35,13 +33,11 @@ impl<'a, 'tcx> NiceRegionError<'a, 'tcx> {
ObligationCauseCode::FunctionArgumentObligation { parent_code, .. } => &*parent_code,
_ => cause.code(),
};
let (parent, impl_def_id) = match code {
ObligationCauseCode::MatchImpl(parent, impl_def_id) => (parent, impl_def_id),
_ => return None,
let ObligationCauseCode::MatchImpl(parent, impl_def_id) = code else {
return None;
};
let binding_span = match *parent.code() {
ObligationCauseCode::BindingObligation(_def_id, binding_span) => binding_span,
_ => return None,
let ObligationCauseCode::BindingObligation(_def_id, binding_span) = *parent.code() else {
return None;
};
let mut err = self.tcx().sess.struct_span_err(cause.span, "incompatible lifetime on type");
// FIXME: we should point at the lifetime
@ -55,12 +51,11 @@ impl<'a, 'tcx> NiceRegionError<'a, 'tcx> {
// be as helpful as possible with implicit lifetimes.
// First, let's get the hir self type of the impl
let impl_self_ty = match impl_node {
hir::Node::Item(hir::Item {
kind: hir::ItemKind::Impl(hir::Impl { self_ty, .. }),
..
}) => self_ty,
_ => bug!("Node not an impl."),
let hir::Node::Item(hir::Item {
kind: hir::ItemKind::Impl(hir::Impl { self_ty: impl_self_ty, .. }),
..
}) = impl_node else {
bug!("Node not an impl.");
};
// Next, let's figure out the set of trait objects with implict static bounds

View File

@ -490,14 +490,13 @@ impl<'a, 'tcx> NiceRegionError<'a, 'tcx> {
let tcx = self.tcx();
// Find the method being called.
let instance = match ty::Instance::resolve(
let Ok(Some(instance)) = ty::Instance::resolve(
tcx,
ctxt.param_env,
ctxt.assoc_item.def_id,
self.infcx.resolve_vars_if_possible(ctxt.substs),
) {
Ok(Some(instance)) => instance,
_ => return false,
) else {
return false;
};
let mut v = TraitObjectVisitor(FxHashSet::default());
@ -505,11 +504,9 @@ impl<'a, 'tcx> NiceRegionError<'a, 'tcx> {
// Get the `Ident` of the method being called and the corresponding `impl` (to point at
// `Bar` in `impl Foo for dyn Bar {}` and the definition of the method being called).
let (ident, self_ty) =
match self.get_impl_ident_and_self_ty_from_trait(instance.def_id(), &v.0) {
Some((ident, self_ty)) => (ident, self_ty),
None => return false,
};
let Some((ident, self_ty)) = self.get_impl_ident_and_self_ty_from_trait(instance.def_id(), &v.0) else {
return false;
};
// Find the trait object types in the argument, so we point at *only* the trait object.
self.suggest_constrain_dyn_trait_in_impl(err, &v.0, ident, self_ty)

View File

@ -263,9 +263,8 @@ impl<'cx, 'tcx> LexicalResolver<'cx, 'tcx> {
while let Some(vid) = changes.pop() {
constraints[vid].retain(|&(a_vid, b_vid)| {
let a_region = match *var_values.value(a_vid) {
VarValue::ErrorValue => return false,
VarValue::Value(a_region) => a_region,
let VarValue::Value(a_region) = *var_values.value(a_vid) else {
return false;
};
let b_data = var_values.value_mut(b_vid);
if self.expand_node(a_region, b_vid, b_data) {
@ -485,9 +484,8 @@ impl<'cx, 'tcx> LexicalResolver<'cx, 'tcx> {
let a_data = var_data.value_mut(a_vid);
debug!("contraction: {:?} == {:?}, {:?}", a_vid, a_data, b_region);
let a_region = match *a_data {
VarValue::ErrorValue => continue,
VarValue::Value(a_region) => a_region,
let VarValue::Value(a_region) = *a_data else {
continue;
};
// Do not report these errors immediately:

View File

@ -259,10 +259,7 @@ impl<'tcx> Queries<'tcx> {
/// to write UI tests that actually test that compilation succeeds without reporting
/// an error.
fn check_for_rustc_errors_attr(tcx: TyCtxt<'_>) {
let def_id = match tcx.entry_fn(()) {
Some((def_id, _)) => def_id,
_ => return,
};
let Some((def_id, _)) = tcx.entry_fn(()) else { return };
let attrs = &*tcx.get_attrs(def_id);
let attrs = attrs.iter().filter(|attr| attr.has_name(sym::rustc_error));

View File

@ -422,10 +422,7 @@ fn get_codegen_sysroot(maybe_sysroot: &Option<PathBuf>, backend_name: &str) -> M
];
for entry in d.filter_map(|e| e.ok()) {
let path = entry.path();
let filename = match path.file_name().and_then(|s| s.to_str()) {
Some(s) => s,
None => continue,
};
let Some(filename) = path.file_name().and_then(|s| s.to_str()) else { continue };
if !(filename.starts_with(DLL_PREFIX) && filename.ends_with(DLL_SUFFIX)) {
continue;
}

View File

@ -278,9 +278,8 @@ impl LintStore {
/// This lint has been renamed; warn about using the new name and apply the lint.
#[track_caller]
pub fn register_renamed(&mut self, old_name: &str, new_name: &str) {
let target = match self.by_name.get(new_name) {
Some(&Id(lint_id)) => lint_id,
_ => bug!("invalid lint renaming of {} to {}", old_name, new_name),
let Some(&Id(target)) = self.by_name.get(new_name) else {
bug!("invalid lint renaming of {} to {}", old_name, new_name);
};
self.by_name.insert(old_name.to_string(), Renamed(new_name.to_string(), target));
}

View File

@ -23,10 +23,7 @@ declare_lint_pass!(DefaultHashTypes => [DEFAULT_HASH_TYPES]);
impl LateLintPass<'_> for DefaultHashTypes {
fn check_path(&mut self, cx: &LateContext<'_>, path: &Path<'_>, hir_id: HirId) {
let def_id = match path.res {
Res::Def(rustc_hir::def::DefKind::Struct, id) => id,
_ => return,
};
let Res::Def(rustc_hir::def::DefKind::Struct, def_id) = path.res else { return };
if matches!(cx.tcx.hir().get(hir_id), Node::Item(Item { kind: ItemKind::Use(..), .. })) {
// don't lint imports, only actual usages
return;

View File

@ -95,9 +95,9 @@ impl<'s> LintLevelsBuilder<'s> {
let orig_level = level;
let lint_flag_val = Symbol::intern(lint_name);
let ids = match store.find_lints(&lint_name) {
Ok(ids) => ids,
Err(_) => continue, // errors handled in check_lint_name_cmdline above
let Ok(ids) = store.find_lints(&lint_name) else {
// errors handled in check_lint_name_cmdline above
continue
};
for id in ids {
// ForceWarn and Forbid cannot be overriden

View File

@ -301,10 +301,7 @@ impl EarlyLintPass for NonAsciiIdents {
BTreeMap::new();
'outerloop: for (augment_script_set, usage) in script_states {
let (mut ch_list, sp) = match usage {
ScriptSetUsage::Verified => continue,
ScriptSetUsage::Suspicious(ch_list, sp) => (ch_list, sp),
};
let ScriptSetUsage::Suspicious(mut ch_list, sp) = usage else { continue };
if augment_script_set.is_all() {
continue;

View File

@ -1331,14 +1331,7 @@ impl<'tcx> LateLintPass<'tcx> for VariantSizeDifferences {
if let hir::ItemKind::Enum(ref enum_definition, _) = it.kind {
let t = cx.tcx.type_of(it.def_id);
let ty = cx.tcx.erase_regions(t);
let layout = match cx.layout_of(ty) {
Ok(layout) => layout,
Err(
ty::layout::LayoutError::Unknown(_)
| ty::layout::LayoutError::SizeOverflow(_)
| ty::layout::LayoutError::NormalizationFailure(_, _),
) => return,
};
let Ok(layout) = cx.layout_of(ty) else { return };
let Variants::Multiple {
tag_encoding: TagEncoding::Direct, tag, ref variants, ..
} = &layout.variants else {

View File

@ -475,9 +475,8 @@ impl<'a> CrateLoader<'a> {
locator.triple = TargetTriple::from_triple(config::host_triple());
locator.filesearch = self.sess.host_filesearch(path_kind);
let host_result = match self.load(locator)? {
Some(host_result) => host_result,
None => return Ok(None),
let Some(host_result) = self.load(locator)? else {
return Ok(None);
};
Ok(Some(if self.sess.opts.debugging_opts.dual_proc_macros {
@ -574,9 +573,8 @@ impl<'a> CrateLoader<'a> {
}
fn load(&self, locator: &mut CrateLocator<'_>) -> Result<Option<LoadResult>, CrateError> {
let library = match locator.maybe_load_library_crate()? {
Some(library) => library,
None => return Ok(None),
let Some(library) = locator.maybe_load_library_crate()? else {
return Ok(None);
};
// In the case that we're loading a crate, but not matching

View File

@ -15,9 +15,8 @@ struct Collector {
impl<'tcx> ItemLikeVisitor<'tcx> for Collector {
fn visit_item(&mut self, it: &'tcx hir::Item<'tcx>) {
let items = match it.kind {
hir::ItemKind::ForeignMod { items, .. } => items,
_ => return,
let hir::ItemKind::ForeignMod { items, .. } = it.kind else {
return;
};
let foreign_items = items.iter().map(|it| it.id.def_id.to_def_id()).collect();

View File

@ -690,14 +690,11 @@ impl<'a> CrateLocator<'a> {
loc.original().clone(),
));
}
let file = match loc.original().file_name().and_then(|s| s.to_str()) {
Some(file) => file,
None => {
return Err(CrateError::ExternLocationNotFile(
self.crate_name,
loc.original().clone(),
));
}
let Some(file) = loc.original().file_name().and_then(|s| s.to_str()) else {
return Err(CrateError::ExternLocationNotFile(
self.crate_name,
loc.original().clone(),
));
};
if file.starts_with("lib") && (file.ends_with(".rlib") || file.ends_with(".rmeta"))

View File

@ -33,9 +33,8 @@ struct Collector<'tcx> {
impl<'tcx> ItemLikeVisitor<'tcx> for Collector<'tcx> {
fn visit_item(&mut self, it: &'tcx hir::Item<'tcx>) {
let (abi, foreign_mod_items) = match it.kind {
hir::ItemKind::ForeignMod { abi, items } => (abi, items),
_ => return,
let hir::ItemKind::ForeignMod { abi, items: foreign_mod_items } = it.kind else {
return;
};
if abi == Abi::Rust || abi == Abi::RustIntrinsic || abi == Abi::PlatformIntrinsic {
@ -45,9 +44,8 @@ impl<'tcx> ItemLikeVisitor<'tcx> for Collector<'tcx> {
// Process all of the #[link(..)]-style arguments
let sess = &self.tcx.sess;
for m in self.tcx.hir().attrs(it.hir_id()).iter().filter(|a| a.has_name(sym::link)) {
let items = match m.meta_item_list() {
Some(item) => item,
None => continue,
let Some(items) = m.meta_item_list() else {
continue;
};
let mut lib = NativeLib {
name: None,
@ -63,9 +61,8 @@ impl<'tcx> ItemLikeVisitor<'tcx> for Collector<'tcx> {
for item in items.iter() {
if item.has_name(sym::kind) {
kind_specified = true;
let kind = match item.value_str() {
Some(name) => name,
None => continue, // skip like historical compilers
let Some(kind) = item.value_str() else {
continue; // skip like historical compilers
};
lib.kind = match kind.as_str() {
"static" => NativeLibKind::Static { bundle: None, whole_archive: None },
@ -101,9 +98,8 @@ impl<'tcx> ItemLikeVisitor<'tcx> for Collector<'tcx> {
} else if item.has_name(sym::name) {
lib.name = item.value_str();
} else if item.has_name(sym::cfg) {
let cfg = match item.meta_item_list() {
Some(list) => list,
None => continue, // skip like historical compilers
let Some(cfg) = item.meta_item_list() else {
continue; // skip like historical compilers
};
if cfg.is_empty() {
sess.span_err(item.span(), "`cfg()` must have an argument");
@ -262,11 +258,8 @@ impl Collector<'_> {
}
// this just unwraps lib.name; we already established that it isn't empty above.
if let (NativeLibKind::RawDylib, Some(lib_name)) = (lib.kind, lib.name) {
let span = match span {
Some(s) => s,
None => {
bug!("raw-dylib libraries are not supported on the command line");
}
let Some(span) = span else {
bug!("raw-dylib libraries are not supported on the command line");
};
if !self.tcx.sess.target.options.is_like_windows {

View File

@ -249,9 +249,8 @@ pub(in crate::rmeta) fn provide(providers: &mut Providers) {
.iter()
.filter(|lib| native_libs::relevant_lib(&tcx.sess, lib))
.find(|lib| {
let fm_id = match lib.foreign_module {
Some(id) => id,
None => return false,
let Some(fm_id) = lib.foreign_module else {
return false;
};
let map = tcx.foreign_modules(id.krate);
map.get(&fm_id)

View File

@ -173,9 +173,8 @@ impl Scope {
/// returned span may not correspond to the span of any `NodeId` in
/// the AST.
pub fn span(&self, tcx: TyCtxt<'_>, scope_tree: &ScopeTree) -> Span {
let hir_id = match self.hir_id(scope_tree) {
Some(hir_id) => hir_id,
None => return DUMMY_SP,
let Some(hir_id) = self.hir_id(scope_tree) else {
return DUMMY_SP;
};
let span = tcx.hir().span(hir_id);
if let ScopeData::Remainder(first_statement_index) = self.data {

View File

@ -89,9 +89,8 @@ pub fn dump_mir<'tcx, F>(
}
pub fn dump_enabled<'tcx>(tcx: TyCtxt<'tcx>, pass_name: &str, def_id: DefId) -> bool {
let filters = match tcx.sess.opts.debugging_opts.dump_mir {
None => return false,
Some(ref filters) => filters,
let Some(ref filters) = tcx.sess.opts.debugging_opts.dump_mir else {
return false;
};
let node_path = ty::print::with_forced_impl_filename_line(|| {
// see notes on #41697 below
@ -586,9 +585,8 @@ fn write_scope_tree(
)?;
}
let children = match scope_tree.get(&parent) {
Some(children) => children,
None => return Ok(()),
let Some(children) = scope_tree.get(&parent) else {
return Ok(());
};
for &child in children {

View File

@ -1123,9 +1123,8 @@ impl<'tcx> TyCtxt<'tcx> {
pub fn layout_scalar_valid_range(self, def_id: DefId) -> (Bound<u128>, Bound<u128>) {
let attrs = self.get_attrs(def_id);
let get = |name| {
let attr = match attrs.iter().find(|a| a.has_name(name)) {
Some(attr) => attr,
None => return Bound::Unbounded,
let Some(attr) = attrs.iter().find(|a| a.has_name(name)) else {
return Bound::Unbounded;
};
debug!("layout_scalar_valid_range: attr={:?}", attr);
if let Some(
@ -1513,9 +1512,8 @@ impl<'tcx> TyCtxt<'tcx> {
scope_def_id: LocalDefId,
) -> Vec<&'tcx hir::Ty<'tcx>> {
let hir_id = self.hir().local_def_id_to_hir_id(scope_def_id);
let hir_output = match self.hir().fn_decl_by_hir_id(hir_id) {
Some(hir::FnDecl { output: hir::FnRetTy::Return(ty), .. }) => ty,
_ => return vec![],
let Some(hir::FnDecl { output: hir::FnRetTy::Return(hir_output), .. }) = self.hir().fn_decl_by_hir_id(hir_id) else {
return vec![];
};
let mut v = TraitObjectVisitor(vec![], self.hir());

View File

@ -861,11 +861,10 @@ fn foo(&self) -> Self::T { String::new() }
body_owner_def_id: DefId,
found: Ty<'tcx>,
) -> bool {
let hir_id =
match body_owner_def_id.as_local().map(|id| self.hir().local_def_id_to_hir_id(id)) {
Some(hir_id) => hir_id,
None => return false,
};
let Some(hir_id) = body_owner_def_id.as_local() else {
return false;
};
let hir_id = self.hir().local_def_id_to_hir_id(hir_id);
// When `body_owner` is an `impl` or `trait` item, look in its associated types for
// `expected` and point at it.
let parent_id = self.hir().get_parent_item(hir_id);

View File

@ -1319,9 +1319,8 @@ impl<'tcx> LayoutCx<'tcx, TyCtxt<'tcx>> {
// Try to use a ScalarPair for all tagged enums.
let mut common_prim = None;
for (field_layouts, layout_variant) in iter::zip(&variants, &layout_variants) {
let offsets = match layout_variant.fields {
FieldsShape::Arbitrary { ref offsets, .. } => offsets,
_ => bug!(),
let FieldsShape::Arbitrary { ref offsets, .. } = layout_variant.fields else {
bug!();
};
let mut fields =
iter::zip(field_layouts, offsets).filter(|p| !p.0.is_zst());
@ -1571,9 +1570,8 @@ impl<'tcx> LayoutCx<'tcx, TyCtxt<'tcx>> {
let tcx = self.tcx;
let subst_field = |ty: Ty<'tcx>| ty.subst(tcx, substs);
let info = match tcx.generator_layout(def_id) {
None => return Err(LayoutError::Unknown(ty)),
Some(info) => info,
let Some(info) = tcx.generator_layout(def_id) else {
return Err(LayoutError::Unknown(ty));
};
let (ineligible_locals, assignments) = self.generator_saved_local_eligibility(&info);
@ -1676,9 +1674,8 @@ impl<'tcx> LayoutCx<'tcx, TyCtxt<'tcx>> {
)?;
variant.variants = Variants::Single { index };
let (offsets, memory_index) = match variant.fields {
FieldsShape::Arbitrary { offsets, memory_index } => (offsets, memory_index),
_ => bug!(),
let FieldsShape::Arbitrary { offsets, memory_index } = variant.fields else {
bug!();
};
// Now, stitch the promoted and variant-only fields back together in

View File

@ -415,9 +415,8 @@ pub trait PrettyPrinter<'tcx>:
cur_def_key = self.tcx().def_key(parent);
}
let visible_parent = match visible_parent_map.get(&def_id).cloned() {
Some(parent) => parent,
None => return Ok((self, false)),
let Some(visible_parent) = visible_parent_map.get(&def_id).cloned() else {
return Ok((self, false));
};
let actual_parent = self.tcx().parent(def_id);

View File

@ -126,11 +126,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
candidate: &Candidate<'pat, 'tcx>,
variants: &mut BitSet<VariantIdx>,
) -> bool {
let match_pair = match candidate.match_pairs.iter().find(|mp| mp.place == *test_place) {
Some(match_pair) => match_pair,
_ => {
return false;
}
let Some(match_pair) = candidate.match_pairs.iter().find(|mp| mp.place == *test_place) else {
return false;
};
match *match_pair.pattern.kind {
@ -421,9 +418,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
}
}
let deref_ty = match *ty.kind() {
ty::Ref(_, deref_ty, _) => deref_ty,
_ => bug!("non_scalar_compare called on non-reference type: {}", ty),
let ty::Ref(_, deref_ty, _) = *ty.kind() else {
bug!("non_scalar_compare called on non-reference type: {}", ty);
};
let eq_def_id = self.tcx.require_lang_item(LangItem::PartialEq, None);

View File

@ -266,9 +266,8 @@ fn liberated_closure_env_ty(
) -> Ty<'_> {
let closure_ty = tcx.typeck_body(body_id).node_type(closure_expr_id);
let (closure_def_id, closure_substs) = match *closure_ty.kind() {
ty::Closure(closure_def_id, closure_substs) => (closure_def_id, closure_substs),
_ => bug!("closure expr does not have closure type: {:?}", closure_ty),
let ty::Closure(closure_def_id, closure_substs) = *closure_ty.kind() else {
bug!("closure expr does not have closure type: {:?}", closure_ty);
};
let bound_vars =

View File

@ -62,10 +62,7 @@ fn parse_float<'tcx>(num: Symbol, fty: ty::FloatTy, neg: bool) -> Option<ConstVa
use rustc_apfloat::ieee::{Double, Single};
let scalar = match fty {
ty::FloatTy::F32 => {
let rust_f = match num.parse::<f32>() {
Ok(f) => f,
Err(_) => return None,
};
let Ok(rust_f) = num.parse::<f32>() else { return None };
let mut f = num.parse::<Single>().unwrap_or_else(|e| {
panic!("apfloat::ieee::Single failed to parse `{}`: {:?}", num, e)
});
@ -85,10 +82,7 @@ fn parse_float<'tcx>(num: Symbol, fty: ty::FloatTy, neg: bool) -> Option<ConstVa
Scalar::from_f32(f)
}
ty::FloatTy::F64 => {
let rust_f = match num.parse::<f64>() {
Ok(f) => f,
Err(_) => return None,
};
let Ok(rust_f) = num.parse::<f64>() else { return None };
let mut f = num.parse::<Double>().unwrap_or_else(|e| {
panic!("apfloat::ieee::Double failed to parse `{}`: {:?}", num, e)
});

View File

@ -503,13 +503,12 @@ impl<'tcx> Cx<'tcx> {
InlineAsmOperand::Const { value, span }
}
hir::InlineAsmOperand::Sym { ref expr } => {
let qpath = match expr.kind {
hir::ExprKind::Path(ref qpath) => qpath,
_ => span_bug!(
let hir::ExprKind::Path(ref qpath) = expr.kind else {
span_bug!(
expr.span,
"asm `sym` operand should be a path, found {:?}",
expr.kind
),
);
};
let temp_lifetime =
self.region_scope_tree.temporary_scope(expr.hir_id.local_id);
@ -577,9 +576,8 @@ impl<'tcx> Cx<'tcx> {
// Now comes the rote stuff:
hir::ExprKind::Repeat(ref v, _) => {
let ty = self.typeck_results().expr_ty(expr);
let count = match ty.kind() {
ty::Array(_, ct) => ct,
_ => span_bug!(expr.span, "unexpected repeat expr ty: {:?}", ty),
let ty::Array(_, count) = ty.kind() else {
span_bug!(expr.span, "unexpected repeat expr ty: {:?}", ty);
};
ExprKind::Repeat { value: self.mirror_expr(v), count: *count }
@ -1007,9 +1005,8 @@ impl<'tcx> Cx<'tcx> {
// Reconstruct the output assuming it's a reference with the
// same region and mutability as the receiver. This holds for
// `Deref(Mut)::Deref(_mut)` and `Index(Mut)::index(_mut)`.
let (region, mutbl) = match *self.thir[args[0]].ty.kind() {
ty::Ref(region, _, mutbl) => (region, mutbl),
_ => span_bug!(span, "overloaded_place: receiver is not a reference"),
let ty::Ref(region, _, mutbl) = *self.thir[args[0]].ty.kind() else {
span_bug!(span, "overloaded_place: receiver is not a reference");
};
let ref_ty = self.tcx.mk_ref(region, ty::TypeAndMut { ty: place_ty, mutbl });

View File

@ -560,9 +560,9 @@ impl SplitVarLenSlice {
/// Pass a set of slices relative to which to split this one.
fn split(&mut self, slices: impl Iterator<Item = SliceKind>) {
let (max_prefix_len, max_suffix_len) = match &mut self.max_slice {
VarLen(prefix, suffix) => (prefix, suffix),
FixedLen(_) => return, // No need to split
let VarLen(max_prefix_len, max_suffix_len) = &mut self.max_slice else {
// No need to split
return;
};
// We grow `self.max_slice` to be larger than all slices encountered, as described above.
// For diagnostics, we keep the prefix and suffix lengths separate, but grow them so that
@ -1181,10 +1181,7 @@ impl<'p, 'tcx> Fields<'p, 'tcx> {
ty: Ty<'tcx>,
variant: &'a VariantDef,
) -> impl Iterator<Item = (Field, Ty<'tcx>)> + Captures<'a> + Captures<'p> {
let (adt, substs) = match ty.kind() {
ty::Adt(adt, substs) => (adt, substs),
_ => bug!(),
};
let ty::Adt(adt, substs) = ty.kind() else { bug!() };
// Whether we must not match the fields of this variant exhaustively.
let is_non_exhaustive = variant.is_field_list_non_exhaustive() && !adt.did.is_local();
@ -1578,9 +1575,8 @@ impl<'p, 'tcx> DeconstructedPat<'p, 'tcx> {
match self_slice.kind {
FixedLen(_) => bug!("{:?} doesn't cover {:?}", self_slice, other_slice),
VarLen(prefix, suffix) => {
let inner_ty = match *self.ty.kind() {
ty::Slice(ty) | ty::Array(ty, _) => ty,
_ => bug!("bad slice pattern {:?} {:?}", self.ctor, self.ty),
let (ty::Slice(inner_ty) | ty::Array(inner_ty, _)) = *self.ty.kind() else {
bug!("bad slice pattern {:?} {:?}", self.ctor, self.ty);
};
let prefix = &self.fields.fields[..prefix];
let suffix = &self.fields.fields[self_slice.arity() - suffix..];

View File

@ -245,9 +245,8 @@ impl<'a, 'tcx> PatCtxt<'a, 'tcx> {
}
hir::PatKind::Tuple(ref pats, ddpos) => {
let tys = match ty.kind() {
ty::Tuple(ref tys) => tys,
_ => span_bug!(pat.span, "unexpected type for tuple pattern: {:?}", ty),
let ty::Tuple(ref tys) = ty.kind() else {
span_bug!(pat.span, "unexpected type for tuple pattern: {:?}", ty);
};
let subpatterns = self.lower_tuple_subpats(pats, tys.len(), ddpos);
PatKind::Leaf { subpatterns }
@ -294,9 +293,8 @@ impl<'a, 'tcx> PatCtxt<'a, 'tcx> {
hir::PatKind::TupleStruct(ref qpath, ref pats, ddpos) => {
let res = self.typeck_results.qpath_res(qpath, pat.hir_id);
let adt_def = match ty.kind() {
ty::Adt(adt_def, _) => adt_def,
_ => span_bug!(pat.span, "tuple struct pattern not applied to an ADT {:?}", ty),
let ty::Adt(adt_def, _) = ty.kind() else {
span_bug!(pat.span, "tuple struct pattern not applied to an ADT {:?}", ty);
};
let variant_def = adt_def.variant_of_res(res);
let subpatterns = self.lower_tuple_subpats(pats, variant_def.fields.len(), ddpos);
@ -576,9 +574,8 @@ impl<'a, 'tcx> PatCtxt<'a, 'tcx> {
}
hir::ExprKind::Lit(ref lit) => (lit, false),
hir::ExprKind::Unary(hir::UnOp::Neg, ref expr) => {
let lit = match expr.kind {
hir::ExprKind::Lit(ref lit) => lit,
_ => span_bug!(expr.span, "not a literal: {:?}", expr),
let hir::ExprKind::Lit(ref lit) = expr.kind else {
span_bug!(expr.span, "not a literal: {:?}", expr);
};
(lit, true)
}

View File

@ -243,9 +243,8 @@ pub(crate) fn on_all_inactive_variants<'tcx>(
active_variant: VariantIdx,
mut handle_inactive_variant: impl FnMut(MovePathIndex),
) {
let enum_mpi = match move_data.rev_lookup.find(enum_place.as_ref()) {
LookupResult::Exact(mpi) => mpi,
LookupResult::Parent(_) => return,
let LookupResult::Exact(enum_mpi) = move_data.rev_lookup.find(enum_place.as_ref()) else {
return;
};
let enum_path = &move_data.move_paths[enum_mpi];
@ -256,9 +255,8 @@ pub(crate) fn on_all_inactive_variants<'tcx>(
let (downcast, base_proj) = variant_path.place.projection.split_last().unwrap();
assert_eq!(enum_place.projection.len(), base_proj.len());
let variant_idx = match *downcast {
mir::ProjectionElem::Downcast(_, idx) => idx,
_ => unreachable!(),
let mir::ProjectionElem::Downcast(_, variant_idx) = *downcast else {
unreachable!();
};
if variant_idx != active_variant {

View File

@ -274,11 +274,9 @@ where
use std::io::{self, Write};
let def_id = body.source.def_id();
let attrs = match RustcMirAttrs::parse(tcx, def_id) {
Ok(attrs) => attrs,
let Ok(attrs) = RustcMirAttrs::parse(tcx, def_id) else {
// Invalid `rustc_mir` attrs are reported in `RustcMirAttrs::parse`
Err(()) => return Ok(()),
return Ok(());
};
let mut file = match attrs.output_path(A::NAME) {

View File

@ -628,9 +628,8 @@ where
ret
});
let mut html_diff = match html_diff {
Cow::Borrowed(_) => return raw_diff,
Cow::Owned(s) => s,
let Cow::Owned(mut html_diff) = html_diff else {
return raw_diff;
};
if inside_font_tag {

View File

@ -385,16 +385,14 @@ impl<'tcx> GenKillAnalysis<'tcx> for MaybeInitializedPlaces<'_, 'tcx> {
switch_on_enum_discriminant(self.tcx, &self.body, &self.body[block], discr)
});
let (enum_place, enum_def) = match enum_ {
Some(x) => x,
None => return,
let Some((enum_place, enum_def)) = enum_ else {
return;
};
let mut discriminants = enum_def.discriminants(self.tcx);
edge_effects.apply(|trans, edge| {
let value = match edge.value {
Some(x) => x,
None => return,
let Some(value) = edge.value else {
return;
};
// MIR building adds discriminants to the `values` array in the same order as they
@ -507,16 +505,14 @@ impl<'tcx> GenKillAnalysis<'tcx> for MaybeUninitializedPlaces<'_, 'tcx> {
switch_on_enum_discriminant(self.tcx, &self.body, &self.body[block], discr)
});
let (enum_place, enum_def) = match enum_ {
Some(x) => x,
None => return,
let Some((enum_place, enum_def)) = enum_ else {
return;
};
let mut discriminants = enum_def.discriminants(self.tcx);
edge_effects.apply(|trans, edge| {
let value = match edge.value {
Some(x) => x,
None => return,
let Some(value) = edge.value else {
return;
};
// MIR building adds discriminants to the `values` array in the same order as they

View File

@ -66,10 +66,7 @@ impl<'tcx> MirPass<'tcx> for AbortUnwindingCalls {
if block.is_cleanup {
continue;
}
let terminator = match &block.terminator {
Some(terminator) => terminator,
None => continue,
};
let Some(terminator) = &block.terminator else { continue };
let span = terminator.source_info.span;
let call_can_unwind = match &terminator.kind {

View File

@ -84,9 +84,8 @@ fn add_move_for_packed_drop<'tcx>(
is_cleanup: bool,
) {
debug!("add_move_for_packed_drop({:?} @ {:?})", terminator, loc);
let (place, target, unwind) = match terminator.kind {
TerminatorKind::Drop { ref place, target, unwind } => (place, target, unwind),
_ => unreachable!(),
let TerminatorKind::Drop { ref place, target, unwind } = terminator.kind else {
unreachable!();
};
let source_info = terminator.source_info;

View File

@ -410,12 +410,9 @@ fn check_unused_unsafe(
) {
let body_id = tcx.hir().maybe_body_owned_by(tcx.hir().local_def_id_to_hir_id(def_id));
let body_id = match body_id {
Some(body) => body,
None => {
debug!("check_unused_unsafe({:?}) - no body found", def_id);
return;
}
let Some(body_id) = body_id else {
debug!("check_unused_unsafe({:?}) - no body found", def_id);
return;
};
let body = tcx.hir().body(body_id);
debug!("check_unused_unsafe({:?}, body={:?}, used_unsafe={:?})", def_id, body, used_unsafe);

View File

@ -26,11 +26,8 @@ impl<'tcx> MirPass<'tcx> for Deaggregator {
let stmt = stmt.replace_nop();
let source_info = stmt.source_info;
let (lhs, kind, operands) = match stmt.kind {
StatementKind::Assign(box (lhs, Rvalue::Aggregate(kind, operands))) => {
(lhs, kind, operands)
}
_ => bug!(),
let StatementKind::Assign(box (lhs, Rvalue::Aggregate(kind, operands))) = stmt.kind else {
bug!();
};
Some(expand_aggregate(

View File

@ -98,12 +98,9 @@ fn find_dead_unwinds<'tcx>(
debug!("find_dead_unwinds @ {:?}: {:?}", bb, bb_data);
let path = match env.move_data.rev_lookup.find(place.as_ref()) {
LookupResult::Exact(e) => e,
LookupResult::Parent(..) => {
debug!("find_dead_unwinds: has parent; skipping");
continue;
}
let LookupResult::Exact(path) = env.move_data.rev_lookup.find(place.as_ref()) else {
debug!("find_dead_unwinds: has parent; skipping");
continue;
};
flow_inits.seek_before_primary_effect(body.terminator_loc(bb));

View File

@ -1413,22 +1413,16 @@ impl EnsureGeneratorFieldAssignmentsNeverAlias<'_> {
impl<'tcx> Visitor<'tcx> for EnsureGeneratorFieldAssignmentsNeverAlias<'_> {
fn visit_place(&mut self, place: &Place<'tcx>, context: PlaceContext, location: Location) {
let lhs = match self.assigned_local {
Some(l) => l,
None => {
// This visitor only invokes `visit_place` for the right-hand side of an assignment
// and only after setting `self.assigned_local`. However, the default impl of
// `Visitor::super_body` may call `visit_place` with a `NonUseContext` for places
// with debuginfo. Ignore them here.
assert!(!context.is_use());
return;
}
let Some(lhs) = self.assigned_local else {
// This visitor only invokes `visit_place` for the right-hand side of an assignment
// and only after setting `self.assigned_local`. However, the default impl of
// `Visitor::super_body` may call `visit_place` with a `NonUseContext` for places
// with debuginfo. Ignore them here.
assert!(!context.is_use());
return;
};
let rhs = match self.saved_local_for_direct_place(*place) {
Some(l) => l,
None => return,
};
let Some(rhs) = self.saved_local_for_direct_place(*place) else { return };
if !self.storage_conflicts.contains(lhs, rhs) {
bug!(

View File

@ -109,9 +109,8 @@ impl<'tcx> Inliner<'tcx> {
continue;
}
let callsite = match self.resolve_callsite(caller_body, bb, bb_data) {
None => continue,
Some(it) => it,
let Some(callsite) = self.resolve_callsite(caller_body, bb, bb_data) else {
continue;
};
let span = trace_span!("process_blocks", %callsite.callee, ?bb);

View File

@ -46,12 +46,9 @@ crate fn mir_callgraph_reachable<'tcx>(
trace!(%caller);
for &(callee, substs) in tcx.mir_inliner_callees(caller.def) {
let substs = caller.subst_mir_and_normalize_erasing_regions(tcx, param_env, substs);
let callee = match ty::Instance::resolve(tcx, param_env, callee, substs).unwrap() {
Some(callee) => callee,
None => {
trace!(?callee, "cannot resolve, skipping");
continue;
}
let Some(callee) = ty::Instance::resolve(tcx, param_env, callee, substs).unwrap() else {
trace!(?callee, "cannot resolve, skipping");
continue;
};
// Found a path.

View File

@ -17,9 +17,8 @@ impl<'tcx> MirPass<'tcx> for LowerIntrinsics {
let terminator = block.terminator.as_mut().unwrap();
if let TerminatorKind::Call { func, args, destination, .. } = &mut terminator.kind {
let func_ty = func.ty(local_decls, tcx);
let (intrinsic_name, substs) = match resolve_rust_intrinsic(tcx, func_ty) {
None => continue,
Some(it) => it,
let Some((intrinsic_name, substs)) = resolve_rust_intrinsic(tcx, func_ty) else {
continue;
};
match intrinsic_name {
sym::unreachable => {

Some files were not shown because too many files have changed in this diff
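
For reference, a minimal standalone sketch of the rewrite every hunk above applies, using made-up names (`Config`, `parse`, `run`) rather than compiler code: a `match` whose only job is to bind one variant and diverge on the rest becomes a `let ... else`, where the refutable pattern binds directly and the `else` block must diverge.

// Hypothetical illustration of the pattern adopted in this commit; not compiler code.
// `let ... else` requires Rust 1.65+ (the compiler enabled `#![feature(let_else)]` at the time).

struct Config {
    verbose: bool,
}

fn parse(arg: &str) -> Option<Config> {
    match arg {
        "" => None,
        "-v" => Some(Config { verbose: true }),
        _ => Some(Config { verbose: false }),
    }
}

fn run(arg: &str) -> bool {
    // Before: bind through a `match` whose failure arm diverges.
    //
    //     let config = match parse(arg) {
    //         Some(config) => config,
    //         None => return false,
    //     };
    //
    // After: `let ... else` binds the `Some` payload directly; the `else`
    // block must diverge (`return`, `continue`, `break`, or panic).
    let Some(config) = parse(arg) else {
        return false;
    };
    config.verbose
}

fn main() {
    assert!(run("-v"));
    assert!(!run(""));
    assert!(!run("--quiet"));
}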