prefer if let to match with None => {} arm in some places

This is a spiritual successor to #34268/8531d581, in which we replaced a
number of matches of None to the unit value with `if let` conditionals
where it was judged that this made for clearer/simpler code (as would be
recommended by Manishearth/rust-clippy's `single_match` lint). The same
rationale applies to matches of None to the empty block.
Zack M. Davis 2016-07-03 14:38:37 -07:00
parent 5e858f34df
commit d37edef9dd
47 changed files with 213 additions and 347 deletions
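
For illustration, a minimal self-contained sketch of the pattern being rewritten throughout this commit (the `maybe_widget`/`use_widget` names are hypothetical, not taken from any of the diffs below):

fn maybe_widget() -> Option<u32> { Some(42) }
fn use_widget(w: u32) { println!("using widget {}", w); }

// Before: the shape this commit removes, a match whose None arm is an empty block
// (the shape clippy's `single_match` lint flags).
fn before() {
    match maybe_widget() {
        Some(w) => use_widget(w),
        None => {}
    }
}

// After: the same logic expressed with `if let`.
fn after() {
    if let Some(w) = maybe_widget() {
        use_widget(w);
    }
}

fn main() { before(); after(); }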

View File

@@ -840,12 +840,9 @@ pub fn write(output: &mut Write, args: Arguments) -> Result {
     }
     // There can be only one trailing string piece left.
-    match pieces.next() {
-        Some(piece) => {
-            formatter.buf.write_str(*piece)?;
-        }
-        None => {}
-    }
+    if let Some(piece) = pieces.next() {
+        formatter.buf.write_str(*piece)?;
+    }
     Ok(())
 }

View File

@@ -144,9 +144,8 @@ impl Rand for char {
             // Rejection sampling. About 0.2% of numbers with at most
             // 21-bits are invalid codepoints (surrogates), so this
             // will succeed first go almost every time.
-            match char::from_u32(rng.next_u32() & CHAR_MASK) {
-                Some(c) => return c,
-                None => {}
+            if let Some(c) = char::from_u32(rng.next_u32() & CHAR_MASK) {
+                return c;
             }
         }
     }

View File

@@ -1697,16 +1697,13 @@ impl<'a> State<'a> {
                 self.commasep(Inconsistent, &data.inputs, |s, ty| s.print_type(&ty))?;
                 word(&mut self.s, ")")?;
-                match data.output {
-                    None => {}
-                    Some(ref ty) => {
+                if let Some(ref ty) = data.output {
                     self.space_if_not_bol()?;
                     self.word_space("->")?;
                     self.print_type(&ty)?;
                 }
-                }
             }
         }
         Ok(())
     }

View File

@@ -842,12 +842,9 @@ impl<'a, 'gcx, 'tcx> RegionVarBindings<'a, 'gcx, 'tcx> {
         where F: FnMut(&RegionVarBindings<'a, 'gcx, 'tcx>, Region, Region)
     {
         let vars = TwoRegions { a: a, b: b };
-        match self.combine_map(t).borrow().get(&vars) {
-            Some(&c) => {
+        if let Some(&c) = self.combine_map(t).borrow().get(&vars) {
             return ReVar(c);
         }
-            None => {}
-        }
         let c = self.new_region_var(MiscVariable(origin.span()));
         self.combine_map(t).borrow_mut().insert(vars, c);
         if self.in_snapshot() {

View File

@@ -1055,9 +1055,7 @@ impl<'a> ast_visit::Visitor for EarlyContext<'a> {
 // Output any lints that were previously added to the session.
 impl<'a, 'tcx> IdVisitingOperation for LateContext<'a, 'tcx> {
     fn visit_id(&mut self, id: ast::NodeId) {
-        match self.sess().lints.borrow_mut().remove(&id) {
-            None => {}
-            Some(lints) => {
+        if let Some(lints) = self.sess().lints.borrow_mut().remove(&id) {
             debug!("LateContext::visit_id: id={:?} lints={:?}", id, lints);
             for (lint_id, span, msg) in lints {
                 self.span_lint(lint_id.lint, span, &msg[..])
@@ -1065,7 +1063,6 @@ impl<'a, 'tcx> IdVisitingOperation for LateContext<'a, 'tcx> {
             }
         }
-        }
     }
 }
 // This lint pass is defined here because it touches parts of the `LateContext`
 // that we don't want to expose. It records the lint level at certain AST

View File

@@ -168,9 +168,8 @@ fn build_nodeid_to_index(decl: Option<&hir::FnDecl>,
     // into cfg itself? i.e. introduce a fn-based flow-graph in
     // addition to the current block-based flow-graph, rather than
     // have to put traversals like this here?
-    match decl {
-        None => {}
-        Some(decl) => add_entries_from_fn_decl(&mut index, decl, cfg.entry)
+    if let Some(decl) = decl {
+        add_entries_from_fn_decl(&mut index, decl, cfg.entry);
     }
     cfg.graph.each_node(|node_idx, node| {

View File

@@ -105,9 +105,8 @@ fn calculate_type(sess: &session::Session,
             // If the global prefer_dynamic switch is turned off, first attempt
             // static linkage (this can fail).
             config::CrateTypeExecutable if !sess.opts.cg.prefer_dynamic => {
-                match attempt_static(sess) {
-                    Some(v) => return v,
-                    None => {}
+                if let Some(v) = attempt_static(sess) {
+                    return v;
                 }
             }
@@ -119,9 +118,8 @@ fn calculate_type(sess: &session::Session,
             // to be found, we generate some nice pretty errors.
             config::CrateTypeStaticlib |
             config::CrateTypeCdylib => {
-                match attempt_static(sess) {
-                    Some(v) => return v,
-                    None => {}
+                if let Some(v) = attempt_static(sess) {
+                    return v;
                 }
                 for cnum in sess.cstore.crates() {
                     let src = sess.cstore.used_crate_source(cnum);
@@ -136,9 +134,8 @@ fn calculate_type(sess: &session::Session,
            // to try to eagerly statically link all dependencies. This is normally
            // done for end-product dylibs, not intermediate products.
            config::CrateTypeDylib if !sess.opts.cg.prefer_dynamic => {
-                match attempt_static(sess) {
-                    Some(v) => return v,
-                    None => {}
+                if let Some(v) = attempt_static(sess) {
+                    return v;
                 }
             }

View File

@@ -735,9 +735,7 @@ impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> {
         for i in 0..autoderefs {
             let deref_id = ty::MethodCall::autoderef(expr.id, i as u32);
-            match self.mc.infcx.node_method_ty(deref_id) {
-                None => {}
-                Some(method_ty) => {
+            if let Some(method_ty) = self.mc.infcx.node_method_ty(deref_id) {
                 let cmt = return_if_err!(self.mc.cat_expr_autoderefd(expr, i));
                 // the method call infrastructure should have
@@ -757,7 +755,6 @@ impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> {
             }
         }
-        }
     }

     fn walk_autoderefref(&mut self,
                          expr: &hir::Expr,

View File

@@ -598,12 +598,9 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> {
     fn arm_pats_bindings<F>(&mut self, pat: Option<&hir::Pat>, f: F) where
         F: FnMut(&mut Liveness<'a, 'tcx>, LiveNode, Variable, Span, NodeId),
     {
-        match pat {
-            Some(pat) => {
+        if let Some(pat) = pat {
             self.pat_bindings(pat, f);
         }
-            None => {}
-        }
     }

     fn define_bindings_in_pat(&mut self, pat: &hir::Pat, succ: LiveNode)

View File

@@ -284,9 +284,8 @@ impl<'a, 'tcx, 'v> Visitor<'v> for LifetimeContext<'a, 'tcx> {
     fn visit_generics(&mut self, generics: &hir::Generics) {
         for ty_param in generics.ty_params.iter() {
             walk_list!(self, visit_ty_param_bound, &ty_param.bounds);
-            match ty_param.default {
-                Some(ref ty) => self.visit_ty(&ty),
-                None => {}
+            if let Some(ref ty) = ty_param.default {
+                self.visit_ty(&ty);
             }
         }
         for predicate in &generics.where_clause.predicates {

View File

@@ -123,9 +123,8 @@ impl<'a> Context<'a> {
 impl<'a, 'v> Visitor<'v> for Context<'a> {
     fn visit_foreign_item(&mut self, i: &hir::ForeignItem) {
-        match lang_items::extract(&i.attrs) {
-            None => {}
-            Some(lang_item) => self.register(&lang_item, i.span),
+        if let Some(lang_item) = lang_items::extract(&i.attrs) {
+            self.register(&lang_item, i.span);
         }
         intravisit::walk_foreign_item(self, i)
     }

View File

@@ -250,16 +250,13 @@ impl Session {
                     msg: String) {
         let lint_id = lint::LintId::of(lint);
         let mut lints = self.lints.borrow_mut();
-        match lints.get_mut(&id) {
-            Some(arr) => {
+        if let Some(arr) = lints.get_mut(&id) {
             let tuple = (lint_id, sp, msg);
             if !arr.contains(&tuple) {
                 arr.push(tuple);
             }
             return;
         }
-            None => {}
-        }
         lints.insert(id, vec!((lint_id, sp, msg)));
     }

     pub fn reserve_node_ids(&self, count: ast::NodeId) -> ast::NodeId {

View File

@@ -168,13 +168,12 @@ impl<'a, 'tcx> ty::TyS<'tcx> {
         // which is incorrect. This value was computed based on the crutch
         // value for the type contents of list. The correct value is
         // TC::OwnsOwned. This manifested as issue #4821.
-        match cache.get(&ty) {
-            Some(tc) => { return *tc; }
-            None => {}
+        if let Some(tc) = cache.get(&ty) {
+            return *tc;
         }
-        match tcx.tc_cache.borrow().get(&ty) { // Must check both caches!
-            Some(tc) => { return *tc; }
-            None => {}
+        // Must check both caches!
+        if let Some(tc) = tcx.tc_cache.borrow().get(&ty) {
+            return *tc;
         }
         cache.insert(ty, TC::None);

View File

@@ -521,9 +521,8 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
     fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'tcx> { self.0 }

     fn fold_ty(&mut self, ty: Ty<'tcx>) -> Ty<'tcx> {
-        match self.tcx().normalized_cache.borrow().get(&ty).cloned() {
-            None => {}
-            Some(u) => return u
+        if let Some(u) = self.tcx().normalized_cache.borrow().get(&ty).cloned() {
+            return u;
         }
         // FIXME(eddyb) should local contexts have a cache too?
@@ -714,4 +713,3 @@ impl<'tcx> TypeVisitor<'tcx> for LateBoundRegionsCollector {
         false
     }
 }

View File

@@ -712,8 +712,7 @@ impl<'a, 'tcx> ty::TyS<'tcx> {
                 //     struct Foo;
                 //     struct Bar<T> { x: Bar<Foo> }
-                match iter.next() {
-                    Some(&seen_type) => {
+                if let Some(&seen_type) = iter.next() {
                     if same_struct_or_enum(seen_type, def) {
                         debug!("SelfRecursive: {:?} contains {:?}",
                                seen_type,
@@ -721,8 +720,6 @@ impl<'a, 'tcx> ty::TyS<'tcx> {
                         return Representability::SelfRecursive;
                     }
                 }
-                    None => {}
-                }

                 // We also need to know whether the first item contains other types
                 // that are structurally recursive. If we don't catch this case, we

View File

@@ -274,12 +274,9 @@ impl<'a, 'tcx> MoveData<'tcx> {
     /// `lp` and any of its base paths that do not yet have an index.
     pub fn move_path(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>,
                      lp: Rc<LoanPath<'tcx>>) -> MovePathIndex {
-        match self.path_map.borrow().get(&lp) {
-            Some(&index) => {
+        if let Some(&index) = self.path_map.borrow().get(&lp) {
             return index;
         }
-            None => {}
-        }

         let index = match lp.kind {
             LpVar(..) | LpUpvar(..) => {

View File

@@ -176,9 +176,8 @@ fn check_expr(cx: &mut MatchCheckCtxt, ex: &hir::Expr) {
                 // Second, if there is a guard on each arm, make sure it isn't
                 // assigning or borrowing anything mutably.
-                match arm.guard {
-                    Some(ref guard) => check_for_mutation_in_guard(cx, &guard),
-                    None => {}
+                if let Some(ref guard) = arm.guard {
+                    check_for_mutation_in_guard(cx, &guard);
                 }
             }

View File

@@ -150,13 +150,10 @@ impl LateLintPass for UnusedResults {
             if attr.check_name("must_use") {
                 let mut msg = "unused result which must be used".to_string();
                 // check for #[must_use="..."]
-                match attr.value_str() {
-                    None => {}
-                    Some(s) => {
+                if let Some(s) = attr.value_str() {
                     msg.push_str(": ");
                     msg.push_str(&s);
                 }
-                }
                 cx.span_lint(UNUSED_MUST_USE, sp, &msg);
                 return true;
             }

View File

@@ -24,8 +24,8 @@ fn main() {
     let llvm_config = env::var_os("LLVM_CONFIG")
         .map(PathBuf::from)
         .unwrap_or_else(|| {
-            match env::var_os("CARGO_TARGET_DIR").map(PathBuf::from) {
-                Some(dir) => {
+            if let Some(dir) = env::var_os("CARGO_TARGET_DIR")
+                              .map(PathBuf::from) {
                 let to_test = dir.parent()
                                  .unwrap()
                                  .parent()
@@ -36,8 +36,6 @@ fn main() {
                     return to_test;
                 }
             }
-                None => {}
-            }
             PathBuf::from("llvm-config")
         });

View File

@@ -682,9 +682,7 @@ fn each_child_of_item_or_crate<F, G>(intr: Rc<IdentInterner>,
         };

         // Get the item.
-        match crate_data.get_item(child_def_id.index) {
-            None => {}
-            Some(child_item_doc) => {
+        if let Some(child_item_doc) = crate_data.get_item(child_def_id.index) {
             // Hand off the item to the callback.
             let child_name = item_name(&intr, child_item_doc);
             let def_like = item_to_def_like(crate_data, child_item_doc, child_def_id);
@@ -692,7 +690,6 @@ fn each_child_of_item_or_crate<F, G>(intr: Rc<IdentInterner>,
             callback(def_like, child_name, visibility);
         }
     }
-    }

     // As a special case, iterate over all static methods of
     // associated implementations too. This is a bit of a botch.

View File

@@ -503,19 +503,11 @@ impl<'a> Context<'a> {
                              self.crate_name);
             err.note("candidates:");
             for (_, lib) in libraries {
-                match lib.dylib {
-                    Some((ref p, _)) => {
-                        err.note(&format!("path: {}",
-                                          p.display()));
-                    }
-                    None => {}
-                }
-                match lib.rlib {
-                    Some((ref p, _)) => {
-                        err.note(&format!("path: {}",
-                                          p.display()));
-                    }
-                    None => {}
+                if let Some((ref p, _)) = lib.dylib {
+                    err.note(&format!("path: {}", p.display()));
+                }
+                if let Some((ref p, _)) = lib.rlib {
+                    err.note(&format!("path: {}", p.display()));
                 }
                 let data = lib.metadata.as_slice();
                 let name = decoder::get_crate_name(data);

View File

@@ -396,8 +396,7 @@ impl<'a,'tcx> TyDecoder<'a,'tcx> {
         let pos = self.parse_vuint();
         let key = ty::CReaderCacheKey { cnum: self.krate, pos: pos };
-        match tcx.rcache.borrow().get(&key).cloned() {
-            Some(tt) => {
+        if let Some(tt) = tcx.rcache.borrow().get(&key).cloned() {
             // If there is a closure buried in the type some where, then we
             // need to re-convert any def ids (see case 'k', below). That means
             // we can't reuse the cached version.
@@ -405,8 +404,6 @@ impl<'a,'tcx> TyDecoder<'a,'tcx> {
                 return tt;
             }
         }
-            None => {}
-        }
         let mut substate = TyDecoder::new(self.data,
                                           self.krate,

View File

@@ -64,9 +64,9 @@ pub struct ty_abbrev {
 pub type abbrev_map<'tcx> = RefCell<FnvHashMap<Ty<'tcx>, ty_abbrev>>;

 pub fn enc_ty<'a, 'tcx>(w: &mut Cursor<Vec<u8>>, cx: &ctxt<'a, 'tcx>, t: Ty<'tcx>) {
-    match cx.abbrevs.borrow_mut().get(&t) {
-        Some(a) => { w.write_all(&a.s); return; }
-        None => {}
+    if let Some(a) = cx.abbrevs.borrow_mut().get(&t) {
+        w.write_all(&a.s);
+        return;
     }
     let pos = w.position();

View File

@@ -299,13 +299,11 @@ impl<'l, 'tcx: 'l> SaveContext<'l, 'tcx> {
         let mut result = String::from("<");
         result.push_str(&rustc::hir::print::ty_to_string(&ty));

-        match self.tcx.trait_of_item(self.tcx.map.local_def_id(id)) {
-            Some(def_id) => {
+        if let Some(def_id) = self.tcx
+                                  .trait_of_item(self.tcx.map.local_def_id(id)) {
             result.push_str(" as ");
             result.push_str(&self.tcx.item_path_str(def_id));
         }
-            None => {}
-        }
         result.push_str(">");
         result
     }

View File

@@ -1706,8 +1706,7 @@ pub fn store_local<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
             //
             // In such cases, the more general path is unsafe, because
             // it assumes it is matching against a valid value.
-            match simple_name(pat) {
-                Some(name) => {
+            if let Some(name) = simple_name(pat) {
                 let var_scope = cleanup::var_scope(tcx, local.id);
                 return mk_binding_alloca(
                     bcx, pat.id, name, var_scope, (),
@@ -1716,9 +1715,6 @@ pub fn store_local<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
                                          expr::SaveIn(v)));
             }
-                None => {}
-            }

             // General path.
             let init_datum =
                 unpack_datum!(bcx, expr::trans_to_lvalue(bcx, &init_expr, "let"));

View File

@@ -191,9 +191,8 @@ pub fn represent_type<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
                                 t: Ty<'tcx>)
                                 -> Rc<Repr<'tcx>> {
     debug!("Representing: {}", t);
-    match cx.adt_reprs().borrow().get(&t) {
-        Some(repr) => return repr.clone(),
-        None => {}
+    if let Some(repr) = cx.adt_reprs().borrow().get(&t) {
+        return repr.clone();
     }

     let repr = Rc::new(represent_type_uncached(cx, t));

View File

@@ -136,12 +136,9 @@ pub struct _InsnCtxt {
 impl Drop for _InsnCtxt {
     fn drop(&mut self) {
         TASK_LOCAL_INSN_KEY.with(|slot| {
-            match slot.borrow_mut().as_mut() {
-                Some(ctx) => {
+            if let Some(ctx) = slot.borrow_mut().as_mut() {
                 ctx.pop();
             }
-                None => {}
-            }
         })
     }
 }

View File

@@ -138,8 +138,7 @@ pub fn addr_of(ccx: &CrateContext,
                align: machine::llalign,
                kind: &str)
                -> ValueRef {
-    match ccx.const_globals().borrow().get(&cv) {
-        Some(&gv) => {
+    if let Some(&gv) = ccx.const_globals().borrow().get(&cv) {
         unsafe {
             // Upgrade the alignment in cases where the same constant is used with different
             // alignment requirements
@@ -149,8 +148,6 @@ pub fn addr_of(ccx: &CrateContext,
         }
         return gv;
     }
-        None => {}
-    }
     let gv = addr_of_mut(ccx, cv, align, kind);
     unsafe {
         llvm::LLVMSetGlobalConstant(gv, True);

View File

@@ -572,12 +572,9 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> {
         // will only succeed if both operands are constant.
         // This is necessary to determine when an overflow Assert
         // will always panic at runtime, and produce a warning.
-        match const_scalar_checked_binop(bcx.tcx(), op, lhs, rhs, input_ty) {
-            Some((val, of)) => {
+        if let Some((val, of)) = const_scalar_checked_binop(bcx.tcx(), op, lhs, rhs, input_ty) {
             return OperandValue::Pair(val, C_bool(bcx.ccx(), of));
         }
-            None => {}
-        }

         let (val, of) = match op {
             // These are checked using intrinsics

View File

@@ -864,9 +864,8 @@ impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> {
     // THE ACTUAL SEARCH

     fn pick(mut self) -> PickResult<'tcx> {
-        match self.pick_core() {
-            Some(r) => return r,
-            None => {}
+        if let Some(r) = self.pick_core() {
+            return r;
         }

         let static_candidates = mem::replace(&mut self.static_candidates, vec![]);
@@ -929,9 +928,8 @@ impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> {
             return None;
         }

-        match self.pick_by_value_method(step) {
-            Some(result) => return Some(result),
-            None => {}
+        if let Some(result) = self.pick_by_value_method(step) {
+            return Some(result);
         }

         self.pick_autorefd_method(step)
@@ -1003,13 +1001,11 @@ impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> {
         let mut possibly_unsatisfied_predicates = Vec::new();

         debug!("searching inherent candidates");
-        match self.consider_candidates(self_ty, &self.inherent_candidates,
-                                       &mut possibly_unsatisfied_predicates) {
-            None => {}
-            Some(pick) => {
-                return Some(pick);
-            }
-        }
+        if let Some(pick) = self.consider_candidates(self_ty,
+                                                     &self.inherent_candidates,
+                                                     &mut possibly_unsatisfied_predicates) {
+            return Some(pick);
+        }

         debug!("searching extension candidates");
         let res = self.consider_candidates(self_ty, &self.extension_candidates,

View File

@@ -334,13 +334,8 @@ impl<'cx, 'gcx, 'tcx> WritebackCx<'cx, 'gcx, 'tcx> {
         };

         //NB(jroesch): We need to match twice to avoid a double borrow which would cause an ICE
-        match new_method {
-            Some(method) => {
-                self.tcx().tables.borrow_mut().method_map.insert(
-                    method_call,
-                    method);
-            }
-            None => {}
+        if let Some(method) = new_method {
+            self.tcx().tables.borrow_mut().method_map.insert(method_call, method);
         }
     }

View File

@@ -174,13 +174,10 @@ impl<'a, 'gcx, 'tcx> CoherenceChecker<'a, 'gcx, 'tcx> {
     }

     fn add_inherent_impl(&self, base_def_id: DefId, impl_def_id: DefId) {
-        match self.inherent_impls.borrow().get(&base_def_id) {
-            Some(implementation_list) => {
+        if let Some(implementation_list) = self.inherent_impls.borrow().get(&base_def_id) {
             implementation_list.borrow_mut().push(impl_def_id);
             return;
         }
-            None => {}
-        }

         self.inherent_impls.borrow_mut().insert(
             base_def_id,

View File

@@ -313,14 +313,13 @@ fn check_start_fn_ty(ccx: &CrateCtxt,
 fn check_for_entry_fn(ccx: &CrateCtxt) {
     let tcx = ccx.tcx;
     let _task = tcx.dep_graph.in_task(DepNode::CheckEntryFn);
-    match *tcx.sess.entry_fn.borrow() {
-        Some((id, sp)) => match tcx.sess.entry_type.get() {
+    if let Some((id, sp)) = *tcx.sess.entry_fn.borrow() {
+        match tcx.sess.entry_type.get() {
             Some(config::EntryMain) => check_main_fn_ty(ccx, id, sp),
             Some(config::EntryStart) => check_start_fn_ty(ccx, id, sp),
             Some(config::EntryNone) => {}
             None => bug!("entry function without a type")
-        },
-        None => {}
+        }
     }
 }

View File

@@ -131,9 +131,8 @@ impl fmt::Display for clean::Generics {
                 write!(f, ":&nbsp;{}", TyParamBounds(&tp.bounds))?;
             }
-            match tp.default {
-                Some(ref ty) => { write!(f, "&nbsp;=&nbsp;{}", ty)?; },
-                None => {}
+            if let Some(ref ty) = tp.default {
+                write!(f, "&nbsp;=&nbsp;{}", ty)?;
             };
         }
     }
@@ -401,16 +400,13 @@ fn primitive_link(f: &mut fmt::Formatter,
                 }
                 (_, render::Unknown) => None,
             };
-            match loc {
-                Some(root) => {
+            if let Some(root) = loc {
                 write!(f, "<a class='primitive' href='{}{}/primitive.{}.html'>",
                        root,
                        path.0.first().unwrap(),
                        prim.to_url_str())?;
                 needs_termination = true;
             }
-                None => {}
-            }
         }
         None => {}
     }

View File

@@ -352,9 +352,8 @@ fn write_header(class: Option<&str>,
                 out: &mut Write)
                 -> io::Result<()> {
     write!(out, "<pre ")?;
-    match id {
-        Some(id) => write!(out, "id='{}' ", id)?,
-        None => {}
+    if let Some(id) = id {
+        write!(out, "id='{}' ", id)?;
     }
     write!(out, "class='rust {}'>\n", class.unwrap_or(""))
 }

View File

@@ -589,8 +589,7 @@ fn build_index(krate: &clean::Crate, cache: &mut Cache) -> String {
     // Attach all orphan methods to the type's definition if the type
     // has since been learned.
     for &(did, ref item) in orphan_methods {
-        match paths.get(&did) {
-            Some(&(ref fqp, _)) => {
+        if let Some(&(ref fqp, _)) = paths.get(&did) {
             search_index.push(IndexItem {
                 ty: shortty(item),
                 name: item.name.clone().unwrap(),
@@ -600,8 +599,6 @@ fn build_index(krate: &clean::Crate, cache: &mut Cache) -> String {
                 parent_idx: None,
                 search_type: get_index_search_type(&item),
             });
-            },
-            None => {}
         }
     }
@@ -2093,16 +2090,13 @@ fn item_trait(w: &mut fmt::Formatter, cx: &Context, it: &clean::Item,
         <h2 id='implementors'>Implementors</h2>
         <ul class='item-list' id='implementors-list'>
     ")?;
-    match cache.implementors.get(&it.def_id) {
-        Some(implementors) => {
+    if let Some(implementors) = cache.implementors.get(&it.def_id) {
         for i in implementors {
             write!(w, "<li><code>")?;
             fmt_impl_for_trait_page(&i.impl_, w)?;
             writeln!(w, "</code></li>")?;
         }
     }
-        None => {}
-    }
     write!(w, "</ul>")?;
     write!(w, r#"<script type="text/javascript" async
                  src="{root_path}/implementors/{path}/{ty}.{name}.js">

View File

@@ -1764,9 +1764,8 @@ impl<T: Iterator<Item=char>> Parser<T> {
                     return self.parse_array(first);
                 }
                 ParseArrayComma => {
-                    match self.parse_array_comma_or_end() {
-                        Some(evt) => { return evt; }
-                        None => {}
+                    if let Some(evt) = self.parse_array_comma_or_end() {
+                        return evt;
                     }
                 }
                 ParseObject(first) => {
@@ -2583,9 +2582,8 @@ impl<'a, T: Encodable> fmt::Display for AsPrettyJson<'a, T> {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         let mut shim = FormatShim { inner: f };
         let mut encoder = PrettyEncoder::new(&mut shim);
-        match self.indent {
-            Some(n) => encoder.set_indent(n),
-            None => {}
+        if let Some(n) = self.indent {
+            encoder.set_indent(n);
         }
         match self.inner.encode(&mut encoder) {
             Ok(_) => Ok(()),

View File

@@ -944,9 +944,8 @@ impl SyntaxEnv {
     pub fn find(&self, k: Name) -> Option<Rc<SyntaxExtension>> {
         for frame in self.chain.iter().rev() {
-            match frame.map.get(&k) {
-                Some(v) => return Some(v.clone()),
-                None => {}
+            if let Some(v) = frame.map.get(&k) {
+                return Some(v.clone());
             }
         }
         None

View File

@@ -225,13 +225,10 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
             } else { /* repeat */
                 *r.repeat_idx.last_mut().unwrap() += 1;
                 r.stack.last_mut().unwrap().idx = 0;
-                match r.stack.last().unwrap().sep.clone() {
-                    Some(tk) => {
-                        r.cur_tok = tk; /* repeat same span, I guess */
+                if let Some(tk) = r.stack.last().unwrap().sep.clone() {
+                    r.cur_tok = tk; // repeat same span, I guess
                     return ret_val;
                 }
-                    None => {}
-                }
             }
         }
     loop { /* because it's easiest, this handles `TokenTree::Delimited` not starting

View File

@@ -160,13 +160,10 @@ impl<'a> Parser<'a> {
             _ => None,
         };

-        match nt_meta {
-            Some(meta) => {
+        if let Some(meta) = nt_meta {
             self.bump();
             return Ok(meta);
         }
-            None => {}
-        }

         let lo = self.span.lo;
         let ident = self.parse_ident()?;

View File

@@ -470,16 +470,13 @@ impl<'a> StringReader<'a> {
     /// PRECONDITION: self.curr is not whitespace
     /// Eats any kind of comment.
     fn scan_comment(&mut self) -> Option<TokenAndSpan> {
-        match self.curr {
-            Some(c) => {
+        if let Some(c) = self.curr {
             if c.is_whitespace() {
                 self.span_diagnostic.span_err(syntax_pos::mk_sp(self.last_pos, self.last_pos),
                                               "called consume_any_line_comment, but there \
                                                was whitespace");
             }
         }
-            None => {}
-        }

         if self.curr_is('/') {
             match self.nextch() {

View File

@@ -2752,9 +2752,8 @@ impl<'a> Parser<'a> {
             }
         };

-        match parse_kleene_op(self)? {
-            Some(kleene_op) => return Ok((None, kleene_op)),
-            None => {}
+        if let Some(kleene_op) = parse_kleene_op(self)? {
+            return Ok((None, kleene_op));
         }

         let separator = self.bump_and_get();
@@ -5691,16 +5690,13 @@ impl<'a> Parser<'a> {
             }
             _ => None
         };
-        match nt_item {
-            Some(mut item) => {
+        if let Some(mut item) = nt_item {
             self.bump();
             let mut attrs = attrs;
             mem::swap(&mut item.attrs, &mut attrs);
             item.attrs.extend(attrs);
             return Ok(Some(P(item)));
         }
-            None => {}
-        }

         let lo = self.span.lo;

View File

@@ -1264,14 +1264,11 @@ impl<'a> State<'a> {
                     _ => {}
                 }

-                match *opt_trait {
-                    Some(ref t) => {
+                if let Some(ref t) = *opt_trait {
                     try!(self.print_trait_ref(t));
                     try!(space(&mut self.s));
                     try!(self.word_space("for"));
                 }
-                    None => {}
-                }

                 try!(self.print_type(&ty));
                 try!(self.print_where_clause(&generics.where_clause));
@@ -1470,12 +1467,9 @@ impl<'a> State<'a> {
                     try!(self.print_tt(tt_elt));
                 }
                 try!(word(&mut self.s, ")"));
-                match seq.separator {
-                    Some(ref tk) => {
+                if let Some(ref tk) = seq.separator {
                     try!(word(&mut self.s, &token_to_string(tk)));
                 }
-                    None => {},
-                }
                 match seq.op {
                     tokenstream::KleeneOp::ZeroOrMore => word(&mut self.s, "*"),
                     tokenstream::KleeneOp::OneOrMore => word(&mut self.s, "+"),

View File

@@ -360,14 +360,11 @@ fn find_type_parameters(ty: &ast::Ty, ty_param_names: &[ast::Name], span: Span,
     fn visit_ty(&mut self, ty: &ast::Ty) {
         match ty.node {
             ast::TyKind::Path(_, ref path) if !path.global => {
-                match path.segments.first() {
-                    Some(segment) => {
+                if let Some(segment) = path.segments.first() {
                     if self.ty_param_names.contains(&segment.identifier.name) {
                         self.types.push(P(ty.clone()));
                     }
                 }
-                    None => {}
-                }
             }
             _ => {}
         }

View File

@@ -88,13 +88,10 @@ pub fn expand_env<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree])
         }
     };

-    match exprs.next() {
-        None => {}
-        Some(_) => {
+    if let Some(_) = exprs.next() {
         cx.span_err(sp, "env! takes 1 or 2 arguments");
         return DummyResult::expr(sp);
     }
-    }

     let e = match env::var(&var[..]) {
         Err(_) => {

View File

@@ -126,16 +126,13 @@ fn parse_args(ecx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree])
             panictry!(p.expect(&token::Eq));
             let e = panictry!(p.parse_expr());
-            match names.get(name) {
-                None => {}
-                Some(prev) => {
+            if let Some(prev) = names.get(name) {
                 ecx.struct_span_err(e.span,
                                     &format!("duplicate argument named `{}`",
                                              name))
                     .span_note(prev.span, "previously here")
                     .emit();
-                    continue
-                }
+                continue;
             }
             order.push(name.to_string());
             names.insert(name.to_string(), e);
@@ -665,14 +662,11 @@ pub fn expand_preparsed_format_args(ecx: &mut ExtCtxt, sp: Span,
             Some(piece) => {
                 if !parser.errors.is_empty() { break }
                 cx.verify_piece(&piece);
-                match cx.trans_piece(&piece) {
-                    Some(piece) => {
+                if let Some(piece) = cx.trans_piece(&piece) {
                     let s = cx.trans_literal_string();
                     cx.str_pieces.push(s);
                     cx.pieces.push(piece);
                 }
-                    None => {}
-                }
             }
             None => break
         }

View File

@@ -747,13 +747,10 @@ pub fn run_tests_console(opts: &TestOpts, tests: Vec<TestDescAndFn>) -> io::Resu
             PadOnRight => t.desc.name.as_slice().len(),
         }
     }
-    match tests.iter().max_by_key(|t| len_if_padded(*t)) {
-        Some(t) => {
+    if let Some(t) = tests.iter().max_by_key(|t| len_if_padded(*t)) {
         let n = t.desc.name.as_slice();
         st.max_name_len = n.len();
     }
-        None => {}
-    }
     run_tests(opts, tests, |x| callback(&x, &mut st))?;
     return st.write_run_finish();
 }