Rollup merge of #70269 - matthiaskrgr:clippy_closures, r=Dylan-DPC

remove redundant closures (clippy::redundant_closure)
commit c984a96189
Author: Mazdak Farrokhzad
Date: 2020-03-23 04:26:15 +01:00 (committed by GitHub)
22 changed files with 33 additions and 41 deletions
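
For context on the lint driving every hunk below: clippy::redundant_closure fires when a closure does nothing but forward its argument to another callable. Function items, tuple-struct constructors, and enum tuple-variant constructors (e.g. GenericArg::Type) already implement the Fn traits, so the closure wrapper can be dropped and the path passed directly. A minimal, self-contained sketch of the before/after shapes recurring in this diff (the Wrapper type and sample data are made up for illustration, not taken from rustc):

// Illustrative only: `Wrapper` and the sample data are hypothetical,
// not part of this commit. Both forms compile; clippy flags the first.
#[derive(Debug)]
struct Wrapper(String);

fn main() {
    let names = vec!["a".to_string(), "b".to_string()];

    // Redundant closures: each closure only forwards its argument.
    let v1: Vec<Wrapper> = names.iter().cloned().map(|n| Wrapper(n)).collect();
    let s1 = None::<String>.unwrap_or_else(|| String::new());

    // Eta-reduced equivalents: the constructor and fn item are passed directly.
    let v2: Vec<Wrapper> = names.iter().cloned().map(Wrapper).collect();
    let s2 = None::<String>.unwrap_or_else(String::new);

    println!("{:?} {:?} {:?} {:?}", v1, s1, v2, s2);
}

The reduction is not always available: if the closure body involves an implicit adjustment (a deref coercion, a reborrow) or calls a method on captured state, the closure has to stay, which is presumably why some closure call sites remain untouched here.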


@@ -196,7 +196,7 @@ impl<K: Clone, V: Clone> Clone for BTreeMap<K, V> {
                     (root, length)
                 };

-                out_node.push(k, v, subroot.unwrap_or_else(|| node::Root::new_leaf()));
+                out_node.push(k, v, subroot.unwrap_or_else(node::Root::new_leaf));
                 out_tree.length += 1 + sublength;
             }
         }
@@ -2147,7 +2147,7 @@ impl<K, V> BTreeMap<K, V> {
     /// If the root node is the empty (non-allocated) root node, allocate our
     /// own node.
     fn ensure_root_is_owned(&mut self) -> &mut node::Root<K, V> {
-        self.root.get_or_insert_with(|| node::Root::new_leaf())
+        self.root.get_or_insert_with(node::Root::new_leaf)
     }
 }


@@ -245,7 +245,7 @@ impl DepGraph {
         C: DepGraphSafe + StableHashingContextProvider<'a>,
     {
         if let Some(ref data) = self.data {
-            let task_deps = create_task(key).map(|deps| Lock::new(deps));
+            let task_deps = create_task(key).map(Lock::new);

             // In incremental mode, hash the result of the task. We don't
             // do anything with the hash yet, but we are computing it


@@ -796,7 +796,7 @@ impl UndefMask {
         }

         // FIXME(oli-obk): optimize this for allocations larger than a block.
-        let idx = (start.bytes()..end.bytes()).map(|i| Size::from_bytes(i)).find(|&i| !self.get(i));
+        let idx = (start.bytes()..end.bytes()).map(Size::from_bytes).find(|&i| !self.get(i));
         match idx {
             Some(idx) => Err(idx),


@@ -250,7 +250,7 @@ impl ParenthesizedArgs {
     pub fn as_angle_bracketed_args(&self) -> AngleBracketedArgs {
         AngleBracketedArgs {
             span: self.span,
-            args: self.inputs.iter().cloned().map(|input| GenericArg::Type(input)).collect(),
+            args: self.inputs.iter().cloned().map(GenericArg::Type).collect(),
             constraints: vec![],
         }
     }


@@ -272,7 +272,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
         if !generic_args.parenthesized && !has_lifetimes {
             generic_args.args = self
                 .elided_path_lifetimes(path_span, expected_lifetimes)
-                .map(|lt| GenericArg::Lifetime(lt))
+                .map(GenericArg::Lifetime)
                 .chain(generic_args.args.into_iter())
                 .collect();
             if expected_lifetimes > 0 && param_mode == ParamMode::Explicit {


@@ -76,8 +76,8 @@ impl<'a> Path<'a> {
             self.params.iter().map(|t| t.to_ty(cx, span, self_ty, self_generics)).collect();
         let params = lt
             .into_iter()
-            .map(|lt| GenericArg::Lifetime(lt))
-            .chain(tys.into_iter().map(|ty| GenericArg::Type(ty)))
+            .map(GenericArg::Lifetime)
+            .chain(tys.into_iter().map(GenericArg::Type))
             .collect();
         match self.kind {


@@ -30,7 +30,7 @@ pub struct Sharded<T> {
 impl<T: Default> Default for Sharded<T> {
     #[inline]
     fn default() -> Self {
-        Self::new(|| T::default())
+        Self::new(T::default)
     }
 }


@@ -162,7 +162,7 @@ impl<'a> DiagnosticBuilder<'a> {
         message: &str,
         span: Option<S>,
     ) -> &mut Self {
-        let span = span.map(|s| s.into()).unwrap_or_else(|| MultiSpan::new());
+        let span = span.map(|s| s.into()).unwrap_or_else(MultiSpan::new);
         self.0.diagnostic.sub(level, message, span, None);
         self
     }


@@ -51,7 +51,7 @@ pub struct Feature {
 impl Feature {
     fn issue(&self) -> Option<NonZeroU32> {
-        self.issue.and_then(|i| NonZeroU32::new(i))
+        self.issue.and_then(NonZeroU32::new)
     }
 }


@@ -703,7 +703,7 @@ impl<'tcx> QueryContext<'tcx> {
     where
         F: FnOnce(TyCtxt<'tcx>) -> R,
     {
-        ty::tls::enter_global(self.0, |tcx| f(tcx))
+        ty::tls::enter_global(self.0, f)
     }

     pub fn print_stats(&mut self) {


@@ -327,7 +327,7 @@ impl<'a> CrateLocator<'a> {
                 .into_iter()
                 .filter_map(|entry| entry.files())
                 .flatten()
-                .map(|location| PathBuf::from(location))
+                .map(PathBuf::from)
                 .collect()
         } else {
             // SVH being specified means this is a transitive dependency,


@@ -577,7 +577,7 @@ fn write_diff<A: Analysis<'tcx>>(
     let mut clear = HybridBitSet::new_empty(len);

     // FIXME: Implement a lazy iterator over the symmetric difference of two bitsets.
-    for i in (0..len).map(|i| A::Idx::new(i)) {
+    for i in (0..len).map(A::Idx::new) {
         match (from.contains(i), to.contains(i)) {
             (false, true) => set.insert(i),
             (true, false) => clear.insert(i),


@@ -895,7 +895,7 @@ fn create_mono_items_for_vtable_methods<'tcx>(
                     .unwrap()
             })
             .filter(|&instance| should_monomorphize_locally(tcx, &instance))
-            .map(|instance| create_fn_mono_item(instance));
+            .map(create_fn_mono_item);

         output.extend(methods);
     }


@@ -2066,7 +2066,7 @@ fn split_grouped_constructors<'p, 'tcx>(
                 }
                 intersection
             })
-            .flat_map(|range| range_borders(range));
+            .flat_map(range_borders);
         let ctor_borders = range_borders(ctor_range.clone());
         let mut borders: Vec<_> = row_borders.chain(ctor_borders).collect();
         borders.sort_unstable();


@@ -1148,7 +1148,7 @@ impl<'l, 'tcx> DumpVisitor<'l, 'tcx> {
         let sub_span = path.segments.last().unwrap().ident.span;
         if !self.span.filter_generated(sub_span) {
-            let ref_id = self.lookup_def_id(id).map(|id| id_from_def_id(id));
+            let ref_id = self.lookup_def_id(id).map(id_from_def_id);
             let alias_span = alias.map(|i| self.span_from_span(i.span));
             let span = self.span_from_span(sub_span);
             self.dumper.import(


@@ -326,7 +326,7 @@ impl<'l, 'tcx> SaveContext<'l, 'tcx> {
                 .as_ref()
                 .and_then(|t| self.lookup_def_id(t.ref_id))
                 .map(id_from_def_id)
-                .unwrap_or_else(|| null_id()),
+                .unwrap_or_else(null_id),
             },
             Impl {
                 id: impl_id,
@@ -487,9 +487,9 @@ impl<'l, 'tcx> SaveContext<'l, 'tcx> {
             qualname,
             // FIXME you get better data here by using the visitor.
             value: String::new(),
-            parent: parent_scope.map(|id| id_from_def_id(id)),
+            parent: parent_scope.map(id_from_def_id),
             children: vec![],
-            decl_id: decl_id.map(|id| id_from_def_id(id)),
+            decl_id: decl_id.map(id_from_def_id),
             docs,
             sig: None,
             attributes: lower_attributes(attributes, self),
@@ -541,7 +541,7 @@ impl<'l, 'tcx> SaveContext<'l, 'tcx> {
                 .tcx
                 .find_field_index(ident, variant)
                 .map(|index| id_from_def_id(variant.fields[index].did))
-                .unwrap_or_else(|| null_id()),
+                .unwrap_or_else(null_id),
             }))
         }
         ty::Tuple(..) => None,
@@ -590,14 +590,11 @@ impl<'l, 'tcx> SaveContext<'l, 'tcx> {
                 Some(Data::RefData(Ref {
                     kind: RefKind::Function,
                     span,
-                    ref_id: def_id
-                        .or(decl_id)
-                        .map(|id| id_from_def_id(id))
-                        .unwrap_or_else(|| null_id()),
+                    ref_id: def_id.or(decl_id).map(id_from_def_id).unwrap_or_else(|| null_id()),
                 }))
             }
             ast::ExprKind::Path(_, ref path) => {
-                self.get_path_data(expr.id, path).map(|d| Data::RefData(d))
+                self.get_path_data(expr.id, path).map(Data::RefData)
             }
             _ => {
                 // FIXME
@@ -1075,7 +1072,7 @@ fn id_from_def_id(id: DefId) -> rls_data::Id {
 fn id_from_node_id(id: NodeId, scx: &SaveContext<'_, '_>) -> rls_data::Id {
     let def_id = scx.tcx.hir().opt_local_def_id_from_node_id(id);
-    def_id.map(|id| id_from_def_id(id)).unwrap_or_else(|| {
+    def_id.map(id_from_def_id).unwrap_or_else(|| {
         // Create a *fake* `DefId` out of a `NodeId` by subtracting the `NodeId`
         // out of the maximum u32 value. This will work unless you have *billions*
         // of definitions in a single crate (very unlikely to actually happen).


@@ -1310,7 +1310,7 @@ fn select_incremental_path(
         (None, Some(path)) => Some(path),
         (None, None) => None,
     }
-    .map(|m| PathBuf::from(m))
+    .map(PathBuf::from)
 }

 fn collect_print_requests(


@@ -131,7 +131,7 @@ impl<'a, 'tcx> FulfillmentContext<'tcx> {
             // FIXME: if we kept the original cache key, we could mark projection
             // obligations as complete for the projection cache here.
-            errors.extend(outcome.errors.into_iter().map(|e| to_fulfillment_error(e)));
+            errors.extend(outcome.errors.into_iter().map(to_fulfillment_error));

             // If nothing new was added, no need to keep looping.
             if outcome.stalled {
@@ -214,7 +214,7 @@ impl<'tcx> TraitEngine<'tcx> for FulfillmentContext<'tcx> {
             .predicates
             .to_errors(CodeAmbiguity)
             .into_iter()
-            .map(|e| to_fulfillment_error(e))
+            .map(to_fulfillment_error)
             .collect();
         if errors.is_empty() { Ok(()) } else { Err(errors) }
     }
} }


@@ -39,7 +39,7 @@ pub fn astconv_object_safety_violations(
     let violations = traits::supertrait_def_ids(tcx, trait_def_id)
         .map(|def_id| predicates_reference_self(tcx, def_id, true))
         .filter(|spans| !spans.is_empty())
-        .map(|spans| ObjectSafetyViolation::SupertraitSelf(spans))
+        .map(ObjectSafetyViolation::SupertraitSelf)
         .collect();

     debug!("astconv_object_safety_violations(trait_def_id={:?}) = {:?}", trait_def_id, violations);


@@ -2947,13 +2947,9 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
         let existential_predicates = data_a.map_bound(|data_a| {
             let iter = data_a
                 .principal()
-                .map(|x| ty::ExistentialPredicate::Trait(x))
+                .map(ty::ExistentialPredicate::Trait)
                 .into_iter()
-                .chain(
-                    data_a
-                        .projection_bounds()
-                        .map(|x| ty::ExistentialPredicate::Projection(x)),
-                )
+                .chain(data_a.projection_bounds().map(ty::ExistentialPredicate::Projection))
                 .chain(data_b.auto_traits().map(ty::ExistentialPredicate::AutoTrait));
             tcx.mk_existential_predicates(iter)
         });


@@ -1693,9 +1693,8 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
         };

         // Erase the `dummy_self` (`trait_object_dummy_self`) used above.
-        let existential_trait_refs = regular_traits
-            .iter()
-            .map(|i| i.trait_ref().map_bound(|trait_ref| trait_ref_to_existential(trait_ref)));
+        let existential_trait_refs =
+            regular_traits.iter().map(|i| i.trait_ref().map_bound(trait_ref_to_existential));
         let existential_projections = bounds.projection_bounds.iter().map(|(bound, _)| {
             bound.map_bound(|b| {
                 let trait_ref = trait_ref_to_existential(b.projection_ty.trait_ref(tcx));


@@ -677,7 +677,7 @@ fn compare_number_of_generics<'tcx>(
                 impl_count,
                 kind,
                 pluralize!(impl_count),
-                suffix.unwrap_or_else(|| String::new()),
+                suffix.unwrap_or_else(String::new),
             ),
         );
     }