Rollup merge of #110366 - matthiaskrgr:compl_123, r=Nilstrieb

fix some clippy::complexity

r? `@Nilstrieb`
This commit is contained in:
Yuki Okushi 2023-04-16 06:55:24 +09:00 committed by GitHub
commit a8983749bd
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
6 changed files with 18 additions and 23 deletions

View File

@@ -528,7 +528,7 @@ impl<'cx, 'tcx> BorrowckInferCtxt<'cx, 'tcx> {
     where
         F: Fn() -> RegionCtxt,
     {
-        let next_region = self.infcx.next_nll_region_var(origin.clone());
+        let next_region = self.infcx.next_nll_region_var(origin);
         let vid = next_region.as_var();
         if cfg!(debug_assertions) && !self.inside_canonicalization_ctxt() {

View File

@@ -645,9 +645,8 @@ pub(super) fn implied_predicates_with_filter(
     };

     // Combine the two lists to form the complete set of superbounds:
-    let implied_bounds = &*tcx
-        .arena
-        .alloc_from_iter(superbounds.predicates().into_iter().chain(where_bounds_that_match));
+    let implied_bounds =
+        &*tcx.arena.alloc_from_iter(superbounds.predicates().chain(where_bounds_that_match));
     debug!(?implied_bounds);

     // Now require that immediate supertraits are converted,

View File

@@ -164,24 +164,20 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
         {
             for param in
                 [param_to_point_at, fallback_param_to_point_at, self_param_to_point_at]
+                    .into_iter()
+                    .flatten()
             {
-                if let Some(param) = param {
-                    let refined_expr = self.point_at_field_if_possible(
-                        def_id,
-                        param,
-                        variant_def_id,
-                        fields,
-                    );
+                let refined_expr =
+                    self.point_at_field_if_possible(def_id, param, variant_def_id, fields);

-                    match refined_expr {
-                        None => {}
-                        Some((refined_expr, _)) => {
-                            error.obligation.cause.span = refined_expr
-                                .span
-                                .find_ancestor_in_same_ctxt(error.obligation.cause.span)
-                                .unwrap_or(refined_expr.span);
-                            return true;
-                        }
+                match refined_expr {
+                    None => {}
+                    Some((refined_expr, _)) => {
+                        error.obligation.cause.span = refined_expr
+                            .span
+                            .find_ancestor_in_same_ctxt(error.obligation.cause.span)
+                            .unwrap_or(refined_expr.span);
+                        return true;
                     }
                 }
             }

View File

@@ -89,7 +89,7 @@ impl<'tcx> Partitioner<'tcx> for DefaultPartitioning {
         }

         PreInliningPartitioning {
-            codegen_units: codegen_units.into_values().map(|codegen_unit| codegen_unit).collect(),
+            codegen_units: codegen_units.into_values().collect(),
             roots,
             internalization_candidates,
         }

View File

@@ -231,7 +231,7 @@ pub(crate) fn alloc_self_profile_query_strings_for_query_cache<'tcx, C>(
     // locked while doing so. Instead we copy out the
     // `(query_key, dep_node_index)` pairs and release the lock again.
     let mut query_keys_and_indices = Vec::new();
-    query_cache.iter(&mut |k, _, i| query_keys_and_indices.push((k.clone(), i)));
+    query_cache.iter(&mut |k, _, i| query_keys_and_indices.push((*k, i)));

     // Now actually allocate the strings. If allocating the strings
     // generates new entries in the query cache, we'll miss them but

View File

@@ -1349,7 +1349,7 @@ impl LinkCollector<'_, '_> {
         if has_derive_trait_collision {
            candidates.macro_ns = None;
         }
-        candidates.into_iter().filter_map(|res| res).flatten().collect::<Vec<_>>()
+        candidates.into_iter().flatten().flatten().collect::<Vec<_>>()
     }
 }
 }