Fix some clippy::complexity

commit 81c320ea77 (parent 6fceb0f645)
@@ -1176,7 +1176,7 @@ impl FieldsShape {
 
     /// Gets source indices of the fields by increasing offsets.
     #[inline]
-    pub fn index_by_increasing_offset<'a>(&'a self) -> impl Iterator<Item = usize> + 'a {
+    pub fn index_by_increasing_offset(&self) -> impl Iterator<Item = usize> + '_ {
         let mut inverse_small = [0u8; 64];
         let mut inverse_big = IndexVec::new();
         let use_small = self.count() <= inverse_small.len();
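Most hunks in this commit silence clippy::needless_lifetimes: a named lifetime that only ties `&self` to a returned `impl Iterator` is replaced with the anonymous `'_` lifetime. A minimal, self-contained sketch of the same rewrite; the `Fields` type and its methods are hypothetical, for illustration only.

struct Fields {
    offsets: Vec<usize>,
}

impl Fields {
    // Before: the named lifetime 'a only links `&self` to the returned iterator.
    pub fn indices_explicit<'a>(&'a self) -> impl Iterator<Item = usize> + 'a {
        self.offsets.iter().copied()
    }

    // After: the anonymous '_ lifetime expresses the same borrow; the two
    // signatures are fully equivalent.
    pub fn indices(&self) -> impl Iterator<Item = usize> + '_ {
        self.offsets.iter().copied()
    }
}

fn main() {
    let f = Fields { offsets: vec![0, 4, 8] };
    assert_eq!(f.indices().collect::<Vec<_>>(), f.indices_explicit().collect::<Vec<_>>());
}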
@@ -691,7 +691,7 @@ fn validate_generic_param_order(
             GenericParamKind::Lifetime => (),
             GenericParamKind::Const { ty: _, kw_span: _, default: Some(default) } => {
                 ordered_params += " = ";
-                ordered_params += &pprust::expr_to_string(&*default.value);
+                ordered_params += &pprust::expr_to_string(&default.value);
             }
             GenericParamKind::Const { ty: _, kw_span: _, default: None } => (),
         }
@@ -404,11 +404,14 @@ impl<'a> Visitor<'a> for PostExpansionVisitor<'a> {
                     );
                 } else {
                     // And if it isn't, cancel the early-pass warning.
-                    self.sess
+                    if let Some(err) = self
+                        .sess
                         .parse_sess
                         .span_diagnostic
                         .steal_diagnostic(e.span, StashKey::EarlySyntaxWarning)
-                        .map(|err| err.cancel());
+                    {
+                        err.cancel()
+                    }
                 }
             }
             ast::ExprKind::TryBlock(_) => {
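Several hunks replace an `Option::map` call used only for its side effect with an `if let`, which is what clippy::option_map_unit_fn asks for. A standalone sketch of the pattern; the `Warning` type here is made up for illustration.

struct Warning {
    text: String,
}

impl Warning {
    fn cancel(self) {
        // Dropping the warning stands in for suppressing a stashed diagnostic.
        println!("cancelled: {}", self.text);
    }
}

fn main() {
    let stashed: Option<Warning> = Some(Warning { text: "early syntax warning".into() });

    // Lint-triggering style: `stashed.map(|w| w.cancel());` calls `map` purely
    // for its side effect and discards the resulting Option<()>.
    //
    // Preferred style: the side effect is explicit.
    if let Some(w) = stashed {
        w.cancel()
    }
}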
@@ -988,7 +988,9 @@ impl<'a> State<'a> {
 
     pub fn print_assoc_constraint(&mut self, constraint: &ast::AssocConstraint) {
         self.print_ident(constraint.ident);
-        constraint.gen_args.as_ref().map(|args| self.print_generic_args(args, false));
+        if let Some(args) = constraint.gen_args.as_ref() {
+            self.print_generic_args(args, false)
+        }
         self.space();
         match &constraint.kind {
             ast::AssocConstraintKind::Equality { term } => {
@@ -206,17 +206,11 @@ impl<N: Debug, E: Debug> Graph<N, E> {
         AdjacentEdges { graph: self, direction, next: first_edge }
     }
 
-    pub fn successor_nodes<'a>(
-        &'a self,
-        source: NodeIndex,
-    ) -> impl Iterator<Item = NodeIndex> + 'a {
+    pub fn successor_nodes(&self, source: NodeIndex) -> impl Iterator<Item = NodeIndex> + '_ {
         self.outgoing_edges(source).targets()
     }
 
-    pub fn predecessor_nodes<'a>(
-        &'a self,
-        target: NodeIndex,
-    ) -> impl Iterator<Item = NodeIndex> + 'a {
+    pub fn predecessor_nodes(&self, target: NodeIndex) -> impl Iterator<Item = NodeIndex> + '_ {
         self.incoming_edges(target).sources()
     }
 
@@ -40,7 +40,7 @@ impl Deref for Mmap {
 
 impl AsRef<[u8]> for Mmap {
     fn as_ref(&self) -> &[u8] {
-        &*self.0
+        &self.0
     }
 }
 
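Here and in a few later hunks an explicit `&*` reborrow is dropped because deref coercion already produces the target type. A minimal sketch with a hypothetical wrapper type standing in for Mmap.

use std::ops::Deref;

// Hypothetical owned-buffer wrapper, standing in for types like Mmap.
struct Buffer(Vec<u8>);

impl Deref for Buffer {
    type Target = [u8];

    fn deref(&self) -> &[u8] {
        &self.0
    }
}

impl AsRef<[u8]> for Buffer {
    fn as_ref(&self) -> &[u8] {
        // `&*self.0` also compiles, but the explicit deref is redundant:
        // `&self.0` is a `&Vec<u8>`, which coerces to `&[u8]` on its own.
        &self.0
    }
}

fn main() {
    let buf = Buffer(vec![1, 2, 3]);
    assert_eq!(buf.as_ref(), &[1, 2, 3]);
}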
@@ -312,14 +312,14 @@ impl<CTX> HashStable<CTX> for ::std::num::NonZeroUsize {
 
 impl<CTX> HashStable<CTX> for f32 {
     fn hash_stable(&self, ctx: &mut CTX, hasher: &mut StableHasher) {
-        let val: u32 = unsafe { ::std::mem::transmute(*self) };
+        let val: u32 = self.to_bits();
         val.hash_stable(ctx, hasher);
     }
 }
 
 impl<CTX> HashStable<CTX> for f64 {
     fn hash_stable(&self, ctx: &mut CTX, hasher: &mut StableHasher) {
-        let val: u64 = unsafe { ::std::mem::transmute(*self) };
+        let val: u64 = self.to_bits();
         val.hash_stable(ctx, hasher);
     }
 }
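`f32::to_bits` and `f64::to_bits` are safe, stable ways to get exactly the raw bit pattern the transmute produced, so the hashed value is unchanged and the `unsafe` block can go. A quick check of that equivalence:

fn main() {
    let x: f32 = -1.5;
    let y: f64 = 2.75;

    // to_bits returns the IEEE 754 bit pattern, i.e. what the transmute yielded.
    let via_to_bits: u32 = x.to_bits();
    let via_transmute: u32 = unsafe { std::mem::transmute(x) };
    assert_eq!(via_to_bits, via_transmute);

    let via_to_bits64: u64 = y.to_bits();
    let via_transmute64: u64 = unsafe { std::mem::transmute(y) };
    assert_eq!(via_to_bits64, via_transmute64);
}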
@@ -5,7 +5,7 @@ const RED_ZONE: usize = 100 * 1024; // 100k
 
 // Only the first stack that is pushed, grows exponentially (2^n * STACK_PER_RECURSION) from then
 // on. This flag has performance relevant characteristics. Don't set it too high.
-const STACK_PER_RECURSION: usize = 1 * 1024 * 1024; // 1MB
+const STACK_PER_RECURSION: usize = 1024 * 1024; // 1MB
 
 /// Grows the stack on demand to prevent stack overflow. Call this in strategic locations
 /// to "break up" recursive calls. E.g. almost any call to `visit_expr` or equivalent can benefit
@@ -84,7 +84,7 @@ impl<T: Copy> AppendOnlyVec<T> {
     }
 
     pub fn iter(&self) -> impl Iterator<Item = T> + '_ {
-        (0..).map(|i| self.get(i)).take_while(|o| o.is_some()).filter_map(|o| o)
+        (0..).map(|i| self.get(i)).take_while(|o| o.is_some()).flatten()
    }
 }
 
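On an iterator of `Option`s, `filter_map(|o| o)` is the identity `filter_map`, which clippy suggests spelling as `flatten()`; `Option` is itself an iterator of zero or one items. A small standalone check:

fn main() {
    let items = vec![Some(1), None, Some(3), None, Some(5)];

    // filter_map with the identity closure...
    let a: Vec<i32> = items.iter().copied().filter_map(|o| o).collect();
    // ...does the same thing as flatten().
    let b: Vec<i32> = items.iter().copied().flatten().collect();

    assert_eq!(a, b);
    assert_eq!(b, vec![1, 3, 5]);
}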
@@ -224,7 +224,7 @@ impl<V: Eq + Hash> UnordSet<V> {
     }
 
     #[inline]
-    pub fn items<'a>(&'a self) -> UnordItems<&'a V, impl Iterator<Item = &'a V>> {
+    pub fn items(&self) -> UnordItems<&V, impl Iterator<Item = &V>> {
         UnordItems(self.inner.iter())
     }
 
@@ -415,7 +415,7 @@ impl<K: Eq + Hash, V> UnordMap<K, V> {
     }
 
     #[inline]
-    pub fn items<'a>(&'a self) -> UnordItems<(&'a K, &'a V), impl Iterator<Item = (&'a K, &'a V)>> {
+    pub fn items(&self) -> UnordItems<(&K, &V), impl Iterator<Item = (&K, &V)>> {
         UnordItems(self.inner.iter())
     }
 
@@ -956,7 +956,7 @@ impl Diagnostic {
     // Exact iteration order of diagnostic arguments shouldn't make a difference to output because
     // they're only used in interpolation.
     #[allow(rustc::potential_query_instability)]
-    pub fn args<'a>(&'a self) -> impl Iterator<Item = DiagnosticArg<'a, 'static>> {
+    pub fn args(&self) -> impl Iterator<Item = DiagnosticArg<'_, 'static>> {
         self.args.iter()
     }
 
@@ -1407,7 +1407,7 @@ impl EmitterWriter {
         // Account for newlines to align output to its label.
         for (line, text) in normalize_whitespace(&text).lines().enumerate() {
             buffer.append(
-                0 + line,
+                line,
                 &format!(
                     "{}{}",
                     if line == 0 { String::new() } else { " ".repeat(label_width) },
@@ -1918,7 +1918,7 @@ impl EmitterWriter {
                 let last_line = unhighlighted_lines.pop();
                 let first_line = unhighlighted_lines.drain(..).next();
 
-                first_line.map(|(p, l)| {
+                if let Some((p, l)) = first_line {
                     self.draw_code_line(
                         &mut buffer,
                         &mut row_num,
@@ -1930,12 +1930,12 @@ impl EmitterWriter {
                         &file_lines,
                         is_multiline,
                     )
-                });
+                }
 
                 buffer.puts(row_num, max_line_num_len - 1, "...", Style::LineNumber);
                 row_num += 1;
 
-                last_line.map(|(p, l)| {
+                if let Some((p, l)) = last_line {
                     self.draw_code_line(
                         &mut buffer,
                         &mut row_num,
@@ -1947,7 +1947,7 @@ impl EmitterWriter {
                         &file_lines,
                         is_multiline,
                     )
-                });
+                }
             }
         }
 
@@ -466,7 +466,7 @@ impl<'a> StripUnconfigured<'a> {
         //
         // N.B., this is intentionally not part of the visit_expr() function
         // in order for filter_map_expr() to be able to avoid this check
-        if let Some(attr) = expr.attrs().iter().find(|a| is_cfg(*a)) {
+        if let Some(attr) = expr.attrs().iter().find(|a| is_cfg(a)) {
             self.sess.emit_err(RemoveExprNotSupported { span: attr.span });
         }
 
@@ -41,7 +41,7 @@ impl MetaVarExpr {
         };
         check_trailing_token(&mut tts, sess)?;
         let mut iter = args.trees();
-        let rslt = match &*ident.as_str() {
+        let rslt = match ident.as_str() {
            "count" => parse_count(&mut iter, sess, ident.span)?,
            "ignore" => MetaVarExpr::Ignore(parse_ident(&mut iter, sess, ident.span)?),
            "index" => MetaVarExpr::Index(parse_depth(&mut iter, sess, ident.span)?),
@@ -49,7 +49,7 @@ impl LanguageItems {
         self.get(it).ok_or_else(|| LangItemError(it))
     }
 
-    pub fn iter<'a>(&'a self) -> impl Iterator<Item = (LangItem, DefId)> + 'a {
+    pub fn iter(&self) -> impl Iterator<Item = (LangItem, DefId)> + '_ {
         self.items
             .iter()
             .enumerate()
@@ -1850,7 +1850,7 @@ impl<R: Idx, C: Idx> SparseBitMatrix<R, C> {
 
     /// Iterates through all the columns set to true in a given row of
     /// the matrix.
-    pub fn iter<'a>(&'a self, row: R) -> impl Iterator<Item = C> + 'a {
+    pub fn iter(&self, row: R) -> impl Iterator<Item = C> + '_ {
         self.row(row).into_iter().flat_map(|r| r.iter())
     }
 
@@ -201,18 +201,15 @@ impl<I: Idx, T> IndexVec<I, T> {
     }
 
     #[inline]
-    pub fn drain<'a, R: RangeBounds<usize>>(
-        &'a mut self,
-        range: R,
-    ) -> impl Iterator<Item = T> + 'a {
+    pub fn drain<R: RangeBounds<usize>>(&mut self, range: R) -> impl Iterator<Item = T> + '_ {
         self.raw.drain(range)
     }
 
     #[inline]
-    pub fn drain_enumerated<'a, R: RangeBounds<usize>>(
-        &'a mut self,
+    pub fn drain_enumerated<R: RangeBounds<usize>>(
+        &mut self,
         range: R,
-    ) -> impl Iterator<Item = (I, T)> + 'a {
+    ) -> impl Iterator<Item = (I, T)> + '_ {
         let begin = match range.start_bound() {
             std::ops::Bound::Included(i) => *i,
             std::ops::Bound::Excluded(i) => i.checked_add(1).unwrap(),
@@ -119,7 +119,7 @@ impl DiagnosticDeriveBuilder {
 impl<'a> DiagnosticDeriveVariantBuilder<'a> {
     /// Generates calls to `code` and similar functions based on the attributes on the type or
     /// variant.
-    pub fn preamble<'s>(&mut self, variant: &VariantInfo<'s>) -> TokenStream {
+    pub fn preamble(&mut self, variant: &VariantInfo<'_>) -> TokenStream {
         let ast = variant.ast();
         let attrs = &ast.attrs;
         let preamble = attrs.iter().map(|attr| {
@@ -133,7 +133,7 @@ impl<'a> DiagnosticDeriveVariantBuilder<'a> {
 
     /// Generates calls to `span_label` and similar functions based on the attributes on fields or
     /// calls to `set_arg` when no attributes are present.
-    pub fn body<'s>(&mut self, variant: &VariantInfo<'s>) -> TokenStream {
+    pub fn body(&mut self, variant: &VariantInfo<'_>) -> TokenStream {
         let mut body = quote! {};
         // Generate `set_arg` calls first..
         for binding in variant.bindings().iter().filter(|bi| should_generate_set_arg(bi.ast())) {
@@ -915,7 +915,7 @@ pub enum LocalInfo<'tcx> {
 
 impl<'tcx> LocalDecl<'tcx> {
     pub fn local_info(&self) -> &LocalInfo<'tcx> {
-        &**self.local_info.as_ref().assert_crate_local()
+        &self.local_info.as_ref().assert_crate_local()
     }
 
     /// Returns `true` only if local is a binding that can itself be
@@ -923,7 +923,7 @@ impl ObjectSafetyViolation {
                 }
             }
             ObjectSafetyViolation::SupertraitNonLifetimeBinder(_) => {
-                format!("where clause cannot reference non-lifetime `for<...>` variables").into()
+                "where clause cannot reference non-lifetime `for<...>` variables".into()
             }
             ObjectSafetyViolation::Method(name, MethodViolationCode::StaticMethod(_), _) => {
                 format!("associated function `{}` has no `self` parameter", name).into()
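A `format!` call with nothing to interpolate only allocates a `String` to wrap a fixed literal (clippy::useless_format); converting the literal directly gives the same message without the allocation. A small sketch; the `Cow<'static, str>` return type is an assumption chosen to mirror how such messages are typically returned.

use std::borrow::Cow;

fn message_old() -> Cow<'static, str> {
    // Allocates a String just to wrap a fixed message.
    format!("where clause cannot reference non-lifetime `for<...>` variables").into()
}

fn message_new() -> Cow<'static, str> {
    // The literal converts directly and stays borrowed.
    "where clause cannot reference non-lifetime `for<...>` variables".into()
}

fn main() {
    assert_eq!(message_old(), message_new());
    assert!(matches!(message_new(), Cow::Borrowed(_)));
    assert!(matches!(message_old(), Cow::Owned(_)));
}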
@@ -105,7 +105,7 @@ impl<'tcx> std::ops::Deref for ExternalConstraints<'tcx> {
     type Target = ExternalConstraintsData<'tcx>;
 
     fn deref(&self) -> &Self::Target {
-        &*self.0
+        &self.0
     }
 }
 
@@ -924,7 +924,7 @@ impl<'tcx> TyCtxt<'tcx> {
                 crate_name,
                 // Don't print the whole stable crate id. That's just
                 // annoying in debug output.
-                stable_crate_id.to_u64() >> 8 * 6,
+                stable_crate_id.to_u64() >> (8 * 6),
                 self.def_path(def_id).to_string_no_crate_verbose()
             )
         }
@@ -2379,7 +2379,7 @@ impl<'tcx> TyCtxt<'tcx> {
     pub fn in_scope_traits(self, id: HirId) -> Option<&'tcx [TraitCandidate]> {
         let map = self.in_scope_traits_map(id.owner)?;
         let candidates = map.get(&id.local_id)?;
-        Some(&*candidates)
+        Some(candidates)
     }
 
     pub fn named_bound_var(self, id: HirId) -> Option<resolve_bound_vars::ResolvedArg> {
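In Rust, `*` binds tighter than `>>`, so `x >> 8 * 6` already shifts by 48 bits; the added parentheses change nothing at runtime, they just make the intent obvious, which is what clippy::precedence wants. A quick check:

fn main() {
    let id: u64 = 0xDEAD_BEEF_1234_5678;

    // Both expressions shift by 48 bits, keeping the top 16 bits of the value.
    assert_eq!(id >> 8 * 6, id >> (8 * 6));
    assert_eq!(id >> (8 * 6), 0xDEAD);
}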
@@ -336,8 +336,8 @@ const ASCII_ARRAY: &[(&str, &str, Option<token::TokenKind>)] = &[
     ("\"", "Quotation Mark", None),
 ];
 
-pub(super) fn check_for_substitution<'a>(
-    reader: &StringReader<'a>,
+pub(super) fn check_for_substitution(
+    reader: &StringReader<'_>,
     pos: BytePos,
     ch: char,
     count: usize,
@@ -53,7 +53,7 @@ impl<'a> Parser<'a> {
         let snapshot = self.create_snapshot_for_diagnostic();
         match self.parse_ty() {
             Ok(p) => {
-                if let TyKind::ImplTrait(_, bounds) = &(*p).kind {
+                if let TyKind::ImplTrait(_, bounds) = &p.kind {
                     let span = impl_span.to(self.token.span.shrink_to_lo());
                     let mut err = self.struct_span_err(
                         span,
@@ -136,7 +136,9 @@ where
     }
 
     fn iter(&self, f: &mut dyn FnMut(&Self::Key, &Self::Value, DepNodeIndex)) {
-        self.cache.lock().as_ref().map(|value| f(&(), &value.0, value.1));
+        if let Some(value) = self.cache.lock().as_ref() {
+            f(&(), &value.0, value.1)
+        }
     }
 }
 
@@ -1426,7 +1426,7 @@ pub fn rustc_short_optgroups() -> Vec<RustcOptGroup> {
         opt::opt_s(
             "",
             "edition",
-            &*EDITION_STRING,
+            &EDITION_STRING,
             EDITION_NAME_LIST,
         ),
         opt::multi_s(
@@ -84,12 +84,12 @@ impl SymbolGallery {
 
 /// Construct a diagnostic for a language feature error due to the given `span`.
 /// The `feature`'s `Symbol` is the one you used in `active.rs` and `rustc_span::symbols`.
-pub fn feature_err<'a>(
-    sess: &'a ParseSess,
+pub fn feature_err(
+    sess: &ParseSess,
     feature: Symbol,
     span: impl Into<MultiSpan>,
     explain: impl Into<DiagnosticMessage>,
-) -> DiagnosticBuilder<'a, ErrorGuaranteed> {
+) -> DiagnosticBuilder<'_, ErrorGuaranteed> {
     feature_err_issue(sess, feature, span, GateIssue::Language, explain)
 }
 
@@ -98,20 +98,21 @@ pub fn feature_err<'a>(
 /// This variant allows you to control whether it is a library or language feature.
 /// Almost always, you want to use this for a language feature. If so, prefer `feature_err`.
 #[track_caller]
-pub fn feature_err_issue<'a>(
-    sess: &'a ParseSess,
+pub fn feature_err_issue(
+    sess: &ParseSess,
     feature: Symbol,
     span: impl Into<MultiSpan>,
     issue: GateIssue,
     explain: impl Into<DiagnosticMessage>,
-) -> DiagnosticBuilder<'a, ErrorGuaranteed> {
+) -> DiagnosticBuilder<'_, ErrorGuaranteed> {
     let span = span.into();
 
     // Cancel an earlier warning for this same error, if it exists.
     if let Some(span) = span.primary_span() {
-        sess.span_diagnostic
-            .steal_diagnostic(span, StashKey::EarlySyntaxWarning)
-            .map(|err| err.cancel());
+        if let Some(err) = sess.span_diagnostic.steal_diagnostic(span, StashKey::EarlySyntaxWarning)
+        {
+            err.cancel()
+        }
     }
 
     let mut err = sess.create_err(FeatureGateError { span, explain: explain.into() });
@@ -1353,16 +1353,16 @@ impl Clone for SourceFile {
         Self {
             name: self.name.clone(),
             src: self.src.clone(),
-            src_hash: self.src_hash.clone(),
+            src_hash: self.src_hash,
             external_src: Lock::new(self.external_src.borrow().clone()),
-            start_pos: self.start_pos.clone(),
-            end_pos: self.end_pos.clone(),
+            start_pos: self.start_pos,
+            end_pos: self.end_pos,
             lines: Lock::new(self.lines.borrow().clone()),
             multibyte_chars: self.multibyte_chars.clone(),
             non_narrow_chars: self.non_narrow_chars.clone(),
             normalized_pos: self.normalized_pos.clone(),
-            name_hash: self.name_hash.clone(),
-            cnum: self.cnum.clone(),
+            name_hash: self.name_hash,
+            cnum: self.cnum,
         }
     }
 }
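Calling `.clone()` on a `Copy` type compiles to a plain copy anyway, so clippy::clone_on_copy prefers writing the copy directly; clones stay only on the genuinely non-`Copy` fields. A minimal sketch with made-up field types.

// Hypothetical stand-in for Copy fields like src_hash, start_pos and cnum.
#[derive(Clone, Copy, PartialEq, Debug)]
struct BytePos(u32);

#[derive(Clone, Debug)]
struct File {
    name: String,       // not Copy: clone() is genuinely needed
    start_pos: BytePos, // Copy: a plain copy is enough
}

impl File {
    fn duplicate(&self) -> File {
        File {
            name: self.name.clone(),
            // `self.start_pos.clone()` would compile, but clippy::clone_on_copy
            // flags it because the clone is just a copy.
            start_pos: self.start_pos,
        }
    }
}

fn main() {
    let f = File { name: "lib.rs".into(), start_pos: BytePos(0) };
    let g = f.duplicate();
    assert_eq!(g.start_pos, BytePos(0));
    assert_eq!(g.name, "lib.rs");
}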