more clippy::complexity fixes

Matthias Krüger 2022-12-15 00:06:34 +01:00
parent fbf8b937b4
commit de59844c98
13 changed files with 23 additions and 41 deletions

View File

@@ -2466,7 +2466,7 @@ pub enum ModKind {
     Unloaded,
 }
 
-#[derive(Copy, Clone, Encodable, Decodable, Debug)]
+#[derive(Copy, Clone, Encodable, Decodable, Debug, Default)]
 pub struct ModSpans {
     /// `inner_span` covers the body of the module; for a file module, its the whole file.
     /// For an inline module, its the span inside the `{ ... }`, not including the curly braces.
@@ -2474,12 +2474,6 @@ pub struct ModSpans {
     pub inject_use_span: Span,
 }
 
-impl Default for ModSpans {
-    fn default() -> ModSpans {
-        ModSpans { inner_span: Default::default(), inject_use_span: Default::default() }
-    }
-}
-
 /// Foreign module declaration.
 ///
 /// E.g., `extern { .. }` or `extern "C" { .. }`.
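The added derive is clippy's `derivable_impls` fix: the hand-written `Default` impl removed above does exactly what the derive generates, since every field itself implements `Default`. A minimal standalone sketch of the equivalence (stand-in `Span` type, not the rustc definition):

    // Stand-in for rustc's `Span`; the real type also implements `Default`.
    #[derive(Copy, Clone, Debug, Default)]
    struct Span(u32);

    // `#[derive(Default)]` expands to roughly the removed impl:
    // `ModSpans { inner_span: Default::default(), inject_use_span: Default::default() }`.
    #[derive(Copy, Clone, Debug, Default)]
    struct ModSpans {
        inner_span: Span,
        inject_use_span: Span,
    }

    fn main() {
        let spans = ModSpans::default();
        println!("{spans:?}");
    }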

View File

@@ -126,13 +126,13 @@ impl<K: Ord, V> SortedMap<K, V> {
     /// Iterate over the keys, sorted
     #[inline]
     pub fn keys(&self) -> impl Iterator<Item = &K> + ExactSizeIterator + DoubleEndedIterator {
-        self.data.iter().map(|&(ref k, _)| k)
+        self.data.iter().map(|(k, _)| k)
     }
 
     /// Iterate over values, sorted by key
     #[inline]
     pub fn values(&self) -> impl Iterator<Item = &V> + ExactSizeIterator + DoubleEndedIterator {
-        self.data.iter().map(|&(_, ref v)| v)
+        self.data.iter().map(|(_, v)| v)
     }
 
     #[inline]
@@ -222,7 +222,7 @@ impl<K: Ord, V> SortedMap<K, V> {
         K: Borrow<Q>,
         Q: Ord + ?Sized,
     {
-        self.data.binary_search_by(|&(ref x, _)| x.borrow().cmp(key))
+        self.data.binary_search_by(|(x, _)| x.borrow().cmp(key))
     }
 
     #[inline]
@@ -300,7 +300,7 @@ impl<K: Ord, V> FromIterator<(K, V)> for SortedMap<K, V> {
     fn from_iter<T: IntoIterator<Item = (K, V)>>(iter: T) -> Self {
         let mut data: Vec<(K, V)> = iter.into_iter().collect();
 
-        data.sort_unstable_by(|&(ref k1, _), &(ref k2, _)| k1.cmp(k2));
+        data.sort_unstable_by(|(k1, _), (k2, _)| k1.cmp(k2));
         data.dedup_by(|&mut (ref k1, _), &mut (ref k2, _)| k1.cmp(k2) == Ordering::Equal);
 
         SortedMap { data }
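The `SortedMap` changes (and the `sort_by_key` change in the next file) drop redundant `&(ref x, _)` closure patterns: with match ergonomics, binding `|(k, _)|` against an iterator of `&(K, V)` already gives `k: &K`, so the explicit reference-and-`ref` pattern is noise (clippy's `needless_borrowed_reference`). A standalone sketch, not the actual `SortedMap` code:

    fn main() {
        let data: Vec<(String, u32)> = vec![("a".into(), 1), ("b".into(), 2)];

        // Old style: destructure the `&(String, u32)` and re-borrow the key with `ref`.
        let keys_old: Vec<&String> = data.iter().map(|&(ref k, _)| k).collect();

        // New style: match ergonomics binds `k` as `&String` automatically.
        let keys_new: Vec<&String> = data.iter().map(|(k, _)| k).collect();

        assert_eq!(keys_old, keys_new);
    }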

View File

@@ -2313,7 +2313,7 @@ impl FileWithAnnotatedLines {
         }
 
         // Find overlapping multiline annotations, put them at different depths
-        multiline_annotations.sort_by_key(|&(_, ref ml)| (ml.line_start, usize::MAX - ml.line_end));
+        multiline_annotations.sort_by_key(|(_, ml)| (ml.line_start, usize::MAX - ml.line_end));
         for (_, ann) in multiline_annotations.clone() {
             for (_, a) in multiline_annotations.iter_mut() {
                 // Move all other multiline annotations overlapping with this one

View File

@@ -324,7 +324,7 @@ impl CodeSuggestion {
                     // Account for the difference between the width of the current code and the
                     // snippet being suggested, so that the *later* suggestions are correctly
                     // aligned on the screen.
-                    acc += len as isize - (cur_hi.col.0 - cur_lo.col.0) as isize;
+                    acc += len - (cur_hi.col.0 - cur_lo.col.0) as isize;
                 }
                 prev_hi = cur_hi;
                 prev_line = sf.get_line(prev_hi.line - 1);

View File

@@ -1757,7 +1757,6 @@ impl<'a> State<'a> {
                 self.print_qpath(qpath, true);
                 self.popen();
                 if let Some(ddpos) = ddpos.as_opt_usize() {
-                    let ddpos = ddpos as usize;
                     self.commasep(Inconsistent, &elts[..ddpos], |s, p| s.print_pat(p));
                     if ddpos != 0 {
                         self.word_space(",");

View File

@@ -192,7 +192,7 @@ impl Mismatch {
         let crate_name = std::env::var("CARGO_CRATE_NAME").ok()?;
 
         // If we're not in a "rustc_" crate, bail.
-        let Some(("rustc", slug_prefix)) = crate_name.split_once("_") else { return None };
+        let Some(("rustc", slug_prefix)) = crate_name.split_once('_') else { return None };
 
         let slug_name = slug.segments.first()?.ident.to_string();
         if !slug_name.starts_with(slug_prefix) {
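The quoting change is clippy's `single_char_pattern` fix: a one-character `&str` pattern is better written as a `char`. Both forms behave identically, as in this standalone example:

    fn main() {
        let crate_name = "rustc_span";

        // Splitting on the first underscore; `char` and one-character `&str` patterns give the same result.
        assert_eq!(crate_name.split_once("_"), Some(("rustc", "span")));
        assert_eq!(crate_name.split_once('_'), Some(("rustc", "span")));
    }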

View File

@@ -875,18 +875,12 @@ pub struct PacRet {
     pub key: PAuthKey,
 }
 
-#[derive(Clone, Copy, Hash, Debug, PartialEq)]
+#[derive(Clone, Copy, Hash, Debug, PartialEq, Default)]
 pub struct BranchProtection {
     pub bti: bool,
     pub pac_ret: Option<PacRet>,
 }
 
-impl Default for BranchProtection {
-    fn default() -> Self {
-        BranchProtection { bti: false, pac_ret: None }
-    }
-}
-
 pub const fn default_lib_output() -> CrateType {
     CrateType::Rlib
 }
@@ -1875,7 +1869,7 @@ fn parse_opt_level(
         .into_iter()
         .flat_map(|(i, s)| {
             // NB: This can match a string without `=`.
-            if let Some("opt-level") = s.splitn(2, '=').next() { Some(i) } else { None }
+            if let Some("opt-level") = s.split('=').next() { Some(i) } else { None }
         })
         .max();
     if max_o > max_c {
@@ -1912,7 +1906,7 @@ fn select_debuginfo(
         .into_iter()
         .flat_map(|(i, s)| {
             // NB: This can match a string without `=`.
-            if let Some("debuginfo") = s.splitn(2, '=').next() { Some(i) } else { None }
+            if let Some("debuginfo") = s.split('=').next() { Some(i) } else { None }
        })
        .max();
    if max_g > max_c {
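Two things happen in this file: `BranchProtection` gets the same `derivable_impls` treatment as `ModSpans` above, and `splitn(2, '=')` becomes `split('=')`. The latter is the needless-`splitn` pattern: since only the first fragment is consumed via `.next()`, capping the number of pieces changes nothing. A standalone sketch:

    fn main() {
        let arg = "opt-level=3";

        // Only the first fragment matters, so the limit on `splitn` buys nothing.
        assert_eq!(arg.splitn(2, '=').next(), Some("opt-level"));
        assert_eq!(arg.split('=').next(), Some("opt-level"));
    }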

View File

@@ -175,7 +175,7 @@ cfg_if::cfg_if! {
             // There might still be a tail left to analyze
             let tail_start = chunk_count * CHUNK_SIZE + intra_chunk_offset;
             if tail_start < src.len() {
-                analyze_source_file_generic(&src[tail_start as usize ..],
+                analyze_source_file_generic(&src[tail_start ..],
                                             src.len() - tail_start,
                                             output_offset + BytePos::from_usize(tail_start),
                                             lines,
@@ -219,7 +219,7 @@ fn analyze_source_file_generic(
     while i < scan_len {
         let byte = unsafe {
             // We verified that i < scan_len <= src.len()
-            *src_bytes.get_unchecked(i as usize)
+            *src_bytes.get_unchecked(i)
         };
 
         // How much to advance in order to get to the next UTF-8 char in the

View File

@@ -1381,7 +1381,7 @@ impl<S: Encoder> Encodable<S> for SourceFile {
                 4 => {
                     raw_diffs = Vec::with_capacity(bytes_per_diff * num_diffs);
                     for diff in diff_iter {
-                        raw_diffs.extend_from_slice(&(diff.0 as u32).to_le_bytes());
+                        raw_diffs.extend_from_slice(&(diff.0).to_le_bytes());
                     }
                 }
                 _ => unreachable!(),

View File

@@ -941,7 +941,7 @@ impl SourceMap {
     /// Otherwise, the span reached to limit is returned.
     pub fn span_look_ahead(&self, span: Span, expect: Option<&str>, limit: Option<usize>) -> Span {
         let mut sp = span;
-        for _ in 0..limit.unwrap_or(100 as usize) {
+        for _ in 0..limit.unwrap_or(100_usize) {
            sp = self.next_point(sp);
            if let Ok(ref snippet) = self.span_to_snippet(sp) {
                if expect.map_or(false, |es| snippet == es) {

View File

@@ -81,7 +81,7 @@ fn pre_link_args(os: &'static str, arch: Arch, abi: &'static str) -> LinkArgs {
         _ => os.into(),
     };
 
-    let platform_version: StaticCow<str> = match os.as_ref() {
+    let platform_version: StaticCow<str> = match os {
         "ios" => ios_lld_platform_version(),
         "tvos" => tvos_lld_platform_version(),
         "watchos" => watchos_lld_platform_version(),

View File

@@ -5,12 +5,7 @@ pub fn target() -> Target {
     base.max_atomic_width = Some(64);
 
     base.add_pre_link_args(
         LinkerFlavor::Unix(Cc::No),
-        &[
-            "-b64".into(),
-            "-bpT:0x100000000".into(),
-            "-bpD:0x110000000".into(),
-            "-bcdtors:all:0:s".into(),
-        ],
+        &["-b64", "-bpT:0x100000000", "-bpD:0x110000000", "-bcdtors:all:0:s"],
     );
     Target {

View File

@@ -250,9 +250,9 @@ impl<I: Interner> Clone for TyKind<I> {
         match self {
             Bool => Bool,
             Char => Char,
-            Int(i) => Int(i.clone()),
-            Uint(u) => Uint(u.clone()),
-            Float(f) => Float(f.clone()),
+            Int(i) => Int(*i),
+            Uint(u) => Uint(*u),
+            Float(f) => Float(*f),
             Adt(d, s) => Adt(d.clone(), s.clone()),
             Foreign(d) => Foreign(d.clone()),
             Str => Str,
@@ -262,7 +262,7 @@ impl<I: Interner> Clone for TyKind<I> {
             Ref(r, t, m) => Ref(r.clone(), t.clone(), m.clone()),
             FnDef(d, s) => FnDef(d.clone(), s.clone()),
             FnPtr(s) => FnPtr(s.clone()),
-            Dynamic(p, r, repr) => Dynamic(p.clone(), r.clone(), repr.clone()),
+            Dynamic(p, r, repr) => Dynamic(p.clone(), r.clone(), *repr),
             Closure(d, s) => Closure(d.clone(), s.clone()),
             Generator(d, s, m) => Generator(d.clone(), s.clone(), m.clone()),
             GeneratorWitness(g) => GeneratorWitness(g.clone()),
@@ -270,7 +270,7 @@ impl<I: Interner> Clone for TyKind<I> {
             Tuple(t) => Tuple(t.clone()),
             Alias(k, p) => Alias(*k, p.clone()),
             Param(p) => Param(p.clone()),
-            Bound(d, b) => Bound(d.clone(), b.clone()),
+            Bound(d, b) => Bound(*d, b.clone()),
             Placeholder(p) => Placeholder(p.clone()),
             Infer(t) => Infer(t.clone()),
             Error(e) => Error(e.clone()),
@@ -936,7 +936,7 @@ impl<I: Interner> Clone for RegionKind<I> {
     fn clone(&self) -> Self {
         match self {
             ReEarlyBound(r) => ReEarlyBound(r.clone()),
-            ReLateBound(d, r) => ReLateBound(d.clone(), r.clone()),
+            ReLateBound(d, r) => ReLateBound(*d, r.clone()),
             ReFree(r) => ReFree(r.clone()),
             ReStatic => ReStatic,
             ReVar(r) => ReVar(r.clone()),
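The `TyKind`/`RegionKind` changes are clippy's `clone_on_copy` fix: fields such as the interned integer kinds, `DynKind`, and `DebruijnIndex` are `Copy`, so dereferencing copies them directly and `.clone()` only obscures that. A minimal illustration with a stand-in type:

    // Stand-in for a small `Copy` field such as `DebruijnIndex`.
    #[derive(Copy, Clone, Debug, PartialEq)]
    struct DebruijnIndex(u32);

    fn copy_out(d: &DebruijnIndex) -> DebruijnIndex {
        // For a `Copy` type, `*d` and `d.clone()` produce the same value;
        // the dereference makes the cheap copy explicit.
        *d
    }

    fn main() {
        let d = DebruijnIndex(3);
        assert_eq!(copy_out(&d), d);
    }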