mirror of https://github.com/rust-lang/rust.git
synced 2024-11-25 16:24:46 +00:00

commit 7c2d57e0fa (parent 0d13f6afeb)

couple of clippy::complexity fixes
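
All of the hunks below are mechanical cleanups from the clippy::complexity group: useless_conversion (identity `.into()` calls), useless_format (`format!` without interpolation), clone_on_copy (`.clone()` on `Copy` types), search_is_some (`find(...).is_none()` instead of `!any(...)`), nonminimal_bool (`!x.is_some()` instead of `x.is_none()`), and map_flatten (`.map(...).flatten()` instead of `.flat_map(...)`).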
@@ -832,7 +832,7 @@ impl Diagnostic {
         name: impl Into<Cow<'static, str>>,
         arg: DiagnosticArgValue<'static>,
     ) -> &mut Self {
-        self.args.push((name.into(), arg.into()));
+        self.args.push((name.into(), arg));
         self
     }

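
Note: `arg` already has type `DiagnosticArgValue<'static>`, so `arg.into()` was an identity conversion; this is the `clippy::useless_conversion` lint. A minimal standalone sketch of the pattern (the `Wrapper` type and `store` function are hypothetical, for illustration only):

    struct Wrapper(u32);

    fn store(items: &mut Vec<Wrapper>, w: Wrapper) {
        // `w` is already a `Wrapper`, so `Into::into` resolves to the blanket
        // identity impl; clippy::useless_conversion says to drop the call.
        items.push(w); // was: items.push(w.into());
    }

    fn main() {
        let mut items = Vec::new();
        store(&mut items, Wrapper(1));
        assert_eq!(items.len(), 1);
    }
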
@@ -270,7 +270,7 @@ fn check_binders(
             MISSING_FRAGMENT_SPECIFIER,
             span,
             node_id,
-            &format!("missing fragment specifier"),
+            "missing fragment specifier",
         );
     }
     if !macros.is_empty() {
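
Note: `format!` with a bare literal and no `{}` interpolation just allocates a `String` copy of the literal, and the extra `&` then reborrows it; `clippy::useless_format` suggests using the `&'static str` directly. A minimal sketch (the `warn` helper is hypothetical):

    fn warn(msg: &str) {
        eprintln!("warning: {}", msg);
    }

    fn main() {
        // was: warn(&format!("missing fragment specifier"));
        warn("missing fragment specifier"); // no intermediate String allocation
    }
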
@@ -63,9 +63,9 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
     /// common state. Used in coherence.
     pub fn fork(&self) -> Self {
         Self {
-            tcx: self.tcx.clone(),
-            defining_use_anchor: self.defining_use_anchor.clone(),
-            in_progress_typeck_results: self.in_progress_typeck_results.clone(),
+            tcx: self.tcx,
+            defining_use_anchor: self.defining_use_anchor,
+            in_progress_typeck_results: self.in_progress_typeck_results,
             inner: self.inner.clone(),
             skip_leak_check: self.skip_leak_check.clone(),
             lexical_region_resolutions: self.lexical_region_resolutions.clone(),
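
Note: `TyCtxt<'tcx>` and the other two rewritten fields are `Copy`, so `.clone()` was just a verbose bitwise copy; this is `clippy::clone_on_copy`. The fields that keep their `.clone()` calls (`inner`, `skip_leak_check`, and so on) are presumably not `Copy`. A minimal sketch of the distinction (both types are hypothetical):

    #[derive(Clone, Copy)]
    struct Handle(usize); // Copy: pass it by value, no clone() needed

    #[derive(Clone)]
    struct Buffer(Vec<u8>); // owns heap data: clone() is a real deep copy

    fn fork(h: Handle, b: &Buffer) -> (Handle, Buffer) {
        // clippy::clone_on_copy: `h.clone()` is spelled `h` for a Copy type.
        (h, b.clone())
    }

    fn main() {
        let (h, b) = fork(Handle(7), &Buffer(vec![1, 2, 3]));
        assert_eq!(h.0, 7);
        assert_eq!(b.0.len(), 3);
    }
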
@@ -44,7 +44,7 @@ pub fn symbols(input: TokenStream) -> TokenStream {
 #[proc_macro]
 #[allow_internal_unstable(step_trait, rustc_attrs, trusted_step)]
 pub fn newtype_index(input: TokenStream) -> TokenStream {
-    newtype::newtype(input).into()
+    newtype::newtype(input)
 }

 decl_derive!([HashStable, attributes(stable_hasher)] => hash_stable::hash_stable_derive);
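
Note: the same `clippy::useless_conversion` pattern as in the first hunk: `newtype::newtype(input)` presumably already returns a `proc_macro::TokenStream`, so the trailing `.into()` converted the value to its own type.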
@@ -191,7 +191,7 @@ impl<'tcx> Ty<'tcx> {
         tcx: TyCtxt<'tcx>,
         param_env: ty::ParamEnv<'tcx>,
     ) -> DefIdForest<'tcx> {
-        tcx.type_uninhabited_from(param_env.and(self)).clone()
+        tcx.type_uninhabited_from(param_env.and(self))
     }
 }

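
Note: the `type_uninhabited_from` query already returns a value that can be used directly (presumably a `Copy`/interned `DefIdForest`), so the trailing `.clone()` was redundant (`clippy::clone_on_copy`).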
@@ -39,7 +39,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
                 }
             };

             Constant { span, user_ty: None, literal }
         }
         ExprKind::NonHirLiteral { lit, user_ty } => {
             let user_ty = user_ty.map(|user_ty| {
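
Note: another `clippy::useless_conversion` fix, with a bonus: once `literal.into()` becomes plain `literal`, Rust's struct-field-init shorthand turns `literal: literal` into just `literal`.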
@@ -423,11 +423,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
             }
             thir::InlineAsmOperand::Const { value, span } => {
                 mir::InlineAsmOperand::Const {
-                    value: Box::new(Constant {
-                        span,
-                        user_ty: None,
-                        literal: value.into(),
-                    }),
+                    value: Box::new(Constant { span, user_ty: None, literal: value }),
                 }
             }
             thir::InlineAsmOperand::SymFn { expr } => mir::InlineAsmOperand::SymFn {
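
Note: the same fix again; dropping `value.into()` makes the `Constant` expression short enough to fit on one line, so the five-line struct literal collapses.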
@@ -441,7 +441,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
                     // Need to experiment.
                     user_ty: None,

-                    literal: method.into(),
+                    literal: method,
                 })),
                 args: vec![val, expect],
                 destination: Some((eq_result, eq_block)),
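
Note: same `clippy::useless_conversion` pattern; `method` presumably already has the type of the `literal` field.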
@@ -539,13 +539,13 @@ fn report_unused_unsafe(tcx: TyCtxt<'_>, kind: UnusedUnsafe, id: HirId) {
         UnusedUnsafe::InUnsafeBlock(id) => {
             db.span_label(
                 tcx.sess.source_map().guess_head_span(tcx.hir().span(id)),
-                format!("because it's nested under this `unsafe` block"),
+                "because it's nested under this `unsafe` block",
             );
         }
         UnusedUnsafe::InUnsafeFn(id, usage_lint_root) => {
             db.span_label(
                 tcx.sess.source_map().guess_head_span(tcx.hir().span(id)),
-                format!("because it's nested under this `unsafe` fn"),
+                "because it's nested under this `unsafe` fn",
             )
             .note(
                 "this `unsafe` block does contain unsafe operations, \
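
Note: two more `clippy::useless_format` fixes; `span_label` presumably accepts any `impl Into<String>` (or similar), so the string literals work directly and skip the `format!` machinery.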
@@ -234,13 +234,13 @@ impl<'a> StringReader<'a> {
             rustc_lexer::TokenKind::InvalidIdent
                 // Do not recover an identifier with emoji if the codepoint is a confusable
                 // with a recoverable substitution token, like `➖`.
-                if UNICODE_ARRAY
+                if !UNICODE_ARRAY
                     .iter()
-                    .find(|&&(c, _, _)| {
+                    .any(|&(c, _, _)| {
                         let sym = self.str_from(start);
                         sym.chars().count() == 1 && c == sym.chars().next().unwrap()
                     })
-                    .is_none() =>
+                    =>
             {
                 let sym = nfc_normalize(self.str_from(start));
                 let span = self.mk_sp(start, self.pos);
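
Note: `.iter().find(...).is_none()` computes the found element only to discard it; `clippy::search_is_some` rewrites it as `!...any(...)`, which states the predicate directly and sheds one level of reference in the closure (`&&(c, _, _)` becomes `&(c, _, _)`). A minimal sketch:

    fn main() {
        let table = [('a', 1), ('b', 2), ('c', 3)];

        // was: table.iter().find(|&&(c, _)| c == 'z').is_none()
        let missing = !table.iter().any(|&(c, _)| c == 'z');
        assert!(missing);
    }
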
@@ -133,9 +133,9 @@ impl<'a, 'tcx> Annotator<'a, 'tcx> {
            }

            // `Deprecation` is just two pointers, no need to intern it
-           let depr_entry = DeprecationEntry::local(depr.clone(), def_id);
+           let depr_entry = DeprecationEntry::local(*depr, def_id);
            self.index.depr_map.insert(def_id, depr_entry);
-       } else if let Some(parent_depr) = self.parent_depr.clone() {
+       } else if let Some(parent_depr) = self.parent_depr {
            if inherit_deprecation.yes() {
                is_deprecated = true;
                info!("tagging child {:?} as deprecated from parent", def_id);
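
Note: the existing comment already hints at the fix: `Deprecation` is small and `Copy`, so `*depr` copies it without the ceremony of `.clone()` (`clippy::clone_on_copy`), and `self.parent_depr`, presumably an `Option` of `Copy` data, can likewise be read out without cloning.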
@@ -238,7 +238,7 @@ impl Session {
         }
         diag.emit();
         // If we should err, make sure we did.
-        if must_err && !self.has_errors().is_some() {
+        if must_err && self.has_errors().is_none() {
             // We have skipped a feature gate, and not run into other errors... reject.
             self.err(
                 "`-Zunleash-the-miri-inside-of-you` may not be used to circumvent feature \
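
Note: `!x.is_some()` is the roundabout spelling of `x.is_none()`; clippy reports it under `clippy::nonminimal_bool`. A minimal sketch:

    fn main() {
        let reported_error: Option<u32> = None;
        let must_err = true;

        // was: if must_err && !reported_error.is_some() { ... }
        if must_err && reported_error.is_none() {
            println!("expected an error, but none was reported");
        }
    }
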
@@ -113,11 +113,11 @@ where
             data = arg_scalar(cx, &scalar, offset, data);
         }
         abi::Abi::Aggregate { .. } => {
-            for i in 0..layout.fields.count().clone() {
+            for i in 0..layout.fields.count() {
                 if offset < layout.fields.offset(i) {
                     offset = layout.fields.offset(i);
                 }
-                data = parse_structure(cx, layout.field(cx, i).clone(), data.clone(), offset);
+                data = parse_structure(cx, layout.field(cx, i), data.clone(), offset);
             }
         }
         _ => {
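
Note: `layout.fields.count()` returns a plain `usize` and `layout.field(cx, i)` presumably yields a `Copy` layout handle, so both `.clone()` calls were `clippy::clone_on_copy` noise; `data.clone()` stays, presumably because `Sdata` is meant to be duplicated there.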
@@ -161,7 +161,7 @@ where

     let mut data = parse_structure(
         cx,
-        arg.layout.clone(),
+        arg.layout,
         Sdata {
             prefix: [None; 8],
             prefix_index: 0,
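
Note: the same fix: `arg.layout` (presumably a `Copy` type, since layouts are interned) can be passed by value directly.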
@@ -258,7 +258,7 @@ impl<'tcx> OnUnimplementedDirective {
                 enclosing_scope = Some(enclosing_scope_.clone());
             }

-            append_const_msg = command.append_const_msg.clone();
+            append_const_msg = command.append_const_msg;
         }

         OnUnimplementedNote {
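
Note: one more `clippy::clone_on_copy` fix; `command.append_const_msg` is presumably an `Option` of `Copy` data, and `Option<T>` is itself `Copy` whenever `T` is, so a plain read suffices. The `enclosing_scope_.clone()` a few lines up survives, presumably because that value is not `Copy`.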
@@ -415,8 +415,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
             .get_if_local(def_id)
             .and_then(|node| node.body_id())
             .into_iter()
-            .map(|id| tcx.hir().body(id).params)
-            .flatten();
+            .flat_map(|id| tcx.hir().body(id).params)
+            ;

         for param in params {
             spans.push_span_label(param.span, String::new());
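
Note: `.map(...).flatten()` on an iterator is exactly `.flat_map(...)` (`clippy::map_flatten`). The commit leaves the orphaned `;` from the deleted `.flatten();` line on its own line; that is still valid syntax, and a later rustfmt pass would fold it up. A minimal sketch:

    fn main() {
        let groups = vec![vec![1, 2], vec![3], vec![]];

        // was: groups.iter().map(|g| g.iter()).flatten()
        let flat: Vec<i32> = groups.iter().flat_map(|g| g.iter()).copied().collect();
        assert_eq!(flat, [1, 2, 3]);
    }
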
@@ -646,7 +646,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {

         // now get all predicates in the same types as the where bounds, so we can chain them
         let predicates_from_where =
-            where_predicates.iter().flatten().map(|bounds| bounds.iter()).flatten();
+            where_predicates.iter().flatten().flat_map(|bounds| bounds.iter());

         // extract all bounds from the source code using their spans
         let all_matching_bounds_strs = expected_generic_param
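
Note: the same `clippy::map_flatten` rewrite as above. Only the `.map(|bounds| bounds.iter()).flatten()` pair collapses into `.flat_map(...)`; the earlier `.flatten()` in the chain flattens a different layer and rightly stays.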