Use is_empty() instead of len() == x to determine whether collections are empty.

Matthias Krüger 2020-02-28 14:20:33 +01:00
parent e2223c94bf
commit 9523c89f18
54 changed files with 78 additions and 82 deletions
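
For context on the pattern applied across all 54 files: this is the idiom clippy's len_zero lint encourages — ask the container whether it is empty instead of comparing its length against zero, which states the intent directly and also covers the `> 0`, `>= 1`, and `!= 0` spellings that appear in the hunks below. A minimal sketch of the idiom; the function and names here are illustrative, not taken from the diff:

fn describe(names: &[&str]) -> &'static str {
    // Before: if names.len() == 0 { ... }
    // After: ask the slice directly whether it is empty.
    if names.is_empty() { "no names" } else { "some names" }
}

fn main() {
    assert_eq!(describe(&[]), "no names");
    assert_eq!(describe(&["ferris"]), "some names");
}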

View File

@@ -3823,7 +3823,7 @@ where
         // The last index of self.v is already checked and found to match
         // by the last iteration, so we start searching a new match
         // one index to the left.
-        let remainder = if self.v.len() == 0 { &[] } else { &self.v[..(self.v.len() - 1)] };
+        let remainder = if self.v.is_empty() { &[] } else { &self.v[..(self.v.len() - 1)] };
         let idx = remainder.iter().rposition(|x| (self.pred)(x)).map(|idx| idx + 1).unwrap_or(0);
         if idx == 0 {
             self.finished = true;
@@ -4033,7 +4033,7 @@ where
             return None;
         }
-        let idx_opt = if self.v.len() == 0 {
+        let idx_opt = if self.v.is_empty() {
             None
         } else {
             // work around borrowck limitations

View File

@@ -250,7 +250,7 @@ impl<'tcx> Arena<'tcx> {
     #[inline]
     pub fn alloc_slice<T: Copy>(&self, value: &[T]) -> &mut [T] {
-        if value.len() == 0 {
+        if value.is_empty() {
             return &mut [];
         }
         self.dropless.alloc_slice(value)

View File

@@ -809,7 +809,7 @@ impl DepGraph {
             dep_node
         );
-        if unlikely!(diagnostics.len() > 0) {
+        if unlikely!(!diagnostics.is_empty()) {
             self.emit_diagnostics(tcx, data, dep_node_index, prev_dep_node_index, diagnostics);
         }

View File

@@ -19,7 +19,7 @@ use smallvec::SmallVec;
 use std::cmp::Ord;
 fn compute_ignored_attr_names() -> FxHashSet<Symbol> {
-    debug_assert!(ich::IGNORED_ATTRIBUTES.len() > 0);
+    debug_assert!(!ich::IGNORED_ATTRIBUTES.is_empty());
     ich::IGNORED_ATTRIBUTES.iter().map(|&s| s).collect()
 }

View File

@@ -14,7 +14,7 @@ impl<'ctx> rustc_target::HashStableContext for StableHashingContext<'ctx> {}
 impl<'a> HashStable<StableHashingContext<'a>> for [ast::Attribute] {
     fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) {
-        if self.len() == 0 {
+        if self.is_empty() {
             self.len().hash_stable(hcx, hasher);
             return;
         }

View File

@@ -171,7 +171,7 @@ impl<'tcx> ConstEvalErr<'tcx> {
         // Skip the last, which is just the environment of the constant. The stacktrace
         // is sometimes empty because we create "fake" eval contexts in CTFE to do work
         // on constant values.
-        if self.stacktrace.len() > 0 {
+        if !self.stacktrace.is_empty() {
             for frame_info in &self.stacktrace[..self.stacktrace.len() - 1] {
                 err.span_label(frame_info.call_site, frame_info.to_string());
             }

View File

@@ -2219,7 +2219,7 @@ impl<'tcx> Debug for Rvalue<'tcx> {
                 });
                 let region = if print_region {
                     let mut region = region.to_string();
-                    if region.len() > 0 {
+                    if !region.is_empty() {
                         region.push(' ');
                     }
                     region

View File

@@ -2473,7 +2473,7 @@ impl<'tcx> TyCtxt<'tcx> {
         // FIXME consider asking the input slice to be sorted to avoid
         // re-interning permutations, in which case that would be asserted
         // here.
-        if preds.len() == 0 {
+        if preds.is_empty() {
             // The macro-generated method below asserts we don't intern an empty slice.
             List::empty()
         } else {
@@ -2482,31 +2482,31 @@ impl<'tcx> TyCtxt<'tcx> {
     }
     pub fn intern_type_list(self, ts: &[Ty<'tcx>]) -> &'tcx List<Ty<'tcx>> {
-        if ts.len() == 0 { List::empty() } else { self._intern_type_list(ts) }
+        if ts.is_empty() { List::empty() } else { self._intern_type_list(ts) }
     }
     pub fn intern_substs(self, ts: &[GenericArg<'tcx>]) -> &'tcx List<GenericArg<'tcx>> {
-        if ts.len() == 0 { List::empty() } else { self._intern_substs(ts) }
+        if ts.is_empty() { List::empty() } else { self._intern_substs(ts) }
     }
     pub fn intern_projs(self, ps: &[ProjectionKind]) -> &'tcx List<ProjectionKind> {
-        if ps.len() == 0 { List::empty() } else { self._intern_projs(ps) }
+        if ps.is_empty() { List::empty() } else { self._intern_projs(ps) }
     }
     pub fn intern_place_elems(self, ts: &[PlaceElem<'tcx>]) -> &'tcx List<PlaceElem<'tcx>> {
-        if ts.len() == 0 { List::empty() } else { self._intern_place_elems(ts) }
+        if ts.is_empty() { List::empty() } else { self._intern_place_elems(ts) }
     }
     pub fn intern_canonical_var_infos(self, ts: &[CanonicalVarInfo]) -> CanonicalVarInfos<'tcx> {
-        if ts.len() == 0 { List::empty() } else { self._intern_canonical_var_infos(ts) }
+        if ts.is_empty() { List::empty() } else { self._intern_canonical_var_infos(ts) }
     }
     pub fn intern_clauses(self, ts: &[Clause<'tcx>]) -> Clauses<'tcx> {
-        if ts.len() == 0 { List::empty() } else { self._intern_clauses(ts) }
+        if ts.is_empty() { List::empty() } else { self._intern_clauses(ts) }
     }
     pub fn intern_goals(self, ts: &[Goal<'tcx>]) -> Goals<'tcx> {
-        if ts.len() == 0 { List::empty() } else { self._intern_goals(ts) }
+        if ts.is_empty() { List::empty() } else { self._intern_goals(ts) }
     }
     pub fn mk_fn_sig<I>(

View File

@@ -314,7 +314,7 @@ impl<'tcx> Instance<'tcx> {
     ) -> Option<Instance<'tcx>> {
         debug!("resolve(def_id={:?}, substs={:?})", def_id, substs);
         let fn_sig = tcx.fn_sig(def_id);
-        let is_vtable_shim = fn_sig.inputs().skip_binder().len() > 0
+        let is_vtable_shim = !fn_sig.inputs().skip_binder().is_empty()
             && fn_sig.input(0).skip_binder().is_param(0)
             && tcx.generics_of(def_id).has_self;
         if is_vtable_shim {

View File

@@ -798,7 +798,7 @@ impl<'tcx> LayoutCx<'tcx, TyCtxt<'tcx>> {
         // (Typechecking will reject discriminant-sizing attrs.)
         let v = present_first.unwrap();
-        let kind = if def.is_enum() || variants[v].len() == 0 {
+        let kind = if def.is_enum() || variants[v].is_empty() {
             StructKind::AlwaysSized
         } else {
             let param_env = tcx.param_env(def.did);

View File

@@ -698,7 +698,7 @@ impl<T: Copy> List<T> {
     fn from_arena<'tcx>(arena: &'tcx Arena<'tcx>, slice: &[T]) -> &'tcx List<T> {
         assert!(!mem::needs_drop::<T>());
         assert!(mem::size_of::<T>() != 0);
-        assert!(slice.len() != 0);
+        assert!(!slice.is_empty());
         // Align up the size of the len (usize) field
         let align = mem::align_of::<T>();

View File

@@ -229,7 +229,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
         err.span_label(data.span, "only `Fn` traits may use parentheses");
         if let Ok(snippet) = self.sess.source_map().span_to_snippet(data.span) {
             // Do not suggest going from `Trait()` to `Trait<>`
-            if data.inputs.len() > 0 {
+            if !data.inputs.is_empty() {
                 if let Some(split) = snippet.find('(') {
                     let trait_name = &snippet[0..split];
                     let args = &snippet[split + 1..snippet.len() - 1];

View File

@@ -791,7 +791,7 @@ impl<'a> PrintState<'a> for State<'a> {
             s.print_generic_arg(generic_arg)
         });
-        let mut comma = data.args.len() != 0;
+        let mut comma = !data.args.is_empty();
         for constraint in data.constraints.iter() {
             if comma {

View File

@@ -49,7 +49,7 @@ pub fn expand_concat(
             }
         }
     }
-    if missing_literal.len() > 0 {
+    if !missing_literal.is_empty() {
        let mut err = cx.struct_span_err(missing_literal, "expected a literal");
        err.note("only literals (like `\"foo\"`, `42` and `3.14`) can be passed to `concat!()`");
        err.emit();

View File

@@ -1096,7 +1096,7 @@ pub fn expand_preparsed_format_args(
         cx.str_pieces.push(s);
     }
-    if cx.invalid_refs.len() >= 1 {
+    if !cx.invalid_refs.is_empty() {
         cx.report_invalid_references(numbered_position_args);
     }

View File

@@ -237,7 +237,7 @@ fn fat_lto(
     let module: ModuleCodegen<ModuleLlvm> = match costliest_module {
         Some((_cost, i)) => in_memory.remove(i),
         None => {
-            assert!(serialized_modules.len() > 0, "must have at least one serialized module");
+            assert!(!serialized_modules.is_empty(), "must have at least one serialized module");
             let (buffer, name) = serialized_modules.remove(0);
             info!("no in-memory regular modules to choose from, parsing {:?}", name);
             ModuleCodegen {

View File

@@ -61,7 +61,7 @@ unsafe fn configure_llvm(sess: &Session) {
     let sess_args = cg_opts.chain(tg_opts);
     let user_specified_args: FxHashSet<_> =
-        sess_args.clone().map(|s| llvm_arg_to_arg_name(s)).filter(|s| s.len() > 0).collect();
+        sess_args.clone().map(|s| llvm_arg_to_arg_name(s)).filter(|s| !s.is_empty()).collect();
     {
         // This adds the given argument to LLVM. Unless `force` is true

View File

@@ -1524,12 +1524,12 @@ fn add_upstream_rust_crates<'a, B: ArchiveBuilder<'a>>(
     for &(cnum, _) in deps.iter().rev() {
         if let Some(missing) = info.missing_lang_items.get(&cnum) {
             end_with.extend(missing.iter().cloned());
-            if end_with.len() > 0 && group_end.is_none() {
+            if !end_with.is_empty() && group_end.is_none() {
                 group_end = Some(cnum);
             }
         }
         end_with.retain(|item| info.lang_item_to_crate.get(item) != Some(&cnum));
-        if end_with.len() == 0 && group_end.is_some() {
+        if end_with.is_empty() && group_end.is_some() {
             group_start = Some(cnum);
             break;
         }

View File

@@ -1244,10 +1244,10 @@ fn start_executing_work<B: ExtraBackendMethods>(
         while !codegen_done
             || running > 0
             || (!codegen_aborted
-                && (work_items.len() > 0
-                    || needs_fat_lto.len() > 0
-                    || needs_thin_lto.len() > 0
-                    || lto_import_only_modules.len() > 0
+                && (!work_items.is_empty()
+                    || !needs_fat_lto.is_empty()
+                    || !needs_thin_lto.is_empty()
+                    || !lto_import_only_modules.is_empty()
                     || main_thread_worker_state != MainThreadWorkerState::Idle))
         {
             // While there are still CGUs to be codegened, the coordinator has
@@ -1289,7 +1289,7 @@ fn start_executing_work<B: ExtraBackendMethods>(
             // Perform the serial work here of figuring out what we're
             // going to LTO and then push a bunch of work items onto our
             // queue to do LTO
-            if work_items.len() == 0
+            if work_items.is_empty()
                 && running == 0
                 && main_thread_worker_state == MainThreadWorkerState::Idle
             {
@@ -1354,7 +1354,7 @@ fn start_executing_work<B: ExtraBackendMethods>(
             // Spin up what work we can, only doing this while we've got available
             // parallelism slots and work left to spawn.
-            while !codegen_aborted && work_items.len() > 0 && running < tokens.len() {
+            while !codegen_aborted && !work_items.is_empty() && running < tokens.len() {
                 let (item, _) = work_items.pop().unwrap();
                 maybe_start_llvm_timer(prof, cgcx.config(item.module_kind()), &mut llvm_start_time);

View File

@@ -425,7 +425,7 @@ impl SelfProfiler {
         }
         // Warn about any unknown event names
-        if unknown_events.len() > 0 {
+        if !unknown_events.is_empty() {
             unknown_events.sort();
             unknown_events.dedup();

View File

@@ -231,7 +231,7 @@ pub trait Emitter {
         ].contains(&sugg.style)
         {
             let substitution = &sugg.substitutions[0].parts[0].snippet.trim();
-            let msg = if substitution.len() == 0 || sugg.style.hide_inline() {
+            let msg = if substitution.is_empty() || sugg.style.hide_inline() {
                 // This substitution is only removal OR we explicitly don't want to show the
                 // code inline (`hide_inline`). Therefore, we don't show the substitution.
                 format!("help: {}", sugg.msg)

View File

@@ -152,7 +152,7 @@ impl Annotation {
             // |
             //
             // Note that this would be the complete output users would see.
-            label.len() > 0
+            !label.is_empty()
         } else {
             false
         }

View File

@@ -421,7 +421,7 @@ impl<'cx, 'tcx> InferCtxt<'cx, 'tcx> {
         for _ in num_universes_in_query..num_universes_in_response {
             universe_map.push(self.create_next_universe());
         }
-        assert!(universe_map.len() >= 1); // always have the root universe
+        assert!(!universe_map.is_empty()); // always have the root universe
         assert_eq!(universe_map[ty::UniverseIndex::ROOT.as_usize()], ty::UniverseIndex::ROOT);
         // Every canonical query result includes values for each of

View File

@@ -587,7 +587,7 @@ impl<'tcx> RegionConstraintCollector<'tcx> {
         // skip no-op cases known to be satisfied
         if let VerifyBound::AllBounds(ref bs) = verify.bound {
-            if bs.len() == 0 {
+            if bs.is_empty() {
                 return;
             }
         }

View File

@@ -440,7 +440,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
     /// going to help).
     pub fn report_overflow_error_cycle(&self, cycle: &[PredicateObligation<'tcx>]) -> ! {
         let cycle = self.resolve_vars_if_possible(&cycle.to_owned());
-        assert!(cycle.len() > 0);
+        assert!(!cycle.is_empty());
         debug!("report_overflow_error_cycle: cycle={:?}", cycle);

View File

@@ -2157,7 +2157,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
                 debug!("builtin_bound: nested={:?}", nested);
                 candidates
                     .vec
-                    .push(BuiltinCandidate { has_nested: nested.skip_binder().len() > 0 });
+                    .push(BuiltinCandidate { has_nested: !nested.skip_binder().is_empty() });
             }
             BuiltinImplConditions::None => {}
             BuiltinImplConditions::Ambiguous => {

View File

@@ -375,7 +375,7 @@ impl<'s> LintLevelsBuilder<'s> {
         }
         let prev = self.cur;
-        if specs.len() > 0 {
+        if !specs.is_empty() {
             self.cur = self.sets.list.len() as u32;
             self.sets.list.push(LintSet::Node { specs: specs, parent: prev });
         }

View File

@@ -339,7 +339,7 @@ fn activate_injected_dep(
     // there's only going to be one allocator in the output.
     fn verify_ok(tcx: TyCtxt<'_>, list: &[Linkage]) {
         let sess = &tcx.sess;
-        if list.len() == 0 {
+        if list.is_empty() {
             return;
         }
         let mut panic_runtime = None;

View File

@@ -264,7 +264,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
     #[inline(always)]
     pub fn cur_frame(&self) -> usize {
-        assert!(self.stack.len() > 0);
+        assert!(!self.stack.is_empty());
         self.stack.len() - 1
     }
@@ -505,7 +505,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
         return_place: Option<PlaceTy<'tcx, M::PointerTag>>,
         return_to_block: StackPopCleanup,
     ) -> InterpResult<'tcx> {
-        if self.stack.len() > 0 {
+        if !self.stack.is_empty() {
             info!("PAUSING({}) {}", self.cur_frame(), self.frame().instance);
         }
         ::log_settings::settings().indentation += 1;
@@ -698,7 +698,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
             }
         }
-        if self.stack.len() > 0 {
+        if !self.stack.is_empty() {
             info!(
                 "CONTINUING({}) {} (unwinding = {})",
                 self.cur_frame(),

View File

@@ -28,7 +28,7 @@ fn get_switched_on_type<'tcx>(
     // Only bother checking blocks which terminate by switching on a local.
     if let Some(local) = get_discriminant_local(&terminator.kind) {
-        let stmt_before_term = (block_data.statements.len() > 0)
+        let stmt_before_term = (!block_data.statements.is_empty())
             .then(|| &block_data.statements[block_data.statements.len() - 1].kind);
         if let Some(StatementKind::Assign(box (l, Rvalue::Discriminant(place)))) = stmt_before_term

View File

@@ -369,7 +369,7 @@ where
     fn open_drop_for_adt(&mut self, adt: &'tcx ty::AdtDef, substs: SubstsRef<'tcx>) -> BasicBlock {
         debug!("open_drop_for_adt({:?}, {:?}, {:?})", self, adt, substs);
-        if adt.variants.len() == 0 {
+        if adt.variants.is_empty() {
             return self.elaborator.patch().new_block(BasicBlockData {
                 statements: vec![],
                 terminator: Some(Terminator {

View File

@@ -37,7 +37,7 @@ impl<'tcx> MirPatch<'tcx> {
         let mut resume_stmt_block = None;
         for (bb, block) in body.basic_blocks().iter_enumerated() {
             if let TerminatorKind::Resume = block.terminator().kind {
-                if block.statements.len() > 0 {
+                if !block.statements.is_empty() {
                     assert!(resume_stmt_block.is_none());
                     resume_stmt_block = Some(bb);
                 } else {

View File

@@ -1438,7 +1438,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
         let target_blocks: Vec<_> = target_candidates
             .into_iter()
             .map(|mut candidates| {
-                if candidates.len() != 0 {
+                if !candidates.is_empty() {
                     let candidate_start = this.cfg.start_new_block();
                     this.match_candidates(
                         span,

View File

@@ -222,7 +222,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
             TestKind::SwitchInt { switch_ty, ref options, indices: _ } => {
                 let target_blocks = make_target_blocks(self);
                 let terminator = if switch_ty.kind == ty::Bool {
-                    assert!(options.len() > 0 && options.len() <= 2);
+                    assert!(!options.is_empty() && options.len() <= 2);
                     if let [first_bb, second_bb] = *target_blocks {
                         let (true_bb, false_bb) = match options[0] {
                             1 => (first_bb, second_bb),

View File

@@ -141,7 +141,7 @@ impl<'a> TokenTreesReader<'a> {
                     self.last_delim_empty_block_spans.insert(delim, empty_block_span);
                 }
-                if self.open_braces.len() == 0 {
+                if self.open_braces.is_empty() {
                     // Clear up these spans to avoid suggesting them as we've found
                     // properly matched delimiters so far for an entire block.
                     self.matching_delim_spans.clear();

View File

@@ -371,7 +371,7 @@ fn prepend_attrs(
     span: rustc_span::Span,
 ) -> Option<tokenstream::TokenStream> {
     let tokens = tokens?;
-    if attrs.len() == 0 {
+    if attrs.is_empty() {
         return Some(tokens.clone());
     }
     let mut builder = tokenstream::TokenStreamBuilder::new();

View File

@@ -470,7 +470,7 @@ impl<'a> Parser<'a> {
         // FIXME: we would like to report this in ast_validation instead, but we currently do not
         // preserve ordering of generic parameters with respect to associated type binding, so we
         // lose that information after parsing.
-        if misplaced_assoc_ty_constraints.len() > 0 {
+        if !misplaced_assoc_ty_constraints.is_empty() {
             let mut err = self.struct_span_err(
                 args_lo.to(self.prev_span),
                 "associated type bindings must be declared after generic parameters",

View File

@@ -158,7 +158,7 @@ fn calc_unused_spans(
             }
         }
         ast::UseTreeKind::Nested(ref nested) => {
-            if nested.len() == 0 {
+            if nested.is_empty() {
                 return UnusedSpanResult::FlatUnused(use_tree.span, full_span);
             }

View File

@@ -1478,7 +1478,7 @@ impl<'a, 'b> ImportResolver<'a, 'b> {
             }
         });
-        if reexports.len() > 0 {
+        if !reexports.is_empty() {
             if let Some(def_id) = module.def_id() {
                 self.r.export_map.insert(def_id, reexports);
             }

View File

@@ -1016,7 +1016,7 @@ impl<'a, 'b, 'ast> LateResolutionVisitor<'a, 'b, 'ast> {
                 trait_items
                     .iter()
                     .filter_map(|item| match &item.kind {
-                        AssocItemKind::TyAlias(_, _, bounds, _) if bounds.len() == 0 => {
+                        AssocItemKind::TyAlias(_, _, bounds, _) if bounds.is_empty() => {
                             Some(item.ident)
                         }
                         _ => None,

View File

@@ -354,7 +354,7 @@ impl<'a> LateResolutionVisitor<'a, '_, '_> {
         let mut has_self_arg = None;
         if let PathSource::Expr(parent) = source {
             match &parent?.kind {
-                ExprKind::Call(_, args) if args.len() > 0 => {
+                ExprKind::Call(_, args) if !args.is_empty() => {
                     let mut expr_kind = &args[0].kind;
                     loop {
                         match expr_kind {
@@ -968,18 +968,14 @@ impl<'tcx> LifetimeContext<'_, 'tcx> {
         for missing in &self.missing_named_lifetime_spots {
             match missing {
                 MissingLifetimeSpot::Generics(generics) => {
-                    let (span, sugg) = if let Some(param) = generics
-                        .params
-                        .iter()
-                        .filter(|p| match p.kind {
+                    let (span, sugg) = if let Some(param) =
+                        generics.params.iter().find(|p| match p.kind {
                             hir::GenericParamKind::Type {
                                 synthetic: Some(hir::SyntheticTyParamKind::ImplTrait),
                                 ..
                             } => false,
                             _ => true,
-                        })
-                        .next()
-                    {
+                        }) {
                         (param.span.shrink_to_lo(), format!("{}, ", lifetime_ref))
                     } else {
                         (generics.span, format!("<{}>", lifetime_ref))

View File

@@ -1129,7 +1129,7 @@ pub fn parse_error_format(
         // Conservatively require that the `--json` argument is coupled with
         // `--error-format=json`. This means that `--json` is specified we
        // should actually be emitting JSON blobs.
-        _ if matches.opt_strs("json").len() > 0 => {
+        _ if !matches.opt_strs("json").is_empty() => {
            early_error(
                ErrorOutputType::default(),
                "using `--json` requires also using `--error-format=json`",

View File

@@ -1205,7 +1205,7 @@ impl SourceFile {
     /// number. If the source_file is empty or the position is located before the
     /// first line, `None` is returned.
     pub fn lookup_line(&self, pos: BytePos) -> Option<usize> {
-        if self.lines.len() == 0 {
+        if self.lines.is_empty() {
             return None;
         }

View File

@@ -92,7 +92,7 @@ fn dropck_outlives<'tcx>(
             // information and will just decrease the speed at which we can emit these errors
             // (since we'll be printing for just that much longer for the often enormous types
             // that result here).
-            if result.overflows.len() >= 1 {
+            if !result.overflows.is_empty() {
                 break;
             }

View File

@@ -1743,7 +1743,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
         potential_assoc_types: Vec<Span>,
         trait_bounds: &[hir::PolyTraitRef<'_>],
     ) {
-        if !associated_types.values().any(|v| v.len() > 0) {
+        if !associated_types.values().any(|v| !v.is_empty()) {
             return;
         }
         let tcx = self.tcx();

View File

@@ -529,7 +529,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
             .lookup_op_method(fn_sig.output(), &[other_ty], Op::Binary(op, is_assign))
             .is_ok()
         {
-            let (variable_snippet, applicability) = if fn_sig.inputs().len() > 0 {
+            let (variable_snippet, applicability) = if !fn_sig.inputs().is_empty() {
                 (
                     format!("{}( /* arguments */ )", source_map.span_to_snippet(span).unwrap()),
                     Applicability::HasPlaceholders,

View File

@@ -339,7 +339,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
             });
         }
-        if pat_adjustments.len() > 0 {
+        if !pat_adjustments.is_empty() {
             debug!("default binding mode is now {:?}", def_bm);
             self.inh.tables.borrow_mut().pat_adjustments_mut().insert(pat.hir_id, pat_adjustments);
         }
@@ -987,7 +987,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
             .filter(|ident| !used_fields.contains_key(&ident))
             .collect::<Vec<_>>();
-        if inexistent_fields.len() > 0 && !variant.recovered {
+        if !inexistent_fields.is_empty() && !variant.recovered {
             self.error_inexistent_fields(
                 kind_name,
                 &inexistent_fields,
@@ -1018,7 +1018,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
             if etc {
                 tcx.sess.struct_span_err(span, "`..` cannot be used in union patterns").emit();
             }
-        } else if !etc && unmentioned_fields.len() > 0 {
+        } else if !etc && !unmentioned_fields.is_empty() {
             self.error_unmentioned_fields(span, &unmentioned_fields, variant);
         }
         no_field_errors

View File

@@ -2481,7 +2481,7 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, id: DefId) -> CodegenFnAttrs {
     // purpose functions as they wouldn't have the right target features
     // enabled. For that reason we also forbid #[inline(always)] as it can't be
     // respected.
-    if codegen_fn_attrs.target_features.len() > 0 {
+    if !codegen_fn_attrs.target_features.is_empty() {
         if codegen_fn_attrs.inline == InlineAttr::Always {
             if let Some(span) = inline_span {
                 tcx.sess.span_err(

View File

@@ -398,7 +398,7 @@ impl Clean<Lifetime> for hir::GenericParam<'_> {
     fn clean(&self, _: &DocContext<'_>) -> Lifetime {
         match self.kind {
             hir::GenericParamKind::Lifetime { .. } => {
-                if self.bounds.len() > 0 {
+                if !self.bounds.is_empty() {
                     let mut bounds = self.bounds.iter().map(|bound| match bound {
                         hir::GenericBound::Outlives(lt) => lt,
                         _ => panic!(),
@@ -607,7 +607,7 @@ impl Clean<GenericParamDef> for hir::GenericParam<'_> {
     fn clean(&self, cx: &DocContext<'_>) -> GenericParamDef {
         let (name, kind) = match self.kind {
             hir::GenericParamKind::Lifetime { .. } => {
-                let name = if self.bounds.len() > 0 {
+                let name = if !self.bounds.is_empty() {
                     let mut bounds = self.bounds.iter().map(|bound| match bound {
                         hir::GenericBound::Outlives(lt) => lt,
                         _ => panic!(),

View File

@@ -201,7 +201,7 @@ impl Item {
                 classes.push("deprecated");
             }
-            if classes.len() != 0 { Some(classes.join(" ")) } else { None }
+            if !classes.is_empty() { Some(classes.join(" ")) } else { None }
         })
     }

View File

@@ -2783,7 +2783,7 @@ fn assoc_type(
 fn render_stability_since_raw(w: &mut Buffer, ver: Option<&str>, containing_ver: Option<&str>) {
     if let Some(v) = ver {
-        if containing_ver != ver && v.len() > 0 {
+        if containing_ver != ver && !v.is_empty() {
             write!(w, "<span class='since' title='Stable since Rust version {0}'>{0}</span>", v)
         }
     }
@@ -3143,7 +3143,7 @@ fn render_attribute(attr: &ast::MetaItem) -> Option<String> {
             .filter_map(|attr| attr.meta_item().and_then(|mi| render_attribute(mi)))
             .collect();
-        if display.len() > 0 { Some(format!("{}({})", path, display.join(", "))) } else { None }
+        if !display.is_empty() { Some(format!("{}({})", path, display.join(", "))) } else { None }
     } else {
         None
     }
@@ -3178,7 +3178,7 @@ fn render_attributes(w: &mut Buffer, it: &clean::Item, top: bool) {
             attrs.push_str(&format!("#[{}]\n", s));
         }
     }
-    if attrs.len() > 0 {
+    if !attrs.is_empty() {
         write!(
             w,
             "<span class=\"docblock attributes{}\">{}</span>",

View File

@@ -191,7 +191,7 @@ fn build_rule(v: &[u8], positions: &[usize]) -> String {
             .replace("{", "")
             .replace("}", "")
             .split(' ')
-            .filter(|s| s.len() > 0)
+            .filter(|s| !s.is_empty())
             .collect::<Vec<&str>>()
             .join(" "),
     )

View File

@@ -304,7 +304,7 @@ impl Backtrace {
         // If no frames came out assume that this is an unsupported platform
         // since `backtrace` doesn't provide a way of learning this right now,
         // and this should be a good enough approximation.
-        let inner = if frames.len() == 0 {
+        let inner = if frames.is_empty() {
             Inner::Unsupported
         } else {
             Inner::Captured(Mutex::new(Capture {

View File

@@ -80,7 +80,7 @@ impl<T: Write> OutputFormatter for JsonFormatter<T> {
         state: &ConsoleTestState,
     ) -> io::Result<()> {
         let display_stdout = state.options.display_output || *result != TestResult::TrOk;
-        let stdout = if display_stdout && stdout.len() > 0 {
+        let stdout = if display_stdout && !stdout.is_empty() {
             Some(String::from_utf8_lossy(stdout))
         } else {
             None