Auto merge of #69746 - Dylan-DPC:rollup-wr6dvdk, r=Dylan-DPC

Rollup of 8 pull requests

Successful merges:

 - #69697 (Add explanation for E0380)
 - #69698 (Use associated constants of integer types)
 - #69711 (Update macros.rs: fix documentation typo.)
 - #69713 (more clippy cleanups)
 - #69728 (Make link to `std::str` active)
 - #69732 (Clean E0382 and E0384 explanations)
 - #69736 (even more clippy cleanups)
 - #69742 (Fixed a typo)

Failed merges:

r? @ghost
bors 2020-03-05 21:39:00 +00:00
commit b818ccc74c
57 changed files with 128 additions and 129 deletions

View File

@ -407,7 +407,7 @@ impl String {
///
/// assert_eq!(s.capacity(), cap);
///
/// // ...but this may make the vector reallocate
/// // ...but this may make the string reallocate
/// s.push('a');
/// ```
#[inline]
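
For context, a minimal standalone sketch of the behavior the corrected doc comment describes, using only the stable `String` API:

```rust
fn main() {
    let cap = 10;
    let mut s = String::with_capacity(cap);
    assert!(s.capacity() >= cap);

    // Pushing up to `cap` ASCII bytes stays within the reserved capacity...
    for _ in 0..cap {
        s.push('a');
    }

    // ...but this push may make the string reallocate.
    s.push('a');
    assert!(s.capacity() >= cap);
}
```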

View File

@ -2,7 +2,9 @@
//! String manipulation.
//!
//! For more details, see the `std::str` module.
//! For more details, see the [`std::str`] module.
//!
//! [`std::str`]: ../../std/str/index.html
#![stable(feature = "rust1", since = "1.0.0")]

View File

@ -524,7 +524,7 @@ impl DepGraph {
edge_list_indices.push((start, end));
}
debug_assert!(edge_list_data.len() <= ::std::u32::MAX as usize);
debug_assert!(edge_list_data.len() <= u32::MAX as usize);
debug_assert_eq!(edge_list_data.len(), total_edge_count);
SerializedDepGraph { nodes, fingerprints, edge_list_indices, edge_list_data }

View File

@ -818,9 +818,9 @@ impl UndefMask {
// First set all bits except the first `bita`,
// then unset the last `64 - bitb` bits.
let range = if bitb == 0 {
u64::max_value() << bita
u64::MAX << bita
} else {
(u64::max_value() << bita) & (u64::max_value() >> (64 - bitb))
(u64::MAX << bita) & (u64::MAX >> (64 - bitb))
};
if new_state {
self.blocks[blocka] |= range;
@ -832,21 +832,21 @@ impl UndefMask {
// across block boundaries
if new_state {
// Set `bita..64` to `1`.
self.blocks[blocka] |= u64::max_value() << bita;
self.blocks[blocka] |= u64::MAX << bita;
// Set `0..bitb` to `1`.
if bitb != 0 {
self.blocks[blockb] |= u64::max_value() >> (64 - bitb);
self.blocks[blockb] |= u64::MAX >> (64 - bitb);
}
// Fill in all the other blocks (much faster than one bit at a time).
for block in (blocka + 1)..blockb {
self.blocks[block] = u64::max_value();
self.blocks[block] = u64::MAX;
}
} else {
// Set `bita..64` to `0`.
self.blocks[blocka] &= !(u64::max_value() << bita);
self.blocks[blocka] &= !(u64::MAX << bita);
// Set `0..bitb` to `0`.
if bitb != 0 {
self.blocks[blockb] &= !(u64::max_value() >> (64 - bitb));
self.blocks[blockb] &= !(u64::MAX >> (64 - bitb));
}
// Fill in all the other blocks (much faster than one bit at a time).
for block in (blocka + 1)..blockb {
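
A standalone sketch of the mask arithmetic used above (the `range_mask` helper is hypothetical, not part of the compiler): it builds a `u64` mask covering bits `bita..bitb` of one block, where `bitb == 0` encodes "through the end of the block".

```rust
fn range_mask(bita: u32, bitb: u32) -> u64 {
    if bitb == 0 {
        // An end of 0 means "through the last bit of the block".
        u64::MAX << bita
    } else {
        // Set all bits except the first `bita`, then unset the last `64 - bitb`.
        (u64::MAX << bita) & (u64::MAX >> (64 - bitb))
    }
}

fn main() {
    // Bits 4..8 set: 0b1111_0000.
    assert_eq!(range_mask(4, 8), 0b1111_0000);
    // Bits 60..64 (bitb == 0 encodes "to the end of the block").
    assert_eq!(range_mask(60, 0), 0xF000_0000_0000_0000);
}
```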

View File

@ -78,9 +78,9 @@ pub trait PointerArithmetic: layout::HasDataLayout {
fn overflowing_signed_offset(&self, val: u64, i: i128) -> (u64, bool) {
// FIXME: is it possible to over/underflow here?
if i < 0 {
// Trickery to ensure that `i64::min_value()` works fine: compute `n = -i`.
// Trickery to ensure that `i64::MIN` works fine: compute `n = -i`.
// This formula only works for true negative values; it overflows for zero!
let n = u64::max_value() - (i as u64) + 1;
let n = u64::MAX - (i as u64) + 1;
let res = val.overflowing_sub(n);
self.truncate_to_ptr(res)
} else {
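
A small sketch of the negation trick in this hunk (the `magnitude_of_negative` helper is hypothetical): for a strictly negative `i` whose magnitude fits in `u64`, `u64::MAX - (i as u64) + 1` is the two's-complement negation, so it yields `-i` without ever computing `-i` in a signed type.

```rust
fn magnitude_of_negative(i: i128) -> u64 {
    // Only valid for true negative values, as the comment above notes:
    // for `i == 0` the `+ 1` would overflow.
    debug_assert!(i < 0);
    u64::MAX - (i as u64) + 1
}

fn main() {
    assert_eq!(magnitude_of_negative(-1), 1);
    assert_eq!(magnitude_of_negative(-42), 42);
    // `-i64::MIN` does not fit in i64, but its magnitude fits in u64.
    assert_eq!(magnitude_of_negative(i64::MIN as i128), 1u64 << 63);
}
```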

View File

@ -1198,7 +1198,7 @@ impl<'tcx> TerminatorKind<'tcx> {
t: BasicBlock,
f: BasicBlock,
) -> TerminatorKind<'tcx> {
static BOOL_SWITCH_FALSE: &'static [u128] = &[0];
static BOOL_SWITCH_FALSE: &[u128] = &[0];
TerminatorKind::SwitchInt {
discr: cond,
switch_ty: tcx.types.bool,

View File

@ -415,9 +415,9 @@ impl<'a, 'tcx> Lift<'tcx> for traits::ObligationCauseCode<'a> {
super::ReferenceOutlivesReferent(ty) => {
tcx.lift(&ty).map(super::ReferenceOutlivesReferent)
}
super::ObjectTypeBound(ty, r) => tcx
.lift(&ty)
.and_then(|ty| tcx.lift(&r).and_then(|r| Some(super::ObjectTypeBound(ty, r)))),
super::ObjectTypeBound(ty, r) => {
tcx.lift(&ty).and_then(|ty| tcx.lift(&r).map(|r| super::ObjectTypeBound(ty, r)))
}
super::ObjectCastObligation(ty) => tcx.lift(&ty).map(super::ObjectCastObligation),
super::Coercion { source, target } => {
Some(super::Coercion { source: tcx.lift(&source)?, target: tcx.lift(&target)? })

View File

@ -7,7 +7,6 @@ use rustc_span::DUMMY_SP;
use std::cmp;
use std::fmt;
use std::i128;
use std::iter;
use std::mem;
use std::ops::Bound;
@ -1001,7 +1000,7 @@ impl<'tcx> LayoutCx<'tcx, TyCtxt<'tcx>> {
}
}
let (mut min, mut max) = (i128::max_value(), i128::min_value());
let (mut min, mut max) = (i128::MAX, i128::MIN);
let discr_type = def.repr.discr_type();
let bits = Integer::from_attr(self, discr_type).size().bits();
for (i, discr) in def.discriminants(tcx) {
@ -1021,7 +1020,7 @@ impl<'tcx> LayoutCx<'tcx, TyCtxt<'tcx>> {
}
}
// We might have no inhabited variants, so pretend there's at least one.
if (min, max) == (i128::max_value(), i128::min_value()) {
if (min, max) == (i128::MAX, i128::MIN) {
min = 0;
max = 0;
}

View File

@ -920,7 +920,7 @@ pub trait PrettyPrinter<'tcx>:
}
(ConstValue::Scalar(Scalar::Raw { data, .. }), ty::Uint(ui)) => {
let bit_size = Integer::from_attr(&self.tcx(), UnsignedInt(*ui)).size();
let max = truncate(u128::max_value(), bit_size);
let max = truncate(u128::MAX, bit_size);
let ui_str = ui.name_str();
if data == max {
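
The `truncate` call above masks a value down to a given bit width; a hypothetical standalone version (taking a plain bit count instead of the compiler's `Size`) behaves like this:

```rust
fn truncate(value: u128, bits: u32) -> u128 {
    // Keep only the low `bits` bits; 128 needs a special case because
    // `1u128 << 128` would overflow.
    if bits == 128 { value } else { value & ((1u128 << bits) - 1) }
}

fn main() {
    // The all-ones pattern truncated to a type's width is that type's MAX;
    // the hunk above compares `data == max`, presumably to render such
    // values specially.
    assert_eq!(truncate(u128::MAX, 8), u8::MAX as u128);
    assert_eq!(truncate(u128::MAX, 16), u16::MAX as u128);
    assert_eq!(truncate(u128::MAX, 128), u128::MAX);
}
```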

View File

@ -92,7 +92,7 @@ struct AbsoluteBytePos(u32);
impl AbsoluteBytePos {
fn new(pos: usize) -> AbsoluteBytePos {
debug_assert!(pos <= ::std::u32::MAX as usize);
debug_assert!(pos <= u32::MAX as usize);
AbsoluteBytePos(pos as u32)
}
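
The same assert-then-cast narrowing pattern, as a tiny standalone sketch (hypothetical `to_u32` helper):

```rust
fn to_u32(pos: usize) -> u32 {
    // The debug assertion guarantees the cast below cannot silently truncate.
    debug_assert!(pos <= u32::MAX as usize);
    pos as u32
}

fn main() {
    assert_eq!(to_u32(4096), 4096);
}
```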

View File

@ -50,11 +50,11 @@ fn signed_min(size: Size) -> i128 {
}
fn signed_max(size: Size) -> i128 {
i128::max_value() >> (128 - size.bits())
i128::MAX >> (128 - size.bits())
}
fn unsigned_max(size: Size) -> u128 {
u128::max_value() >> (128 - size.bits())
u128::MAX >> (128 - size.bits())
}
fn int_size_and_signed<'tcx>(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> (Size, bool) {
@ -77,7 +77,7 @@ impl<'tcx> Discr<'tcx> {
let min = signed_min(size);
let max = signed_max(size);
let val = sign_extend(self.val, size) as i128;
assert!(n < (i128::max_value() as u128));
assert!(n < (i128::MAX as u128));
let n = n as i128;
let oflo = val > max - n;
let val = if oflo { min + (n - (max - val) - 1) } else { val + n };
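
A standalone check of the `signed_max`/`unsigned_max` arithmetic above for a concrete width (8 bits, chosen for illustration): shifting the all-ones pattern right by `128 - bits` leaves exactly the largest value representable in `bits` bits.

```rust
fn main() {
    let bits = 8u32;
    let signed_max = i128::MAX >> (128 - bits);
    let unsigned_max = u128::MAX >> (128 - bits);
    assert_eq!(signed_max, i8::MAX as i128);   // 127
    assert_eq!(unsigned_max, u8::MAX as u128); // 255
}
```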

View File

@ -284,7 +284,7 @@ impl<'a, 'b> Context<'a, 'b> {
err.tool_only_span_suggestion(
sp,
&format!("use the `{}` trait", name),
fmt.to_string(),
(*fmt).to_string(),
Applicability::MaybeIncorrect,
);
}
@ -476,7 +476,7 @@ impl<'a, 'b> Context<'a, 'b> {
match ty {
Placeholder(_) => {
// record every (position, type) combination only once
let ref mut seen_ty = self.arg_unique_types[arg];
let seen_ty = &mut self.arg_unique_types[arg];
let i = seen_ty.iter().position(|x| *x == ty).unwrap_or_else(|| {
let i = seen_ty.len();
seen_ty.push(ty);
@ -526,7 +526,7 @@ impl<'a, 'b> Context<'a, 'b> {
// Map the arguments
for i in 0..args_len {
let ref arg_types = self.arg_types[i];
let arg_types = &self.arg_types[i];
let arg_offsets = arg_types.iter().map(|offset| sofar + *offset).collect::<Vec<_>>();
self.arg_index_map.push(arg_offsets);
sofar += self.arg_unique_types[i].len();
@ -597,7 +597,7 @@ impl<'a, 'b> Context<'a, 'b> {
let arg_idx = match arg_index_consumed.get_mut(i) {
None => 0, // error already emitted elsewhere
Some(offset) => {
let ref idx_map = self.arg_index_map[i];
let idx_map = &self.arg_index_map[i];
// unwrap_or branch: error already emitted elsewhere
let arg_idx = *idx_map.get(*offset).unwrap_or(&0);
*offset += 1;
@ -721,7 +721,7 @@ impl<'a, 'b> Context<'a, 'b> {
let name = names_pos[i];
let span = self.ecx.with_def_site_ctxt(e.span);
pats.push(self.ecx.pat_ident(span, name));
for ref arg_ty in self.arg_unique_types[i].iter() {
for arg_ty in self.arg_unique_types[i].iter() {
locals.push(Context::format_arg(self.ecx, self.macsp, e.span, arg_ty, name));
}
heads.push(self.ecx.expr_addr_of(e.span, e));

View File

@ -57,12 +57,12 @@ impl AllocFnFactory<'_, '_> {
fn allocator_fn(&self, method: &AllocatorMethod) -> Stmt {
let mut abi_args = Vec::new();
let mut i = 0;
let ref mut mk = || {
let mut mk = || {
let name = self.cx.ident_of(&format!("arg{}", i), self.span);
i += 1;
name
};
let args = method.inputs.iter().map(|ty| self.arg_ty(ty, &mut abi_args, mk)).collect();
let args = method.inputs.iter().map(|ty| self.arg_ty(ty, &mut abi_args, &mut mk)).collect();
let result = self.call_allocator(method.name, args);
let (output_ty, output_expr) = self.ret_ty(&method.output, result);
let decl = self.cx.fn_decl(abi_args, ast::FnRetTy::Ty(output_ty));

View File

@ -313,7 +313,7 @@ fn should_fail(i: &ast::Item) -> bool {
fn should_panic(cx: &ExtCtxt<'_>, i: &ast::Item) -> ShouldPanic {
match attr::find_by_name(&i.attrs, sym::should_panic) {
Some(attr) => {
let ref sd = cx.parse_sess.span_diagnostic;
let sd = &cx.parse_sess.span_diagnostic;
match attr.meta_item_list() {
// Handle #[should_panic(expected = "foo")]
@ -378,7 +378,7 @@ fn test_type(cx: &ExtCtxt<'_>) -> TestType {
fn has_test_signature(cx: &ExtCtxt<'_>, i: &ast::Item) -> bool {
let has_should_panic_attr = attr::contains_name(&i.attrs, sym::should_panic);
let ref sd = cx.parse_sess.span_diagnostic;
let sd = &cx.parse_sess.span_diagnostic;
if let ast::ItemKind::Fn(_, ref sig, ref generics, _) = i.kind {
if let ast::Unsafe::Yes(span) = sig.header.unsafety {
sd.struct_span_err(i.span, "unsafe functions cannot be used for tests")

View File

@ -326,7 +326,7 @@ fn mk_main(cx: &mut TestCtxt<'_>) -> P<ast::Item> {
/// &[&test1, &test2]
fn mk_tests_slice(cx: &TestCtxt<'_>, sp: Span) -> P<ast::Expr> {
debug!("building test vector from {} tests", cx.test_cases.len());
let ref ecx = cx.ext_cx;
let ecx = &cx.ext_cx;
ecx.expr_vec_slice(
sp,

View File

@ -60,7 +60,7 @@ impl AsmBuilderMethods<'tcx> for Builder<'a, 'll, 'tcx> {
.chain(ia.inputs.iter().map(|s| s.to_string()))
.chain(ext_constraints)
.chain(clobbers)
.chain(arch_clobbers.iter().map(|s| s.to_string()))
.chain(arch_clobbers.iter().map(|s| (*s).to_string()))
.collect::<Vec<String>>()
.join(",");

View File

@ -725,7 +725,7 @@ pub(crate) unsafe fn codegen(
Err(_) => return 0,
};
if let Err(_) = write!(cursor, "{:#}", demangled) {
if write!(cursor, "{:#}", demangled).is_err() {
// Possible only if provided buffer is not big enough
return 0;
}

View File

@ -174,7 +174,6 @@ pub unsafe fn create_module(
let llvm_data_layout = llvm::LLVMGetDataLayout(llmod);
let llvm_data_layout = str::from_utf8(CStr::from_ptr(llvm_data_layout).to_bytes())
.ok()
.expect("got a non-UTF8 data-layout from LLVM");
// Unfortunately LLVM target specs change over time, and right now we

View File

@ -1257,7 +1257,7 @@ fn start_executing_work<B: ExtraBackendMethods>(
if main_thread_worker_state == MainThreadWorkerState::Idle {
if !queue_full_enough(work_items.len(), running, max_workers) {
// The queue is not full enough, codegen more items:
if let Err(_) = codegen_worker_send.send(Message::CodegenItem) {
if codegen_worker_send.send(Message::CodegenItem).is_err() {
panic!("Could not send Message::CodegenItem to main thread")
}
main_thread_worker_state = MainThreadWorkerState::Codegenning;

View File

@ -1,4 +1,14 @@
Auto traits cannot have methods or associated items.
For more information see the [opt-in builtin traits RFC][RFC 19].
An auto trait was declared with a method or an associated item.
Erroneous code example:
```compile_fail,E0380
unsafe auto trait Trait {
type Output; // error!
}
```
Auto traits cannot have methods or associated items. For more information see
the [opt-in builtin traits RFC][RFC 19].
[RFC 19]: https://github.com/rust-lang/rfcs/blob/master/text/0019-opt-in-builtin-traits.md
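
A compiling counterpart to the erroneous example, as a sketch (the `optin_builtin_traits` feature gate is assumed here for a nightly compiler of this era; it is not part of the diff): the auto trait declaration itself is fine once the associated item is removed.

```rust
#![feature(optin_builtin_traits)]

// No methods or associated items: this auto trait declaration is accepted.
unsafe auto trait Trait {}

fn main() {}
```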

View File

@ -1,5 +1,4 @@
This error occurs when an attempt is made to use a variable after its contents
have been moved elsewhere.
A variable was used after its contents have been moved elsewhere.
Erroneous code example:

View File

@ -1,4 +1,4 @@
This error occurs when an attempt is made to reassign an immutable variable.
An immutable variable was reassigned.
Erroneous code example:

View File

@ -163,7 +163,7 @@ impl CodeSuggestion {
None => buf.push_str(&line[lo..]),
}
}
if let None = hi_opt {
if hi_opt.is_none() {
buf.push('\n');
}
}

View File

@ -27,6 +27,6 @@ impl Registry {
if !self.long_descriptions.contains_key(code) {
return Err(InvalidErrorCode);
}
Ok(self.long_descriptions.get(code).unwrap().clone())
Ok(*self.long_descriptions.get(code).unwrap())
}
}

View File

@ -343,7 +343,8 @@ impl DirtyCleanVisitor<'tcx> {
&format!("clean/dirty auto-assertions not yet defined for {:?}", node),
),
};
let labels = Labels::from_iter(labels.iter().flat_map(|s| s.iter().map(|l| l.to_string())));
let labels =
Labels::from_iter(labels.iter().flat_map(|s| s.iter().map(|l| (*l).to_string())));
(name, labels)
}

View File

@ -150,7 +150,7 @@ impl<'tcx> AutoTraitFinder<'tcx> {
// SelectionContext to return it back to us.
let (new_env, user_env) = match self.evaluate_predicates(
&mut infcx,
&infcx,
trait_did,
ty,
orig_env,

View File

@ -1341,7 +1341,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
stack: &TraitObligationStack<'o, 'tcx>,
) -> Result<SelectionCandidateSet<'tcx>, SelectionError<'tcx>> {
let TraitObligationStack { obligation, .. } = *stack;
let ref obligation = Obligation {
let obligation = &Obligation {
param_env: obligation.param_env,
cause: obligation.cause.clone(),
recursion_depth: obligation.recursion_depth,

View File

@ -426,7 +426,7 @@ pub(crate) fn check_attr_crate_type(attrs: &[ast::Attribute], lint_buffer: &mut
for a in attrs.iter() {
if a.check_name(sym::crate_type) {
if let Some(n) = a.value_str() {
if let Some(_) = categorize_crate_type(n) {
if categorize_crate_type(n).is_some() {
return;
}

View File

@ -335,7 +335,7 @@ impl LintStore {
lint_name.to_string()
};
// If the lint was scoped with `tool::` check if the tool lint exists
if let Some(_) = tool_name {
if tool_name.is_some() {
match self.by_name.get(&complete_name) {
None => match self.lint_groups.get(&*complete_name) {
None => return CheckLintNameResult::Tool(Err((None, String::new()))),
@ -369,7 +369,7 @@ impl LintStore {
return if *silent {
CheckLintNameResult::Ok(&lint_ids)
} else {
CheckLintNameResult::Tool(Err((Some(&lint_ids), name.to_string())))
CheckLintNameResult::Tool(Err((Some(&lint_ids), (*name).to_string())))
};
}
CheckLintNameResult::Ok(&lint_ids)
@ -404,7 +404,7 @@ impl LintStore {
return if *silent {
CheckLintNameResult::Tool(Err((Some(&lint_ids), complete_name)))
} else {
CheckLintNameResult::Tool(Err((Some(&lint_ids), name.to_string())))
CheckLintNameResult::Tool(Err((Some(&lint_ids), (*name).to_string())))
};
}
CheckLintNameResult::Tool(Err((Some(&lint_ids), complete_name)))

View File

@ -1905,7 +1905,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
// expressions evaluate through `as_temp` or `into` a return
// slot or local, so to find all unsized rvalues it is enough
// to check all temps, return slots and locals.
if let None = self.reported_errors.replace((ty, span)) {
if self.reported_errors.replace((ty, span)).is_none() {
let mut diag = struct_span_err!(
self.tcx().sess,
span,

View File

@ -64,7 +64,7 @@ impl TypeRelatingDelegate<'tcx> for NllTypeRelatingDelegate<'_, '_, 'tcx> {
}
fn next_existential_region_var(&mut self, from_forall: bool) -> ty::Region<'tcx> {
if let Some(_) = &mut self.borrowck_context {
if self.borrowck_context.is_some() {
let origin = NLLRegionVariableOrigin::Existential { from_forall };
self.infcx.next_nll_region_var(origin)
} else {

View File

@ -604,8 +604,8 @@ fn write_diff<A: Analysis<'tcx>>(
Ok(())
}
const BR_LEFT: &'static str = r#"<br align="left"/>"#;
const BR_LEFT_SPACE: &'static str = r#"<br align="left"/> "#;
const BR_LEFT: &str = r#"<br align="left"/>"#;
const BR_LEFT_SPACE: &str = r#"<br align="left"/> "#;
/// Line break policy that breaks at 40 characters and starts the next line with a single space.
const LIMIT_30_ALIGN_1: Option<LineBreak> = Some(LineBreak { sequence: BR_LEFT_SPACE, limit: 30 });

View File

@ -22,20 +22,20 @@ pub fn visit_results<F>(
let loc = Location { block, statement_index };
results.reconstruct_before_statement_effect(&mut state, stmt, loc);
vis.visit_statement(&mut state, stmt, loc);
vis.visit_statement(&state, stmt, loc);
results.reconstruct_statement_effect(&mut state, stmt, loc);
vis.visit_statement_exit(&mut state, stmt, loc);
vis.visit_statement_exit(&state, stmt, loc);
}
let loc = body.terminator_loc(block);
let term = block_data.terminator();
results.reconstruct_before_terminator_effect(&mut state, term, loc);
vis.visit_terminator(&mut state, term, loc);
vis.visit_terminator(&state, term, loc);
results.reconstruct_terminator_effect(&mut state, term, loc);
vis.visit_terminator_exit(&mut state, term, loc);
vis.visit_terminator_exit(&state, term, loc);
}
}

View File

@ -16,7 +16,6 @@ use std::borrow::Borrow;
use std::fmt;
use std::io;
use std::path::PathBuf;
use std::usize;
pub use self::at_location::{FlowAtLocation, FlowsAtLocation};
pub(crate) use self::drop_flag_effects::*;

View File

@ -203,7 +203,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
if is_add {
// max unsigned
Scalar::from_uint(
u128::max_value() >> (128 - num_bits),
u128::MAX >> (128 - num_bits),
Size::from_bits(num_bits),
)
} else {
@ -381,11 +381,11 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
dest: PlaceTy<'tcx, M::PointerTag>,
) -> InterpResult<'tcx> {
// Performs an exact division, resulting in undefined behavior where
// `x % y != 0` or `y == 0` or `x == T::min_value() && y == -1`.
// `x % y != 0` or `y == 0` or `x == T::MIN && y == -1`.
// First, check x % y != 0 (or if that computation overflows).
let (res, overflow, _ty) = self.overflowing_binary_op(BinOp::Rem, a, b)?;
if overflow || res.assert_bits(a.layout.size) != 0 {
// Then, check if `b` is -1, which is the "min_value / -1" case.
// Then, check if `b` is -1, which is the "MIN / -1" case.
let minus1 = Scalar::from_int(-1, dest.layout.size);
let b_scalar = b.to_scalar().unwrap();
if b_scalar == minus1 {
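
A plain-Rust sketch of the three conditions this hunk guards against (the `exact_div_ok` helper is hypothetical, without the interpreter's `Scalar` machinery):

```rust
fn exact_div_ok(x: i8, y: i8) -> bool {
    if y == 0 {
        return false; // division by zero
    }
    if x == i8::MIN && y == -1 {
        return false; // the "MIN / -1" case: the quotient 128 does not fit in i8
    }
    x % y == 0 // exact division requires a zero remainder
}

fn main() {
    assert!(exact_div_ok(10, 5));
    assert!(!exact_div_ok(10, 3)); // remainder 1
    assert!(!exact_div_ok(1, 0));
    assert!(!exact_div_ok(i8::MIN, -1)); // -128 / -1 == 128 overflows
}
```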

View File

@ -565,7 +565,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
// # Function pointers
// (both global from `alloc_map` and local from `extra_fn_ptr_map`)
if let Some(_) = self.get_fn_alloc(id) {
if self.get_fn_alloc(id).is_some() {
return if let AllocCheck::Dereferenceable = liveness {
// The caller requested no function pointers.
throw_unsup!(DerefFunctionPointer)

View File

@ -311,9 +311,8 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
// taking into account the `spread_arg`. If we could write
// this is a single iterator (that handles `spread_arg`), then
// `pass_argument` would be the loop body. It takes care to
// not advance `caller_iter` for ZSTs.
let mut locals_iter = body.args_iter();
while let Some(local) = locals_iter.next() {
// not advance `caller_iter` for ZSTs
for local in body.args_iter() {
let dest = self.eval_place(&mir::Place::from(local))?;
if Some(local) == body.spread_arg {
// Must be a tuple

View File

@ -463,7 +463,7 @@ impl<'rt, 'mir, 'tcx, M: Machine<'mir, 'tcx>> ValidityVisitor<'rt, 'mir, 'tcx, M
let (lo, hi) = valid_range.clone().into_inner();
// Determine the allowed range
// `max_hi` is as big as the size fits
let max_hi = u128::max_value() >> (128 - op.layout.size.bits());
let max_hi = u128::MAX >> (128 - op.layout.size.bits());
assert!(hi <= max_hi);
// We could also write `(hi + 1) % (max_hi + 1) == lo` but `max_hi + 1` overflows for `u128`
if (lo == 0 && hi == max_hi) || (hi + 1 == lo) {
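
A standalone sketch of the range test above (hypothetical `covers_everything` helper): when the valid range, possibly wrapping around, admits every bit pattern of the scalar's size, there is nothing left to check.

```rust
fn covers_everything(lo: u128, hi: u128, size_bits: u32) -> bool {
    // `max_hi` is as big as the size fits.
    let max_hi = u128::MAX >> (128 - size_bits);
    assert!(hi <= max_hi);
    // Same shape as the hunk above; `(hi + 1) % (max_hi + 1) == lo` would be
    // equivalent, but `max_hi + 1` overflows for 128-bit sizes.
    (lo == 0 && hi == max_hi) || (hi + 1 == lo)
}

fn main() {
    assert!(covers_everything(0, 255, 8));   // the full u8 range
    assert!(!covers_everything(1, 255, 8));  // excludes 0, so it must be checked
    assert!(covers_everything(10, 9, 8));    // wrapping range 10..=255, 0..=9
}
```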

View File

@ -920,7 +920,7 @@ impl<'a, 'tcx> Promoter<'a, 'tcx> {
let (blocks, local_decls) = self.source.basic_blocks_and_local_decls_mut();
match candidate {
Candidate::Ref(loc) => {
let ref mut statement = blocks[loc.block].statements[loc.statement_index];
let statement = &mut blocks[loc.block].statements[loc.statement_index];
match statement.kind {
StatementKind::Assign(box (
_,
@ -971,7 +971,7 @@ impl<'a, 'tcx> Promoter<'a, 'tcx> {
}
}
Candidate::Repeat(loc) => {
let ref mut statement = blocks[loc.block].statements[loc.statement_index];
let statement = &mut blocks[loc.block].statements[loc.statement_index];
match statement.kind {
StatementKind::Assign(box (_, Rvalue::Repeat(ref mut operand, _))) => {
let ty = operand.ty(local_decls, self.tcx);

View File

@ -2331,7 +2331,7 @@ fn specialize_one_pattern<'p, 'tcx>(
PatKind::Binding { .. } | PatKind::Wild => Some(ctor_wild_subpatterns.iter().collect()),
PatKind::Variant { adt_def, variant_index, ref subpatterns, .. } => {
let ref variant = adt_def.variants[variant_index];
let variant = &adt_def.variants[variant_index];
let is_non_exhaustive = cx.is_foreign_non_exhaustive_variant(pat.ty, variant);
Some(Variant(variant.def_id))
.filter(|variant_constructor| variant_constructor == constructor)

View File

@ -37,7 +37,7 @@ impl<'a> Parser<'a> {
let inner_parse_policy = InnerAttributeParsePolicy::NotPermitted {
reason: inner_error_reason,
saw_doc_comment: just_parsed_doc_comment,
prev_attr_sp: attrs.last().and_then(|a| Some(a.span)),
prev_attr_sp: attrs.last().map(|a| a.span),
};
let attr = self.parse_attribute_with_inner_parse_policy(inner_parse_policy)?;
attrs.push(attr);

View File

@ -19,7 +19,7 @@ use rustc_span::{MultiSpan, Span, SpanSnippetError, DUMMY_SP};
use log::{debug, trace};
use std::mem;
const TURBOFISH: &'static str = "use `::<...>` instead of `<...>` to specify type arguments";
const TURBOFISH: &str = "use `::<...>` instead of `<...>` to specify type arguments";
/// Creates a placeholder argument.
pub(super) fn dummy_arg(ident: Ident) -> Param {

View File

@ -196,7 +196,7 @@ fn no_main_err(tcx: TyCtxt<'_>, visitor: &EntryContext<'_, '_>) {
// The file may be empty, which leads to the diagnostic machinery not emitting this
// note. This is a relatively simple way to detect that case and emit a span-less
// note instead.
if let Ok(_) = tcx.sess.source_map().lookup_line(sp.lo()) {
if tcx.sess.source_map().lookup_line(sp.lo()).is_ok() {
err.set_span(sp);
err.span_label(sp, &note);
} else {

View File

@ -1432,7 +1432,7 @@ impl<'a, 'b> ImportResolver<'a, 'b> {
}
msg
}
ref s @ _ => bug!("unexpected import subclass {:?}", s),
ref s => bug!("unexpected import subclass {:?}", s),
};
let mut err = this.session.struct_span_err(binding.span, &msg);

View File

@ -1086,7 +1086,7 @@ impl<'tcx> LifetimeContext<'_, 'tcx> {
for param in params {
if let Ok(snippet) = self.tcx.sess.source_map().span_to_snippet(param.span)
{
if snippet.starts_with("&") && !snippet.starts_with("&'") {
if snippet.starts_with('&') && !snippet.starts_with("&'") {
introduce_suggestion
.push((param.span, format!("&'a {}", &snippet[1..])));
} else if snippet.starts_with("&'_ ") {
@ -1118,7 +1118,7 @@ impl<'tcx> LifetimeContext<'_, 'tcx> {
(1, Some(name), Some("'_")) => {
suggest_existing(err, name.to_string());
}
(1, Some(name), Some(snippet)) if !snippet.ends_with(">") => {
(1, Some(name), Some(snippet)) if !snippet.ends_with('>') => {
suggest_existing(err, format!("{}<{}>", snippet, name));
}
(0, _, Some("&")) => {
@ -1127,7 +1127,7 @@ impl<'tcx> LifetimeContext<'_, 'tcx> {
(0, _, Some("'_")) => {
suggest_new(err, "'a");
}
(0, _, Some(snippet)) if !snippet.ends_with(">") => {
(0, _, Some(snippet)) if !snippet.ends_with('>') => {
suggest_new(err, &format!("{}<'a>", snippet));
}
_ => {

View File

@ -1550,21 +1550,18 @@ impl<'a, 'tcx> ProbeContext<'a, 'tcx> {
let method_names = pcx.candidate_method_names();
pcx.allow_similar_names = false;
let applicable_close_candidates: Vec<ty::AssocItem> =
method_names
.iter()
.filter_map(|&method_name| {
pcx.reset();
pcx.method_name = Some(method_name);
pcx.assemble_inherent_candidates();
pcx.assemble_extension_candidates_for_traits_in_scope(hir::DUMMY_HIR_ID)
.map_or(None, |_| {
pcx.pick_core()
.and_then(|pick| pick.ok())
.and_then(|pick| Some(pick.item))
})
})
.collect();
let applicable_close_candidates: Vec<ty::AssocItem> = method_names
.iter()
.filter_map(|&method_name| {
pcx.reset();
pcx.method_name = Some(method_name);
pcx.assemble_inherent_candidates();
pcx.assemble_extension_candidates_for_traits_in_scope(hir::DUMMY_HIR_ID)
.map_or(None, |_| {
pcx.pick_core().and_then(|pick| pick.ok()).map(|pick| pick.item)
})
})
.collect();
if applicable_close_candidates.is_empty() {
Ok(None)

View File

@ -737,8 +737,8 @@ impl ItemLikeVisitor<'tcx> for CheckItemTypesVisitor<'tcx> {
}
pub fn check_wf_new(tcx: TyCtxt<'_>) {
let mut visit = wfcheck::CheckTypeWellFormedVisitor::new(tcx);
tcx.hir().krate().par_visit_all_item_likes(&mut visit);
let visit = wfcheck::CheckTypeWellFormedVisitor::new(tcx);
tcx.hir().krate().par_visit_all_item_likes(&visit);
}
fn check_mod_item_types(tcx: TyCtxt<'_>, module_def_id: DefId) {

View File

@ -151,7 +151,7 @@ crate fn placeholder_type_error(
.unwrap_or(&"ParamName");
let mut sugg: Vec<_> =
placeholder_types.iter().map(|sp| (*sp, type_name.to_string())).collect();
placeholder_types.iter().map(|sp| (*sp, (*type_name).to_string())).collect();
if generics.is_empty() {
sugg.push((span, format!("<{}>", type_name)));
} else if let Some(arg) = generics.iter().find(|arg| match arg.name {
@ -160,7 +160,7 @@ crate fn placeholder_type_error(
}) {
// Account for `_` already present in cases like `struct S<_>(_);` and suggest
// `struct S<T>(T);` instead of `struct S<_, T>(T);`.
sugg.push((arg.span, type_name.to_string()));
sugg.push((arg.span, (*type_name).to_string()));
} else {
sugg.push((
generics.iter().last().unwrap().span.shrink_to_hi(),

View File

@ -50,7 +50,7 @@ pub use self::types::Type::*;
pub use self::types::Visibility::{Inherited, Public};
pub use self::types::*;
const FN_OUTPUT_NAME: &'static str = "Output";
const FN_OUTPUT_NAME: &str = "Output";
pub trait Clean<T> {
fn clean(&self, cx: &DocContext<'_>) -> T;

View File

@ -121,9 +121,7 @@ pub fn external_generic_args(
let args: Vec<_> = substs
.iter()
.filter_map(|kind| match kind.unpack() {
GenericArgKind::Lifetime(lt) => {
lt.clean(cx).and_then(|lt| Some(GenericArg::Lifetime(lt)))
}
GenericArgKind::Lifetime(lt) => lt.clean(cx).map(|lt| GenericArg::Lifetime(lt)),
GenericArgKind::Type(_) if skip_self => {
skip_self = false;
None

View File

@ -90,14 +90,14 @@ impl DocFS {
let sender = self.errors.sender.clone().unwrap();
rayon::spawn(move || match fs::write(&path, &contents) {
Ok(_) => {
sender
.send(None)
.expect(&format!("failed to send error on \"{}\"", path.display()));
sender.send(None).unwrap_or_else(|_| {
panic!("failed to send error on \"{}\"", path.display())
});
}
Err(e) => {
sender
.send(Some(format!("\"{}\": {}", path.display(), e)))
.expect(&format!("failed to send non-error on \"{}\"", path.display()));
sender.send(Some(format!("\"{}\": {}", path.display(), e))).unwrap_or_else(
|_| panic!("failed to send non-error on \"{}\"", path.display()),
);
}
});
Ok(())

View File

@ -62,7 +62,7 @@ impl<'a> From<&'a clean::Item> for ItemType {
fn from(item: &'a clean::Item) -> ItemType {
let inner = match item.inner {
clean::StrippedItem(box ref item) => item,
ref inner @ _ => inner,
ref inner => inner,
};
match *inner {
@ -194,7 +194,7 @@ impl fmt::Display for ItemType {
}
}
pub const NAMESPACE_TYPE: &'static str = "t";
pub const NAMESPACE_VALUE: &'static str = "v";
pub const NAMESPACE_MACRO: &'static str = "m";
pub const NAMESPACE_KEYWORD: &'static str = "k";
pub const NAMESPACE_TYPE: &str = "t";
pub const NAMESPACE_VALUE: &str = "v";
pub const NAMESPACE_MACRO: &str = "m";
pub const NAMESPACE_KEYWORD: &str = "k";

View File

@ -296,7 +296,7 @@ impl<'a, I: Iterator<Item = Event<'a>>> Iterator for CodeBlocks<'_, 'a, I> {
""
}
)),
playground_button.as_ref().map(String::as_str),
playground_button.as_deref(),
Some((s1.as_str(), s2)),
));
Some(Event::Html(s.into()))
@ -315,7 +315,7 @@ impl<'a, I: Iterator<Item = Event<'a>>> Iterator for CodeBlocks<'_, 'a, I> {
""
}
)),
playground_button.as_ref().map(String::as_str),
playground_button.as_deref(),
None,
));
Some(Event::Html(s.into()))
@ -869,12 +869,8 @@ pub fn plain_summary_line(md: &str) -> String {
}
}
let mut s = String::with_capacity(md.len() * 3 / 2);
let mut p = ParserWrapper { inner: Parser::new(md), is_in: 0, is_first: true };
while let Some(t) = p.next() {
if !t.is_empty() {
s.push_str(&t);
}
}
let p = ParserWrapper { inner: Parser::new(md), is_in: 0, is_first: true };
p.into_iter().filter(|t| !t.is_empty()).for_each(|i| s.push_str(&i));
s
}

View File

@ -2727,7 +2727,7 @@ fn naive_assoc_href(it: &clean::Item, link: AssocItemLink<'_>) -> String {
let name = it.name.as_ref().unwrap();
let ty = match it.type_() {
Typedef | AssocType => AssocType,
s @ _ => s,
s => s,
};
let anchor = format!("#{}.{}", ty, name);
@ -3150,7 +3150,7 @@ fn render_attribute(attr: &ast::MetaItem) -> Option<String> {
}
}
const ATTRIBUTE_WHITELIST: &'static [Symbol] = &[
const ATTRIBUTE_WHITELIST: &[Symbol] = &[
sym::export_name,
sym::lang,
sym::link_section,
@ -4610,7 +4610,7 @@ fn item_keyword(w: &mut Buffer, cx: &Context, it: &clean::Item) {
document(w, cx, it)
}
crate const BASIC_KEYWORDS: &'static str = "rust, rustlang, rust-lang";
crate const BASIC_KEYWORDS: &str = "rust, rustlang, rust-lang";
fn make_item_keywords(it: &clean::Item) -> String {
format!("{}, {}", BASIC_KEYWORDS, it.name.as_ref().unwrap())

View File

@ -185,7 +185,7 @@ macro_rules! eprintln {
/// builds or when debugging in release mode is significantly faster.
///
/// Note that the macro is intended as a debugging tool and therefore you
/// should avoid having uses of it in version control for longer periods.
/// should avoid having uses of it in version control for long periods.
/// Use cases involving debug output that should be added to version control
/// are better served by macros such as [`debug!`] from the [`log`] crate.
///

View File

@ -19,9 +19,9 @@ cfg_if::cfg_if! {
if #[cfg(target_os = "fuchsia")] {
// fuchsia doesn't have /dev/null
} else if #[cfg(target_os = "redox")] {
const DEV_NULL: &'static str = "null:\0";
const DEV_NULL: &str = "null:\0";
} else {
const DEV_NULL: &'static str = "/dev/null\0";
const DEV_NULL: &str = "/dev/null\0";
}
}

View File

@ -96,7 +96,7 @@ use time::TestExecTime;
// Process exit code to be used to indicate test failures.
const ERROR_EXIT_CODE: i32 = 101;
const SECONDARY_TEST_INVOKER_VAR: &'static str = "__RUST_TEST_INVOKE";
const SECONDARY_TEST_INVOKER_VAR: &str = "__RUST_TEST_INVOKE";
// The default console test runner. It accepts the command line
// arguments and a vector of test_descs.
@ -158,7 +158,7 @@ pub fn test_main_static_abort(tests: &[&TestDescAndFn]) {
.filter(|test| test.desc.name.as_slice() == name)
.map(make_owned_test)
.next()
.expect(&format!("couldn't find a test with the provided name '{}'", name));
.unwrap_or_else(|| panic!("couldn't find a test with the provided name '{}'", name));
let TestDescAndFn { desc, testfn } = test;
let testfn = match testfn {
StaticTestFn(f) => f,