Rollup merge of #62131 - Xanewok:clip-some-nits, r=petrochenkov

libsyntax: Fix some Clippy warnings

While I was working on the RLS earlier, a lot of these warnings popped up, so I figured I'd send a small patch fixing only the (hopefully) uncontroversial ones.
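For context, here is a minimal sketch (hypothetical `Id` and `Point` types, not taken from this diff) of the two lint families that make up most of the patch, `clippy::redundant_field_names` and `clippy::redundant_closure`:

```rust
// A newtype like the `ObligationTreeId` in the diff below.
struct Id(usize);

struct Point { x: i32, y: i32 }

fn examples(x: i32, y: i32) -> Vec<Id> {
    // clippy::redundant_field_names: `x: x` shortens to `x`.
    let _p = Point { x, y }; // was: Point { x: x, y: y }

    // clippy::redundant_closure: a closure that only calls a function
    // (here a tuple-struct constructor) can be replaced by the function itself.
    (0..3).map(Id).collect() // was: (0..3).map(|i| Id(i))
}
```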

Other lints that could also be fixed include [`clippy::print_with_newline`](https://rust-lang.github.io/rust-clippy/master/index.html#print_with_newline) and [`clippy::cast_lossless`](https://rust-lang.github.io/rust-clippy/master/index.html#cast_lossless). Should I add them as well?
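For reference, a minimal sketch (again hypothetical code, not from this patch) of what those two lints flag:

```rust
fn deferred_lints(byte: u8) -> f64 {
    // clippy::print_with_newline: a literal trailing `\n` in `print!`
    // should become `println!` (same spirit as the `writeln!` fixes below).
    print!("hello\n"); // lint suggests: println!("hello");

    // clippy::cast_lossless: a widening `as` cast can use `from`,
    // which makes it explicit that no data is lost.
    byte as f64 // lint suggests: f64::from(byte)
}
```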

Since most of it touches libsyntax...
r? @petrochenkov
Mazdak Farrokhzad 2019-06-27 23:01:09 +02:00 committed by GitHub
commit 4aa3e27231
21 changed files with 49 additions and 49 deletions

View File

@@ -39,8 +39,8 @@ impl Fingerprint {
// you want.
#[inline]
pub fn combine_commutative(self, other: Fingerprint) -> Fingerprint {
-let a = (self.1 as u128) << 64 | self.0 as u128;
-let b = (other.1 as u128) << 64 | other.0 as u128;
+let a = u128::from(self.1) << 64 | u128::from(self.0);
+let b = u128::from(other.1) << 64 | u128::from(other.0);
let c = a.wrapping_add(b);

View File

@@ -263,7 +263,7 @@ impl<O: ForestObligation> ObligationForest<O> {
done_cache: Default::default(),
waiting_cache: Default::default(),
scratch: Some(vec![]),
-obligation_tree_id_generator: (0..).map(|i| ObligationTreeId(i)),
+obligation_tree_id_generator: (0..).map(ObligationTreeId),
error_cache: Default::default(),
}
}

View File

@@ -70,15 +70,15 @@ unsafe fn u8to64_le(buf: &[u8], start: usize, len: usize) -> u64 {
let mut i = 0; // current byte index (from LSB) in the output u64
let mut out = 0;
if i + 3 < len {
-out = load_int_le!(buf, start + i, u32) as u64;
+out = u64::from(load_int_le!(buf, start + i, u32));
i += 4;
}
if i + 1 < len {
-out |= (load_int_le!(buf, start + i, u16) as u64) << (i * 8);
+out |= u64::from(load_int_le!(buf, start + i, u16)) << (i * 8);
i += 2
}
if i < len {
-out |= (*buf.get_unchecked(start + i) as u64) << (i * 8);
+out |= u64::from(*buf.get_unchecked(start + i)) << (i * 8);
i += 1;
}
debug_assert_eq!(i, len);
@@ -237,7 +237,7 @@ impl Hasher for SipHasher128 {
if self.ntail != 0 {
needed = 8 - self.ntail;
-self.tail |= unsafe { u8to64_le(msg, 0, cmp::min(length, needed)) } << 8 * self.ntail;
+self.tail |= unsafe { u8to64_le(msg, 0, cmp::min(length, needed)) } << (8 * self.ntail);
if length < needed {
self.ntail += length;
return

View File

@@ -44,7 +44,7 @@ impl<W: StableHasherResult> StableHasher<W> {
impl StableHasherResult for u128 {
fn finish(hasher: StableHasher<Self>) -> Self {
let (_0, _1) = hasher.finalize();
-(_0 as u128) | ((_1 as u128) << 64)
+u128::from(_0) | (u128::from(_1) << 64)
}
}

View File

@@ -8,7 +8,7 @@ where
Ls: Links,
{
VecLinkedListIterator {
-links: links,
+links,
current: first,
}
}

View File

@@ -94,7 +94,7 @@ impl<'a> DiagnosticConverter<'a> {
annotation_type: Self::annotation_type_for_level(self.level),
}),
footer: vec![],
-slices: slices,
+slices,
})
} else {
// FIXME(#59346): Is it ok to return None if there's no source_map?

View File

@@ -388,7 +388,7 @@ impl Diagnostic {
}],
msg: msg.to_owned(),
style: SuggestionStyle::CompletelyHidden,
-applicability: applicability,
+applicability,
});
self
}

View File

@@ -1339,7 +1339,7 @@ impl EmitterWriter {
}
let mut dst = self.dst.writable();
-match write!(dst, "\n") {
+match writeln!(dst) {
Err(e) => panic!("failed to emit error: {}", e),
_ => {
match dst.flush() {
@@ -1598,7 +1598,7 @@ fn emit_to_destination(rendered_buffer: &[Vec<StyledString>],
dst.reset()?;
}
if !short_message && (!lvl.is_failure_note() || pos != rendered_buffer.len() - 1) {
-write!(dst, "\n")?;
+writeln!(dst)?;
}
}
dst.flush()?;

View File

@@ -19,7 +19,7 @@ pub fn opts() -> TargetOptions {
is_like_fuchsia: true,
linker_is_gnu: true,
has_rpath: false,
-pre_link_args: pre_link_args,
+pre_link_args,
pre_link_objects_exe: vec![
"Scrt1.o".to_string()
],

View File

@@ -461,7 +461,7 @@ impl<'a> Encoder<'a> {
/// Creates a new JSON encoder whose output will be written to the writer
/// specified.
pub fn new(writer: &'a mut dyn fmt::Write) -> Encoder<'a> {
-Encoder { writer: writer, is_emitting_map_key: false, }
+Encoder { writer, is_emitting_map_key: false, }
}
}
@@ -513,7 +513,7 @@ impl<'a> crate::Encoder for Encoder<'a> {
emit_enquoted_if_mapkey!(self, fmt_number_or_null(v))
}
fn emit_f32(&mut self, v: f32) -> EncodeResult {
-self.emit_f64(v as f64)
+self.emit_f64(f64::from(v))
}
fn emit_char(&mut self, v: char) -> EncodeResult {
@@ -763,7 +763,7 @@ impl<'a> crate::Encoder for PrettyEncoder<'a> {
emit_enquoted_if_mapkey!(self, fmt_number_or_null(v))
}
fn emit_f32(&mut self, v: f32) -> EncodeResult {
-self.emit_f64(v as f64)
+self.emit_f64(f64::from(v))
}
fn emit_char(&mut self, v: char) -> EncodeResult {
@@ -1698,12 +1698,12 @@ impl<T: Iterator<Item=char>> Parser<T> {
if n2 < 0xDC00 || n2 > 0xDFFF {
return self.error(LoneLeadingSurrogateInHexEscape)
}
-let c = (((n1 - 0xD800) as u32) << 10 |
-(n2 - 0xDC00) as u32) + 0x1_0000;
+let c = (u32::from(n1 - 0xD800) << 10 |
+u32::from(n2 - 0xDC00)) + 0x1_0000;
res.push(char::from_u32(c).unwrap());
}
-n => match char::from_u32(n as u32) {
+n => match char::from_u32(u32::from(n)) {
Some(c) => res.push(c),
None => return self.error(InvalidUnicodeCodePoint),
},
@@ -2405,7 +2405,7 @@ impl ToJson for Json {
}
impl ToJson for f32 {
-fn to_json(&self) -> Json { (*self as f64).to_json() }
+fn to_json(&self) -> Json { f64::from(*self).to_json() }
}
impl ToJson for f64 {

View File

@@ -123,7 +123,7 @@ pub fn read_signed_leb128(data: &[u8], start_position: usize) -> (i128, usize) {
loop {
byte = data[position];
position += 1;
-result |= ((byte & 0x7F) as i128) << shift;
+result |= i128::from(byte & 0x7F) << shift;
shift += 7;
if (byte & 0x80) == 0 {

View File

@@ -296,13 +296,13 @@ impl<'a> serialize::Decoder for Decoder<'a> {
#[inline]
fn read_f64(&mut self) -> Result<f64, Self::Error> {
let bits = self.read_u64()?;
-Ok(unsafe { ::std::mem::transmute(bits) })
+Ok(f64::from_bits(bits))
}
#[inline]
fn read_f32(&mut self) -> Result<f32, Self::Error> {
let bits = self.read_u32()?;
-Ok(unsafe { ::std::mem::transmute(bits) })
+Ok(f32::from_bits(bits))
}
#[inline]

View File

@@ -1832,7 +1832,7 @@ impl Arg {
lt,
MutTy {
ty: infer_ty,
-mutbl: mutbl,
+mutbl,
},
),
span,
@@ -2120,7 +2120,7 @@ impl PolyTraitRef {
PolyTraitRef {
bound_generic_params: generic_params,
trait_ref: TraitRef {
-path: path,
+path,
ref_id: DUMMY_NODE_ID,
},
span,

View File

@@ -815,7 +815,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> {
fn pat(&self, span: Span, pat: PatKind) -> P<ast::Pat> {
-P(ast::Pat { id: ast::DUMMY_NODE_ID, node: pat, span: span })
+P(ast::Pat { id: ast::DUMMY_NODE_ID, node: pat, span })
}
fn pat_wild(&self, span: Span) -> P<ast::Pat> {
self.pat(span, PatKind::Wild)

View File

@@ -231,7 +231,7 @@ pub struct MacroExpander<'a, 'b> {
impl<'a, 'b> MacroExpander<'a, 'b> {
pub fn new(cx: &'a mut ExtCtxt<'b>, monotonic: bool) -> Self {
-MacroExpander { cx: cx, monotonic: monotonic }
+MacroExpander { cx, monotonic }
}
pub fn expand_crate(&mut self, mut krate: ast::Crate) -> ast::Crate {
@@ -377,7 +377,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
_ => item.clone(),
};
invocations.push(Invocation {
-kind: InvocationKind::Derive { path: path.clone(), item: item },
+kind: InvocationKind::Derive { path: path.clone(), item },
fragment_kind: invoc.fragment_kind,
expansion_data: ExpansionData {
mark,
@@ -944,7 +944,7 @@ impl<'a, 'b> InvocationCollector<'a, 'b> {
}
fn collect_bang(&mut self, mac: ast::Mac, span: Span, kind: AstFragmentKind) -> AstFragment {
-self.collect(kind, InvocationKind::Bang { mac: mac, ident: None, span: span })
+self.collect(kind, InvocationKind::Bang { mac, ident: None, span })
}
fn collect_attr(&mut self,

View File

@@ -319,7 +319,7 @@ fn parse_tree(
tokenstream::TokenTree::Delimited(span, delim, tts) => TokenTree::Delimited(
span,
Lrc::new(Delimited {
-delim: delim,
+delim,
tts: parse(
tts.into(),
expect_matchers,

View File

@@ -23,8 +23,8 @@ enum Frame {
impl Frame {
/// Construct a new frame around the delimited set of tokens.
fn new(tts: Vec<quoted::TokenTree>) -> Frame {
-let forest = Lrc::new(quoted::Delimited { delim: token::NoDelim, tts: tts });
-Frame::Delimited { forest: forest, idx: 0, span: DelimSpan::dummy() }
+let forest = Lrc::new(quoted::Delimited { delim: token::NoDelim, tts });
+Frame::Delimited { forest, idx: 0, span: DelimSpan::dummy() }
}
}
@@ -248,7 +248,7 @@ pub fn transcribe(
// the previous results (from outside the Delimited).
quoted::TokenTree::Delimited(mut span, delimited) => {
span = span.apply_mark(cx.current_expansion.mark);
-stack.push(Frame::Delimited { forest: delimited, idx: 0, span: span });
+stack.push(Frame::Delimited { forest: delimited, idx: 0, span });
result_stack.push(mem::replace(&mut result, Vec::new()));
}

View File

@@ -1665,7 +1665,7 @@ impl<'a> Context<'a> {
}
pub fn check_attribute(attr: &ast::Attribute, parse_sess: &ParseSess, features: &Features) {
-let cx = Context { features: features, parse_sess: parse_sess, plugin_attributes: &[] };
+let cx = Context { features, parse_sess, plugin_attributes: &[] };
cx.check_attribute(
attr,
attr.ident().and_then(|ident| BUILTIN_ATTRIBUTE_MAP.get(&ident.name).map(|a| *a)),

View File

@@ -290,10 +290,10 @@ crate enum LastToken {
}
impl TokenCursorFrame {
-fn new(sp: DelimSpan, delim: DelimToken, tts: &TokenStream) -> Self {
+fn new(span: DelimSpan, delim: DelimToken, tts: &TokenStream) -> Self {
TokenCursorFrame {
-delim: delim,
-span: sp,
+delim,
+span,
open_delim: delim == token::NoDelim,
tree_cursor: tts.clone().into_trees(),
close_delim: delim == token::NoDelim,
@@ -1449,7 +1449,7 @@ impl<'a> Parser<'a> {
let opt_lifetime = if self.check_lifetime() { Some(self.expect_lifetime()) } else { None };
let mutbl = self.parse_mutability();
let ty = self.parse_ty_no_plus()?;
-return Ok(TyKind::Rptr(opt_lifetime, MutTy { ty: ty, mutbl: mutbl }));
+return Ok(TyKind::Rptr(opt_lifetime, MutTy { ty, mutbl }));
}
fn parse_ptr(&mut self) -> PResult<'a, MutTy> {
@@ -1467,7 +1467,7 @@ impl<'a> Parser<'a> {
Mutability::Immutable
};
let t = self.parse_ty_no_plus()?;
-Ok(MutTy { ty: t, mutbl: mutbl })
+Ok(MutTy { ty: t, mutbl })
}
fn is_named_argument(&self) -> bool {
@@ -4366,7 +4366,7 @@ impl<'a> Parser<'a> {
self.report_invalid_macro_expansion_item();
}
-(ident, ast::MacroDef { tokens: tokens, legacy: true })
+(ident, ast::MacroDef { tokens, legacy: true })
}
_ => return Ok(None),
};
@@ -6789,12 +6789,12 @@ impl<'a> Parser<'a> {
let hi = self.token.span;
self.expect(&token::Semi)?;
Ok(ast::ForeignItem {
-ident: ident,
-attrs: attrs,
+ident,
+attrs,
node: ForeignItemKind::Ty,
id: ast::DUMMY_NODE_ID,
span: lo.to(hi),
-vis: vis
+vis
})
}

View File

@@ -497,7 +497,7 @@ impl<'a> Printer<'a> {
pub fn print_newline(&mut self, amount: isize) -> io::Result<()> {
debug!("NEWLINE {}", amount);
-let ret = write!(self.out, "\n");
+let ret = writeln!(self.out);
self.pending_indentation = 0;
self.indent(amount);
ret

View File

@@ -150,7 +150,7 @@ impl SourceMap {
-> SourceMap {
SourceMap {
files: Default::default(),
-file_loader: file_loader,
+file_loader,
path_mapping,
}
}
@@ -396,7 +396,7 @@ impl SourceMap {
let f = (*self.files.borrow().source_files)[idx].clone();
match f.lookup_line(pos) {
-Some(line) => Ok(SourceFileAndLine { sf: f, line: line }),
+Some(line) => Ok(SourceFileAndLine { sf: f, line }),
None => Err(f)
}
}
@@ -511,7 +511,7 @@ impl SourceMap {
start_col,
end_col: hi.col });
-Ok(FileLines {file: lo.file, lines: lines})
+Ok(FileLines {file: lo.file, lines})
}
/// Extracts the source surrounding the given `Span` using the `extract_source` function. The
@@ -820,7 +820,7 @@ impl SourceMap {
let idx = self.lookup_source_file_idx(bpos);
let sf = (*self.files.borrow().source_files)[idx].clone();
let offset = bpos - sf.start_pos;
-SourceFileAndBytePos {sf: sf, pos: offset}
+SourceFileAndBytePos {sf, pos: offset}
}
/// Converts an absolute BytePos to a CharPos relative to the source_file.