don't use .into() to convert types into identical types.

example:
    let s: String = format!("hello").into();
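
For illustration, a slightly expanded before/after sketch of the pattern this commit removes (the variable name is made up for the example):

    // Redundant: `format!` already returns a `String`, so `.into()` converts the
    // value to its own type and does nothing.
    let greeting: String = format!("hello").into();

    // Preferred: use the value directly.
    let greeting: String = format!("hello");
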
Matthias Krüger 2020-02-25 18:10:34 +01:00
parent 49c68bd53f
commit 7be94a8a95
32 changed files with 42 additions and 62 deletions

@@ -472,7 +472,7 @@ impl<'tcx, Tag: Copy, Extra: AllocationExtra<Tag>> Allocation<Tag, Extra> {
val: ScalarMaybeUndef<Tag>,
) -> InterpResult<'tcx> {
let ptr_size = cx.data_layout().pointer_size;
- self.write_scalar(cx, ptr.into(), val, ptr_size)
+ self.write_scalar(cx, ptr, val, ptr_size)
}
}

@@ -1519,7 +1519,7 @@ impl<'tcx> TerminatorKind<'tcx> {
values
.iter()
.map(|&u| {
- ty::Const::from_scalar(tcx, Scalar::from_uint(u, size).into(), switch_ty)
+ ty::Const::from_scalar(tcx, Scalar::from_uint(u, size), switch_ty)
.to_string()
.into()
})

@@ -156,7 +156,7 @@ impl<'tcx> Rvalue<'tcx> {
}
Rvalue::AddressOf(mutability, ref place) => {
let place_ty = place.ty(local_decls, tcx).ty;
- tcx.mk_ptr(ty::TypeAndMut { ty: place_ty, mutbl: mutability.into() })
+ tcx.mk_ptr(ty::TypeAndMut { ty: place_ty, mutbl: mutability })
}
Rvalue::Len(..) => tcx.types.usize,
Rvalue::Cast(.., ty) => ty,

@@ -820,8 +820,7 @@ impl ObjectSafetyViolation {
MethodViolationCode::UndispatchableReceiver,
span,
) => (
- format!("consider changing method `{}`'s `self` parameter to be `&self`", name)
- .into(),
+ format!("consider changing method `{}`'s `self` parameter to be `&self`", name),
Some(("&Self".to_string(), span)),
),
ObjectSafetyViolation::AssocConst(name, _)

@@ -831,8 +831,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
.last()
.cloned()
.map(|id| Ok(self.lower_node_id(id)))
- .unwrap_or(Err(hir::LoopIdError::OutsideLoopScope))
- .into(),
+ .unwrap_or(Err(hir::LoopIdError::OutsideLoopScope)),
};
hir::Destination { label: destination.map(|(_, label)| label), target_id }
}
@@ -841,7 +840,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
if self.is_in_loop_condition && opt_label.is_none() {
hir::Destination {
label: None,
- target_id: Err(hir::LoopIdError::UnlabeledCfInWhileCondition).into(),
+ target_id: Err(hir::LoopIdError::UnlabeledCfInWhileCondition),
}
} else {
self.lower_loop_destination(opt_label.map(|label| (id, label)))
@@ -912,7 +911,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
.collect(),
asm: asm.asm,
asm_str_style: asm.asm_str_style,
- clobbers: asm.clobbers.clone().into(),
+ clobbers: asm.clobbers.clone(),
volatile: asm.volatile,
alignstack: asm.alignstack,
dialect: asm.dialect,

@@ -92,8 +92,7 @@ impl<'a, 'tcx, V: CodegenObject> OperandRef<'tcx, V> {
let a = Scalar::from(Pointer::new(
bx.tcx().alloc_map.lock().create_memory_alloc(data),
Size::from_bytes(start as u64),
- ))
- .into();
+ ));
let a_llval = bx.scalar_to_backend(
a,
a_scalar,

@@ -387,7 +387,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
mir::Rvalue::AddressOf(mutability, ref place) => {
let mk_ptr = move |tcx: TyCtxt<'tcx>, ty: Ty<'tcx>| {
- tcx.mk_ptr(ty::TypeAndMut { ty, mutbl: mutability.into() })
+ tcx.mk_ptr(ty::TypeAndMut { ty, mutbl: mutability })
};
self.codegen_place_to_pointer(bx, place, mk_ptr)
}

@@ -112,7 +112,7 @@ fn parse_tree(
sess.span_diagnostic.span_err(span.entire(), &msg);
}
// Parse the contents of the sequence itself
- let sequence = parse(tts.into(), expect_matchers, sess);
+ let sequence = parse(tts, expect_matchers, sess);
// Get the Kleene operator and optional separator
let (separator, kleene) = parse_sep_and_kleene_op(trees, span.entire(), sess);
// Count the number of captured "names" (i.e., named metavars)
@@ -159,7 +159,7 @@ fn parse_tree(
// descend into the delimited set and further parse it.
tokenstream::TokenTree::Delimited(span, delim, tts) => TokenTree::Delimited(
span,
- Lrc::new(Delimited { delim, tts: parse(tts.into(), expect_matchers, sess) }),
+ Lrc::new(Delimited { delim, tts: parse(tts, expect_matchers, sess) }),
),
}
}

@@ -155,8 +155,7 @@ pub(super) fn transcribe(
}
// Step back into the parent Delimited.
- let tree =
- TokenTree::Delimited(span, forest.delim, TokenStream::new(result).into());
+ let tree = TokenTree::Delimited(span, forest.delim, TokenStream::new(result));
result = result_stack.pop().unwrap();
result.push(tree.into());
}

@@ -60,7 +60,7 @@ impl FromInternal<(TreeAndJoint, &'_ ParseSess, &'_ mut Vec<Self>)>
let Token { kind, span } = match tree {
tokenstream::TokenTree::Delimited(span, delim, tts) => {
let delimiter = Delimiter::from_internal(delim);
- return TokenTree::Group(Group { delimiter, stream: tts.into(), span });
+ return TokenTree::Group(Group { delimiter, stream: tts, span });
}
tokenstream::TokenTree::Token(token) => token,
};
@@ -196,12 +196,8 @@ impl ToInternal<TokenStream> for TokenTree<Group, Punct, Ident, Literal> {
let (ch, joint, span) = match self {
TokenTree::Punct(Punct { ch, joint, span }) => (ch, joint, span),
TokenTree::Group(Group { delimiter, stream, span }) => {
- return tokenstream::TokenTree::Delimited(
- span,
- delimiter.to_internal(),
- stream.into(),
- )
- .into();
+ return tokenstream::TokenTree::Delimited(span, delimiter.to_internal(), stream)
+ .into();
}
TokenTree::Ident(self::Ident { sym, is_raw, span }) => {
return tokenstream::TokenTree::token(Ident(sym, is_raw), span).into();

@@ -669,7 +669,7 @@ impl<'cx, 'tcx> Canonicalizer<'cx, 'tcx> {
} else {
let var = self.canonical_var(info, const_var.into());
self.tcx().mk_const(ty::Const {
- val: ty::ConstKind::Bound(self.binder_index, var.into()),
+ val: ty::ConstKind::Bound(self.binder_index, var),
ty: self.fold_ty(const_var.ty),
})
}

@@ -140,7 +140,6 @@ impl<'cx, 'tcx> VerifyBoundCx<'cx, 'tcx> {
// Extend with bounds that we can find from the trait.
let trait_bounds = self
.projection_declared_bounds_from_trait(projection_ty)
- .into_iter()
.map(|r| VerifyBound::OutlivedBy(r));
// see the extensive comment in projection_must_outlive

@@ -3202,7 +3202,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
obligation.predicate.def_id(),
obligation.recursion_depth + 1,
a_last.expect_ty(),
- &[b_last.into()],
+ &[b_last],
));
}

@@ -639,7 +639,7 @@ impl<'a, 'b> ReplaceBodyWithLoop<'a, 'b> {
ast::GenericArg::Type(ty) => Some(ty),
_ => None,
});
- any_involves_impl_trait(types.into_iter())
+ any_involves_impl_trait(types)
|| data.constraints.iter().any(|c| match c.kind {
ast::AssocTyConstraintKind::Bound { .. } => true,
ast::AssocTyConstraintKind::Equality { ref ty } => {

@@ -56,7 +56,7 @@ impl RegionInferenceContext<'_> {
let mut scc_regions = FxHashMap::default();
let mut start = 0;
for (scc, group) in &paired_scc_regions.into_iter().group_by(|(scc, _)| *scc) {
- let group_size = group.into_iter().count();
+ let group_size = group.count();
scc_regions.insert(scc, start..start + group_size);
start += group_size;
}

@@ -52,7 +52,7 @@ pub(crate) fn const_caller_location<'tcx>(
let loc_place = ecx.alloc_caller_location(file, line, col);
intern_const_alloc_recursive(&mut ecx, InternKind::Constant, loc_place, false).unwrap();
- ConstValue::Scalar(loc_place.ptr.into())
+ ConstValue::Scalar(loc_place.ptr)
}
// this function uses `unwrap` copiously, because an already validated constant

@@ -67,7 +67,7 @@ crate fn eval_nullary_intrinsic<'tcx>(
};
ConstValue::from_machine_usize(n, &tcx)
}
- sym::type_id => ConstValue::from_u64(tcx.type_id_hash(tp_ty).into()),
+ sym::type_id => ConstValue::from_u64(tcx.type_id_hash(tp_ty)),
other => bug!("`{}` is not a zero arg intrinsic", other),
})
}

@@ -293,7 +293,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
let (&untuple_arg, args) = args.split_last().unwrap();
trace!("eval_fn_call: Will pass last argument by untupling");
Cow::from(args.iter().map(|&a| Ok(a))
- .chain((0..untuple_arg.layout.fields.count()).into_iter()
+ .chain((0..untuple_arg.layout.fields.count())
.map(|i| self.operand_field(untuple_arg, i as u64))
)
.collect::<InterpResult<'_, Vec<OpTy<'tcx, M::PointerTag>>>>()?)

@@ -209,7 +209,7 @@ fn mir_const_qualif(tcx: TyCtxt<'_>, def_id: DefId) -> ConstQualifs {
// We return the qualifs in the return place for every MIR body, even though it is only used
// when deciding to promote a reference to a `const` for now.
- validator.qualifs_in_return_place().into()
+ validator.qualifs_in_return_place()
}
fn mir_const(tcx: TyCtxt<'_>, def_id: DefId) -> &Steal<BodyAndCache<'_>> {

@@ -49,7 +49,6 @@ pub fn expand_aggregate<'tcx>(
};
operands
- .into_iter()
.enumerate()
.map(move |(i, (op, ty))| {
let lhs_field = if let AggregateKind::Array(_) = kind {

@@ -1942,8 +1942,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
let tcx = self.hir.tcx();
let debug_source_info = SourceInfo { span: source_info.span, scope: visibility_scope };
let binding_mode = match mode {
- BindingMode::ByValue => ty::BindingMode::BindByValue(mutability.into()),
- BindingMode::ByRef(_) => ty::BindingMode::BindByReference(mutability.into()),
+ BindingMode::ByValue => ty::BindingMode::BindByValue(mutability),
+ BindingMode::ByRef(_) => ty::BindingMode::BindByReference(mutability),
};
debug!("declare_binding: user_ty={:?}", user_ty);
let local = LocalDecl::<'tcx> {

@@ -882,7 +882,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
span: tcx_hir.span(var_id),
},
place: Place {
- local: closure_env_arg.into(),
+ local: closure_env_arg,
projection: tcx.intern_place_elems(&projs),
},
});
@@ -927,7 +927,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
self.local_decls[local].local_info = if let Some(kind) = self_binding {
LocalInfo::User(ClearCrossCrate::Set(BindingForm::ImplicitSelf(*kind)))
} else {
- let binding_mode = ty::BindingMode::BindByValue(mutability.into());
+ let binding_mode = ty::BindingMode::BindByValue(mutability);
LocalInfo::User(ClearCrossCrate::Set(BindingForm::Var(
VarBindingForm {
binding_mode,

@@ -327,8 +327,7 @@ impl<'a> StringReader<'a> {
match kind {
rustc_lexer::LiteralKind::Char { terminated } => {
if !terminated {
- self.fatal_span_(start, suffix_start, "unterminated character literal".into())
- .raise()
+ self.fatal_span_(start, suffix_start, "unterminated character literal").raise()
}
let content_start = start + BytePos(1);
let content_end = suffix_start - BytePos(1);
@@ -338,12 +337,8 @@ impl<'a> StringReader<'a> {
}
rustc_lexer::LiteralKind::Byte { terminated } => {
if !terminated {
- self.fatal_span_(
- start + BytePos(1),
- suffix_start,
- "unterminated byte constant".into(),
- )
- .raise()
+ self.fatal_span_(start + BytePos(1), suffix_start, "unterminated byte constant")
+ .raise()
}
let content_start = start + BytePos(2);
let content_end = suffix_start - BytePos(1);
@@ -353,7 +348,7 @@ impl<'a> StringReader<'a> {
}
rustc_lexer::LiteralKind::Str { terminated } => {
if !terminated {
- self.fatal_span_(start, suffix_start, "unterminated double quote string".into())
+ self.fatal_span_(start, suffix_start, "unterminated double quote string")
.raise()
}
let content_start = start + BytePos(1);
@@ -367,7 +362,7 @@ impl<'a> StringReader<'a> {
self.fatal_span_(
start + BytePos(1),
suffix_start,
- "unterminated double quote byte string".into(),
+ "unterminated double quote byte string",
)
.raise()
}

@@ -212,7 +212,7 @@ impl<'a> TokenTreesReader<'a> {
_ => {}
}
- Ok(TokenTree::Delimited(delim_span, delim, tts.into()).into())
+ Ok(TokenTree::Delimited(delim_span, delim, tts).into())
}
token::CloseDelim(delim) => {
// An unexpected closing delimiter (i.e., there is no

@@ -420,7 +420,7 @@ fn prepend_attrs(
builder.push(tokenstream::TokenTree::Delimited(
delim_span,
token::DelimToken::Bracket,
- brackets.build().into(),
+ brackets.build(),
));
}
builder.push(tokens.clone());

@@ -263,8 +263,7 @@ impl TokenCursor {
]
.iter()
.cloned()
- .collect::<TokenStream>()
- .into(),
+ .collect::<TokenStream>(),
);
self.stack.push(mem::replace(
@@ -389,7 +388,7 @@ impl<'a> Parser<'a> {
root_module_name: None,
expected_tokens: Vec::new(),
token_cursor: TokenCursor {
- frame: TokenCursorFrame::new(DelimSpan::dummy(), token::NoDelim, &tokens.into()),
+ frame: TokenCursorFrame::new(DelimSpan::dummy(), token::NoDelim, &tokens),
stack: Vec::new(),
},
desugar_doc_comments,
@@ -1006,7 +1005,7 @@ impl<'a> Parser<'a> {
);
self.set_token(Token::new(TokenKind::CloseDelim(frame.delim), frame.span.close));
self.bump();
- TokenTree::Delimited(frame.span, frame.delim, frame.tree_cursor.stream.into())
+ TokenTree::Delimited(frame.span, frame.delim, frame.tree_cursor.stream)
}
token::CloseDelim(_) | token::Eof => unreachable!(),
_ => {

@@ -169,7 +169,7 @@ impl<'a> Parser<'a> {
}
fn parse_local_mk(&mut self, lo: Span, attrs: AttrVec) -> PResult<'a, Stmt> {
- let local = self.parse_local(attrs.into())?;
+ let local = self.parse_local(attrs)?;
Ok(self.mk_stmt(lo.to(self.prev_span), StmtKind::Local(local)))
}

@@ -77,7 +77,7 @@ impl<'a, 'hir> Visitor<'hir> for CheckLoopVisitor<'a, 'hir> {
return;
}
- let loop_id = match label.target_id.into() {
+ let loop_id = match label.target_id {
Ok(loop_id) => loop_id,
Err(hir::LoopIdError::OutsideLoopScope) => hir::DUMMY_HIR_ID,
Err(hir::LoopIdError::UnlabeledCfInWhileCondition) => {

@@ -24,7 +24,6 @@ crate fn fn_ptr(
) -> Ty<'tcx> {
let inputs_and_output = tcx.mk_type_list(
(0..arity_and_output)
- .into_iter()
.map(|i| ty::BoundVar::from(i))
// DebruijnIndex(1) because we are going to inject these in a `PolyFnSig`
.map(|var| tcx.mk_ty(ty::Bound(ty::DebruijnIndex::from(1usize), var.into()))),
@@ -37,7 +36,6 @@ crate fn fn_ptr(
crate fn type_list(tcx: TyCtxt<'tcx>, arity: usize) -> SubstsRef<'tcx> {
tcx.mk_substs(
(0..arity)
- .into_iter()
.map(|i| ty::BoundVar::from(i))
.map(|var| tcx.mk_ty(ty::Bound(ty::INNERMOST, var.into())))
.map(|ty| GenericArg::from(ty)),

@@ -312,6 +312,6 @@ pub fn check_explicit_predicates<'tcx>(
let predicate = outlives_predicate.subst(tcx, substs);
debug!("predicate = {:?}", &predicate);
- insert_outlives_predicate(tcx, predicate.0.into(), predicate.1, span, required_predicates);
+ insert_outlives_predicate(tcx, predicate.0, predicate.1, span, required_predicates);
}
}

@@ -287,9 +287,7 @@ impl CStringArray {
fn construct_envp(env: BTreeMap<OsString, OsString>, saw_nul: &mut bool) -> CStringArray {
let mut result = CStringArray::with_capacity(env.len());
- for (k, v) in env {
- let mut k: OsString = k.into();
+ for (mut k, v) in env {
// Reserve additional space for '=' and null terminator
k.reserve_exact(v.len() + 2);
k.push("=");

@@ -529,7 +529,7 @@ impl MetaItemKind {
TokenTree::Delimited(
DelimSpan::from_single(span),
token::Paren,
- TokenStream::new(tokens).into(),
+ TokenStream::new(tokens),
)
.into(),
]