avoid converting types into themselves via .into() (clippy::useless-conversion)

example: let x: String = String::from("hello world").into();
Matthias Krüger 2021-03-17 01:27:56 +01:00
parent 83e6940efb
commit 966c23f529
24 changed files with 56 additions and 61 deletions
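As a minimal, hypothetical illustration of the pattern this commit removes (a standalone sketch, not code taken from the files changed below): `clippy::useless_conversion` fires whenever a value is converted into the type it already has.

```rust
// Minimal sketch of the lint being fixed (hypothetical example,
// not from the rust-analyzer sources touched in this commit).
fn main() {
    // `String::from` already produces a `String`, so the trailing `.into()`
    // converts the value into its own type and does nothing;
    // clippy's `useless_conversion` lint warns about this call.
    let before: String = String::from("hello world").into();

    // The fix throughout this commit is simply to drop the redundant call.
    let after: String = String::from("hello world");

    assert_eq!(before, after);
}
```

`useless_conversion` sits in clippy's warn-by-default `complexity` group, so a plain `cargo clippy` run surfaces call sites like the ones removed below.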

View File

@@ -197,7 +197,7 @@ impl ChangeFixture
change.change_file(file_id, Some(Arc::new(text)));
let path = VfsPath::new_virtual_path(meta.path);
-file_set.insert(file_id, path.into());
+file_set.insert(file_id, path);
files.push(file_id);
file_id.0 += 1;
}

View File

@@ -124,5 +124,5 @@ fn resolve_doc_path(
Some(Namespace::Macros) => return None,
None => resolved.iter_items().find_map(|it| it.as_module_def_id())?,
};
-Some(def.into())
+Some(def)
}

View File

@@ -1335,7 +1335,7 @@ impl Local {
// FIXME: why is this an option? It shouldn't be?
pub fn name(self, db: &dyn HirDatabase) -> Option<Name> {
-let body = db.body(self.parent.into());
+let body = db.body(self.parent);
match &body[self.pat_id] {
Pat::Bind { name, .. } => Some(name.clone()),
_ => None,
@@ -1347,7 +1347,7 @@ impl Local {
}
pub fn is_mut(self, db: &dyn HirDatabase) -> bool {
-let body = db.body(self.parent.into());
+let body = db.body(self.parent);
matches!(&body[self.pat_id], Pat::Bind { mode: BindingAnnotation::Mutable, .. })
}
@@ -1360,7 +1360,7 @@ impl Local {
}
pub fn ty(self, db: &dyn HirDatabase) -> Type {
-let def = DefWithBodyId::from(self.parent);
+let def = self.parent;
let infer = db.infer(def);
let ty = infer[self.pat_id].clone();
let krate = def.module(db.upcast()).krate();
@@ -1368,7 +1368,7 @@ impl Local {
}
pub fn source(self, db: &dyn HirDatabase) -> InFile<Either<ast::IdentPat, ast::SelfParam>> {
-let (_body, source_map) = db.body_with_source_map(self.parent.into());
+let (_body, source_map) = db.body_with_source_map(self.parent);
let src = source_map.pat_syntax(self.pat_id).unwrap(); // Hmm...
let root = src.file_syntax(db.upcast());
src.map(|ast| {
@@ -1393,12 +1393,12 @@ impl Label {
}
pub fn name(self, db: &dyn HirDatabase) -> Name {
-let body = db.body(self.parent.into());
+let body = db.body(self.parent);
body[self.label_id].name.clone()
}
pub fn source(self, db: &dyn HirDatabase) -> InFile<ast::Label> {
-let (_body, source_map) = db.body_with_source_map(self.parent.into());
+let (_body, source_map) = db.body_with_source_map(self.parent);
let src = source_map.label_syntax(self.label_id);
let root = src.file_syntax(db.upcast());
src.map(|ast| ast.to_node(&root))

View File

@@ -835,7 +835,7 @@ impl<'a> SemanticsScope<'a> {
resolver::ScopeDef::AdtSelfType(it) => ScopeDef::AdtSelfType(it.into()),
resolver::ScopeDef::GenericParam(id) => ScopeDef::GenericParam(id.into()),
resolver::ScopeDef::Local(pat_id) => {
-let parent = resolver.body_owner().unwrap().into();
+let parent = resolver.body_owner().unwrap();
ScopeDef::Local(Local { parent, pat_id })
}
};

View File

@@ -484,7 +484,7 @@ fn resolve_hir_path_(
resolver.resolve_path_in_value_ns_fully(db.upcast(), path.mod_path()).and_then(|val| {
let res = match val {
ValueNs::LocalBinding(pat_id) => {
-let var = Local { parent: body_owner?.into(), pat_id };
+let var = Local { parent: body_owner?, pat_id };
PathResolution::Local(var)
}
ValueNs::FunctionId(it) => PathResolution::Def(Function::from(it).into()),

View File

@@ -325,7 +325,7 @@ impl Attrs {
if docs.is_empty() {
None
} else {
-Some(Documentation(docs.into()))
+Some(Documentation(docs))
}
}
}

View File

@@ -203,7 +203,7 @@ impl<'a, 'b> DeclValidator<'a, 'b> {
let diagnostic = IncorrectCase {
file: fn_src.file_id,
ident_type: IdentType::Function,
-ident: AstPtr::new(&ast_ptr).into(),
+ident: AstPtr::new(&ast_ptr),
expected_case: replacement.expected_case,
ident_text: replacement.current_name.to_string(),
suggested_text: replacement.suggested_text,
@@ -261,7 +261,7 @@ impl<'a, 'b> DeclValidator<'a, 'b> {
let diagnostic = IncorrectCase {
file: fn_src.file_id,
ident_type: IdentType::Argument,
-ident: AstPtr::new(&ast_ptr).into(),
+ident: AstPtr::new(&ast_ptr),
expected_case: param_to_rename.expected_case,
ident_text: param_to_rename.current_name.to_string(),
suggested_text: param_to_rename.suggested_text,
@@ -313,7 +313,7 @@ impl<'a, 'b> DeclValidator<'a, 'b> {
let diagnostic = IncorrectCase {
file: source_ptr.file_id,
ident_type: IdentType::Variable,
-ident: AstPtr::new(&name_ast).into(),
+ident: AstPtr::new(&name_ast),
expected_case: replacement.expected_case,
ident_text: replacement.current_name.to_string(),
suggested_text: replacement.suggested_text,
@@ -403,7 +403,7 @@ impl<'a, 'b> DeclValidator<'a, 'b> {
let diagnostic = IncorrectCase {
file: struct_src.file_id,
ident_type: IdentType::Structure,
-ident: AstPtr::new(&ast_ptr).into(),
+ident: AstPtr::new(&ast_ptr),
expected_case: replacement.expected_case,
ident_text: replacement.current_name.to_string(),
suggested_text: replacement.suggested_text,
@@ -448,7 +448,7 @@ impl<'a, 'b> DeclValidator<'a, 'b> {
let diagnostic = IncorrectCase {
file: struct_src.file_id,
ident_type: IdentType::Field,
-ident: AstPtr::new(&ast_ptr).into(),
+ident: AstPtr::new(&ast_ptr),
expected_case: field_to_rename.expected_case,
ident_text: field_to_rename.current_name.to_string(),
suggested_text: field_to_rename.suggested_text,
@@ -527,7 +527,7 @@ impl<'a, 'b> DeclValidator<'a, 'b> {
let diagnostic = IncorrectCase {
file: enum_src.file_id,
ident_type: IdentType::Enum,
-ident: AstPtr::new(&ast_ptr).into(),
+ident: AstPtr::new(&ast_ptr),
expected_case: replacement.expected_case,
ident_text: replacement.current_name.to_string(),
suggested_text: replacement.suggested_text,
@@ -572,7 +572,7 @@ impl<'a, 'b> DeclValidator<'a, 'b> {
let diagnostic = IncorrectCase {
file: enum_src.file_id,
ident_type: IdentType::Variant,
-ident: AstPtr::new(&ast_ptr).into(),
+ident: AstPtr::new(&ast_ptr),
expected_case: variant_to_rename.expected_case,
ident_text: variant_to_rename.current_name.to_string(),
suggested_text: variant_to_rename.suggested_text,
@@ -617,7 +617,7 @@ impl<'a, 'b> DeclValidator<'a, 'b> {
let diagnostic = IncorrectCase {
file: const_src.file_id,
ident_type: IdentType::Constant,
-ident: AstPtr::new(&ast_ptr).into(),
+ident: AstPtr::new(&ast_ptr),
expected_case: replacement.expected_case,
ident_text: replacement.current_name.to_string(),
suggested_text: replacement.suggested_text,
@@ -665,7 +665,7 @@ impl<'a, 'b> DeclValidator<'a, 'b> {
let diagnostic = IncorrectCase {
file: static_src.file_id,
ident_type: IdentType::StaticVariable,
-ident: AstPtr::new(&ast_ptr).into(),
+ident: AstPtr::new(&ast_ptr),
expected_case: replacement.expected_case,
ident_text: replacement.current_name.to_string(),
suggested_text: replacement.suggested_text,

View File

@@ -44,7 +44,7 @@ impl<'a, 'b> ExprValidator<'a, 'b> {
pub(super) fn validate_body(&mut self, db: &dyn HirDatabase) {
self.check_for_filter_map_next(db);
-let body = db.body(self.owner.into());
+let body = db.body(self.owner);
for (id, expr) in body.exprs.iter() {
if let Some((variant_def, missed_fields, true)) =
@@ -98,7 +98,7 @@ impl<'a, 'b> ExprValidator<'a, 'b> {
missed_fields: Vec<LocalFieldId>,
) {
// XXX: only look at source_map if we do have missing fields
-let (_, source_map) = db.body_with_source_map(self.owner.into());
+let (_, source_map) = db.body_with_source_map(self.owner);
if let Ok(source_ptr) = source_map.expr_syntax(id) {
let root = source_ptr.file_syntax(db.upcast());
@@ -128,7 +128,7 @@ impl<'a, 'b> ExprValidator<'a, 'b> {
missed_fields: Vec<LocalFieldId>,
) {
// XXX: only look at source_map if we do have missing fields
-let (_, source_map) = db.body_with_source_map(self.owner.into());
+let (_, source_map) = db.body_with_source_map(self.owner);
if let Ok(source_ptr) = source_map.pat_syntax(id) {
if let Some(expr) = source_ptr.value.as_ref().left() {
@@ -175,7 +175,7 @@ impl<'a, 'b> ExprValidator<'a, 'b> {
};
// Search function body for instances of .filter_map(..).next()
-let body = db.body(self.owner.into());
+let body = db.body(self.owner);
let mut prev = None;
for (id, expr) in body.exprs.iter() {
if let Expr::MethodCall { receiver, .. } = expr {
@@ -192,7 +192,7 @@ impl<'a, 'b> ExprValidator<'a, 'b> {
if function_id == *next_function_id {
if let Some(filter_map_id) = prev {
if *receiver == filter_map_id {
-let (_, source_map) = db.body_with_source_map(self.owner.into());
+let (_, source_map) = db.body_with_source_map(self.owner);
if let Ok(next_source_ptr) = source_map.expr_syntax(id) {
self.sink.push(ReplaceFilterMapNextWithFindMap {
file: next_source_ptr.file_id,
@@ -262,7 +262,7 @@ impl<'a, 'b> ExprValidator<'a, 'b> {
let mut arg_count = args.len();
if arg_count != param_count {
-let (_, source_map) = db.body_with_source_map(self.owner.into());
+let (_, source_map) = db.body_with_source_map(self.owner);
if let Ok(source_ptr) = source_map.expr_syntax(call_id) {
if is_method_call {
param_count -= 1;
@@ -287,7 +287,7 @@ impl<'a, 'b> ExprValidator<'a, 'b> {
infer: Arc<InferenceResult>,
) {
let (body, source_map): (Arc<Body>, Arc<BodySourceMap>) =
-db.body_with_source_map(self.owner.into());
+db.body_with_source_map(self.owner);
let match_expr_ty = if infer.type_of_expr[match_expr].is_unknown() {
return;
@@ -393,7 +393,7 @@ impl<'a, 'b> ExprValidator<'a, 'b> {
};
if params.len() > 0 && params[0] == mismatch.actual {
-let (_, source_map) = db.body_with_source_map(self.owner.into());
+let (_, source_map) = db.body_with_source_map(self.owner);
if let Ok(source_ptr) = source_map.expr_syntax(id) {
self.sink.push(MissingOkOrSomeInTailExpr {
@@ -425,7 +425,7 @@ impl<'a, 'b> ExprValidator<'a, 'b> {
return;
}
-let (_, source_map) = db.body_with_source_map(self.owner.into());
+let (_, source_map) = db.body_with_source_map(self.owner);
if let Ok(source_ptr) = source_map.expr_syntax(possible_tail_id) {
self.sink

View File

@@ -29,7 +29,7 @@ impl<'a, 'b> UnsafeValidator<'a, 'b> {
}
pub(super) fn validate_body(&mut self, db: &dyn HirDatabase) {
-let def = self.owner.into();
+let def = self.owner;
let unsafe_expressions = unsafe_expressions(db, self.infer.as_ref(), def);
let is_unsafe = match self.owner {
DefWithBodyId::FunctionId(it) => db.function_data(it).qualifier.is_unsafe,

View File

@@ -52,7 +52,7 @@ impl ToChalk for Ty {
TyKind::Tuple(cardinality, substs) => {
let substitution = substs.to_chalk(db);
-chalk_ir::TyKind::Tuple(cardinality.into(), substitution).intern(&Interner)
+chalk_ir::TyKind::Tuple(cardinality, substitution).intern(&Interner)
}
TyKind::Raw(mutability, ty) => {
let ty = ty.to_chalk(db);

View File

@@ -180,7 +180,7 @@ fn missing_record_expr_field_fix(
let def_id = sema.resolve_variant(record_lit)?;
let module;
let def_file_id;
-let record_fields = match VariantDef::from(def_id) {
+let record_fields = match def_id {
VariantDef::Struct(s) => {
module = s.module(sema.db);
let source = s.source(sema.db)?;

View File

@@ -56,7 +56,7 @@ impl Builder {
impl Completions {
pub(crate) fn add(&mut self, item: CompletionItem) {
-self.buf.push(item.into())
+self.buf.push(item)
}
pub(crate) fn add_all<I>(&mut self, items: I)

View File

@@ -89,7 +89,7 @@ enum State {
impl FormatStrParser {
pub(crate) fn new(input: String) -> Self {
Self {
-input: input.into(),
+input: input,
output: String::new(),
extracted_expressions: Vec::new(),
state: State::NotExpr,

View File

@@ -181,7 +181,7 @@ impl NameClass {
},
ast::SelfParam(it) => {
let def = sema.to_def(&it)?;
-Some(NameClass::Definition(Definition::Local(def.into())))
+Some(NameClass::Definition(Definition::Local(def)))
},
ast::RecordField(it) => {
let field: hir::Field = sema.to_def(&it)?;

View File

@@ -80,7 +80,7 @@ impl ImportScope {
})
.last()
.map(|last_inner_element| {
-(InsertPosition::After(last_inner_element.into()), AddBlankLine::BeforeTwice)
+(InsertPosition::After(last_inner_element), AddBlankLine::BeforeTwice)
})
.unwrap_or_else(|| self.first_insert_pos())
}

View File

@@ -120,7 +120,7 @@ fn invocation_fixtures(rules: &FxHashMap<String, MacroRules>) -> Vec<(String, tt
Some("pat") => parent.token_trees.push(make_ident("foo")),
Some("path") => parent.token_trees.push(make_ident("foo")),
Some("literal") => parent.token_trees.push(make_literal("1")),
-Some("expr") => parent.token_trees.push(make_ident("foo").into()),
+Some("expr") => parent.token_trees.push(make_ident("foo")),
Some("lifetime") => {
parent.token_trees.push(make_punct('\''));
parent.token_trees.push(make_ident("a"));
@@ -157,17 +157,15 @@ fn invocation_fixtures(rules: &FxHashMap<String, MacroRules>) -> Vec<(String, tt
if i + 1 != cnt {
if let Some(sep) = separator {
match sep {
-Separator::Literal(it) => parent
-.token_trees
-.push(tt::Leaf::Literal(it.clone().into()).into()),
-Separator::Ident(it) => parent
-.token_trees
-.push(tt::Leaf::Ident(it.clone().into()).into()),
+Separator::Literal(it) => {
+parent.token_trees.push(tt::Leaf::Literal(it.clone()).into())
+}
+Separator::Ident(it) => {
+parent.token_trees.push(tt::Leaf::Ident(it.clone()).into())
+}
Separator::Puncts(puncts) => {
for it in puncts {
-parent
-.token_trees
-.push(tt::Leaf::Punct(it.clone().into()).into())
+parent.token_trees.push(tt::Leaf::Punct(it.clone()).into())
}
}
};

View File

@@ -722,7 +722,7 @@ fn match_meta_var(kind: &str, input: &mut TtIter) -> ExpandResult<Option<Fragmen
input
.expect_literal()
.map(|literal| {
-let lit = tt::Leaf::from(literal.clone());
+let lit = literal.clone();
match neg {
None => Some(lit.into()),
Some(neg) => Some(tt::TokenTree::Subtree(tt::Subtree {

View File

@@ -130,7 +130,7 @@ pub fn parse_exprs_with_sep(tt: &tt::Subtree, sep: char) -> Vec<tt::Subtree> {
res.push(match expanded.value {
None => break,
Some(tt @ tt::TokenTree::Leaf(_)) => {
-tt::Subtree { delimiter: None, token_trees: vec![tt.into()] }
+tt::Subtree { delimiter: None, token_trees: vec![tt] }
}
Some(tt::TokenTree::Subtree(tt)) => tt,
});
@@ -727,7 +727,7 @@ impl<'a> TreeSink for TtTreeSink<'a> {
// Note: We always assume the semi-colon would be the last token in
// other parts of RA such that we don't add whitespace here.
if curr.spacing == tt::Spacing::Alone && curr.char != ';' {
-self.inner.token(WHITESPACE, " ".into());
+self.inner.token(WHITESPACE, " ");
self.text_pos += TextSize::of(' ');
}
}

View File

@@ -35,7 +35,7 @@ mod rule_parsing {
fn test_invalid_arms() {
fn check(macro_body: &str, err: ParseError) {
let m = parse_macro_arm(macro_body);
-assert_eq!(m, Err(err.into()));
+assert_eq!(m, Err(err));
}
check("invalid", ParseError::Expected("expected subtree".into()));

View File

@@ -236,13 +236,10 @@ mod tests {
subtree
.token_trees
.push(TokenTree::Leaf(Ident { text: "Foo".into(), id: TokenId(1) }.into()));
-subtree.token_trees.push(TokenTree::Subtree(
-Subtree {
-delimiter: Some(Delimiter { id: TokenId(2), kind: DelimiterKind::Brace }),
-token_trees: vec![],
-}
-.into(),
-));
+subtree.token_trees.push(TokenTree::Subtree(Subtree {
+delimiter: Some(Delimiter { id: TokenId(2), kind: DelimiterKind::Brace }),
+token_trees: vec![],
+}));
subtree
}

View File

@@ -36,7 +36,7 @@ impl Progress {
impl GlobalState {
pub(crate) fn show_message(&mut self, typ: lsp_types::MessageType, message: String) {
-let message = message.into();
+let message = message;
self.send_notification::<lsp_types::notification::ShowMessage>(
lsp_types::ShowMessageParams { typ, message },
)

View File

@@ -287,7 +287,7 @@ pub(crate) fn signature_help(
let params = call_info
.parameter_ranges()
.iter()
-.map(|it| [u32::from(it.start()).into(), u32::from(it.end()).into()])
+.map(|it| [u32::from(it.start()), u32::from(it.end())])
.map(|label_offsets| lsp_types::ParameterInformation {
label: lsp_types::ParameterLabel::LabelOffsets(label_offsets),
documentation: None,

View File

@@ -479,7 +479,7 @@ impl ast::MatchArmList {
Some(t) => t,
None => return self.clone(),
};
-let position = InsertPosition::Before(r_curly.into());
+let position = InsertPosition::Before(r_curly);
let arm_ws = tokens::WsBuilder::new(" ");
let match_indent = &leading_indent(self.syntax()).unwrap_or_default();
let match_ws = tokens::WsBuilder::new(&format!("\n{}", match_indent));

View File

@@ -122,5 +122,5 @@ fn ws_between(left: &SyntaxElement, right: &SyntaxElement) -> Option<SyntaxToken
if right.kind() == T![;] || right.kind() == T![,] {
return None;
}
-Some(make::tokens::single_space().into())
+Some(make::tokens::single_space())
}