9007: Internal: `clippy::redundant_clone` fixes r=lnicola a=lnicola

bors r+

Co-authored-by: Laurențiu Nicola <lnicola@dend.ro>
bors[bot] 2021-05-26 15:36:14 +00:00 committed by GitHub
commit f3aaae6555
7 changed files with 22 additions and 27 deletions
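
For context: `clippy::redundant_clone` fires when a value is cloned but the original (or the clone) is never used again afterwards, so the clone can simply be replaced by a move. The lint can be checked locally with `cargo clippy -- -W clippy::redundant_clone`. Below is a minimal sketch of the pattern removed throughout this commit; the `Package` type, `repr` field, and helper functions are made-up illustrations, not the real types touched by the diff:

#[derive(Clone)]
struct Package {
    repr: String,
}

// Before: `pkg` is dropped right after this call, so cloning the field only
// allocates a second `String` for nothing; clippy::redundant_clone points at
// the `.clone()`.
fn key_before(pkg: Package) -> String {
    pkg.repr.clone()
}

// After: move the field out of the soon-to-be-dropped value instead.
fn key_after(pkg: Package) -> String {
    pkg.repr
}

fn main() {
    let pkg = Package { repr: "serde 1.0.126".to_owned() };
    // This clone is *not* redundant: `pkg` is still used on the next line.
    assert_eq!(key_before(pkg.clone()), key_after(pkg));
}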

@@ -197,7 +197,7 @@ fn eager_macro_recur(
     macro_resolver: &dyn Fn(ast::Path) -> Option<MacroDefId>,
     mut diagnostic_sink: &mut dyn FnMut(mbe::ExpandError),
 ) -> Result<SyntaxNode, ErrorEmitted> {
-    let original = curr.value.clone().clone_for_update();
+    let original = curr.value.clone_for_update();
     let children = original.descendants().filter_map(ast::MacroCall::cast);
     let mut replacements = Vec::new();

@@ -76,17 +76,17 @@ impl<'a> InferenceContext<'a> {
         // way around first would mean we make the type variable `!`, instead of
         // just marking it as possibly diverging.
         if self.coerce(&ty2, &ty1) {
-            ty1.clone()
+            ty1
         } else if self.coerce(&ty1, &ty2) {
-            ty2.clone()
+            ty2
         } else {
             if let Some(id) = id {
                 self.result
                     .type_mismatches
-                    .insert(id.into(), TypeMismatch { expected: ty1.clone(), actual: ty2.clone() });
+                    .insert(id.into(), TypeMismatch { expected: ty1.clone(), actual: ty2 });
             }
             cov_mark::hit!(coerce_merge_fail_fallback);
-            ty1.clone()
+            ty1
         }
     }
@@ -183,7 +183,7 @@ impl<'a> InferenceContext<'a> {
         // details of coercion errors though, so I think it's useful to leave
         // the structure like it is.
-        let canonicalized = self.canonicalize(from_ty.clone());
+        let canonicalized = self.canonicalize(from_ty);
         let autoderef = autoderef::autoderef(
             self.db,
             self.resolver.krate(),
@@ -389,7 +389,7 @@ impl<'a> InferenceContext<'a> {
                 // The CoerceUnsized trait should have two generic params: Self and T.
                 return Err(TypeError);
             }
-            b.push(coerce_from.clone()).push(to_ty.clone()).build()
+            b.push(coerce_from).push(to_ty.clone()).build()
         };
         let goal: InEnvironment<DomainGoal> =

@@ -44,7 +44,7 @@ impl<'a> InferenceContext<'a> {
             if !could_unify {
                 self.result.type_mismatches.insert(
                     tgt_expr.into(),
-                    TypeMismatch { expected: expected_ty.clone(), actual: ty.clone() },
+                    TypeMismatch { expected: expected_ty, actual: ty.clone() },
                 );
             }
         }
@@ -57,15 +57,14 @@ impl<'a> InferenceContext<'a> {
         let ty = self.infer_expr_inner(expr, &expected);
         let ty = if let Some(target) = expected.only_has_type(&mut self.table) {
             if !self.coerce(&ty, &target) {
-                self.result.type_mismatches.insert(
-                    expr.into(),
-                    TypeMismatch { expected: target.clone(), actual: ty.clone() },
-                );
+                self.result
+                    .type_mismatches
+                    .insert(expr.into(), TypeMismatch { expected: target, actual: ty.clone() });
                 // Return actual type when type mismatch.
                 // This is needed for diagnostic when return type mismatch.
                 ty
             } else {
-                target.clone()
+                target
             }
         } else {
             ty

@@ -196,7 +196,7 @@ impl<'a> InferenceContext<'a> {
                 let inner_ty = if let Some(subpat) = subpat {
                     self.infer_pat(*subpat, &expected, default_bm)
                 } else {
-                    expected.clone()
+                    expected
                 };
                 let inner_ty = self.insert_type_vars_shallow(inner_ty);
@@ -266,10 +266,9 @@ impl<'a> InferenceContext<'a> {
         // use a new type variable if we got error type here
         let ty = self.insert_type_vars_shallow(ty);
         if !self.unify(&ty, &expected) {
-            self.result.type_mismatches.insert(
-                pat.into(),
-                TypeMismatch { expected: expected.clone(), actual: ty.clone() },
-            );
+            self.result
+                .type_mismatches
+                .insert(pat.into(), TypeMismatch { expected: expected, actual: ty.clone() });
         }
         self.write_pat_ty(pat, ty.clone());
         ty

@@ -55,7 +55,7 @@ pub(crate) fn expand_glob_import(acc: &mut Assists, ctx: &AssistContext) -> Opti
     let refs_in_target = find_refs_in_mod(ctx, target_module, Some(current_module))?;
     let imported_defs = find_imported_defs(ctx, star)?;
-    let target = parent.clone().either(|n| n.syntax().clone(), |n| n.syntax().clone());
+    let target = parent.either(|n| n.syntax().clone(), |n| n.syntax().clone());
     acc.add(
         AssistId("expand_glob_import", AssistKind::RefactorRewrite),
         "Expand glob import",

@@ -214,7 +214,7 @@ impl WorkspaceBuildData {
                         acc
                     };
                     let package_build_data =
-                        res.per_package.entry(package_id.repr.clone()).or_default();
+                        res.per_package.entry(package_id.repr).or_default();
                     // cargo_metadata crate returns default (empty) path for
                     // older cargos, which is not absolute, so work around that.
                     if !out_dir.as_str().is_empty() {
@@ -237,13 +237,13 @@ impl WorkspaceBuildData {
                         {
                             let filename = AbsPathBuf::assert(PathBuf::from(&filename));
                             let package_build_data =
-                                res.per_package.entry(package_id.repr.clone()).or_default();
+                                res.per_package.entry(package_id.repr).or_default();
                             package_build_data.proc_macro_dylib_path = Some(filename);
                         }
                     }
                 }
                 Message::CompilerMessage(message) => {
-                    progress(message.target.name.clone());
+                    progress(message.target.name);
                 }
                 Message::BuildFinished(_) => {}
                 Message::TextLine(_) => {}

@@ -346,11 +346,8 @@ impl CargoWorkspace {
         let workspace_root =
             AbsPathBuf::assert(PathBuf::from(meta.workspace_root.into_os_string()));
-        let build_data_config = BuildDataConfig::new(
-            cargo_toml.to_path_buf(),
-            config.clone(),
-            Arc::new(meta.packages.clone()),
-        );
+        let build_data_config =
+            BuildDataConfig::new(cargo_toml.to_path_buf(), config.clone(), Arc::new(meta.packages));
         Ok(CargoWorkspace { packages, targets, workspace_root, build_data_config })
     }