Auto merge of #16521 - tetsuharuohzeki:experiment-enable-str_to_string, r=lnicola

internal: Enable str_to_string Clippy rule

This fixes [the FIXME comment](bb0de88f24/Cargo.toml (L183-L184)).
This commit is contained in:
bors 2024-02-10 08:15:39 +00:00
commit 7e9265506d
149 changed files with 382 additions and 391 deletions

View File

@ -180,5 +180,4 @@ print_stdout = "warn"
print_stderr = "warn"
rc_buffer = "warn"
# FIXME enable this, we use this pattern a lot so its annoying work ...
# str_to_string = "warn"
str_to_string = "warn"

View File

@ -782,7 +782,7 @@ impl FromStr for Edition {
"2018" => Edition::Edition2018,
"2021" => Edition::Edition2021,
"2024" => Edition::Edition2024,
_ => return Err(ParseEditionError { invalid_input: s.to_string() }),
_ => return Err(ParseEditionError { invalid_input: s.to_owned() }),
};
Ok(res)
}

View File

@ -29,11 +29,11 @@ pub(super) fn print_body_hir(db: &dyn DefDatabase, body: &Body, owner: DefWithBo
"const {} = ",
match &it.name {
Some(name) => name.display(db.upcast()).to_string(),
None => "_".to_string(),
None => "_".to_owned(),
}
)
}),
DefWithBodyId::InTypeConstId(_) => "In type const = ".to_string(),
DefWithBodyId::InTypeConstId(_) => "In type const = ".to_owned(),
DefWithBodyId::VariantId(it) => {
let loc = it.lookup(db);
let enum_loc = loc.parent.lookup(db);
@ -123,7 +123,7 @@ impl Printer<'_> {
wln!(self);
f(self);
self.indent_level -= 1;
self.buf = self.buf.trim_end_matches('\n').to_string();
self.buf = self.buf.trim_end_matches('\n').to_owned();
}
fn whitespace(&mut self) {

View File

@ -859,7 +859,7 @@ mod tests {
check_search(
ra_fixture,
"main",
Query::new("fmt".to_string()).fuzzy(),
Query::new("fmt".to_owned()).fuzzy(),
expect![[r#"
dep::fmt (t)
dep::fmt::Display::FMT_CONST (a)
@ -888,9 +888,7 @@ mod tests {
check_search(
ra_fixture,
"main",
Query::new("fmt".to_string())
.fuzzy()
.assoc_search_mode(AssocSearchMode::AssocItemsOnly),
Query::new("fmt".to_owned()).fuzzy().assoc_search_mode(AssocSearchMode::AssocItemsOnly),
expect![[r#"
dep::fmt::Display::FMT_CONST (a)
dep::fmt::Display::format_function (a)
@ -901,7 +899,7 @@ mod tests {
check_search(
ra_fixture,
"main",
Query::new("fmt".to_string()).fuzzy().assoc_search_mode(AssocSearchMode::Exclude),
Query::new("fmt".to_owned()).fuzzy().assoc_search_mode(AssocSearchMode::Exclude),
expect![[r#"
dep::fmt (t)
"#]],
@ -937,7 +935,7 @@ pub mod fmt {
check_search(
ra_fixture,
"main",
Query::new("fmt".to_string()).fuzzy(),
Query::new("fmt".to_owned()).fuzzy(),
expect![[r#"
dep::Fmt (m)
dep::Fmt (t)
@ -951,7 +949,7 @@ pub mod fmt {
check_search(
ra_fixture,
"main",
Query::new("fmt".to_string()),
Query::new("fmt".to_owned()),
expect![[r#"
dep::Fmt (m)
dep::Fmt (t)
@ -991,7 +989,7 @@ pub mod fmt {
check_search(
ra_fixture,
"main",
Query::new("fmt".to_string()),
Query::new("fmt".to_owned()),
expect![[r#"
dep::Fmt (m)
dep::Fmt (t)
@ -1015,7 +1013,7 @@ pub mod fmt {
check_search(
ra_fixture,
"main",
Query::new("FMT".to_string()),
Query::new("FMT".to_owned()),
expect![[r#"
dep::FMT (t)
dep::FMT (v)
@ -1027,7 +1025,7 @@ pub mod fmt {
check_search(
ra_fixture,
"main",
Query::new("FMT".to_string()).case_sensitive(),
Query::new("FMT".to_owned()).case_sensitive(),
expect![[r#"
dep::FMT (t)
dep::FMT (v)

View File

@ -672,7 +672,7 @@ impl ItemScope {
format_to!(
buf,
"{}:",
name.map_or("_".to_string(), |name| name.display(db).to_string())
name.map_or("_".to_owned(), |name| name.display(db).to_string())
);
if let Some((.., i)) = def.types {

View File

@ -24,7 +24,7 @@ pub(super) fn print_item_tree(db: &dyn DefDatabase, tree: &ItemTree) -> String {
p.print_mod_item(*item);
}
let mut s = p.buf.trim_end_matches('\n').to_string();
let mut s = p.buf.trim_end_matches('\n').to_owned();
s.push('\n');
s
}
@ -58,7 +58,7 @@ impl Printer<'_> {
wln!(self);
f(self);
self.indent_level -= 1;
self.buf = self.buf.trim_end_matches('\n').to_string();
self.buf = self.buf.trim_end_matches('\n').to_owned();
}
/// Ensures that a blank line is output before the next text.

View File

@ -224,7 +224,7 @@ fn reindent(indent: IndentLevel, pp: String) -> String {
return pp;
}
let mut lines = pp.split_inclusive('\n');
let mut res = lines.next().unwrap().to_string();
let mut res = lines.next().unwrap().to_owned();
for line in lines {
if line.trim().is_empty() {
res.push_str(line)

View File

@ -515,7 +515,7 @@ fn concat_bytes_expand(
tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => {
let token = ast::make::tokens::literal(&lit.to_string());
match token.kind() {
syntax::SyntaxKind::BYTE => bytes.push(token.text().to_string()),
syntax::SyntaxKind::BYTE => bytes.push(token.text().to_owned()),
syntax::SyntaxKind::BYTE_STRING => {
let components = unquote_byte_string(lit).unwrap_or_default();
components.into_iter().for_each(|it| bytes.push(it.to_string()));
@ -570,7 +570,7 @@ fn concat_bytes_expand_subtree(
let lit = ast::make::tokens::literal(&lit.to_string());
match lit.kind() {
syntax::SyntaxKind::BYTE | syntax::SyntaxKind::INT_NUMBER => {
bytes.push(lit.text().to_string())
bytes.push(lit.text().to_owned())
}
_ => {
return Err(mbe::ExpandError::UnexpectedToken.into());
@ -749,7 +749,7 @@ fn env_expand(
// We cannot use an empty string here, because for
// `include!(concat!(env!("OUT_DIR"), "/foo.rs"))` will become
// `include!("foo.rs"), which might go to infinite loop
"UNRESOLVED_ENV_VAR".to_string()
"UNRESOLVED_ENV_VAR".to_owned()
});
let expanded = quote! {span => #s };

View File

@ -133,7 +133,7 @@ fn bit_op() {
check_number(r#"const GOAL: i8 = 1 << 7"#, (1i8 << 7) as i128);
check_number(r#"const GOAL: i8 = -1 << 2"#, (-1i8 << 2) as i128);
check_fail(r#"const GOAL: i8 = 1 << 8"#, |e| {
e == ConstEvalError::MirEvalError(MirEvalError::Panic("Overflow in Shl".to_string()))
e == ConstEvalError::MirEvalError(MirEvalError::Panic("Overflow in Shl".to_owned()))
});
check_number(r#"const GOAL: i32 = 100000000i32 << 11"#, (100000000i32 << 11) as i128);
}
@ -2756,7 +2756,7 @@ fn memory_limit() {
"#,
|e| {
e == ConstEvalError::MirEvalError(MirEvalError::Panic(
"Memory allocation of 30000000000 bytes failed".to_string(),
"Memory allocation of 30000000000 bytes failed".to_owned(),
))
},
);

View File

@ -194,17 +194,15 @@ impl CapturedItem {
}
let variant_data = f.parent.variant_data(db.upcast());
let field = match &*variant_data {
VariantData::Record(fields) => fields[f.local_id]
.name
.as_str()
.unwrap_or("[missing field]")
.to_string(),
VariantData::Record(fields) => {
fields[f.local_id].name.as_str().unwrap_or("[missing field]").to_owned()
}
VariantData::Tuple(fields) => fields
.iter()
.position(|it| it.0 == f.local_id)
.unwrap_or_default()
.to_string(),
VariantData::Unit => "[missing field]".to_string(),
VariantData::Unit => "[missing field]".to_owned(),
};
result = format!("{result}.{field}");
field_need_paren = false;

View File

@ -1763,7 +1763,7 @@ impl Evaluator<'_> {
}
};
mem.get(pos..pos + size)
.ok_or_else(|| MirEvalError::UndefinedBehavior("out of bound memory read".to_string()))
.ok_or_else(|| MirEvalError::UndefinedBehavior("out of bound memory read".to_owned()))
}
fn write_memory_using_ref(&mut self, addr: Address, size: usize) -> Result<&mut [u8]> {
@ -1777,7 +1777,7 @@ impl Evaluator<'_> {
}
};
mem.get_mut(pos..pos + size)
.ok_or_else(|| MirEvalError::UndefinedBehavior("out of bound memory write".to_string()))
.ok_or_else(|| MirEvalError::UndefinedBehavior("out of bound memory write".to_owned()))
}
fn write_memory(&mut self, addr: Address, r: &[u8]) -> Result<()> {
@ -1800,7 +1800,7 @@ impl Evaluator<'_> {
return Ok(());
}
let oob = || MirEvalError::UndefinedBehavior("out of bounds memory write".to_string());
let oob = || MirEvalError::UndefinedBehavior("out of bounds memory write".to_owned());
match (addr, r.addr) {
(Stack(dst), Stack(src)) => {
@ -2653,7 +2653,7 @@ pub fn render_const_using_debug_impl(
ptr: ArenaMap::new(),
body: db
.mir_body(owner.into())
.map_err(|_| MirEvalError::NotSupported("unreachable".to_string()))?,
.map_err(|_| MirEvalError::NotSupported("unreachable".to_owned()))?,
drop_flags: DropFlags::default(),
};
let data = evaluator.allocate_const_in_heap(locals, c)?;

View File

@ -304,7 +304,7 @@ impl Evaluator<'_> {
use LangItem::*;
let mut args = args.iter();
match it {
BeginPanic => Err(MirEvalError::Panic("<unknown-panic-payload>".to_string())),
BeginPanic => Err(MirEvalError::Panic("<unknown-panic-payload>".to_owned())),
PanicFmt => {
let message = (|| {
let resolver = self

View File

@ -1634,7 +1634,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
self.set_goto(prev_block, begin, span);
f(self, begin)?;
let my = mem::replace(&mut self.current_loop_blocks, prev).ok_or(
MirLowerError::ImplementationError("current_loop_blocks is corrupt".to_string()),
MirLowerError::ImplementationError("current_loop_blocks is corrupt".to_owned()),
)?;
if let Some(prev) = prev_label {
self.labeled_loop_blocks.insert(label.unwrap(), prev);
@ -1669,7 +1669,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
.current_loop_blocks
.as_mut()
.ok_or(MirLowerError::ImplementationError(
"Current loop access out of loop".to_string(),
"Current loop access out of loop".to_owned(),
))?
.end
{
@ -1679,7 +1679,7 @@ impl<'ctx> MirLowerCtx<'ctx> {
self.current_loop_blocks
.as_mut()
.ok_or(MirLowerError::ImplementationError(
"Current loop access out of loop".to_string(),
"Current loop access out of loop".to_owned(),
))?
.end = Some(s);
s

View File

@ -225,7 +225,7 @@ impl MirLowerCtx<'_> {
{
let Some(index_fn) = self.infer.method_resolution(expr_id) else {
return Err(MirLowerError::UnresolvedMethod(
"[overloaded index]".to_string(),
"[overloaded index]".to_owned(),
));
};
let Some((base_place, current)) =

View File

@ -100,7 +100,7 @@ fn check_impl(ra_fixture: &str, allow_none: bool, only_types: bool, display_sour
if only_types {
types.insert(file_range, expected);
} else if expected.starts_with("type: ") {
types.insert(file_range, expected.trim_start_matches("type: ").to_string());
types.insert(file_range, expected.trim_start_matches("type: ").to_owned());
} else if expected.starts_with("expected") {
mismatches.insert(file_range, expected);
} else if expected.starts_with("adjustments:") {
@ -110,7 +110,7 @@ fn check_impl(ra_fixture: &str, allow_none: bool, only_types: bool, display_sour
.trim_start_matches("adjustments:")
.trim()
.split(',')
.map(|it| it.trim().to_string())
.map(|it| it.trim().to_owned())
.filter(|it| !it.is_empty())
.collect(),
);
@ -331,7 +331,7 @@ fn infer_with_mismatches(content: &str, include_mismatches: bool) -> String {
});
for (node, ty) in &types {
let (range, text) = if let Some(self_param) = ast::SelfParam::cast(node.value.clone()) {
(self_param.name().unwrap().syntax().text_range(), "self".to_string())
(self_param.name().unwrap().syntax().text_range(), "self".to_owned())
} else {
(node.value.text_range(), node.value.text().to_string().replace('\n', " "))
};

View File

@ -104,8 +104,8 @@ pub(crate) fn trait_solve_query(
GoalData::DomainGoal(DomainGoal::Holds(WhereClause::Implemented(it))) => {
db.trait_data(it.hir_trait_id()).name.display(db.upcast()).to_string()
}
GoalData::DomainGoal(DomainGoal::Holds(WhereClause::AliasEq(_))) => "alias_eq".to_string(),
_ => "??".to_string(),
GoalData::DomainGoal(DomainGoal::Holds(WhereClause::AliasEq(_))) => "alias_eq".to_owned(),
_ => "??".to_owned(),
};
let _p = tracing::span!(tracing::Level::INFO, "trait_solve_query", ?detail).entered();
tracing::info!("trait_solve_query({:?})", goal.value.goal);

View File

@ -1933,7 +1933,7 @@ impl Function {
};
let (result, output) = interpret_mir(db, body, false, None);
let mut text = match result {
Ok(_) => "pass".to_string(),
Ok(_) => "pass".to_owned(),
Err(e) => {
let mut r = String::new();
_ = e.pretty_print(&mut r, db, &span_formatter);

View File

@ -96,7 +96,7 @@ pub(crate) fn apply_demorgan(acc: &mut Assists, ctx: &AssistContext<'_>) -> Opti
let dm_lhs = demorganed.lhs()?;
acc.add_group(
&GroupLabel("Apply De Morgan's law".to_string()),
&GroupLabel("Apply De Morgan's law".to_owned()),
AssistId("apply_demorgan", AssistKind::RefactorRewrite),
"Apply De Morgan's law",
op_range,
@ -187,7 +187,7 @@ pub(crate) fn apply_demorgan_iterator(acc: &mut Assists, ctx: &AssistContext<'_>
let op_range = method_call.syntax().text_range();
let label = format!("Apply De Morgan's law to `Iterator::{}`", name.text().as_str());
acc.add_group(
&GroupLabel("Apply De Morgan's law".to_string()),
&GroupLabel("Apply De Morgan's law".to_owned()),
AssistId("apply_demorgan_iterator", AssistKind::RefactorRewrite),
label,
op_range,

View File

@ -57,7 +57,7 @@ fn block_to_line(acc: &mut Assists, comment: ast::Comment) -> Option<()> {
// Don't introduce trailing whitespace
if line.is_empty() {
line_prefix.to_string()
line_prefix.to_owned()
} else {
format!("{line_prefix} {line}")
}

View File

@ -244,7 +244,7 @@ fn make_function_name(semantics_scope: &hir::SemanticsScope<'_>) -> ast::NameRef
let default_name = "fun_name";
let mut name = default_name.to_string();
let mut name = default_name.to_owned();
let mut counter = 0;
while names_in_scope.contains(&name) {
counter += 1;
@ -1949,7 +1949,7 @@ fn with_tail_expr(block: ast::BlockExpr, tail_expr: ast::Expr) -> ast::BlockExpr
}
fn format_type(ty: &hir::Type, ctx: &AssistContext<'_>, module: hir::Module) -> String {
ty.display_source_code(ctx.db(), module.into(), true).ok().unwrap_or_else(|| "_".to_string())
ty.display_source_code(ctx.db(), module.into(), true).ok().unwrap_or_else(|| "_".to_owned())
}
fn make_ty(ty: &hir::Type, ctx: &AssistContext<'_>, module: hir::Module) -> ast::Type {

View File

@ -115,7 +115,7 @@ pub(crate) fn extract_variable(acc: &mut Assists, ctx: &AssistContext<'_>) -> Op
let trailing_ws = if prev_ws.is_some_and(|it| it.text().starts_with('\n')) {
format!("\n{indent_to}")
} else {
" ".to_string()
" ".to_owned()
};
ted::insert_all_raw(

View File

@ -416,9 +416,9 @@ fn arguments_from_params(param_list: &ast::ParamList) -> String {
true => format!("&mut {name}"),
false => name.to_string(),
},
None => "_".to_string(),
None => "_".to_owned(),
},
_ => "_".to_string(),
_ => "_".to_owned(),
});
args_iter.format(", ").to_string()
}

View File

@ -162,7 +162,7 @@ fn make_record_field_list(
fn name_from_field(field: &ast::RecordExprField) -> ast::Name {
let text = match field.name_ref() {
Some(it) => it.to_string(),
None => name_from_field_shorthand(field).unwrap_or("unknown".to_string()),
None => name_from_field_shorthand(field).unwrap_or("unknown".to_owned()),
};
make::name(&text)
}

View File

@ -202,7 +202,7 @@ fn get_adt_source(
let file = ctx.sema.parse(range.file_id);
let adt_source =
ctx.sema.find_node_at_offset_with_macros(file.syntax(), range.range.start())?;
find_struct_impl(ctx, &adt_source, &[fn_name.to_string()]).map(|impl_| (impl_, range.file_id))
find_struct_impl(ctx, &adt_source, &[fn_name.to_owned()]).map(|impl_| (impl_, range.file_id))
}
struct FunctionTemplate {
@ -1007,7 +1007,7 @@ fn fn_arg_name(sema: &Semantics<'_, RootDatabase>, arg_expr: &ast::Expr) -> Stri
name
}
Some(name) => name,
None => "arg".to_string(),
None => "arg".to_owned(),
}
}

View File

@ -79,7 +79,7 @@ pub(crate) fn generate_is_empty_from_len(acc: &mut Assists, ctx: &AssistContext<
pub fn is_empty(&self) -> bool {
self.len() == 0
}"#
.to_string();
.to_owned();
builder.insert(range.end(), code)
},
)

View File

@ -118,7 +118,7 @@ pub(crate) fn generate_trait_from_impl(acc: &mut Assists, ctx: &AssistContext<'_
let arg_list = if let Some(genpars) = impl_ast.generic_param_list() {
genpars.to_generic_args().to_string()
} else {
"".to_string()
"".to_owned()
};
if let Some(snippet_cap) = ctx.config.snippet_cap {

View File

@ -60,7 +60,7 @@ pub(crate) fn inline_const_as_literal(acc: &mut Assists, ctx: &AssistContext<'_>
let id = AssistId("inline_const_as_literal", AssistKind::RefactorInline);
let label = "Inline const as literal".to_string();
let label = "Inline const as literal".to_owned();
let target = variable.syntax().text_range();
return acc.add(id, label, target, |edit| {

View File

@ -41,7 +41,7 @@ pub(crate) fn inline_macro(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option
acc.add(
AssistId("inline_macro", AssistKind::RefactorInline),
"Inline macro".to_string(),
"Inline macro".to_owned(),
text_range,
|builder| builder.replace(text_range, expanded.to_string()),
)

View File

@ -129,7 +129,7 @@ fn generate_unique_lifetime_param_name(
type_params.lifetime_params().map(|p| p.syntax().text().to_string()).collect();
('a'..='z').map(|it| format!("'{it}")).find(|it| !used_lifetime_params.contains(it))
}
None => Some("'a".to_string()),
None => Some("'a".to_owned()),
}
.map(|it| make::lifetime(&it))
}

View File

@ -75,7 +75,7 @@ pub(crate) fn move_module_to_file(acc: &mut Assists, ctx: &AssistContext<'_>) ->
let contents = {
let items = module_items.dedent(IndentLevel(1)).to_string();
let mut items =
items.trim_start_matches('{').trim_end_matches('}').trim().to_string();
items.trim_start_matches('{').trim_end_matches('}').trim().to_owned();
if !items.is_empty() {
items.push('\n');
}

View File

@ -33,7 +33,7 @@ pub(crate) fn reformat_number_literal(acc: &mut Assists, ctx: &AssistContext<'_>
}
let radix = literal.radix();
let mut converted = prefix.to_string();
let mut converted = prefix.to_owned();
converted.push_str(&add_group_separators(value, group_size(radix)));
converted.push_str(suffix);

View File

@ -474,7 +474,7 @@ pub fn test_some_range(a: int) -> bool {
&db,
&cfg,
AssistResolveStrategy::Single(SingleResolve {
assist_id: "SOMETHING_MISMATCHING".to_string(),
assist_id: "SOMETHING_MISMATCHING".to_owned(),
assist_kind: AssistKind::RefactorExtract,
}),
frange,
@ -520,7 +520,7 @@ pub fn test_some_range(a: int) -> bool {
&db,
&cfg,
AssistResolveStrategy::Single(SingleResolve {
assist_id: "extract_variable".to_string(),
assist_id: "extract_variable".to_owned(),
assist_kind: AssistKind::RefactorExtract,
}),
frange,

View File

@ -15,7 +15,7 @@ fn sourcegen_assists_docs() {
let mut buf = "
use super::check_doc_test;
"
.to_string();
.to_owned();
for assist in assists.iter() {
for (idx, section) in assist.sections.iter().enumerate() {
let test_id =
@ -101,7 +101,7 @@ impl Assist {
let mut assist = Assist { id, location, sections: Vec::new() };
while lines.peek().is_some() {
let doc = take_until(lines.by_ref(), "```").trim().to_string();
let doc = take_until(lines.by_ref(), "```").trim().to_owned();
assert!(
(doc.chars().next().unwrap().is_ascii_uppercase() && doc.ends_with('.'))
|| !assist.sections.is_empty(),

View File

@ -673,7 +673,7 @@ impl ReferenceConversion {
pub(crate) fn convert_type(&self, db: &dyn HirDatabase) -> ast::Type {
let ty = match self.conversion {
ReferenceConversionType::Copy => self.ty.display(db).to_string(),
ReferenceConversionType::AsRefStr => "&str".to_string(),
ReferenceConversionType::AsRefStr => "&str".to_owned(),
ReferenceConversionType::AsRefSlice => {
let type_argument_name =
self.ty.type_arguments().next().unwrap().display(db).to_string();

View File

@ -77,7 +77,7 @@ pub(crate) fn for_unique_generic_name(
p => p.to_string(),
})
.collect::<FxHashSet<_>>();
let mut name = name.to_string();
let mut name = name.to_owned();
let base_len = name.len();
let mut count = 0;
while param_names.contains(&name) {
@ -165,7 +165,7 @@ pub(crate) fn for_variable(expr: &ast::Expr, sema: &Semantics<'_, RootDatabase>)
}
}
"var_name".to_string()
"var_name".to_owned()
}
fn normalize(name: &str) -> Option<String> {

View File

@ -46,7 +46,7 @@ mod other_mod {}
let completion_list = completion_list_no_kw(case);
assert_eq!("md other_crate_a\n".to_string(), completion_list);
assert_eq!("md other_crate_a\n".to_owned(), completion_list);
}
#[test]
@ -66,6 +66,6 @@ mod other_mod {}
let completion_list = completion_list_no_kw(case);
assert_eq!("md other_crate_a\n".to_string(), completion_list);
assert_eq!("md other_crate_a\n".to_owned(), completion_list);
}
}

View File

@ -326,7 +326,7 @@ fn build_postfix_snippet_builder<'ctx>(
delete_range: TextRange,
) -> impl Fn(&str, &str, &str) -> Builder + 'ctx {
move |label, detail, snippet| {
let edit = TextEdit::replace(delete_range, snippet.to_string());
let edit = TextEdit::replace(delete_range, snippet.to_owned());
let mut item =
CompletionItem::new(CompletionItemKind::Snippet, ctx.source_range(), label);
item.detail(detail).snippet_edit(cap, edit);

View File

@ -665,7 +665,7 @@ impl<'a> CompletionContext<'a> {
// actual completion.
let file_with_fake_ident = {
let parse = db.parse(file_id);
let edit = Indel::insert(offset, COMPLETION_MARKER.to_string());
let edit = Indel::insert(offset, COMPLETION_MARKER.to_owned());
parse.reparse(&edit).tree()
};

View File

@ -553,7 +553,7 @@ impl Builder {
self.detail = detail.map(Into::into);
if let Some(detail) = &self.detail {
if never!(detail.contains('\n'), "multiline detail:\n{}", detail) {
self.detail = Some(detail.split('\n').next().unwrap().to_string());
self.detail = Some(detail.split('\n').next().unwrap().to_owned());
}
}
self

View File

@ -167,14 +167,14 @@ pub(crate) fn render_field(
if !expected_fn_type {
if let Some(receiver) = &dot_access.receiver {
if let Some(receiver) = ctx.completion.sema.original_ast_node(receiver.clone()) {
builder.insert(receiver.syntax().text_range().start(), "(".to_string());
builder.insert(ctx.source_range().end(), ")".to_string());
builder.insert(receiver.syntax().text_range().start(), "(".to_owned());
builder.insert(ctx.source_range().end(), ")".to_owned());
let is_parens_needed =
!matches!(dot_access.kind, DotAccessKind::Method { has_parens: true });
if is_parens_needed {
builder.insert(ctx.source_range().end(), "()".to_string());
builder.insert(ctx.source_range().end(), "()".to_owned());
}
}
}

View File

@ -184,12 +184,12 @@ pub(super) fn add_call_parens<'b>(
}
None => {
let name = match param.ty().as_adt() {
None => "_".to_string(),
None => "_".to_owned(),
Some(adt) => adt
.name(ctx.db)
.as_text()
.map(|s| to_lower_snake_case(s.as_str()))
.unwrap_or_else(|| "_".to_string()),
.unwrap_or_else(|| "_".to_owned()),
};
f(&format_args!("${{{}:{name}}}", index + offset))
}

View File

@ -140,7 +140,7 @@ fn render_pat(
StructKind::Record => {
render_record_as_pat(ctx.db(), ctx.snippet_cap(), fields, name, fields_omitted)
}
StructKind::Unit => name.to_string(),
StructKind::Unit => name.to_owned(),
};
let needs_ascription = matches!(

View File

@ -23,7 +23,7 @@ pub(crate) fn render_record_lit(
path: &str,
) -> RenderedLiteral {
if snippet_cap.is_none() {
return RenderedLiteral { literal: path.to_string(), detail: path.to_string() };
return RenderedLiteral { literal: path.to_owned(), detail: path.to_owned() };
}
let completions = fields.iter().enumerate().format_with(", ", |(idx, field), f| {
if snippet_cap.is_some() {
@ -52,7 +52,7 @@ pub(crate) fn render_tuple_lit(
path: &str,
) -> RenderedLiteral {
if snippet_cap.is_none() {
return RenderedLiteral { literal: path.to_string(), detail: path.to_string() };
return RenderedLiteral { literal: path.to_owned(), detail: path.to_owned() };
}
let completions = fields.iter().enumerate().format_with(", ", |(idx, _), f| {
if snippet_cap.is_some() {

View File

@ -203,7 +203,7 @@ mod tests {
use expect_test::{expect, Expect};
fn check(input: &str, expect: &Expect) {
let (output, exprs) = parse_format_exprs(input).unwrap_or(("-".to_string(), vec![]));
let (output, exprs) = parse_format_exprs(input).unwrap_or(("-".to_owned(), vec![]));
let outcome_repr = if !exprs.is_empty() {
format!("{output}; {}", with_placeholders(exprs).join(", "))
} else {

View File

@ -52,7 +52,7 @@ pub struct LintGroup {
generate_lint_descriptor(sh, &mut contents);
contents.push('\n');
let cargo = std::env::var("CARGO").unwrap_or_else(|_| "cargo".to_string());
let cargo = std::env::var("CARGO").unwrap_or_else(|_| "cargo".to_owned());
let unstable_book = project_root().join("./target/unstable-book-gen");
cmd!(
sh,
@ -283,7 +283,7 @@ fn generate_descriptor_clippy(buf: &mut String, path: &Path) {
let line = &line[..up_to];
let clippy_lint = clippy_lints.last_mut().expect("clippy lint must already exist");
clippy_lint.help = unescape(line).trim().to_string();
clippy_lint.help = unescape(line).trim().to_owned();
}
}
clippy_lints.sort_by(|lint, lint2| lint.id.cmp(&lint2.id));

View File

@ -16,7 +16,7 @@ pub(crate) fn inactive_code(
}
let inactive = DnfExpr::new(d.cfg.clone()).why_inactive(&d.opts);
let mut message = "code is inactive due to #[cfg] directives".to_string();
let mut message = "code is inactive due to #[cfg] directives".to_owned();
if let Some(inactive) = inactive {
let inactive_reasons = inactive.to_string();

View File

@ -9,7 +9,7 @@ pub(crate) fn incoherent_impl(ctx: &DiagnosticsContext<'_>, d: &hir::IncoherentI
Diagnostic::new_with_syntax_node_ptr(
ctx,
DiagnosticCode::RustcHardError("E0210"),
"cannot define inherent `impl` for foreign type".to_string(),
"cannot define inherent `impl` for foreign type".to_owned(),
InFile::new(d.file_id, d.impl_.into()),
)
}

View File

@ -512,7 +512,7 @@ impl BAD_TRAIT for () {
fn BadFunction() {}
}
"#,
std::iter::once("unused_variables".to_string()),
std::iter::once("unused_variables".to_owned()),
);
}

View File

@ -42,12 +42,12 @@ impl State {
v.push("Deserialize");
}
match v.as_slice() {
[] => "".to_string(),
[] => "".to_owned(),
[x] => format!("#[derive({x})]\n"),
[x, y] => format!("#[derive({x}, {y})]\n"),
_ => {
never!();
"".to_string()
"".to_owned()
}
}
}
@ -176,7 +176,7 @@ mod tests {
#[test]
fn diagnostic_for_simple_case() {
let mut config = DiagnosticsConfig::test_sample();
config.disabled.insert("syntax-error".to_string());
config.disabled.insert("syntax-error".to_owned());
check_diagnostics_with_config(
config,
r#"

View File

@ -99,7 +99,7 @@ pub macro panic {
// FIXME: This is a false-positive, the file is actually linked in via
// `include!` macro
config.disabled.insert("unlinked-file".to_string());
config.disabled.insert("unlinked-file".to_owned());
check_diagnostics_with_config(
config,
@ -268,8 +268,8 @@ fn f() {
#[test]
fn include_does_not_break_diagnostics() {
let mut config = DiagnosticsConfig::test_sample();
config.disabled.insert("inactive-code".to_string());
config.disabled.insert("unlinked-file".to_string());
config.disabled.insert("inactive-code".to_owned());
config.disabled.insert("unlinked-file".to_owned());
check_diagnostics_with_config(
config,
r#"

View File

@ -170,7 +170,7 @@ fn make_ty(ty: &hir::Type, db: &dyn HirDatabase, module: hir::Module) -> ast::Ty
let ty_str = match ty.as_adt() {
Some(adt) => adt.name(db).display(db.upcast()).to_string(),
None => {
ty.display_source_code(db, module.into(), false).ok().unwrap_or_else(|| "_".to_string())
ty.display_source_code(db, module.into(), false).ok().unwrap_or_else(|| "_".to_owned())
}
};

View File

@ -31,7 +31,7 @@ mod tests {
#[test]
fn empty_body() {
let mut config = DiagnosticsConfig::test_sample();
config.disabled.insert("syntax-error".to_string());
config.disabled.insert("syntax-error".to_owned());
check_diagnostics_with_config(
config,
r#"

View File

@ -19,7 +19,7 @@ pub(crate) fn need_mut(ctx: &DiagnosticsContext<'_>, d: &hir::NeedMut) -> Diagno
for source in d.local.sources(ctx.sema.db) {
let Some(ast) = source.name() else { continue };
// FIXME: macros
edit_builder.insert(ast.value.syntax().text_range().start(), "mut ".to_string());
edit_builder.insert(ast.value.syntax().text_range().start(), "mut ".to_owned());
}
let edit = edit_builder.finish();
Some(vec![fix(
@ -448,7 +448,7 @@ fn main(b: bool) {
&mut x;
}
"#,
std::iter::once("remove-unnecessary-else".to_string()),
std::iter::once("remove-unnecessary-else".to_owned()),
);
check_diagnostics_with_disabled(
r#"
@ -463,7 +463,7 @@ fn main(b: bool) {
&mut x;
}
"#,
std::iter::once("remove-unnecessary-else".to_string()),
std::iter::once("remove-unnecessary-else".to_owned()),
);
}

View File

@ -140,7 +140,7 @@ fn foo(x: usize) -> u8 {
} //^^^^^^^^^ 💡 weak: replace return <expr>; with <expr>
}
"#,
std::iter::once("remove-unnecessary-else".to_string()),
std::iter::once("remove-unnecessary-else".to_owned()),
);
}
@ -309,7 +309,7 @@ fn foo(x: usize) -> u8 {
}
}
"#,
std::iter::once("remove-unnecessary-else".to_string()),
std::iter::once("remove-unnecessary-else".to_owned()),
);
check_fix(
r#"

View File

@ -90,7 +90,7 @@ mod tests {
use crate::tests::{check_diagnostics, check_diagnostics_with_disabled, check_fix};
fn check_diagnostics_with_needless_return_disabled(ra_fixture: &str) {
check_diagnostics_with_disabled(ra_fixture, std::iter::once("needless_return".to_string()));
check_diagnostics_with_disabled(ra_fixture, std::iter::once("needless_return".to_owned()));
}
#[test]

View File

@ -63,8 +63,8 @@ mod tests {
#[track_caller]
pub(crate) fn check_diagnostics(ra_fixture: &str) {
let mut config = DiagnosticsConfig::test_sample();
config.disabled.insert("inactive-code".to_string());
config.disabled.insert("E0599".to_string());
config.disabled.insert("inactive-code".to_owned());
config.disabled.insert("E0599".to_owned());
check_diagnostics_with_config(config, ra_fixture)
}

View File

@ -13,7 +13,7 @@ pub(crate) fn trait_impl_orphan(
ctx,
DiagnosticCode::RustcHardError("E0117"),
"only traits defined in the current crate can be implemented for arbitrary types"
.to_string(),
.to_owned(),
InFile::new(d.file_id, d.impl_.into()),
)
// Not yet checked for false positives

View File

@ -103,7 +103,7 @@ fn quickfix_for_redundant_assoc_item(
Some(vec![Assist {
id: AssistId("add assoc item def into trait def", AssistKind::QuickFix),
label: Label::new("Add assoc item def into trait def".to_string()),
label: Label::new("Add assoc item def into trait def".to_owned()),
group: None,
target: range,
source_change: Some(source_change_builder.finish()),

View File

@ -120,7 +120,7 @@ fn add_missing_ok_or_some(
let mut builder = TextEdit::builder();
builder.insert(expr.syntax().text_range().start(), format!("{variant_name}("));
builder.insert(expr.syntax().text_range().end(), ")".to_string());
builder.insert(expr.syntax().text_range().end(), ")".to_owned());
let source_change =
SourceChange::from_text_edit(expr_ptr.file_id.original_file(ctx.sema.db), builder.finish());
let name = format!("Wrap in {variant_name}");
@ -174,7 +174,7 @@ fn str_ref_to_owned(
let expr = expr_ptr.value.to_node(&root);
let expr_range = expr.syntax().text_range();
let to_owned = ".to_owned()".to_string();
let to_owned = ".to_owned()".to_owned();
let edit = TextEdit::insert(expr.syntax().text_range().end(), to_owned);
let source_change =
@ -729,7 +729,7 @@ fn f() -> i32 {
}
fn g() { return; }
"#,
std::iter::once("needless_return".to_string()),
std::iter::once("needless_return".to_owned()),
);
}

View File

@ -10,7 +10,7 @@ pub(crate) fn unimplemented_builtin_macro(
Diagnostic::new_with_syntax_node_ptr(
ctx,
DiagnosticCode::Ra("unimplemented-builtin-macro", Severity::WeakWarning),
"unimplemented built-in macro".to_string(),
"unimplemented built-in macro".to_owned(),
d.node,
)
}

View File

@ -65,7 +65,7 @@ fn method_fix(
let FileRange { range, file_id } = ctx.sema.original_range_opt(expr.syntax())?;
Some(vec![Assist {
id: AssistId("expected-field-found-method-call-fix", AssistKind::QuickFix),
label: Label::new("Use parentheses to call the method".to_string()),
label: Label::new("Use parentheses to call the method".to_owned()),
group: None,
target: range,
source_change: Some(SourceChange::from_text_edit(

View File

@ -101,7 +101,7 @@ fn field_fix(
};
Some(Assist {
id: AssistId("expected-method-found-field-fix", AssistKind::QuickFix),
label: Label::new("Use parentheses to call the value of the field".to_string()),
label: Label::new("Use parentheses to call the value of the field".to_owned()),
group: None,
target: range,
source_change: Some(SourceChange::from_iter([

View File

@ -16,7 +16,7 @@ pub(crate) fn unresolved_module(
ctx,
DiagnosticCode::RustcHardError("E0583"),
match &*d.candidates {
[] => "unresolved module".to_string(),
[] => "unresolved module".to_owned(),
[candidate] => format!("unresolved module, can't find module file: {candidate}"),
[candidates @ .., last] => {
format!(
@ -46,7 +46,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::UnresolvedModule) -> Option<Vec<
anchor: d.decl.file_id.original_file(ctx.sema.db),
path: candidate.clone(),
},
initial_contents: "".to_string(),
initial_contents: "".to_owned(),
}
.into(),
unresolved_module.syntax().text_range(),

View File

@ -27,7 +27,7 @@ pub(crate) fn unresolved_proc_macro(
let not_expanded_message = match &d.macro_name {
Some(name) => format!("proc macro `{name}` not expanded"),
None => "proc macro not expanded".to_string(),
None => "proc macro not expanded".to_owned(),
};
let severity = if config_enabled { Severity::Error } else { Severity::WeakWarning };
let def_map = ctx.sema.db.crate_def_map(d.krate);

View File

@ -40,7 +40,7 @@ pub(crate) fn useless_braces(
acc.push(
Diagnostic::new(
DiagnosticCode::RustcLint("unused_braces"),
"Unnecessary braces in use statement".to_string(),
"Unnecessary braces in use statement".to_owned(),
FileRange { file_id, range: use_range },
)
.with_main_node(InFile::new(file_id.into(), node.clone()))
@ -112,7 +112,7 @@ mod a {
);
let mut config = DiagnosticsConfig::test_sample();
config.disabled.insert("syntax-error".to_string());
config.disabled.insert("syntax-error".to_owned());
check_diagnostics_with_config(
config,
r#"

View File

@ -563,7 +563,7 @@ fn unresolved_fix(id: &'static str, label: &str, target: TextRange) -> Assist {
assert!(!id.contains(' '));
Assist {
id: AssistId(id, AssistKind::QuickFix),
label: Label::new(label.to_string()),
label: Label::new(label.to_owned()),
group: None,
target,
source_change: None,

View File

@ -108,7 +108,7 @@ pub(crate) fn check_no_fix(ra_fixture: &str) {
#[track_caller]
pub(crate) fn check_diagnostics(ra_fixture: &str) {
let mut config = DiagnosticsConfig::test_sample();
config.disabled.insert("inactive-code".to_string());
config.disabled.insert("inactive-code".to_owned());
check_diagnostics_with_config(config, ra_fixture)
}
@ -207,8 +207,8 @@ fn minicore_smoke_test() {
let source = minicore.source_code();
let mut config = DiagnosticsConfig::test_sample();
// This should be ignored since we conditionally remove code which creates single item use with braces
config.disabled.insert("unused_braces".to_string());
config.disabled.insert("unused_variables".to_string());
config.disabled.insert("unused_braces".to_owned());
config.disabled.insert("unused_variables".to_owned());
check_diagnostics_with_config(config, &source);
}

View File

@ -456,7 +456,7 @@ impl<'db, 'sema> Matcher<'db, 'sema> {
SyntaxElement::Token(t) => Some(t.clone()),
SyntaxElement::Node(n) => n.first_token(),
})
.map(|p| p.text().to_string());
.map(|p| p.text().to_owned());
let first_matched_token = child.clone();
let mut last_matched_token = child;
// Read code tokens util we reach one equal to the next token from our pattern
@ -795,7 +795,7 @@ mod tests {
let edits = match_finder.edits();
assert_eq!(edits.len(), 1);
let edit = &edits[&position.file_id];
let mut after = input.to_string();
let mut after = input.to_owned();
edit.apply(&mut after);
assert_eq!(after, "fn foo() {} fn bar() {} fn main() { bar(1+2); }");
}

View File

@ -152,7 +152,7 @@ impl FromStr for SsrRule {
.next()
.ok_or_else(|| SsrError("Cannot find delimiter `==>>`".into()))?
.trim()
.to_string();
.to_owned();
if it.next().is_some() {
return Err(SsrError("More than one delimiter found".into()));
}

View File

@ -58,7 +58,7 @@ pub(crate) fn rewrite_links(db: &RootDatabase, markdown: &str, definition: Defin
// and valid URLs so we choose to be too eager to try to resolve what might be
// a URL.
if target.contains("://") {
(Some(LinkType::Inline), target.to_string(), title.to_string())
(Some(LinkType::Inline), target.to_owned(), title.to_owned())
} else {
// Two possibilities:
// * path-based links: `../../module/struct.MyStruct.html`
@ -66,9 +66,9 @@ pub(crate) fn rewrite_links(db: &RootDatabase, markdown: &str, definition: Defin
if let Some((target, title)) = rewrite_intra_doc_link(db, definition, target, title) {
(None, target, title)
} else if let Some(target) = rewrite_url_link(db, definition, target) {
(Some(LinkType::Inline), target, title.to_string())
(Some(LinkType::Inline), target, title.to_owned())
} else {
(None, target.to_string(), title.to_string())
(None, target.to_owned(), title.to_owned())
}
}
});
@ -186,7 +186,7 @@ pub(crate) fn extract_definitions_from_docs(
let (link, ns) = parse_intra_doc_link(&target);
Some((
TextRange::new(range.start.try_into().ok()?, range.end.try_into().ok()?),
link.to_string(),
link.to_owned(),
ns,
))
}
@ -388,7 +388,7 @@ fn rewrite_intra_doc_link(
url = url.join(&file).ok()?;
url.set_fragment(anchor);
Some((url.into(), strip_prefixes_suffixes(title).to_string()))
Some((url.into(), strip_prefixes_suffixes(title).to_owned()))
}
/// Try to resolve path to local documentation via path-based links (i.e. `../gateway/struct.Shard.html`).

View File

@ -193,7 +193,7 @@ fn structure_token(token: SyntaxToken) -> Option<StructureNode> {
if let Some(region_name) = text.strip_prefix("// region:").map(str::trim) {
return Some(StructureNode {
parent: None,
label: region_name.to_string(),
label: region_name.to_owned(),
navigation_range: comment.syntax().text_range(),
node_range: comment.syntax().text_range(),
kind: StructureNodeKind::Region,

View File

@ -521,7 +521,7 @@ mod tests {
ReferenceCategory::Import => "import",
ReferenceCategory::Test => "test",
}
.to_string()
.to_owned()
}),
)
})

View File

@ -621,7 +621,7 @@ fn closure_ty(
})
.join("\n");
if captures_rendered.trim().is_empty() {
captures_rendered = "This closure captures nothing".to_string();
captures_rendered = "This closure captures nothing".to_owned();
}
let mut targets: Vec<hir::ModuleDef> = Vec::new();
let mut push_new_def = |item: hir::ModuleDef| {
@ -823,7 +823,7 @@ fn keyword_hints(
}
}
_ => KeywordHint {
description: token.text().to_string(),
description: token.text().to_owned(),
keyword_mod,
actions: Vec::new(),
},
@ -835,9 +835,9 @@ fn keyword_hints(
Some(_) => format!("prim_{}", token.text()),
None => format!("{}_keyword", token.text()),
};
KeywordHint::new(token.text().to_string(), module)
KeywordHint::new(token.text().to_owned(), module)
}
T![Self] => KeywordHint::new(token.text().to_string(), "self_upper_keyword".into()),
_ => KeywordHint::new(token.text().to_string(), format!("{}_keyword", token.text())),
T![Self] => KeywordHint::new(token.text().to_owned(), "self_upper_keyword".into()),
_ => KeywordHint::new(token.text().to_owned(), format!("{}_keyword", token.text())),
}
}

View File

@ -15,8 +15,8 @@ use syntax::{algo::ancestors_at_offset, ast, AstNode, TextRange};
// |===
pub(crate) fn interpret_function(db: &RootDatabase, position: FilePosition) -> String {
let start_time = Instant::now();
let mut result = find_and_interpret(db, position)
.unwrap_or_else(|| "Not inside a function body".to_string());
let mut result =
find_and_interpret(db, position).unwrap_or_else(|| "Not inside a function body".to_owned());
let duration = Instant::now() - start_time;
writeln!(result).unwrap();
writeln!(result, "----------------------").unwrap();

View File

@ -115,7 +115,7 @@ fn remove_newline(
let range = TextRange::at(offset, ((n_spaces_after_line_break + 1) as u32).into());
let replace_with = if no_space { "" } else { " " };
edit.replace(range, replace_with.to_string());
edit.replace(range, replace_with.to_owned());
return;
}
@ -140,7 +140,7 @@ fn remove_newline(
};
edit.replace(
TextRange::new(prev.text_range().start(), token.text_range().end()),
space.to_string(),
space.to_owned(),
);
return;
}
@ -154,7 +154,7 @@ fn remove_newline(
Some(_) => cov_mark::hit!(join_two_ifs_with_existing_else),
None => {
cov_mark::hit!(join_two_ifs);
edit.replace(token.text_range(), " else ".to_string());
edit.replace(token.text_range(), " else ".to_owned());
return;
}
}
@ -203,7 +203,7 @@ fn remove_newline(
}
// Remove newline but add a computed amount of whitespace characters
edit.replace(token.text_range(), compute_ws(prev.kind(), next.kind()).to_string());
edit.replace(token.text_range(), compute_ws(prev.kind(), next.kind()).to_owned());
}
fn join_single_expr_block(edit: &mut TextEditBuilder, token: &SyntaxToken) -> Option<()> {

View File

@ -238,7 +238,7 @@ impl Analysis {
let mut host = AnalysisHost::default();
let file_id = FileId::from_raw(0);
let mut file_set = FileSet::default();
file_set.insert(file_id, VfsPath::new_virtual_path("/main.rs".to_string()));
file_set.insert(file_id, VfsPath::new_virtual_path("/main.rs".to_owned()));
let source_root = SourceRoot::new_local(file_set);
let mut change = Change::new();

View File

@ -383,18 +383,18 @@ pub(crate) fn def_to_moniker(
let (name, repo, version) = match krate.origin(db) {
CrateOrigin::Library { repo, name } => (name, repo, krate.version(db)),
CrateOrigin::Local { repo, name } => (
name.unwrap_or(krate.display_name(db)?.canonical_name().to_string()),
name.unwrap_or(krate.display_name(db)?.canonical_name().to_owned()),
repo,
krate.version(db),
),
CrateOrigin::Rustc { name } => (
name.clone(),
Some("https://github.com/rust-lang/rust/".to_string()),
Some("https://github.com/rust-lang/rust/".to_owned()),
Some(format!("https://github.com/rust-lang/rust/compiler/{name}",)),
),
CrateOrigin::Lang(lang) => (
krate.display_name(db)?.canonical_name().to_string(),
Some("https://github.com/rust-lang/rust/".to_string()),
krate.display_name(db)?.canonical_name().to_owned(),
Some("https://github.com/rust-lang/rust/".to_owned()),
Some(match lang {
LangCrateOrigin::Other => {
"https://github.com/rust-lang/rust/library/".into()

View File

@ -860,7 +860,7 @@ fn foo() { enum FooInner { } }
"#,
);
let navs = analysis.symbol_search(Query::new("FooInner".to_string()), !0).unwrap();
let navs = analysis.symbol_search(Query::new("FooInner".to_owned()), !0).unwrap();
expect![[r#"
[
NavigationTarget {
@ -898,7 +898,7 @@ struct Foo;
"#,
);
let navs = analysis.symbol_search(Query::new("foo".to_string()), !0).unwrap();
let navs = analysis.symbol_search(Query::new("foo".to_owned()), !0).unwrap();
assert_eq!(navs.len(), 2)
}
}

View File

@ -105,7 +105,7 @@ pub(crate) fn parallel_prime_caches(
work_sender
.send((
crate_id,
graph[crate_id].display_name.as_deref().unwrap_or_default().to_string(),
graph[crate_id].display_name.as_deref().unwrap_or_default().to_owned(),
))
.ok();
}

View File

@ -72,7 +72,7 @@ impl Runnable {
RunnableKind::Bench { test_id } => format!("bench {test_id}"),
RunnableKind::DocTest { test_id, .. } => format!("doctest {test_id}"),
RunnableKind::Bin => {
target.map_or_else(|| "run binary".to_string(), |t| format!("run {t}"))
target.map_or_else(|| "run binary".to_owned(), |t| format!("run {t}"))
}
}
}

View File

@ -41,7 +41,7 @@ pub(crate) fn ssr_assists(
for (label, source_change) in assists.into_iter() {
let assist = Assist {
id,
label: Label::new(label.to_string()),
label: Label::new(label.to_owned()),
group: Some(GroupLabel("Apply SSR".into())),
target: comment_range,
source_change,

View File

@ -105,7 +105,7 @@ pub(crate) fn status(db: &RootDatabase, file_id: Option<FileId>) -> String {
}
}
buf.trim().to_string()
buf.trim().to_owned()
}
fn collect_query<'q, Q>(table: QueryTable<'q, Q>) -> <Q as QueryCollect>::Collector

View File

@ -55,7 +55,7 @@ fn syntax_tree_for_string(token: &SyntaxToken, text_range: TextRange) -> Option<
fn syntax_tree_for_token(node: &SyntaxToken, text_range: TextRange) -> Option<String> {
// Range of the full node
let node_range = node.text_range();
let text = node.text().to_string();
let text = node.text().to_owned();
// We start at some point inside the node
// Either we have selected the whole string

View File

@ -149,10 +149,7 @@ fn on_opening_bracket_typed(
let tree: ast::UseTree = find_node_at_offset(file.syntax(), offset)?;
Some(TextEdit::insert(
tree.syntax().text_range().end() + TextSize::of("{"),
"}".to_string(),
))
Some(TextEdit::insert(tree.syntax().text_range().end() + TextSize::of("{"), "}".to_owned()))
}
fn bracket_expr(
@ -235,7 +232,7 @@ fn on_eq_typed(file: &SourceFile, offset: TextSize) -> Option<TextEdit> {
return None;
}
let offset = expr.syntax().text_range().end();
Some(TextEdit::insert(offset, ";".to_string()))
Some(TextEdit::insert(offset, ";".to_owned()))
}
/// `a =$0 b;` removes the semicolon if an expression is valid in this context.
@ -275,7 +272,7 @@ fn on_eq_typed(file: &SourceFile, offset: TextSize) -> Option<TextEdit> {
return None;
}
let offset = let_stmt.syntax().text_range().end();
Some(TextEdit::insert(offset, ";".to_string()))
Some(TextEdit::insert(offset, ";".to_owned()))
}
}
@ -353,7 +350,7 @@ fn on_left_angle_typed(file: &SourceFile, offset: TextSize) -> Option<ExtendedTe
if let Some(t) = file.syntax().token_at_offset(offset).left_biased() {
if T![impl] == t.kind() {
return Some(ExtendedTextEdit {
edit: TextEdit::replace(range, "<$0>".to_string()),
edit: TextEdit::replace(range, "<$0>".to_owned()),
is_snippet: true,
});
}
@ -363,7 +360,7 @@ fn on_left_angle_typed(file: &SourceFile, offset: TextSize) -> Option<ExtendedTe
ast::GenericParamList::can_cast(n.kind()) || ast::GenericArgList::can_cast(n.kind())
}) {
Some(ExtendedTextEdit {
edit: TextEdit::replace(range, "<$0>".to_string()),
edit: TextEdit::replace(range, "<$0>".to_owned()),
is_snippet: true,
})
} else {
@ -383,7 +380,7 @@ fn on_right_angle_typed(file: &SourceFile, offset: TextSize) -> Option<TextEdit>
}
find_node_at_offset::<ast::RetType>(file.syntax(), offset)?;
Some(TextEdit::insert(after_arrow, " ".to_string()))
Some(TextEdit::insert(after_arrow, " ".to_owned()))
}
#[cfg(test)]

View File

@ -12,7 +12,7 @@ use syntax::{algo::ancestors_at_offset, ast, AstNode};
// |===
// image::https://user-images.githubusercontent.com/48062697/113065588-068bdb80-91b1-11eb-9a78-0b4ef1e972fb.gif[]
pub(crate) fn view_hir(db: &RootDatabase, position: FilePosition) -> String {
body_hir(db, position).unwrap_or_else(|| "Not inside a function body".to_string())
body_hir(db, position).unwrap_or_else(|| "Not inside a function body".to_owned())
}
fn body_hir(db: &RootDatabase, position: FilePosition) -> Option<String> {

View File

@ -11,7 +11,7 @@ use syntax::{algo::ancestors_at_offset, ast, AstNode};
// | VS Code | **rust-analyzer: View Mir**
// |===
pub(crate) fn view_mir(db: &RootDatabase, position: FilePosition) -> String {
body_mir(db, position).unwrap_or_else(|| "Not inside a function body".to_string())
body_mir(db, position).unwrap_or_else(|| "Not inside a function body".to_owned())
}
fn body_mir(db: &RootDatabase, position: FilePosition) -> Option<String> {

View File

@ -279,7 +279,7 @@ pub fn load_proc_macro(
let dylib = MacroDylib::new(path.to_path_buf());
let vec = server.load_dylib(dylib).map_err(|e| format!("{e}"))?;
if vec.is_empty() {
return Err("proc macro library returned no proc macros".to_string());
return Err("proc macro library returned no proc macros".to_owned());
}
Ok(vec
.into_iter()
@ -382,7 +382,7 @@ impl ProcMacroExpander for Expander {
call_site: Span,
mixed_site: Span,
) -> Result<tt::Subtree<Span>, ProcMacroExpansionError> {
let env = env.iter().map(|(k, v)| (k.to_string(), v.to_string())).collect();
let env = env.iter().map(|(k, v)| (k.to_owned(), v.to_owned())).collect();
match self.0.expand(subtree, attrs, env, def_site, call_site, mixed_site) {
Ok(Ok(subtree)) => Ok(subtree),
Ok(Err(err)) => Err(ProcMacroExpansionError::Panic(err.0)),

View File

@ -146,7 +146,7 @@ where
}
parser::Step::Enter { kind } => tree_sink.start_node(kind),
parser::Step::Exit => tree_sink.finish_node(),
parser::Step::Error { msg } => tree_sink.error(msg.to_string()),
parser::Step::Error { msg } => tree_sink.error(msg.to_owned()),
}
}
tree_sink.finish()

View File

@ -149,7 +149,7 @@ impl<'a> Converter<'a> {
if let Some(err) = err {
let token = self.res.len() as u32;
let msg = err.to_string();
let msg = err.to_owned();
self.res.error.push(LexError { msg, token });
}
}

View File

@ -60,9 +60,9 @@ fn collect_tests(s: &str) -> Vec<Test> {
for comment_block in sourcegen::CommentBlock::extract_untagged(s) {
let first_line = &comment_block.contents[0];
let (name, ok) = if let Some(name) = first_line.strip_prefix("test ") {
(name.to_string(), true)
(name.to_owned(), true)
} else if let Some(name) = first_line.strip_prefix("test_err ") {
(name.to_string(), false)
(name.to_owned(), false)
} else {
continue;
};

View File

@ -197,7 +197,7 @@ impl ProcMacro {
&deserialize_span_data_index_map(&resp.span_data_table),
)
})),
_ => Err(ServerError { message: "unexpected response".to_string(), io: None }),
_ => Err(ServerError { message: "unexpected response".to_owned(), io: None }),
}
}
}

View File

@ -419,7 +419,7 @@ impl<'a, 'span, S: InternableSpan> Writer<'a, 'span, S> {
let table = &mut self.text;
*self.string_table.entry(text).or_insert_with(|| {
let idx = table.len();
table.push(text.to_string());
table.push(text.to_owned());
idx as u32
})
}

View File

@ -78,7 +78,7 @@ impl ProcMacroProcessSrv {
match response {
Response::ApiVersionCheck(version) => Ok(version),
_ => Err(ServerError { message: "unexpected response".to_string(), io: None }),
_ => Err(ServerError { message: "unexpected response".to_owned(), io: None }),
}
}
@ -90,7 +90,7 @@ impl ProcMacroProcessSrv {
match response {
Response::SetConfig(crate::msg::ServerConfig { span_mode }) => Ok(span_mode),
_ => Err(ServerError { message: "unexpected response".to_string(), io: None }),
_ => Err(ServerError { message: "unexpected response".to_owned(), io: None }),
}
}
@ -104,7 +104,7 @@ impl ProcMacroProcessSrv {
match response {
Response::ListMacros(it) => Ok(it),
_ => Err(ServerError { message: "unexpected response".to_string(), io: None }),
_ => Err(ServerError { message: "unexpected response".to_owned(), io: None }),
}
}

View File

@ -38,7 +38,7 @@ pub fn read_dylib_info(dylib_path: &AbsPath) -> io::Result<RustCInfo> {
let version_part = items.next().ok_or_else(|| err!("no version string"))?;
let mut version_parts = version_part.split('-');
let version = version_parts.next().ok_or_else(|| err!("no version"))?;
let channel = version_parts.next().unwrap_or_default().to_string();
let channel = version_parts.next().unwrap_or_default().to_owned();
let commit = match items.next() {
Some(commit) => {

View File

@ -322,7 +322,7 @@ impl WorkspaceBuildScripts {
let mut deserializer = serde_json::Deserializer::from_str(line);
deserializer.disable_recursion_limit();
let message = Message::deserialize(&mut deserializer)
.unwrap_or_else(|_| Message::TextLine(line.to_string()));
.unwrap_or_else(|_| Message::TextLine(line.to_owned()));
match message {
Message::BuildScriptExecuted(mut message) => {
@ -356,7 +356,7 @@ impl WorkspaceBuildScripts {
if let Some(out_dir) =
out_dir.as_os_str().to_str().map(|s| s.to_owned())
{
data.envs.push(("OUT_DIR".to_string(), out_dir));
data.envs.push(("OUT_DIR".to_owned(), out_dir));
}
data.out_dir = Some(out_dir);
data.cfgs = cfgs;
@ -396,7 +396,7 @@ impl WorkspaceBuildScripts {
let errors = if !output.status.success() {
let errors = errors.into_inner();
Some(if errors.is_empty() { "cargo check failed".to_string() } else { errors })
Some(if errors.is_empty() { "cargo check failed".to_owned() } else { errors })
} else {
None
};
@ -490,7 +490,7 @@ impl WorkspaceBuildScripts {
// FIXME: Find a better way to know if it is a dylib.
fn is_dylib(path: &Utf8Path) -> bool {
match path.extension().map(|e| e.to_string().to_lowercase()) {
match path.extension().map(|e| e.to_owned().to_lowercase()) {
None => false,
Some(ext) => matches!(ext.as_str(), "dll" | "dylib" | "so"),
}

View File

@ -285,7 +285,7 @@ impl CargoWorkspace {
// FIXME: Fetching metadata is a slow process, as it might require
// calling crates.io. We should be reporting progress here, but it's
// unclear whether cargo itself supports it.
progress("metadata".to_string());
progress("metadata".to_owned());
(|| -> Result<cargo_metadata::Metadata, cargo_metadata::Error> {
let mut command = meta.cargo_command();
@ -502,7 +502,7 @@ fn rustc_discover_host_triple(
let field = "host: ";
let target = stdout.lines().find_map(|l| l.strip_prefix(field));
if let Some(target) = target {
Some(target.to_string())
Some(target.to_owned())
} else {
// If we fail to resolve the host platform, it's not the end of the world.
tracing::info!("rustc -vV did not report host platform, got:\n{}", stdout);
@ -536,7 +536,7 @@ fn parse_output_cargo_config_build_target(stdout: String) -> Vec<String> {
let trimmed = stdout.trim_start_matches("build.target = ").trim_matches('"');
if !trimmed.starts_with('[') {
return [trimmed.to_string()].to_vec();
return [trimmed.to_owned()].to_vec();
}
let res = serde_json::from_str(trimmed);

View File

@ -19,7 +19,7 @@ impl FromStr for CfgFlag {
if !(value.starts_with('"') && value.ends_with('"')) {
return Err(format!("Invalid cfg ({s:?}), value should be in quotes"));
}
let key = key.to_string();
let key = key.to_owned();
let value = value[1..value.len() - 1].to_string();
CfgFlag::KeyValue { key, value }
}

View File

@ -167,7 +167,7 @@ fn utf8_stdout(mut cmd: Command) -> anyhow::Result<String> {
}
}
let stdout = String::from_utf8(output.stdout)?;
Ok(stdout.trim().to_string())
Ok(stdout.trim().to_owned())
}
#[derive(Copy, Clone, Debug, Default, PartialEq, Eq)]

View File

@ -33,7 +33,7 @@ pub(crate) fn get(
res.push(CfgFlag::Atom("target_thread_local".into()));
for ty in ["8", "16", "32", "64", "cas", "ptr"] {
for key in ["target_has_atomic", "target_has_atomic_load_store"] {
res.push(CfgFlag::KeyValue { key: key.to_string(), value: ty.into() });
res.push(CfgFlag::KeyValue { key: key.to_owned(), value: ty.into() });
}
}

View File

@ -129,7 +129,7 @@ fn get_fake_sysroot() -> Sysroot {
}
fn rooted_project_json(data: ProjectJsonData) -> ProjectJson {
let mut root = "$ROOT$".to_string();
let mut root = "$ROOT$".to_owned();
replace_root(&mut root, true);
let path = Path::new(&root);
let base = AbsPath::assert(path);

Some files were not shown because too many files have changed in this diff Show More