Remove unnecessary sigils around Symbol::as_str() calls.

Nicholas Nethercote 2021-12-15 14:39:23 +11:00
parent 8cddcd39ba
commit 056d48a2c9
104 changed files with 189 additions and 192 deletions
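
Editor's note, not part of the commit: every hunk below rewrites a call such as &symbol.as_str() or &*symbol.as_str() to a plain symbol.as_str(). The sigils date from an earlier signature where as_str() apparently returned a wrapper value that only dereferenced to str; with Symbol::as_str() handing back a &str directly, the extra borrow and deref add nothing. A minimal self-contained Rust sketch of the pattern, using a hypothetical Symbol stand-in rather than rustc's real interner-backed type:

// Hypothetical stand-in for rustc's interner-backed Symbol; only the
// call-site pattern matters here, not the real implementation.
struct Symbol(String);

impl Symbol {
    // With as_str returning &str directly, callers need no extra sigils.
    fn as_str(&self) -> &str {
        &self.0
    }
}

fn takes_str(s: &str) -> usize {
    s.len()
}

fn main() {
    let sym = Symbol(String::from("example"));
    // New style used throughout this commit: pass the &str straight through.
    assert_eq!(takes_str(sym.as_str()), 7);
    // Old style still compiles (a &&str coerces back to &str), but the
    // leading & is redundant, which is exactly the sigil being removed.
    assert_eq!(takes_str(&sym.as_str()), 7);
}

The same reasoning covers the &*name.as_str() call sites: reborrowing a &str is a no-op, so the plain call is the idiomatic form.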


@ -35,12 +35,12 @@ impl LitKind {
LitKind::Bool(symbol == kw::True)
}
token::Byte => {
return unescape_byte(&symbol.as_str())
return unescape_byte(symbol.as_str())
.map(LitKind::Byte)
.map_err(|_| LitError::LexerError);
}
token::Char => {
return unescape_char(&symbol.as_str())
return unescape_char(symbol.as_str())
.map(LitKind::Char)
.map_err(|_| LitError::LexerError);
}


@ -1278,7 +1278,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
}
pub(super) fn lower_abi(&mut self, abi: StrLit) -> abi::Abi {
abi::lookup(&abi.symbol_unescaped.as_str()).unwrap_or_else(|| {
abi::lookup(abi.symbol_unescaped.as_str()).unwrap_or_else(|| {
self.error_on_invalid_abi(abi);
abi::Abi::Rust
})


@ -61,7 +61,7 @@ impl<'a> PostExpansionVisitor<'a> {
fn check_abi(&self, abi: ast::StrLit) {
let ast::StrLit { symbol_unescaped, span, .. } = abi;
match &*symbol_unescaped.as_str() {
match symbol_unescaped.as_str() {
// Stable
"Rust" | "C" | "cdecl" | "stdcall" | "fastcall" | "aapcs" | "win64" | "sysv64"
| "system" => {}


@ -204,7 +204,7 @@ pub fn literal_to_string(lit: token::Lit) -> String {
};
if let Some(suffix) = suffix {
out.push_str(&suffix.as_str())
out.push_str(suffix.as_str())
}
out
@ -384,7 +384,7 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
}
fn print_symbol(&mut self, sym: Symbol, style: ast::StrStyle) {
self.print_string(&sym.as_str(), style);
self.print_string(sym.as_str(), style);
}
fn print_inner_attributes(&mut self, attrs: &[ast::Attribute]) {


@ -236,7 +236,7 @@ where
// These unwraps are safe because `get` ensures the meta item
// is a name/value pair string literal.
issue_num = match &*issue.unwrap().as_str() {
issue_num = match issue.unwrap().as_str() {
"none" => None,
issue => {
let emit_diag = |msg: &str| {
@ -301,7 +301,7 @@ where
match (feature, reason, issue) {
(Some(feature), reason, Some(_)) => {
if !rustc_lexer::is_ident(&feature.as_str()) {
if !rustc_lexer::is_ident(feature.as_str()) {
handle_errors(
&sess.parse_sess,
attr.span,
@ -535,7 +535,7 @@ pub fn eval_condition(
return false;
}
};
let min_version = match parse_version(&min_version.as_str(), false) {
let min_version = match parse_version(min_version.as_str(), false) {
Some(ver) => ver,
None => {
sess.span_diagnostic


@ -416,7 +416,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
tcx,
generics,
&mut err,
&param.name.as_str(),
param.name.as_str(),
"Copy",
None,
);


@ -206,7 +206,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
{
let local_info = &self.body.local_decls[local].local_info;
if let Some(box LocalInfo::StaticRef { def_id, .. }) = *local_info {
buf.push_str(&self.infcx.tcx.item_name(def_id).as_str());
buf.push_str(self.infcx.tcx.item_name(def_id).as_str());
} else {
unreachable!();
}
@ -318,7 +318,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
let decl = &self.body.local_decls[local];
match self.local_names[local] {
Some(name) if !decl.from_compiler_desugaring() => {
buf.push_str(&name.as_str());
buf.push_str(name.as_str());
Ok(())
}
_ => Err(()),


@ -572,7 +572,7 @@ fn expand_preparsed_asm(ecx: &mut ExtCtxt<'_>, args: AsmArgs) -> Option<ast::Inl
template_snippet.as_ref().map(|s| Symbol::intern(s)),
template_sp,
));
let template_str = &template_str.as_str();
let template_str = template_str.as_str();
if let Some(InlineAsmArch::X86 | InlineAsmArch::X86_64) = ecx.sess.asm_arch {
let find_span = |needle: &str| -> Span {


@ -21,7 +21,7 @@ pub fn expand_concat(
match e.kind {
ast::ExprKind::Lit(ref lit) => match lit.kind {
ast::LitKind::Str(ref s, _) | ast::LitKind::Float(ref s, _) => {
accumulator.push_str(&s.as_str());
accumulator.push_str(s.as_str());
}
ast::LitKind::Char(c) => {
accumulator.push(c);


@ -29,7 +29,7 @@ pub fn expand_concat_idents<'cx>(
} else {
if let TokenTree::Token(token) = e {
if let Some((ident, _)) = token.ident() {
res_str.push_str(&ident.name.as_str());
res_str.push_str(ident.name.as_str());
continue;
}
}


@ -121,7 +121,7 @@ fn report_bad_target(sess: &Session, item: &Annotatable, span: Span) -> bool {
fn report_unexpected_literal(sess: &Session, lit: &ast::Lit) {
let help_msg = match lit.token.kind {
token::Str if rustc_lexer::is_ident(&lit.token.symbol.as_str()) => {
token::Str if rustc_lexer::is_ident(lit.token.symbol.as_str()) => {
format!("try using `#[derive({})]`", lit.token.symbol)
}
_ => "for example, write `#[derive(Debug)]` for `Debug`".to_string(),


@ -80,11 +80,11 @@ pub fn expand_env<'cx>(
}
let sp = cx.with_def_site_ctxt(sp);
let value = env::var(&*var.as_str()).ok().as_deref().map(Symbol::intern);
let value = env::var(var.as_str()).ok().as_deref().map(Symbol::intern);
cx.sess.parse_sess.env_depinfo.borrow_mut().insert((var, value));
let e = match value {
None => {
cx.span_err(sp, &msg.as_str());
cx.span_err(sp, msg.as_str());
return DummyResult::any(sp);
}
Some(value) => cx.expr_str(sp, value),


@ -955,7 +955,7 @@ pub fn expand_preparsed_format_args(
ast::StrStyle::Raw(raw) => Some(raw as usize),
};
let fmt_str = &fmt_str.as_str(); // for the suggestions below
let fmt_str = fmt_str.as_str(); // for the suggestions below
let fmt_snippet = ecx.source_map().span_to_snippet(fmt_sp).ok();
let mut parser = parse::Parser::new(
fmt_str,


@ -84,7 +84,7 @@ fn reuse_workproduct_for_cgu(
let work_product = cgu.work_product(tcx);
if let Some(saved_file) = &work_product.saved_file {
let obj_out =
tcx.output_filenames(()).temp_path(OutputType::Object, Some(&cgu.name().as_str()));
tcx.output_filenames(()).temp_path(OutputType::Object, Some(cgu.name().as_str()));
object = Some(obj_out.clone());
let source_file = rustc_incremental::in_incr_comp_dir_sess(&tcx.sess, &saved_file);
if let Err(err) = rustc_fs_util::link_or_copy(&source_file, &obj_out) {
@ -176,7 +176,7 @@ fn module_codegen(
)
});
codegen_global_asm(tcx, &cgu.name().as_str(), &cx.global_asm);
codegen_global_asm(tcx, cgu.name().as_str(), &cx.global_asm);
codegen_result
}
@ -207,7 +207,7 @@ pub(crate) fn run_aot(
cgus.iter()
.map(|cgu| {
let cgu_reuse = determine_cgu_reuse(tcx, cgu);
tcx.sess.cgu_reuse_tracker.set_actual_reuse(&cgu.name().as_str(), cgu_reuse);
tcx.sess.cgu_reuse_tracker.set_actual_reuse(cgu.name().as_str(), cgu_reuse);
match cgu_reuse {
_ if backend_config.disable_incr_cache => {}


@ -33,7 +33,7 @@ impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> {
return value;
}
let global = self.global_string(&*symbol.as_str());
let global = self.global_string(symbol.as_str());
self.const_cstr_cache.borrow_mut().insert(symbol, global);
global


@ -17,7 +17,7 @@ impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> {
global.set_tls_model(self.tls_model);
}
if let Some(link_section) = link_section {
global.set_link_section(&link_section.as_str());
global.set_link_section(link_section.as_str());
}
global
}
@ -53,7 +53,7 @@ impl<'gcc, 'tcx> CodegenCx<'gcc, 'tcx> {
global.set_tls_model(self.tls_model);
}
if let Some(link_section) = link_section {
global.set_link_section(&link_section.as_str());
global.set_link_section(link_section.as_str());
}
let global_address = global.get_address(None);
self.globals.borrow_mut().insert(name.to_string(), global_address);


@ -88,7 +88,7 @@ impl<'a, 'gcc, 'tcx> IntrinsicCallMethods<'tcx> for Builder<'a, 'gcc, 'tcx> {
let arg_tys = sig.inputs();
let ret_ty = sig.output();
let name = tcx.item_name(def_id);
let name_str = &*name.as_str();
let name_str = name.as_str();
let llret_ty = self.layout_of(ret_ty).gcc_type(self, true);
let result = PlaceRef::new_sized(llresult, fn_abi.ret.layout);


@ -52,7 +52,7 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(bx: &mut Builder<'a, 'gcc, 'tcx>,
let sig =
tcx.normalize_erasing_late_bound_regions(ty::ParamEnv::reveal_all(), callee_ty.fn_sig(tcx));
let arg_tys = sig.inputs();
let name_str = &*name.as_str();
let name_str = name.as_str();
// every intrinsic below takes a SIMD vector as its first argument
require_simd!(arg_tys[0], "input");


@ -322,7 +322,7 @@ pub fn from_fn_attrs(cx: &CodegenCx<'ll, 'tcx>, llfn: &'ll Value, instance: ty::
.target_features
.iter()
.flat_map(|f| {
let feature = &f.as_str();
let feature = f.as_str();
llvm_util::to_llvm_feature(cx.tcx.sess, feature)
.into_iter()
.map(|f| format!("+{}", f))
@ -347,7 +347,7 @@ pub fn from_fn_attrs(cx: &CodegenCx<'ll, 'tcx>, llfn: &'ll Value, instance: ty::
let name =
codegen_fn_attrs.link_name.unwrap_or_else(|| cx.tcx.item_name(instance.def_id()));
let name = CString::new(&name.as_str()[..]).unwrap();
let name = CString::new(name.as_str()).unwrap();
llvm::AddFunctionAttrStringValue(
llfn,
llvm::AttributePlace::Function,


@ -82,7 +82,7 @@ pub fn compile_codegen_unit(
&[cgu_name.to_string(), cgu.size_estimate().to_string()],
);
// Instantiate monomorphizations without filling out definitions yet...
let llvm_module = ModuleLlvm::new(tcx, &cgu_name.as_str());
let llvm_module = ModuleLlvm::new(tcx, cgu_name.as_str());
{
let cx = CodegenCx::new(tcx, cgu, &llvm_module);
let mono_items = cx.codegen_unit.items_in_deterministic_order(cx.tcx);
@ -146,7 +146,7 @@ pub fn set_link_section(llval: &Value, attrs: &CodegenFnAttrs) {
None => return,
};
unsafe {
let buf = SmallCStr::new(&sect.as_str());
let buf = SmallCStr::new(sect.as_str());
llvm::LLVMSetSection(llval, buf.as_ptr());
}
}


@ -320,7 +320,7 @@ impl<'ll, 'tcx> CodegenCx<'ll, 'tcx> {
let dbg_cx = if tcx.sess.opts.debuginfo != DebugInfo::None {
let dctx = debuginfo::CrateDebugContext::new(llmod);
debuginfo::metadata::compile_unit_metadata(tcx, &codegen_unit.name().as_str(), &dctx);
debuginfo::metadata::compile_unit_metadata(tcx, codegen_unit.name().as_str(), &dctx);
Some(dctx)
} else {
None


@ -1033,7 +1033,7 @@ pub fn compile_unit_metadata(
) -> &'ll DIDescriptor {
let mut name_in_debuginfo = match tcx.sess.local_crate_source_file {
Some(ref path) => path.clone(),
None => PathBuf::from(&*tcx.crate_name(LOCAL_CRATE).as_str()),
None => PathBuf::from(tcx.crate_name(LOCAL_CRATE).as_str()),
};
// The OSX linker has an idiosyncrasy where it will ignore some debuginfo
@ -1353,7 +1353,7 @@ fn closure_saved_names_of_captured_variables(tcx: TyCtxt<'tcx>, def_id: DefId) -
_ => return None,
};
let prefix = if is_ref { "_ref__" } else { "" };
Some(prefix.to_owned() + &var.name.as_str())
Some(prefix.to_owned() + var.name.as_str())
})
.collect::<Vec<_>>()
}
@ -2421,7 +2421,7 @@ fn compute_type_parameters(cx: &CodegenCx<'ll, 'tcx>, ty: Ty<'tcx>) -> &'ll DIAr
cx.tcx.normalize_erasing_regions(ParamEnv::reveal_all(), ty);
let actual_type_metadata =
type_metadata(cx, actual_type, rustc_span::DUMMY_SP);
let name = &name.as_str();
let name = name.as_str();
Some(unsafe {
Some(llvm::LLVMRustDIBuilderCreateTemplateTypeParameter(
DIB(cx),


@ -48,7 +48,7 @@ impl Command {
}
pub fn sym_arg(&mut self, arg: Symbol) -> &mut Command {
self.arg(&*arg.as_str());
self.arg(arg.as_str());
self
}


@ -88,7 +88,7 @@ pub fn link_binary<'a, B: ArchiveBuilder<'a>>(
sess,
crate_type,
outputs,
&codegen_results.crate_info.local_crate_name.as_str(),
codegen_results.crate_info.local_crate_name.as_str(),
);
match crate_type {
CrateType::Rlib => {


@ -672,7 +672,7 @@ pub fn codegen_crate<B: ExtraBackendMethods>(
}
let cgu_reuse = cgu_reuse[i];
tcx.sess.cgu_reuse_tracker.set_actual_reuse(&cgu.name().as_str(), cgu_reuse);
tcx.sess.cgu_reuse_tracker.set_actual_reuse(cgu.name().as_str(), cgu_reuse);
match cgu_reuse {
CguReuse::No => {


@ -516,7 +516,7 @@ fn push_unqualified_item_name(
) {
match disambiguated_data.data {
DefPathData::CrateRoot => {
output.push_str(&tcx.crate_name(def_id.krate).as_str());
output.push_str(tcx.crate_name(def_id.krate).as_str());
}
DefPathData::ClosureExpr if tcx.generator_kind(def_id).is_some() => {
// Generators look like closures, but we want to treat them differently
@ -529,7 +529,7 @@ fn push_unqualified_item_name(
}
_ => match disambiguated_data.data.name() {
DefPathDataName::Named(name) => {
output.push_str(&name.as_str());
output.push_str(name.as_str());
}
DefPathDataName::Anon { namespace } => {
if cpp_like_names(tcx) {


@ -68,7 +68,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
let arg_tys = sig.inputs();
let ret_ty = sig.output();
let name = bx.tcx().item_name(def_id);
let name_str = &*name.as_str();
let name_str = name.as_str();
let llret_ty = bx.backend_type(bx.layout_of(ret_ty));
let result = PlaceRef::new_sized(llresult, fn_abi.ret.layout);
@ -375,7 +375,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
use crate::common::AtomicOrdering::*;
use crate::common::{AtomicRmwBinOp, SynchronizationScope};
let split: Vec<&str> = name_str.split('_').collect();
let split: Vec<_> = name_str.split('_').collect();
let is_cxchg = split[1] == "cxchg" || split[1] == "cxchgweak";
let (order, failorder) = match split.len() {


@ -82,7 +82,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
) -> MPlaceTy<'tcx, M::PointerTag> {
let loc_details = &self.tcx.sess.opts.debugging_opts.location_detail;
let file = if loc_details.file {
self.allocate_str(&filename.as_str(), MemoryKind::CallerLocation, Mutability::Not)
self.allocate_str(filename.as_str(), MemoryKind::CallerLocation, Mutability::Not)
} else {
// FIXME: This creates a new allocation each time. It might be preferable to
// perform this allocation only once, and re-use the `MPlaceTy`.


@ -88,7 +88,7 @@ impl<'tcx> Printer<'tcx> for AbsolutePathPrinter<'tcx> {
}
fn path_crate(mut self, cnum: CrateNum) -> Result<Self::Path, Self::Error> {
self.path.push_str(&self.tcx.crate_name(cnum).as_str());
self.path.push_str(self.tcx.crate_name(cnum).as_str());
Ok(self)
}


@ -171,7 +171,7 @@ fn get_features(
}
if let Some(allowed) = sess.opts.debugging_opts.allow_features.as_ref() {
if allowed.iter().all(|f| name.as_str() != *f) {
if allowed.iter().all(|f| name.as_str() != f) {
struct_span_err!(
span_handler,
mi.span(),


@ -331,9 +331,9 @@ pub struct Ident {
impl Ident {
fn new(sess: &ParseSess, sym: Symbol, is_raw: bool, span: Span) -> Ident {
let sym = nfc_normalize(&sym.as_str());
let sym = nfc_normalize(sym.as_str());
let string = sym.as_str();
if !rustc_lexer::is_ident(&string) {
if !rustc_lexer::is_ident(string) {
panic!("`{:?}` is not a valid identifier", string)
}
if is_raw && !sym.can_be_raw() {


@ -173,7 +173,7 @@ impl DisambiguatedDefPathData {
if verbose && self.disambiguator != 0 {
write!(writer, "{}#{}", name, self.disambiguator)
} else {
writer.write_str(&name.as_str())
writer.write_str(name.as_str())
}
}
DefPathDataName::Anon { namespace } => {
@ -494,7 +494,7 @@ impl DefPathData {
impl fmt::Display for DefPathData {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self.name() {
DefPathDataName::Named(name) => f.write_str(&name.as_str()),
DefPathDataName::Named(name) => f.write_str(name.as_str()),
// FIXME(#70334): this will generate legacy {{closure}}, {{impl}}, etc
DefPathDataName::Anon { namespace } => write!(f, "{{{{{}}}}}", namespace),
}


@ -131,7 +131,7 @@ impl IfThisChanged<'tcx> {
DepNode::from_def_path_hash(self.tcx, def_path_hash, DepKind::hir_owner)
}
Some(n) => {
match DepNode::from_label_string(self.tcx, &n.as_str(), def_path_hash) {
match DepNode::from_label_string(self.tcx, n.as_str(), def_path_hash) {
Ok(n) => n,
Err(()) => {
self.tcx.sess.span_fatal(
@ -147,7 +147,7 @@ impl IfThisChanged<'tcx> {
let dep_node_interned = self.argument(attr);
let dep_node = match dep_node_interned {
Some(n) => {
match DepNode::from_label_string(self.tcx, &n.as_str(), def_path_hash) {
match DepNode::from_label_string(self.tcx, n.as_str(), def_path_hash) {
Ok(n) => n,
Err(()) => {
self.tcx.sess.span_fatal(


@ -124,7 +124,7 @@ impl AssertModuleSource<'tcx> {
debug!("mapping '{}' to cgu name '{}'", self.field(attr, sym::module), cgu_name);
if !self.available_cgus.contains(&*cgu_name.as_str()) {
if !self.available_cgus.contains(cgu_name.as_str()) {
self.tcx.sess.span_err(
attr.span,
&format!(


@ -2252,8 +2252,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
.map(|p| p.name.as_str()),
);
}
let lts = lts_names.iter().map(|s| -> &str { &*s }).collect::<Vec<_>>();
possible.find(|candidate| !lts.contains(&candidate.as_str()))
possible.find(|candidate| !lts_names.contains(&&candidate[..]))
})
.unwrap_or("'lt".to_string());
let add_lt_sugg = generics


@ -324,7 +324,7 @@ pub fn configure_and_expand(
let crate_attrs = krate.attrs.clone();
let extern_mod_loaded = |ident: Ident, attrs, items, span| {
let krate = ast::Crate { attrs, items, span, is_placeholder: None };
pre_expansion_lint(sess, lint_store, &krate, &crate_attrs, &ident.name.as_str());
pre_expansion_lint(sess, lint_store, &krate, &crate_attrs, ident.name.as_str());
(krate.attrs, krate.items)
};
let mut ecx = ExtCtxt::new(sess, cfg, resolver, Some(&extern_mod_loaded));
@ -631,7 +631,7 @@ fn write_out_deps(
// (e.g. accessed in proc macros).
let file_depinfo = sess.parse_sess.file_depinfo.borrow();
let extra_tracked_files = file_depinfo.iter().map(|path_sym| {
let path = PathBuf::from(&*path_sym.as_str());
let path = PathBuf::from(path_sym.as_str());
let file = FileName::from(path);
escape_dep_filename(&file.prefer_local().to_string())
});


@ -3183,7 +3183,7 @@ impl<'tcx> LateLintPass<'tcx> for NamedAsmLabels {
} = expr
{
for (template_sym, template_snippet, template_span) in template_strs.iter() {
let template_str = &template_sym.as_str();
let template_str = template_sym.as_str();
let find_label_span = |needle: &str| -> Option<Span> {
if let Some(template_snippet) = template_snippet {
let snippet = template_snippet.as_str();


@ -381,10 +381,10 @@ impl LintStore {
lint_name,
self.lint_groups.keys().collect::<Vec<_>>()
);
let lint_name_str = &*lint_name.as_str();
self.lint_groups.contains_key(&lint_name_str) || {
let lint_name_str = lint_name.as_str();
self.lint_groups.contains_key(lint_name_str) || {
let warnings_name_str = crate::WARNINGS.name_lower();
lint_name_str == &*warnings_name_str
lint_name_str == warnings_name_str
}
}


@ -127,7 +127,7 @@ impl HiddenUnicodeCodepoints {
impl EarlyLintPass for HiddenUnicodeCodepoints {
fn check_attribute(&mut self, cx: &EarlyContext<'_>, attr: &ast::Attribute) {
if let ast::AttrKind::DocComment(_, comment) = attr.kind {
if contains_text_flow_control_chars(&comment.as_str()) {
if contains_text_flow_control_chars(comment.as_str()) {
self.lint_text_direction_codepoint(cx, comment, attr.span, 0, false, "doc comment");
}
}
@ -138,7 +138,7 @@ impl EarlyLintPass for HiddenUnicodeCodepoints {
let (text, span, padding) = match &expr.kind {
ast::ExprKind::Lit(ast::Lit { token, kind, span }) => {
let text = token.symbol;
if !contains_text_flow_control_chars(&text.as_str()) {
if !contains_text_flow_control_chars(text.as_str()) {
return;
}
let padding = match kind {


@ -154,7 +154,7 @@ impl<'s> LintLevelsBuilder<'s> {
LintLevelSource::Node(_, forbid_source_span, reason) => {
diag_builder.span_label(forbid_source_span, "`forbid` level set here");
if let Some(rationale) = reason {
diag_builder.note(&rationale.as_str());
diag_builder.note(rationale.as_str());
}
}
LintLevelSource::CommandLine(_, _) => {


@ -218,8 +218,7 @@ impl EarlyLintPass for NonAsciiIdents {
cx.struct_span_lint(CONFUSABLE_IDENTS, sp, |lint| {
lint.build(&format!(
"identifier pair considered confusable between `{}` and `{}`",
existing_symbol.as_str(),
symbol.as_str()
existing_symbol, symbol
))
.span_label(
*existing_span,


@ -71,7 +71,7 @@ fn check_panic<'tcx>(cx: &LateContext<'tcx>, f: &'tcx hir::Expr<'tcx>, arg: &'tc
if let hir::ExprKind::Lit(lit) = &arg.kind {
if let ast::LitKind::Str(sym, _) = lit.node {
// The argument is a string literal.
check_panic_str(cx, f, arg, &sym.as_str());
check_panic_str(cx, f, arg, sym.as_str());
return;
}
}


@ -133,7 +133,7 @@ fn to_camel_case(s: &str) -> String {
impl NonCamelCaseTypes {
fn check_case(&self, cx: &EarlyContext<'_>, sort: &str, ident: &Ident) {
let name = &ident.name.as_str();
let name = ident.name.as_str();
if !is_camel_case(name) {
cx.struct_span_lint(NON_CAMEL_CASE_TYPES, ident.span, |lint| {
@ -276,7 +276,7 @@ impl NonSnakeCase {
})
}
let name = &ident.name.as_str();
let name = ident.name.as_str();
if !is_snake_case(name) {
cx.struct_span_lint(NON_SNAKE_CASE, ident.span, |lint| {
@ -484,7 +484,7 @@ declare_lint_pass!(NonUpperCaseGlobals => [NON_UPPER_CASE_GLOBALS]);
impl NonUpperCaseGlobals {
fn check_upper_case(cx: &LateContext<'_>, sort: &str, ident: &Ident) {
let name = &ident.name.as_str();
let name = ident.name.as_str();
if name.chars().any(|c| c.is_lowercase()) {
cx.struct_span_lint(NON_UPPER_CASE_GLOBALS, ident.span, |lint| {
let uc = NonSnakeCase::to_snake_case(&name).to_uppercase();


@ -313,7 +313,7 @@ impl<'tcx> LateLintPass<'tcx> for UnusedResults {
let mut err = lint.build(&msg);
// check for #[must_use = "..."]
if let Some(note) = attr.value_str() {
err.note(&note.as_str());
err.note(note.as_str());
}
err.emit();
});


@ -292,7 +292,7 @@ impl<'a> CrateLoader<'a> {
// `source` stores paths which are normalized which may be different
// from the strings on the command line.
let source = self.cstore.get_crate_data(cnum).cdata.source();
if let Some(entry) = self.sess.opts.externs.get(&name.as_str()) {
if let Some(entry) = self.sess.opts.externs.get(name.as_str()) {
// Only use `--extern crate_name=path` here, not `--extern crate_name`.
if let Some(mut files) = entry.files() {
if files.any(|l| {
@ -381,7 +381,7 @@ impl<'a> CrateLoader<'a> {
let host_hash = host_lib.as_ref().map(|lib| lib.metadata.get_root().hash());
let private_dep =
self.sess.opts.externs.get(&name.as_str()).map_or(false, |e| e.is_private_dep);
self.sess.opts.externs.get(name.as_str()).map_or(false, |e| e.is_private_dep);
// Claim this crate number and cache it
let cnum = self.cstore.alloc_new_crate_num();
@ -997,7 +997,7 @@ impl<'a> CrateLoader<'a> {
);
let name = match orig_name {
Some(orig_name) => {
validate_crate_name(self.sess, &orig_name.as_str(), Some(item.span));
validate_crate_name(self.sess, orig_name.as_str(), Some(item.span));
orig_name
}
None => item.ident.name,


@ -315,7 +315,7 @@ impl<'a> CrateLocator<'a> {
exact_paths: if hash.is_none() {
sess.opts
.externs
.get(&crate_name.as_str())
.get(crate_name.as_str())
.into_iter()
.filter_map(|entry| entry.files())
.flatten()
@ -1175,7 +1175,7 @@ impl CrateError {
} else if crate_name
== Symbol::intern(&sess.opts.debugging_opts.profiler_runtime)
{
err.note(&"the compiler may have been built without the profiler runtime");
err.note("the compiler may have been built without the profiler runtime");
} else if crate_name.as_str().starts_with("rustc_") {
err.help(
"maybe you need to install the missing components with: \


@ -67,7 +67,7 @@ impl ItemLikeVisitor<'tcx> for Collector<'tcx> {
Some(name) => name,
None => continue, // skip like historical compilers
};
lib.kind = match &*kind.as_str() {
lib.kind = match kind.as_str() {
"static" => NativeLibKind::Static { bundle: None, whole_archive: None },
"static-nobundle" => {
sess.struct_span_warn(


@ -319,7 +319,7 @@ pub fn struct_lint_level<'s, 'd>(
}
LintLevelSource::Node(lint_attr_name, src, reason) => {
if let Some(rationale) = reason {
err.note(&rationale.as_str());
err.note(rationale.as_str());
}
sess.diag_span_note_once(
&mut err,


@ -2473,7 +2473,7 @@ impl<'tcx> Debug for Rvalue<'tcx> {
if let Some(upvars) = tcx.upvars_mentioned(def_id) {
for (&var_id, place) in iter::zip(upvars.keys(), places) {
let var_name = tcx.hir().name(var_id);
struct_fmt.field(&var_name.as_str(), place);
struct_fmt.field(var_name.as_str(), place);
}
}
@ -2493,7 +2493,7 @@ impl<'tcx> Debug for Rvalue<'tcx> {
if let Some(upvars) = tcx.upvars_mentioned(def_id) {
for (&var_id, place) in iter::zip(upvars.keys(), places) {
let var_name = tcx.hir().name(var_id);
struct_fmt.field(&var_name.as_str(), place);
struct_fmt.field(var_name.as_str(), place);
}
}


@ -338,7 +338,7 @@ impl<'tcx> CodegenUnit<'tcx> {
}
pub fn work_product_id(&self) -> WorkProductId {
WorkProductId::from_cgu_name(&self.name().as_str())
WorkProductId::from_cgu_name(self.name().as_str())
}
pub fn work_product(&self, tcx: TyCtxt<'_>) -> WorkProduct {
@ -470,7 +470,7 @@ impl CodegenUnitNameBuilder<'tcx> {
if self.tcx.sess.opts.debugging_opts.human_readable_cgu_names {
cgu_name
} else {
Symbol::intern(&CodegenUnit::mangle_name(&cgu_name.as_str()))
Symbol::intern(&CodegenUnit::mangle_name(cgu_name.as_str()))
}
}


@ -303,7 +303,7 @@ pub trait PrettyPrinter<'tcx>:
match self.tcx().trimmed_def_paths(()).get(&def_id) {
None => Ok((self, false)),
Some(symbol) => {
self.write_str(&symbol.as_str())?;
self.write_str(symbol.as_str())?;
Ok((self, true))
}
}


@ -366,7 +366,7 @@ fn collect_and_partition_mono_items<'tcx>(
for cgu in codegen_units {
tcx.prof.artifact_size(
"codegen_unit_size_estimate",
&cgu.name().as_str()[..],
cgu.name().as_str(),
cgu.size_estimate() as u64,
);
}
@ -401,7 +401,7 @@ fn collect_and_partition_mono_items<'tcx>(
cgus.dedup();
for &(ref cgu_name, (linkage, _)) in cgus.iter() {
output.push(' ');
output.push_str(&cgu_name.as_str());
output.push_str(cgu_name.as_str());
let linkage_abbrev = match linkage {
Linkage::External => "External",


@ -1608,7 +1608,7 @@ impl<'a> Parser<'a> {
next_token.kind
{
if self.token.span.hi() == next_token.span.lo() {
let s = String::from("0.") + &symbol.as_str();
let s = String::from("0.") + symbol.as_str();
let kind = TokenKind::lit(token::Float, Symbol::intern(&s), suffix);
return Some(Token::new(kind, self.token.span.to(next_token.span)));
}
@ -1698,7 +1698,7 @@ impl<'a> Parser<'a> {
LitError::InvalidFloatSuffix => {
let suf = suffix.expect("suffix error with no suffix");
let suf = suf.as_str();
if looks_like_width_suffix(&['f'], &suf) {
if looks_like_width_suffix(&['f'], suf) {
// If it looks like a width, try to be helpful.
let msg = format!("invalid width `{}` for float literal", &suf[1..]);
self.struct_span_err(span, &msg).help("valid widths are 32 and 64").emit();


@ -607,7 +607,7 @@ impl CheckAttrVisitor<'tcx> {
return err_fn(meta.span(), &format!("isn't allowed on {}", err));
}
let item_name = self.tcx.hir().name(hir_id);
if &*item_name.as_str() == doc_alias {
if item_name.as_str() == doc_alias {
return err_fn(meta.span(), "is the same as the item's name");
}
let span = meta.span();
@ -636,7 +636,7 @@ impl CheckAttrVisitor<'tcx> {
LitKind::Str(s, _) => {
if !self.check_doc_alias_value(
v,
&s.as_str(),
s.as_str(),
hir_id,
target,
true,


@ -1464,7 +1464,7 @@ impl<'tcx> Liveness<'_, 'tcx> {
if name == kw::Empty {
return None;
}
let name: &str = &name.as_str();
let name = name.as_str();
if name.as_bytes()[0] == b'_' {
return None;
}


@ -2340,7 +2340,7 @@ impl<'tcx> LifetimeContext<'_, 'tcx> {
_ => None,
});
}
suggest_existing(err, &name.as_str()[..], suggs);
suggest_existing(err, name.as_str(), suggs);
}
[] => {
let mut suggs = Vec::new();


@ -693,7 +693,7 @@ impl<'a, 'tcx> Visitor<'tcx> for LifetimeContext<'a, 'tcx> {
intravisit::FnKind::Method(id, _, _) => id.name,
intravisit::FnKind::Closure => sym::closure,
};
let name: &str = name.as_str();
let name = name.as_str();
let span = span!(Level::DEBUG, "visit_fn", name);
let _enter = span.enter();
match fk {


@ -3482,7 +3482,7 @@ fn names_to_string(names: &[Symbol]) -> String {
if Ident::with_dummy_span(*name).is_raw_guess() {
result.push_str("r#");
}
result.push_str(&name.as_str());
result.push_str(name.as_str());
}
result
}


@ -825,7 +825,7 @@ impl<'tcx> SaveContext<'tcx> {
for attr in attrs {
if let Some(val) = attr.doc_str() {
// FIXME: Should save-analysis beautify doc strings itself or leave it to users?
result.push_str(&beautify_doc_string(val).as_str());
result.push_str(beautify_doc_string(val).as_str());
result.push('\n');
}
}


@ -60,7 +60,7 @@ pub fn find_crate_name(sess: &Session, attrs: &[ast::Attribute], input: &Input)
if let Some(ref s) = sess.opts.crate_name {
if let Some((attr, name)) = attr_crate_name {
if name.as_str() != *s {
if name.as_str() != s {
let msg = format!(
"`--crate-name` and `#[crate_name]` are \
required to match, but `{}` != `{}`",


@ -55,7 +55,7 @@ pub fn find_best_match_for_name(
lookup: Symbol,
dist: Option<usize>,
) -> Option<Symbol> {
let lookup = &lookup.as_str();
let lookup = lookup.as_str();
let max_dist = dist.unwrap_or_else(|| cmp::max(lookup.len(), 3) / 3);
// Priority of matches:
@ -70,7 +70,7 @@ pub fn find_best_match_for_name(
let levenshtein_match = name_vec
.iter()
.filter_map(|&name| {
let dist = lev_distance(lookup, &name.as_str());
let dist = lev_distance(lookup, name.as_str());
if dist <= max_dist { Some((name, dist)) } else { None }
})
// Here we are collecting the next structure:
@ -88,7 +88,7 @@ pub fn find_best_match_for_name(
fn find_match_by_sorted_words(iter_names: &[Symbol], lookup: &str) -> Option<Symbol> {
iter_names.iter().fold(None, |result, candidate| {
if sort_by_words(&candidate.as_str()) == sort_by_words(lookup) {
if sort_by_words(candidate.as_str()) == sort_by_words(lookup) {
Some(*candidate)
} else {
result


@ -1686,19 +1686,19 @@ impl Symbol {
impl fmt::Debug for Symbol {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Debug::fmt(&self.as_str(), f)
fmt::Debug::fmt(self.as_str(), f)
}
}
impl fmt::Display for Symbol {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Display::fmt(&self.as_str(), f)
fmt::Display::fmt(self.as_str(), f)
}
}
impl<S: Encoder> Encodable<S> for Symbol {
fn encode(&self, s: &mut S) -> Result<(), S::Error> {
s.emit_str(&self.as_str())
s.emit_str(self.as_str())
}
}


@ -255,7 +255,7 @@ impl Printer<'tcx> for &mut SymbolPrinter<'tcx> {
}
fn path_crate(self, cnum: CrateNum) -> Result<Self::Path, Self::Error> {
self.write_str(&self.tcx.crate_name(cnum).as_str())?;
self.write_str(self.tcx.crate_name(cnum).as_str())?;
Ok(self)
}
fn path_qualified(


@ -298,43 +298,43 @@ impl InlineAsmReg {
let name = name.as_str();
Ok(match arch {
InlineAsmArch::X86 | InlineAsmArch::X86_64 => {
Self::X86(X86InlineAsmReg::parse(arch, has_feature, target, &name)?)
Self::X86(X86InlineAsmReg::parse(arch, has_feature, target, name)?)
}
InlineAsmArch::Arm => {
Self::Arm(ArmInlineAsmReg::parse(arch, has_feature, target, &name)?)
Self::Arm(ArmInlineAsmReg::parse(arch, has_feature, target, name)?)
}
InlineAsmArch::AArch64 => {
Self::AArch64(AArch64InlineAsmReg::parse(arch, has_feature, target, &name)?)
Self::AArch64(AArch64InlineAsmReg::parse(arch, has_feature, target, name)?)
}
InlineAsmArch::RiscV32 | InlineAsmArch::RiscV64 => {
Self::RiscV(RiscVInlineAsmReg::parse(arch, has_feature, target, &name)?)
Self::RiscV(RiscVInlineAsmReg::parse(arch, has_feature, target, name)?)
}
InlineAsmArch::Nvptx64 => {
Self::Nvptx(NvptxInlineAsmReg::parse(arch, has_feature, target, &name)?)
Self::Nvptx(NvptxInlineAsmReg::parse(arch, has_feature, target, name)?)
}
InlineAsmArch::PowerPC | InlineAsmArch::PowerPC64 => {
Self::PowerPC(PowerPCInlineAsmReg::parse(arch, has_feature, target, &name)?)
Self::PowerPC(PowerPCInlineAsmReg::parse(arch, has_feature, target, name)?)
}
InlineAsmArch::Hexagon => {
Self::Hexagon(HexagonInlineAsmReg::parse(arch, has_feature, target, &name)?)
Self::Hexagon(HexagonInlineAsmReg::parse(arch, has_feature, target, name)?)
}
InlineAsmArch::Mips | InlineAsmArch::Mips64 => {
Self::Mips(MipsInlineAsmReg::parse(arch, has_feature, target, &name)?)
Self::Mips(MipsInlineAsmReg::parse(arch, has_feature, target, name)?)
}
InlineAsmArch::S390x => {
Self::S390x(S390xInlineAsmReg::parse(arch, has_feature, target, &name)?)
Self::S390x(S390xInlineAsmReg::parse(arch, has_feature, target, name)?)
}
InlineAsmArch::SpirV => {
Self::SpirV(SpirVInlineAsmReg::parse(arch, has_feature, target, &name)?)
Self::SpirV(SpirVInlineAsmReg::parse(arch, has_feature, target, name)?)
}
InlineAsmArch::Wasm32 | InlineAsmArch::Wasm64 => {
Self::Wasm(WasmInlineAsmReg::parse(arch, has_feature, target, &name)?)
Self::Wasm(WasmInlineAsmReg::parse(arch, has_feature, target, name)?)
}
InlineAsmArch::Bpf => {
Self::Bpf(BpfInlineAsmReg::parse(arch, has_feature, target, &name)?)
Self::Bpf(BpfInlineAsmReg::parse(arch, has_feature, target, name)?)
}
InlineAsmArch::Avr => {
Self::Avr(AvrInlineAsmReg::parse(arch, has_feature, target, &name)?)
Self::Avr(AvrInlineAsmReg::parse(arch, has_feature, target, name)?)
}
})
}
@ -798,7 +798,7 @@ impl InlineAsmClobberAbi {
target: &Target,
name: Symbol,
) -> Result<Self, &'static [&'static str]> {
let name = &*name.as_str();
let name = name.as_str();
match arch {
InlineAsmArch::X86 => match name {
"C" | "system" | "efiapi" | "cdecl" | "stdcall" | "fastcall" => {


@ -269,7 +269,7 @@ impl<'tcx> OnUnimplementedFormatString {
let name = tcx.item_name(trait_def_id);
let generics = tcx.generics_of(trait_def_id);
let s = self.0.as_str();
let parser = Parser::new(&s, None, None, false, ParseMode::Format);
let parser = Parser::new(s, None, None, false, ParseMode::Format);
let mut result = Ok(());
for token in parser {
match token {
@ -347,7 +347,7 @@ impl<'tcx> OnUnimplementedFormatString {
let empty_string = String::new();
let s = self.0.as_str();
let parser = Parser::new(&s, None, None, false, ParseMode::Format);
let parser = Parser::new(s, None, None, false, ParseMode::Format);
let item_context = (options.get(&sym::ItemContext)).unwrap_or(&empty_string);
parser
.map(|p| match p {


@ -609,7 +609,7 @@ fn check_must_not_suspend_def(
// Add optional reason note
if let Some(note) = attr.value_str() {
// FIXME(guswynn): consider formatting this better
err.span_note(data.source_span, &note.as_str());
err.span_note(data.source_span, note.as_str());
}
// Add some quick suggestions on what to do


@ -2846,7 +2846,7 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, id: DefId) -> CodegenFnAttrs {
);
} else if attr.has_name(sym::linkage) {
if let Some(val) = attr.value_str() {
codegen_fn_attrs.linkage = Some(linkage_by_name(tcx, id, &val.as_str()));
codegen_fn_attrs.linkage = Some(linkage_by_name(tcx, id, val.as_str()));
}
} else if attr.has_name(sym::link_section) {
if let Some(val) = attr.value_str() {


@ -466,7 +466,7 @@ impl<'a> fmt::Display for Display<'a> {
(sym::unix, None) => "Unix",
(sym::windows, None) => "Windows",
(sym::debug_assertions, None) => "debug-assertions enabled",
(sym::target_os, Some(os)) => match &*os.as_str() {
(sym::target_os, Some(os)) => match os.as_str() {
"android" => "Android",
"dragonfly" => "DragonFly BSD",
"emscripten" => "Emscripten",
@ -487,7 +487,7 @@ impl<'a> fmt::Display for Display<'a> {
"windows" => "Windows",
_ => "",
},
(sym::target_arch, Some(arch)) => match &*arch.as_str() {
(sym::target_arch, Some(arch)) => match arch.as_str() {
"aarch64" => "AArch64",
"arm" => "ARM",
"asmjs" => "JavaScript",
@ -504,14 +504,14 @@ impl<'a> fmt::Display for Display<'a> {
"x86_64" => "x86-64",
_ => "",
},
(sym::target_vendor, Some(vendor)) => match &*vendor.as_str() {
(sym::target_vendor, Some(vendor)) => match vendor.as_str() {
"apple" => "Apple",
"pc" => "PC",
"sun" => "Sun",
"fortanix" => "Fortanix",
_ => "",
},
(sym::target_env, Some(env)) => match &*env.as_str() {
(sym::target_env, Some(env)) => match env.as_str() {
"gnu" => "GNU",
"msvc" => "MSVC",
"musl" => "musl",
@ -545,14 +545,14 @@ impl<'a> fmt::Display for Display<'a> {
write!(
fmt,
r#"<code>{}="{}"</code>"#,
Escape(&name.as_str()),
Escape(&v.as_str())
Escape(name.as_str()),
Escape(v.as_str())
)
} else {
write!(fmt, r#"`{}="{}"`"#, name, v)
}
} else if self.1.is_html() {
write!(fmt, "<code>{}</code>", Escape(&name.as_str()))
write!(fmt, "<code>{}</code>", Escape(name.as_str()))
} else {
write!(fmt, "`{}`", name)
}


@ -201,7 +201,7 @@ impl ExternalCrate {
// See if there's documentation generated into the local directory
// WARNING: since rustdoc creates these directories as it generates documentation, this check is only accurate before rendering starts.
// Make sure to call `location()` by that time.
let local_location = dst.join(&*self.name(tcx).as_str());
let local_location = dst.join(self.name(tcx).as_str());
if local_location.is_dir() {
return Local;
}


@ -150,8 +150,7 @@ impl Cache {
let name = e.name(tcx);
let render_options = &cx.render_options;
let extern_url =
render_options.extern_html_root_urls.get(&*name.as_str()).map(|u| &**u);
let extern_url = render_options.extern_html_root_urls.get(name.as_str()).map(|u| &**u);
let extern_url_takes_precedence = render_options.extern_html_root_takes_precedence;
let dst = &render_options.output;
let location = e.location(extern_url, extern_url_takes_precedence, dst, tcx);


@ -90,7 +90,7 @@ crate fn run_format<'tcx, T: FormatRenderer<'tcx>>(
// FIXME: checking `item.name.is_some()` is very implicit and leads to lots of special
// cases. Use an explicit match instead.
} else if item.name.is_some() && !item.is_extern_crate() {
prof.generic_activity_with_arg("render_item", &*item.name.unwrap_or(unknown).as_str())
prof.generic_activity_with_arg("render_item", item.name.unwrap_or(unknown).as_str())
.run(|| cx.item(item))?;
}
}


@ -173,7 +173,7 @@ impl clean::GenericParamDef {
Ok(())
}
clean::GenericParamDefKind::Type { bounds, default, .. } => {
f.write_str(&*self.name.as_str())?;
f.write_str(self.name.as_str())?;
if !bounds.is_empty() {
if f.alternate() {
@ -637,7 +637,7 @@ fn resolved_path<'cx>(
last.name.to_string()
}
} else {
anchor(did, &*last.name.as_str(), cx).to_string()
anchor(did, last.name.as_str(), cx).to_string()
};
write!(w, "{}{}", path, last.args.print(cx))?;
}
@ -772,7 +772,7 @@ fn fmt_type<'cx>(
clean::Primitive(clean::PrimitiveType::Never) => {
primitive_link(f, PrimitiveType::Never, "!", cx)
}
clean::Primitive(prim) => primitive_link(f, prim, &*prim.as_sym().as_str(), cx),
clean::Primitive(prim) => primitive_link(f, prim, prim.as_sym().as_str(), cx),
clean::BareFunction(ref decl) => {
if f.alternate() {
write!(
@ -1268,7 +1268,7 @@ impl clean::Visibility {
debug!("path={:?}", path);
// modified from `resolved_path()` to work with `DefPathData`
let last_name = path.data.last().unwrap().data.get_opt_name().unwrap();
let anchor = anchor(vis_did, &last_name.as_str(), cx).to_string();
let anchor = anchor(vis_did, last_name.as_str(), cx).to_string();
let mut s = "pub(in ".to_owned();
for seg in &path.data[..path.data.len() - 1] {
@ -1417,7 +1417,7 @@ impl clean::TypeBinding {
cx: &'a Context<'tcx>,
) -> impl fmt::Display + 'a + Captures<'tcx> {
display_fn(move |f| {
f.write_str(&*self.name.as_str())?;
f.write_str(self.name.as_str())?;
match self.kind {
clean::TypeBindingKind::Equality { ref ty } => {
if f.alternate() {


@ -180,7 +180,7 @@ impl<'tcx> Context<'tcx> {
fn render_item(&self, it: &clean::Item, is_module: bool) -> String {
let mut title = String::new();
if !is_module {
title.push_str(&it.name.unwrap().as_str());
title.push_str(it.name.unwrap().as_str());
}
if !it.is_primitive() && !it.is_keyword() {
if !is_module {
@ -549,7 +549,7 @@ impl<'tcx> FormatRenderer<'tcx> for Context<'tcx> {
fn after_krate(&mut self) -> Result<(), Error> {
let crate_name = self.tcx().crate_name(LOCAL_CRATE);
let final_file = self.dst.join(&*crate_name.as_str()).join("all.html");
let final_file = self.dst.join(crate_name.as_str()).join("all.html");
let settings_file = self.dst.join("settings.html");
let mut root_path = self.dst.to_str().expect("invalid path").to_owned();
@ -619,9 +619,9 @@ impl<'tcx> FormatRenderer<'tcx> for Context<'tcx> {
if let Some(ref redirections) = self.shared.redirections {
if !redirections.borrow().is_empty() {
let redirect_map_path =
self.dst.join(&*crate_name.as_str()).join("redirect-map.json");
self.dst.join(crate_name.as_str()).join("redirect-map.json");
let paths = serde_json::to_string(&*redirections.borrow()).unwrap();
self.shared.ensure_dir(&self.dst.join(&*crate_name.as_str()))?;
self.shared.ensure_dir(&self.dst.join(crate_name.as_str()))?;
self.shared.fs.write(redirect_map_path, paths)?;
}
}
@ -703,7 +703,7 @@ impl<'tcx> FormatRenderer<'tcx> for Context<'tcx> {
if !buf.is_empty() {
let name = item.name.as_ref().unwrap();
let item_type = item.type_();
let file_name = &item_path(item_type, &name.as_str());
let file_name = &item_path(item_type, name.as_str());
self.shared.ensure_dir(&self.dst)?;
let joint_dst = self.dst.join(file_name);
self.shared.fs.write(joint_dst, buf)?;


@ -640,9 +640,9 @@ fn short_item_info(
// We display deprecation messages for #[deprecated] and #[rustc_deprecated]
// but only display the future-deprecation messages for #[rustc_deprecated].
let mut message = if let Some(since) = since {
let since = &since.as_str();
let since = since.as_str();
if !stability::deprecation_in_effect(&depr) {
if *since == "TBD" {
if since == "TBD" {
String::from("Deprecating in a future Rust version")
} else {
format!("Deprecating in {}", Escape(since))
@ -658,7 +658,7 @@ fn short_item_info(
let note = note.as_str();
let mut ids = cx.id_map.borrow_mut();
let html = MarkdownHtml(
&note,
note,
&mut ids,
error_codes,
cx.shared.edition(),
@ -683,7 +683,7 @@ fn short_item_info(
let mut message =
"<span class=\"emoji\">🔬</span> This is a nightly-only experimental API.".to_owned();
let mut feature = format!("<code>{}</code>", Escape(&feature.as_str()));
let mut feature = format!("<code>{}</code>", Escape(feature.as_str()));
if let (Some(url), Some(issue)) = (&cx.shared.issue_tracker_base_url, issue) {
feature.push_str(&format!(
"&nbsp;<a href=\"{url}{issue}\">#{issue}</a>",


@ -136,7 +136,7 @@ pub(super) fn print_item(
page: page,
static_root_path: page.get_static_root_path(),
typ: typ,
name: &item.name.as_ref().unwrap().as_str(),
name: item.name.as_ref().unwrap().as_str(),
item_type: &item.type_().to_string(),
path_components: path_components,
stability_since_raw: &stability_since_raw,
@ -315,7 +315,7 @@ fn item_module(w: &mut Buffer, cx: &Context<'_>, item: &clean::Item, items: &[cl
w,
"<div class=\"item-left\"><code>{}extern crate {} as {};",
myitem.visibility.print_with_space(myitem.def_id, cx),
anchor(myitem.def_id.expect_def_id(), &*src.as_str(), cx),
anchor(myitem.def_id.expect_def_id(), src.as_str(), cx),
myitem.name.as_ref().unwrap(),
),
None => write!(
@ -324,7 +324,7 @@ fn item_module(w: &mut Buffer, cx: &Context<'_>, item: &clean::Item, items: &[cl
myitem.visibility.print_with_space(myitem.def_id, cx),
anchor(
myitem.def_id.expect_def_id(),
&*myitem.name.as_ref().unwrap().as_str(),
myitem.name.as_ref().unwrap().as_str(),
cx
),
),
@ -405,7 +405,7 @@ fn item_module(w: &mut Buffer, cx: &Context<'_>, item: &clean::Item, items: &[cl
add = add,
stab = stab.unwrap_or_default(),
unsafety_flag = unsafety_flag,
href = item_path(myitem.type_(), &myitem.name.unwrap().as_str()),
href = item_path(myitem.type_(), myitem.name.unwrap().as_str()),
title = [full_path(cx, myitem), myitem.type_().to_string()]
.iter()
.filter_map(|s| if !s.is_empty() { Some(s.as_str()) } else { None })
@ -1308,7 +1308,7 @@ fn item_struct(w: &mut Buffer, cx: &Context<'_>, it: &clean::Item, s: &clean::St
document_non_exhaustive(w, it);
for (index, (field, ty)) in fields.enumerate() {
let field_name =
field.name.map_or_else(|| index.to_string(), |sym| (*sym.as_str()).to_string());
field.name.map_or_else(|| index.to_string(), |sym| sym.as_str().to_string());
let id = cx.derive_id(format!("{}.{}", ItemType::StructField, field_name));
write!(
w,
@ -1410,7 +1410,7 @@ crate fn compare_names(mut lhs: &str, mut rhs: &str) -> Ordering {
pub(super) fn full_path(cx: &Context<'_>, item: &clean::Item) -> String {
let mut s = cx.current.join("::");
s.push_str("::");
s.push_str(&item.name.unwrap().as_str());
s.push_str(item.name.unwrap().as_str());
s
}


@ -418,7 +418,7 @@ pub(super) fn write_shared(
let dst = cx.dst.join(&format!("source-files{}.js", cx.shared.resource_suffix));
let make_sources = || {
let (mut all_sources, _krates) =
try_err!(collect(&dst, &krate.name(cx.tcx()).as_str(), "sourcesIndex"), &dst);
try_err!(collect(&dst, krate.name(cx.tcx()).as_str(), "sourcesIndex"), &dst);
all_sources.push(format!(
"sourcesIndex[\"{}\"] = {};",
&krate.name(cx.tcx()),
@ -437,7 +437,7 @@ pub(super) fn write_shared(
// Update the search index and crate list.
let dst = cx.dst.join(&format!("search-index{}.js", cx.shared.resource_suffix));
let (mut all_indexes, mut krates) =
try_err!(collect_json(&dst, &krate.name(cx.tcx()).as_str()), &dst);
try_err!(collect_json(&dst, krate.name(cx.tcx()).as_str()), &dst);
all_indexes.push(search_index);
krates.push(krate.name(cx.tcx()).to_string());
krates.sort();
@ -575,7 +575,7 @@ pub(super) fn write_shared(
mydst.push(&format!("{}.{}.js", remote_item_type, remote_path[remote_path.len() - 1]));
let (mut all_implementors, _) =
try_err!(collect(&mydst, &krate.name(cx.tcx()).as_str(), "implementors"), &mydst);
try_err!(collect(&mydst, krate.name(cx.tcx()).as_str(), "implementors"), &mydst);
all_implementors.push(implementors);
// Sort the implementors by crate so the file will be generated
// identically even with rustdoc running in parallel.


@ -19,7 +19,7 @@ use std::path::{Component, Path, PathBuf};
crate fn render(cx: &mut Context<'_>, krate: &clean::Crate) -> Result<(), Error> {
info!("emitting source files");
let dst = cx.dst.join("src").join(&*krate.name(cx.tcx()).as_str());
let dst = cx.dst.join("src").join(krate.name(cx.tcx()).as_str());
cx.shared.ensure_dir(&dst)?;
let mut collector = SourceCollector { dst, cx, emitted_local_sources: FxHashSet::default() };


@ -486,7 +486,7 @@ fn check_attrs(cx: &LateContext<'_>, span: Span, name: Symbol, attrs: &[Attribut
fn check_semver(cx: &LateContext<'_>, span: Span, lit: &Lit) {
if let LitKind::Str(is, _) = lit.kind {
if Version::parse(&is.as_str()).is_ok() {
if Version::parse(is.as_str()).is_ok() {
return;
}
}
@ -619,7 +619,7 @@ fn check_mismatched_target_os(cx: &EarlyContext<'_>, attr: &Attribute) {
MetaItemKind::Word => {
if_chain! {
if let Some(ident) = meta.ident();
if let Some(os) = find_os(&*ident.name.as_str());
if let Some(os) = find_os(ident.name.as_str());
then {
mismatched.push((os, ident.span));
}


@ -272,7 +272,7 @@ fn simplify_not(cx: &LateContext<'_>, expr: &Expr<'_>) -> Option<String> {
.copied()
.flat_map(|(a, b)| vec![(a, b), (b, a)])
.find(|&(a, _)| {
let path: &str = &path.ident.name.as_str();
let path: &str = path.ident.name.as_str();
a == path
})
.and_then(|(_, neg_method)| Some(format!("{}.{}()", snippet_opt(cx, args[0].span)?, neg_method)))


@ -321,8 +321,8 @@ fn get_implementing_type<'a>(path: &QPath<'_>, candidates: &'a [&str], function:
if let TyKind::Path(QPath::Resolved(None, tp)) = &ty.kind;
if let [int] = &*tp.segments;
then {
let name = &int.ident.name.as_str();
candidates.iter().find(|c| name == *c).copied()
let name = int.ident.name.as_str();
candidates.iter().find(|c| &name == *c).copied()
} else {
None
}
@ -335,8 +335,8 @@ fn int_ty_to_sym<'tcx>(path: &QPath<'_>) -> Option<&'tcx str> {
if let QPath::Resolved(_, path) = *path;
if let [ty] = &*path.segments;
then {
let name = &ty.ident.name.as_str();
INTS.iter().find(|c| name == *c).copied()
let name = ty.ident.name.as_str();
INTS.iter().find(|c| &name == *c).copied()
} else {
None
}


@ -437,7 +437,7 @@ fn check_attrs<'a>(cx: &LateContext<'_>, valid_idents: &FxHashSet<String>, attrs
for attr in attrs {
if let AttrKind::DocComment(comment_kind, comment) = attr.kind {
let (comment, current_spans) = strip_doc_comment_decoration(&comment.as_str(), comment_kind, attr.span);
let (comment, current_spans) = strip_doc_comment_decoration(comment.as_str(), comment_kind, attr.span);
spans.extend_from_slice(&current_spans);
doc.push_str(&comment);
} else if attr.has_name(sym::doc) {


@ -153,7 +153,7 @@ fn check_variant(
);
}
}
let first = &def.variants[0].ident.name.as_str();
let first = def.variants[0].ident.name.as_str();
let mut pre = &first[..str_utils::camel_case_until(&*first).byte_index];
let mut post = &first[str_utils::camel_case_start(&*first).byte_index..];
for var in def.variants {


@ -68,7 +68,7 @@ impl<'tcx> LateLintPass<'tcx> for FloatLiteral {
if let LitKind::Float(sym, lit_float_ty) = lit.node;
then {
let sym_str = sym.as_str();
let formatter = FloatFormat::new(&sym_str);
let formatter = FloatFormat::new(sym_str);
// Try to bail out if the float is for sure fine.
// If its within the 2 decimal digits of being out of precision we
// check if the parsed representation is the same as the string


@ -696,7 +696,7 @@ impl<'tcx> LateLintPass<'tcx> for FloatingPointArithmetic {
let recv_ty = cx.typeck_results().expr_ty(&args[0]);
if recv_ty.is_floating_point() {
match &*path.ident.name.as_str() {
match path.ident.name.as_str() {
"ln" => check_ln1p(cx, expr, args),
"log" => check_log_base(cx, expr, args),
"powf" => check_powf(cx, expr, args),


@ -42,7 +42,7 @@ declare_lint_pass!(IterNotReturningIterator => [ITER_NOT_RETURNING_ITERATOR]);
impl LateLintPass<'_> for IterNotReturningIterator {
fn check_impl_item(&mut self, cx: &LateContext<'tcx>, impl_item: &'tcx ImplItem<'tcx>) {
let name: &str = &impl_item.ident.name.as_str();
let name = impl_item.ident.name.as_str();
if_chain! {
if let ImplItemKind::Fn(fn_sig, _) = &impl_item.kind;
let ret_ty = return_ty(cx, impl_item.hir_id());


@ -31,7 +31,7 @@ fn check_needless_collect_direct_usage<'tcx>(expr: &'tcx Expr<'_>, cx: &LateCont
let ty = cx.typeck_results().expr_ty(&args[0]);
let mut applicability = Applicability::MaybeIncorrect;
let is_empty_sugg = "next().is_none()".to_string();
let method_name = &*method.ident.name.as_str();
let method_name = method.ident.name.as_str();
let sugg = if is_type_diagnostic_item(cx, ty, sym::Vec) ||
is_type_diagnostic_item(cx, ty, sym::VecDeque) ||
is_type_diagnostic_item(cx, ty, sym::LinkedList) ||
@ -210,7 +210,7 @@ impl<'tcx> Visitor<'tcx> for IterFunctionVisitor<'_, 'tcx> {
if let Some(hir_id) = self.current_statement_hir_id {
self.hir_id_uses_map.insert(hir_id, self.uses.len());
}
match &*method_name.ident.name.as_str() {
match method_name.ident.name.as_str() {
"into_iter" => self.uses.push(Some(IterFunction {
func: IterFunctionKind::IntoIter,
span: expr.span,


@ -966,7 +966,7 @@ fn check_wild_err_arm<'tcx>(cx: &LateContext<'tcx>, ex: &Expr<'tcx>, arms: &[Arm
for pat in inner.iter() {
if let PatKind::Binding(_, id, ident, None) = pat.kind {
if ident.as_str().starts_with('_') && !is_local_used(cx, arm.body, id) {
ident_bind_name = (&ident.name.as_str()).to_string();
ident_bind_name = ident.name.as_str().to_string();
matching_wild = true;
}
}


@ -2154,7 +2154,7 @@ impl<'tcx> LateLintPass<'tcx> for Methods {
let self_ty = TraitRef::identity(cx.tcx, item.def_id.to_def_id()).self_ty().skip_binder();
wrong_self_convention::check(
cx,
&item.ident.name.as_str(),
item.ident.name.as_str(),
self_ty,
first_arg_ty,
first_arg_span,


@ -75,7 +75,7 @@ impl LateLintPass<'_> for ImportRename {
if let Some(import) = match snip.split_once(" as ") {
None => Some(snip.as_str()),
Some((import, rename)) => {
if rename.trim() == &*name.as_str() {
if rename.trim() == name.as_str() {
None
} else {
Some(import.trim())


@ -224,14 +224,14 @@ impl<'a, 'tcx, 'b> SimilarNamesNameVisitor<'a, 'tcx, 'b> {
match existing_name.len.cmp(&count) {
Ordering::Greater => {
if existing_name.len - count != 1
|| levenstein_not_1(&interned_name, &existing_name.interned.as_str())
|| levenstein_not_1(&interned_name, existing_name.interned.as_str())
{
continue;
}
},
Ordering::Less => {
if count - existing_name.len != 1
|| levenstein_not_1(&existing_name.interned.as_str(), &interned_name)
|| levenstein_not_1(existing_name.interned.as_str(), &interned_name)
{
continue;
}


@ -104,7 +104,7 @@ fn is_offending_macro<'a>(cx: &EarlyContext<'_>, span: Span, mac_braces: &'a Mac
};
if_chain! {
if let ExpnKind::Macro(MacroKind::Bang, mac_name) = span.ctxt().outer_expn_data().kind;
let name = &*mac_name.as_str();
let name = mac_name.as_str();
if let Some(braces) = mac_braces.macro_braces.get(name);
if let Some(snip) = snippet_opt(cx, span.ctxt().outer_expn_data().call_site);
// we must check only invocation sites


@ -53,7 +53,7 @@ impl<'tcx> LateLintPass<'tcx> for PathBufPushOverwrite {
if let Some(get_index_arg) = args.get(1);
if let ExprKind::Lit(ref lit) = get_index_arg.kind;
if let LitKind::Str(ref path_lit, _) = lit.node;
if let pushed_path = Path::new(&*path_lit.as_str());
if let pushed_path = Path::new(path_lit.as_str());
if let Some(pushed_path_lit) = pushed_path.to_str();
if pushed_path.has_root();
if let Some(root) = pushed_path.components().next();


@ -150,7 +150,7 @@ fn check_regex<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>, utf8: bool) {
if let ExprKind::Lit(ref lit) = expr.kind {
if let LitKind::Str(ref r, style) = lit.node {
let r = &r.as_str();
let r = r.as_str();
let offset = if let StrStyle::Raw(n) = style { 2 + n } else { 1 };
match parser.parse(r) {
Ok(r) => {


@ -89,7 +89,7 @@ fn detect_stable_sort_primitive(cx: &LateContext<'_>, expr: &Expr<'_>) -> Option
if_chain! {
if let ExprKind::MethodCall(method_name, _, args, _) = &expr.kind;
if let Some(slice) = &args.get(0);
if let Some(method) = SortingKind::from_stable_name(&method_name.ident.name.as_str());
if let Some(method) = SortingKind::from_stable_name(method_name.ident.name.as_str());
if let Some(slice_type) = is_slice_of_primitives(cx, slice);
then {
let args_str = args.iter().skip(1).map(|arg| Sugg::hir(cx, arg, "..").to_string()).collect::<Vec<String>>().join(", ");


@ -330,7 +330,7 @@ impl<'tcx> LateLintPass<'tcx> for StringLitAsBytes {
if let ExprKind::MethodCall(path, _, [recv], _) = &e.kind;
if path.ident.name == sym!(into_bytes);
if let ExprKind::MethodCall(path, _, [recv], _) = &recv.kind;
if matches!(&*path.ident.name.as_str(), "to_owned" | "to_string");
if matches!(path.ident.name.as_str(), "to_owned" | "to_string");
if let ExprKind::Lit(lit) = &recv.kind;
if let LitKind::Str(lit_content, _) = &lit.node;


@ -12,7 +12,7 @@ pub(super) fn check(cx: &LateContext<'_>, expr: &Expr<'_>) {
if let ExprKind::Binary(ref cmp, left, _) = expr.kind {
let op = cmp.node;
if op.is_comparison() && cx.typeck_results().expr_ty(left).is_unit() {
let result = match &*symbol.as_str() {
let result = match symbol.as_str() {
"assert_eq" | "debug_assert_eq" => "succeed",
"assert_ne" | "debug_assert_ne" => "fail",
_ => return,


@ -94,7 +94,7 @@ impl EarlyLintPass for UnusedUnit {
if_chain! {
if segments.len() == 1;
if ["Fn", "FnMut", "FnOnce"].contains(&&*segments[0].ident.name.as_str());
if ["Fn", "FnMut", "FnOnce"].contains(&segments[0].ident.name.as_str());
if let Some(args) = &segments[0].args;
if let ast::GenericArgs::Parenthesized(generic_args) = &**args;
if let ast::FnRetTy::Ty(ty) = &generic_args.output;


@ -371,9 +371,9 @@ impl EarlyLintPass for Write {
/// Return this and a boolean indicating whether it only consisted of a newline.
fn newline_span(fmtstr: &StrLit) -> (Span, bool) {
let sp = fmtstr.span;
let contents = &fmtstr.symbol.as_str();
let contents = fmtstr.symbol.as_str();
if *contents == r"\n" {
if contents == r"\n" {
return (sp, true);
}
@ -484,7 +484,7 @@ impl Write {
StrStyle::Raw(n) => Some(n as usize),
};
let mut parser = Parser::new(&str_sym, style, snippet_opt(cx, str_lit.span), false, ParseMode::Format);
let mut parser = Parser::new(str_sym, style, snippet_opt(cx, str_lit.span), false, ParseMode::Format);
let mut args = SimpleFormatArgs::default();
while let Some(arg) = parser.next() {
@ -589,7 +589,7 @@ impl Write {
lit.token.symbol.as_str().replace('{', "{{").replace('}', "}}")
},
LitKind::StrRaw(_) | LitKind::Str | LitKind::ByteStrRaw(_) | LitKind::ByteStr => continue,
LitKind::Byte | LitKind::Char => match &*lit.token.symbol.as_str() {
LitKind::Byte | LitKind::Char => match lit.token.symbol.as_str() {
"\"" if matches!(fmtstr.style, StrStyle::Cooked) => "\\\"",
"\"" if matches!(fmtstr.style, StrStyle::Raw(0)) => continue,
"\\\\" if matches!(fmtstr.style, StrStyle::Raw(_)) => "\\",
@ -671,7 +671,7 @@ fn check_newlines(fmtstr: &StrLit) -> bool {
let mut last_was_cr = false;
let mut should_lint = false;
let contents = &fmtstr.symbol.as_str();
let contents = fmtstr.symbol.as_str();
let mut cb = |r: Range<usize>, c: Result<char, EscapeError>| {
let c = c.unwrap();


@ -113,7 +113,7 @@ pub fn get_attr<'a>(
fn parse_attrs<F: FnMut(u64)>(sess: &Session, attrs: &[ast::Attribute], name: &'static str, mut f: F) {
for attr in get_attr(sess, attrs, name) {
if let Some(ref value) = attr.value_str() {
if let Ok(value) = FromStr::from_str(&value.as_str()) {
if let Ok(value) = FromStr::from_str(value.as_str()) {
f(value);
} else {
sess.span_err(attr.span, "not a number");


@ -47,7 +47,7 @@ impl ops::BitOrAssign for EagernessSuggestion {
/// Determine the eagerness of the given function call.
fn fn_eagerness(cx: &LateContext<'tcx>, fn_id: DefId, name: Symbol, args: &'tcx [Expr<'_>]) -> EagernessSuggestion {
use EagernessSuggestion::{Eager, Lazy, NoChange};
let name = &*name.as_str();
let name = name.as_str();
let ty = match cx.tcx.impl_of_method(fn_id) {
Some(id) => cx.tcx.type_of(id),

Some files were not shown because too many files have changed in this diff.