Rollup merge of #69481 - matthiaskrgr:single_char, r=ecstatic-morse

use char instead of &str for single char patterns
Authored by Mazdak Farrokhzad on 2020-02-28 17:17:30 +01:00, committed by GitHub
commit 07d9ed2c09
30 changed files with 44 additions and 44 deletions
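
For readers skimming the diff: the change is purely mechanical. Every `str` method that takes a pattern (`contains`, `starts_with`, `ends_with`, `rfind`, `split`, `splitn`, `match_indices`, ...) accepts a `char` as well as a `&str`; passing a `char` for a single character states the intent more precisely and lets the standard library use its char-specialized searcher instead of substring search. Clippy flags the old form with its `single_char_pattern` lint. A minimal sketch of the before/after (the sample values below are invented for illustration, not taken from the diff):

```rust
fn main() {
    // Illustrative only: the strings here are made up, not taken from the PR.
    let constraint = "=r";

    // Old style: a one-character string literal works as a pattern ...
    assert!(constraint.starts_with("="));
    // ... new style: pass a `char` instead, which is what this PR does throughout.
    assert!(constraint.starts_with('='));

    // The same applies to the other pattern-taking methods touched below.
    let dir = "s-1234-abcd-working";
    let dash_indices: Vec<_> = dir.match_indices('-').map(|(idx, _)| idx).collect();
    assert_eq!(dash_indices, vec![1, 6, 11]);
    assert_eq!("42.0f32".trim_end_matches("f32").split('.').next().unwrap(), "42");
}
```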

View File

@@ -182,7 +182,7 @@ fn parse_inline_asm<'a>(
};
let is_rw = output.is_some();
- let is_indirect = constraint_str.contains("*");
+ let is_indirect = constraint_str.contains('*');
outputs.push(ast::InlineAsmOutput {
constraint: output.unwrap_or(constraint),
expr,
@@ -199,7 +199,7 @@ fn parse_inline_asm<'a>(
let constraint = parse_asm_str(&mut p)?;
- if constraint.as_str().starts_with("=") {
+ if constraint.as_str().starts_with('=') {
struct_span_err!(
cx.parse_sess.span_diagnostic,
p.prev_span,
@@ -207,7 +207,7 @@ fn parse_inline_asm<'a>(
"input operand constraint contains '='"
)
.emit();
- } else if constraint.as_str().starts_with("+") {
+ } else if constraint.as_str().starts_with('+') {
struct_span_err!(
cx.parse_sess.span_diagnostic,
p.prev_span,
@@ -234,7 +234,7 @@ fn parse_inline_asm<'a>(
if OPTIONS.iter().any(|&opt| s == opt) {
cx.span_warn(p.prev_span, "expected a clobber, found an option");
- } else if s.as_str().starts_with("{") || s.as_str().ends_with("}") {
+ } else if s.as_str().starts_with('{') || s.as_str().ends_with('}') {
struct_span_err!(
cx.parse_sess.span_diagnostic,
p.prev_span,

View File

@@ -894,7 +894,7 @@ pub fn expand_preparsed_format_args(
};
let (is_literal, fmt_snippet) = match ecx.source_map().span_to_snippet(fmt_sp) {
- Ok(s) => (s.starts_with("\"") || s.starts_with("r#"), Some(s)),
+ Ok(s) => (s.starts_with('"') || s.starts_with("r#"), Some(s)),
_ => (false, None),
};

View File

@@ -917,7 +917,7 @@ impl ThinLTOImports {
if line.is_empty() {
let importing_module = current_module.take().expect("Importing module not set");
imports.insert(importing_module, mem::replace(&mut current_imports, vec![]));
- } else if line.starts_with(" ") {
+ } else if line.starts_with(' ') {
// Space marks an imported module
assert_ne!(current_module, None);
current_imports.push(line.trim().to_string());

View File

@@ -78,7 +78,7 @@ pub fn find_crate_name(sess: Option<&Session>, attrs: &[ast::Attribute], input:
}
if let Input::File(ref path) = *input {
if let Some(s) = path.file_stem().and_then(|s| s.to_str()) {
- if s.starts_with("-") {
+ if s.starts_with('-') {
let msg = format!(
"crate names cannot start with a `-`, but \
`{}` has a leading hyphen",

View File

@@ -4,7 +4,7 @@ use std::fs;
use std::io;
pub fn arg_expand(arg: String) -> Result<Vec<String>, Error> {
- if arg.starts_with("@") {
+ if arg.starts_with('@') {
let path = &arg[1..];
let file = match fs::read_to_string(path) {
Ok(file) => file,

View File

@@ -521,7 +521,7 @@ fn stdout_isatty() -> bool {
fn handle_explain(registry: Registry, code: &str, output: ErrorOutputType) {
let normalised =
- if code.starts_with("E") { code.to_string() } else { format!("E{0:0>4}", code) };
+ if code.starts_with('E') { code.to_string() } else { format!("E{0:0>4}", code) };
match registry.find_description(&normalised) {
Some(ref description) => {
let mut is_in_code_block = false;

View File

@@ -205,7 +205,7 @@ impl ToInternal<TokenStream> for TokenTree<Group, Punct, Ident, Literal> {
TokenTree::Literal(self::Literal {
lit: token::Lit { kind: token::Integer, symbol, suffix },
span,
- }) if symbol.as_str().starts_with("-") => {
+ }) if symbol.as_str().starts_with('-') => {
let minus = BinOp(BinOpToken::Minus);
let symbol = Symbol::intern(&symbol.as_str()[1..]);
let integer = TokenKind::lit(token::Integer, symbol, suffix);
@@ -216,7 +216,7 @@ impl ToInternal<TokenStream> for TokenTree<Group, Punct, Ident, Literal> {
TokenTree::Literal(self::Literal {
lit: token::Lit { kind: token::Float, symbol, suffix },
span,
- }) if symbol.as_str().starts_with("-") => {
+ }) if symbol.as_str().starts_with('-') => {
let minus = BinOp(BinOpToken::Minus);
let symbol = Symbol::intern(&symbol.as_str()[1..]);
let float = TokenKind::lit(token::Float, symbol, suffix);

View File

@@ -1504,7 +1504,7 @@ pub fn is_range_literal(sm: &SourceMap, expr: &Expr<'_>) -> bool {
let end_point = sm.end_point(*span);
if let Ok(end_string) = sm.span_to_snippet(end_point) {
- !(end_string.ends_with("}") || end_string.ends_with(")"))
+ !(end_string.ends_with('}') || end_string.ends_with(')'))
} else {
false
}

View File

@@ -107,7 +107,7 @@ impl AssertModuleSource<'tcx> {
}
// Split of the "special suffix" if there is one.
- let (user_path, cgu_special_suffix) = if let Some(index) = user_path.rfind(".") {
+ let (user_path, cgu_special_suffix) = if let Some(index) = user_path.rfind('.') {
(&user_path[..index], Some(&user_path[index + 1..]))
} else {
(&user_path[..], None)

View File

@@ -152,7 +152,7 @@ pub fn lock_file_path(session_dir: &Path) -> PathBuf {
let directory_name = session_dir.file_name().unwrap().to_string_lossy();
assert_no_characters_lost(&directory_name);
- let dash_indices: Vec<_> = directory_name.match_indices("-").map(|(idx, _)| idx).collect();
+ let dash_indices: Vec<_> = directory_name.match_indices('-').map(|(idx, _)| idx).collect();
if dash_indices.len() != 3 {
bug!(
"Encountered incremental compilation session directory with \
@@ -342,7 +342,7 @@ pub fn finalize_session_directory(sess: &Session, svh: Svh) {
// Keep the 's-{timestamp}-{random-number}' prefix, but replace the
// '-working' part with the SVH of the crate
- let dash_indices: Vec<_> = old_sub_dir_name.match_indices("-").map(|(idx, _)| idx).collect();
+ let dash_indices: Vec<_> = old_sub_dir_name.match_indices('-').map(|(idx, _)| idx).collect();
if dash_indices.len() != 3 {
bug!(
"Encountered incremental compilation session directory with \
@@ -594,7 +594,7 @@ fn extract_timestamp_from_session_dir(directory_name: &str) -> Result<SystemTime
return Err(());
}
- let dash_indices: Vec<_> = directory_name.match_indices("-").map(|(idx, _)| idx).collect();
+ let dash_indices: Vec<_> = directory_name.match_indices('-').map(|(idx, _)| idx).collect();
if dash_indices.len() != 3 {
return Err(());
}

View File

@@ -244,7 +244,7 @@ pub fn get_codegen_backend(sess: &Session) -> Box<dyn CodegenBackend> {
.as_ref()
.unwrap_or(&sess.target.target.options.codegen_backend);
let backend = match &codegen_name[..] {
- filename if filename.contains(".") => load_backend_from_dylib(filename.as_ref()),
+ filename if filename.contains('.') => load_backend_from_dylib(filename.as_ref()),
codegen_name => get_builtin_codegen_backend(codegen_name),
};

View File

@@ -178,7 +178,7 @@ fn main() {
for lib in output(&mut cmd).split_whitespace() {
let name = if lib.starts_with("-l") {
&lib[2..]
- } else if lib.starts_with("-") {
+ } else if lib.starts_with('-') {
&lib[1..]
} else if Path::new(lib).exists() {
// On MSVC llvm-config will print the full name to libraries, but

View File

@@ -612,7 +612,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> {
} else {
"'_".to_string()
};
- let suggestion = if snippet.ends_with(";") {
+ let suggestion = if snippet.ends_with(';') {
// `type X = impl Trait;`
format!("{} + {};", &snippet[..snippet.len() - 1], suggestable_fr_name)
} else {

View File

@@ -365,7 +365,7 @@ fn do_mir_borrowck<'a, 'tcx>(
// Skip over locals that begin with an underscore or have no name
match mbcx.local_names[local] {
Some(name) => {
- if name.as_str().starts_with("_") {
+ if name.as_str().starts_with('_') {
continue;
}
}

View File

@@ -122,7 +122,7 @@ impl<'tcx> MirSource<'tcx> {
/// type `T`.
pub fn default_name<T: ?Sized>() -> Cow<'static, str> {
let name = ::std::any::type_name::<T>();
- if let Some(tail) = name.rfind(":") { Cow::from(&name[tail + 1..]) } else { Cow::from(name) }
+ if let Some(tail) = name.rfind(':') { Cow::from(&name[tail + 1..]) } else { Cow::from(name) }
}
/// A streamlined trait that you can implement to create a pass; the

View File

@@ -753,7 +753,7 @@ impl<'a> Parser<'a> {
s.print_usize(float.trunc() as usize);
s.pclose();
s.s.word(".");
- s.s.word(fstr.splitn(2, ".").last().unwrap().to_string())
+ s.s.word(fstr.splitn(2, '.').last().unwrap().to_string())
});
err.span_suggestion(
lo.to(self.prev_span),

View File

@@ -553,7 +553,7 @@ impl DeadVisitor<'tcx> {
node_type: &str,
participle: &str,
) {
- if !name.as_str().starts_with("_") {
+ if !name.as_str().starts_with('_') {
self.tcx.struct_span_lint_hir(lint::builtin::DEAD_CODE, id, span, |lint| {
lint.build(&format!("{} is never {}: `{}`", node_type, participle, name)).emit()
});

View File

@@ -1103,7 +1103,7 @@ impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> {
// Macro uses will remove items from this set, and the remaining
// items will be reported as `unused_macros`.
fn insert_unused_macro(&mut self, ident: Ident, node_id: NodeId, span: Span) {
- if !ident.as_str().starts_with("_") {
+ if !ident.as_str().starts_with('_') {
self.r.unused_macros.insert(node_id, span);
}
}

View File

@@ -2663,7 +2663,7 @@ impl<'a> Resolver<'a> {
"{} as {}{}",
&snippet[..pos],
suggested_name,
- if snippet.ends_with(";") { ";" } else { "" }
+ if snippet.ends_with(';') { ";" } else { "" }
))
}
}

View File

@@ -101,7 +101,7 @@ impl TargetDataLayout {
match &*spec_parts {
["e"] => dl.endian = Endian::Little,
["E"] => dl.endian = Endian::Big,
- [p] if p.starts_with("P") => {
+ [p] if p.starts_with('P') => {
dl.instruction_address_space = parse_address_space(&p[1..], "P")?
}
["a", ref a @ ..] => dl.aggregate_align = align(a, "a")?,
@@ -111,7 +111,7 @@ impl TargetDataLayout {
dl.pointer_size = size(s, p)?;
dl.pointer_align = align(a, p)?;
}
- [s, ref a @ ..] if s.starts_with("i") => {
+ [s, ref a @ ..] if s.starts_with('i') => {
let bits = match s[1..].parse::<u64>() {
Ok(bits) => bits,
Err(_) => {
@@ -135,7 +135,7 @@ impl TargetDataLayout {
dl.i128_align = a;
}
}
- [s, ref a @ ..] if s.starts_with("v") => {
+ [s, ref a @ ..] if s.starts_with('v') => {
let v_size = size(&s[1..], "v")?;
let a = align(a, s)?;
if let Some(v) = dl.vector_align.iter_mut().find(|v| v.0 == v_size) {

View File

@@ -1858,7 +1858,7 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
{
let types: Vec<_> =
assoc_items.iter().map(|item| format!("{} = Type", item.ident)).collect();
- let code = if snippet.ends_with(">") {
+ let code = if snippet.ends_with('>') {
// The user wrote `Trait<'a>` or similar and we don't have a type we can
// suggest, but at least we can clue them to the correct syntax
// `Trait<'a, Item = Type>` while accounting for the `<'a>` in the

View File

@@ -412,7 +412,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
{
if let hir::ExprKind::Lit(_) = expr.kind {
if let Ok(src) = sm.span_to_snippet(sp) {
- if src.starts_with("\"") {
+ if src.starts_with('"') {
return Some((
sp,
"consider adding a leading `b`",
@@ -709,7 +709,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
{
// Remove fractional part from literal, for example `42.0f32` into `42`
let src = src.trim_end_matches(&checked_ty.to_string());
- src.split(".").next().unwrap()
+ src.split('.').next().unwrap()
} else {
src.trim_end_matches(&checked_ty.to_string())
},

View File

@@ -4996,7 +4996,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
let sugg = if receiver.ends_with(".clone()")
&& method_call_list.contains(&method_call.as_str())
{
- let max_len = receiver.rfind(".").unwrap();
+ let max_len = receiver.rfind('.').unwrap();
format!("{}{}", &receiver[..max_len], method_call)
} else {
if expr.precedence().order() < ExprPrecedence::MethodCall.order() {

View File

@@ -597,12 +597,12 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
Ok(lstring) => {
err.span_suggestion(
lhs_expr.span,
- if lstring.starts_with("&") {
+ if lstring.starts_with('&') {
remove_borrow_msg
} else {
msg
},
- if lstring.starts_with("&") {
+ if lstring.starts_with('&') {
// let a = String::new();
// let _ = &a + "bar";
format!("{}", &lstring[1..])
@@ -630,7 +630,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
is_assign,
) {
(Ok(l), Ok(r), false) => {
- let to_string = if l.starts_with("&") {
+ let to_string = if l.starts_with('&') {
// let a = String::new(); let b = String::new();
// let _ = &a + b;
format!("{}", &l[1..])

View File

@@ -2206,7 +2206,7 @@ fn from_target_feature(
item.span(),
format!("`{}` is not valid for this target", feature),
);
- if feature.starts_with("+") {
+ if feature.starts_with('+') {
let valid = whitelist.contains_key(&feature[1..]);
if valid {
err.help("consider removing the leading `+` in the feature name");
@@ -2337,7 +2337,7 @@ fn codegen_fn_attrs(tcx: TyCtxt<'_>, id: DefId) -> CodegenFnAttrs {
codegen_fn_attrs.flags |= CodegenFnAttrFlags::TRACK_CALLER;
} else if attr.check_name(sym::export_name) {
if let Some(s) = attr.value_str() {
- if s.as_str().contains("\0") {
+ if s.as_str().contains('\0') {
// `#[export_name = ...]` will be converted to a null-terminated string,
// so it may not contain any null characters.
struct_span_err!(

View File

@@ -707,7 +707,7 @@ impl LangString {
x if x.starts_with("edition") => {
data.edition = x[7..].parse::<Edition>().ok();
}
- x if allow_error_code_check && x.starts_with("E") && x.len() == 5 => {
+ x if allow_error_code_check && x.starts_with('E') && x.len() == 5 => {
if x[1..].parse::<u32>().is_ok() {
data.error_codes.push(x.to_owned());
seen_rust_tags = !seen_other_tags || seen_rust_tags;

View File

@@ -86,7 +86,7 @@ pub type NameDoc = (String, Option<String>);
crate fn ensure_trailing_slash(v: &str) -> impl fmt::Display + '_ {
crate::html::format::display_fn(move |f| {
- if !v.ends_with("/") && !v.is_empty() { write!(f, "{}/", v) } else { write!(f, "{}", v) }
+ if !v.ends_with('/') && !v.is_empty() { write!(f, "{}/", v) } else { write!(f, "{}", v) }
})
}

View File

@@ -534,7 +534,7 @@ fn extern_location(
if let Some(url) = extern_url {
let mut url = url.to_string();
- if !url.ends_with("/") {
+ if !url.ends_with('/') {
url.push('/');
}
return Remote(url);
@@ -548,7 +548,7 @@ fn extern_location(
.filter_map(|a| a.value_str())
.map(|url| {
let mut url = url.to_string();
- if !url.ends_with("/") {
+ if !url.ends_with('/') {
url.push('/')
}
Remote(url)

View File

@@ -19,7 +19,7 @@ fn extract_leading_metadata(s: &str) -> (Vec<&str>, &str) {
let mut count = 0;
for line in s.lines() {
- if line.starts_with("# ") || line.starts_with("%") {
+ if line.starts_with("# ") || line.starts_with('%') {
// trim the whitespace after the symbol
metadata.push(line[1..].trim_start());
count += line.len() + 1;

View File

@@ -2396,9 +2396,9 @@ impl<B: BufRead> Iterator for Lines<B> {
match self.buf.read_line(&mut buf) {
Ok(0) => None,
Ok(_n) => {
- if buf.ends_with("\n") {
+ if buf.ends_with('\n') {
buf.pop();
- if buf.ends_with("\r") {
+ if buf.ends_with('\r') {
buf.pop();
}
}