Rollup merge of #69736 - matthiaskrgr:even_more_clippy, r=Dylan-DPC

even more clippy cleanups

* Don't pass `&mut` where an immutable reference (`&`) is sufficient (clippy::unnecessary_mut_passed)
* Use the more efficient `&&str` to `String` conversion (clippy::inefficient_to_string)
* Don't eagerly evaluate arguments inside `.expect()`; use `unwrap_or_else` and a closure instead, as shown in the sketch below (clippy::expect_fun_call)
* Use right-hand `&` instead of left-hand `ref` (clippy::toplevel_ref_arg)
* Use simple `for i in x` loops instead of `while let Some(i) = x.next()` loops on iterators (clippy::while_let_on_iterator)
* Const items have a `'static` lifetime by default, so there's no need to annotate it (clippy::redundant_static_lifetimes)
* Remove redundant patterns when matching (`x @ _` to `x`) (clippy::redundant_pattern)
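For readers who don't have the lint names memorized, here is a minimal, self-contained Rust sketch of the before/after shapes these lints ask for. It is not code from this PR; every name in it (`GREETING`, `lookup`, `sum`, `describe`) is invented for illustration.

```rust
// Standalone sketch only -- not code from this PR. All names here are invented.

// clippy::redundant_static_lifetimes: `&'static str` is redundant on a const item.
const GREETING: &str = "hello";

fn lookup(entries: &[i32], wanted: i32) -> usize {
    // clippy::expect_fun_call: the old form built the panic message eagerly,
    //     .expect(&format!("no entry equal to {}", wanted));
    // `unwrap_or_else` only formats it on the failure path.
    entries
        .iter()
        .position(|&e| e == wanted)
        .unwrap_or_else(|| panic!("no entry equal to {}", wanted))
}

fn sum(entries: &[i32]) -> i32 {
    let mut total = 0;
    // clippy::while_let_on_iterator: prefer `for e in ...` over
    //     let mut iter = entries.iter();
    //     while let Some(e) = iter.next() { ... }
    for e in entries.iter() {
        total += e;
    }
    total
}

fn describe(x: Option<i32>) -> String {
    // clippy::toplevel_ref_arg: `let ref v = x` becomes `let v = &x`.
    let v = &x;
    // clippy::redundant_pattern: `n @ _` becomes plain `n`.
    match v {
        Some(n) => format!("got {}", n),
        None => String::from("nothing"),
    }
}

fn main() {
    let idx = lookup(&[1, 2, 3], 2);
    println!("{}: {}", GREETING, describe(Some(idx as i32)));
    println!("sum = {}", sum(&[1, 2, 3]));
}
```

The hunks below apply the same mechanical rewrites across the compiler, rustdoc, and libtest sources.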
Dylan DPC 2020-03-05 22:04:10 +01:00 committed by GitHub
commit 67d735c4bf
26 changed files with 56 additions and 60 deletions

@@ -1198,7 +1198,7 @@ impl<'tcx> TerminatorKind<'tcx> {
         t: BasicBlock,
         f: BasicBlock,
     ) -> TerminatorKind<'tcx> {
-        static BOOL_SWITCH_FALSE: &'static [u128] = &[0];
+        static BOOL_SWITCH_FALSE: &[u128] = &[0];
         TerminatorKind::SwitchInt {
             discr: cond,
             switch_ty: tcx.types.bool,

@@ -284,7 +284,7 @@ impl<'a, 'b> Context<'a, 'b> {
                 err.tool_only_span_suggestion(
                     sp,
                     &format!("use the `{}` trait", name),
-                    fmt.to_string(),
+                    (*fmt).to_string(),
                     Applicability::MaybeIncorrect,
                 );
             }
@@ -476,7 +476,7 @@ impl<'a, 'b> Context<'a, 'b> {
         match ty {
             Placeholder(_) => {
                 // record every (position, type) combination only once
-                let ref mut seen_ty = self.arg_unique_types[arg];
+                let seen_ty = &mut self.arg_unique_types[arg];
                 let i = seen_ty.iter().position(|x| *x == ty).unwrap_or_else(|| {
                     let i = seen_ty.len();
                     seen_ty.push(ty);
@@ -526,7 +526,7 @@ impl<'a, 'b> Context<'a, 'b> {
         // Map the arguments
         for i in 0..args_len {
-            let ref arg_types = self.arg_types[i];
+            let arg_types = &self.arg_types[i];
             let arg_offsets = arg_types.iter().map(|offset| sofar + *offset).collect::<Vec<_>>();
             self.arg_index_map.push(arg_offsets);
             sofar += self.arg_unique_types[i].len();
@@ -597,7 +597,7 @@ impl<'a, 'b> Context<'a, 'b> {
             let arg_idx = match arg_index_consumed.get_mut(i) {
                 None => 0, // error already emitted elsewhere
                 Some(offset) => {
-                    let ref idx_map = self.arg_index_map[i];
+                    let idx_map = &self.arg_index_map[i];
                     // unwrap_or branch: error already emitted elsewhere
                     let arg_idx = *idx_map.get(*offset).unwrap_or(&0);
                     *offset += 1;
@@ -721,7 +721,7 @@ impl<'a, 'b> Context<'a, 'b> {
             let name = names_pos[i];
             let span = self.ecx.with_def_site_ctxt(e.span);
             pats.push(self.ecx.pat_ident(span, name));
-            for ref arg_ty in self.arg_unique_types[i].iter() {
+            for arg_ty in self.arg_unique_types[i].iter() {
                 locals.push(Context::format_arg(self.ecx, self.macsp, e.span, arg_ty, name));
             }
             heads.push(self.ecx.expr_addr_of(e.span, e));

@@ -57,12 +57,12 @@ impl AllocFnFactory<'_, '_> {
     fn allocator_fn(&self, method: &AllocatorMethod) -> Stmt {
         let mut abi_args = Vec::new();
         let mut i = 0;
-        let ref mut mk = || {
+        let mut mk = || {
             let name = self.cx.ident_of(&format!("arg{}", i), self.span);
             i += 1;
             name
         };
-        let args = method.inputs.iter().map(|ty| self.arg_ty(ty, &mut abi_args, mk)).collect();
+        let args = method.inputs.iter().map(|ty| self.arg_ty(ty, &mut abi_args, &mut mk)).collect();
         let result = self.call_allocator(method.name, args);
         let (output_ty, output_expr) = self.ret_ty(&method.output, result);
         let decl = self.cx.fn_decl(abi_args, ast::FnRetTy::Ty(output_ty));

@@ -313,7 +313,7 @@ fn should_fail(i: &ast::Item) -> bool {
 fn should_panic(cx: &ExtCtxt<'_>, i: &ast::Item) -> ShouldPanic {
     match attr::find_by_name(&i.attrs, sym::should_panic) {
         Some(attr) => {
-            let ref sd = cx.parse_sess.span_diagnostic;
+            let sd = &cx.parse_sess.span_diagnostic;
             match attr.meta_item_list() {
                 // Handle #[should_panic(expected = "foo")]
@@ -378,7 +378,7 @@ fn test_type(cx: &ExtCtxt<'_>) -> TestType {
 fn has_test_signature(cx: &ExtCtxt<'_>, i: &ast::Item) -> bool {
     let has_should_panic_attr = attr::contains_name(&i.attrs, sym::should_panic);
-    let ref sd = cx.parse_sess.span_diagnostic;
+    let sd = &cx.parse_sess.span_diagnostic;
     if let ast::ItemKind::Fn(_, ref sig, ref generics, _) = i.kind {
         if let ast::Unsafe::Yes(span) = sig.header.unsafety {
             sd.struct_span_err(i.span, "unsafe functions cannot be used for tests")

@@ -326,7 +326,7 @@ fn mk_main(cx: &mut TestCtxt<'_>) -> P<ast::Item> {
 /// &[&test1, &test2]
 fn mk_tests_slice(cx: &TestCtxt<'_>, sp: Span) -> P<ast::Expr> {
     debug!("building test vector from {} tests", cx.test_cases.len());
-    let ref ecx = cx.ext_cx;
+    let ecx = &cx.ext_cx;
     ecx.expr_vec_slice(
         sp,

@@ -60,7 +60,7 @@ impl AsmBuilderMethods<'tcx> for Builder<'a, 'll, 'tcx> {
             .chain(ia.inputs.iter().map(|s| s.to_string()))
             .chain(ext_constraints)
             .chain(clobbers)
-            .chain(arch_clobbers.iter().map(|s| s.to_string()))
+            .chain(arch_clobbers.iter().map(|s| (*s).to_string()))
             .collect::<Vec<String>>()
             .join(",");

@@ -343,7 +343,8 @@ impl DirtyCleanVisitor<'tcx> {
                 &format!("clean/dirty auto-assertions not yet defined for {:?}", node),
             ),
         };
-        let labels = Labels::from_iter(labels.iter().flat_map(|s| s.iter().map(|l| l.to_string())));
+        let labels =
+            Labels::from_iter(labels.iter().flat_map(|s| s.iter().map(|l| (*l).to_string())));
         (name, labels)
     }

@@ -150,7 +150,7 @@ impl<'tcx> AutoTraitFinder<'tcx> {
         // SelectionContext to return it back to us.
         let (new_env, user_env) = match self.evaluate_predicates(
-            &mut infcx,
+            &infcx,
             trait_did,
             ty,
             orig_env,

@@ -1341,7 +1341,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
         stack: &TraitObligationStack<'o, 'tcx>,
     ) -> Result<SelectionCandidateSet<'tcx>, SelectionError<'tcx>> {
         let TraitObligationStack { obligation, .. } = *stack;
-        let ref obligation = Obligation {
+        let obligation = &Obligation {
             param_env: obligation.param_env,
             cause: obligation.cause.clone(),
             recursion_depth: obligation.recursion_depth,

@@ -369,7 +369,7 @@ impl LintStore {
                     return if *silent {
                         CheckLintNameResult::Ok(&lint_ids)
                     } else {
-                        CheckLintNameResult::Tool(Err((Some(&lint_ids), name.to_string())))
+                        CheckLintNameResult::Tool(Err((Some(&lint_ids), (*name).to_string())))
                     };
                 }
                 CheckLintNameResult::Ok(&lint_ids)
@@ -404,7 +404,7 @@ impl LintStore {
                     return if *silent {
                         CheckLintNameResult::Tool(Err((Some(&lint_ids), complete_name)))
                     } else {
-                        CheckLintNameResult::Tool(Err((Some(&lint_ids), name.to_string())))
+                        CheckLintNameResult::Tool(Err((Some(&lint_ids), (*name).to_string())))
                     };
                 }
                 CheckLintNameResult::Tool(Err((Some(&lint_ids), complete_name)))

@@ -604,8 +604,8 @@ fn write_diff<A: Analysis<'tcx>>(
     Ok(())
 }
-const BR_LEFT: &'static str = r#"<br align="left"/>"#;
-const BR_LEFT_SPACE: &'static str = r#"<br align="left"/> "#;
+const BR_LEFT: &str = r#"<br align="left"/>"#;
+const BR_LEFT_SPACE: &str = r#"<br align="left"/> "#;
 /// Line break policy that breaks at 40 characters and starts the next line with a single space.
 const LIMIT_30_ALIGN_1: Option<LineBreak> = Some(LineBreak { sequence: BR_LEFT_SPACE, limit: 30 });

@@ -22,20 +22,20 @@ pub fn visit_results<F>(
             let loc = Location { block, statement_index };
             results.reconstruct_before_statement_effect(&mut state, stmt, loc);
-            vis.visit_statement(&mut state, stmt, loc);
+            vis.visit_statement(&state, stmt, loc);
             results.reconstruct_statement_effect(&mut state, stmt, loc);
-            vis.visit_statement_exit(&mut state, stmt, loc);
+            vis.visit_statement_exit(&state, stmt, loc);
         }
         let loc = body.terminator_loc(block);
         let term = block_data.terminator();
         results.reconstruct_before_terminator_effect(&mut state, term, loc);
-        vis.visit_terminator(&mut state, term, loc);
+        vis.visit_terminator(&state, term, loc);
         results.reconstruct_terminator_effect(&mut state, term, loc);
-        vis.visit_terminator_exit(&mut state, term, loc);
+        vis.visit_terminator_exit(&state, term, loc);
     }
 }

@@ -311,9 +311,8 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
         // taking into account the `spread_arg`. If we could write
         // this is a single iterator (that handles `spread_arg`), then
        // `pass_argument` would be the loop body. It takes care to
-        // not advance `caller_iter` for ZSTs.
-        let mut locals_iter = body.args_iter();
-        while let Some(local) = locals_iter.next() {
+        // not advance `caller_iter` for ZSTs
+        for local in body.args_iter() {
             let dest = self.eval_place(&mir::Place::from(local))?;
             if Some(local) == body.spread_arg {
                 // Must be a tuple

@@ -920,7 +920,7 @@ impl<'a, 'tcx> Promoter<'a, 'tcx> {
         let (blocks, local_decls) = self.source.basic_blocks_and_local_decls_mut();
         match candidate {
             Candidate::Ref(loc) => {
-                let ref mut statement = blocks[loc.block].statements[loc.statement_index];
+                let statement = &mut blocks[loc.block].statements[loc.statement_index];
                 match statement.kind {
                     StatementKind::Assign(box (
                         _,
@@ -971,7 +971,7 @@ impl<'a, 'tcx> Promoter<'a, 'tcx> {
                 }
             }
             Candidate::Repeat(loc) => {
-                let ref mut statement = blocks[loc.block].statements[loc.statement_index];
+                let statement = &mut blocks[loc.block].statements[loc.statement_index];
                 match statement.kind {
                     StatementKind::Assign(box (_, Rvalue::Repeat(ref mut operand, _))) => {
                         let ty = operand.ty(local_decls, self.tcx);

@@ -2331,7 +2331,7 @@ fn specialize_one_pattern<'p, 'tcx>(
         PatKind::Binding { .. } | PatKind::Wild => Some(ctor_wild_subpatterns.iter().collect()),
         PatKind::Variant { adt_def, variant_index, ref subpatterns, .. } => {
-            let ref variant = adt_def.variants[variant_index];
+            let variant = &adt_def.variants[variant_index];
             let is_non_exhaustive = cx.is_foreign_non_exhaustive_variant(pat.ty, variant);
             Some(Variant(variant.def_id))
                 .filter(|variant_constructor| variant_constructor == constructor)

@@ -19,7 +19,7 @@ use rustc_span::{MultiSpan, Span, SpanSnippetError, DUMMY_SP};
 use log::{debug, trace};
 use std::mem;
-const TURBOFISH: &'static str = "use `::<...>` instead of `<...>` to specify type arguments";
+const TURBOFISH: &str = "use `::<...>` instead of `<...>` to specify type arguments";
 /// Creates a placeholder argument.
 pub(super) fn dummy_arg(ident: Ident) -> Param {

@@ -1432,7 +1432,7 @@ impl<'a, 'b> ImportResolver<'a, 'b> {
                     }
                     msg
                 }
-                ref s @ _ => bug!("unexpected import subclass {:?}", s),
+                ref s => bug!("unexpected import subclass {:?}", s),
             };
             let mut err = this.session.struct_span_err(binding.span, &msg);

@@ -737,8 +737,8 @@ impl ItemLikeVisitor<'tcx> for CheckItemTypesVisitor<'tcx> {
 }
 pub fn check_wf_new(tcx: TyCtxt<'_>) {
-    let mut visit = wfcheck::CheckTypeWellFormedVisitor::new(tcx);
-    tcx.hir().krate().par_visit_all_item_likes(&mut visit);
+    let visit = wfcheck::CheckTypeWellFormedVisitor::new(tcx);
+    tcx.hir().krate().par_visit_all_item_likes(&visit);
 }
 fn check_mod_item_types(tcx: TyCtxt<'_>, module_def_id: DefId) {

@@ -151,7 +151,7 @@ crate fn placeholder_type_error(
         .unwrap_or(&"ParamName");
     let mut sugg: Vec<_> =
-        placeholder_types.iter().map(|sp| (*sp, type_name.to_string())).collect();
+        placeholder_types.iter().map(|sp| (*sp, (*type_name).to_string())).collect();
     if generics.is_empty() {
         sugg.push((span, format!("<{}>", type_name)));
     } else if let Some(arg) = generics.iter().find(|arg| match arg.name {
@@ -160,7 +160,7 @@ crate fn placeholder_type_error(
     }) {
         // Account for `_` already present in cases like `struct S<_>(_);` and suggest
         // `struct S<T>(T);` instead of `struct S<_, T>(T);`.
-        sugg.push((arg.span, type_name.to_string()));
+        sugg.push((arg.span, (*type_name).to_string()));
     } else {
         sugg.push((
             generics.iter().last().unwrap().span.shrink_to_hi(),

@@ -50,7 +50,7 @@ pub use self::types::Type::*;
 pub use self::types::Visibility::{Inherited, Public};
 pub use self::types::*;
-const FN_OUTPUT_NAME: &'static str = "Output";
+const FN_OUTPUT_NAME: &str = "Output";
 pub trait Clean<T> {
     fn clean(&self, cx: &DocContext<'_>) -> T;

@@ -90,14 +90,14 @@ impl DocFS {
             let sender = self.errors.sender.clone().unwrap();
             rayon::spawn(move || match fs::write(&path, &contents) {
                 Ok(_) => {
-                    sender
-                        .send(None)
-                        .expect(&format!("failed to send error on \"{}\"", path.display()));
+                    sender.send(None).unwrap_or_else(|_| {
+                        panic!("failed to send error on \"{}\"", path.display())
+                    });
                 }
                 Err(e) => {
-                    sender
-                        .send(Some(format!("\"{}\": {}", path.display(), e)))
-                        .expect(&format!("failed to send non-error on \"{}\"", path.display()));
+                    sender.send(Some(format!("\"{}\": {}", path.display(), e))).unwrap_or_else(
+                        |_| panic!("failed to send non-error on \"{}\"", path.display()),
+                    );
                 }
             });
             Ok(())

@@ -62,7 +62,7 @@ impl<'a> From<&'a clean::Item> for ItemType {
     fn from(item: &'a clean::Item) -> ItemType {
         let inner = match item.inner {
             clean::StrippedItem(box ref item) => item,
-            ref inner @ _ => inner,
+            ref inner => inner,
         };
         match *inner {
@@ -194,7 +194,7 @@ impl fmt::Display for ItemType {
     }
 }
-pub const NAMESPACE_TYPE: &'static str = "t";
-pub const NAMESPACE_VALUE: &'static str = "v";
-pub const NAMESPACE_MACRO: &'static str = "m";
-pub const NAMESPACE_KEYWORD: &'static str = "k";
+pub const NAMESPACE_TYPE: &str = "t";
+pub const NAMESPACE_VALUE: &str = "v";
+pub const NAMESPACE_MACRO: &str = "m";
+pub const NAMESPACE_KEYWORD: &str = "k";

@@ -869,12 +869,8 @@ pub fn plain_summary_line(md: &str) -> String {
         }
     }
     let mut s = String::with_capacity(md.len() * 3 / 2);
-    let mut p = ParserWrapper { inner: Parser::new(md), is_in: 0, is_first: true };
-    while let Some(t) = p.next() {
-        if !t.is_empty() {
-            s.push_str(&t);
-        }
-    }
+    let p = ParserWrapper { inner: Parser::new(md), is_in: 0, is_first: true };
+    p.into_iter().filter(|t| !t.is_empty()).for_each(|i| s.push_str(&i));
     s
 }

@@ -2727,7 +2727,7 @@ fn naive_assoc_href(it: &clean::Item, link: AssocItemLink<'_>) -> String {
     let name = it.name.as_ref().unwrap();
     let ty = match it.type_() {
         Typedef | AssocType => AssocType,
-        s @ _ => s,
+        s => s,
     };
     let anchor = format!("#{}.{}", ty, name);
@@ -3150,7 +3150,7 @@ fn render_attribute(attr: &ast::MetaItem) -> Option<String> {
     }
 }
-const ATTRIBUTE_WHITELIST: &'static [Symbol] = &[
+const ATTRIBUTE_WHITELIST: &[Symbol] = &[
     sym::export_name,
     sym::lang,
     sym::link_section,
@@ -4610,7 +4610,7 @@ fn item_keyword(w: &mut Buffer, cx: &Context, it: &clean::Item) {
     document(w, cx, it)
 }
-crate const BASIC_KEYWORDS: &'static str = "rust, rustlang, rust-lang";
+crate const BASIC_KEYWORDS: &str = "rust, rustlang, rust-lang";
 fn make_item_keywords(it: &clean::Item) -> String {
     format!("{}, {}", BASIC_KEYWORDS, it.name.as_ref().unwrap())

@@ -19,9 +19,9 @@ cfg_if::cfg_if! {
     if #[cfg(target_os = "fuchsia")] {
         // fuchsia doesn't have /dev/null
     } else if #[cfg(target_os = "redox")] {
-        const DEV_NULL: &'static str = "null:\0";
+        const DEV_NULL: &str = "null:\0";
     } else {
-        const DEV_NULL: &'static str = "/dev/null\0";
+        const DEV_NULL: &str = "/dev/null\0";
     }
 }

@@ -96,7 +96,7 @@ use time::TestExecTime;
 // Process exit code to be used to indicate test failures.
 const ERROR_EXIT_CODE: i32 = 101;
-const SECONDARY_TEST_INVOKER_VAR: &'static str = "__RUST_TEST_INVOKE";
+const SECONDARY_TEST_INVOKER_VAR: &str = "__RUST_TEST_INVOKE";
 // The default console test runner. It accepts the command line
 // arguments and a vector of test_descs.
@@ -158,7 +158,7 @@ pub fn test_main_static_abort(tests: &[&TestDescAndFn]) {
         .filter(|test| test.desc.name.as_slice() == name)
         .map(make_owned_test)
         .next()
-        .expect(&format!("couldn't find a test with the provided name '{}'", name));
+        .unwrap_or_else(|| panic!("couldn't find a test with the provided name '{}'", name));
     let TestDescAndFn { desc, testfn } = test;
     let testfn = match testfn {
         StaticTestFn(f) => f,