Rollup merge of #126088 - onur-ozkan:brooming, r=albertlarsan68

[1/2] clean-up / general improvements

This PR applies various clippy suggestions to the tools. I have only applied the suggestions that make sense and left out trivial changes (e.g., suggestions like 'remove &' are ignored to preserve the original commit history for those lines).

I am planning to do the same for the library and the compiler, but those would add too many changes to this PR, so I will handle them in a separate PR later.
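
For reference, most of the applied suggestions are of the flavor sketched below. This is a minimal, self-contained illustration; the identifiers are made up for the example and are not taken verbatim from the diff.

    use std::collections::HashMap;

    // Prefer a slice parameter over `&Vec<&str>` (clippy's `ptr_arg` lint).
    fn has_rs_extension(extensions: &[&str]) -> bool {
        extensions.contains(&"rs")
    }

    fn main() {
        let mut targets: HashMap<String, u32> = HashMap::new();
        targets.insert("x86_64-unknown-linux-gnu".to_owned(), 1);

        // Prefer `contains_key(..)` over `get(..).is_some()`.
        assert!(targets.contains_key("x86_64-unknown-linux-gnu"));

        // Prefer `&["rs"]` over `&vec!["rs"]` (clippy's `useless_vec` lint).
        assert!(has_rs_extension(&["rs", "toml"]));

        // Prefer char patterns for single characters (clippy's `single_char_pattern` lint).
        let line = "only-x86 only-arm";
        assert!(line.starts_with("only") && line.contains(' '));
    }
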
Matthias Krüger 2024-06-13 22:55:44 +02:00 committed by GitHub
commit c22ee450c5
26 changed files with 139 additions and 160 deletions

@ -93,7 +93,7 @@ fn get_modified_rs_files(build: &Builder<'_>) -> Result<Option<Vec<String>>, Str
return Ok(None);
}
get_git_modified_files(&build.config.git_config(), Some(&build.config.src), &vec!["rs"])
get_git_modified_files(&build.config.git_config(), Some(&build.config.src), &["rs"])
}
#[derive(serde_derive::Deserialize)]

@ -495,7 +495,7 @@ impl Builder {
Some(p) => p,
None => return false,
};
pkg.target.get(&c.target).is_some()
pkg.target.contains_key(&c.target)
};
extensions.retain(&has_component);
components.retain(&has_component);

@ -21,7 +21,7 @@ fn output_result(cmd: &mut Command) -> Result<String, String> {
String::from_utf8(output.stderr).map_err(|err| format!("{err:?}"))?
));
}
Ok(String::from_utf8(output.stdout).map_err(|err| format!("{err:?}"))?)
String::from_utf8(output.stdout).map_err(|err| format!("{err:?}"))
}
/// Finds the remote for rust-lang/rust.
@ -64,18 +64,14 @@ pub fn rev_exists(rev: &str, git_dir: Option<&Path>) -> Result<bool, String> {
match output.status.code() {
Some(0) => Ok(true),
Some(128) => Ok(false),
None => {
return Err(format!(
"git didn't exit properly: {}",
String::from_utf8(output.stderr).map_err(|err| format!("{err:?}"))?
));
}
Some(code) => {
return Err(format!(
"git command exited with status code: {code}: {}",
String::from_utf8(output.stderr).map_err(|err| format!("{err:?}"))?
));
}
None => Err(format!(
"git didn't exit properly: {}",
String::from_utf8(output.stderr).map_err(|err| format!("{err:?}"))?
)),
Some(code) => Err(format!(
"git command exited with status code: {code}: {}",
String::from_utf8(output.stderr).map_err(|err| format!("{err:?}"))?
)),
}
}
@ -96,7 +92,7 @@ pub fn updated_master_branch(
}
}
Err(format!("Cannot find any suitable upstream master branch"))
Err("Cannot find any suitable upstream master branch".to_owned())
}
pub fn get_git_merge_base(
@ -118,7 +114,7 @@ pub fn get_git_merge_base(
pub fn get_git_modified_files(
config: &GitConfig<'_>,
git_dir: Option<&Path>,
extensions: &Vec<&str>,
extensions: &[&str],
) -> Result<Option<Vec<String>>, String> {
let merge_base = get_git_merge_base(config, git_dir)?;

@ -582,7 +582,7 @@ impl TargetCfgs {
name,
Some(
value
.strip_suffix("\"")
.strip_suffix('\"')
.expect("key-value pair should be properly quoted"),
),
)

@ -82,7 +82,7 @@ impl EarlyProps {
panic!("errors encountered during EarlyProps parsing");
}
return props;
props
}
}
@ -382,7 +382,7 @@ impl TestProps {
// Individual flags can be single-quoted to preserve spaces; see
// <https://github.com/rust-lang/rust/pull/115948/commits/957c5db6>.
flags
.split("'")
.split('\'')
.enumerate()
.flat_map(|(i, f)| {
if i % 2 == 1 { vec![f] } else { f.split_whitespace().collect() }
@ -613,7 +613,7 @@ impl TestProps {
for key in &["RUST_TEST_NOCAPTURE", "RUST_TEST_THREADS"] {
if let Ok(val) = env::var(key) {
if self.exec_env.iter().find(|&&(ref x, _)| x == key).is_none() {
if !self.exec_env.iter().any(|&(ref x, _)| x == key) {
self.exec_env.push(((*key).to_owned(), val))
}
}
@ -991,7 +991,7 @@ pub(crate) fn check_directive(directive_ln: &str) -> CheckDirectiveResult<'_> {
let trailing = post.trim().split_once(' ').map(|(pre, _)| pre).unwrap_or(post);
let trailing_directive = {
// 1. is the directive name followed by a space? (to exclude `:`)
matches!(directive_ln.get(directive_name.len()..), Some(s) if s.starts_with(" "))
matches!(directive_ln.get(directive_name.len()..), Some(s) if s.starts_with(' '))
// 2. is what is after that directive also a directive (ex: "only-x86 only-arm")
&& KNOWN_DIRECTIVE_NAMES.contains(&trailing)
}
@ -1363,7 +1363,7 @@ pub fn extract_llvm_version_from_binary(binary_path: &str) -> Option<u32> {
}
let version = String::from_utf8(output.stdout).ok()?;
for line in version.lines() {
if let Some(version) = line.split("LLVM version ").skip(1).next() {
if let Some(version) = line.split("LLVM version ").nth(1) {
return extract_llvm_version(version);
}
}
@ -1394,7 +1394,7 @@ where
let min = parse(min)?;
let max = match max {
Some(max) if max.is_empty() => return None,
Some("") => return None,
Some(max) => parse(max)?,
_ => min,
};
@ -1466,12 +1466,12 @@ pub fn make_test_description<R: Read>(
decision!(ignore_gdb(config, ln));
decision!(ignore_lldb(config, ln));
if config.target == "wasm32-unknown-unknown" {
if config.parse_name_directive(ln, directives::CHECK_RUN_RESULTS) {
decision!(IgnoreDecision::Ignore {
reason: "ignored on WASM as the run results cannot be checked there".into(),
});
}
if config.target == "wasm32-unknown-unknown"
&& config.parse_name_directive(ln, directives::CHECK_RUN_RESULTS)
{
decision!(IgnoreDecision::Ignore {
reason: "ignored on WASM as the run results cannot be checked there".into(),
});
}
should_fail |= config.parse_name_directive(ln, "should-fail");

@ -58,7 +58,7 @@ pub(super) fn parse_cfg_name_directive<'a>(
// Some of the matchers might be "" depending on what the target information is. To avoid
// problems we outright reject empty directives.
if name == "" {
if name.is_empty() {
return ParsedNameDirective::not_a_directive();
}

@ -1147,7 +1147,7 @@ fn extract_lldb_version(full_version_line: &str) -> Option<(u32, bool)> {
}
fn not_a_digit(c: char) -> bool {
!c.is_digit(10)
!c.is_ascii_digit()
}
fn check_overlapping_tests(found_paths: &HashSet<PathBuf>) {

@ -6,7 +6,6 @@ mod tests;
pub use self::imp::read2;
use std::io::{self, Write};
use std::mem::replace;
use std::process::{Child, Output};
#[derive(Copy, Clone, Debug)]
@ -101,10 +100,10 @@ impl ProcOutput {
return;
}
let mut head = replace(bytes, Vec::new());
let mut head = std::mem::take(bytes);
// Don't truncate if this as a whole line.
// That should make it less likely that we cut a JSON line in half.
if head.last() != Some(&('\n' as u8)) {
if head.last() != Some(&b'\n') {
head.truncate(MAX_OUT_LEN);
}
let skipped = new_len - head.len();

@ -64,9 +64,9 @@ fn test_abbreviate_filterss_are_detected() {
#[test]
fn test_abbreviate_filters_avoid_abbreviations() {
let mut out = ProcOutput::new();
let filters = &[std::iter::repeat('a').take(64).collect::<String>()];
let filters = &["a".repeat(64)];
let mut expected = vec![b'.'; MAX_OUT_LEN - FILTERED_PATHS_PLACEHOLDER_LEN as usize];
let mut expected = vec![b'.'; MAX_OUT_LEN - FILTERED_PATHS_PLACEHOLDER_LEN];
expected.extend_from_slice(filters[0].as_bytes());
out.extend(&expected, filters);
@ -81,7 +81,7 @@ fn test_abbreviate_filters_avoid_abbreviations() {
#[test]
fn test_abbreviate_filters_can_still_cause_abbreviations() {
let mut out = ProcOutput::new();
let filters = &[std::iter::repeat('a').take(64).collect::<String>()];
let filters = &["a".repeat(64)];
let mut input = vec![b'.'; MAX_OUT_LEN];
input.extend_from_slice(filters[0].as_bytes());

@ -374,11 +374,11 @@ impl<'test> TestCx<'test> {
// if a test does not crash, consider it an error
if proc_res.status.success() || matches!(proc_res.status.code(), Some(1 | 0)) {
self.fatal(&format!(
self.fatal(
"test no longer crashes/triggers ICE! Please give it a mearningful name, \
add a doc-comment to the start of the test explaining why it exists and \
move it to tests/ui or wherever you see fit."
));
move it to tests/ui or wherever you see fit.",
);
}
}
@ -697,10 +697,10 @@ impl<'test> TestCx<'test> {
// since it is extensively used in the testsuite.
check_cfg.push_str("cfg(FALSE");
for revision in &self.props.revisions {
check_cfg.push_str(",");
check_cfg.push_str(&normalize_revision(&revision));
check_cfg.push(',');
check_cfg.push_str(&normalize_revision(revision));
}
check_cfg.push_str(")");
check_cfg.push(')');
cmd.args(&["--check-cfg", &check_cfg]);
}
@ -818,7 +818,7 @@ impl<'test> TestCx<'test> {
// Append the other `cdb-command:`s
for line in &dbg_cmds.commands {
script_str.push_str(line);
script_str.push_str("\n");
script_str.push('\n');
}
script_str.push_str("qq\n"); // Quit the debugger (including remote debugger, if any)
@ -1200,7 +1200,7 @@ impl<'test> TestCx<'test> {
// Append the other commands
for line in &dbg_cmds.commands {
script_str.push_str(line);
script_str.push_str("\n");
script_str.push('\n');
}
// Finally, quit the debugger
@ -1250,7 +1250,7 @@ impl<'test> TestCx<'test> {
// Remove options that are either unwanted (-O) or may lead to duplicates due to RUSTFLAGS.
let options_to_remove = ["-O".to_owned(), "-g".to_owned(), "--debuginfo".to_owned()];
options.iter().filter(|x| !options_to_remove.contains(x)).map(|x| x.clone()).collect()
options.iter().filter(|x| !options_to_remove.contains(x)).cloned().collect()
}
fn maybe_add_external_args(&self, cmd: &mut Command, args: &Vec<String>) {
@ -2504,8 +2504,8 @@ impl<'test> TestCx<'test> {
// This works with both `--emit asm` (as default output name for the assembly)
// and `ptx-linker` because the latter can write output at requested location.
let output_path = self.output_base_name().with_extension(extension);
let output_file = TargetLocation::ThisFile(output_path.clone());
output_file
TargetLocation::ThisFile(output_path.clone())
}
}
@ -2752,7 +2752,7 @@ impl<'test> TestCx<'test> {
for entry in walkdir::WalkDir::new(dir) {
let entry = entry.expect("failed to read file");
if entry.file_type().is_file()
&& entry.path().extension().and_then(|p| p.to_str()) == Some("html".into())
&& entry.path().extension().and_then(|p| p.to_str()) == Some("html")
{
let status =
Command::new("tidy").args(&tidy_args).arg(entry.path()).status().unwrap();
@ -2783,8 +2783,7 @@ impl<'test> TestCx<'test> {
&compare_dir,
self.config.verbose,
|file_type, extension| {
file_type.is_file()
&& (extension == Some("html".into()) || extension == Some("js".into()))
file_type.is_file() && (extension == Some("html") || extension == Some("js"))
},
) {
return;
@ -2830,11 +2829,11 @@ impl<'test> TestCx<'test> {
}
match String::from_utf8(line.clone()) {
Ok(line) => {
if line.starts_with("+") {
if line.starts_with('+') {
write!(&mut out, "{}", line.green()).unwrap();
} else if line.starts_with("-") {
} else if line.starts_with('-') {
write!(&mut out, "{}", line.red()).unwrap();
} else if line.starts_with("@") {
} else if line.starts_with('@') {
write!(&mut out, "{}", line.blue()).unwrap();
} else {
out.write_all(line.as_bytes()).unwrap();
@ -2907,7 +2906,7 @@ impl<'test> TestCx<'test> {
&& line.ends_with(';')
{
if let Some(ref mut other_files) = other_files {
other_files.push(line.rsplit("mod ").next().unwrap().replace(";", ""));
other_files.push(line.rsplit("mod ").next().unwrap().replace(';', ""));
}
None
} else {
@ -3139,7 +3138,7 @@ impl<'test> TestCx<'test> {
let mut string = String::new();
for cgu in cgus {
string.push_str(&cgu[..]);
string.push_str(" ");
string.push(' ');
}
string
@ -3172,10 +3171,7 @@ impl<'test> TestCx<'test> {
// CGUs joined with "--". This function splits such composite CGU names
// and handles each component individually.
fn remove_crate_disambiguators_from_set_of_cgu_names(cgus: &str) -> String {
cgus.split("--")
.map(|cgu| remove_crate_disambiguator_from_cgu(cgu))
.collect::<Vec<_>>()
.join("--")
cgus.split("--").map(remove_crate_disambiguator_from_cgu).collect::<Vec<_>>().join("--")
}
}
@ -3357,7 +3353,7 @@ impl<'test> TestCx<'test> {
// endif
}
if self.config.target.contains("msvc") && self.config.cc != "" {
if self.config.target.contains("msvc") && !self.config.cc.is_empty() {
// We need to pass a path to `lib.exe`, so assume that `cc` is `cl.exe`
// and that `lib.exe` lives next to it.
let lib = Path::new(&self.config.cc).parent().unwrap().join("lib.exe");
@ -3639,7 +3635,7 @@ impl<'test> TestCx<'test> {
// endif
}
if self.config.target.contains("msvc") && self.config.cc != "" {
if self.config.target.contains("msvc") && !self.config.cc.is_empty() {
// We need to pass a path to `lib.exe`, so assume that `cc` is `cl.exe`
// and that `lib.exe` lives next to it.
let lib = Path::new(&self.config.cc).parent().unwrap().join("lib.exe");
@ -3830,7 +3826,7 @@ impl<'test> TestCx<'test> {
&& !self.props.dont_check_compiler_stderr
{
self.fatal_proc_rec(
&format!("compiler output got truncated, cannot compare with reference file"),
"compiler output got truncated, cannot compare with reference file",
&proc_res,
);
}
@ -4011,8 +4007,8 @@ impl<'test> TestCx<'test> {
crate_name.to_str().expect("crate name implies file name must be valid UTF-8");
// replace `a.foo` -> `a__foo` for crate name purposes.
// replace `revision-name-with-dashes` -> `revision_name_with_underscore`
let crate_name = crate_name.replace(".", "__");
let crate_name = crate_name.replace("-", "_");
let crate_name = crate_name.replace('.', "__");
let crate_name = crate_name.replace('-', "_");
rustc.arg("--crate-name");
rustc.arg(crate_name);
}
@ -4060,7 +4056,7 @@ impl<'test> TestCx<'test> {
fn check_mir_dump(&self, test_info: MiroptTest) {
let test_dir = self.testpaths.file.parent().unwrap();
let test_crate =
self.testpaths.file.file_stem().unwrap().to_str().unwrap().replace("-", "_");
self.testpaths.file.file_stem().unwrap().to_str().unwrap().replace('-', "_");
let MiroptTest { run_filecheck, suffix, files, passes: _ } = test_info;

@ -148,5 +148,5 @@ fn check_single_line(line: &str, check_line: &str) -> bool {
rest = &rest[pos + current_fragment.len()..];
}
if !can_end_anywhere && !rest.is_empty() { false } else { true }
can_end_anywhere || rest.is_empty()
}

@ -58,11 +58,11 @@ fn test_extract_lldb_version() {
#[test]
fn is_test_test() {
assert_eq!(true, is_test(&OsString::from("a_test.rs")));
assert_eq!(false, is_test(&OsString::from(".a_test.rs")));
assert_eq!(false, is_test(&OsString::from("a_cat.gif")));
assert_eq!(false, is_test(&OsString::from("#a_dog_gif")));
assert_eq!(false, is_test(&OsString::from("~a_temp_file")));
assert!(is_test(&OsString::from("a_test.rs")));
assert!(!is_test(&OsString::from(".a_test.rs")));
assert!(!is_test(&OsString::from("a_cat.gif")));
assert!(!is_test(&OsString::from("#a_dog_gif")));
assert!(!is_test(&OsString::from("~a_temp_file")));
}
#[test]

@ -418,15 +418,13 @@ impl<'a> Validator<'a> {
} else {
self.fail_expecting(id, expected);
}
} else {
if !self.missing_ids.contains(id) {
self.missing_ids.insert(id);
} else if !self.missing_ids.contains(id) {
self.missing_ids.insert(id);
let sels = json_find::find_selector(&self.krate_json, &Value::String(id.0.clone()));
assert_ne!(sels.len(), 0);
let sels = json_find::find_selector(&self.krate_json, &Value::String(id.0.clone()));
assert_ne!(sels.len(), 0);
self.fail(id, ErrorKind::NotFound(sels))
}
self.fail(id, ErrorKind::NotFound(sels))
}
}

@ -121,13 +121,13 @@ impl<'a> LintExtractor<'a> {
};
to_link.extend(group_lints);
let brackets: Vec<_> = group_lints.iter().map(|l| format!("[{}]", l)).collect();
write!(result, "| {} | {} | {} |\n", group_name, description, brackets.join(", "))
writeln!(result, "| {} | {} | {} |", group_name, description, brackets.join(", "))
.unwrap();
}
result.push('\n');
result.push_str("[warn-by-default]: listing/warn-by-default.md\n");
for lint_name in to_link {
let lint_def = match lints.iter().find(|l| l.name == lint_name.replace("-", "_")) {
let lint_def = match lints.iter().find(|l| l.name == lint_name.replace('-', "_")) {
Some(def) => def,
None => {
let msg = format!(
@ -144,9 +144,9 @@ impl<'a> LintExtractor<'a> {
}
}
};
write!(
writeln!(
result,
"[{}]: listing/{}#{}\n",
"[{}]: listing/{}#{}",
lint_name,
lint_def.level.doc_filename(),
lint_name

@ -84,8 +84,8 @@ impl Lint {
for &expected in &["### Example", "### Explanation", "{{produces}}"] {
if expected == "{{produces}}" && self.is_ignored() {
if self.doc_contains("{{produces}}") {
return Err(format!(
"the lint example has `ignore`, but also contains the {{{{produces}}}} marker\n\
return Err(
"the lint example has `ignore`, but also contains the {{produces}} marker\n\
\n\
The documentation generator cannot generate the example output when the \
example is ignored.\n\
@ -111,7 +111,7 @@ impl Lint {
Replacing the output with the text of the example you \
compiled manually yourself.\n\
"
).into());
.into());
}
continue;
}
@ -519,11 +519,11 @@ impl<'a> LintExtractor<'a> {
let mut these_lints: Vec<_> = lints.iter().filter(|lint| lint.level == level).collect();
these_lints.sort_unstable_by_key(|lint| &lint.name);
for lint in &these_lints {
write!(result, "* [`{}`](#{})\n", lint.name, lint.name.replace("_", "-")).unwrap();
writeln!(result, "* [`{}`](#{})", lint.name, lint.name.replace('_', "-")).unwrap();
}
result.push('\n');
for lint in &these_lints {
write!(result, "## {}\n\n", lint.name.replace("_", "-")).unwrap();
write!(result, "## {}\n\n", lint.name.replace('_', "-")).unwrap();
for line in &lint.doc {
result.push_str(line);
result.push('\n');
@ -583,7 +583,7 @@ fn add_rename_redirect(level: Level, output: &mut String) {
let filename = level.doc_filename().replace(".md", ".html");
output.push_str(RENAME_START);
for (from, to) in *names {
write!(output, " \"#{from}\": \"{filename}#{to}\",\n").unwrap();
writeln!(output, " \"#{from}\": \"{filename}#{to}\",").unwrap();
}
output.push_str(RENAME_END);
}

@ -216,11 +216,9 @@ pub fn gather_bolt_profiles(
log::info!("Profile file count: {}", profiles.len());
// Delete the gathered profiles
for profile in glob::glob(&format!("{profile_prefix}*"))?.into_iter() {
if let Ok(profile) = profile {
if let Err(error) = std::fs::remove_file(&profile) {
log::error!("Cannot delete BOLT profile {}: {error:?}", profile.display());
}
for profile in glob::glob(&format!("{profile_prefix}*"))?.flatten() {
if let Err(error) = std::fs::remove_file(&profile) {
log::error!("Cannot delete BOLT profile {}: {error:?}", profile.display());
}
}

@ -317,13 +317,11 @@ fn run(support_lib_count: usize, exe: String, all_args: Vec<String>) {
t!(io::copy(&mut (&mut client).take(amt), &mut stdout));
t!(stdout.flush());
}
} else if amt == 0 {
stderr_done = true;
} else {
if amt == 0 {
stderr_done = true;
} else {
t!(io::copy(&mut (&mut client).take(amt), &mut stderr));
t!(stderr.flush());
}
t!(io::copy(&mut (&mut client).take(amt), &mut stderr));
t!(stderr.flush());
}
}

@ -282,7 +282,7 @@ fn handle_run(socket: TcpStream, work: &Path, tmp: &Path, lock: &Mutex<()>, conf
cmd.env(library_path, env::join_paths(paths).unwrap());
// Some tests assume RUST_TEST_TMPDIR exists
cmd.env("RUST_TEST_TMPDIR", tmp.to_owned());
cmd.env("RUST_TEST_TMPDIR", tmp);
let socket = Arc::new(Mutex::new(reader.into_inner()));

@ -88,7 +88,7 @@ fn check_section<'a>(
let trimmed_line = line.trim_start_matches(' ');
if trimmed_line.starts_with("//")
|| (trimmed_line.starts_with("#") && !trimmed_line.starts_with("#!"))
|| (trimmed_line.starts_with('#') && !trimmed_line.starts_with("#!"))
|| trimmed_line.starts_with(is_close_bracket)
{
continue;

@ -61,7 +61,7 @@ mod os_impl {
fs::remove_file(&path).expect("Deleted temp file");
// If the file is executable, then we assume that this
// filesystem does not track executability, so skip this check.
return if exec { Unsupported } else { Supported };
if exec { Unsupported } else { Supported }
}
Err(e) => {
// If the directory is read-only or we otherwise don't have rights,
@ -76,7 +76,7 @@ mod os_impl {
panic!("unable to create temporary file `{:?}`: {:?}", path, e);
}
};
}
}
for &source_dir in sources {
@ -92,7 +92,7 @@ mod os_impl {
}
}
return true;
true
}
// FIXME: check when rust-installer test sh files will be removed,

@ -699,11 +699,9 @@ fn check_permitted_dependencies(
for dep in deps {
let dep = pkg_from_id(metadata, dep);
// If this path is in-tree, we don't require it to be explicitly permitted.
if dep.source.is_some() {
if !permitted_dependencies.contains(dep.name.as_str()) {
tidy_error!(bad, "Dependency for {descr} not explicitly permitted: {}", dep.id);
has_permitted_dep_error = true;
}
if dep.source.is_some() && !permitted_dependencies.contains(dep.name.as_str()) {
tidy_error!(bad, "Dependency for {descr} not explicitly permitted: {}", dep.id);
has_permitted_dep_error = true;
}
}

@ -308,11 +308,9 @@ fn check_error_codes_tests(
for line in file.lines() {
let s = line.trim();
// Assuming the line starts with `error[E`, we can substring the error code out.
if s.starts_with("error[E") {
if &s[6..11] == code {
found_code = true;
break;
}
if s.starts_with("error[E") && &s[6..11] == code {
found_code = true;
break;
};
}

@ -78,9 +78,9 @@ fn check_impl(
let mut py_path = None;
let (cfg_args, file_args): (Vec<_>, Vec<_>) = pos_args
.into_iter()
.iter()
.map(OsStr::new)
.partition(|arg| arg.to_str().is_some_and(|s| s.starts_with("-")));
.partition(|arg| arg.to_str().is_some_and(|s| s.starts_with('-')));
if python_lint || python_fmt {
let venv_path = outdir.join("venv");
@ -277,10 +277,11 @@ fn create_venv_at_path(path: &Path) -> Result<(), Error> {
let stderr = String::from_utf8_lossy(&out.stderr);
let err = if stderr.contains("No module named virtualenv") {
Error::Generic(format!(
Error::Generic(
"virtualenv not found: you may need to install it \
(`python3 -m pip install venv`)"
))
.to_owned(),
)
} else {
Error::Generic(format!(
"failed to create venv at '{}' using {sys_py}: {stderr}",

@ -463,10 +463,13 @@ pub fn check(path: &Path, bad: &mut bool) {
}
}
// for now we just check libcore
if trimmed.contains("unsafe {") && !trimmed.starts_with("//") && !last_safety_comment {
if file.components().any(|c| c.as_os_str() == "core") && !is_test {
suppressible_tidy_err!(err, skip_undocumented_unsafe, "undocumented unsafe");
}
if trimmed.contains("unsafe {")
&& !trimmed.starts_with("//")
&& !last_safety_comment
&& file.components().any(|c| c.as_os_str() == "core")
&& !is_test
{
suppressible_tidy_err!(err, skip_undocumented_unsafe, "undocumented unsafe");
}
if trimmed.contains("// SAFETY:") {
last_safety_comment = true;
@ -487,10 +490,10 @@ pub fn check(path: &Path, bad: &mut bool) {
"copyright notices attributed to the Rust Project Developers are deprecated"
);
}
if !file.components().any(|c| c.as_os_str() == "rustc_baked_icu_data") {
if is_unexplained_ignore(&extension, line) {
err(UNEXPLAINED_IGNORE_DOCTEST_INFO);
}
if !file.components().any(|c| c.as_os_str() == "rustc_baked_icu_data")
&& is_unexplained_ignore(&extension, line)
{
err(UNEXPLAINED_IGNORE_DOCTEST_INFO);
}
if filename.ends_with(".cpp") && line.contains("llvm_unreachable") {
@ -525,26 +528,24 @@ pub fn check(path: &Path, bad: &mut bool) {
backtick_count += comment_text.chars().filter(|ch| *ch == '`').count();
}
comment_block = Some((start_line, backtick_count));
} else {
if let Some((start_line, backtick_count)) = comment_block.take() {
if backtick_count % 2 == 1 {
let mut err = |msg: &str| {
tidy_error!(bad, "{}:{start_line}: {msg}", file.display());
};
let block_len = (i + 1) - start_line;
if block_len == 1 {
suppressible_tidy_err!(
err,
skip_odd_backticks,
"comment with odd number of backticks"
);
} else {
suppressible_tidy_err!(
err,
skip_odd_backticks,
"{block_len}-line comment block with odd number of backticks"
);
}
} else if let Some((start_line, backtick_count)) = comment_block.take() {
if backtick_count % 2 == 1 {
let mut err = |msg: &str| {
tidy_error!(bad, "{}:{start_line}: {msg}", file.display());
};
let block_len = (i + 1) - start_line;
if block_len == 1 {
suppressible_tidy_err!(
err,
skip_odd_backticks,
"comment with odd number of backticks"
);
} else {
suppressible_tidy_err!(
err,
skip_odd_backticks,
"{block_len}-line comment block with odd number of backticks"
);
}
}
}

@ -79,13 +79,11 @@ pub(crate) fn walk_no_read(
let walker = walker.filter_entry(move |e| {
!skip(e.path(), e.file_type().map(|ft| ft.is_dir()).unwrap_or(false))
});
for entry in walker.build() {
if let Ok(entry) = entry {
if entry.file_type().map_or(true, |kind| kind.is_dir() || kind.is_symlink()) {
continue;
}
f(&entry);
for entry in walker.build().flatten() {
if entry.file_type().map_or(true, |kind| kind.is_dir() || kind.is_symlink()) {
continue;
}
f(&entry);
}
}
@ -97,11 +95,9 @@ pub(crate) fn walk_dir(
) {
let mut walker = ignore::WalkBuilder::new(path);
let walker = walker.filter_entry(move |e| !skip(e.path()));
for entry in walker.build() {
if let Ok(entry) = entry {
if entry.path().is_dir() {
f(&entry);
}
for entry in walker.build().flatten() {
if entry.path().is_dir() {
f(&entry);
}
}
}

@ -52,7 +52,7 @@ pub fn check(root: &Path, cargo: &Path, bad: &mut bool) {
);
}
} else {
return tidy_error!(bad, "failed to check version of `x`: {}", cargo_list.status);
tidy_error!(bad, "failed to check version of `x`: {}", cargo_list.status)
}
}