mirror of
https://github.com/rust-lang/rust.git
implement more types to lint, fix wording
This commit is contained in:
parent 7bc39f3af8
commit 941164807f
@@ -1,8 +1,11 @@
 use clippy_utils::diagnostics::span_lint;
 use clippy_utils::higher::ForLoop;
-use clippy_utils::match_def_path;
-use clippy_utils::paths::{HASHMAP_KEYS, HASHMAP_VALUES, HASHSET_ITER_TY};
+use clippy_utils::paths::{
+    HASHMAP_DRAIN, HASHMAP_ITER, HASHMAP_ITER_MUT, HASHMAP_KEYS, HASHMAP_VALUES, HASHMAP_VALUES_MUT, HASHSET_DRAIN,
+    HASHSET_ITER_TY,
+};
 use clippy_utils::ty::is_type_diagnostic_item;
+use clippy_utils::match_any_def_paths;
 use rustc_lint::{LateContext, LateLintPass};
 use rustc_session::{declare_lint_pass, declare_tool_lint};
 use rustc_span::sym;
@@ -13,7 +16,7 @@ declare_clippy_lint! {
     ///
     /// ### Why is this bad?
     /// Because hash types are unordered, when iterated through such as in a for loop, the values are returned in
-    /// a pseudo-random order. As a result, on redundant systems this may cause inconsistencies and anomalies.
+    /// an undefined order. As a result, on redundant systems this may cause inconsistencies and anomalies.
     /// In addition, the unknown order of the elements may reduce readability or introduce other undesired
     /// side effects.
     ///
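As an aside (not part of this commit), the behaviour the reworded documentation describes is easy to observe: with the default RandomState hasher, a HashMap yields its entries in an unspecified order that typically changes from run to run. A minimal sketch:

    use std::collections::HashMap;

    fn main() {
        let map: HashMap<&str, i32> = [("a", 1), ("b", 2), ("c", 3)].into_iter().collect();

        // The order of these lines is unspecified and may differ between runs,
        // which is exactly the situation the lint warns about.
        for (key, value) in &map {
            println!("{key} = {value}");
        }

        // Collecting and sorting (or using a BTreeMap) restores a deterministic order.
        let mut entries: Vec<_> = map.iter().collect();
        entries.sort();
        for (key, value) in entries {
            println!("{key} = {value}");
        }
    }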
@@ -46,9 +49,21 @@ impl LateLintPass<'_> for IterOverHashType {
             && let ty = cx.typeck_results().expr_ty(for_loop.arg).peel_refs()
             && let Some(adt) = ty.ty_adt_def()
             && let did = adt.did()
-            && (match_def_path(cx, did, &HASHMAP_KEYS)
-                || match_def_path(cx, did, &HASHMAP_VALUES)
-                || match_def_path(cx, did, &HASHSET_ITER_TY)
+            && (match_any_def_paths(
+                cx,
+                did,
+                &[
+                    &HASHMAP_KEYS,
+                    &HASHMAP_VALUES,
+                    &HASHMAP_VALUES_MUT,
+                    &HASHMAP_ITER,
+                    &HASHMAP_ITER_MUT,
+                    &HASHMAP_DRAIN,
+                    &HASHSET_ITER_TY,
+                    &HASHSET_DRAIN,
+                ],
+            )
+            .is_some()
                 || is_type_diagnostic_item(cx, ty, sym::HashMap)
                 || is_type_diagnostic_item(cx, ty, sym::HashSet))
         {
@@ -56,7 +71,7 @@ impl LateLintPass<'_> for IterOverHashType {
                 cx,
                 ITER_OVER_HASH_TYPE,
                 expr.span,
-                "iterating over unordered hash-based type",
+                "iteration over unordered hash-based type",
            );
        };
    }
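To make the expanded check above concrete (an illustration, not part of the diff): after peeling references, the type of the for-loop argument is either HashMap/HashSet itself, caught by the diagnostic-item branch, or one of the borrowing/draining iterator adapters whose def-paths are now passed to match_any_def_paths. The loops below map to those two branches; the std types named in the comments are the ones the new path constants refer to.

    use std::collections::{HashMap, HashSet};

    fn main() {
        let mut map: HashMap<i32, i32> = HashMap::new();
        let mut set: HashSet<i32> = HashSet::new();
        map.insert(1, 10);
        set.insert(1);

        // Caught via the HashMap/HashSet diagnostic items
        // (the argument's type peels to the collection itself):
        for _ in &map {}
        for _ in &set {}

        // Caught via the def-path list; each adapter is a distinct std type:
        for _ in map.keys() {}        // std::collections::hash_map::Keys
        for _ in map.values_mut() {}  // std::collections::hash_map::ValuesMut
        for _ in map.iter() {}        // std::collections::hash_map::Iter
        for _ in map.drain() {}       // std::collections::hash_map::Drain
        for _ in set.drain() {}       // std::collections::hash_set::Drain
    }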
@@ -32,10 +32,15 @@ pub const FUTURES_IO_ASYNCREADEXT: [&str; 3] = ["futures_util", "io", "AsyncReadExt"];
 pub const FUTURES_IO_ASYNCWRITEEXT: [&str; 3] = ["futures_util", "io", "AsyncWriteExt"];
 pub const HASHMAP_CONTAINS_KEY: [&str; 6] = ["std", "collections", "hash", "map", "HashMap", "contains_key"];
 pub const HASHMAP_INSERT: [&str; 6] = ["std", "collections", "hash", "map", "HashMap", "insert"];
+pub const HASHMAP_ITER: [&str; 5] = ["std", "collections", "hash", "map", "Iter"];
+pub const HASHMAP_ITER_MUT: [&str; 5] = ["std", "collections", "hash", "map", "IterMut"];
 pub const HASHMAP_KEYS: [&str; 5] = ["std", "collections", "hash", "map", "Keys"];
 pub const HASHMAP_VALUES: [&str; 5] = ["std", "collections", "hash", "map", "Values"];
+pub const HASHMAP_DRAIN: [&str; 5] = ["std", "collections", "hash", "map", "Drain"];
+pub const HASHMAP_VALUES_MUT: [&str; 5] = ["std", "collections", "hash", "map", "ValuesMut"];
 pub const HASHSET_ITER_TY: [&str; 5] = ["std", "collections", "hash", "set", "Iter"];
 pub const HASHSET_ITER: [&str; 6] = ["std", "collections", "hash", "set", "HashSet", "iter"];
+pub const HASHSET_DRAIN: [&str; 5] = ["std", "collections", "hash", "set", "Drain"];
 pub const IDENT: [&str; 3] = ["rustc_span", "symbol", "Ident"];
 pub const IDENT_AS_STR: [&str; 4] = ["rustc_span", "symbol", "Ident", "as_str"];
 pub const INSERT_STR: [&str; 4] = ["alloc", "string", "String", "insert_str"];
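For reference (not part of the diff), each of these constants spells out the def-path of a standard-library iterator type; the "hash"/"map" and "hash"/"set" segments reflect std's internal std::collections::hash::map and hash::set modules, which are re-exported as std::collections::hash_map and hash_set. A rough sketch of the types the new constants point at, under that assumption:

    use std::collections::{hash_map, hash_set, HashMap, HashSet};

    fn main() {
        let mut map: HashMap<i32, i32> = HashMap::new();
        let mut set: HashSet<i32> = HashSet::new();

        // Each binding names the std type that the corresponding path constant matches.
        let _: hash_map::Keys<'_, i32, i32> = map.keys();
        let _: hash_map::Values<'_, i32, i32> = map.values();
        let _: hash_map::ValuesMut<'_, i32, i32> = map.values_mut();
        let _: hash_map::Iter<'_, i32, i32> = map.iter();
        let _: hash_map::IterMut<'_, i32, i32> = map.iter_mut();
        let _: hash_map::Drain<'_, i32, i32> = map.drain();
        let _: hash_set::Iter<'_, i32> = set.iter();
        let _: hash_set::Drain<'_, i32> = set.drain();
    }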
@@ -6,8 +6,8 @@ use std::collections::{HashMap, HashSet};
 extern crate proc_macros;
 
 fn main() {
-    let hash_set = HashSet::<i32>::new();
-    let hash_map = HashMap::<i32, i32>::new();
+    let mut hash_set = HashSet::<i32>::new();
+    let mut hash_map = HashMap::<i32, i32>::new();
     let vec = Vec::<i32>::new();
 
     for x in &hash_set {
@@ -16,7 +16,10 @@ fn main() {
     for x in hash_set.iter() {
         let _ = x;
     }
-    for x in hash_set {
+    for x in hash_set.clone() {
         let _ = x;
     }
+    for x in hash_set.drain() {
+        let _ = x;
+    }
     for (x, y) in &hash_map {
@@ -28,6 +31,18 @@ fn main() {
     for x in hash_map.values() {
         let _ = x;
     }
+    for x in hash_map.values_mut() {
+        *x += 1;
+    }
+    for x in hash_map.iter() {
+        let _ = x;
+    }
+    for x in hash_map.clone() {
+        let _ = x;
+    }
+    for x in hash_map.drain() {
+        let _ = x;
+    }
 
     // shouldnt fire
     for x in &vec {
@@ -1,4 +1,4 @@
-error: iterating over unordered hash-based type
+error: iteration over unordered hash-based type
   --> $DIR/iter_over_hash_type.rs:13:5
    |
 LL | / for x in &hash_set {
@@ -9,7 +9,7 @@ LL | | }
    = note: `-D clippy::iter-over-hash-type` implied by `-D warnings`
    = help: to override `-D warnings` add `#[allow(clippy::iter_over_hash_type)]`
 
-error: iterating over unordered hash-based type
+error: iteration over unordered hash-based type
   --> $DIR/iter_over_hash_type.rs:16:5
    |
 LL | / for x in hash_set.iter() {
@@ -17,37 +17,77 @@ LL | |     let _ = x;
 LL | | }
    | |_____^
 
-error: iterating over unordered hash-based type
+error: iteration over unordered hash-based type
   --> $DIR/iter_over_hash_type.rs:19:5
    |
-LL | / for x in hash_set {
+LL | / for x in hash_set.clone() {
 LL | |     let _ = x;
 LL | | }
    | |_____^
 
-error: iterating over unordered hash-based type
+error: iteration over unordered hash-based type
   --> $DIR/iter_over_hash_type.rs:22:5
    |
+LL | / for x in hash_set.drain() {
+LL | |     let _ = x;
+LL | | }
+   | |_____^
+
+error: iteration over unordered hash-based type
+  --> $DIR/iter_over_hash_type.rs:25:5
+   |
 LL | / for (x, y) in &hash_map {
 LL | |     let _ = (x, y);
 LL | | }
    | |_____^
 
-error: iterating over unordered hash-based type
-  --> $DIR/iter_over_hash_type.rs:25:5
+error: iteration over unordered hash-based type
+  --> $DIR/iter_over_hash_type.rs:28:5
    |
 LL | / for x in hash_map.keys() {
 LL | |     let _ = x;
 LL | | }
    | |_____^
 
-error: iterating over unordered hash-based type
-  --> $DIR/iter_over_hash_type.rs:28:5
+error: iteration over unordered hash-based type
+  --> $DIR/iter_over_hash_type.rs:31:5
    |
 LL | / for x in hash_map.values() {
 LL | |     let _ = x;
 LL | | }
    | |_____^
 
-error: aborting due to 6 previous errors
+error: iteration over unordered hash-based type
+  --> $DIR/iter_over_hash_type.rs:34:5
+   |
+LL | / for x in hash_map.values_mut() {
+LL | |     *x += 1;
+LL | | }
+   | |_____^
+
+error: iteration over unordered hash-based type
+  --> $DIR/iter_over_hash_type.rs:37:5
+   |
+LL | / for x in hash_map.iter() {
+LL | |     let _ = x;
+LL | | }
+   | |_____^
+
+error: iteration over unordered hash-based type
+  --> $DIR/iter_over_hash_type.rs:40:5
+   |
+LL | / for x in hash_map.clone() {
+LL | |     let _ = x;
+LL | | }
+   | |_____^
+
+error: iteration over unordered hash-based type
+  --> $DIR/iter_over_hash_type.rs:43:5
+   |
+LL | / for x in hash_map.drain() {
+LL | |     let _ = x;
+LL | | }
+   | |_____^
+
+error: aborting due to 11 previous errors
 
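Not part of the output above, but as the `= help:` note indicates, the lint can be silenced at a single site when the iteration order genuinely does not matter. A minimal sketch:

    use std::collections::HashMap;

    fn main() {
        let map: HashMap<i32, i32> = HashMap::new();

        // Allowed per the help text; the attribute applies only to this statement.
        #[allow(clippy::iter_over_hash_type)]
        for (k, v) in &map {
            let _ = (k, v);
        }
    }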