Auto merge of #84980 - flip1995:clippyup, r=Manishearth

Update Clippy

Bi-weekly Clippy update. r? `@Manishearth`

This commit is contained in: commit 770792ff8d
@@ -548,7 +548,7 @@ dependencies = [

[[package]]
name = "clippy"
version = "0.1.53"
version = "0.1.54"
dependencies = [
"cargo_metadata 0.12.0",
"clippy-mini-macro-test",

@@ -585,7 +585,7 @@ dependencies = [

[[package]]
name = "clippy_lints"
version = "0.1.53"
version = "0.1.54"
dependencies = [
"cargo_metadata 0.12.0",
"clippy_utils",

@@ -597,6 +597,7 @@ dependencies = [
"rustc-semver",
"semver 0.11.0",
"serde",
"serde_json",
"toml",
"unicode-normalization",
"url 2.1.1",

@@ -604,7 +605,7 @@ dependencies = [

[[package]]
name = "clippy_utils"
version = "0.1.53"
version = "0.1.54"
dependencies = [
"if_chain",
"itertools 0.9.0",

src/tools/clippy/.gitignore (vendored)

@@ -29,6 +29,7 @@ out

# gh pages docs
util/gh-pages/lints.json
**/metadata_collection.json

# rustfmt backups
*.rs.bk

@@ -118,7 +118,7 @@ which `IntelliJ Rust` will be able to understand.
Run `cargo dev ide_setup --repo-path <repo-path>` where `<repo-path>` is a path to the rustc repo
you just cloned.
The command will add path-dependencies pointing towards rustc-crates inside the rustc repo to
Clippys `Cargo.toml`s and should allow rust-analyzer to understand most of the types that Clippy uses.
Clippys `Cargo.toml`s and should allow `IntelliJ Rust` to understand most of the types that Clippy uses.
Just make sure to remove the dependencies again before finally making a pull request!

[rustc_repo]: https://github.com/rust-lang/rust/
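For example, assuming the rustc checkout was cloned to ~/rust (a hypothetical path used only for illustration), the invocation would look like:

    cargo dev ide_setup --repo-path ~/rust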
@@ -1,6 +1,6 @@
[package]
name = "clippy"
version = "0.1.53"
version = "0.1.54"
authors = ["The Rust Clippy Developers"]
description = "A bunch of helpful lints to avoid common pitfalls in Rust"
repository = "https://github.com/rust-lang/rust-clippy"

@@ -52,6 +52,7 @@ rustc_tools_util = { version = "0.2.0", path = "rustc_tools_util" }
deny-warnings = []
integration = ["tempfile"]
internal-lints = ["clippy_lints/internal-lints"]
metadata-collector-lint = ["internal-lints", "clippy_lints/metadata-collector-lint"]

[package.metadata.rust-analyzer]
# This package uses #[feature(rustc_private)]

@@ -1,7 +1,7 @@
[package]
name = "clippy_lints"
# begin automatic update
version = "0.1.53"
version = "0.1.54"
# end automatic update
authors = ["The Rust Clippy Developers"]
description = "A bunch of helpful lints to avoid common pitfalls in Rust"

@@ -20,6 +20,7 @@ pulldown-cmark = { version = "0.8", default-features = false }
quine-mc_cluskey = "0.2.2"
regex-syntax = "0.6"
serde = { version = "1.0", features = ["derive"] }
serde_json = { version = "1.0", optional = true }
toml = "0.5.3"
unicode-normalization = "0.1"
semver = "0.11"

@@ -32,6 +33,7 @@ url = { version = "2.1.0", features = ["serde"] }
deny-warnings = []
# build clippy with internal lints enabled, off by default
internal-lints = ["clippy_utils/internal-lints"]
metadata-collector-lint = ["serde_json", "clippy_utils/metadata-collector-lint"]

[package.metadata.rust-analyzer]
# This crate uses #[feature(rustc_private)]

@@ -1,6 +1,6 @@
use clippy_utils::diagnostics::span_lint_and_help;
use clippy_utils::ty::implements_trait;
use clippy_utils::{get_trait_def_id, if_sequence, is_else_clause, paths, SpanlessEq};
use clippy_utils::{get_trait_def_id, if_sequence, in_constant, is_else_clause, paths, SpanlessEq};
use rustc_hir::{BinOpKind, Expr, ExprKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_session::{declare_lint_pass, declare_tool_lint};

@@ -64,6 +64,10 @@ impl<'tcx> LateLintPass<'tcx> for ComparisonChain {
return;
}

if in_constant(cx, expr.hir_id) {
return;
}

// Check that there exists at least one explicit else condition
let (conds, _) = if_sequence(expr);
if conds.len() < 2 {

@@ -1,6 +1,6 @@
use clippy_utils::diagnostics::{span_lint_and_note, span_lint_and_sugg};
use clippy_utils::source::snippet_with_macro_callsite;
use clippy_utils::{any_parent_is_automatically_derived, contains_name, match_def_path, paths};
use clippy_utils::{any_parent_is_automatically_derived, contains_name, in_macro, match_def_path, paths};
use if_chain::if_chain;
use rustc_data_structures::fx::FxHashSet;
use rustc_errors::Applicability;

@@ -75,6 +75,7 @@ impl_lint_pass!(Default => [DEFAULT_TRAIT_ACCESS, FIELD_REASSIGN_WITH_DEFAULT]);
impl LateLintPass<'_> for Default {
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
if_chain! {
if !in_macro(expr.span);
// Avoid cases already linted by `field_reassign_with_default`
if !self.reassigned_linted.contains(&expr.span);
if let ExprKind::Call(path, ..) = expr.kind;
@@ -1,5 +1,6 @@
use clippy_utils::diagnostics::{span_lint, span_lint_and_note};
use clippy_utils::{get_parent_expr, path_to_local, path_to_local_id};
use if_chain::if_chain;
use rustc_hir::intravisit::{walk_expr, NestedVisitorMap, Visitor};
use rustc_hir::{BinOpKind, Block, Expr, ExprKind, Guard, HirId, Local, Node, Stmt, StmtKind};
use rustc_lint::{LateContext, LateLintPass};

@@ -70,20 +71,19 @@ declare_lint_pass!(EvalOrderDependence => [EVAL_ORDER_DEPENDENCE, DIVERGING_SUB_
impl<'tcx> LateLintPass<'tcx> for EvalOrderDependence {
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
// Find a write to a local variable.
match expr.kind {
ExprKind::Assign(lhs, ..) | ExprKind::AssignOp(_, lhs, _) => {
if let Some(var) = path_to_local(lhs) {
let mut visitor = ReadVisitor {
cx,
var,
write_expr: expr,
last_expr: expr,
};
check_for_unsequenced_reads(&mut visitor);
}
},
_ => {},
}
let var = if_chain! {
if let ExprKind::Assign(lhs, ..) | ExprKind::AssignOp(_, lhs, _) = expr.kind;
if let Some(var) = path_to_local(lhs);
if expr.span.desugaring_kind().is_none();
then { var } else { return; }
};
let mut visitor = ReadVisitor {
cx,
var,
write_expr: expr,
last_expr: expr,
};
check_for_unsequenced_reads(&mut visitor);
}
fn check_stmt(&mut self, cx: &LateContext<'tcx>, stmt: &'tcx Stmt<'_>) {
match stmt.kind {

@@ -305,7 +305,7 @@ impl<'a, 'tcx> Visitor<'tcx> for ReadVisitor<'a, 'tcx> {
self.cx,
EVAL_ORDER_DEPENDENCE,
expr.span,
"unsequenced read of a variable",
&format!("unsequenced read of `{}`", self.cx.tcx.hir().name(self.var)),
Some(self.write_expr.span),
"whether read occurs before this write depends on evaluation order",
);
@@ -1,13 +1,16 @@
use clippy_utils::diagnostics::span_lint_and_then;
use clippy_utils::match_panic_def_id;
use clippy_utils::source::snippet_opt;
use if_chain::if_chain;
use clippy_utils::{
diagnostics::span_lint_and_sugg,
get_async_fn_body, is_async_fn,
source::{snippet_with_applicability, snippet_with_context, walk_span_to_context},
visitors::visit_break_exprs,
};
use rustc_errors::Applicability;
use rustc_hir::intravisit::FnKind;
use rustc_hir::{Body, Expr, ExprKind, FnDecl, HirId, MatchSource, StmtKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_hir::{Block, Body, Expr, ExprKind, FnDecl, FnRetTy, HirId};
use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_middle::lint::in_external_macro;
use rustc_session::{declare_lint_pass, declare_tool_lint};
use rustc_span::source_map::Span;
use rustc_span::{Span, SyntaxContext};

declare_clippy_lint! {
/// **What it does:** Checks for missing return statements at the end of a block.

@@ -39,89 +42,160 @@ declare_clippy_lint! {

declare_lint_pass!(ImplicitReturn => [IMPLICIT_RETURN]);

static LINT_BREAK: &str = "change `break` to `return` as shown";
static LINT_RETURN: &str = "add `return` as shown";

fn lint(cx: &LateContext<'_>, outer_span: Span, inner_span: Span, msg: &str) {
let outer_span = outer_span.source_callsite();
let inner_span = inner_span.source_callsite();

span_lint_and_then(cx, IMPLICIT_RETURN, outer_span, "missing `return` statement", |diag| {
if let Some(snippet) = snippet_opt(cx, inner_span) {
diag.span_suggestion(
outer_span,
msg,
format!("return {}", snippet),
Applicability::MachineApplicable,
);
}
});
fn lint_return(cx: &LateContext<'_>, span: Span) {
let mut app = Applicability::MachineApplicable;
let snip = snippet_with_applicability(cx, span, "..", &mut app);
span_lint_and_sugg(
cx,
IMPLICIT_RETURN,
span,
"missing `return` statement",
"add `return` as shown",
format!("return {}", snip),
app,
);
}

fn expr_match(cx: &LateContext<'_>, expr: &Expr<'_>) {
fn lint_break(cx: &LateContext<'_>, break_span: Span, expr_span: Span) {
let mut app = Applicability::MachineApplicable;
let snip = snippet_with_context(cx, expr_span, break_span.ctxt(), "..", &mut app).0;
span_lint_and_sugg(
cx,
IMPLICIT_RETURN,
break_span,
"missing `return` statement",
"change `break` to `return` as shown",
format!("return {}", snip),
app,
)
}

#[derive(Clone, Copy, PartialEq, Eq)]
enum LintLocation {
/// The lint was applied to a parent expression.
Parent,
/// The lint was applied to this expression, a child, or not applied.
Inner,
}
impl LintLocation {
fn still_parent(self, b: bool) -> Self {
if b { self } else { Self::Inner }
}

fn is_parent(self) -> bool {
self == Self::Parent
}
}

// Gets the call site if the span is in a child context. Otherwise returns `None`.
fn get_call_site(span: Span, ctxt: SyntaxContext) -> Option<Span> {
(span.ctxt() != ctxt).then(|| walk_span_to_context(span, ctxt).unwrap_or(span))
}

fn lint_implicit_returns(
cx: &LateContext<'tcx>,
expr: &'tcx Expr<'_>,
// The context of the function body.
ctxt: SyntaxContext,
// Whether the expression is from a macro expansion.
call_site_span: Option<Span>,
) -> LintLocation {
match expr.kind {
// loops could be using `break` instead of `return`
ExprKind::Block(block, ..) | ExprKind::Loop(block, ..) => {
if let Some(expr) = &block.expr {
expr_match(cx, expr);
ExprKind::Block(
Block {
expr: Some(block_expr), ..
},
_,
) => lint_implicit_returns(
cx,
block_expr,
ctxt,
call_site_span.or_else(|| get_call_site(block_expr.span, ctxt)),
)
.still_parent(call_site_span.is_some()),

ExprKind::If(_, then_expr, Some(else_expr)) => {
// Both `then_expr` or `else_expr` are required to be blocks in the same context as the `if`. Don't
// bother checking.
let res = lint_implicit_returns(cx, then_expr, ctxt, call_site_span).still_parent(call_site_span.is_some());
if res.is_parent() {
// The return was added as a parent of this if expression.
return res;
}
// only needed in the case of `break` with `;` at the end
else if let Some(stmt) = block.stmts.last() {
if_chain! {
if let StmtKind::Semi(expr, ..) = &stmt.kind;
// make sure it's a break, otherwise we want to skip
if let ExprKind::Break(.., Some(break_expr)) = &expr.kind;
then {
lint(cx, expr.span, break_expr.span, LINT_BREAK);
lint_implicit_returns(cx, else_expr, ctxt, call_site_span).still_parent(call_site_span.is_some())
},

ExprKind::Match(_, arms, _) => {
for arm in arms {
let res = lint_implicit_returns(
cx,
arm.body,
ctxt,
call_site_span.or_else(|| get_call_site(arm.body.span, ctxt)),
)
.still_parent(call_site_span.is_some());
if res.is_parent() {
// The return was added as a parent of this match expression.
return res;
}
}
LintLocation::Inner
},

ExprKind::Loop(block, ..) => {
let mut add_return = false;
visit_break_exprs(block, |break_expr, dest, sub_expr| {
if dest.target_id.ok() == Some(expr.hir_id) {
if call_site_span.is_none() && break_expr.span.ctxt() == ctxt {
lint_break(cx, break_expr.span, sub_expr.unwrap().span);
} else {
// the break expression is from a macro call, add a return to the loop
add_return = true;
}
}
}
},
// use `return` instead of `break`
ExprKind::Break(.., break_expr) => {
if let Some(break_expr) = break_expr {
lint(cx, expr.span, break_expr.span, LINT_BREAK);
}
},
ExprKind::If(.., if_expr, else_expr) => {
expr_match(cx, if_expr);

if let Some(else_expr) = else_expr {
expr_match(cx, else_expr);
}
},
ExprKind::Match(.., arms, source) => {
let check_all_arms = match source {
MatchSource::IfLetDesugar {
contains_else_clause: has_else,
} => has_else,
_ => true,
};

if check_all_arms {
for arm in arms {
expr_match(cx, arm.body);
});
if add_return {
#[allow(clippy::option_if_let_else)]
if let Some(span) = call_site_span {
lint_return(cx, span);
LintLocation::Parent
} else {
lint_return(cx, expr.span);
LintLocation::Inner
}
} else {
expr_match(cx, arms.first().expect("`if let` doesn't have a single arm").body);
LintLocation::Inner
}
},
// skip if it already has a return statement
ExprKind::Ret(..) => (),
// make sure it's not a call that panics
ExprKind::Call(expr, ..) => {
if_chain! {
if let ExprKind::Path(qpath) = &expr.kind;
if let Some(path_def_id) = cx.qpath_res(qpath, expr.hir_id).opt_def_id();
if match_panic_def_id(cx, path_def_id);
then { }
else {
lint(cx, expr.span, expr.span, LINT_RETURN)
}

// If expressions without an else clause, and blocks without a final expression can only be the final expression
// if they are divergent, or return the unit type.
ExprKind::If(_, _, None) | ExprKind::Block(Block { expr: None, .. }, _) | ExprKind::Ret(_) => {
LintLocation::Inner
},

// Any divergent expression doesn't need a return statement.
ExprKind::MethodCall(..)
| ExprKind::Call(..)
| ExprKind::Binary(..)
| ExprKind::Unary(..)
| ExprKind::Index(..)
if cx.typeck_results().expr_ty(expr).is_never() =>
{
LintLocation::Inner
},

_ =>
{
#[allow(clippy::option_if_let_else)]
if let Some(span) = call_site_span {
lint_return(cx, span);
LintLocation::Parent
} else {
lint_return(cx, expr.span);
LintLocation::Inner
}
},
// everything else is missing `return`
_ => lint(cx, expr.span, expr.span, LINT_RETURN),
}
}

@@ -129,19 +203,32 @@ impl<'tcx> LateLintPass<'tcx> for ImplicitReturn {
fn check_fn(
&mut self,
cx: &LateContext<'tcx>,
_: FnKind<'tcx>,
_: &'tcx FnDecl<'_>,
kind: FnKind<'tcx>,
decl: &'tcx FnDecl<'_>,
body: &'tcx Body<'_>,
span: Span,
_: HirId,
) {
if span.from_expansion() {
if (!matches!(kind, FnKind::Closure) && matches!(decl.output, FnRetTy::DefaultReturn(_)))
|| span.ctxt() != body.value.span.ctxt()
|| in_external_macro(cx.sess(), span)
{
return;
}
let body = cx.tcx.hir().body(body.id());
if cx.typeck_results().expr_ty(&body.value).is_unit() {

let res_ty = cx.typeck_results().expr_ty(&body.value);
if res_ty.is_unit() || res_ty.is_never() {
return;
}
expr_match(cx, &body.value);

let expr = if is_async_fn(kind) {
match get_async_fn_body(cx.tcx, body) {
Some(e) => e,
None => return,
}
} else {
&body.value
};
lint_implicit_returns(cx, expr, expr.span.ctxt(), None);
}
}
@@ -383,6 +383,7 @@ mod zero_sized_map_values;
// end lints modules, do not remove this comment, it's used in `update_lints`

pub use crate::utils::conf::Conf;
use crate::utils::conf::TryConf;

/// Register all pre expansion lints
///

@@ -400,56 +401,40 @@ pub fn register_pre_expansion_lints(store: &mut rustc_lint::LintStore) {
}

#[doc(hidden)]
pub fn read_conf(args: &[rustc_ast::NestedMetaItem], sess: &Session) -> Conf {
pub fn read_conf(sess: &Session) -> Conf {
use std::path::Path;
match utils::conf::file_from_args(args) {
Ok(file_name) => {
// if the user specified a file, it must exist, otherwise default to `clippy.toml` but
// do not require the file to exist
let file_name = match file_name {
Some(file_name) => file_name,
None => match utils::conf::lookup_conf_file() {
Ok(Some(path)) => path,
Ok(None) => return Conf::default(),
Err(error) => {
sess.struct_err(&format!("error finding Clippy's configuration file: {}", error))
.emit();
return Conf::default();
},
},
};

let file_name = if file_name.is_relative() {
sess.local_crate_source_file
.as_deref()
.and_then(Path::parent)
.unwrap_or_else(|| Path::new(""))
.join(file_name)
} else {
file_name
};

let (conf, errors) = utils::conf::read(&file_name);

// all conf errors are non-fatal, we just use the default conf in case of error
for error in errors {
sess.struct_err(&format!(
"error reading Clippy's configuration file `{}`: {}",
file_name.display(),
error
))
let file_name = match utils::conf::lookup_conf_file() {
Ok(Some(path)) => path,
Ok(None) => return Conf::default(),
Err(error) => {
sess.struct_err(&format!("error finding Clippy's configuration file: {}", error))
.emit();
}
return Conf::default();
},
};

conf
},
Err((err, span)) => {
sess.struct_span_err(span, err)
.span_note(span, "Clippy will use default configuration")
.emit();
Conf::default()
},
let file_name = if file_name.is_relative() {
sess.local_crate_source_file
.as_deref()
.and_then(Path::parent)
.unwrap_or_else(|| Path::new(""))
.join(file_name)
} else {
file_name
};

let TryConf { conf, errors } = utils::conf::read(&file_name);
// all conf errors are non-fatal, we just use the default conf in case of error
for error in errors {
sess.struct_err(&format!(
"error reading Clippy's configuration file `{}`: {}",
file_name.display(),
error
))
.emit();
}

conf
}

/// Register all lints and lint groups with the rustc plugin registry

@@ -1020,6 +1005,13 @@ pub fn register_plugins(store: &mut rustc_lint::LintStore, sess: &Session, conf:
store.register_late_pass(|| box utils::internal_lints::MatchTypeOnDiagItem);
store.register_late_pass(|| box utils::internal_lints::OuterExpnDataPass);
}
#[cfg(feature = "metadata-collector-lint")]
{
if std::env::var("ENABLE_METADATA_COLLECTION").eq(&Ok("1".to_string())) {
store.register_late_pass(|| box utils::internal_lints::metadata_collector::MetadataCollector::default());
}
}

store.register_late_pass(|| box utils::author::Author);
store.register_late_pass(|| box await_holding_invalid::AwaitHolding);
store.register_late_pass(|| box serde_api::SerdeApi);
@@ -7,9 +7,10 @@ use clippy_utils::{is_trait_method, path_to_local_id, paths};
use if_chain::if_chain;
use rustc_errors::Applicability;
use rustc_hir::intravisit::{walk_block, walk_expr, NestedVisitorMap, Visitor};
use rustc_hir::{Block, Expr, ExprKind, GenericArg, HirId, Local, Pat, PatKind, QPath, StmtKind};
use rustc_hir::{Block, Expr, ExprKind, GenericArg, GenericArgs, HirId, Local, Pat, PatKind, QPath, StmtKind, Ty};
use rustc_lint::LateContext;
use rustc_middle::hir::map::Map;

use rustc_span::symbol::{sym, Ident};
use rustc_span::{MultiSpan, Span};

@@ -26,7 +27,7 @@ fn check_needless_collect_direct_usage<'tcx>(expr: &'tcx Expr<'_>, cx: &LateCont
if chain_method.ident.name == sym!(collect) && is_trait_method(cx, &args[0], sym::Iterator);
if let Some(generic_args) = chain_method.args;
if let Some(GenericArg::Type(ref ty)) = generic_args.args.get(0);
let ty = cx.typeck_results().node_type(ty.hir_id);
if let Some(ty) = cx.typeck_results().node_type_opt(ty.hir_id);
if is_type_diagnostic_item(cx, ty, sym::vec_type)
|| is_type_diagnostic_item(cx, ty, sym::vecdeque_type)
|| match_type(cx, ty, &paths::BTREEMAP)

@@ -58,20 +59,33 @@ fn check_needless_collect_direct_usage<'tcx>(expr: &'tcx Expr<'_>, cx: &LateCont
}

fn check_needless_collect_indirect_usage<'tcx>(expr: &'tcx Expr<'_>, cx: &LateContext<'tcx>) {
fn get_hir_id<'tcx>(ty: Option<&Ty<'tcx>>, method_args: Option<&GenericArgs<'tcx>>) -> Option<HirId> {
if let Some(ty) = ty {
return Some(ty.hir_id);
}

if let Some(generic_args) = method_args {
if let Some(GenericArg::Type(ref ty)) = generic_args.args.get(0) {
return Some(ty.hir_id);
}
}

None
}
if let ExprKind::Block(block, _) = expr.kind {
for stmt in block.stmts {
if_chain! {
if let StmtKind::Local(
Local { pat: Pat { hir_id: pat_id, kind: PatKind::Binding(_, _, ident, .. ), .. },
init: Some(init_expr), .. }
init: Some(init_expr), ty, .. }
) = stmt.kind;
if let ExprKind::MethodCall(method_name, collect_span, &[ref iter_source], ..) = init_expr.kind;
if method_name.ident.name == sym!(collect) && is_trait_method(cx, init_expr, sym::Iterator);
if let Some(generic_args) = method_name.args;
if let Some(GenericArg::Type(ref ty)) = generic_args.args.get(0);
if let ty = cx.typeck_results().node_type(ty.hir_id);
if let Some(hir_id) = get_hir_id(*ty, method_name.args);
if let Some(ty) = cx.typeck_results().node_type_opt(hir_id);
if is_type_diagnostic_item(cx, ty, sym::vec_type) ||
is_type_diagnostic_item(cx, ty, sym::vecdeque_type) ||
is_type_diagnostic_item(cx, ty, sym::BinaryHeap) ||
match_type(cx, ty, &paths::LINKED_LIST);
if let Some(iter_calls) = detect_iter_and_into_iters(block, *ident);
if let [iter_call] = &*iter_calls;
@@ -28,11 +28,14 @@ pub(super) fn check<'tcx>(cx: &LateContext<'tcx>, cond: &'tcx Expr<'_>, expr: &'
return;
}
let used_in_condition = &var_visitor.ids;
let no_cond_variable_mutated = if let Some(used_mutably) = mutated_variables(expr, cx) {
used_in_condition.is_disjoint(&used_mutably)
} else {
return;
};
let mutated_in_body = mutated_variables(expr, cx);
let mutated_in_condition = mutated_variables(cond, cx);
let no_cond_variable_mutated =
if let (Some(used_mutably_body), Some(used_mutably_cond)) = (mutated_in_body, mutated_in_condition) {
used_in_condition.is_disjoint(&used_mutably_body) && used_in_condition.is_disjoint(&used_mutably_cond)
} else {
return;
};
let mutable_static_in_cond = var_visitor.def_ids.iter().any(|(_, v)| *v);

let mut has_break_or_return_visitor = HasBreakOrReturnVisitor {
@@ -1590,9 +1590,9 @@ fn is_none_arm(cx: &LateContext<'_>, arm: &Arm<'_>) -> bool {
// Checks if arm has the form `Some(ref v) => Some(v)` (checks for `ref` and `ref mut`)
fn is_ref_some_arm(cx: &LateContext<'_>, arm: &Arm<'_>) -> Option<BindingAnnotation> {
if_chain! {
if let PatKind::TupleStruct(ref qpath, pats, _) = arm.pat.kind;
if let PatKind::TupleStruct(ref qpath, [first_pat, ..], _) = arm.pat.kind;
if is_lang_ctor(cx, qpath, OptionSome);
if let PatKind::Binding(rb, .., ident, _) = pats[0].kind;
if let PatKind::Binding(rb, .., ident, _) = first_pat.kind;
if rb == BindingAnnotation::Ref || rb == BindingAnnotation::RefMut;
if let ExprKind::Call(e, args) = remove_blocks(arm.body).kind;
if let ExprKind::Path(ref some_path) = e.kind;

@@ -1712,6 +1712,7 @@ mod redundant_pattern_match {
use clippy_utils::{is_lang_ctor, is_qpath_def_path, is_trait_method, paths};
use if_chain::if_chain;
use rustc_ast::ast::LitKind;
use rustc_data_structures::fx::FxHashSet;
use rustc_errors::Applicability;
use rustc_hir::LangItem::{OptionNone, OptionSome, PollPending, PollReady, ResultErr, ResultOk};
use rustc_hir::{

@@ -1739,6 +1740,13 @@ mod redundant_pattern_match {
/// deallocate memory. For these types, and composites containing them, changing the drop order
/// won't result in any observable side effects.
fn type_needs_ordered_drop(cx: &LateContext<'tcx>, ty: Ty<'tcx>) -> bool {
type_needs_ordered_drop_inner(cx, ty, &mut FxHashSet::default())
}

fn type_needs_ordered_drop_inner(cx: &LateContext<'tcx>, ty: Ty<'tcx>, seen: &mut FxHashSet<Ty<'tcx>>) -> bool {
if !seen.insert(ty) {
return false;
}
if !ty.needs_drop(cx.tcx, cx.param_env) {
false
} else if !cx

@@ -1750,12 +1758,12 @@ mod redundant_pattern_match {
// This type doesn't implement drop, so no side effects here.
// Check if any component type has any.
match ty.kind() {
ty::Tuple(_) => ty.tuple_fields().any(|ty| type_needs_ordered_drop(cx, ty)),
ty::Array(ty, _) => type_needs_ordered_drop(cx, ty),
ty::Tuple(_) => ty.tuple_fields().any(|ty| type_needs_ordered_drop_inner(cx, ty, seen)),
ty::Array(ty, _) => type_needs_ordered_drop_inner(cx, ty, seen),
ty::Adt(adt, subs) => adt
.all_fields()
.map(|f| f.ty(cx.tcx, subs))
.any(|ty| type_needs_ordered_drop(cx, ty)),
.any(|ty| type_needs_ordered_drop_inner(cx, ty, seen)),
_ => true,
}
}

@@ -1772,7 +1780,7 @@ mod redundant_pattern_match {
{
// Check all of the generic arguments.
if let ty::Adt(_, subs) = ty.kind() {
subs.types().any(|ty| type_needs_ordered_drop(cx, ty))
subs.types().any(|ty| type_needs_ordered_drop_inner(cx, ty, seen))
} else {
true
}
@@ -2189,27 +2189,6 @@ const TRAIT_METHODS: [ShouldImplTraitCase; 30] = [
ShouldImplTraitCase::new("std::ops::Sub", "sub", 2, FN_HEADER, SelfKind::Value, OutType::Any, true),
];

#[rustfmt::skip]
const PATTERN_METHODS: [(&str, usize); 17] = [
("contains", 1),
("starts_with", 1),
("ends_with", 1),
("find", 1),
("rfind", 1),
("split", 1),
("rsplit", 1),
("split_terminator", 1),
("rsplit_terminator", 1),
("splitn", 2),
("rsplitn", 2),
("matches", 1),
("rmatches", 1),
("match_indices", 1),
("rmatch_indices", 1),
("trim_start_matches", 1),
("trim_end_matches", 1),
];

#[derive(Clone, Copy, PartialEq, Debug)]
enum SelfKind {
Value,
@@ -9,9 +9,31 @@ use rustc_span::symbol::Symbol;

use super::SINGLE_CHAR_PATTERN;

const PATTERN_METHODS: [(&str, usize); 19] = [
("contains", 1),
("starts_with", 1),
("ends_with", 1),
("find", 1),
("rfind", 1),
("split", 1),
("rsplit", 1),
("split_terminator", 1),
("rsplit_terminator", 1),
("splitn", 2),
("rsplitn", 2),
("matches", 1),
("rmatches", 1),
("match_indices", 1),
("rmatch_indices", 1),
("strip_prefix", 1),
("strip_suffix", 1),
("trim_start_matches", 1),
("trim_end_matches", 1),
];

/// lint for length-1 `str`s for methods in `PATTERN_METHODS`
pub(super) fn check(cx: &LateContext<'_>, _expr: &hir::Expr<'_>, method_name: Symbol, args: &[hir::Expr<'_>]) {
for &(method, pos) in &crate::methods::PATTERN_METHODS {
for &(method, pos) in &PATTERN_METHODS {
if_chain! {
if let ty::Ref(_, ty, _) = cx.typeck_results().expr_ty_adjusted(&args[0]).kind();
if *ty.kind() == ty::Str;
@@ -6,6 +6,7 @@ use rustc_hir::intravisit::{walk_expr, NestedVisitorMap, Visitor};
use rustc_hir::LangItem::{OptionNone, OptionSome};
use rustc_lint::LateContext;
use rustc_middle::hir::map::Map;
use rustc_middle::ty::{self, TyS};
use rustc_span::sym;

use super::UNNECESSARY_FILTER_MAP;

@@ -28,25 +29,28 @@ pub(super) fn check(cx: &LateContext<'_>, expr: &hir::Expr<'_>, arg: &hir::Expr<
found_mapping |= return_visitor.found_mapping;
found_filtering |= return_visitor.found_filtering;

if !found_filtering {
span_lint(
cx,
UNNECESSARY_FILTER_MAP,
expr.span,
"this `.filter_map` can be written more simply using `.map`",
);
let sugg = if !found_filtering {
"map"
} else if !found_mapping && !mutates_arg {
let in_ty = cx.typeck_results().node_type(body.params[0].hir_id);
match cx.typeck_results().expr_ty(&body.value).kind() {
ty::Adt(adt, subst)
if cx.tcx.is_diagnostic_item(sym::option_type, adt.did)
&& TyS::same_type(in_ty, subst.type_at(0)) =>
{
"filter"
},
_ => return,
}
} else {
return;
}

if !found_mapping && !mutates_arg {
span_lint(
cx,
UNNECESSARY_FILTER_MAP,
expr.span,
"this `.filter_map` can be written more simply using `.filter`",
);
return;
}
};
span_lint(
cx,
UNNECESSARY_FILTER_MAP,
expr.span,
&format!("this `.filter_map` can be written more simply using `.{}`", sugg),
);
}
}
@@ -1,569 +0,0 @@
use clippy_utils::diagnostics::{span_lint, span_lint_and_help, span_lint_and_sugg, span_lint_and_then};
use clippy_utils::source::snippet_opt;
use rustc_ast::ast::{
BindingMode, Expr, ExprKind, GenericParamKind, Generics, Lit, LitFloatType, LitIntType, LitKind, Mutability,
NodeId, Pat, PatKind, UnOp,
};
use rustc_ast::visit::FnKind;
use rustc_data_structures::fx::FxHashMap;
use rustc_errors::Applicability;
use rustc_hir::PrimTy;
use rustc_lint::{EarlyContext, EarlyLintPass, LintContext};
use rustc_middle::lint::in_external_macro;
use rustc_session::{declare_lint_pass, declare_tool_lint};
use rustc_span::source_map::Span;

declare_clippy_lint! {
/// **What it does:** Checks for structure field patterns bound to wildcards.
///
/// **Why is this bad?** Using `..` instead is shorter and leaves the focus on
/// the fields that are actually bound.
///
/// **Known problems:** None.
///
/// **Example:**
/// ```rust
/// # struct Foo {
/// # a: i32,
/// # b: i32,
/// # c: i32,
/// # }
/// let f = Foo { a: 0, b: 0, c: 0 };
///
/// // Bad
/// match f {
/// Foo { a: _, b: 0, .. } => {},
/// Foo { a: _, b: _, c: _ } => {},
/// }
///
/// // Good
/// match f {
/// Foo { b: 0, .. } => {},
/// Foo { .. } => {},
/// }
/// ```
pub UNNEEDED_FIELD_PATTERN,
restriction,
"struct fields bound to a wildcard instead of using `..`"
}

declare_clippy_lint! {
/// **What it does:** Checks for function arguments having the similar names
/// differing by an underscore.
///
/// **Why is this bad?** It affects code readability.
///
/// **Known problems:** None.
///
/// **Example:**
/// ```rust
/// // Bad
/// fn foo(a: i32, _a: i32) {}
///
/// // Good
/// fn bar(a: i32, _b: i32) {}
/// ```
pub DUPLICATE_UNDERSCORE_ARGUMENT,
style,
"function arguments having names which only differ by an underscore"
}

declare_clippy_lint! {
/// **What it does:** Detects expressions of the form `--x`.
///
/// **Why is this bad?** It can mislead C/C++ programmers to think `x` was
/// decremented.
///
/// **Known problems:** None.
///
/// **Example:**
/// ```rust
/// let mut x = 3;
/// --x;
/// ```
pub DOUBLE_NEG,
style,
"`--x`, which is a double negation of `x` and not a pre-decrement as in C/C++"
}

declare_clippy_lint! {
/// **What it does:** Warns on hexadecimal literals with mixed-case letter
/// digits.
///
/// **Why is this bad?** It looks confusing.
///
/// **Known problems:** None.
///
/// **Example:**
/// ```rust
/// // Bad
/// let y = 0x1a9BAcD;
///
/// // Good
/// let y = 0x1A9BACD;
/// ```
pub MIXED_CASE_HEX_LITERALS,
style,
"hex literals whose letter digits are not consistently upper- or lowercased"
}

declare_clippy_lint! {
/// **What it does:** Warns if literal suffixes are not separated by an
/// underscore.
///
/// **Why is this bad?** It is much less readable.
///
/// **Known problems:** None.
///
/// **Example:**
/// ```rust
/// // Bad
/// let y = 123832i32;
///
/// // Good
/// let y = 123832_i32;
/// ```
pub UNSEPARATED_LITERAL_SUFFIX,
pedantic,
"literals whose suffix is not separated by an underscore"
}

declare_clippy_lint! {
/// **What it does:** Warns if an integral constant literal starts with `0`.
///
/// **Why is this bad?** In some languages (including the infamous C language
/// and most of its
/// family), this marks an octal constant. In Rust however, this is a decimal
/// constant. This could
/// be confusing for both the writer and a reader of the constant.
///
/// **Known problems:** None.
///
/// **Example:**
///
/// In Rust:
/// ```rust
/// fn main() {
/// let a = 0123;
/// println!("{}", a);
/// }
/// ```
///
/// prints `123`, while in C:
///
/// ```c
/// #include <stdio.h>
///
/// int main() {
/// int a = 0123;
/// printf("%d\n", a);
/// }
/// ```
///
/// prints `83` (as `83 == 0o123` while `123 == 0o173`).
pub ZERO_PREFIXED_LITERAL,
complexity,
"integer literals starting with `0`"
}

declare_clippy_lint! {
/// **What it does:** Warns if a generic shadows a built-in type.
///
/// **Why is this bad?** This gives surprising type errors.
///
/// **Known problems:** None.
///
/// **Example:**
///
/// ```ignore
/// impl<u32> Foo<u32> {
/// fn impl_func(&self) -> u32 {
/// 42
/// }
/// }
/// ```
pub BUILTIN_TYPE_SHADOW,
style,
"shadowing a builtin type"
}

declare_clippy_lint! {
/// **What it does:** Checks for patterns in the form `name @ _`.
///
/// **Why is this bad?** It's almost always more readable to just use direct
/// bindings.
///
/// **Known problems:** None.
///
/// **Example:**
/// ```rust
/// # let v = Some("abc");
///
/// // Bad
/// match v {
/// Some(x) => (),
/// y @ _ => (),
/// }
///
/// // Good
/// match v {
/// Some(x) => (),
/// y => (),
/// }
/// ```
pub REDUNDANT_PATTERN,
style,
"using `name @ _` in a pattern"
}

declare_clippy_lint! {
/// **What it does:** Checks for tuple patterns with a wildcard
/// pattern (`_`) is next to a rest pattern (`..`).
///
/// _NOTE_: While `_, ..` means there is at least one element left, `..`
/// means there are 0 or more elements left. This can make a difference
/// when refactoring, but shouldn't result in errors in the refactored code,
/// since the wildcard pattern isn't used anyway.
/// **Why is this bad?** The wildcard pattern is unneeded as the rest pattern
/// can match that element as well.
///
/// **Known problems:** None.
///
/// **Example:**
/// ```rust
/// # struct TupleStruct(u32, u32, u32);
/// # let t = TupleStruct(1, 2, 3);
/// // Bad
/// match t {
/// TupleStruct(0, .., _) => (),
/// _ => (),
/// }
///
/// // Good
/// match t {
/// TupleStruct(0, ..) => (),
/// _ => (),
/// }
/// ```
pub UNNEEDED_WILDCARD_PATTERN,
complexity,
"tuple patterns with a wildcard pattern (`_`) is next to a rest pattern (`..`)"
}

declare_lint_pass!(MiscEarlyLints => [
UNNEEDED_FIELD_PATTERN,
DUPLICATE_UNDERSCORE_ARGUMENT,
DOUBLE_NEG,
MIXED_CASE_HEX_LITERALS,
UNSEPARATED_LITERAL_SUFFIX,
ZERO_PREFIXED_LITERAL,
BUILTIN_TYPE_SHADOW,
REDUNDANT_PATTERN,
UNNEEDED_WILDCARD_PATTERN,
]);

impl EarlyLintPass for MiscEarlyLints {
fn check_generics(&mut self, cx: &EarlyContext<'_>, gen: &Generics) {
for param in &gen.params {
if let GenericParamKind::Type { .. } = param.kind {
if let Some(prim_ty) = PrimTy::from_name(param.ident.name) {
span_lint(
cx,
BUILTIN_TYPE_SHADOW,
param.ident.span,
&format!("this generic shadows the built-in type `{}`", prim_ty.name()),
);
}
}
}
}

fn check_pat(&mut self, cx: &EarlyContext<'_>, pat: &Pat) {
if let PatKind::Struct(ref npat, ref pfields, _) = pat.kind {
let mut wilds = 0;
let type_name = npat
.segments
.last()
.expect("A path must have at least one segment")
.ident
.name;

for field in pfields {
if let PatKind::Wild = field.pat.kind {
wilds += 1;
}
}
if !pfields.is_empty() && wilds == pfields.len() {
span_lint_and_help(
cx,
UNNEEDED_FIELD_PATTERN,
pat.span,
"all the struct fields are matched to a wildcard pattern, consider using `..`",
None,
&format!("try with `{} {{ .. }}` instead", type_name),
);
return;
}
if wilds > 0 {
for field in pfields {
if let PatKind::Wild = field.pat.kind {
wilds -= 1;
if wilds > 0 {
span_lint(
cx,
UNNEEDED_FIELD_PATTERN,
field.span,
"you matched a field with a wildcard pattern, consider using `..` instead",
);
} else {
let mut normal = vec![];

for field in pfields {
match field.pat.kind {
PatKind::Wild => {},
_ => {
if let Ok(n) = cx.sess().source_map().span_to_snippet(field.span) {
normal.push(n);
}
},
}
}

span_lint_and_help(
cx,
UNNEEDED_FIELD_PATTERN,
field.span,
"you matched a field with a wildcard pattern, consider using `..` \
instead",
None,
&format!("try with `{} {{ {}, .. }}`", type_name, normal[..].join(", ")),
);
}
}
}
}
}

if let PatKind::Ident(left, ident, Some(ref right)) = pat.kind {
let left_binding = match left {
BindingMode::ByRef(Mutability::Mut) => "ref mut ",
BindingMode::ByRef(Mutability::Not) => "ref ",
BindingMode::ByValue(..) => "",
};

if let PatKind::Wild = right.kind {
span_lint_and_sugg(
cx,
REDUNDANT_PATTERN,
pat.span,
&format!(
"the `{} @ _` pattern can be written as just `{}`",
ident.name, ident.name,
),
"try",
format!("{}{}", left_binding, ident.name),
Applicability::MachineApplicable,
);
}
}

check_unneeded_wildcard_pattern(cx, pat);
}

fn check_fn(&mut self, cx: &EarlyContext<'_>, fn_kind: FnKind<'_>, _: Span, _: NodeId) {
let mut registered_names: FxHashMap<String, Span> = FxHashMap::default();

for arg in &fn_kind.decl().inputs {
if let PatKind::Ident(_, ident, None) = arg.pat.kind {
let arg_name = ident.to_string();

if let Some(arg_name) = arg_name.strip_prefix('_') {
if let Some(correspondence) = registered_names.get(arg_name) {
span_lint(
cx,
DUPLICATE_UNDERSCORE_ARGUMENT,
*correspondence,
&format!(
"`{}` already exists, having another argument having almost the same \
name makes code comprehension and documentation more difficult",
arg_name
),
);
}
} else {
registered_names.insert(arg_name, arg.pat.span);
}
}
}
}

fn check_expr(&mut self, cx: &EarlyContext<'_>, expr: &Expr) {
if in_external_macro(cx.sess(), expr.span) {
return;
}
match expr.kind {
ExprKind::Unary(UnOp::Neg, ref inner) => {
if let ExprKind::Unary(UnOp::Neg, _) = inner.kind {
span_lint(
cx,
DOUBLE_NEG,
expr.span,
"`--x` could be misinterpreted as pre-decrement by C programmers, is usually a no-op",
);
}
},
ExprKind::Lit(ref lit) => Self::check_lit(cx, lit),
_ => (),
}
}
}

impl MiscEarlyLints {
fn check_lit(cx: &EarlyContext<'_>, lit: &Lit) {
// We test if first character in snippet is a number, because the snippet could be an expansion
// from a built-in macro like `line!()` or a proc-macro like `#[wasm_bindgen]`.
// Note that this check also covers special case that `line!()` is eagerly expanded by compiler.
// See <https://github.com/rust-lang/rust-clippy/issues/4507> for a regression.
// FIXME: Find a better way to detect those cases.
let lit_snip = match snippet_opt(cx, lit.span) {
Some(snip) if snip.chars().next().map_or(false, |c| c.is_digit(10)) => snip,
_ => return,
};

if let LitKind::Int(value, lit_int_type) = lit.kind {
let suffix = match lit_int_type {
LitIntType::Signed(ty) => ty.name_str(),
LitIntType::Unsigned(ty) => ty.name_str(),
LitIntType::Unsuffixed => "",
};

let maybe_last_sep_idx = if let Some(val) = lit_snip.len().checked_sub(suffix.len() + 1) {
val
} else {
return; // It's useless so shouldn't lint.
};
// Do not lint when literal is unsuffixed.
if !suffix.is_empty() && lit_snip.as_bytes()[maybe_last_sep_idx] != b'_' {
span_lint_and_sugg(
cx,
UNSEPARATED_LITERAL_SUFFIX,
lit.span,
"integer type suffix should be separated by an underscore",
"add an underscore",
format!("{}_{}", &lit_snip[..=maybe_last_sep_idx], suffix),
Applicability::MachineApplicable,
);
}

if lit_snip.starts_with("0x") {
if maybe_last_sep_idx <= 2 {
// It's meaningless or causes range error.
return;
}
let mut seen = (false, false);
for ch in lit_snip.as_bytes()[2..=maybe_last_sep_idx].iter() {
match ch {
b'a'..=b'f' => seen.0 = true,
b'A'..=b'F' => seen.1 = true,
_ => {},
}
if seen.0 && seen.1 {
span_lint(
cx,
MIXED_CASE_HEX_LITERALS,
lit.span,
"inconsistent casing in hexadecimal literal",
);
break;
}
}
} else if lit_snip.starts_with("0b") || lit_snip.starts_with("0o") {
/* nothing to do */
} else if value != 0 && lit_snip.starts_with('0') {
span_lint_and_then(
cx,
ZERO_PREFIXED_LITERAL,
lit.span,
"this is a decimal constant",
|diag| {
diag.span_suggestion(
lit.span,
"if you mean to use a decimal constant, remove the `0` to avoid confusion",
lit_snip.trim_start_matches(|c| c == '_' || c == '0').to_string(),
Applicability::MaybeIncorrect,
);
diag.span_suggestion(
lit.span,
"if you mean to use an octal constant, use `0o`",
format!("0o{}", lit_snip.trim_start_matches(|c| c == '_' || c == '0')),
Applicability::MaybeIncorrect,
);
},
);
}
} else if let LitKind::Float(_, LitFloatType::Suffixed(float_ty)) = lit.kind {
let suffix = float_ty.name_str();
let maybe_last_sep_idx = if let Some(val) = lit_snip.len().checked_sub(suffix.len() + 1) {
val
} else {
return; // It's useless so shouldn't lint.
};
if lit_snip.as_bytes()[maybe_last_sep_idx] != b'_' {
span_lint_and_sugg(
cx,
UNSEPARATED_LITERAL_SUFFIX,
lit.span,
"float type suffix should be separated by an underscore",
"add an underscore",
format!("{}_{}", &lit_snip[..=maybe_last_sep_idx], suffix),
Applicability::MachineApplicable,
);
}
}
}
}

fn check_unneeded_wildcard_pattern(cx: &EarlyContext<'_>, pat: &Pat) {
if let PatKind::TupleStruct(_, ref patterns) | PatKind::Tuple(ref patterns) = pat.kind {
fn span_lint(cx: &EarlyContext<'_>, span: Span, only_one: bool) {
span_lint_and_sugg(
cx,
UNNEEDED_WILDCARD_PATTERN,
span,
if only_one {
"this pattern is unneeded as the `..` pattern can match that element"
} else {
"these patterns are unneeded as the `..` pattern can match those elements"
},
if only_one { "remove it" } else { "remove them" },
"".to_string(),
Applicability::MachineApplicable,
);
}

if let Some(rest_index) = patterns.iter().position(|pat| pat.is_rest()) {
if let Some((left_index, left_pat)) = patterns[..rest_index]
.iter()
.rev()
.take_while(|pat| matches!(pat.kind, PatKind::Wild))
.enumerate()
.last()
{
span_lint(cx, left_pat.span.until(patterns[rest_index].span), left_index == 0);
}

if let Some((right_index, right_pat)) = patterns[rest_index + 1..]
.iter()
.take_while(|pat| matches!(pat.kind, PatKind::Wild))
.enumerate()
.last()
{
span_lint(
cx,
patterns[rest_index].span.shrink_to_hi().to(right_pat.span),
right_index == 0,
);
}
}
}
}
@@ -0,0 +1,19 @@
use clippy_utils::diagnostics::span_lint;
use rustc_ast::ast::{GenericParam, GenericParamKind};
use rustc_hir::PrimTy;
use rustc_lint::EarlyContext;

use super::BUILTIN_TYPE_SHADOW;

pub(super) fn check(cx: &EarlyContext<'_>, param: &GenericParam) {
if let GenericParamKind::Type { .. } = param.kind {
if let Some(prim_ty) = PrimTy::from_name(param.ident.name) {
span_lint(
cx,
BUILTIN_TYPE_SHADOW,
param.ident.span,
&format!("this generic shadows the built-in type `{}`", prim_ty.name()),
);
}
}
}

src/tools/clippy/clippy_lints/src/misc_early/double_neg.rs (new file, 23 lines)
@@ -0,0 +1,23 @@
use super::MiscEarlyLints;
use clippy_utils::diagnostics::span_lint;
use rustc_ast::ast::{Expr, ExprKind, UnOp};
use rustc_lint::EarlyContext;

use super::DOUBLE_NEG;

pub(super) fn check(cx: &EarlyContext<'_>, expr: &Expr) {
match expr.kind {
ExprKind::Unary(UnOp::Neg, ref inner) => {
if let ExprKind::Unary(UnOp::Neg, _) = inner.kind {
span_lint(
cx,
DOUBLE_NEG,
expr.span,
"`--x` could be misinterpreted as pre-decrement by C programmers, is usually a no-op",
);
}
},
ExprKind::Lit(ref lit) => MiscEarlyLints::check_lit(cx, lit),
_ => (),
}
}
@@ -0,0 +1,34 @@
use clippy_utils::diagnostics::span_lint;
use rustc_ast::ast::Lit;
use rustc_lint::EarlyContext;

use super::MIXED_CASE_HEX_LITERALS;

pub(super) fn check(cx: &EarlyContext<'_>, lit: &Lit, suffix: &str, lit_snip: &str) {
let maybe_last_sep_idx = if let Some(val) = lit_snip.len().checked_sub(suffix.len() + 1) {
val
} else {
return; // It's useless so shouldn't lint.
};
if maybe_last_sep_idx <= 2 {
// It's meaningless or causes range error.
return;
}
let mut seen = (false, false);
for ch in lit_snip.as_bytes()[2..=maybe_last_sep_idx].iter() {
match ch {
b'a'..=b'f' => seen.0 = true,
b'A'..=b'F' => seen.1 = true,
_ => {},
}
if seen.0 && seen.1 {
span_lint(
cx,
MIXED_CASE_HEX_LITERALS,
lit.span,
"inconsistent casing in hexadecimal literal",
);
break;
}
}
}

src/tools/clippy/clippy_lints/src/misc_early/mod.rs (new file, 348 lines)
@ -0,0 +1,348 @@
|
||||
mod builtin_type_shadow;
|
||||
mod double_neg;
|
||||
mod mixed_case_hex_literals;
|
||||
mod redundant_pattern;
|
||||
mod unneeded_field_pattern;
|
||||
mod unneeded_wildcard_pattern;
|
||||
mod unseparated_literal_suffix;
|
||||
mod zero_prefixed_literal;
|
||||
|
||||
use clippy_utils::diagnostics::span_lint;
|
||||
use clippy_utils::source::snippet_opt;
|
||||
use rustc_ast::ast::{Expr, Generics, Lit, LitFloatType, LitIntType, LitKind, NodeId, Pat, PatKind};
|
||||
use rustc_ast::visit::FnKind;
|
||||
use rustc_data_structures::fx::FxHashMap;
|
||||
use rustc_lint::{EarlyContext, EarlyLintPass, LintContext};
|
||||
use rustc_middle::lint::in_external_macro;
|
||||
use rustc_session::{declare_lint_pass, declare_tool_lint};
|
||||
use rustc_span::source_map::Span;
|
||||
|
||||
declare_clippy_lint! {
|
||||
/// **What it does:** Checks for structure field patterns bound to wildcards.
|
||||
///
|
||||
/// **Why is this bad?** Using `..` instead is shorter and leaves the focus on
|
||||
/// the fields that are actually bound.
|
||||
///
|
||||
/// **Known problems:** None.
|
||||
///
|
||||
/// **Example:**
|
||||
/// ```rust
|
||||
/// # struct Foo {
|
||||
/// # a: i32,
|
||||
/// # b: i32,
|
||||
/// # c: i32,
|
||||
/// # }
|
||||
/// let f = Foo { a: 0, b: 0, c: 0 };
|
||||
///
|
||||
/// // Bad
|
||||
/// match f {
|
||||
/// Foo { a: _, b: 0, .. } => {},
|
||||
/// Foo { a: _, b: _, c: _ } => {},
|
||||
/// }
|
||||
///
|
||||
/// // Good
|
||||
/// match f {
|
||||
/// Foo { b: 0, .. } => {},
|
||||
/// Foo { .. } => {},
|
||||
/// }
|
||||
/// ```
|
||||
pub UNNEEDED_FIELD_PATTERN,
|
||||
restriction,
|
||||
"struct fields bound to a wildcard instead of using `..`"
|
||||
}
|
||||
|
||||
declare_clippy_lint! {
|
||||
/// **What it does:** Checks for function arguments having the similar names
|
||||
/// differing by an underscore.
|
||||
///
|
||||
/// **Why is this bad?** It affects code readability.
|
||||
///
|
||||
/// **Known problems:** None.
|
||||
///
|
||||
/// **Example:**
|
||||
/// ```rust
|
||||
/// // Bad
|
||||
/// fn foo(a: i32, _a: i32) {}
|
||||
///
|
||||
/// // Good
|
||||
/// fn bar(a: i32, _b: i32) {}
|
||||
/// ```
|
||||
pub DUPLICATE_UNDERSCORE_ARGUMENT,
|
||||
style,
|
||||
"function arguments having names which only differ by an underscore"
|
||||
}
|
||||
|
||||
declare_clippy_lint! {
|
||||
/// **What it does:** Detects expressions of the form `--x`.
|
||||
///
|
||||
/// **Why is this bad?** It can mislead C/C++ programmers to think `x` was
|
||||
/// decremented.
|
||||
///
|
||||
/// **Known problems:** None.
|
||||
///
|
||||
/// **Example:**
|
||||
/// ```rust
|
||||
/// let mut x = 3;
|
||||
/// --x;
|
||||
/// ```
|
||||
pub DOUBLE_NEG,
|
||||
style,
|
||||
"`--x`, which is a double negation of `x` and not a pre-decrement as in C/C++"
|
||||
}
|
||||
|
||||
declare_clippy_lint! {
|
||||
/// **What it does:** Warns on hexadecimal literals with mixed-case letter
|
||||
/// digits.
|
||||
///
|
||||
/// **Why is this bad?** It looks confusing.
|
||||
///
|
||||
/// **Known problems:** None.
|
||||
///
|
||||
/// **Example:**
|
||||
/// ```rust
|
||||
/// // Bad
|
||||
/// let y = 0x1a9BAcD;
|
||||
///
|
||||
/// // Good
|
||||
/// let y = 0x1A9BACD;
|
||||
/// ```
|
||||
pub MIXED_CASE_HEX_LITERALS,
|
||||
style,
|
||||
"hex literals whose letter digits are not consistently upper- or lowercased"
|
||||
}
|
||||
|
||||
declare_clippy_lint! {
|
||||
/// **What it does:** Warns if literal suffixes are not separated by an
|
||||
/// underscore.
|
||||
///
|
||||
/// **Why is this bad?** It is much less readable.
|
||||
///
|
||||
/// **Known problems:** None.
|
||||
///
|
||||
/// **Example:**
|
||||
/// ```rust
|
||||
/// // Bad
|
||||
/// let y = 123832i32;
|
||||
///
|
||||
/// // Good
|
||||
/// let y = 123832_i32;
|
||||
/// ```
|
||||
pub UNSEPARATED_LITERAL_SUFFIX,
|
||||
pedantic,
|
||||
"literals whose suffix is not separated by an underscore"
|
||||
}
|
||||
|
||||
declare_clippy_lint! {
|
||||
/// **What it does:** Warns if an integral constant literal starts with `0`.
|
||||
///
|
||||
/// **Why is this bad?** In some languages (including the infamous C language
|
||||
/// and most of its
|
||||
/// family), this marks an octal constant. In Rust however, this is a decimal
|
||||
/// constant. This could
|
||||
/// be confusing for both the writer and a reader of the constant.
|
||||
///
|
||||
/// **Known problems:** None.
|
||||
///
|
||||
/// **Example:**
|
||||
///
|
||||
/// In Rust:
|
||||
/// ```rust
|
||||
/// fn main() {
|
||||
/// let a = 0123;
|
||||
/// println!("{}", a);
|
||||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// prints `123`, while in C:
|
||||
///
|
||||
/// ```c
|
||||
/// #include <stdio.h>
|
||||
///
|
||||
/// int main() {
|
||||
/// int a = 0123;
|
||||
/// printf("%d\n", a);
|
||||
/// }
|
||||
/// ```
|
||||
///
|
||||
/// prints `83` (as `83 == 0o123` while `123 == 0o173`).
|
||||
pub ZERO_PREFIXED_LITERAL,
|
||||
complexity,
|
||||
"integer literals starting with `0`"
|
||||
}
|
||||
|
||||
declare_clippy_lint! {
|
||||
/// **What it does:** Warns if a generic shadows a built-in type.
|
||||
///
|
||||
/// **Why is this bad?** This gives surprising type errors.
|
||||
///
|
||||
/// **Known problems:** None.
|
||||
///
|
||||
/// **Example:**
|
||||
///
|
||||
/// ```ignore
|
||||
/// impl<u32> Foo<u32> {
|
||||
/// fn impl_func(&self) -> u32 {
|
||||
/// 42
|
||||
/// }
|
||||
/// }
|
||||
/// ```
|
||||
pub BUILTIN_TYPE_SHADOW,
|
||||
style,
|
||||
"shadowing a builtin type"
|
||||
}
|
||||
|
||||
declare_clippy_lint! {
|
||||
/// **What it does:** Checks for patterns in the form `name @ _`.
|
||||
///
|
||||
/// **Why is this bad?** It's almost always more readable to just use direct
|
||||
/// bindings.
|
||||
///
|
||||
/// **Known problems:** None.
|
||||
///
|
||||
/// **Example:**
|
||||
/// ```rust
|
||||
/// # let v = Some("abc");
|
||||
///
|
||||
/// // Bad
|
||||
/// match v {
|
||||
/// Some(x) => (),
|
||||
/// y @ _ => (),
|
||||
/// }
|
||||
///
|
||||
/// // Good
|
||||
/// match v {
|
||||
/// Some(x) => (),
|
||||
/// y => (),
|
||||
/// }
|
||||
/// ```
|
||||
pub REDUNDANT_PATTERN,
|
||||
style,
|
||||
"using `name @ _` in a pattern"
|
||||
}
|
||||
|
||||
declare_clippy_lint! {
|
||||
/// **What it does:** Checks for tuple patterns with a wildcard
|
||||
/// pattern (`_`) that is next to a rest pattern (`..`).
|
||||
///
|
||||
/// _NOTE_: While `_, ..` means there is at least one element left, `..`
|
||||
/// means there are 0 or more elements left. This can make a difference
|
||||
/// when refactoring, but shouldn't result in errors in the refactored code,
|
||||
/// since the wildcard pattern isn't used anyway.
|
||||
///
/// **Why is this bad?** The wildcard pattern is unneeded as the rest pattern
|
||||
/// can match that element as well.
|
||||
///
|
||||
/// **Known problems:** None.
|
||||
///
|
||||
/// **Example:**
|
||||
/// ```rust
|
||||
/// # struct TupleStruct(u32, u32, u32);
|
||||
/// # let t = TupleStruct(1, 2, 3);
|
||||
/// // Bad
|
||||
/// match t {
|
||||
/// TupleStruct(0, .., _) => (),
|
||||
/// _ => (),
|
||||
/// }
|
||||
///
|
||||
/// // Good
|
||||
/// match t {
|
||||
/// TupleStruct(0, ..) => (),
|
||||
/// _ => (),
|
||||
/// }
|
||||
/// ```
|
||||
pub UNNEEDED_WILDCARD_PATTERN,
|
||||
complexity,
|
||||
"tuple patterns with a wildcard pattern (`_`) is next to a rest pattern (`..`)"
|
||||
}
|
||||
|
||||
declare_lint_pass!(MiscEarlyLints => [
|
||||
UNNEEDED_FIELD_PATTERN,
|
||||
DUPLICATE_UNDERSCORE_ARGUMENT,
|
||||
DOUBLE_NEG,
|
||||
MIXED_CASE_HEX_LITERALS,
|
||||
UNSEPARATED_LITERAL_SUFFIX,
|
||||
ZERO_PREFIXED_LITERAL,
|
||||
BUILTIN_TYPE_SHADOW,
|
||||
REDUNDANT_PATTERN,
|
||||
UNNEEDED_WILDCARD_PATTERN,
|
||||
]);
|
||||
|
||||
impl EarlyLintPass for MiscEarlyLints {
|
||||
fn check_generics(&mut self, cx: &EarlyContext<'_>, gen: &Generics) {
|
||||
for param in &gen.params {
|
||||
builtin_type_shadow::check(cx, param);
|
||||
}
|
||||
}
|
||||
|
||||
fn check_pat(&mut self, cx: &EarlyContext<'_>, pat: &Pat) {
|
||||
unneeded_field_pattern::check(cx, pat);
|
||||
redundant_pattern::check(cx, pat);
|
||||
unneeded_wildcard_pattern::check(cx, pat);
|
||||
}
|
||||
|
||||
fn check_fn(&mut self, cx: &EarlyContext<'_>, fn_kind: FnKind<'_>, _: Span, _: NodeId) {
|
||||
let mut registered_names: FxHashMap<String, Span> = FxHashMap::default();
|
||||
|
||||
for arg in &fn_kind.decl().inputs {
|
||||
if let PatKind::Ident(_, ident, None) = arg.pat.kind {
|
||||
let arg_name = ident.to_string();
|
||||
|
||||
if let Some(arg_name) = arg_name.strip_prefix('_') {
|
||||
if let Some(correspondence) = registered_names.get(arg_name) {
|
||||
span_lint(
|
||||
cx,
|
||||
DUPLICATE_UNDERSCORE_ARGUMENT,
|
||||
*correspondence,
|
||||
&format!(
|
||||
"`{}` already exists, having another argument having almost the same \
|
||||
name makes code comprehension and documentation more difficult",
|
||||
arg_name
|
||||
),
|
||||
);
|
||||
}
|
||||
} else {
|
||||
registered_names.insert(arg_name, arg.pat.span);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn check_expr(&mut self, cx: &EarlyContext<'_>, expr: &Expr) {
|
||||
if in_external_macro(cx.sess(), expr.span) {
|
||||
return;
|
||||
}
|
||||
double_neg::check(cx, expr)
|
||||
}
|
||||
}
|
||||
|
||||
impl MiscEarlyLints {
|
||||
fn check_lit(cx: &EarlyContext<'_>, lit: &Lit) {
|
||||
// We test if the first character in the snippet is a number, because the snippet could be an expansion
|
||||
// from a built-in macro like `line!()` or a proc-macro like `#[wasm_bindgen]`.
|
||||
// Note that this check also covers the special case where `line!()` is eagerly expanded by the compiler.
|
||||
// See <https://github.com/rust-lang/rust-clippy/issues/4507> for a regression.
|
||||
// FIXME: Find a better way to detect those cases.
|
||||
let lit_snip = match snippet_opt(cx, lit.span) {
|
||||
Some(snip) if snip.chars().next().map_or(false, |c| c.is_digit(10)) => snip,
|
||||
_ => return,
|
||||
};
|
||||
|
||||
if let LitKind::Int(value, lit_int_type) = lit.kind {
|
||||
let suffix = match lit_int_type {
|
||||
LitIntType::Signed(ty) => ty.name_str(),
|
||||
LitIntType::Unsigned(ty) => ty.name_str(),
|
||||
LitIntType::Unsuffixed => "",
|
||||
};
|
||||
unseparated_literal_suffix::check(cx, lit, &lit_snip, suffix, "integer");
|
||||
if lit_snip.starts_with("0x") {
|
||||
mixed_case_hex_literals::check(cx, lit, suffix, &lit_snip)
|
||||
} else if lit_snip.starts_with("0b") || lit_snip.starts_with("0o") {
|
||||
/* nothing to do */
|
||||
} else if value != 0 && lit_snip.starts_with('0') {
|
||||
zero_prefixed_literal::check(cx, lit, &lit_snip)
|
||||
}
|
||||
} else if let LitKind::Float(_, LitFloatType::Suffixed(float_ty)) = lit.kind {
|
||||
let suffix = float_ty.name_str();
|
||||
unseparated_literal_suffix::check(cx, lit, &lit_snip, suffix, "float")
|
||||
}
|
||||
}
|
||||
}
|
@ -0,0 +1,31 @@
|
||||
use clippy_utils::diagnostics::span_lint_and_sugg;
|
||||
use rustc_ast::ast::{BindingMode, Mutability, Pat, PatKind};
|
||||
use rustc_errors::Applicability;
|
||||
use rustc_lint::EarlyContext;
|
||||
|
||||
use super::REDUNDANT_PATTERN;
|
||||
|
||||
pub(super) fn check(cx: &EarlyContext<'_>, pat: &Pat) {
|
||||
if let PatKind::Ident(left, ident, Some(ref right)) = pat.kind {
|
||||
let left_binding = match left {
|
||||
BindingMode::ByRef(Mutability::Mut) => "ref mut ",
|
||||
BindingMode::ByRef(Mutability::Not) => "ref ",
|
||||
BindingMode::ByValue(..) => "",
|
||||
};
|
||||
|
||||
if let PatKind::Wild = right.kind {
|
||||
span_lint_and_sugg(
|
||||
cx,
|
||||
REDUNDANT_PATTERN,
|
||||
pat.span,
|
||||
&format!(
|
||||
"the `{} @ _` pattern can be written as just `{}`",
|
||||
ident.name, ident.name,
|
||||
),
|
||||
"try",
|
||||
format!("{}{}", left_binding, ident.name),
|
||||
Applicability::MachineApplicable,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
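
// Illustrative sketch (not part of the original commit): for a binding like
// `ref mut y @ _`, `left_binding` above is "ref mut " and the suggestion is
// built as "ref mut y", so the lint rewrites
//     Some(ref mut y @ _) => (),
// into
//     Some(ref mut y) => (),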
|
@ -0,0 +1,72 @@
|
||||
use clippy_utils::diagnostics::{span_lint, span_lint_and_help};
|
||||
use rustc_ast::ast::{Pat, PatKind};
|
||||
use rustc_lint::{EarlyContext, LintContext};
|
||||
|
||||
use super::UNNEEDED_FIELD_PATTERN;
|
||||
|
||||
pub(super) fn check(cx: &EarlyContext<'_>, pat: &Pat) {
|
||||
if let PatKind::Struct(ref npat, ref pfields, _) = pat.kind {
|
||||
let mut wilds = 0;
|
||||
let type_name = npat
|
||||
.segments
|
||||
.last()
|
||||
.expect("A path must have at least one segment")
|
||||
.ident
|
||||
.name;
|
||||
|
||||
for field in pfields {
|
||||
if let PatKind::Wild = field.pat.kind {
|
||||
wilds += 1;
|
||||
}
|
||||
}
|
||||
if !pfields.is_empty() && wilds == pfields.len() {
|
||||
span_lint_and_help(
|
||||
cx,
|
||||
UNNEEDED_FIELD_PATTERN,
|
||||
pat.span,
|
||||
"all the struct fields are matched to a wildcard pattern, consider using `..`",
|
||||
None,
|
||||
&format!("try with `{} {{ .. }}` instead", type_name),
|
||||
);
|
||||
return;
|
||||
}
|
||||
if wilds > 0 {
|
||||
for field in pfields {
|
||||
if let PatKind::Wild = field.pat.kind {
|
||||
wilds -= 1;
|
||||
if wilds > 0 {
|
||||
span_lint(
|
||||
cx,
|
||||
UNNEEDED_FIELD_PATTERN,
|
||||
field.span,
|
||||
"you matched a field with a wildcard pattern, consider using `..` instead",
|
||||
);
|
||||
} else {
|
||||
let mut normal = vec![];
|
||||
|
||||
for field in pfields {
|
||||
match field.pat.kind {
|
||||
PatKind::Wild => {},
|
||||
_ => {
|
||||
if let Ok(n) = cx.sess().source_map().span_to_snippet(field.span) {
|
||||
normal.push(n);
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
span_lint_and_help(
|
||||
cx,
|
||||
UNNEEDED_FIELD_PATTERN,
|
||||
field.span,
|
||||
"you matched a field with a wildcard pattern, consider using `..` \
|
||||
instead",
|
||||
None,
|
||||
&format!("try with `{} {{ {}, .. }}`", type_name, normal[..].join(", ")),
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
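
// Illustrative sketch (not part of the original commit) of the two branches
// above: if *every* field is a wildcard, e.g. `Foo { a: _, b: _, c: _ }`, the
// first branch fires once with the help "try with `Foo { .. }` instead"; if
// only some fields are wildcards, e.g. `Foo { a: _, b: 0, c: _ }`, each
// wildcard field is linted and the last one also gets a help listing the
// remaining fields, here "try with `Foo { b: 0, .. }`".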
|
@ -0,0 +1,52 @@
|
||||
use clippy_utils::diagnostics::span_lint_and_sugg;
|
||||
use rustc_ast::ast::{Pat, PatKind};
|
||||
use rustc_errors::Applicability;
|
||||
use rustc_lint::EarlyContext;
|
||||
use rustc_span::source_map::Span;
|
||||
|
||||
use super::UNNEEDED_WILDCARD_PATTERN;
|
||||
|
||||
pub(super) fn check(cx: &EarlyContext<'_>, pat: &Pat) {
|
||||
if let PatKind::TupleStruct(_, ref patterns) | PatKind::Tuple(ref patterns) = pat.kind {
|
||||
if let Some(rest_index) = patterns.iter().position(|pat| pat.is_rest()) {
|
||||
if let Some((left_index, left_pat)) = patterns[..rest_index]
|
||||
.iter()
|
||||
.rev()
|
||||
.take_while(|pat| matches!(pat.kind, PatKind::Wild))
|
||||
.enumerate()
|
||||
.last()
|
||||
{
|
||||
span_lint(cx, left_pat.span.until(patterns[rest_index].span), left_index == 0);
|
||||
}
|
||||
|
||||
if let Some((right_index, right_pat)) = patterns[rest_index + 1..]
|
||||
.iter()
|
||||
.take_while(|pat| matches!(pat.kind, PatKind::Wild))
|
||||
.enumerate()
|
||||
.last()
|
||||
{
|
||||
span_lint(
|
||||
cx,
|
||||
patterns[rest_index].span.shrink_to_hi().to(right_pat.span),
|
||||
right_index == 0,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn span_lint(cx: &EarlyContext<'_>, span: Span, only_one: bool) {
|
||||
span_lint_and_sugg(
|
||||
cx,
|
||||
UNNEEDED_WILDCARD_PATTERN,
|
||||
span,
|
||||
if only_one {
|
||||
"this pattern is unneeded as the `..` pattern can match that element"
|
||||
} else {
|
||||
"these patterns are unneeded as the `..` pattern can match those elements"
|
||||
},
|
||||
if only_one { "remove it" } else { "remove them" },
|
||||
"".to_string(),
|
||||
Applicability::MachineApplicable,
|
||||
);
|
||||
}
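
// Illustrative sketch (not part of the original commit): for
//     TupleStruct(0, .., _) => (),
// the scan to the right of the rest pattern finds one trailing wildcard, so
// the span from just after `..` up to (and including) that `_` is replaced by
// an empty suggestion, leaving `TupleStruct(0, ..)`; wildcards to the left of
// `..` are handled symmetrically with `span.until(..)`.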
|
@ -0,0 +1,26 @@
|
||||
use clippy_utils::diagnostics::span_lint_and_sugg;
|
||||
use rustc_ast::ast::Lit;
|
||||
use rustc_errors::Applicability;
|
||||
use rustc_lint::EarlyContext;
|
||||
|
||||
use super::UNSEPARATED_LITERAL_SUFFIX;
|
||||
|
||||
pub(super) fn check(cx: &EarlyContext<'_>, lit: &Lit, lit_snip: &str, suffix: &str, sugg_type: &str) {
|
||||
let maybe_last_sep_idx = if let Some(val) = lit_snip.len().checked_sub(suffix.len() + 1) {
|
||||
val
|
||||
} else {
|
||||
return; // The snippet is no longer than the suffix itself, so there is nothing to lint.
|
||||
};
|
||||
// Do not lint when literal is unsuffixed.
|
||||
if !suffix.is_empty() && lit_snip.as_bytes()[maybe_last_sep_idx] != b'_' {
|
||||
span_lint_and_sugg(
|
||||
cx,
|
||||
UNSEPARATED_LITERAL_SUFFIX,
|
||||
lit.span,
|
||||
&format!("{} type suffix should be separated by an underscore", sugg_type),
|
||||
"add an underscore",
|
||||
format!("{}_{}", &lit_snip[..=maybe_last_sep_idx], suffix),
|
||||
Applicability::MachineApplicable,
|
||||
);
|
||||
}
|
||||
}
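
// Worked example (not part of the original commit): for the literal
// `123832i32` the snippet has length 9 and the suffix "i32" has length 3, so
// `maybe_last_sep_idx` = 9 - (3 + 1) = 5; byte 5 is '2', not '_', so the lint
// fires and the suggestion is `format!("{}_{}", &lit_snip[..=5], "i32")`,
// i.e. `123832_i32`.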
|
@ -0,0 +1,29 @@
|
||||
use clippy_utils::diagnostics::span_lint_and_then;
|
||||
use rustc_ast::ast::Lit;
|
||||
use rustc_errors::Applicability;
|
||||
use rustc_lint::EarlyContext;
|
||||
|
||||
use super::ZERO_PREFIXED_LITERAL;
|
||||
|
||||
pub(super) fn check(cx: &EarlyContext<'_>, lit: &Lit, lit_snip: &str) {
|
||||
span_lint_and_then(
|
||||
cx,
|
||||
ZERO_PREFIXED_LITERAL,
|
||||
lit.span,
|
||||
"this is a decimal constant",
|
||||
|diag| {
|
||||
diag.span_suggestion(
|
||||
lit.span,
|
||||
"if you mean to use a decimal constant, remove the `0` to avoid confusion",
|
||||
lit_snip.trim_start_matches(|c| c == '_' || c == '0').to_string(),
|
||||
Applicability::MaybeIncorrect,
|
||||
);
|
||||
diag.span_suggestion(
|
||||
lit.span,
|
||||
"if you mean to use an octal constant, use `0o`",
|
||||
format!("0o{}", lit_snip.trim_start_matches(|c| c == '_' || c == '0')),
|
||||
Applicability::MaybeIncorrect,
|
||||
);
|
||||
},
|
||||
);
|
||||
}
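
// Worked example (not part of the original commit): for `let a = 0123;` the
// snippet is "0123", so the two suggestions above are `123` (decimal, with the
// leading zeros and underscores stripped) and `0o123` (explicit octal), both
// marked `MaybeIncorrect` because only the author knows which base was meant.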
|
@ -7,7 +7,7 @@ use rustc_ast::ast::LitKind;
|
||||
use rustc_errors::Applicability;
|
||||
use rustc_hir::intravisit::{walk_block, walk_expr, walk_stmt, NestedVisitorMap, Visitor};
|
||||
use rustc_hir::{BindingAnnotation, Block, Expr, ExprKind, HirId, PatKind, QPath, Stmt, StmtKind};
|
||||
use rustc_lint::{LateContext, LateLintPass, Lint};
|
||||
use rustc_lint::{LateContext, LateLintPass};
|
||||
use rustc_middle::hir::map::Map;
|
||||
use rustc_session::{declare_lint_pass, declare_tool_lint};
|
||||
use rustc_span::symbol::sym;
|
||||
@ -157,26 +157,16 @@ impl SlowVectorInit {
|
||||
vec_alloc: &VecAllocation<'_>,
|
||||
) {
|
||||
match initialization {
|
||||
InitializationType::Extend(e) | InitializationType::Resize(e) => Self::emit_lint(
|
||||
cx,
|
||||
e,
|
||||
vec_alloc,
|
||||
"slow zero-filling initialization",
|
||||
SLOW_VECTOR_INITIALIZATION,
|
||||
),
|
||||
InitializationType::Extend(e) | InitializationType::Resize(e) => {
|
||||
Self::emit_lint(cx, e, vec_alloc, "slow zero-filling initialization")
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
fn emit_lint<'tcx>(
|
||||
cx: &LateContext<'tcx>,
|
||||
slow_fill: &Expr<'_>,
|
||||
vec_alloc: &VecAllocation<'_>,
|
||||
msg: &str,
|
||||
lint: &'static Lint,
|
||||
) {
|
||||
fn emit_lint<'tcx>(cx: &LateContext<'tcx>, slow_fill: &Expr<'_>, vec_alloc: &VecAllocation<'_>, msg: &str) {
|
||||
let len_expr = Sugg::hir(cx, vec_alloc.len_expr, "len");
|
||||
|
||||
span_lint_and_then(cx, lint, slow_fill.span, msg, |diag| {
|
||||
span_lint_and_then(cx, SLOW_VECTOR_INITIALIZATION, slow_fill.span, msg, |diag| {
|
||||
diag.span_suggestion(
|
||||
vec_alloc.allocation_expr.span,
|
||||
"consider replace allocation with",
|
||||
|
@ -47,7 +47,9 @@ impl EarlyLintPass for UnusedUnit {
|
||||
if_chain! {
|
||||
if let Some(stmt) = block.stmts.last();
|
||||
if let ast::StmtKind::Expr(ref expr) = stmt.kind;
|
||||
if is_unit_expr(expr) && !stmt.span.from_expansion();
|
||||
if is_unit_expr(expr);
|
||||
let ctxt = block.span.ctxt();
|
||||
if stmt.span.ctxt() == ctxt && expr.span.ctxt() == ctxt;
|
||||
then {
|
||||
let sp = expr.span;
|
||||
span_lint_and_sugg(
|
||||
|
@ -1,121 +1,110 @@
|
||||
//! Read configurations files.
|
||||
|
||||
#![deny(clippy::missing_docs_in_private_items)]
|
||||
#![allow(clippy::module_name_repetitions)]
|
||||
|
||||
use rustc_ast::ast::{LitKind, MetaItemKind, NestedMetaItem};
|
||||
use rustc_span::source_map;
|
||||
use source_map::Span;
|
||||
use std::lazy::SyncLazy;
|
||||
use serde::de::{Deserializer, IgnoredAny, IntoDeserializer, MapAccess, Visitor};
|
||||
use serde::Deserialize;
|
||||
use std::error::Error;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::sync::Mutex;
|
||||
use std::{env, fmt, fs, io};
|
||||
|
||||
/// Gets the configuration file from arguments.
|
||||
pub fn file_from_args(args: &[NestedMetaItem]) -> Result<Option<PathBuf>, (&'static str, Span)> {
|
||||
for arg in args.iter().filter_map(NestedMetaItem::meta_item) {
|
||||
if arg.has_name(sym!(conf_file)) {
|
||||
return match arg.kind {
|
||||
MetaItemKind::Word | MetaItemKind::List(_) => Err(("`conf_file` must be a named value", arg.span)),
|
||||
MetaItemKind::NameValue(ref value) => {
|
||||
if let LitKind::Str(ref file, _) = value.kind {
|
||||
Ok(Some(file.to_string().into()))
|
||||
} else {
|
||||
Err(("`conf_file` value must be a string", value.span))
|
||||
}
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
Ok(None)
|
||||
/// Conf with parse errors
|
||||
#[derive(Default)]
|
||||
pub struct TryConf {
|
||||
pub conf: Conf,
|
||||
pub errors: Vec<String>,
|
||||
}
|
||||
|
||||
/// Error from reading a configuration file.
|
||||
#[derive(Debug)]
|
||||
pub enum Error {
|
||||
/// An I/O error.
|
||||
Io(io::Error),
|
||||
/// Not valid toml or doesn't fit the expected config format
|
||||
Toml(String),
|
||||
}
|
||||
|
||||
impl fmt::Display for Error {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
match self {
|
||||
Self::Io(err) => err.fmt(f),
|
||||
Self::Toml(err) => err.fmt(f),
|
||||
impl TryConf {
|
||||
fn from_error(error: impl Error) -> Self {
|
||||
Self {
|
||||
conf: Conf::default(),
|
||||
errors: vec![error.to_string()],
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<io::Error> for Error {
|
||||
fn from(e: io::Error) -> Self {
|
||||
Self::Io(e)
|
||||
}
|
||||
}
|
||||
|
||||
/// Vec of errors that might be collected during config toml parsing
|
||||
static ERRORS: SyncLazy<Mutex<Vec<Error>>> = SyncLazy::new(|| Mutex::new(Vec::new()));
|
||||
|
||||
macro_rules! define_Conf {
|
||||
($(#[$doc:meta] ($config:ident, $config_str:literal: $Ty:ty, $default:expr),)+) => {
|
||||
mod helpers {
|
||||
use serde::Deserialize;
|
||||
/// Type used to store lint configuration.
|
||||
#[derive(Deserialize)]
|
||||
#[serde(rename_all = "kebab-case", deny_unknown_fields)]
|
||||
pub struct Conf {
|
||||
$(
|
||||
#[$doc]
|
||||
#[serde(default = $config_str)]
|
||||
#[serde(with = $config_str)]
|
||||
pub $config: $Ty,
|
||||
)+
|
||||
#[allow(dead_code)]
|
||||
#[serde(default)]
|
||||
third_party: Option<::toml::Value>,
|
||||
($(
|
||||
#[$doc:meta]
|
||||
$(#[conf_deprecated($dep:literal)])?
|
||||
($name:ident: $ty:ty = $default:expr),
|
||||
)*) => {
|
||||
/// Clippy lint configuration
|
||||
pub struct Conf {
|
||||
$(#[$doc] pub $name: $ty,)*
|
||||
}
|
||||
|
||||
mod defaults {
|
||||
$(pub fn $name() -> $ty { $default })*
|
||||
}
|
||||
|
||||
impl Default for Conf {
|
||||
fn default() -> Self {
|
||||
Self { $($name: defaults::$name(),)* }
|
||||
}
|
||||
}
|
||||
|
||||
impl<'de> Deserialize<'de> for TryConf {
|
||||
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: Deserializer<'de> {
|
||||
deserializer.deserialize_map(ConfVisitor)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
#[serde(field_identifier, rename_all = "kebab-case")]
|
||||
#[allow(non_camel_case_types)]
|
||||
enum Field { $($name,)* third_party, }
|
||||
|
||||
struct ConfVisitor;
|
||||
|
||||
impl<'de> Visitor<'de> for ConfVisitor {
|
||||
type Value = TryConf;
|
||||
|
||||
fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
formatter.write_str("Conf")
|
||||
}
|
||||
|
||||
$(
|
||||
mod $config {
|
||||
use serde::Deserialize;
|
||||
pub fn deserialize<'de, D: serde::Deserializer<'de>>(deserializer: D) -> Result<$Ty, D::Error> {
|
||||
use super::super::{ERRORS, Error};
|
||||
|
||||
Ok(
|
||||
<$Ty>::deserialize(deserializer).unwrap_or_else(|e| {
|
||||
ERRORS
|
||||
.lock()
|
||||
.expect("no threading here")
|
||||
.push(Error::Toml(e.to_string()));
|
||||
super::$config()
|
||||
})
|
||||
)
|
||||
fn visit_map<V>(self, mut map: V) -> Result<Self::Value, V::Error> where V: MapAccess<'de> {
|
||||
let mut errors = Vec::new();
|
||||
$(let mut $name = None;)*
|
||||
// could get `Field` here directly, but get `str` first for diagnostics
|
||||
while let Some(name) = map.next_key::<&str>()? {
|
||||
match Field::deserialize(name.into_deserializer())? {
|
||||
$(Field::$name => {
|
||||
$(errors.push(format!("deprecated field `{}`. {}", name, $dep));)?
|
||||
match map.next_value() {
|
||||
Err(e) => errors.push(e.to_string()),
|
||||
Ok(value) => match $name {
|
||||
Some(_) => errors.push(format!("duplicate field `{}`", name)),
|
||||
None => $name = Some(value),
|
||||
}
|
||||
}
|
||||
})*
|
||||
// white-listed; ignore
|
||||
Field::third_party => drop(map.next_value::<IgnoredAny>())
|
||||
}
|
||||
}
|
||||
|
||||
#[must_use]
|
||||
fn $config() -> $Ty {
|
||||
let x = $default;
|
||||
x
|
||||
}
|
||||
)+
|
||||
let conf = Conf { $($name: $name.unwrap_or_else(defaults::$name),)* };
|
||||
Ok(TryConf { conf, errors })
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
pub use self::helpers::Conf;
|
||||
// N.B., this macro is parsed by util/lintlib.py
|
||||
define_Conf! {
|
||||
/// Lint: CLONED_INSTEAD_OF_COPIED, REDUNDANT_FIELD_NAMES, REDUNDANT_STATIC_LIFETIMES, FILTER_MAP_NEXT, CHECKED_CONVERSIONS, MANUAL_RANGE_CONTAINS, USE_SELF, MEM_REPLACE_WITH_DEFAULT, MANUAL_NON_EXHAUSTIVE, OPTION_AS_REF_DEREF, MAP_UNWRAP_OR, MATCH_LIKE_MATCHES_MACRO, MANUAL_STRIP, MISSING_CONST_FOR_FN, UNNESTED_OR_PATTERNS, FROM_OVER_INTO, PTR_AS_PTR. The minimum rust version that the project supports
|
||||
(msrv, "msrv": Option<String>, None),
|
||||
/// Lint: CLONED_INSTEAD_OF_COPIED, REDUNDANT_FIELD_NAMES, REDUNDANT_STATIC_LIFETIMES, FILTER_MAP_NEXT, CHECKED_CONVERSIONS, MANUAL_RANGE_CONTAINS, USE_SELF, MEM_REPLACE_WITH_DEFAULT, MANUAL_NON_EXHAUSTIVE, OPTION_AS_REF_DEREF, MAP_UNWRAP_OR, MATCH_LIKE_MATCHES_MACRO, MANUAL_STRIP, MISSING_CONST_FOR_FN, UNNESTED_OR_PATTERNS, FROM_OVER_INTO, PTR_AS_PTR, IF_THEN_SOME_ELSE_NONE. The minimum rust version that the project supports
|
||||
(msrv: Option<String> = None),
|
||||
/// Lint: BLACKLISTED_NAME. The list of blacklisted names to lint about. NB: `bar` is not here since it has legitimate uses
|
||||
(blacklisted_names, "blacklisted_names": Vec<String>, ["foo", "baz", "quux"].iter().map(ToString::to_string).collect()),
|
||||
(blacklisted_names: Vec<String> = ["foo", "baz", "quux"].iter().map(ToString::to_string).collect()),
|
||||
/// Lint: COGNITIVE_COMPLEXITY. The maximum cognitive complexity a function can have
|
||||
(cognitive_complexity_threshold, "cognitive_complexity_threshold": u64, 25),
|
||||
(cognitive_complexity_threshold: u64 = 25),
|
||||
/// DEPRECATED LINT: CYCLOMATIC_COMPLEXITY. Use the Cognitive Complexity lint instead.
|
||||
(cyclomatic_complexity_threshold, "cyclomatic_complexity_threshold": Option<u64>, None),
|
||||
#[conf_deprecated("Please use `cognitive-complexity-threshold` instead")]
|
||||
(cyclomatic_complexity_threshold: Option<u64> = None),
|
||||
/// Lint: DOC_MARKDOWN. The list of words this lint should not consider as identifiers needing ticks
|
||||
(doc_valid_idents, "doc_valid_idents": Vec<String>, [
|
||||
(doc_valid_idents: Vec<String> = [
|
||||
"KiB", "MiB", "GiB", "TiB", "PiB", "EiB",
|
||||
"DirectX",
|
||||
"ECMAScript",
|
||||
@ -136,54 +125,47 @@ define_Conf! {
|
||||
"CamelCase",
|
||||
].iter().map(ToString::to_string).collect()),
|
||||
/// Lint: TOO_MANY_ARGUMENTS. The maximum number of argument a function or method can have
|
||||
(too_many_arguments_threshold, "too_many_arguments_threshold": u64, 7),
|
||||
(too_many_arguments_threshold: u64 = 7),
|
||||
/// Lint: TYPE_COMPLEXITY. The maximum complexity a type can have
|
||||
(type_complexity_threshold, "type_complexity_threshold": u64, 250),
|
||||
(type_complexity_threshold: u64 = 250),
|
||||
/// Lint: MANY_SINGLE_CHAR_NAMES. The maximum number of single char bindings a scope may have
|
||||
(single_char_binding_names_threshold, "single_char_binding_names_threshold": u64, 4),
|
||||
(single_char_binding_names_threshold: u64 = 4),
|
||||
/// Lint: BOXED_LOCAL, USELESS_VEC. The maximum size of objects (in bytes) that will be linted. Larger objects are ok on the heap
|
||||
(too_large_for_stack, "too_large_for_stack": u64, 200),
|
||||
(too_large_for_stack: u64 = 200),
|
||||
/// Lint: ENUM_VARIANT_NAMES. The minimum number of enum variants for the lints about variant names to trigger
|
||||
(enum_variant_name_threshold, "enum_variant_name_threshold": u64, 3),
|
||||
(enum_variant_name_threshold: u64 = 3),
|
||||
/// Lint: LARGE_ENUM_VARIANT. The maximum size of a enum's variant to avoid box suggestion
|
||||
(enum_variant_size_threshold, "enum_variant_size_threshold": u64, 200),
|
||||
(enum_variant_size_threshold: u64 = 200),
|
||||
/// Lint: VERBOSE_BIT_MASK. The maximum allowed size of a bit mask before suggesting to use 'trailing_zeros'
|
||||
(verbose_bit_mask_threshold, "verbose_bit_mask_threshold": u64, 1),
|
||||
(verbose_bit_mask_threshold: u64 = 1),
|
||||
/// Lint: DECIMAL_LITERAL_REPRESENTATION. The lower bound for linting decimal literals
|
||||
(literal_representation_threshold, "literal_representation_threshold": u64, 16384),
|
||||
(literal_representation_threshold: u64 = 16384),
|
||||
/// Lint: TRIVIALLY_COPY_PASS_BY_REF. The maximum size (in bytes) to consider a `Copy` type for passing by value instead of by reference.
|
||||
(trivial_copy_size_limit, "trivial_copy_size_limit": Option<u64>, None),
|
||||
(trivial_copy_size_limit: Option<u64> = None),
|
||||
/// Lint: LARGE_TYPE_PASS_BY_MOVE. The minimum size (in bytes) to consider a type for passing by reference instead of by value.
|
||||
(pass_by_value_size_limit, "pass_by_value_size_limit": u64, 256),
|
||||
(pass_by_value_size_limit: u64 = 256),
|
||||
/// Lint: TOO_MANY_LINES. The maximum number of lines a function or method can have
|
||||
(too_many_lines_threshold, "too_many_lines_threshold": u64, 100),
|
||||
(too_many_lines_threshold: u64 = 100),
|
||||
/// Lint: LARGE_STACK_ARRAYS, LARGE_CONST_ARRAYS. The maximum allowed size for arrays on the stack
|
||||
(array_size_threshold, "array_size_threshold": u64, 512_000),
|
||||
(array_size_threshold: u64 = 512_000),
|
||||
/// Lint: VEC_BOX. The size of the boxed type in bytes, where boxing in a `Vec` is allowed
|
||||
(vec_box_size_threshold, "vec_box_size_threshold": u64, 4096),
|
||||
(vec_box_size_threshold: u64 = 4096),
|
||||
/// Lint: TYPE_REPETITION_IN_BOUNDS. The maximum number of bounds a trait can have to be linted
|
||||
(max_trait_bounds, "max_trait_bounds": u64, 3),
|
||||
(max_trait_bounds: u64 = 3),
|
||||
/// Lint: STRUCT_EXCESSIVE_BOOLS. The maximum number of bools a struct can have
|
||||
(max_struct_bools, "max_struct_bools": u64, 3),
|
||||
(max_struct_bools: u64 = 3),
|
||||
/// Lint: FN_PARAMS_EXCESSIVE_BOOLS. The maximum number of bools function parameters can have
|
||||
(max_fn_params_bools, "max_fn_params_bools": u64, 3),
|
||||
(max_fn_params_bools: u64 = 3),
|
||||
/// Lint: WILDCARD_IMPORTS. Whether to allow certain wildcard imports (prelude, super in tests).
|
||||
(warn_on_all_wildcard_imports, "warn_on_all_wildcard_imports": bool, false),
|
||||
(warn_on_all_wildcard_imports: bool = false),
|
||||
/// Lint: DISALLOWED_METHOD. The list of disallowed methods, written as fully qualified paths.
|
||||
(disallowed_methods, "disallowed_methods": Vec<String>, Vec::<String>::new()),
|
||||
(disallowed_methods: Vec<String> = Vec::new()),
|
||||
/// Lint: UNREADABLE_LITERAL. Should the fraction of a decimal be linted to include separators.
|
||||
(unreadable_literal_lint_fractions, "unreadable_literal_lint_fractions": bool, true),
|
||||
(unreadable_literal_lint_fractions: bool = true),
|
||||
/// Lint: UPPER_CASE_ACRONYMS. Enables verbose mode. Triggers if there is more than one uppercase char next to each other
|
||||
(upper_case_acronyms_aggressive, "upper_case_acronyms_aggressive": bool, false),
|
||||
(upper_case_acronyms_aggressive: bool = false),
|
||||
/// Lint: _CARGO_COMMON_METADATA. For internal testing only, ignores the current `publish` settings in the Cargo manifest.
|
||||
(cargo_ignore_publish, "cargo_ignore_publish": bool, false),
|
||||
}
|
||||
|
||||
impl Default for Conf {
|
||||
#[must_use]
|
||||
fn default() -> Self {
|
||||
toml::from_str("").expect("we never error on empty config files")
|
||||
}
|
||||
(cargo_ignore_publish: bool = false),
|
||||
}
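
// Sketch of the generated behaviour (an assumption, not spelled out in the
// diff): with the macro invocation above, a `clippy.toml` containing
//     cyclomatic-complexity-threshold = 12
// yields a `TryConf` whose `errors` records the deprecation message from
// `#[conf_deprecated(...)]` while `conf.cyclomatic_complexity_threshold` is
// still parsed as `Some(12)`; the whitelisted `third-party` table is ignored.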
|
||||
|
||||
/// Search for the configuration file.
|
||||
@ -217,43 +199,13 @@ pub fn lookup_conf_file() -> io::Result<Option<PathBuf>> {
|
||||
}
|
||||
}
|
||||
|
||||
/// Produces a `Conf` filled with the default values and forwards the errors
|
||||
///
|
||||
/// Used internally for convenience
|
||||
fn default(errors: Vec<Error>) -> (Conf, Vec<Error>) {
|
||||
(Conf::default(), errors)
|
||||
}
|
||||
|
||||
/// Read the `toml` configuration file.
|
||||
///
|
||||
/// In case of error, the function tries to continue as much as possible.
|
||||
pub fn read(path: &Path) -> (Conf, Vec<Error>) {
|
||||
pub fn read(path: &Path) -> TryConf {
|
||||
let content = match fs::read_to_string(path) {
|
||||
Err(e) => return TryConf::from_error(e),
|
||||
Ok(content) => content,
|
||||
Err(err) => return default(vec![err.into()]),
|
||||
};
|
||||
|
||||
assert!(ERRORS.lock().expect("no threading -> mutex always safe").is_empty());
|
||||
match toml::from_str(&content) {
|
||||
Ok(toml) => {
|
||||
let mut errors = ERRORS.lock().expect("no threading -> mutex always safe").split_off(0);
|
||||
|
||||
let toml_ref: &Conf = &toml;
|
||||
|
||||
let cyc_field: Option<u64> = toml_ref.cyclomatic_complexity_threshold;
|
||||
|
||||
if cyc_field.is_some() {
|
||||
let cyc_err = "found deprecated field `cyclomatic-complexity-threshold`. Please use `cognitive-complexity-threshold` instead.".to_string();
|
||||
errors.push(Error::Toml(cyc_err));
|
||||
}
|
||||
|
||||
(toml, errors)
|
||||
},
|
||||
Err(e) => {
|
||||
let mut errors = ERRORS.lock().expect("no threading -> mutex always safe").split_off(0);
|
||||
errors.push(Error::Toml(e.to_string()));
|
||||
|
||||
default(errors)
|
||||
},
|
||||
}
|
||||
toml::from_str(&content).unwrap_or_else(TryConf::from_error)
|
||||
}
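
// Usage sketch (not part of the original commit): a caller can now
// destructure the result instead of handling a `(Conf, Vec<Error>)` tuple:
//     let TryConf { conf, errors } = read(&path);
//     for err in &errors { /* report each parse problem as a warning */ }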
|
||||
|
@ -32,6 +32,9 @@ use rustc_typeck::hir_ty_to_ty;
|
||||
|
||||
use std::borrow::{Borrow, Cow};
|
||||
|
||||
#[cfg(feature = "metadata-collector-lint")]
|
||||
pub mod metadata_collector;
|
||||
|
||||
declare_clippy_lint! {
|
||||
/// **What it does:** Checks for various things we like to keep tidy in clippy.
|
||||
///
|
||||
|
@ -0,0 +1,632 @@
|
||||
//! This lint is used to collect metadata about clippy lints. This metadata is exported as a json
|
||||
//! file and then used to generate the [clippy lint list](https://rust-lang.github.io/rust-clippy/master/index.html)
|
||||
//!
|
||||
//! This module, and therefore the entire lint, is guarded by a feature flag called
|
||||
//! `metadata-collector-lint`
|
||||
//!
|
||||
//! The module transforms all lint names to ascii lowercase to ensure that we don't have mismatches
|
||||
//! during any comparison or mapping. (Please take care of this, it's not fun to spend time on such
|
||||
//! a simple mistake)
|
||||
|
||||
// # NITs
|
||||
// - TODO xFrednet 2021-02-13: Collect deprecations and maybe renames
|
||||
|
||||
use if_chain::if_chain;
|
||||
use rustc_data_structures::fx::FxHashMap;
|
||||
use rustc_hir::{
|
||||
self as hir, def::DefKind, intravisit, intravisit::Visitor, ExprKind, Item, ItemKind, Mutability, QPath,
|
||||
};
|
||||
use rustc_lint::{CheckLintNameResult, LateContext, LateLintPass, LintContext, LintId};
|
||||
use rustc_middle::hir::map::Map;
|
||||
use rustc_session::{declare_tool_lint, impl_lint_pass};
|
||||
use rustc_span::{sym, Loc, Span, Symbol};
|
||||
use serde::{ser::SerializeStruct, Serialize, Serializer};
|
||||
use std::collections::BinaryHeap;
|
||||
use std::fs::{self, OpenOptions};
|
||||
use std::io::prelude::*;
|
||||
use std::path::Path;
|
||||
|
||||
use crate::utils::internal_lints::is_lint_ref_type;
|
||||
use clippy_utils::{
|
||||
diagnostics::span_lint, last_path_segment, match_function_call, match_path, paths, ty::match_type,
|
||||
ty::walk_ptrs_ty_depth,
|
||||
};
|
||||
|
||||
/// This is the output file of the lint collector.
|
||||
const OUTPUT_FILE: &str = "../util/gh-pages/metadata_collection.json";
|
||||
/// These lints are excluded from the export.
|
||||
const BLACK_LISTED_LINTS: [&str; 3] = ["lint_author", "deep_code_inspection", "internal_metadata_collector"];
|
||||
/// These groups will be ignored by the lint group matcher. This is useful for collections like
|
||||
/// `clippy::all`
|
||||
const IGNORED_LINT_GROUPS: [&str; 1] = ["clippy::all"];
|
||||
/// Lints within this group will be excluded from the collection
|
||||
const EXCLUDED_LINT_GROUPS: [&str; 1] = ["clippy::internal"];
|
||||
|
||||
const LINT_EMISSION_FUNCTIONS: [&[&str]; 7] = [
|
||||
&["clippy_utils", "diagnostics", "span_lint"],
|
||||
&["clippy_utils", "diagnostics", "span_lint_and_help"],
|
||||
&["clippy_utils", "diagnostics", "span_lint_and_note"],
|
||||
&["clippy_utils", "diagnostics", "span_lint_hir"],
|
||||
&["clippy_utils", "diagnostics", "span_lint_and_sugg"],
|
||||
&["clippy_utils", "diagnostics", "span_lint_and_then"],
|
||||
&["clippy_utils", "diagnostics", "span_lint_hir_and_then"],
|
||||
];
|
||||
const SUGGESTION_DIAGNOSTIC_BUILDER_METHODS: [(&str, bool); 9] = [
|
||||
("span_suggestion", false),
|
||||
("span_suggestion_short", false),
|
||||
("span_suggestion_verbose", false),
|
||||
("span_suggestion_hidden", false),
|
||||
("tool_only_span_suggestion", false),
|
||||
("multipart_suggestion", true),
|
||||
("multipart_suggestions", true),
|
||||
("tool_only_multipart_suggestion", true),
|
||||
("span_suggestions", true),
|
||||
];
|
||||
const SUGGESTION_FUNCTIONS: [&[&str]; 2] = [
|
||||
&["clippy_utils", "diagnostics", "multispan_sugg"],
|
||||
&["clippy_utils", "diagnostics", "multispan_sugg_with_applicability"],
|
||||
];
|
||||
|
||||
/// The index of the applicability name of `paths::APPLICABILITY_VALUES`
|
||||
const APPLICABILITY_NAME_INDEX: usize = 2;
|
||||
|
||||
declare_clippy_lint! {
|
||||
/// **What it does:** Collects metadata about clippy lints for the website.
|
||||
///
|
||||
/// This lint will be used to report problems of syntax parsing. You should hopefully never
|
||||
/// see this but never say never I guess ^^
|
||||
///
|
||||
/// **Why is this bad?** This is not a bad thing but definitely a hacky way to do it. See
|
||||
/// issue [#4310](https://github.com/rust-lang/rust-clippy/issues/4310) for a discussion
|
||||
/// about the implementation.
|
||||
///
|
||||
/// **Known problems:** Hopefully none. It would be pretty uncool to have a problem here :)
|
||||
///
|
||||
/// **Example output:**
|
||||
/// ```json,ignore
|
||||
/// {
|
||||
/// "id": "internal_metadata_collector",
|
||||
/// "id_span": {
|
||||
/// "path": "clippy_lints/src/utils/internal_lints/metadata_collector.rs",
|
||||
/// "line": 1
|
||||
/// },
|
||||
/// "group": "clippy::internal",
|
||||
/// "docs": " **What it does:** Collects metadata about clippy lints for the website. [...] "
|
||||
/// }
|
||||
/// ```
|
||||
pub INTERNAL_METADATA_COLLECTOR,
|
||||
internal_warn,
|
||||
"A busy bee collection metadata about lints"
|
||||
}
|
||||
|
||||
impl_lint_pass!(MetadataCollector => [INTERNAL_METADATA_COLLECTOR]);
|
||||
|
||||
#[allow(clippy::module_name_repetitions)]
|
||||
#[derive(Debug, Clone, Default)]
|
||||
pub struct MetadataCollector {
|
||||
/// All collected lints
|
||||
///
|
||||
/// We use a heap here so that the lints end up in alphabetical order in the export
|
||||
lints: BinaryHeap<LintMetadata>,
|
||||
applicability_info: FxHashMap<String, ApplicabilityInfo>,
|
||||
}
|
||||
|
||||
impl Drop for MetadataCollector {
|
||||
/// You might ask: How hacky is this?
|
||||
/// My answer: YES
|
||||
fn drop(&mut self) {
|
||||
// The metadata collector gets dropped twice; this makes sure that we only write
|
||||
// when the list is full
|
||||
if self.lints.is_empty() {
|
||||
return;
|
||||
}
|
||||
|
||||
let mut applicability_info = std::mem::take(&mut self.applicability_info);
|
||||
|
||||
// Mapping the final data
|
||||
let mut lints = std::mem::take(&mut self.lints).into_sorted_vec();
|
||||
lints
|
||||
.iter_mut()
|
||||
.for_each(|x| x.applicability = applicability_info.remove(&x.id));
|
||||
|
||||
// Outputting
|
||||
if Path::new(OUTPUT_FILE).exists() {
|
||||
fs::remove_file(OUTPUT_FILE).unwrap();
|
||||
}
|
||||
let mut file = OpenOptions::new().write(true).create(true).open(OUTPUT_FILE).unwrap();
|
||||
writeln!(file, "{}", serde_json::to_string_pretty(&lints).unwrap()).unwrap();
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, PartialEq, Eq, PartialOrd, Ord)]
|
||||
struct LintMetadata {
|
||||
id: String,
|
||||
id_span: SerializableSpan,
|
||||
group: String,
|
||||
docs: String,
|
||||
/// This field is only used in the output and will only be
|
||||
/// mapped shortly before the actual output.
|
||||
applicability: Option<ApplicabilityInfo>,
|
||||
}
|
||||
|
||||
impl LintMetadata {
|
||||
fn new(id: String, id_span: SerializableSpan, group: String, docs: String) -> Self {
|
||||
Self {
|
||||
id,
|
||||
id_span,
|
||||
group,
|
||||
docs,
|
||||
applicability: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, PartialEq, Eq, PartialOrd, Ord)]
|
||||
struct SerializableSpan {
|
||||
path: String,
|
||||
line: usize,
|
||||
}
|
||||
|
||||
impl std::fmt::Display for SerializableSpan {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
write!(f, "{}:{}", self.path.rsplit('/').next().unwrap_or_default(), self.line)
|
||||
}
|
||||
}
|
||||
|
||||
impl SerializableSpan {
|
||||
fn from_item(cx: &LateContext<'_>, item: &Item<'_>) -> Self {
|
||||
Self::from_span(cx, item.ident.span)
|
||||
}
|
||||
|
||||
fn from_span(cx: &LateContext<'_>, span: Span) -> Self {
|
||||
let loc: Loc = cx.sess().source_map().lookup_char_pos(span.lo());
|
||||
|
||||
Self {
|
||||
path: format!("{}", loc.file.name),
|
||||
line: loc.line,
|
||||
}
|
||||
}
|
||||
}
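
// Example (not part of the original commit): a span pointing at line 1 of
// `clippy_lints/src/utils/internal_lints/metadata_collector.rs` serializes to
// `{"path": "clippy_lints/src/utils/internal_lints/metadata_collector.rs", "line": 1}`
// and displays as `metadata_collector.rs:1` via the `Display` impl above.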
|
||||
|
||||
#[derive(Debug, Clone, Default, PartialEq, Eq, PartialOrd, Ord)]
|
||||
struct ApplicabilityInfo {
|
||||
/// Indicates if any of the lint emissions uses multiple spans. This is related to
|
||||
/// [rustfix#141](https://github.com/rust-lang/rustfix/issues/141) as such suggestions can
|
||||
/// currently not be applied automatically.
|
||||
is_multi_part_suggestion: bool,
|
||||
applicability: Option<usize>,
|
||||
}
|
||||
|
||||
impl Serialize for ApplicabilityInfo {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: Serializer,
|
||||
{
|
||||
let index = self.applicability.unwrap_or_default();
|
||||
|
||||
let mut s = serializer.serialize_struct("ApplicabilityInfo", 2)?;
|
||||
s.serialize_field("is_multi_part_suggestion", &self.is_multi_part_suggestion)?;
|
||||
s.serialize_field(
|
||||
"applicability",
|
||||
&paths::APPLICABILITY_VALUES[index][APPLICABILITY_NAME_INDEX],
|
||||
)?;
|
||||
s.end()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'hir> LateLintPass<'hir> for MetadataCollector {
|
||||
/// Collecting lint declarations like:
|
||||
/// ```rust, ignore
|
||||
/// declare_clippy_lint! {
|
||||
/// /// **What it does:** Something IDK.
|
||||
/// pub SOME_LINT,
|
||||
/// internal,
|
||||
/// "Who am I?"
|
||||
/// }
|
||||
/// ```
|
||||
fn check_item(&mut self, cx: &LateContext<'hir>, item: &'hir Item<'_>) {
|
||||
if_chain! {
|
||||
// item validation
|
||||
if let ItemKind::Static(ref ty, Mutability::Not, _) = item.kind;
|
||||
if is_lint_ref_type(cx, ty);
|
||||
// blacklist check
|
||||
let lint_name = sym_to_string(item.ident.name).to_ascii_lowercase();
|
||||
if !BLACK_LISTED_LINTS.contains(&lint_name.as_str());
|
||||
// metadata extraction
|
||||
if let Some(group) = get_lint_group_or_lint(cx, &lint_name, item);
|
||||
if let Some(docs) = extract_attr_docs_or_lint(cx, item);
|
||||
then {
|
||||
self.lints.push(LintMetadata::new(
|
||||
lint_name,
|
||||
SerializableSpan::from_item(cx, item),
|
||||
group,
|
||||
docs,
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Collecting constant applicability from the actual lint emissions
|
||||
///
|
||||
/// Example:
|
||||
/// ```rust, ignore
|
||||
/// span_lint_and_sugg(
|
||||
/// cx,
|
||||
/// SOME_LINT,
|
||||
/// item.span,
|
||||
/// "Le lint message",
|
||||
/// "Here comes help:",
|
||||
/// "#![allow(clippy::all)]",
|
||||
/// Applicability::MachineApplicable, // <-- Extracts this constant value
|
||||
/// );
|
||||
/// ```
|
||||
fn check_expr(&mut self, cx: &LateContext<'hir>, expr: &'hir hir::Expr<'_>) {
|
||||
if let Some(args) = match_lint_emission(cx, expr) {
|
||||
let mut emission_info = extract_emission_info(cx, args);
|
||||
if emission_info.is_empty() {
|
||||
// See:
|
||||
// - src/misc.rs:734:9
|
||||
// - src/methods/mod.rs:3545:13
|
||||
// - src/methods/mod.rs:3496:13
|
||||
// We are basically unable to resolve the lint name itself.
|
||||
return;
|
||||
}
|
||||
|
||||
for (lint_name, applicability, is_multi_part) in emission_info.drain(..) {
|
||||
let app_info = self.applicability_info.entry(lint_name).or_default();
|
||||
app_info.applicability = applicability;
|
||||
app_info.is_multi_part_suggestion = is_multi_part;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ==================================================================
|
||||
// Lint definition extraction
|
||||
// ==================================================================
|
||||
fn sym_to_string(sym: Symbol) -> String {
|
||||
sym.as_str().to_string()
|
||||
}
|
||||
|
||||
fn extract_attr_docs_or_lint(cx: &LateContext<'_>, item: &Item<'_>) -> Option<String> {
|
||||
extract_attr_docs(cx, item).or_else(|| {
|
||||
lint_collection_error_item(cx, item, "could not collect the lint documentation");
|
||||
None
|
||||
})
|
||||
}
|
||||
|
||||
/// This function collects all documentation that has been added to an item using
|
||||
/// `#[doc = r""]` attributes. Several attributes are aggregated using line breaks
|
||||
///
|
||||
/// ```ignore
|
||||
/// #[doc = r"Hello world!"]
|
||||
/// #[doc = r"=^.^="]
|
||||
/// struct SomeItem {}
|
||||
/// ```
|
||||
///
|
||||
/// Would result in `Hello world!\n=^.^=\n`
|
||||
fn extract_attr_docs(cx: &LateContext<'_>, item: &Item<'_>) -> Option<String> {
|
||||
cx.tcx
|
||||
.hir()
|
||||
.attrs(item.hir_id())
|
||||
.iter()
|
||||
.filter_map(|ref x| x.doc_str().map(|sym| sym.as_str().to_string()))
|
||||
.reduce(|mut acc, sym| {
|
||||
acc.push_str(&sym);
|
||||
acc.push('\n');
|
||||
acc
|
||||
})
|
||||
}
|
||||
|
||||
fn get_lint_group_or_lint(cx: &LateContext<'_>, lint_name: &str, item: &'hir Item<'_>) -> Option<String> {
|
||||
let result = cx.lint_store.check_lint_name(lint_name, Some(sym::clippy));
|
||||
if let CheckLintNameResult::Tool(Ok(lint_lst)) = result {
|
||||
get_lint_group(cx, lint_lst[0])
|
||||
.or_else(|| {
|
||||
lint_collection_error_item(cx, item, "Unable to determine lint group");
|
||||
None
|
||||
})
|
||||
.filter(|group| !EXCLUDED_LINT_GROUPS.contains(&group.as_str()))
|
||||
} else {
|
||||
lint_collection_error_item(cx, item, "Unable to find lint in lint_store");
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
fn get_lint_group(cx: &LateContext<'_>, lint_id: LintId) -> Option<String> {
|
||||
for (group_name, lints, _) in &cx.lint_store.get_lint_groups() {
|
||||
if IGNORED_LINT_GROUPS.contains(group_name) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if lints.iter().any(|x| *x == lint_id) {
|
||||
return Some((*group_name).to_string());
|
||||
}
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
// ==================================================================
|
||||
// Lint emission
|
||||
// ==================================================================
|
||||
fn lint_collection_error_item(cx: &LateContext<'_>, item: &Item<'_>, message: &str) {
|
||||
span_lint(
|
||||
cx,
|
||||
INTERNAL_METADATA_COLLECTOR,
|
||||
item.ident.span,
|
||||
&format!("metadata collection error for `{}`: {}", item.ident.name, message),
|
||||
);
|
||||
}
|
||||
|
||||
// ==================================================================
|
||||
// Applicability
|
||||
// ==================================================================
|
||||
/// This function checks if a given expression is a call to one of the known lint emission functions.
|
||||
/// It will return the function arguments if the emission matched any function.
|
||||
fn match_lint_emission<'hir>(cx: &LateContext<'hir>, expr: &'hir hir::Expr<'_>) -> Option<&'hir [hir::Expr<'hir>]> {
|
||||
LINT_EMISSION_FUNCTIONS
|
||||
.iter()
|
||||
.find_map(|emission_fn| match_function_call(cx, expr, emission_fn))
|
||||
}
|
||||
|
||||
fn take_higher_applicability(a: Option<usize>, b: Option<usize>) -> Option<usize> {
|
||||
a.map_or(b, |a| a.max(b.unwrap_or_default()).into())
|
||||
}
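
// A minimal sketch (not part of the original commit) of the helper's
// behaviour: it keeps the larger applicability index and treats `None` as
// "unknown", so a known applicability always wins over `None`.
#[cfg(test)]
mod take_higher_applicability_sketch {
    use super::take_higher_applicability;

    #[test]
    fn keeps_the_higher_index() {
        assert_eq!(take_higher_applicability(Some(1), Some(3)), Some(3));
        assert_eq!(take_higher_applicability(Some(3), Some(1)), Some(3));
        assert_eq!(take_higher_applicability(None, Some(2)), Some(2));
        assert_eq!(take_higher_applicability(Some(2), None), Some(2));
        assert_eq!(take_higher_applicability(None, None), None);
    }
}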
|
||||
|
||||
fn extract_emission_info<'hir>(
|
||||
cx: &LateContext<'hir>,
|
||||
args: &'hir [hir::Expr<'hir>],
|
||||
) -> Vec<(String, Option<usize>, bool)> {
|
||||
let mut lints = Vec::new();
|
||||
let mut applicability = None;
|
||||
let mut multi_part = false;
|
||||
|
||||
for arg in args {
|
||||
let (arg_ty, _) = walk_ptrs_ty_depth(cx.typeck_results().expr_ty(&arg));
|
||||
|
||||
if match_type(cx, arg_ty, &paths::LINT) {
|
||||
// If we found the lint arg, extract the lint name
|
||||
let mut resolved_lints = resolve_lints(cx, arg);
|
||||
lints.append(&mut resolved_lints);
|
||||
} else if match_type(cx, arg_ty, &paths::APPLICABILITY) {
|
||||
applicability = resolve_applicability(cx, arg);
|
||||
} else if arg_ty.is_closure() {
|
||||
multi_part |= check_is_multi_part(cx, arg);
|
||||
// TODO xFrednet 2021-03-01: don't use or_else but rather a comparison
|
||||
applicability = applicability.or_else(|| resolve_applicability(cx, arg));
|
||||
}
|
||||
}
|
||||
|
||||
lints
|
||||
.drain(..)
|
||||
.map(|lint_name| (lint_name, applicability, multi_part))
|
||||
.collect()
|
||||
}
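
// Illustrative sketch (not part of the original commit): for the
// `span_lint_and_sugg(cx, SOME_LINT, ..., Applicability::MachineApplicable)`
// call shown in the `check_expr` docs, this returns
// `vec![("some_lint".to_string(), Some(machine_applicable_index), false)]`,
// where `machine_applicable_index` is the position of `MachineApplicable`
// in `paths::APPLICABILITY_VALUES`.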
|
||||
|
||||
/// Resolves the possible lints that this expression could reference
|
||||
fn resolve_lints(cx: &LateContext<'hir>, expr: &'hir hir::Expr<'hir>) -> Vec<String> {
|
||||
let mut resolver = LintResolver::new(cx);
|
||||
resolver.visit_expr(expr);
|
||||
resolver.lints
|
||||
}
|
||||
|
||||
/// This function tries to resolve the applicability that is linked to the given expression.
|
||||
fn resolve_applicability(cx: &LateContext<'hir>, expr: &'hir hir::Expr<'hir>) -> Option<usize> {
|
||||
let mut resolver = ApplicabilityResolver::new(cx);
|
||||
resolver.visit_expr(expr);
|
||||
resolver.complete()
|
||||
}
|
||||
|
||||
fn check_is_multi_part(cx: &LateContext<'hir>, closure_expr: &'hir hir::Expr<'hir>) -> bool {
|
||||
if let ExprKind::Closure(_, _, body_id, _, _) = closure_expr.kind {
|
||||
let mut scanner = IsMultiSpanScanner::new(cx);
|
||||
intravisit::walk_body(&mut scanner, cx.tcx.hir().body(body_id));
|
||||
return scanner.is_multi_part();
|
||||
} else if let Some(local) = get_parent_local(cx, closure_expr) {
|
||||
if let Some(local_init) = local.init {
|
||||
return check_is_multi_part(cx, local_init);
|
||||
}
|
||||
}
|
||||
|
||||
false
|
||||
}
|
||||
|
||||
struct LintResolver<'a, 'hir> {
|
||||
cx: &'a LateContext<'hir>,
|
||||
lints: Vec<String>,
|
||||
}
|
||||
|
||||
impl<'a, 'hir> LintResolver<'a, 'hir> {
|
||||
fn new(cx: &'a LateContext<'hir>) -> Self {
|
||||
Self {
|
||||
cx,
|
||||
lints: Vec::<String>::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'hir> intravisit::Visitor<'hir> for LintResolver<'a, 'hir> {
|
||||
type Map = Map<'hir>;
|
||||
|
||||
fn nested_visit_map(&mut self) -> intravisit::NestedVisitorMap<Self::Map> {
|
||||
intravisit::NestedVisitorMap::All(self.cx.tcx.hir())
|
||||
}
|
||||
|
||||
fn visit_expr(&mut self, expr: &'hir hir::Expr<'hir>) {
|
||||
if_chain! {
|
||||
if let ExprKind::Path(qpath) = &expr.kind;
|
||||
if let QPath::Resolved(_, path) = qpath;
|
||||
|
||||
let (expr_ty, _) = walk_ptrs_ty_depth(self.cx.typeck_results().expr_ty(&expr));
|
||||
if match_type(self.cx, expr_ty, &paths::LINT);
|
||||
then {
|
||||
if let hir::def::Res::Def(DefKind::Static, _) = path.res {
|
||||
let lint_name = last_path_segment(qpath).ident.name;
|
||||
self.lints.push(sym_to_string(lint_name).to_ascii_lowercase());
|
||||
} else if let Some(local) = get_parent_local(self.cx, expr) {
|
||||
if let Some(local_init) = local.init {
|
||||
intravisit::walk_expr(self, local_init);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
intravisit::walk_expr(self, expr);
|
||||
}
|
||||
}
|
||||
|
||||
/// This visitor finds the highest applicability value in the visited expressions
|
||||
struct ApplicabilityResolver<'a, 'hir> {
|
||||
cx: &'a LateContext<'hir>,
|
||||
/// This is the index of the highest `Applicability` in `paths::APPLICABILITY_VALUES`
|
||||
applicability_index: Option<usize>,
|
||||
}
|
||||
|
||||
impl<'a, 'hir> ApplicabilityResolver<'a, 'hir> {
|
||||
fn new(cx: &'a LateContext<'hir>) -> Self {
|
||||
Self {
|
||||
cx,
|
||||
applicability_index: None,
|
||||
}
|
||||
}
|
||||
|
||||
fn add_new_index(&mut self, new_index: usize) {
|
||||
self.applicability_index = take_higher_applicability(self.applicability_index, Some(new_index));
|
||||
}
|
||||
|
||||
fn complete(self) -> Option<usize> {
|
||||
self.applicability_index
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'hir> intravisit::Visitor<'hir> for ApplicabilityResolver<'a, 'hir> {
|
||||
type Map = Map<'hir>;
|
||||
|
||||
fn nested_visit_map(&mut self) -> intravisit::NestedVisitorMap<Self::Map> {
|
||||
intravisit::NestedVisitorMap::All(self.cx.tcx.hir())
|
||||
}
|
||||
|
||||
fn visit_path(&mut self, path: &'hir hir::Path<'hir>, _id: hir::HirId) {
|
||||
for (index, enum_value) in paths::APPLICABILITY_VALUES.iter().enumerate() {
|
||||
if match_path(path, enum_value) {
|
||||
self.add_new_index(index);
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn visit_expr(&mut self, expr: &'hir hir::Expr<'hir>) {
|
||||
let (expr_ty, _) = walk_ptrs_ty_depth(self.cx.typeck_results().expr_ty(&expr));
|
||||
|
||||
if_chain! {
|
||||
if match_type(self.cx, expr_ty, &paths::APPLICABILITY);
|
||||
if let Some(local) = get_parent_local(self.cx, expr);
|
||||
if let Some(local_init) = local.init;
|
||||
then {
|
||||
intravisit::walk_expr(self, local_init);
|
||||
}
|
||||
};
|
||||
|
||||
// TODO xFrednet 2021-03-01: support function arguments?
|
||||
|
||||
intravisit::walk_expr(self, expr);
|
||||
}
|
||||
}
|
||||
|
||||
/// This returns the parent `Local` node if the expression is a reference to a local binding
|
||||
fn get_parent_local(cx: &LateContext<'hir>, expr: &'hir hir::Expr<'hir>) -> Option<&'hir hir::Local<'hir>> {
|
||||
if let ExprKind::Path(QPath::Resolved(_, path)) = expr.kind {
|
||||
if let hir::def::Res::Local(local_hir) = path.res {
|
||||
return get_parent_local_hir_id(cx, local_hir);
|
||||
}
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
fn get_parent_local_hir_id(cx: &LateContext<'hir>, hir_id: hir::HirId) -> Option<&'hir hir::Local<'hir>> {
|
||||
let map = cx.tcx.hir();
|
||||
|
||||
match map.find(map.get_parent_node(hir_id)) {
|
||||
Some(hir::Node::Local(local)) => Some(local),
|
||||
Some(hir::Node::Pat(pattern)) => get_parent_local_hir_id(cx, pattern.hir_id),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
/// This visitor checks if a lint emission closure produces a multi-part suggestion
|
||||
struct IsMultiSpanScanner<'a, 'hir> {
|
||||
cx: &'a LateContext<'hir>,
|
||||
suggestion_count: usize,
|
||||
}
|
||||
|
||||
impl<'a, 'hir> IsMultiSpanScanner<'a, 'hir> {
|
||||
fn new(cx: &'a LateContext<'hir>) -> Self {
|
||||
Self {
|
||||
cx,
|
||||
suggestion_count: 0,
|
||||
}
|
||||
}
|
||||
|
||||
/// Adds a new single-span suggestion to the counter
|
||||
fn add_single_span_suggestion(&mut self) {
|
||||
self.suggestion_count += 1;
|
||||
}
|
||||
|
||||
/// Signals that a suggestion with possibly multiple spans was found
|
||||
fn add_multi_part_suggestion(&mut self) {
|
||||
self.suggestion_count += 2;
|
||||
}
|
||||
|
||||
/// Checks if the suggestions include multiple spans
|
||||
fn is_multi_part(&self) -> bool {
|
||||
self.suggestion_count > 1
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, 'hir> intravisit::Visitor<'hir> for IsMultiSpanScanner<'a, 'hir> {
|
||||
type Map = Map<'hir>;
|
||||
|
||||
fn nested_visit_map(&mut self) -> intravisit::NestedVisitorMap<Self::Map> {
|
||||
intravisit::NestedVisitorMap::All(self.cx.tcx.hir())
|
||||
}
|
||||
|
||||
fn visit_expr(&mut self, expr: &'hir hir::Expr<'hir>) {
|
||||
// Early return if the lint is already multi span
|
||||
if self.is_multi_part() {
|
||||
return;
|
||||
}
|
||||
|
||||
match &expr.kind {
|
||||
ExprKind::Call(fn_expr, _args) => {
|
||||
let found_function = SUGGESTION_FUNCTIONS
|
||||
.iter()
|
||||
.any(|func_path| match_function_call(self.cx, fn_expr, func_path).is_some());
|
||||
if found_function {
|
||||
// These functions are all multi part suggestions
|
||||
self.add_single_span_suggestion()
|
||||
}
|
||||
},
|
||||
ExprKind::MethodCall(path, _path_span, arg, _arg_span) => {
|
||||
let (self_ty, _) = walk_ptrs_ty_depth(self.cx.typeck_results().expr_ty(&arg[0]));
|
||||
if match_type(self.cx, self_ty, &paths::DIAGNOSTIC_BUILDER) {
|
||||
let called_method = path.ident.name.as_str().to_string();
|
||||
for (method_name, is_multi_part) in &SUGGESTION_DIAGNOSTIC_BUILDER_METHODS {
|
||||
if *method_name == called_method {
|
||||
if *is_multi_part {
|
||||
self.add_multi_part_suggestion();
|
||||
} else {
|
||||
self.add_single_span_suggestion();
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
_ => {},
|
||||
}
|
||||
|
||||
intravisit::walk_expr(self, expr);
|
||||
}
|
||||
}
|
@ -1,5 +1,5 @@
|
||||
pub mod author;
|
||||
pub mod conf;
|
||||
pub mod inspector;
|
||||
#[cfg(feature = "internal-lints")]
|
||||
#[cfg(any(feature = "internal-lints", feature = "metadata-collector-lint"))]
|
||||
pub mod internal_lints;
|
||||
|
@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "clippy_utils"
|
||||
version = "0.1.53"
|
||||
version = "0.1.54"
|
||||
authors = ["The Rust Clippy Developers"]
|
||||
edition = "2018"
|
||||
publish = false
|
||||
@ -15,6 +15,7 @@ rustc-semver="1.1.0"
|
||||
|
||||
[features]
|
||||
internal-lints = []
|
||||
metadata-collector-lint = []
|
||||
|
||||
[package.metadata.rust-analyzer]
|
||||
# This crate uses #[feature(rustc_private)]
|
||||
|
@ -1,4 +1,12 @@
|
||||
//! Clippy wrappers around rustc's diagnostic functions.
|
||||
//!
|
||||
//! These functions are used by the `INTERNAL_METADATA_COLLECTOR` lint to collect the corresponding
|
||||
//! lint applicability. Please make sure that you update the `LINT_EMISSION_FUNCTIONS` variable in
|
||||
//! `clippy_lints::utils::internal_lints::metadata_collector` when a new function is added
|
||||
//! or renamed.
|
||||
//!
|
||||
//! Thank you!
|
||||
//! ~The `INTERNAL_METADATA_COLLECTOR` lint
|
||||
|
||||
use rustc_errors::{Applicability, DiagnosticBuilder};
|
||||
use rustc_hir::HirId;
|
||||
|
@ -713,7 +713,7 @@ impl<'a, 'tcx> SpanlessHash<'a, 'tcx> {
|
||||
self.hash_expr(e);
|
||||
|
||||
for arm in arms {
|
||||
// TODO: arm.pat?
|
||||
self.hash_pat(arm.pat);
|
||||
if let Some(ref e) = arm.guard {
|
||||
self.hash_guard(e);
|
||||
}
|
||||
@ -791,6 +791,72 @@ impl<'a, 'tcx> SpanlessHash<'a, 'tcx> {
// self.maybe_typeck_results.unwrap().qpath_res(p, id).hash(&mut self.s);
}

pub fn hash_pat(&mut self, pat: &Pat<'_>) {
std::mem::discriminant(&pat.kind).hash(&mut self.s);
match pat.kind {
PatKind::Binding(ann, _, _, pat) => {
ann.hash_stable(&mut self.cx.tcx.get_stable_hashing_context(), &mut self.s);
if let Some(pat) = pat {
self.hash_pat(pat);
}
},
PatKind::Box(pat) => self.hash_pat(pat),
PatKind::Lit(expr) => self.hash_expr(expr),
PatKind::Or(pats) => {
for pat in pats {
self.hash_pat(pat);
}
},
PatKind::Path(ref qpath) => self.hash_qpath(qpath),
PatKind::Range(s, e, i) => {
if let Some(s) = s {
self.hash_expr(s);
}
if let Some(e) = e {
self.hash_expr(e);
}
i.hash_stable(&mut self.cx.tcx.get_stable_hashing_context(), &mut self.s);
},
PatKind::Ref(pat, m) => {
self.hash_pat(pat);
m.hash(&mut self.s);
},
PatKind::Slice(l, m, r) => {
for pat in l {
self.hash_pat(pat);
}
if let Some(pat) = m {
self.hash_pat(pat);
}
for pat in r {
self.hash_pat(pat);
}
},
PatKind::Struct(ref qpath, fields, e) => {
self.hash_qpath(qpath);
for f in fields {
self.hash_name(f.ident.name);
self.hash_pat(f.pat);
}
e.hash(&mut self.s)
},
PatKind::Tuple(pats, e) => {
for pat in pats {
self.hash_pat(pat);
}
e.hash(&mut self.s);
},
PatKind::TupleStruct(ref qpath, pats, e) => {
self.hash_qpath(qpath);
for pat in pats {
self.hash_pat(pat);
}
e.hash(&mut self.s);
},
PatKind::Wild => {},
}
}

pub fn hash_path(&mut self, path: &Path<'_>) {
match path.res {
// constant hash since equality is dependant on inter-expression context
@ -808,6 +874,7 @@ impl<'a, 'tcx> SpanlessHash<'a, 'tcx> {

match &b.kind {
StmtKind::Local(local) => {
self.hash_pat(local.pat);
if let Some(ref init) = local.init {
self.hash_expr(init);
}
@ -827,7 +894,7 @@ impl<'a, 'tcx> SpanlessHash<'a, 'tcx> {
}
}

pub fn hash_lifetime(&mut self, lifetime: &Lifetime) {
pub fn hash_lifetime(&mut self, lifetime: Lifetime) {
std::mem::discriminant(&lifetime.name).hash(&mut self.s);
if let LifetimeName::Param(ref name) = lifetime.name {
std::mem::discriminant(name).hash(&mut self.s);
@ -844,12 +911,8 @@ impl<'a, 'tcx> SpanlessHash<'a, 'tcx> {
}

pub fn hash_ty(&mut self, ty: &Ty<'_>) {
self.hash_tykind(&ty.kind);
}

pub fn hash_tykind(&mut self, ty: &TyKind<'_>) {
std::mem::discriminant(ty).hash(&mut self.s);
match ty {
std::mem::discriminant(&ty.kind).hash(&mut self.s);
match ty.kind {
TyKind::Slice(ty) => {
self.hash_ty(ty);
},
@ -857,11 +920,11 @@ impl<'a, 'tcx> SpanlessHash<'a, 'tcx> {
self.hash_ty(ty);
self.hash_body(anon_const.body);
},
TyKind::Ptr(mut_ty) => {
TyKind::Ptr(ref mut_ty) => {
self.hash_ty(&mut_ty.ty);
mut_ty.mutbl.hash(&mut self.s);
},
TyKind::Rptr(lifetime, mut_ty) => {
TyKind::Rptr(lifetime, ref mut_ty) => {
self.hash_lifetime(lifetime);
self.hash_ty(&mut_ty.ty);
mut_ty.mutbl.hash(&mut self.s);
@ -883,11 +946,11 @@ impl<'a, 'tcx> SpanlessHash<'a, 'tcx> {
bfn.decl.c_variadic.hash(&mut self.s);
},
TyKind::Tup(ty_list) => {
for ty in *ty_list {
for ty in ty_list {
self.hash_ty(ty);
}
},
TyKind::Path(qpath) => match qpath {
TyKind::Path(ref qpath) => match qpath {
QPath::Resolved(ref maybe_ty, ref path) => {
if let Some(ref ty) = maybe_ty {
self.hash_ty(ty);
@ -927,9 +990,9 @@ impl<'a, 'tcx> SpanlessHash<'a, 'tcx> {

fn hash_generic_args(&mut self, arg_list: &[GenericArg<'_>]) {
for arg in arg_list {
match arg {
GenericArg::Lifetime(ref l) => self.hash_lifetime(l),
GenericArg::Type(ref ty) => self.hash_ty(&ty),
match *arg {
GenericArg::Lifetime(l) => self.hash_lifetime(l),
GenericArg::Type(ref ty) => self.hash_ty(ty),
GenericArg::Const(ref ca) => self.hash_body(ca.value.body),
}
}

@ -61,12 +61,12 @@ use rustc_data_structures::fx::FxHashMap;
use rustc_hir as hir;
use rustc_hir::def::{DefKind, Res};
use rustc_hir::def_id::{DefId, LOCAL_CRATE};
use rustc_hir::intravisit::{self, walk_expr, ErasedMap, NestedVisitorMap, Visitor};
use rustc_hir::intravisit::{self, walk_expr, ErasedMap, FnKind, NestedVisitorMap, Visitor};
use rustc_hir::LangItem::{ResultErr, ResultOk};
use rustc_hir::{
def, Arm, BindingAnnotation, Block, Body, Constness, Destination, Expr, ExprKind, FnDecl, GenericArgs, HirId, Impl,
ImplItem, ImplItemKind, Item, ItemKind, LangItem, Local, MatchSource, Node, Param, Pat, PatKind, Path, PathSegment,
QPath, Stmt, StmtKind, TraitItem, TraitItemKind, TraitRef, TyKind,
ImplItem, ImplItemKind, IsAsync, Item, ItemKind, LangItem, Local, MatchSource, Node, Param, Pat, PatKind, Path,
PathSegment, QPath, Stmt, StmtKind, TraitItem, TraitItemKind, TraitRef, TyKind,
};
use rustc_lint::{LateContext, Level, Lint, LintContext};
use rustc_middle::hir::exports::Export;
@ -821,7 +821,13 @@ pub fn get_parent_node(tcx: TyCtxt<'_>, id: HirId) -> Option<Node<'_>> {

/// Gets the parent expression, if any –- this is useful to constrain a lint.
pub fn get_parent_expr<'tcx>(cx: &LateContext<'tcx>, e: &Expr<'_>) -> Option<&'tcx Expr<'tcx>> {
match get_parent_node(cx.tcx, e.hir_id) {
get_parent_expr_for_hir(cx, e.hir_id)
}

/// This retrieves the parent for the given `HirId` if it's an expression. This is useful for
/// constraint lints
pub fn get_parent_expr_for_hir<'tcx>(cx: &LateContext<'tcx>, hir_id: hir::HirId) -> Option<&'tcx Expr<'tcx>> {
match get_parent_node(cx.tcx, hir_id) {
Some(Node::Expr(parent)) => Some(parent),
_ => None,
}
@ -1301,6 +1307,40 @@ pub fn if_sequence<'tcx>(mut expr: &'tcx Expr<'tcx>) -> (Vec<&'tcx Expr<'tcx>>,
(conds, blocks)
}

/// Checks if the given function kind is an async function.
pub fn is_async_fn(kind: FnKind<'_>) -> bool {
matches!(kind, FnKind::ItemFn(_, _, header, _) if header.asyncness == IsAsync::Async)
}

/// Peels away all the compiler generated code surrounding the body of an async function,
pub fn get_async_fn_body(tcx: TyCtxt<'tcx>, body: &Body<'_>) -> Option<&'tcx Expr<'tcx>> {
if let ExprKind::Call(
_,
&[Expr {
kind: ExprKind::Closure(_, _, body, _, _),
..
}],
) = body.value.kind
{
if let ExprKind::Block(
Block {
stmts: [],
expr:
Some(Expr {
kind: ExprKind::DropTemps(expr),
..
}),
..
},
_,
) = tcx.hir().body(body).value.kind
{
return Some(expr);
}
};
None
}

// Finds the `#[must_use]` attribute, if any
pub fn must_use_attr(attrs: &[Attribute]) -> Option<&Attribute> {
attrs.iter().find(|a| a.has_name(sym::must_use))

@ -5,6 +5,17 @@
//! See <https://github.com/rust-lang/rust-clippy/issues/5393> for more information.

pub const ANY_TRAIT: [&str; 3] = ["core", "any", "Any"];
#[cfg(feature = "metadata-collector-lint")]
pub const APPLICABILITY: [&str; 2] = ["rustc_lint_defs", "Applicability"];
#[cfg(feature = "metadata-collector-lint")]
pub const APPLICABILITY_VALUES: [[&str; 3]; 4] = [
["rustc_lint_defs", "Applicability", "Unspecified"],
["rustc_lint_defs", "Applicability", "HasPlaceholders"],
["rustc_lint_defs", "Applicability", "MaybeIncorrect"],
["rustc_lint_defs", "Applicability", "MachineApplicable"],
];
#[cfg(feature = "metadata-collector-lint")]
pub const DIAGNOSTIC_BUILDER: [&str; 3] = ["rustc_errors", "diagnostic_builder", "DiagnosticBuilder"];
pub const ARC_PTR_EQ: [&str; 4] = ["alloc", "sync", "Arc", "ptr_eq"];
pub const ASMUT_TRAIT: [&str; 3] = ["core", "convert", "AsMut"];
pub const ASREF_TRAIT: [&str; 3] = ["core", "convert", "AsRef"];
@ -72,7 +83,7 @@ pub const KW_MODULE: [&str; 3] = ["rustc_span", "symbol", "kw"];
#[cfg(feature = "internal-lints")]
pub const LATE_CONTEXT: [&str; 2] = ["rustc_lint", "LateContext"];
pub const LINKED_LIST: [&str; 4] = ["alloc", "collections", "linked_list", "LinkedList"];
#[cfg(feature = "internal-lints")]
#[cfg(any(feature = "internal-lints", feature = "metadata-collector-lint"))]
pub const LINT: [&str; 2] = ["rustc_lint_defs", "Lint"];
pub const MEM_DISCRIMINANT: [&str; 3] = ["core", "mem", "discriminant"];
pub const MEM_FORGET: [&str; 3] = ["core", "mem", "forget"];
@ -280,17 +280,17 @@ pub fn snippet_with_context(
default: &'a str,
applicability: &mut Applicability,
) -> (Cow<'a, str>, bool) {
let outer_span = hygiene::walk_chain(span, outer);
let (span, is_macro_call) = if outer_span.ctxt() == outer {
(outer_span, span.ctxt() != outer)
} else {
// The span is from a macro argument, and the outer context is the macro using the argument
if *applicability != Applicability::Unspecified {
*applicability = Applicability::MaybeIncorrect;
}
// TODO: get the argument span.
(span, false)
};
let (span, is_macro_call) = walk_span_to_context(span, outer).map_or_else(
|| {
// The span is from a macro argument, and the outer context is the macro using the argument
if *applicability != Applicability::Unspecified {
*applicability = Applicability::MaybeIncorrect;
}
// TODO: get the argument span.
(span, false)
},
|outer_span| (outer_span, span.ctxt() != outer),
);

(
snippet_with_applicability(cx, span, default, applicability),
@ -298,6 +298,37 @@ pub fn snippet_with_context(
)
}

/// Walks the span up to the target context, thereby returning the macro call site if the span is
/// inside a macro expansion, or the original span if it is not. Note this will return `None` in the
/// case of the span being in a macro expansion, but the target context is from expanding a macro
/// argument.
///
/// Given the following
///
/// ```rust,ignore
/// macro_rules! m { ($e:expr) => { f($e) }; }
/// g(m!(0))
/// ```
///
/// If called with a span of the call to `f` and a context of the call to `g` this will return a
/// span containing `m!(0)`. However, if called with a span of the literal `0` this will give a span
/// containing `0` as the context is the same as the outer context.
///
/// This will traverse through multiple macro calls. Given the following:
///
/// ```rust,ignore
/// macro_rules! m { ($e:expr) => { n!($e, 0) }; }
/// macro_rules! n { ($e:expr, $f:expr) => { f($e, $f) }; }
/// g(m!(0))
/// ```
///
/// If called with a span of the call to `f` and a context of the call to `g` this will return a
/// span containing `m!(0)`.
pub fn walk_span_to_context(span: Span, outer: SyntaxContext) -> Option<Span> {
let outer_span = hygiene::walk_chain(span, outer);
(outer_span.ctxt() == outer).then(|| outer_span)
}

/// Removes block comments from the given `Vec` of lines.
///
/// # Examples
@ -1,7 +1,7 @@
use crate::path_to_local_id;
use rustc_hir as hir;
use rustc_hir::intravisit::{self, walk_expr, NestedVisitorMap, Visitor};
use rustc_hir::{Arm, Body, Expr, HirId, Stmt};
use rustc_hir::intravisit::{self, walk_expr, ErasedMap, NestedVisitorMap, Visitor};
use rustc_hir::{Arm, Block, Body, Destination, Expr, ExprKind, HirId, Stmt};
use rustc_lint::LateContext;
use rustc_middle::hir::map::Map;

@ -188,3 +188,54 @@ impl<'v> Visitor<'v> for LocalUsedVisitor<'v> {
NestedVisitorMap::OnlyBodies(self.hir)
}
}

pub trait Visitable<'tcx> {
fn visit<V: Visitor<'tcx>>(self, v: &mut V);
}
impl Visitable<'tcx> for &'tcx Expr<'tcx> {
fn visit<V: Visitor<'tcx>>(self, v: &mut V) {
v.visit_expr(self)
}
}
impl Visitable<'tcx> for &'tcx Block<'tcx> {
fn visit<V: Visitor<'tcx>>(self, v: &mut V) {
v.visit_block(self)
}
}
impl<'tcx> Visitable<'tcx> for &'tcx Stmt<'tcx> {
fn visit<V: Visitor<'tcx>>(self, v: &mut V) {
v.visit_stmt(self)
}
}
impl<'tcx> Visitable<'tcx> for &'tcx Body<'tcx> {
fn visit<V: Visitor<'tcx>>(self, v: &mut V) {
v.visit_body(self)
}
}
impl<'tcx> Visitable<'tcx> for &'tcx Arm<'tcx> {
fn visit<V: Visitor<'tcx>>(self, v: &mut V) {
v.visit_arm(self)
}
}

pub fn visit_break_exprs<'tcx>(
node: impl Visitable<'tcx>,
f: impl FnMut(&'tcx Expr<'tcx>, Destination, Option<&'tcx Expr<'tcx>>),
) {
struct V<F>(F);
impl<'tcx, F: FnMut(&'tcx Expr<'tcx>, Destination, Option<&'tcx Expr<'tcx>>)> Visitor<'tcx> for V<F> {
type Map = ErasedMap<'tcx>;
fn nested_visit_map(&mut self) -> NestedVisitorMap<Self::Map> {
NestedVisitorMap::None
}

fn visit_expr(&mut self, e: &'tcx Expr<'_>) {
if let ExprKind::Break(dest, sub_expr) = e.kind {
self.0(e, dest, sub_expr)
}
walk_expr(self, e);
}
}

node.visit(&mut V(f));
}

@ -454,7 +454,7 @@ in `clippy_lints/src/utils/conf.rs`:
```rust
define_Conf! {
/// Lint: LIST, OF, LINTS, <THE_NEWLY_ADDED_LINT>. The minimum rust version that the project supports
(msrv, "msrv": Option<String>, None),
(msrv: Option<String> = None),
...
}
```
@ -562,7 +562,7 @@ in the following steps:
like this:
```rust
/// Lint: LINT_NAME. <The configuration field doc comment>
(configuration_ident, "configuration_value": Type, DefaultValue),
(configuration_ident: Type = DefaultValue),
```
The configuration value and identifier should usually be the same. The doc comment will be
automatically added to the lint documentation.
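For illustration only (not part of this change), a configuration entry written in the new `(ident: Type = Default)` form could look like the following; the lint name, field name and default value are placeholders:

```rust
define_Conf! {
    /// Lint: EXAMPLE_LINT. The maximum number of items allowed before the lint triggers
    (example_threshold: u64 = 10),
}
```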
@ -1,3 +1,3 @@
[toolchain]
channel = "nightly-2021-04-22"
channel = "nightly-2021-05-06"
components = ["llvm-tools-preview", "rustc-dev", "rust-src"]
@ -106,7 +106,7 @@ impl rustc_driver::Callbacks for ClippyCallbacks {
(previous)(sess, lint_store);
}

let conf = clippy_lints::read_conf(&[], sess);
let conf = clippy_lints::read_conf(sess);
clippy_lints::register_plugins(lint_store, sess, &conf);
clippy_lints::register_pre_expansion_lints(lint_store);
clippy_lints::register_renamed(lint_store);

@ -1,3 +1,8 @@
//! This test is a part of quality control and makes clippy eat what it produces. Awesome lints and
//! long error messages
//!
//! See [Eating your own dog food](https://en.wikipedia.org/wiki/Eating_your_own_dog_food) for context

// Dogfood cannot run on Windows
#![cfg(not(windows))]
#![feature(once_cell)]
@ -17,12 +22,14 @@ fn dogfood_clippy() {
return;
}
let root_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
let enable_metadata_collection = std::env::var("ENABLE_METADATA_COLLECTION").unwrap_or_else(|_| "0".to_string());

let mut command = Command::new(&*CLIPPY_PATH);
command
.current_dir(root_dir)
.env("CLIPPY_DOGFOOD", "1")
.env("CARGO_INCREMENTAL", "0")
.env("ENABLE_METADATA_COLLECTION", &enable_metadata_collection)
.arg("clippy")
.arg("--all-targets")
.arg("--all-features")
@ -1,4 +1,4 @@
error: error reading Clippy's configuration file `$DIR/clippy.toml`: invalid type: integer `42`, expected a sequence
error: error reading Clippy's configuration file `$DIR/clippy.toml`: invalid type: integer `42`, expected a sequence for key `blacklisted-names`

error: aborting due to previous error

@ -1,4 +1,4 @@
error: error reading Clippy's configuration file `$DIR/clippy.toml`: found deprecated field `cyclomatic-complexity-threshold`. Please use `cognitive-complexity-threshold` instead.
error: error reading Clippy's configuration file `$DIR/clippy.toml`: deprecated field `cyclomatic-complexity-threshold`. Please use `cognitive-complexity-threshold` instead

error: aborting due to previous error

@ -1,5 +1,5 @@
|
||||
error: this generic shadows the built-in type `u32`
|
||||
--> $DIR/builtin-type-shadow.rs:4:8
|
||||
--> $DIR/builtin_type_shadow.rs:4:8
|
||||
|
|
||||
LL | fn foo<u32>(a: u32) -> u32 {
|
||||
| ^^^
|
||||
@ -7,7 +7,7 @@ LL | fn foo<u32>(a: u32) -> u32 {
|
||||
= note: `-D clippy::builtin-type-shadow` implied by `-D warnings`
|
||||
|
||||
error[E0308]: mismatched types
|
||||
--> $DIR/builtin-type-shadow.rs:5:5
|
||||
--> $DIR/builtin_type_shadow.rs:5:5
|
||||
|
|
||||
LL | fn foo<u32>(a: u32) -> u32 {
|
||||
| --- --- expected `u32` because of return type
|
@ -203,4 +203,32 @@ mod issue_5212 {
|
||||
}
|
||||
}
|
||||
|
||||
enum Sign {
|
||||
Negative,
|
||||
Positive,
|
||||
Zero,
|
||||
}
|
||||
|
||||
impl Sign {
|
||||
const fn sign_i8(n: i8) -> Self {
|
||||
if n == 0 {
|
||||
Sign::Zero
|
||||
} else if n > 0 {
|
||||
Sign::Positive
|
||||
} else {
|
||||
Sign::Negative
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const fn sign_i8(n: i8) -> Sign {
|
||||
if n == 0 {
|
||||
Sign::Zero
|
||||
} else if n > 0 {
|
||||
Sign::Positive
|
||||
} else {
|
||||
Sign::Negative
|
||||
}
|
||||
}
|
||||
|
||||
fn main() {}
|
||||
|
@ -16,8 +16,6 @@ impl Iterator for Countdown {
|
||||
|
||||
fn main() {
|
||||
let my_iterator = Countdown(5);
|
||||
let a: Vec<_> = my_iterator.take(1).collect();
|
||||
assert_eq!(a.len(), 1);
|
||||
let b: Vec<_> = my_iterator.collect();
|
||||
assert_eq!(b.len(), 5);
|
||||
assert_eq!(my_iterator.take(1).count(), 1);
|
||||
assert_eq!(my_iterator.count(), 5);
|
||||
}
|
||||
|
9 src/tools/clippy/tests/ui/crashes/ice-7169.rs Normal file
@ -0,0 +1,9 @@
#[derive(Default)]
struct A<T> {
a: Vec<A<T>>,
b: T,
}

fn main() {
if let Ok(_) = Ok::<_, ()>(A::<String>::default()) {}
}
10 src/tools/clippy/tests/ui/crashes/ice-7169.stderr Normal file
@ -0,0 +1,10 @@
error: redundant pattern matching, consider using `is_ok()`
--> $DIR/ice-7169.rs:8:12
|
LL | if let Ok(_) = Ok::<_, ()>(A::<String>::default()) {}
| -------^^^^^-------------------------------------- help: try this: `if Ok::<_, ()>(A::<String>::default()).is_ok()`
|
= note: `-D clippy::redundant-pattern-matching` implied by `-D warnings`

error: aborting due to previous error

@ -1,3 +1,5 @@
|
||||
// edition:2018
|
||||
|
||||
#[warn(clippy::eval_order_dependence)]
|
||||
#[allow(
|
||||
unused_assignments,
|
||||
@ -107,3 +109,7 @@ fn main() {
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
async fn issue_6925() {
|
||||
let _ = vec![async { true }.await, async { false }.await];
|
||||
}
|
||||
|
@ -1,48 +1,48 @@
|
||||
error: unsequenced read of a variable
|
||||
--> $DIR/eval_order_dependence.rs:15:9
|
||||
error: unsequenced read of `x`
|
||||
--> $DIR/eval_order_dependence.rs:17:9
|
||||
|
|
||||
LL | } + x;
|
||||
| ^
|
||||
|
|
||||
= note: `-D clippy::eval-order-dependence` implied by `-D warnings`
|
||||
note: whether read occurs before this write depends on evaluation order
|
||||
--> $DIR/eval_order_dependence.rs:13:9
|
||||
--> $DIR/eval_order_dependence.rs:15:9
|
||||
|
|
||||
LL | x = 1;
|
||||
| ^^^^^
|
||||
|
||||
error: unsequenced read of a variable
|
||||
--> $DIR/eval_order_dependence.rs:18:5
|
||||
error: unsequenced read of `x`
|
||||
--> $DIR/eval_order_dependence.rs:20:5
|
||||
|
|
||||
LL | x += {
|
||||
| ^
|
||||
|
|
||||
note: whether read occurs before this write depends on evaluation order
|
||||
--> $DIR/eval_order_dependence.rs:19:9
|
||||
--> $DIR/eval_order_dependence.rs:21:9
|
||||
|
|
||||
LL | x = 20;
|
||||
| ^^^^^^
|
||||
|
||||
error: unsequenced read of a variable
|
||||
--> $DIR/eval_order_dependence.rs:31:12
|
||||
error: unsequenced read of `x`
|
||||
--> $DIR/eval_order_dependence.rs:33:12
|
||||
|
|
||||
LL | a: x,
|
||||
| ^
|
||||
|
|
||||
note: whether read occurs before this write depends on evaluation order
|
||||
--> $DIR/eval_order_dependence.rs:33:13
|
||||
--> $DIR/eval_order_dependence.rs:35:13
|
||||
|
|
||||
LL | x = 6;
|
||||
| ^^^^^
|
||||
|
||||
error: unsequenced read of a variable
|
||||
--> $DIR/eval_order_dependence.rs:40:9
|
||||
error: unsequenced read of `x`
|
||||
--> $DIR/eval_order_dependence.rs:42:9
|
||||
|
|
||||
LL | x += {
|
||||
| ^
|
||||
|
|
||||
note: whether read occurs before this write depends on evaluation order
|
||||
--> $DIR/eval_order_dependence.rs:41:13
|
||||
--> $DIR/eval_order_dependence.rs:43:13
|
||||
|
|
||||
LL | x = 20;
|
||||
| ^^^^^^
|
||||
|
@ -1,7 +1,8 @@
|
||||
// edition:2018
|
||||
// run-rustfix
|
||||
|
||||
#![warn(clippy::implicit_return)]
|
||||
#![allow(clippy::needless_return, unused)]
|
||||
#![allow(clippy::needless_return, clippy::needless_bool, unused, clippy::never_loop)]
|
||||
|
||||
fn test_end_of_fn() -> bool {
|
||||
if true {
|
||||
@ -12,7 +13,6 @@ fn test_end_of_fn() -> bool {
|
||||
return true
|
||||
}
|
||||
|
||||
#[allow(clippy::needless_bool)]
|
||||
fn test_if_block() -> bool {
|
||||
if true { return true } else { return false }
|
||||
}
|
||||
@ -25,7 +25,6 @@ fn test_match(x: bool) -> bool {
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::needless_return)]
|
||||
fn test_match_with_unreachable(x: bool) -> bool {
|
||||
match x {
|
||||
true => return false,
|
||||
@ -33,14 +32,12 @@ fn test_match_with_unreachable(x: bool) -> bool {
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::never_loop)]
|
||||
fn test_loop() -> bool {
|
||||
loop {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::never_loop)]
|
||||
fn test_loop_with_block() -> bool {
|
||||
loop {
|
||||
{
|
||||
@ -49,7 +46,6 @@ fn test_loop_with_block() -> bool {
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::never_loop)]
|
||||
fn test_loop_with_nests() -> bool {
|
||||
loop {
|
||||
if true {
|
||||
@ -83,15 +79,53 @@ fn test_return_macro() -> String {
|
||||
return format!("test {}", "test")
|
||||
}
|
||||
|
||||
fn main() {
|
||||
let _ = test_end_of_fn();
|
||||
let _ = test_if_block();
|
||||
let _ = test_match(true);
|
||||
let _ = test_match_with_unreachable(true);
|
||||
let _ = test_loop();
|
||||
let _ = test_loop_with_block();
|
||||
let _ = test_loop_with_nests();
|
||||
let _ = test_loop_with_if_let();
|
||||
test_closure();
|
||||
let _ = test_return_macro();
|
||||
fn macro_branch_test() -> bool {
|
||||
macro_rules! m {
|
||||
($t:expr, $f:expr) => {
|
||||
if true { $t } else { $f }
|
||||
};
|
||||
}
|
||||
return m!(true, false)
|
||||
}
|
||||
|
||||
fn loop_test() -> bool {
|
||||
'outer: loop {
|
||||
if true {
|
||||
return true;
|
||||
}
|
||||
|
||||
let _ = loop {
|
||||
if false {
|
||||
return false;
|
||||
}
|
||||
if true {
|
||||
break true;
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
fn loop_macro_test() -> bool {
|
||||
macro_rules! m {
|
||||
($e:expr) => {
|
||||
break $e
|
||||
};
|
||||
}
|
||||
return loop {
|
||||
m!(true);
|
||||
}
|
||||
}
|
||||
|
||||
fn divergent_test() -> bool {
|
||||
fn diverge() -> ! {
|
||||
panic!()
|
||||
}
|
||||
diverge()
|
||||
}
|
||||
|
||||
// issue #6940
|
||||
async fn foo() -> bool {
|
||||
return true
|
||||
}
|
||||
|
||||
fn main() {}
|
||||
|
@ -1,7 +1,8 @@
|
||||
// edition:2018
|
||||
// run-rustfix
|
||||
|
||||
#![warn(clippy::implicit_return)]
|
||||
#![allow(clippy::needless_return, unused)]
|
||||
#![allow(clippy::needless_return, clippy::needless_bool, unused, clippy::never_loop)]
|
||||
|
||||
fn test_end_of_fn() -> bool {
|
||||
if true {
|
||||
@ -12,7 +13,6 @@ fn test_end_of_fn() -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
#[allow(clippy::needless_bool)]
|
||||
fn test_if_block() -> bool {
|
||||
if true { true } else { false }
|
||||
}
|
||||
@ -25,7 +25,6 @@ fn test_match(x: bool) -> bool {
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::needless_return)]
|
||||
fn test_match_with_unreachable(x: bool) -> bool {
|
||||
match x {
|
||||
true => return false,
|
||||
@ -33,14 +32,12 @@ fn test_match_with_unreachable(x: bool) -> bool {
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::never_loop)]
|
||||
fn test_loop() -> bool {
|
||||
loop {
|
||||
break true;
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::never_loop)]
|
||||
fn test_loop_with_block() -> bool {
|
||||
loop {
|
||||
{
|
||||
@ -49,7 +46,6 @@ fn test_loop_with_block() -> bool {
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::never_loop)]
|
||||
fn test_loop_with_nests() -> bool {
|
||||
loop {
|
||||
if true {
|
||||
@ -83,15 +79,53 @@ fn test_return_macro() -> String {
|
||||
format!("test {}", "test")
|
||||
}
|
||||
|
||||
fn main() {
|
||||
let _ = test_end_of_fn();
|
||||
let _ = test_if_block();
|
||||
let _ = test_match(true);
|
||||
let _ = test_match_with_unreachable(true);
|
||||
let _ = test_loop();
|
||||
let _ = test_loop_with_block();
|
||||
let _ = test_loop_with_nests();
|
||||
let _ = test_loop_with_if_let();
|
||||
test_closure();
|
||||
let _ = test_return_macro();
|
||||
fn macro_branch_test() -> bool {
|
||||
macro_rules! m {
|
||||
($t:expr, $f:expr) => {
|
||||
if true { $t } else { $f }
|
||||
};
|
||||
}
|
||||
m!(true, false)
|
||||
}
|
||||
|
||||
fn loop_test() -> bool {
|
||||
'outer: loop {
|
||||
if true {
|
||||
break true;
|
||||
}
|
||||
|
||||
let _ = loop {
|
||||
if false {
|
||||
break 'outer false;
|
||||
}
|
||||
if true {
|
||||
break true;
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
fn loop_macro_test() -> bool {
|
||||
macro_rules! m {
|
||||
($e:expr) => {
|
||||
break $e
|
||||
};
|
||||
}
|
||||
loop {
|
||||
m!(true);
|
||||
}
|
||||
}
|
||||
|
||||
fn divergent_test() -> bool {
|
||||
fn diverge() -> ! {
|
||||
panic!()
|
||||
}
|
||||
diverge()
|
||||
}
|
||||
|
||||
// issue #6940
|
||||
async fn foo() -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn main() {}
|
||||
|
@ -1,5 +1,5 @@
|
||||
error: missing `return` statement
|
||||
--> $DIR/implicit_return.rs:12:5
|
||||
--> $DIR/implicit_return.rs:13:5
|
||||
|
|
||||
LL | true
|
||||
| ^^^^ help: add `return` as shown: `return true`
|
||||
@ -31,40 +31,79 @@ LL | false => { true },
|
||||
| ^^^^ help: add `return` as shown: `return true`
|
||||
|
||||
error: missing `return` statement
|
||||
--> $DIR/implicit_return.rs:39:9
|
||||
--> $DIR/implicit_return.rs:37:9
|
||||
|
|
||||
LL | break true;
|
||||
| ^^^^^^^^^^ help: change `break` to `return` as shown: `return true`
|
||||
|
||||
error: missing `return` statement
|
||||
--> $DIR/implicit_return.rs:47:13
|
||||
--> $DIR/implicit_return.rs:44:13
|
||||
|
|
||||
LL | break true;
|
||||
| ^^^^^^^^^^ help: change `break` to `return` as shown: `return true`
|
||||
|
||||
error: missing `return` statement
|
||||
--> $DIR/implicit_return.rs:56:13
|
||||
--> $DIR/implicit_return.rs:52:13
|
||||
|
|
||||
LL | break true;
|
||||
| ^^^^^^^^^^ help: change `break` to `return` as shown: `return true`
|
||||
|
||||
error: missing `return` statement
|
||||
--> $DIR/implicit_return.rs:74:18
|
||||
--> $DIR/implicit_return.rs:70:18
|
||||
|
|
||||
LL | let _ = || { true };
|
||||
| ^^^^ help: add `return` as shown: `return true`
|
||||
|
||||
error: missing `return` statement
|
||||
--> $DIR/implicit_return.rs:75:16
|
||||
--> $DIR/implicit_return.rs:71:16
|
||||
|
|
||||
LL | let _ = || true;
|
||||
| ^^^^ help: add `return` as shown: `return true`
|
||||
|
||||
error: missing `return` statement
|
||||
--> $DIR/implicit_return.rs:83:5
|
||||
--> $DIR/implicit_return.rs:79:5
|
||||
|
|
||||
LL | format!("test {}", "test")
|
||||
| ^^^^^^^^^^^^^^^^^^^^^^^^^^ help: add `return` as shown: `return format!("test {}", "test")`
|
||||
|
||||
error: aborting due to 11 previous errors
|
||||
error: missing `return` statement
|
||||
--> $DIR/implicit_return.rs:88:5
|
||||
|
|
||||
LL | m!(true, false)
|
||||
| ^^^^^^^^^^^^^^^ help: add `return` as shown: `return m!(true, false)`
|
||||
|
||||
error: missing `return` statement
|
||||
--> $DIR/implicit_return.rs:94:13
|
||||
|
|
||||
LL | break true;
|
||||
| ^^^^^^^^^^ help: change `break` to `return` as shown: `return true`
|
||||
|
||||
error: missing `return` statement
|
||||
--> $DIR/implicit_return.rs:99:17
|
||||
|
|
||||
LL | break 'outer false;
|
||||
| ^^^^^^^^^^^^^^^^^^ help: change `break` to `return` as shown: `return false`
|
||||
|
||||
error: missing `return` statement
|
||||
--> $DIR/implicit_return.rs:114:5
|
||||
|
|
||||
LL | / loop {
|
||||
LL | | m!(true);
|
||||
LL | | }
|
||||
| |_____^
|
||||
|
|
||||
help: add `return` as shown
|
||||
|
|
||||
LL | return loop {
|
||||
LL | m!(true);
|
||||
LL | }
|
||||
|
|
||||
|
||||
error: missing `return` statement
|
||||
--> $DIR/implicit_return.rs:128:5
|
||||
|
|
||||
LL | true
|
||||
| ^^^^ help: add `return` as shown: `return true`
|
||||
|
||||
error: aborting due to 16 previous errors
|
||||
|
||||
|
@ -192,11 +192,23 @@ fn while_loop_with_break_and_return() {
|
||||
}
|
||||
}
|
||||
|
||||
fn immutable_condition_false_positive(mut n: u64) -> u32 {
|
||||
let mut count = 0;
|
||||
while {
|
||||
n >>= 1;
|
||||
n != 0
|
||||
} {
|
||||
count += 1;
|
||||
}
|
||||
count
|
||||
}
|
||||
|
||||
fn main() {
|
||||
immutable_condition();
|
||||
unused_var();
|
||||
used_immutable();
|
||||
internally_mutable();
|
||||
immutable_condition_false_positive(5);
|
||||
|
||||
let mut c = Counter { count: 0 };
|
||||
c.inc_n(5);
|
||||
|
@ -1,4 +1,4 @@
|
||||
use std::collections::{HashMap, VecDeque};
|
||||
use std::collections::{BinaryHeap, HashMap, LinkedList, VecDeque};
|
||||
|
||||
fn main() {
|
||||
let sample = [1; 5];
|
||||
@ -43,3 +43,35 @@ fn main() {
|
||||
.collect::<Vec<_>>();
|
||||
}
|
||||
}
|
||||
|
||||
mod issue7110 {
|
||||
// #7110 - lint for type annotation cases
|
||||
use super::*;
|
||||
|
||||
fn lint_vec(string: &str) -> usize {
|
||||
let buffer: Vec<&str> = string.split('/').collect();
|
||||
buffer.len()
|
||||
}
|
||||
fn lint_vec_deque() -> usize {
|
||||
let sample = [1; 5];
|
||||
let indirect_len: VecDeque<_> = sample.iter().collect();
|
||||
indirect_len.len()
|
||||
}
|
||||
fn lint_linked_list() -> usize {
|
||||
let sample = [1; 5];
|
||||
let indirect_len: LinkedList<_> = sample.iter().collect();
|
||||
indirect_len.len()
|
||||
}
|
||||
fn lint_binary_heap() -> usize {
|
||||
let sample = [1; 5];
|
||||
let indirect_len: BinaryHeap<_> = sample.iter().collect();
|
||||
indirect_len.len()
|
||||
}
|
||||
fn dont_lint(string: &str) -> usize {
|
||||
let buffer: Vec<&str> = string.split('/').collect();
|
||||
for buff in &buffer {
|
||||
println!("{}", buff);
|
||||
}
|
||||
buffer.len()
|
||||
}
|
||||
}
|
||||
|
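As a plain-Rust illustration of the new `needless_collect` cases exercised above (a standalone sketch, not part of the test suite), collecting into a container only to call `len` can be replaced by counting the iterator directly:

```rust
use std::collections::VecDeque;

fn main() {
    let sample = [1; 5];
    // Collecting just to measure the length builds an intermediate VecDeque...
    let indirect_len: VecDeque<_> = sample.iter().collect();
    assert_eq!(indirect_len.len(), 5);
    // ...while the iterator can report the same count without allocating.
    assert_eq!(sample.iter().count(), 5);
}
```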
@ -69,5 +69,61 @@ LL |
|
||||
LL | sample.into_iter().any(|x| x == a);
|
||||
|
|
||||
|
||||
error: aborting due to 5 previous errors
|
||||
error: avoid using `collect()` when not needed
|
||||
--> $DIR/needless_collect_indirect.rs:52:51
|
||||
|
|
||||
LL | let buffer: Vec<&str> = string.split('/').collect();
|
||||
| ^^^^^^^
|
||||
LL | buffer.len()
|
||||
| ------------ the iterator could be used here instead
|
||||
|
|
||||
help: take the original Iterator's count instead of collecting it and finding the length
|
||||
|
|
||||
LL |
|
||||
LL | string.split('/').count()
|
||||
|
|
||||
|
||||
error: avoid using `collect()` when not needed
|
||||
--> $DIR/needless_collect_indirect.rs:57:55
|
||||
|
|
||||
LL | let indirect_len: VecDeque<_> = sample.iter().collect();
|
||||
| ^^^^^^^
|
||||
LL | indirect_len.len()
|
||||
| ------------------ the iterator could be used here instead
|
||||
|
|
||||
help: take the original Iterator's count instead of collecting it and finding the length
|
||||
|
|
||||
LL |
|
||||
LL | sample.iter().count()
|
||||
|
|
||||
|
||||
error: avoid using `collect()` when not needed
|
||||
--> $DIR/needless_collect_indirect.rs:62:57
|
||||
|
|
||||
LL | let indirect_len: LinkedList<_> = sample.iter().collect();
|
||||
| ^^^^^^^
|
||||
LL | indirect_len.len()
|
||||
| ------------------ the iterator could be used here instead
|
||||
|
|
||||
help: take the original Iterator's count instead of collecting it and finding the length
|
||||
|
|
||||
LL |
|
||||
LL | sample.iter().count()
|
||||
|
|
||||
|
||||
error: avoid using `collect()` when not needed
|
||||
--> $DIR/needless_collect_indirect.rs:67:57
|
||||
|
|
||||
LL | let indirect_len: BinaryHeap<_> = sample.iter().collect();
|
||||
| ^^^^^^^
|
||||
LL | indirect_len.len()
|
||||
| ------------------ the iterator could be used here instead
|
||||
|
|
||||
help: take the original Iterator's count instead of collecting it and finding the length
|
||||
|
|
||||
LL |
|
||||
LL | sample.iter().count()
|
||||
|
|
||||
|
||||
error: aborting due to 9 previous errors
|
||||
|
||||
|
@ -33,6 +33,8 @@ fn main() {
x.rmatch_indices('x');
x.trim_start_matches('x');
x.trim_end_matches('x');
x.strip_prefix('x');
x.strip_suffix('x');
// Make sure we escape characters correctly.
x.split('\n');
x.split('\'');
@ -33,6 +33,8 @@ fn main() {
x.rmatch_indices("x");
x.trim_start_matches("x");
x.trim_end_matches("x");
x.strip_prefix("x");
x.strip_suffix("x");
// Make sure we escape characters correctly.
x.split("\n");
x.split("'");
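For context (a standalone sketch, not part of the diff): `strip_prefix` and `strip_suffix` accept both `&str` and `char` patterns with identical results, which is why the lint can now suggest the `char` form for these two methods as well:

```rust
fn main() {
    let x = "xhello";
    // The one-character string pattern works, but `single_char_pattern` now flags it here too...
    assert_eq!(x.strip_prefix("x"), Some("hello"));
    // ...suggesting the equivalent `char` pattern instead.
    assert_eq!(x.strip_prefix('x'), Some("hello"));
    assert_eq!("hello.x".strip_suffix('x'), Some("hello."));
}
```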
@ -121,64 +121,76 @@ LL | x.trim_end_matches("x");
|
||||
| ^^^ help: try using a `char` instead: `'x'`
|
||||
|
||||
error: single-character string constant used as pattern
|
||||
--> $DIR/single_char_pattern.rs:37:13
|
||||
--> $DIR/single_char_pattern.rs:36:20
|
||||
|
|
||||
LL | x.strip_prefix("x");
|
||||
| ^^^ help: try using a `char` instead: `'x'`
|
||||
|
||||
error: single-character string constant used as pattern
|
||||
--> $DIR/single_char_pattern.rs:37:20
|
||||
|
|
||||
LL | x.strip_suffix("x");
|
||||
| ^^^ help: try using a `char` instead: `'x'`
|
||||
|
||||
error: single-character string constant used as pattern
|
||||
--> $DIR/single_char_pattern.rs:39:13
|
||||
|
|
||||
LL | x.split("/n");
|
||||
| ^^^^ help: try using a `char` instead: `'/n'`
|
||||
|
||||
error: single-character string constant used as pattern
|
||||
--> $DIR/single_char_pattern.rs:38:13
|
||||
--> $DIR/single_char_pattern.rs:40:13
|
||||
|
|
||||
LL | x.split("'");
|
||||
| ^^^ help: try using a `char` instead: `'/''`
|
||||
|
||||
error: single-character string constant used as pattern
|
||||
--> $DIR/single_char_pattern.rs:39:13
|
||||
--> $DIR/single_char_pattern.rs:41:13
|
||||
|
|
||||
LL | x.split("/'");
|
||||
| ^^^^ help: try using a `char` instead: `'/''`
|
||||
|
||||
error: single-character string constant used as pattern
|
||||
--> $DIR/single_char_pattern.rs:44:31
|
||||
--> $DIR/single_char_pattern.rs:46:31
|
||||
|
|
||||
LL | x.replace(";", ",").split(","); // issue #2978
|
||||
| ^^^ help: try using a `char` instead: `','`
|
||||
|
||||
error: single-character string constant used as pattern
|
||||
--> $DIR/single_char_pattern.rs:45:19
|
||||
--> $DIR/single_char_pattern.rs:47:19
|
||||
|
|
||||
LL | x.starts_with("/x03"); // issue #2996
|
||||
| ^^^^^^ help: try using a `char` instead: `'/x03'`
|
||||
|
||||
error: single-character string constant used as pattern
|
||||
--> $DIR/single_char_pattern.rs:52:13
|
||||
--> $DIR/single_char_pattern.rs:54:13
|
||||
|
|
||||
LL | x.split(r"a");
|
||||
| ^^^^ help: try using a `char` instead: `'a'`
|
||||
|
||||
error: single-character string constant used as pattern
|
||||
--> $DIR/single_char_pattern.rs:53:13
|
||||
--> $DIR/single_char_pattern.rs:55:13
|
||||
|
|
||||
LL | x.split(r#"a"#);
|
||||
| ^^^^^^ help: try using a `char` instead: `'a'`
|
||||
|
||||
error: single-character string constant used as pattern
|
||||
--> $DIR/single_char_pattern.rs:54:13
|
||||
--> $DIR/single_char_pattern.rs:56:13
|
||||
|
|
||||
LL | x.split(r###"a"###);
|
||||
| ^^^^^^^^^^ help: try using a `char` instead: `'a'`
|
||||
|
||||
error: single-character string constant used as pattern
|
||||
--> $DIR/single_char_pattern.rs:55:13
|
||||
--> $DIR/single_char_pattern.rs:57:13
|
||||
|
|
||||
LL | x.split(r###"'"###);
|
||||
| ^^^^^^^^^^ help: try using a `char` instead: `'/''`
|
||||
|
||||
error: single-character string constant used as pattern
|
||||
--> $DIR/single_char_pattern.rs:56:13
|
||||
--> $DIR/single_char_pattern.rs:58:13
|
||||
|
|
||||
LL | x.split(r###"#"###);
|
||||
| ^^^^^^^^^^ help: try using a `char` instead: `'#'`
|
||||
|
||||
error: aborting due to 30 previous errors
|
||||
error: aborting due to 32 previous errors
|
||||
|
||||
|
@ -15,3 +15,7 @@ fn main() {
|
||||
|
||||
let _ = (0..4).filter_map(i32::checked_abs);
|
||||
}
|
||||
|
||||
fn filter_map_none_changes_item_type() -> impl Iterator<Item = bool> {
|
||||
"".chars().filter_map(|_| None)
|
||||
}
|
||||
|
@ -80,3 +80,10 @@ fn test2(){}
|
||||
|
||||
#[rustfmt::skip]
|
||||
fn test3(){}
|
||||
|
||||
fn macro_expr() {
|
||||
macro_rules! e {
|
||||
() => (());
|
||||
}
|
||||
e!()
|
||||
}
|
||||
|
@ -80,3 +80,10 @@ fn test2() ->(){}
|
||||
|
||||
#[rustfmt::skip]
|
||||
fn test3()-> (){}
|
||||
|
||||
fn macro_expr() {
|
||||
macro_rules! e {
|
||||
() => (());
|
||||
}
|
||||
e!()
|
||||
}
|
||||
|
@ -14,7 +14,7 @@ lintname_re = re.compile(r'''pub\s+([A-Z_][A-Z_0-9]*)''')
group_re = re.compile(r'''\s*([a-z_][a-z_0-9]+)''')
conf_re = re.compile(r'''define_Conf! {\n([^}]*)\n}''', re.MULTILINE)
confvar_re = re.compile(
r'''/// Lint: ([\w,\s]+)\. (.*)\n\s*\([^,]+,\s+"([^"]+)":\s+([^,]+),\s+([^\.\)]+).*\),''', re.MULTILINE)
r'''/// Lint: ([\w,\s]+)\. (.*)\n\s*\(([^:]+):\s*([^\s=]+)\s*=\s*([^\.\)]+).*\),''', re.MULTILINE)
comment_re = re.compile(r'''\s*/// ?(.*)''')

lint_levels = {