Merge from rustc

The Miri Conjob Bot 2023-10-07 05:10:55 +00:00
commit 722736ac4a
254 changed files with 7137 additions and 4948 deletions

View File

@ -508,10 +508,12 @@ checksum = "2da6da31387c7e4ef160ffab6d5e7f00c42626fe39aea70a7b0f1773f7dd6c1b"
[[package]]
name = "clippy"
version = "0.1.74"
version = "0.1.75"
dependencies = [
"anstream",
"clippy_lints",
"clippy_utils",
"color-print",
"filetime",
"futures",
"if_chain",
@ -546,7 +548,7 @@ dependencies = [
[[package]]
name = "clippy_lints"
version = "0.1.74"
version = "0.1.75"
dependencies = [
"arrayvec",
"cargo_metadata",
@ -566,11 +568,12 @@ dependencies = [
"unicode-normalization",
"unicode-script",
"url",
"walkdir",
]
[[package]]
name = "clippy_utils"
version = "0.1.74"
version = "0.1.75"
dependencies = [
"arrayvec",
"if_chain",
@ -603,6 +606,27 @@ dependencies = [
"tracing-error",
]
[[package]]
name = "color-print"
version = "0.3.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7a858372ff14bab9b1b30ea504f2a4bc534582aee3e42ba2d41d2a7baba63d5d"
dependencies = [
"color-print-proc-macro",
]
[[package]]
name = "color-print-proc-macro"
version = "0.3.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "57e37866456a721d0a404439a1adae37a31be4e0055590d053dfe6981e05003f"
dependencies = [
"nom",
"proc-macro2",
"quote",
"syn 1.0.109",
]
[[package]]
name = "color-spantrace"
version = "0.2.0"
@ -933,7 +957,7 @@ checksum = "a0afaad2b26fa326569eb264b1363e8ae3357618c43982b3f285f0774ce76b69"
[[package]]
name = "declare_clippy_lint"
version = "0.1.74"
version = "0.1.75"
dependencies = [
"itertools",
"quote",
@ -4311,7 +4335,6 @@ dependencies = [
"rustc_errors",
"rustc_hir",
"rustc_index",
"rustc_macros",
"rustc_middle",
"rustc_query_system",
"rustc_serialize",
@ -4417,7 +4440,6 @@ dependencies = [
"rustc_hir",
"rustc_interface",
"rustc_middle",
"rustc_session",
"rustc_span",
"rustc_target",
"stable_mir",
@ -4991,6 +5013,7 @@ version = "0.0.0"
dependencies = [
"addr2line",
"alloc",
"cc",
"cfg-if",
"compiler_builtins",
"core",

View File

@ -353,28 +353,28 @@ pub fn find_body_stability(
body_stab
}
fn insert_or_error(sess: &Session, meta: &MetaItem, item: &mut Option<Symbol>) -> Option<()> {
if item.is_some() {
handle_errors(
&sess.parse_sess,
meta.span,
AttrError::MultipleItem(pprust::path_to_string(&meta.path)),
);
None
} else if let Some(v) = meta.value_str() {
*item = Some(v);
Some(())
} else {
sess.emit_err(session_diagnostics::IncorrectMetaItem { span: meta.span });
None
}
}
/// Read the content of a `stable`/`rustc_const_stable` attribute, and return the feature name and
/// its stability information.
fn parse_stability(sess: &Session, attr: &Attribute) -> Option<(Symbol, StabilityLevel)> {
let meta = attr.meta()?;
let MetaItem { kind: MetaItemKind::List(ref metas), .. } = meta else { return None };
let insert_or_error = |meta: &MetaItem, item: &mut Option<Symbol>| {
if item.is_some() {
handle_errors(
&sess.parse_sess,
meta.span,
AttrError::MultipleItem(pprust::path_to_string(&meta.path)),
);
return false;
}
if let Some(v) = meta.value_str() {
*item = Some(v);
true
} else {
sess.emit_err(session_diagnostics::IncorrectMetaItem { span: meta.span });
false
}
};
let mut feature = None;
let mut since = None;
@ -389,16 +389,8 @@ fn parse_stability(sess: &Session, attr: &Attribute) -> Option<(Symbol, Stabilit
};
match mi.name_or_empty() {
sym::feature => {
if !insert_or_error(mi, &mut feature) {
return None;
}
}
sym::since => {
if !insert_or_error(mi, &mut since) {
return None;
}
}
sym::feature => insert_or_error(sess, mi, &mut feature)?,
sym::since => insert_or_error(sess, mi, &mut since)?,
_ => {
handle_errors(
&sess.parse_sess,
@ -438,23 +430,6 @@ fn parse_stability(sess: &Session, attr: &Attribute) -> Option<(Symbol, Stabilit
fn parse_unstability(sess: &Session, attr: &Attribute) -> Option<(Symbol, StabilityLevel)> {
let meta = attr.meta()?;
let MetaItem { kind: MetaItemKind::List(ref metas), .. } = meta else { return None };
let insert_or_error = |meta: &MetaItem, item: &mut Option<Symbol>| {
if item.is_some() {
handle_errors(
&sess.parse_sess,
meta.span,
AttrError::MultipleItem(pprust::path_to_string(&meta.path)),
);
return false;
}
if let Some(v) = meta.value_str() {
*item = Some(v);
true
} else {
sess.emit_err(session_diagnostics::IncorrectMetaItem { span: meta.span });
false
}
};
let mut feature = None;
let mut reason = None;
@ -473,20 +448,10 @@ fn parse_unstability(sess: &Session, attr: &Attribute) -> Option<(Symbol, Stabil
};
match mi.name_or_empty() {
sym::feature => {
if !insert_or_error(mi, &mut feature) {
return None;
}
}
sym::reason => {
if !insert_or_error(mi, &mut reason) {
return None;
}
}
sym::feature => insert_or_error(sess, mi, &mut feature)?,
sym::reason => insert_or_error(sess, mi, &mut reason)?,
sym::issue => {
if !insert_or_error(mi, &mut issue) {
return None;
}
insert_or_error(sess, mi, &mut issue)?;
// These unwraps are safe because `insert_or_error` ensures the meta item
// is a name/value pair string literal.
@ -515,11 +480,7 @@ fn parse_unstability(sess: &Session, attr: &Attribute) -> Option<(Symbol, Stabil
}
is_soft = true;
}
sym::implied_by => {
if !insert_or_error(mi, &mut implied_by) {
return None;
}
}
sym::implied_by => insert_or_error(sess, mi, &mut implied_by)?,
_ => {
handle_errors(
&sess.parse_sess,
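A minimal, self-contained sketch of the pattern this hunk switches to, with simplified types rather than the rustc ones: having the helper return `Option<()>` lets each call site bail out with `?` instead of wrapping every call in an `if !insert_or_error(..) { return None; }` block.

```rust
// Sketch only: `String` stands in for rustc's `Symbol`, and the error
// reporting is reduced to an eprintln!.
fn insert_or_error(value: &str, slot: &mut Option<String>) -> Option<()> {
    if slot.is_some() {
        // Duplicate meta item: report it and signal failure.
        eprintln!("error: multiple items");
        None
    } else {
        *slot = Some(value.to_string());
        Some(())
    }
}

fn parse(values: &[&str]) -> Option<String> {
    let mut feature = None;
    for v in values {
        // `?` propagates the failure, replacing the old boolean check.
        insert_or_error(v, &mut feature)?;
    }
    feature
}

fn main() {
    assert_eq!(parse(&["foo"]), Some("foo".to_string()));
    assert_eq!(parse(&["foo", "bar"]), None);
}
```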

View File

@ -368,7 +368,7 @@ fn check_opaque_type_well_formed<'tcx>(
if errors.is_empty() {
Ok(definition_ty)
} else {
Err(infcx.err_ctxt().report_fulfillment_errors(&errors))
Err(infcx.err_ctxt().report_fulfillment_errors(errors))
}
}

View File

@ -250,17 +250,6 @@ pub(crate) fn verify_func(
}
fn codegen_fn_body(fx: &mut FunctionCx<'_, '_, '_>, start_block: Block) {
if let Err(err) =
fx.mir.post_mono_checks(fx.tcx, ty::ParamEnv::reveal_all(), |c| Ok(fx.monomorphize(c)))
{
err.emit_err(fx.tcx);
fx.bcx.append_block_params_for_function_params(fx.block_map[START_BLOCK]);
fx.bcx.switch_to_block(fx.block_map[START_BLOCK]);
// compilation should have been aborted
fx.bcx.ins().trap(TrapCode::UnreachableCodeReached);
return;
}
let arg_uninhabited = fx
.mir
.args_iter()

View File

@ -21,6 +21,8 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
}
pub fn eval_mir_constant(&self, constant: &mir::ConstOperand<'tcx>) -> mir::ConstValue<'tcx> {
// `MirUsedCollector` visited all constants before codegen began, so if we got here there
// can be no more constants that fail to evaluate.
self.monomorphize(constant.const_)
.eval(self.cx.tcx(), ty::ParamEnv::reveal_all(), Some(constant.span))
.expect("erroneous constant not captured by required_consts")

View File

@ -209,18 +209,11 @@ pub fn codegen_mir<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>(
caller_location: None,
};
fx.per_local_var_debug_info = fx.compute_per_local_var_debug_info(&mut start_bx);
// It may seem like we should iterate over `required_consts` to ensure they all successfully
// evaluate; however, the `MirUsedCollector` already did that during the collection phase of
// monomorphization so we don't have to do it again.
// Rust post-monomorphization checks; we later rely on them.
if let Err(err) =
mir.post_mono_checks(cx.tcx(), ty::ParamEnv::reveal_all(), |c| Ok(fx.monomorphize(c)))
{
err.emit_err(cx.tcx());
// This IR shouldn't ever be emitted, but let's try to guard against any of this code
// ever running.
start_bx.abort();
return;
}
fx.per_local_var_debug_info = fx.compute_per_local_var_debug_info(&mut start_bx);
let memory_locals = analyze::non_ssa_locals(&fx);

View File

@ -750,12 +750,14 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
// Make sure all the constants required by this frame evaluate successfully (post-monomorphization check).
if M::POST_MONO_CHECKS {
// `ctfe_query` does some error message decoration that we want to be in effect here.
self.ctfe_query(None, |tcx| {
body.post_mono_checks(*tcx, self.param_env, |c| {
self.subst_from_current_frame_and_normalize_erasing_regions(c)
})
})?;
for &const_ in &body.required_consts {
let c =
self.subst_from_current_frame_and_normalize_erasing_regions(const_.const_)?;
c.eval(*self.tcx, self.param_env, Some(const_.span)).map_err(|err| {
err.emit_note(*self.tcx);
err
})?;
}
}
// done
@ -1054,14 +1056,14 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
Ok(())
}
/// Call a query that can return `ErrorHandled`. If `span` is `Some`, point to that span when an error occurs.
/// Call a query that can return `ErrorHandled`. Should be used for statics and other globals.
/// (`mir::Const`/`ty::Const` have `eval` methods that can be used directly instead.)
pub fn ctfe_query<T>(
&self,
span: Option<Span>,
query: impl FnOnce(TyCtxtAt<'tcx>) -> Result<T, ErrorHandled>,
) -> Result<T, ErrorHandled> {
// Use a precise span for better cycle errors.
query(self.tcx.at(span.unwrap_or_else(|| self.cur_span()))).map_err(|err| {
query(self.tcx.at(self.cur_span())).map_err(|err| {
err.emit_note(*self.tcx);
err
})
@ -1082,7 +1084,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
} else {
self.param_env
};
let val = self.ctfe_query(None, |tcx| tcx.eval_to_allocation_raw(param_env.and(gid)))?;
let val = self.ctfe_query(|tcx| tcx.eval_to_allocation_raw(param_env.and(gid)))?;
self.raw_const_to_mplace(val)
}
@ -1092,7 +1094,12 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
span: Option<Span>,
layout: Option<TyAndLayout<'tcx>>,
) -> InterpResult<'tcx, OpTy<'tcx, M::Provenance>> {
let const_val = self.ctfe_query(span, |tcx| val.eval(*tcx, self.param_env, span))?;
let const_val = val.eval(*self.tcx, self.param_env, span).map_err(|err| {
// FIXME: somehow this is reachable even when POST_MONO_CHECKS is on.
// Are we not always populating `required_consts`?
err.emit_note(*self.tcx);
err
})?;
self.const_val_to_op(const_val, val.ty(), layout)
}

View File

@ -164,7 +164,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
sym::type_name => Ty::new_static_str(self.tcx.tcx),
_ => bug!(),
};
let val = self.ctfe_query(None, |tcx| {
let val = self.ctfe_query(|tcx| {
tcx.const_eval_global_id(self.param_env, gid, Some(tcx.span))
})?;
let val = self.const_val_to_op(val, ty, Some(dest.layout))?;

View File

@ -536,7 +536,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
}
// We don't give a span -- statics don't need that, they cannot be generic or associated.
let val = self.ctfe_query(None, |tcx| tcx.eval_static_initializer(def_id))?;
let val = self.ctfe_query(|tcx| tcx.eval_static_initializer(def_id))?;
(val, Some(def_id))
}
};

View File

@ -743,7 +743,7 @@ impl<'tcx> Visitor<'tcx> for Checker<'_, 'tcx> {
let errors = ocx.select_all_or_error();
if !errors.is_empty() {
infcx.err_ctxt().report_fulfillment_errors(&errors);
infcx.err_ctxt().report_fulfillment_errors(errors);
}
// Attempting to call a trait method?

View File

@ -3,7 +3,7 @@ The compiler could not infer a type and asked for a type annotation.
Erroneous code example:
```compile_fail,E0282
let x = "hello".chars().rev().collect();
let x = Vec::new();
```
This error indicates that type inference did not result in one unique possible
@ -11,21 +11,24 @@ type, and extra information is required. In most cases this can be provided
by adding a type annotation. Sometimes you need to specify a generic type
parameter manually.
A common example is the `collect` method on `Iterator`. It has a generic type
parameter with a `FromIterator` bound, which for a `char` iterator is
implemented by `Vec` and `String` among others. Consider the following snippet
that reverses the characters of a string:
In the example above, type `Vec` has a type parameter `T`. When calling
`Vec::new`, barring any other later usage of the variable `x` that allows the
compiler to infer what type `T` is, the compiler needs to be told what it is.
In the first code example, the compiler cannot infer what the type of `x` should
be: `Vec<char>` and `String` are both suitable candidates. To specify which type
to use, you can use a type annotation on `x`:
The type can be specified on the variable:
```
let x: Vec<char> = "hello".chars().rev().collect();
let x: Vec<i32> = Vec::new();
```
It is not necessary to annotate the full type. Once the ambiguity is resolved,
the compiler can infer the rest:
The type can also be specified in the path of the expression:
```
let x = Vec::<i32>::new();
```
In cases with more complex types, it is not necessary to annotate the full
type. Once the ambiguity is resolved, the compiler can infer the rest:
```
let x: Vec<_> = "hello".chars().rev().collect();

View File

@ -1,7 +1,51 @@
An implementation cannot be chosen unambiguously because of lack of information.
The compiler could not infer a type and asked for a type annotation.
Erroneous code example:
```compile_fail,E0283
let x = "hello".chars().rev().collect();
```
This error indicates that type inference did not result in one unique possible
type, and extra information is required. In most cases this can be provided
by adding a type annotation. Sometimes you need to specify a generic type
parameter manually.
A common example is the `collect` method on `Iterator`. It has a generic type
parameter with a `FromIterator` bound, which for a `char` iterator is
implemented by `Vec` and `String` among others. Consider the following snippet
that reverses the characters of a string:
In the first code example, the compiler cannot infer what the type of `x` should
be: `Vec<char>` and `String` are both suitable candidates. To specify which type
to use, you can use a type annotation on `x`:
```
let x: Vec<char> = "hello".chars().rev().collect();
```
It is not necessary to annotate the full type. Once the ambiguity is resolved,
the compiler can infer the rest:
```
let x: Vec<_> = "hello".chars().rev().collect();
```
Another way to provide the compiler with enough information, is to specify the
generic type parameter:
```
let x = "hello".chars().rev().collect::<Vec<char>>();
```
Again, you need not specify the full type if the compiler can infer it:
```
let x = "hello".chars().rev().collect::<Vec<_>>();
```
We can see a self-contained example below:
```compile_fail,E0283
struct Foo;

View File

@ -327,7 +327,7 @@ fn check_opaque_meets_bounds<'tcx>(
// version.
let errors = ocx.select_all_or_error();
if !errors.is_empty() {
let guar = infcx.err_ctxt().report_fulfillment_errors(&errors);
let guar = infcx.err_ctxt().report_fulfillment_errors(errors);
return Err(guar);
}
match origin {
@ -1512,6 +1512,6 @@ pub(super) fn check_generator_obligations(tcx: TyCtxt<'_>, def_id: LocalDefId) {
let errors = fulfillment_cx.select_all_or_error(&infcx);
debug!(?errors);
if !errors.is_empty() {
infcx.err_ctxt().report_fulfillment_errors(&errors);
infcx.err_ctxt().report_fulfillment_errors(errors);
}
}

View File

@ -323,7 +323,7 @@ fn compare_method_predicate_entailment<'tcx>(
// FIXME(-Ztrait-solver=next): Not needed when the hack below is removed.
let errors = ocx.select_where_possible();
if !errors.is_empty() {
let reported = infcx.err_ctxt().report_fulfillment_errors(&errors);
let reported = infcx.err_ctxt().report_fulfillment_errors(errors);
return Err(reported);
}
@ -394,7 +394,7 @@ fn compare_method_predicate_entailment<'tcx>(
});
}
CheckImpliedWfMode::Skip => {
let reported = infcx.err_ctxt().report_fulfillment_errors(&errors);
let reported = infcx.err_ctxt().report_fulfillment_errors(errors);
return Err(reported);
}
}
@ -874,7 +874,7 @@ pub(super) fn collect_return_position_impl_trait_in_trait_tys<'tcx>(
// RPITs.
let errors = ocx.select_all_or_error();
if !errors.is_empty() {
let reported = infcx.err_ctxt().report_fulfillment_errors(&errors);
let reported = infcx.err_ctxt().report_fulfillment_errors(errors);
return Err(reported);
}
@ -2050,7 +2050,7 @@ fn compare_const_predicate_entailment<'tcx>(
// version.
let errors = ocx.select_all_or_error();
if !errors.is_empty() {
return Err(infcx.err_ctxt().report_fulfillment_errors(&errors));
return Err(infcx.err_ctxt().report_fulfillment_errors(errors));
}
let outlives_env = OutlivesEnvironment::new(param_env);
@ -2143,7 +2143,7 @@ fn compare_type_predicate_entailment<'tcx>(
// version.
let errors = ocx.select_all_or_error();
if !errors.is_empty() {
let reported = infcx.err_ctxt().report_fulfillment_errors(&errors);
let reported = infcx.err_ctxt().report_fulfillment_errors(errors);
return Err(reported);
}
@ -2358,7 +2358,7 @@ pub(super) fn check_type_bounds<'tcx>(
// version.
let errors = ocx.select_all_or_error();
if !errors.is_empty() {
let reported = infcx.err_ctxt().report_fulfillment_errors(&errors);
let reported = infcx.err_ctxt().report_fulfillment_errors(errors);
return Err(reported);
}

View File

@ -158,7 +158,7 @@ fn check_main_fn_ty(tcx: TyCtxt<'_>, main_def_id: DefId) {
ocx.register_bound(cause, param_env, norm_return_ty, term_did);
let errors = ocx.select_all_or_error();
if !errors.is_empty() {
infcx.err_ctxt().report_fulfillment_errors(&errors);
infcx.err_ctxt().report_fulfillment_errors(errors);
error = true;
}
// now we can take the return type of the given main function

View File

@ -588,7 +588,7 @@ pub fn check_function_signature<'tcx>(
Ok(()) => {
let errors = ocx.select_all_or_error();
if !errors.is_empty() {
infcx.err_ctxt().report_fulfillment_errors(&errors);
infcx.err_ctxt().report_fulfillment_errors(errors);
return;
}
}

View File

@ -116,7 +116,7 @@ pub(super) fn enter_wf_checking_ctxt<'tcx, F>(
let errors = wfcx.select_all_or_error();
if !errors.is_empty() {
infcx.err_ctxt().report_fulfillment_errors(&errors);
infcx.err_ctxt().report_fulfillment_errors(errors);
return;
}

View File

@ -261,7 +261,7 @@ fn visit_implementation_of_dispatch_from_dyn(tcx: TyCtxt<'_>, impl_did: LocalDef
}
let errors = ocx.select_all_or_error();
if !errors.is_empty() {
infcx.err_ctxt().report_fulfillment_errors(&errors);
infcx.err_ctxt().report_fulfillment_errors(errors);
}
// Finally, resolve all regions.
@ -470,7 +470,7 @@ pub fn coerce_unsized_info<'tcx>(tcx: TyCtxt<'tcx>, impl_did: LocalDefId) -> Coe
ocx.register_obligation(obligation);
let errors = ocx.select_all_or_error();
if !errors.is_empty() {
infcx.err_ctxt().report_fulfillment_errors(&errors);
infcx.err_ctxt().report_fulfillment_errors(errors);
}
// Finally, resolve all regions.

View File

@ -196,7 +196,7 @@ fn get_impl_args(
let errors = ocx.select_all_or_error();
if !errors.is_empty() {
let guar = ocx.infcx.err_ctxt().report_fulfillment_errors(&errors);
let guar = ocx.infcx.err_ctxt().report_fulfillment_errors(errors);
return Err(guar);
}

View File

@ -2958,7 +2958,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
// There should be at least one error reported. If not, we
// will still delay a span bug in `report_fulfillment_errors`.
Ok::<_, NoSolution>((
self.err_ctxt().report_fulfillment_errors(&errors),
self.err_ctxt().report_fulfillment_errors(errors),
impl_trait_ref.args.type_at(1),
element_ty,
))

View File

@ -564,7 +564,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
if !errors.is_empty() {
self.adjust_fulfillment_errors_for_expr_obligation(&mut errors);
self.err_ctxt().report_fulfillment_errors(&errors);
self.err_ctxt().report_fulfillment_errors(errors);
}
}
@ -577,7 +577,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
if !result.is_empty() {
mutate_fulfillment_errors(&mut result);
self.adjust_fulfillment_errors_for_expr_obligation(&mut result);
self.err_ctxt().report_fulfillment_errors(&result);
self.err_ctxt().report_fulfillment_errors(result);
}
}
@ -1477,7 +1477,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
{
Ok(normalized_ty) => normalized_ty,
Err(errors) => {
let guar = self.err_ctxt().report_fulfillment_errors(&errors);
let guar = self.err_ctxt().report_fulfillment_errors(errors);
return Ty::new_error(self.tcx,guar);
}
}

View File

@ -27,7 +27,7 @@ use std::iter;
pub enum TypeAnnotationNeeded {
/// ```compile_fail,E0282
/// let x = "hello".chars().rev().collect();
/// let x;
/// ```
E0282,
/// An implementation cannot be chosen unambiguously because of lack of information.

View File

@ -1,3 +1,8 @@
//! "Hooks" provide a way for `tcx` functionality to be provided by some downstream crate without
//! everything in rustc having to depend on that crate. This is somewhat similar to queries, but
//! queries come with a lot of machinery for caching and incremental compilation, whereas hooks are
//! just plain function pointers without any of the query magic.
use crate::mir;
use crate::query::TyCtxtAt;
use crate::ty::{Ty, TyCtxt};
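A rough illustration of the idea in the module doc above, using hypothetical names rather than the actual rustc declarations: a hooks table is conceptually just a struct of plain function pointers that a downstream crate overrides, with none of the query system's caching or incremental machinery.

```rust
// Hypothetical sketch -- not the real rustc hooks API.
struct Hooks {
    // A hook is a plain function pointer with a default implementation.
    describe_item: fn(u32) -> String,
}

fn default_describe_item(_id: u32) -> String {
    "<no provider installed>".to_string()
}

// A downstream crate supplies the real implementation...
fn downstream_describe_item(id: u32) -> String {
    format!("item #{id}")
}

fn main() {
    // ...and installs it by overwriting the default pointer at startup.
    let mut hooks = Hooks { describe_item: default_describe_item };
    hooks.describe_item = downstream_describe_item;
    println!("{}", (hooks.describe_item)(42));
}
```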

View File

@ -43,21 +43,6 @@ impl ErrorHandled {
}
}
pub fn emit_err(&self, tcx: TyCtxt<'_>) -> ErrorGuaranteed {
match self {
&ErrorHandled::Reported(err, span) => {
if !err.is_tainted_by_errors && !span.is_dummy() {
tcx.sess.emit_err(error::ErroneousConstant { span });
}
err.error
}
&ErrorHandled::TooGeneric(span) => tcx.sess.delay_span_bug(
span,
"encountered TooGeneric error when monomorphic data was expected",
),
}
}
pub fn emit_note(&self, tcx: TyCtxt<'_>) {
match self {
&ErrorHandled::Reported(err, span) => {

View File

@ -2,7 +2,7 @@
//!
//! [rustc dev guide]: https://rustc-dev-guide.rust-lang.org/mir/index.html
use crate::mir::interpret::{AllocRange, ConstAllocation, ErrorHandled, Scalar};
use crate::mir::interpret::{AllocRange, ConstAllocation, Scalar};
use crate::mir::visit::MirVisitable;
use crate::ty::codec::{TyDecoder, TyEncoder};
use crate::ty::fold::{FallibleTypeFolder, TypeFoldable};
@ -568,34 +568,6 @@ impl<'tcx> Body<'tcx> {
pub fn is_custom_mir(&self) -> bool {
self.injection_phase.is_some()
}
/// *Must* be called once the full substitution for this body is known, to ensure that the body
/// is indeed fit for code generation or consumption more generally.
///
/// Sadly there's no nice way to represent an "arbitrary normalizer", so we take one for
/// constants specifically. (`Option<GenericArgsRef>` could be used for that, but the fact
/// that `Instance::args_for_mir_body` is private and instead instance exposes normalization
/// functions makes it seem like exposing the generic args is not the intended strategy.)
///
/// Also sadly, CTFE doesn't even know whether it runs on MIR that is already polymorphic or still monomorphic,
/// so we cannot just immediately ICE on TooGeneric.
///
/// Returns Ok(()) if everything went fine, and `Err` if a problem occurred and got reported.
pub fn post_mono_checks(
&self,
tcx: TyCtxt<'tcx>,
param_env: ty::ParamEnv<'tcx>,
normalize_const: impl Fn(Const<'tcx>) -> Result<Const<'tcx>, ErrorHandled>,
) -> Result<(), ErrorHandled> {
// For now, the only thing we have to check is is to ensure that all the constants used in
// the body successfully evaluate.
for &const_ in &self.required_consts {
let c = normalize_const(const_.const_)?;
c.eval(tcx, param_env, Some(const_.span))?;
}
Ok(())
}
}
#[derive(Copy, Clone, PartialEq, Eq, Debug, TyEncodable, TyDecodable, HashStable)]

View File

@ -184,6 +184,8 @@ macro_rules! make_mir_visitor {
visit_place_fns!($($mutability)?);
/// This is called for every constant in the MIR body and every `required_consts`
/// (i.e., including consts that have been dead-code-eliminated).
fn visit_constant(
&mut self,
constant: & $($mutability)? ConstOperand<'tcx>,

View File

@ -1426,6 +1426,8 @@ fn collect_used_items<'tcx>(
);
}
// Here we rely on the visitor also visiting `required_consts`, so that we evaluate them
// and abort compilation if any of them errors.
MirUsedCollector {
tcx,
body: &body,

View File

@ -225,6 +225,10 @@ parse_expected_semi_found_str = expected `;`, found `{$token}`
parse_expected_statement_after_outer_attr = expected statement after outer attribute
parse_expected_struct_field = expected one of `,`, `:`, or `{"}"}`, found `{$token}`
.label = expected one of `,`, `:`, or `{"}"}`
.ident_label = while parsing this struct field
parse_expected_trait_in_trait_impl_found_type = expected a trait, found type
parse_extern_crate_name_with_dashes = crate name using dashes are not valid in `extern crate` statements

View File

@ -430,6 +430,17 @@ pub(crate) struct ExpectedElseBlock {
pub condition_start: Span,
}
#[derive(Diagnostic)]
#[diag(parse_expected_struct_field)]
pub(crate) struct ExpectedStructField {
#[primary_span]
#[label]
pub span: Span,
pub token: Token,
#[label(parse_ident_label)]
pub ident_span: Span,
}
#[derive(Diagnostic)]
#[diag(parse_outer_attribute_not_allowed_on_if_else)]
pub(crate) struct OuterAttributeNotAllowedOnIfElse {

View File

@ -2834,7 +2834,7 @@ impl<'a> Parser<'a> {
)?;
let guard = if this.eat_keyword(kw::If) {
let if_span = this.prev_token.span;
let mut cond = this.parse_expr_res(Restrictions::ALLOW_LET, None)?;
let mut cond = this.parse_match_guard_condition()?;
CondChecker { parser: this, forbid_let_reason: None }.visit_expr(&mut cond);
@ -2860,9 +2860,9 @@ impl<'a> Parser<'a> {
{
err.span_suggestion(
this.token.span,
"try using a fat arrow here",
"use a fat arrow to start a match arm",
"=>",
Applicability::MaybeIncorrect,
Applicability::MachineApplicable,
);
err.emit();
this.bump();
@ -2979,6 +2979,33 @@ impl<'a> Parser<'a> {
})
}
fn parse_match_guard_condition(&mut self) -> PResult<'a, P<Expr>> {
self.parse_expr_res(Restrictions::ALLOW_LET | Restrictions::IN_IF_GUARD, None).map_err(
|mut err| {
if self.prev_token == token::OpenDelim(Delimiter::Brace) {
let sugg_sp = self.prev_token.span.shrink_to_lo();
// Consume everything within the braces, let's avoid further parse
// errors.
self.recover_stmt_(SemiColonMode::Ignore, BlockMode::Ignore);
let msg = "you might have meant to start a match arm after the match guard";
if self.eat(&token::CloseDelim(Delimiter::Brace)) {
let applicability = if self.token.kind != token::FatArrow {
// We have high confidence that we indeed didn't have a struct
// literal in the match guard, but rather we had some operation
// that ended in a path, immediately followed by a block that was
// meant to be the match arm.
Applicability::MachineApplicable
} else {
Applicability::MaybeIncorrect
};
err.span_suggestion_verbose(sugg_sp, msg, "=> ".to_string(), applicability);
}
}
err
},
)
}
pub(crate) fn is_builtin(&self) -> bool {
self.token.is_keyword(kw::Builtin) && self.look_ahead(1, |t| *t == token::Pound)
}
@ -3049,9 +3076,10 @@ impl<'a> Parser<'a> {
|| self.look_ahead(2, |t| t == &token::Colon)
&& (
// `{ ident: token, ` cannot start a block.
self.look_ahead(4, |t| t == &token::Comma) ||
// `{ ident: ` cannot start a block unless it's a type ascription `ident: Type`.
self.look_ahead(3, |t| !t.can_begin_type())
self.look_ahead(4, |t| t == &token::Comma)
// `{ ident: ` cannot start a block unless it's a type ascription
// `ident: Type`.
|| self.look_ahead(3, |t| !t.can_begin_type())
)
)
}
@ -3091,6 +3119,7 @@ impl<'a> Parser<'a> {
let mut fields = ThinVec::new();
let mut base = ast::StructRest::None;
let mut recover_async = false;
let in_if_guard = self.restrictions.contains(Restrictions::IN_IF_GUARD);
let mut async_block_err = |e: &mut Diagnostic, span: Span| {
recover_async = true;
@ -3128,6 +3157,26 @@ impl<'a> Parser<'a> {
e.span_label(pth.span, "while parsing this struct");
}
if let Some((ident, _)) = self.token.ident()
&& !self.token.is_reserved_ident()
&& self.look_ahead(1, |t| {
AssocOp::from_token(&t).is_some()
|| matches!(t.kind, token::OpenDelim(_))
|| t.kind == token::Dot
})
{
// Looks like they tried to write a shorthand, complex expression.
e.span_suggestion_verbose(
self.token.span.shrink_to_lo(),
"try naming a field",
&format!("{ident}: ", ),
Applicability::MaybeIncorrect,
);
}
if in_if_guard && close_delim == Delimiter::Brace {
return Err(e);
}
if !recover {
return Err(e);
}
@ -3173,19 +3222,6 @@ impl<'a> Parser<'a> {
",",
Applicability::MachineApplicable,
);
} else if is_shorthand
&& (AssocOp::from_token(&self.token).is_some()
|| matches!(&self.token.kind, token::OpenDelim(_))
|| self.token.kind == token::Dot)
{
// Looks like they tried to write a shorthand, complex expression.
let ident = parsed_field.expect("is_shorthand implies Some").ident;
e.span_suggestion(
ident.span.shrink_to_lo(),
"try naming a field",
&format!("{ident}: "),
Applicability::HasPlaceholders,
);
}
}
if !recover {
@ -3288,6 +3324,24 @@ impl<'a> Parser<'a> {
// Check if a colon exists one ahead. This means we're parsing a fieldname.
let is_shorthand = !this.look_ahead(1, |t| t == &token::Colon || t == &token::Eq);
// Proactively check whether parsing the field will be incorrect.
let is_wrong = this.token.is_ident()
&& !this.token.is_reserved_ident()
&& !this.look_ahead(1, |t| {
t == &token::Colon
|| t == &token::Eq
|| t == &token::Comma
|| t == &token::CloseDelim(Delimiter::Brace)
|| t == &token::CloseDelim(Delimiter::Parenthesis)
});
if is_wrong {
return Err(errors::ExpectedStructField {
span: this.look_ahead(1, |t| t.span),
ident_span: this.token.span,
token: this.look_ahead(1, |t| t.clone()),
}
.into_diagnostic(&self.sess.span_diagnostic));
}
let (ident, expr) = if is_shorthand {
// Mimic `x: x` for the `x` field shorthand.
let ident = this.parse_ident_common(false)?;
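For context, a hedged sketch of the kind of source this recovery targets (illustrative only): inside a match guard, a `{` can read either as a struct literal or as an arm body whose `=>` was forgotten; parenthesizing the literal keeps the guard unambiguous.

```rust
struct S {
    a: i32,
}

fn main() {
    let x = Some(3);
    let msg = match x {
        // Without the parentheses, the `{` after `S` is the kind of
        // ambiguity the recovery above is concerned with.
        Some(n) if n == (S { a: 3 }).a => "guard matched",
        _ => "no match",
    };
    println!("{msg}");
}
```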

View File

@ -52,6 +52,7 @@ bitflags::bitflags! {
const NO_STRUCT_LITERAL = 1 << 1;
const CONST_EXPR = 1 << 2;
const ALLOW_LET = 1 << 3;
const IN_IF_GUARD = 1 << 4;
}
}

View File

@ -2373,7 +2373,7 @@ impl CheckAttrVisitor<'_> {
let errors = ocx.select_all_or_error();
if !errors.is_empty() {
infcx.err_ctxt().report_fulfillment_errors(&errors);
infcx.err_ctxt().report_fulfillment_errors(errors);
self.abort.set(true);
}
}

View File

@ -57,7 +57,7 @@ pub fn ensure_wf<'tcx>(
ocx.register_obligation(obligation);
let errors = ocx.select_all_or_error();
if !errors.is_empty() {
infcx.err_ctxt().report_fulfillment_errors(&errors);
infcx.err_ctxt().report_fulfillment_errors(errors);
false
} else {
// looks WF!

View File

@ -13,7 +13,6 @@ rustc_data_structures = { path = "../rustc_data_structures" }
rustc_errors = { path = "../rustc_errors" }
rustc_hir = { path = "../rustc_hir" }
rustc_index = { path = "../rustc_index" }
rustc_macros = { path = "../rustc_macros" }
rustc_middle = { path = "../rustc_middle" }
rustc_query_system = { path = "../rustc_query_system" }
rustc-rayon-core = { version = "0.5.0", optional = true }

View File

@ -813,7 +813,6 @@ impl Input {
FileName::Anon(_) => None,
FileName::MacroExpansion(_) => None,
FileName::ProcMacroSourceCode(_) => None,
FileName::CfgSpec(_) => None,
FileName::CliCrateAttr(_) => None,
FileName::Custom(_) => None,
FileName::DocTest(path, _) => Some(path),

View File

@ -4,14 +4,13 @@ version = "0.0.0"
edition = "2021"
[dependencies]
rustc_driver = { path = "../rustc_driver" }
rustc_hir = { path = "../rustc_hir" }
rustc_interface = { path = "../rustc_interface" }
rustc_middle = { path = "../rustc_middle" }
rustc_span = { path = "../rustc_span" }
rustc_target = { path = "../rustc_target" }
rustc_driver = { path = "../rustc_driver" }
rustc_interface = { path = "../rustc_interface" }
rustc_session = {path = "../rustc_session" }
tracing = "0.1"
stable_mir = {path = "../stable_mir" }
tracing = "0.1"
[features]

View File

@ -10,10 +10,6 @@
html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/",
test(attr(allow(unused_variables), deny(warnings)))
)]
#![feature(rustc_private)]
#![feature(ptr_metadata)]
#![feature(type_alias_impl_trait)] // Used to define opaque types.
#![feature(intra_doc_pointers)]
pub mod rustc_internal;

View File

@ -11,7 +11,7 @@ use rustc_driver::{Callbacks, Compilation, RunCompiler};
use rustc_interface::{interface, Queries};
use rustc_middle::mir::interpret::AllocId;
use rustc_middle::ty::TyCtxt;
pub use rustc_span::def_id::{CrateNum, DefId};
use rustc_span::def_id::{CrateNum, DefId};
use rustc_span::Span;
use stable_mir::CompilerError;

View File

@ -280,8 +280,7 @@ impl RealFileName {
}
/// Differentiates between real files and common virtual files.
#[derive(Debug, Eq, PartialEq, Clone, Ord, PartialOrd, Hash)]
#[derive(Decodable, Encodable)]
#[derive(Debug, Eq, PartialEq, Clone, Ord, PartialOrd, Hash, Decodable, Encodable)]
pub enum FileName {
Real(RealFileName),
/// Call to `quote!`.
@ -292,8 +291,6 @@ pub enum FileName {
// FIXME(jseyfried)
MacroExpansion(Hash64),
ProcMacroSourceCode(Hash64),
/// Strings provided as `--cfg [cfgspec]` stored in a `crate_cfg`.
CfgSpec(Hash64),
/// Strings provided as crate attributes in the CLI.
CliCrateAttr(Hash64),
/// Custom sources for explicit parser calls from plugins and drivers.
@ -305,7 +302,6 @@ pub enum FileName {
impl From<PathBuf> for FileName {
fn from(p: PathBuf) -> Self {
assert!(!p.to_string_lossy().ends_with('>'));
FileName::Real(RealFileName::LocalPath(p))
}
}
@ -339,7 +335,6 @@ impl fmt::Display for FileNameDisplay<'_> {
MacroExpansion(_) => write!(fmt, "<macro expansion>"),
Anon(_) => write!(fmt, "<anon>"),
ProcMacroSourceCode(_) => write!(fmt, "<proc-macro source code>"),
CfgSpec(_) => write!(fmt, "<cfgspec>"),
CliCrateAttr(_) => write!(fmt, "<crate attribute>"),
Custom(ref s) => write!(fmt, "<{s}>"),
DocTest(ref path, _) => write!(fmt, "{}", path.display()),
@ -365,7 +360,6 @@ impl FileName {
Anon(_)
| MacroExpansion(_)
| ProcMacroSourceCode(_)
| CfgSpec(_)
| CliCrateAttr(_)
| Custom(_)
| QuoteExpansion(_)

View File

@ -218,7 +218,7 @@ impl<'a, 'tcx> ObligationCtxt<'a, 'tcx> {
def_id: LocalDefId,
) -> Result<FxIndexSet<Ty<'tcx>>, ErrorGuaranteed> {
self.assumed_wf_types(param_env, def_id)
.map_err(|errors| self.infcx.err_ctxt().report_fulfillment_errors(&errors))
.map_err(|errors| self.infcx.err_ctxt().report_fulfillment_errors(errors))
}
pub fn assumed_wf_types(

View File

@ -0,0 +1,275 @@
use crate::infer::type_variable::{TypeVariableOrigin, TypeVariableOriginKind};
use crate::infer::InferCtxt;
use crate::traits::{Obligation, ObligationCause, ObligationCtxt};
use rustc_errors::{pluralize, struct_span_err, Applicability, DiagnosticBuilder, ErrorGuaranteed};
use rustc_hir as hir;
use rustc_hir::Node;
use rustc_middle::ty::{self, Ty};
use rustc_span::{Span, DUMMY_SP};
use super::ArgKind;
pub use rustc_infer::traits::error_reporting::*;
pub trait InferCtxtExt<'tcx> {
/// Given some node representing a fn-like thing in the HIR map,
/// returns a span and `ArgKind` information that describes the
/// arguments it expects. This can be supplied to
/// `report_arg_count_mismatch`.
fn get_fn_like_arguments(&self, node: Node<'_>) -> Option<(Span, Option<Span>, Vec<ArgKind>)>;
/// Reports an error when the number of arguments needed by a
/// trait match doesn't match the number that the expression
/// provides.
fn report_arg_count_mismatch(
&self,
span: Span,
found_span: Option<Span>,
expected_args: Vec<ArgKind>,
found_args: Vec<ArgKind>,
is_closure: bool,
closure_pipe_span: Option<Span>,
) -> DiagnosticBuilder<'tcx, ErrorGuaranteed>;
/// Checks if the type implements one of `Fn`, `FnMut`, or `FnOnce`
/// in that order, and returns the generic type corresponding to the
/// argument of that trait (corresponding to the closure arguments).
fn type_implements_fn_trait(
&self,
param_env: ty::ParamEnv<'tcx>,
ty: ty::Binder<'tcx, Ty<'tcx>>,
polarity: ty::ImplPolarity,
) -> Result<(ty::ClosureKind, ty::Binder<'tcx, Ty<'tcx>>), ()>;
}
impl<'tcx> InferCtxtExt<'tcx> for InferCtxt<'tcx> {
/// Given some node representing a fn-like thing in the HIR map,
/// returns a span and `ArgKind` information that describes the
/// arguments it expects. This can be supplied to
/// `report_arg_count_mismatch`.
fn get_fn_like_arguments(&self, node: Node<'_>) -> Option<(Span, Option<Span>, Vec<ArgKind>)> {
let sm = self.tcx.sess.source_map();
let hir = self.tcx.hir();
Some(match node {
Node::Expr(&hir::Expr {
kind: hir::ExprKind::Closure(&hir::Closure { body, fn_decl_span, fn_arg_span, .. }),
..
}) => (
fn_decl_span,
fn_arg_span,
hir.body(body)
.params
.iter()
.map(|arg| {
if let hir::Pat { kind: hir::PatKind::Tuple(ref args, _), span, .. } =
*arg.pat
{
Some(ArgKind::Tuple(
Some(span),
args.iter()
.map(|pat| {
sm.span_to_snippet(pat.span)
.ok()
.map(|snippet| (snippet, "_".to_owned()))
})
.collect::<Option<Vec<_>>>()?,
))
} else {
let name = sm.span_to_snippet(arg.pat.span).ok()?;
Some(ArgKind::Arg(name, "_".to_owned()))
}
})
.collect::<Option<Vec<ArgKind>>>()?,
),
Node::Item(&hir::Item { kind: hir::ItemKind::Fn(ref sig, ..), .. })
| Node::ImplItem(&hir::ImplItem { kind: hir::ImplItemKind::Fn(ref sig, _), .. })
| Node::TraitItem(&hir::TraitItem {
kind: hir::TraitItemKind::Fn(ref sig, _), ..
}) => (
sig.span,
None,
sig.decl
.inputs
.iter()
.map(|arg| match arg.kind {
hir::TyKind::Tup(ref tys) => ArgKind::Tuple(
Some(arg.span),
vec![("_".to_owned(), "_".to_owned()); tys.len()],
),
_ => ArgKind::empty(),
})
.collect::<Vec<ArgKind>>(),
),
Node::Ctor(ref variant_data) => {
let span = variant_data.ctor_hir_id().map_or(DUMMY_SP, |id| hir.span(id));
(span, None, vec![ArgKind::empty(); variant_data.fields().len()])
}
_ => panic!("non-FnLike node found: {node:?}"),
})
}
/// Reports an error when the number of arguments needed by a
/// trait match doesn't match the number that the expression
/// provides.
fn report_arg_count_mismatch(
&self,
span: Span,
found_span: Option<Span>,
expected_args: Vec<ArgKind>,
found_args: Vec<ArgKind>,
is_closure: bool,
closure_arg_span: Option<Span>,
) -> DiagnosticBuilder<'tcx, ErrorGuaranteed> {
let kind = if is_closure { "closure" } else { "function" };
let args_str = |arguments: &[ArgKind], other: &[ArgKind]| {
let arg_length = arguments.len();
let distinct = matches!(other, &[ArgKind::Tuple(..)]);
match (arg_length, arguments.get(0)) {
(1, Some(ArgKind::Tuple(_, fields))) => {
format!("a single {}-tuple as argument", fields.len())
}
_ => format!(
"{} {}argument{}",
arg_length,
if distinct && arg_length > 1 { "distinct " } else { "" },
pluralize!(arg_length)
),
}
};
let expected_str = args_str(&expected_args, &found_args);
let found_str = args_str(&found_args, &expected_args);
let mut err = struct_span_err!(
self.tcx.sess,
span,
E0593,
"{} is expected to take {}, but it takes {}",
kind,
expected_str,
found_str,
);
err.span_label(span, format!("expected {kind} that takes {expected_str}"));
if let Some(found_span) = found_span {
err.span_label(found_span, format!("takes {found_str}"));
// Suggest to take and ignore the arguments with expected_args_length `_`s if
// found arguments is empty (assume the user just wants to ignore args in this case).
// For example, if `expected_args_length` is 2, suggest `|_, _|`.
if found_args.is_empty() && is_closure {
let underscores = vec!["_"; expected_args.len()].join(", ");
err.span_suggestion_verbose(
closure_arg_span.unwrap_or(found_span),
format!(
"consider changing the closure to take and ignore the expected argument{}",
pluralize!(expected_args.len())
),
format!("|{underscores}|"),
Applicability::MachineApplicable,
);
}
if let &[ArgKind::Tuple(_, ref fields)] = &found_args[..] {
if fields.len() == expected_args.len() {
let sugg = fields
.iter()
.map(|(name, _)| name.to_owned())
.collect::<Vec<String>>()
.join(", ");
err.span_suggestion_verbose(
found_span,
"change the closure to take multiple arguments instead of a single tuple",
format!("|{sugg}|"),
Applicability::MachineApplicable,
);
}
}
if let &[ArgKind::Tuple(_, ref fields)] = &expected_args[..]
&& fields.len() == found_args.len()
&& is_closure
{
let sugg = format!(
"|({}){}|",
found_args
.iter()
.map(|arg| match arg {
ArgKind::Arg(name, _) => name.to_owned(),
_ => "_".to_owned(),
})
.collect::<Vec<String>>()
.join(", "),
// add type annotations if available
if found_args.iter().any(|arg| match arg {
ArgKind::Arg(_, ty) => ty != "_",
_ => false,
}) {
format!(
": ({})",
fields
.iter()
.map(|(_, ty)| ty.to_owned())
.collect::<Vec<String>>()
.join(", ")
)
} else {
String::new()
},
);
err.span_suggestion_verbose(
found_span,
"change the closure to accept a tuple instead of individual arguments",
sugg,
Applicability::MachineApplicable,
);
}
}
err
}
fn type_implements_fn_trait(
&self,
param_env: ty::ParamEnv<'tcx>,
ty: ty::Binder<'tcx, Ty<'tcx>>,
polarity: ty::ImplPolarity,
) -> Result<(ty::ClosureKind, ty::Binder<'tcx, Ty<'tcx>>), ()> {
self.commit_if_ok(|_| {
for trait_def_id in [
self.tcx.lang_items().fn_trait(),
self.tcx.lang_items().fn_mut_trait(),
self.tcx.lang_items().fn_once_trait(),
] {
let Some(trait_def_id) = trait_def_id else { continue };
// Make a fresh inference variable so we can determine what the substitutions
// of the trait are.
let var = self.next_ty_var(TypeVariableOrigin {
span: DUMMY_SP,
kind: TypeVariableOriginKind::MiscVariable,
});
// FIXME(effects)
let trait_ref = ty::TraitRef::new(self.tcx, trait_def_id, [ty.skip_binder(), var]);
let obligation = Obligation::new(
self.tcx,
ObligationCause::dummy(),
param_env,
ty.rebind(ty::TraitPredicate { trait_ref, polarity }),
);
let ocx = ObligationCtxt::new(self);
ocx.register_obligation(obligation);
if ocx.select_all_or_error().is_empty() {
return Ok((
self.tcx
.fn_trait_kind_from_def_id(trait_def_id)
.expect("expected to map DefId to ClosureKind"),
ty.rebind(self.resolve_vars_if_possible(var)),
));
}
}
Err(())
})
}
}

View File

@ -18,7 +18,7 @@ use crate::errors::{
EmptyOnClauseInOnUnimplemented, InvalidOnClauseInOnUnimplemented, NoValueInOnUnimplemented,
};
use super::InferCtxtPrivExt;
use crate::traits::error_reporting::type_err_ctxt_ext::InferCtxtPrivExt;
pub trait TypeErrCtxtExt<'tcx> {
/*private*/

View File

@ -41,8 +41,8 @@ use rustc_target::spec::abi;
use std::borrow::Cow;
use std::iter;
use super::InferCtxtPrivExt;
use crate::infer::InferCtxtExt as _;
use crate::traits::error_reporting::type_err_ctxt_ext::InferCtxtPrivExt;
use crate::traits::query::evaluate_obligation::InferCtxtExt as _;
use rustc_middle::ty::print::{with_forced_trimmed_paths, with_no_trimmed_paths};
@ -4254,6 +4254,39 @@ impl<'a, 'hir> hir::intravisit::Visitor<'hir> for ReplaceImplTraitVisitor<'a> {
}
}
pub(super) fn get_explanation_based_on_obligation<'tcx>(
obligation: &PredicateObligation<'tcx>,
trait_ref: ty::PolyTraitRef<'tcx>,
trait_predicate: &ty::PolyTraitPredicate<'tcx>,
pre_message: String,
) -> String {
if let ObligationCauseCode::MainFunctionType = obligation.cause.code() {
"consider using `()`, or a `Result`".to_owned()
} else {
let ty_desc = match trait_ref.skip_binder().self_ty().kind() {
ty::FnDef(_, _) => Some("fn item"),
ty::Closure(_, _) => Some("closure"),
_ => None,
};
match ty_desc {
Some(desc) => format!(
"{}the trait `{}` is not implemented for {} `{}`",
pre_message,
trait_predicate.print_modifiers_and_trait_path(),
desc,
trait_ref.skip_binder().self_ty(),
),
None => format!(
"{}the trait `{}` is not implemented for `{}`",
pre_message,
trait_predicate.print_modifiers_and_trait_path(),
trait_ref.skip_binder().self_ty(),
),
}
}
}
// Replace `param` with `replace_ty`
struct ReplaceImplTraitFolder<'tcx> {
tcx: TyCtxt<'tcx>,

File diff suppressed because it is too large

View File

@ -204,7 +204,7 @@ fn do_normalize_predicates<'tcx>(
let predicates = match fully_normalize(&infcx, cause, elaborated_env, predicates) {
Ok(predicates) => predicates,
Err(errors) => {
let reported = infcx.err_ctxt().report_fulfillment_errors(&errors);
let reported = infcx.err_ctxt().report_fulfillment_errors(errors);
return Err(reported);
}
};

View File

@ -9,7 +9,7 @@ extern crate tracing;
pub(crate) use rustc_data_structures::fx::{FxIndexMap as Map, FxIndexSet as Set};
pub mod layout;
pub(crate) mod maybe_transmutable;
mod maybe_transmutable;
#[derive(Default)]
pub struct Assume {
@ -19,7 +19,7 @@ pub struct Assume {
pub validity: bool,
}
/// Either we have an error, transmutation is allowed, or we have an optional
/// Either transmutation is allowed, we have an error, or we have an optional
/// Condition that must hold.
#[derive(Debug, Hash, Eq, PartialEq, Clone)]
pub enum Answer<R> {

View File

@ -32,26 +32,6 @@ where
) -> Self {
Self { src, dst, scope, assume, context }
}
// FIXME(bryangarza): Delete this when all usages are removed
pub(crate) fn map_layouts<F, M>(
self,
f: F,
) -> Result<MaybeTransmutableQuery<M, C>, Answer<<C as QueryContext>::Ref>>
where
F: FnOnce(
L,
L,
<C as QueryContext>::Scope,
&C,
) -> Result<(M, M), Answer<<C as QueryContext>::Ref>>,
{
let Self { src, dst, scope, assume, context } = self;
let (src, dst) = f(src, dst, scope, &context)?;
Ok(MaybeTransmutableQuery { src, dst, scope, assume, context })
}
}
// FIXME: Nix this cfg, so we can write unit tests independently of rustc
@ -107,42 +87,42 @@ where
#[instrument(level = "debug", skip(self), fields(src = ?self.src, dst = ?self.dst))]
pub(crate) fn answer(self) -> Answer<<C as QueryContext>::Ref> {
let assume_visibility = self.assume.safety;
// FIXME(bryangarza): Refactor this code to get rid of `map_layouts`
let query_or_answer = self.map_layouts(|src, dst, scope, context| {
// Remove all `Def` nodes from `src`, without checking their visibility.
let src = src.prune(&|def| true);
trace!(?src, "pruned src");
let Self { src, dst, scope, assume, context } = self;
// Remove all `Def` nodes from `dst`, additionally...
let dst = if assume_visibility {
// ...if visibility is assumed, don't check their visibility.
dst.prune(&|def| true)
} else {
// ...otherwise, prune away all unreachable paths through the `Dst` layout.
dst.prune(&|def| context.is_accessible_from(def, scope))
};
// Remove all `Def` nodes from `src`, without checking their visibility.
let src = src.prune(&|def| true);
trace!(?dst, "pruned dst");
trace!(?src, "pruned src");
// Convert `src` from a tree-based representation to an NFA-based representation.
// If the conversion fails because `src` is uninhabited, conclude that the transmutation
// is acceptable, because instances of the `src` type do not exist.
let src = Nfa::from_tree(src).map_err(|Uninhabited| Answer::Yes)?;
// Remove all `Def` nodes from `dst`, additionally...
let dst = if assume_visibility {
// ...if visibility is assumed, don't check their visibility.
dst.prune(&|def| true)
} else {
// ...otherwise, prune away all unreachable paths through the `Dst` layout.
dst.prune(&|def| context.is_accessible_from(def, scope))
};
// Convert `dst` from a tree-based representation to an NFA-based representation.
// If the conversion fails because `src` is uninhabited, conclude that the transmutation
// is unacceptable, because instances of the `dst` type do not exist.
let dst =
Nfa::from_tree(dst).map_err(|Uninhabited| Answer::No(Reason::DstIsPrivate))?;
trace!(?dst, "pruned dst");
Ok((src, dst))
});
// Convert `src` from a tree-based representation to an NFA-based representation.
// If the conversion fails because `src` is uninhabited, conclude that the transmutation
// is acceptable, because instances of the `src` type do not exist.
let src = match Nfa::from_tree(src) {
Ok(src) => src,
Err(Uninhabited) => return Answer::Yes,
};
match query_or_answer {
Ok(query) => query.answer(),
Err(answer) => answer,
}
// Convert `dst` from a tree-based representation to an NFA-based representation.
// If the conversion fails because `src` is uninhabited, conclude that the transmutation
// is unacceptable, because instances of the `dst` type do not exist.
let dst = match Nfa::from_tree(dst) {
Ok(dst) => dst,
Err(Uninhabited) => return Answer::No(Reason::DstIsPrivate),
};
MaybeTransmutableQuery { src, dst, scope, assume, context }.answer()
}
}
@ -156,14 +136,10 @@ where
#[inline(always)]
#[instrument(level = "debug", skip(self), fields(src = ?self.src, dst = ?self.dst))]
pub(crate) fn answer(self) -> Answer<<C as QueryContext>::Ref> {
// FIXME(bryangarza): Refactor this code to get rid of `map_layouts`
let query_or_answer = self
.map_layouts(|src, dst, scope, context| Ok((Dfa::from_nfa(src), Dfa::from_nfa(dst))));
match query_or_answer {
Ok(query) => query.answer(),
Err(answer) => answer,
}
let Self { src, dst, scope, assume, context } = self;
let src = Dfa::from_nfa(src);
let dst = Dfa::from_nfa(dst);
MaybeTransmutableQuery { src, dst, scope, assume, context }.answer()
}
}
@ -171,26 +147,8 @@ impl<C> MaybeTransmutableQuery<Dfa<<C as QueryContext>::Ref>, C>
where
C: QueryContext,
{
/// Answers whether a `Nfa` is transmutable into another `Nfa`.
///
/// This method converts `src` and `dst` to DFAs, then computes an answer using those DFAs.
/// Answers whether a `Dfa` is transmutable into another `Dfa`.
pub(crate) fn answer(self) -> Answer<<C as QueryContext>::Ref> {
MaybeTransmutableQuery {
src: &self.src,
dst: &self.dst,
scope: self.scope,
assume: self.assume,
context: self.context,
}
.answer()
}
}
impl<'l, C> MaybeTransmutableQuery<&'l Dfa<<C as QueryContext>::Ref>, C>
where
C: QueryContext,
{
pub(crate) fn answer(&mut self) -> Answer<<C as QueryContext>::Ref> {
self.answer_memo(&mut Map::default(), self.src.start, self.dst.start)
}

View File

@ -479,6 +479,46 @@ pub trait Into<T>: Sized {
/// - `From<T> for U` implies [`Into`]`<U> for T`
/// - `From` is reflexive, which means that `From<T> for T` is implemented
///
/// # When to implement `From`
///
/// While there's no technical restrictions on which conversions can be done using
/// a `From` implementation, the general expectation is that the conversions
/// should typically be restricted as follows:
///
/// * The conversion is *infallible*: if the conversion can fail, use [`TryFrom`]
/// instead; don't provide a `From` impl that panics.
///
/// * The conversion is *lossless*: semantically, it should not lose or discard
/// information. For example, `i32: From<u16>` exists, where the original
/// value can be recovered using `u16: TryFrom<i32>`. And `String: From<&str>`
/// exists, where you can get something equivalent to the original value via
/// `Deref`. But `From` cannot be used to convert from `u32` to `u16`, since
/// that cannot succeed in a lossless way. (There's some wiggle room here for
/// information not considered semantically relevant. For example,
/// `Box<[T]>: From<Vec<T>>` exists even though it might not preserve capacity,
/// like how two vectors can be equal despite differing capacities.)
///
/// * The conversion is *value-preserving*: the conceptual kind and meaning of
/// the resulting value is the same, even though the Rust type and technical
/// representation might be different. For example `-1_i8 as u8` is *lossless*,
/// since `as` casting back can recover the original value, but that conversion
/// is *not* available via `From` because `-1` and `255` are different conceptual
/// values (despite being identical bit patterns technically). But
/// `f32: From<i16>` *is* available because `1_i16` and `1.0_f32` are conceptually
/// the same real number (despite having very different bit patterns technically).
/// `String: From<char>` is available because they're both *text*, but
/// `String: From<u32>` is *not* available, since `1` (a number) and `"1"`
/// (text) are too different. (Converting values to text is instead covered
/// by the [`Display`](crate::fmt::Display) trait.)
///
/// * The conversion is *obvious*: it's the only reasonable conversion between
/// the two types. Otherwise it's better to have it be a named method or
/// constructor, like how [`str::as_bytes`] is a method and how integers have
/// methods like [`u32::from_ne_bytes`], [`u32::from_le_bytes`], and
/// [`u32::from_be_bytes`], none of which are `From` implementations. Whereas
/// there's only one reasonable way to wrap an [`Ipv6Addr`](crate::net::Ipv6Addr)
/// into an [`IpAddr`](crate::net::IpAddr), thus `IpAddr: From<Ipv6Addr>` exists.
///
/// # Examples
///
/// [`String`] implements `From<&str>`:
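As a rough illustration of those guidelines (a sketch, not part of the documented examples): a newtype conversion that is infallible, lossless, value-preserving, and obvious, alongside the `IpAddr: From<Ipv6Addr>` case mentioned above.

```rust
use std::net::{IpAddr, Ipv6Addr};

// Wrapping a raw u16 port number meets all four guidelines: it cannot fail,
// loses nothing, keeps the same conceptual value, and is the only reasonable
// conversion between the two types.
struct Port(u16);

impl From<u16> for Port {
    fn from(raw: u16) -> Self {
        Port(raw)
    }
}

fn main() {
    // `From<T> for U` implies `Into<U> for T`, so both spellings work.
    let p: Port = 8080u16.into();
    let q = Port::from(443);
    assert_eq!(p.0, 8080);
    assert_eq!(q.0, 443);

    // The std case cited above: wrapping an Ipv6Addr into an IpAddr.
    let addr = IpAddr::from(Ipv6Addr::LOCALHOST);
    assert!(addr.is_loopback());
}
```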

View File

@ -1475,7 +1475,7 @@ impl<T> Option<T> {
#[stable(feature = "rust1", since = "1.0.0")]
pub fn or(self, optb: Option<T>) -> Option<T> {
match self {
Some(x) => Some(x),
x @ Some(_) => x,
None => optb,
}
}
@ -1500,7 +1500,7 @@ impl<T> Option<T> {
F: FnOnce() -> Option<T>,
{
match self {
Some(x) => Some(x),
x @ Some(_) => x,
None => f(),
}
}
@ -1530,8 +1530,8 @@ impl<T> Option<T> {
#[stable(feature = "option_xor", since = "1.37.0")]
pub fn xor(self, optb: Option<T>) -> Option<T> {
match (self, optb) {
(Some(a), None) => Some(a),
(None, Some(b)) => Some(b),
(a @ Some(_), None) => a,
(None, b @ Some(_)) => b,
_ => None,
}
}
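For context, a small sketch (not part of the diff) of why the `x @ Some(_)` form is equivalent: the `@` binding hands back the matched value as-is instead of destructuring it and rebuilding a new `Some`.

```rust
// Two formulations of `or`-style logic over options.
fn or_rewrap<T>(a: Option<T>, b: Option<T>) -> Option<T> {
    match a {
        Some(x) => Some(x), // destructure, then construct a fresh Some
        None => b,
    }
}

fn or_binding<T>(a: Option<T>, b: Option<T>) -> Option<T> {
    match a {
        x @ Some(_) => x, // return the original value untouched
        None => b,
    }
}

fn main() {
    assert_eq!(or_rewrap(Some(1), None), Some(1));
    assert_eq!(or_binding(Some(1), None), Some(1));
    assert_eq!(or_binding(None::<i32>, Some(2)), Some(2));
}
```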

View File

@ -36,6 +36,10 @@ object = { version = "0.32.0", default-features = false, optional = true, featur
rand = { version = "0.8.5", default-features = false, features = ["alloc"] }
rand_xorshift = "0.3.0"
[build-dependencies]
# Dependency of the `backtrace` crate's build script
cc = "1.0.67"
[target.'cfg(any(all(target_family = "wasm", target_os = "unknown"), target_os = "xous", all(target_vendor = "fortanix", target_env = "sgx")))'.dependencies]
dlmalloc = { version = "0.2.4", features = ['rustc-dep-of-std'] }

View File

@ -1,5 +1,11 @@
use std::env;
// backtrace-rs requires a feature check on Android targets, so
// we need to run its build.rs as well.
#[allow(unused_extern_crates)]
#[path = "../backtrace/build.rs"]
mod backtrace_build_rs;
fn main() {
println!("cargo:rerun-if-changed=build.rs");
let target = env::var("TARGET").expect("TARGET was not set");
@ -58,4 +64,6 @@ fn main() {
}
println!("cargo:rustc-env=STD_ENV_ARCH={}", env::var("CARGO_CFG_TARGET_ARCH").unwrap());
println!("cargo:rustc-cfg=backtrace_in_libstd");
backtrace_build_rs::main();
}

View File

@ -125,7 +125,7 @@ impl Once {
///
/// # Panics
///
/// The closure `f` will only be executed once if this is called
/// The closure `f` will only be executed once even if this is called
/// concurrently amongst many threads. If that closure panics, however, then
/// it will *poison* this [`Once`] instance, causing all future invocations of
/// `call_once` to also panic.
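A minimal usage sketch of the behaviour the corrected sentence describes, using the stable `std::sync::Once` API (the thread count here is arbitrary): only one of the concurrent `call_once` closures runs, and the rest return once it has finished.

```rust
use std::sync::Once;
use std::thread;

static INIT: Once = Once::new();

fn main() {
    let handles: Vec<_> = (0..4)
        .map(|_| {
            thread::spawn(|| {
                // Exactly one of these closures executes; the other calls
                // block until it completes and then return without running.
                INIT.call_once(|| println!("initialized exactly once"));
            })
        })
        .collect();
    for h in handles {
        h.join().unwrap();
    }
}
```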

View File

@ -67,7 +67,7 @@ pub unsafe fn register_dtor(t: *mut u8, dtor: unsafe extern "C" fn(*mut u8)) {
// workaround below is to register, via _tlv_atexit, a custom DTOR list once per
// thread. thread_local dtors are pushed to the DTOR list without calling
// _tlv_atexit.
#[cfg(any(target_os = "macos", target_os = "ios", target_os = "watchos"))]
#[cfg(any(target_os = "macos", target_os = "ios", target_os = "watchos", target_os = "tvos"))]
pub unsafe fn register_dtor(t: *mut u8, dtor: unsafe extern "C" fn(*mut u8)) {
use crate::cell::Cell;
use crate::mem;

View File

@ -6,11 +6,101 @@ document.
## Unreleased / Beta / In Rust Nightly
[37f4c172...master](https://github.com/rust-lang/rust-clippy/compare/37f4c172...master)
[1e8fdf49...master](https://github.com/rust-lang/rust-clippy/compare/1e8fdf49...master)
## Rust 1.73
Current stable, released 2023-10-05
[View all 103 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2023-07-02T12%3A24%3A40Z..2023-08-11T11%3A09%3A56Z+base%3Amaster)
### New Lints
* [`impossible_comparisons`]
[#10843](https://github.com/rust-lang/rust-clippy/pull/10843)
* [`redundant_comparisons`]
[#10843](https://github.com/rust-lang/rust-clippy/pull/10843)
* [`ignored_unit_patterns`]
[#11242](https://github.com/rust-lang/rust-clippy/pull/11242)
* [`readonly_write_lock`]
[#11210](https://github.com/rust-lang/rust-clippy/pull/11210)
* [`filter_map_bool_then`]
[#11115](https://github.com/rust-lang/rust-clippy/pull/11115)
* [`needless_return_with_question_mark`]
[#11031](https://github.com/rust-lang/rust-clippy/pull/11031)
* [`redundant_guards`]
[#10955](https://github.com/rust-lang/rust-clippy/pull/10955)
* [`redundant_locals`]
[#10885](https://github.com/rust-lang/rust-clippy/pull/10885)
* [`absolute_paths`]
[#11003](https://github.com/rust-lang/rust-clippy/pull/11003)
* [`error_impl_error`]
[#11107](https://github.com/rust-lang/rust-clippy/pull/11107)
* [`iter_skip_zero`]
[#11046](https://github.com/rust-lang/rust-clippy/pull/11046)
* [`string_lit_chars_any`]
[#11052](https://github.com/rust-lang/rust-clippy/pull/11052)
* [`four_forward_slashes`]
[#11140](https://github.com/rust-lang/rust-clippy/pull/11140)
* [`format_collect`]
[#11116](https://github.com/rust-lang/rust-clippy/pull/11116)
* [`needless_pass_by_ref_mut`]
[#10900](https://github.com/rust-lang/rust-clippy/pull/10900)
* [`manual_is_infinite`]
[#11049](https://github.com/rust-lang/rust-clippy/pull/11049)
* [`manual_is_finite`]
[#11049](https://github.com/rust-lang/rust-clippy/pull/11049)
* [`incorrect_partial_ord_impl_on_ord_type`]
[#10788](https://github.com/rust-lang/rust-clippy/pull/10788)
* [`read_line_without_trim`]
[#10970](https://github.com/rust-lang/rust-clippy/pull/10970)
* [`type_id_on_box`]
[#10987](https://github.com/rust-lang/rust-clippy/pull/10987)
### Moves and Deprecations
* Renamed `unwrap_or_else_default` to [`unwrap_or_default`]
[#10120](https://github.com/rust-lang/rust-clippy/pull/10120)
* Moved [`tuple_array_conversions`] to `pedantic` (Now allow-by-default)
[#11146](https://github.com/rust-lang/rust-clippy/pull/11146)
* Moved [`arc_with_non_send_sync`] to `suspicious` (Now warn-by-default)
[#11104](https://github.com/rust-lang/rust-clippy/pull/11104)
* Moved [`needless_raw_string_hashes`] to `pedantic` (Now allow-by-default)
[#11415](https://github.com/rust-lang/rust-clippy/pull/11415)
### Enhancements
* [`unwrap_used`]: No longer lints on the never-type or never-like enums
[#11252](https://github.com/rust-lang/rust-clippy/pull/11252)
* [`expect_used`]: No longer lints on the never-type or never-like enums
[#11252](https://github.com/rust-lang/rust-clippy/pull/11252)
### False Positive Fixes
* [`panic_in_result_fn`]: No longer triggers on `todo!`, `unimplemented!`, `unreachable!`
[#11123](https://github.com/rust-lang/rust-clippy/pull/11123)
### Suggestion Fixes/Improvements
* [`semicolon_if_nothing_returned`]: The suggestion is now machine-applicable with rustfix
[#11083](https://github.com/rust-lang/rust-clippy/pull/11083)
### ICE Fixes
* [`filter_map_bool_then`]: No longer crashes on late-bound regions
[#11318](https://github.com/rust-lang/rust-clippy/pull/11318)
* [`unwrap_or_default`]: No longer crashes on alias types for local items
[#11258](https://github.com/rust-lang/rust-clippy/pull/11258)
* [`unnecessary_literal_unwrap`]: No longer crashes on `None.unwrap_or_default()`
[#11106](https://github.com/rust-lang/rust-clippy/pull/11106)
* Fixed MIR-related ICE
[#11130](https://github.com/rust-lang/rust-clippy/pull/11130)
* [`missing_fields_in_debug`]: No longer crashes on non-ADT self types
[#11069](https://github.com/rust-lang/rust-clippy/pull/11069)
## Rust 1.72
Current stable, released 2023-08-24
Released 2023-08-24
[View all 131 merged pull requests](https://github.com/rust-lang/rust-clippy/pulls?q=merged%3A2023-05-22T14%3A53%3A59Z..2023-07-01T22%3A57%3A20Z+base%3Amaster)
@ -5011,6 +5101,7 @@ Released 2018-09-13
[`integer_division`]: https://rust-lang.github.io/rust-clippy/master/index.html#integer_division
[`into_iter_on_array`]: https://rust-lang.github.io/rust-clippy/master/index.html#into_iter_on_array
[`into_iter_on_ref`]: https://rust-lang.github.io/rust-clippy/master/index.html#into_iter_on_ref
[`into_iter_without_iter`]: https://rust-lang.github.io/rust-clippy/master/index.html#into_iter_without_iter
[`invalid_atomic_ordering`]: https://rust-lang.github.io/rust-clippy/master/index.html#invalid_atomic_ordering
[`invalid_null_ptr_usage`]: https://rust-lang.github.io/rust-clippy/master/index.html#invalid_null_ptr_usage
[`invalid_ref`]: https://rust-lang.github.io/rust-clippy/master/index.html#invalid_ref
@ -5036,6 +5127,7 @@ Released 2018-09-13
[`iter_skip_next`]: https://rust-lang.github.io/rust-clippy/master/index.html#iter_skip_next
[`iter_skip_zero`]: https://rust-lang.github.io/rust-clippy/master/index.html#iter_skip_zero
[`iter_with_drain`]: https://rust-lang.github.io/rust-clippy/master/index.html#iter_with_drain
[`iter_without_into_iter`]: https://rust-lang.github.io/rust-clippy/master/index.html#iter_without_into_iter
[`iterator_step_by_zero`]: https://rust-lang.github.io/rust-clippy/master/index.html#iterator_step_by_zero
[`just_underscores_and_digits`]: https://rust-lang.github.io/rust-clippy/master/index.html#just_underscores_and_digits
[`large_const_arrays`]: https://rust-lang.github.io/rust-clippy/master/index.html#large_const_arrays
@ -5072,6 +5164,7 @@ Released 2018-09-13
[`manual_find`]: https://rust-lang.github.io/rust-clippy/master/index.html#manual_find
[`manual_find_map`]: https://rust-lang.github.io/rust-clippy/master/index.html#manual_find_map
[`manual_flatten`]: https://rust-lang.github.io/rust-clippy/master/index.html#manual_flatten
[`manual_hash_one`]: https://rust-lang.github.io/rust-clippy/master/index.html#manual_hash_one
[`manual_instant_elapsed`]: https://rust-lang.github.io/rust-clippy/master/index.html#manual_instant_elapsed
[`manual_is_ascii_check`]: https://rust-lang.github.io/rust-clippy/master/index.html#manual_is_ascii_check
[`manual_is_finite`]: https://rust-lang.github.io/rust-clippy/master/index.html#manual_is_finite

View File

@ -1,6 +1,6 @@
[package]
name = "clippy"
version = "0.1.74"
version = "0.1.75"
description = "A bunch of helpful lints to avoid common pitfalls in Rust"
repository = "https://github.com/rust-lang/rust-clippy"
readme = "README.md"
@ -25,6 +25,8 @@ clippy_lints = { path = "clippy_lints" }
rustc_tools_util = "0.3.0"
tempfile = { version = "3.2", optional = true }
termize = "0.1"
color-print = "0.3.4" # Sync version with Cargo
anstream = "0.5.0"
[dev-dependencies]
ui_test = "0.20"

View File

@ -261,7 +261,7 @@ impl EarlyLintPass for FooFunctions {}
[declare_clippy_lint]: https://github.com/rust-lang/rust-clippy/blob/557f6848bd5b7183f55c1e1522a326e9e1df6030/clippy_lints/src/lib.rs#L60
[example_lint_page]: https://rust-lang.github.io/rust-clippy/master/index.html#redundant_closure
[lint_naming]: https://rust-lang.github.io/rfcs/0344-conventions-galore.html#lints
[category_level_mapping]: https://github.com/rust-lang/rust-clippy/blob/557f6848bd5b7183f55c1e1522a326e9e1df6030/clippy_lints/src/lib.rs#L110
[category_level_mapping]: ../index.html
## Lint registration

View File

@ -151,6 +151,7 @@ The minimum rust version that the project supports
* [`type_repetition_in_bounds`](https://rust-lang.github.io/rust-clippy/master/index.html#type_repetition_in_bounds)
* [`tuple_array_conversions`](https://rust-lang.github.io/rust-clippy/master/index.html#tuple_array_conversions)
* [`manual_try_fold`](https://rust-lang.github.io/rust-clippy/master/index.html#manual_try_fold)
* [`manual_hash_one`](https://rust-lang.github.io/rust-clippy/master/index.html#manual_hash_one)
## `cognitive-complexity-threshold`

View File

@ -58,7 +58,7 @@ pub fn create(
};
create_lint(&lint, msrv).context("Unable to create lint implementation")?;
create_test(&lint).context("Unable to create a test for the new lint")?;
create_test(&lint, msrv).context("Unable to create a test for the new lint")?;
if lint.ty.is_none() {
add_lint(&lint, msrv).context("Unable to add lint to clippy_lints/src/lib.rs")?;
@ -88,15 +88,21 @@ fn create_lint(lint: &LintData<'_>, enable_msrv: bool) -> io::Result<()> {
}
}
fn create_test(lint: &LintData<'_>) -> io::Result<()> {
fn create_project_layout<P: Into<PathBuf>>(lint_name: &str, location: P, case: &str, hint: &str) -> io::Result<()> {
fn create_test(lint: &LintData<'_>, msrv: bool) -> io::Result<()> {
fn create_project_layout<P: Into<PathBuf>>(
lint_name: &str,
location: P,
case: &str,
hint: &str,
msrv: bool,
) -> io::Result<()> {
let mut path = location.into().join(case);
fs::create_dir(&path)?;
write_file(path.join("Cargo.toml"), get_manifest_contents(lint_name, hint))?;
path.push("src");
fs::create_dir(&path)?;
write_file(path.join("main.rs"), get_test_file_contents(lint_name))?;
write_file(path.join("main.rs"), get_test_file_contents(lint_name, msrv))?;
Ok(())
}
@ -106,13 +112,25 @@ fn create_test(lint: &LintData<'_>) -> io::Result<()> {
let test_dir = lint.project_root.join(&relative_test_dir);
fs::create_dir(&test_dir)?;
create_project_layout(lint.name, &test_dir, "fail", "Content that triggers the lint goes here")?;
create_project_layout(lint.name, &test_dir, "pass", "This file should not trigger the lint")?;
create_project_layout(
lint.name,
&test_dir,
"fail",
"Content that triggers the lint goes here",
msrv,
)?;
create_project_layout(
lint.name,
&test_dir,
"pass",
"This file should not trigger the lint",
false,
)?;
println!("Generated test directories: `{relative_test_dir}/pass`, `{relative_test_dir}/fail`");
} else {
let test_path = format!("tests/ui/{}.rs", lint.name);
let test_contents = get_test_file_contents(lint.name);
let test_contents = get_test_file_contents(lint.name, msrv);
write_file(lint.project_root.join(&test_path), test_contents)?;
println!("Generated test file: `{test_path}`");
@ -194,8 +212,8 @@ pub(crate) fn get_stabilization_version() -> String {
parse_manifest(&contents).expect("Unable to find package version in `Cargo.toml`")
}
fn get_test_file_contents(lint_name: &str) -> String {
formatdoc!(
fn get_test_file_contents(lint_name: &str, msrv: bool) -> String {
let mut test = formatdoc!(
r#"
#![warn(clippy::{lint_name})]
@ -203,7 +221,29 @@ fn get_test_file_contents(lint_name: &str) -> String {
// test code goes here
}}
"#
)
);
if msrv {
let _ = writedoc!(
test,
r#"
// TODO: set xx to the version one below the MSRV used by the lint, and yy to
// the version used by the lint
#[clippy::msrv = "1.xx"]
fn msrv_1_xx() {{
// a simple example that would trigger the lint if the MSRV were met
}}
#[clippy::msrv = "1.yy"]
fn msrv_1_yy() {{
// the same example as above
}}
"#
);
}
test
}
fn get_manifest_contents(lint_name: &str, hint: &str) -> String {
@ -258,7 +298,7 @@ fn get_lint_file_contents(lint: &LintData<'_>, enable_msrv: bool) -> String {
)
});
let _: fmt::Result = write!(result, "{}", get_lint_declaration(&name_upper, category));
let _: fmt::Result = writeln!(result, "{}", get_lint_declaration(&name_upper, category));
result.push_str(&if enable_msrv {
formatdoc!(
@ -281,7 +321,6 @@ fn get_lint_file_contents(lint: &LintData<'_>, enable_msrv: bool) -> String {
}}
// TODO: Add MSRV level to `clippy_utils/src/msrvs.rs` if needed.
// TODO: Add MSRV test to `tests/ui/min_rust_version_attr.rs`.
// TODO: Update msrv config comment in `clippy_lints/src/utils/conf.rs`
"#
)
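For reference (not part of the diff): with the msrv flag enabled, the generated test file from the template above would come out roughly like this for a hypothetical lint `foo_bar` whose assumed MSRV is 1.70, with the `1.xx`/`1.yy` placeholders filled in as the TODO describes:

#![warn(clippy::foo_bar)]

fn main() {
    // test code goes here
}

#[clippy::msrv = "1.69"]
fn msrv_1_69() {
    // a simple example that would trigger the lint if the MSRV were met
}

#[clippy::msrv = "1.70"]
fn msrv_1_70() {
    // the same example as above
}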

View File

@ -1,6 +1,6 @@
[package]
name = "clippy_lints"
version = "0.1.74"
version = "0.1.75"
description = "A bunch of helpful lints to avoid common pitfalls in Rust"
repository = "https://github.com/rust-lang/rust-clippy"
readme = "README.md"
@ -28,6 +28,9 @@ semver = "1.0"
rustc-semver = "1.1"
url = "2.2"
[dev-dependencies]
walkdir = "2.3"
[features]
deny-warnings = ["clippy_utils/deny-warnings"]
# build clippy with internal lints enabled, off by default

View File

@ -8,6 +8,7 @@ use rustc_middle::lint::in_external_macro;
use rustc_session::{declare_lint_pass, declare_tool_lint};
declare_clippy_lint! {
/// ### What it does
/// Checks for usage of the `#[allow]` attribute and suggests replacing it with
/// the `#[expect]` (See [RFC 2383](https://rust-lang.github.io/rfcs/2383-lint-reasons.html))
///
@ -19,7 +20,6 @@ declare_clippy_lint! {
/// (`#![allow]`) are usually used to enable or disable lints on a global scale.
///
/// ### Why is this bad?
///
/// `#[expect]` attributes suppress the lint emission, but emit a warning, if
/// the expectation is unfulfilled. This can be useful to be notified when the
/// lint is no longer triggered.

View File

@ -183,7 +183,7 @@ declare_clippy_lint! {
declare_clippy_lint! {
/// ### What it does
/// Checks for empty lines after documenation comments.
/// Checks for empty lines after documentation comments.
///
/// ### Why is this bad?
/// The documentation comment was most likely meant to be an inner attribute or regular comment.
@ -795,7 +795,7 @@ impl EarlyLintPass for EarlyAttributes {
/// Check for empty lines after outer attributes.
///
/// Attributes and documenation comments are both considered outer attributes
/// Attributes and documentation comments are both considered outer attributes
/// by the AST. However, the average user likely considers them to be different.
/// Checking for empty lines after each of these attributes is split into two different
/// lints but can share the same logic.

View File

@ -229,6 +229,8 @@ pub(crate) static LINTS: &[&crate::LintInfo] = &[
crate::items_after_statements::ITEMS_AFTER_STATEMENTS_INFO,
crate::items_after_test_module::ITEMS_AFTER_TEST_MODULE_INFO,
crate::iter_not_returning_iterator::ITER_NOT_RETURNING_ITERATOR_INFO,
crate::iter_without_into_iter::INTO_ITER_WITHOUT_ITER_INFO,
crate::iter_without_into_iter::ITER_WITHOUT_INTO_ITER_INFO,
crate::large_const_arrays::LARGE_CONST_ARRAYS_INFO,
crate::large_enum_variant::LARGE_ENUM_VARIANT_INFO,
crate::large_futures::LARGE_FUTURES_INFO,
@ -280,6 +282,7 @@ pub(crate) static LINTS: &[&crate::LintInfo] = &[
crate::manual_clamp::MANUAL_CLAMP_INFO,
crate::manual_float_methods::MANUAL_IS_FINITE_INFO,
crate::manual_float_methods::MANUAL_IS_INFINITE_INFO,
crate::manual_hash_one::MANUAL_HASH_ONE_INFO,
crate::manual_is_ascii_check::MANUAL_IS_ASCII_CHECK_INFO,
crate::manual_let_else::MANUAL_LET_ELSE_INFO,
crate::manual_main_separator_str::MANUAL_MAIN_SEPARATOR_STR_INFO,

View File

@ -1,7 +1,7 @@
//! lint on C-like enums that are `repr(isize/usize)` and have values that
//! don't fit into an `i32`
use clippy_utils::consts::{miri_to_const, Constant};
use clippy_utils::consts::{mir_to_const, Constant};
use clippy_utils::diagnostics::span_lint;
use rustc_hir::{Item, ItemKind};
use rustc_lint::{LateContext, LateLintPass};
@ -51,7 +51,7 @@ impl<'tcx> LateLintPass<'tcx> for UnportableVariant {
.const_eval_poly(def_id.to_def_id())
.ok()
.map(|val| rustc_middle::mir::Const::from_value(val, ty));
if let Some(Constant::Int(val)) = constant.and_then(|c| miri_to_const(cx, c)) {
if let Some(Constant::Int(val)) = constant.and_then(|c| mir_to_const(cx, c)) {
if let ty::Adt(adt, _) = ty.kind() {
if adt.is_enum() {
ty = adt.repr().discr_type().to_ty(cx.tcx);

View File

@ -27,7 +27,7 @@ declare_clippy_lint! {
///
/// impl std::error::Error for Error { ... }
/// ```
#[clippy::version = "1.72.0"]
#[clippy::version = "1.73.0"]
pub ERROR_IMPL_ERROR,
restriction,
"exported types named `Error` that implement `Error`"

View File

@ -28,7 +28,7 @@ declare_clippy_lint! {
/// // ...
/// }
/// ```
#[clippy::version = "1.72.0"]
#[clippy::version = "1.73.0"]
pub FOUR_FORWARD_SLASHES,
suspicious,
"comments with 4 forward slashes (`////`) likely intended to be doc comments (`///`)"

View File

@ -37,6 +37,10 @@ declare_lint_pass!(IgnoredUnitPatterns => [IGNORED_UNIT_PATTERNS]);
impl<'tcx> LateLintPass<'tcx> for IgnoredUnitPatterns {
fn check_pat(&mut self, cx: &LateContext<'tcx>, pat: &'tcx hir::Pat<'tcx>) {
if pat.span.from_expansion() {
return;
}
match cx.tcx.hir().get_parent(pat.hir_id) {
Node::Param(param) if matches!(cx.tcx.hir().get_parent(param.hir_id), Node::Item(_)) => {
// Ignore function parameters

View File

@ -6,9 +6,8 @@ use rustc_hir as hir;
use rustc_hir::intravisit::{walk_body, walk_expr, walk_inf, walk_ty, Visitor};
use rustc_hir::{Body, Expr, ExprKind, GenericArg, Item, ItemKind, QPath, TyKind};
use rustc_hir_analysis::hir_ty_to_ty;
use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::hir::nested_filter;
use rustc_middle::lint::in_external_macro;
use rustc_middle::ty::{Ty, TypeckResults};
use rustc_session::{declare_lint_pass, declare_tool_lint};
use rustc_span::source_map::Span;
@ -162,7 +161,7 @@ impl<'tcx> LateLintPass<'tcx> for ImplicitHasher {
vis.visit_ty(ty);
for target in &vis.found {
if in_external_macro(cx.sess(), generics.span) {
if generics.span.from_expansion() {
continue;
}
let generics_suggestion_span = generics.span.substitute_dummy({

View File

@ -1,10 +1,12 @@
use clippy_utils::diagnostics::span_lint_and_help;
use clippy_utils::{is_from_proc_macro, is_in_cfg_test};
use rustc_hir::{HirId, ItemId, ItemKind, Mod};
use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_middle::lint::in_external_macro;
use clippy_utils::diagnostics::span_lint_hir_and_then;
use clippy_utils::source::snippet_opt;
use clippy_utils::{fulfill_or_allowed, is_cfg_test, is_from_proc_macro};
use rustc_errors::{Applicability, SuggestionStyle};
use rustc_hir::{HirId, Item, ItemKind, Mod};
use rustc_lint::{LateContext, LateLintPass};
use rustc_session::{declare_lint_pass, declare_tool_lint};
use rustc_span::{sym, Span};
use rustc_span::hygiene::AstPass;
use rustc_span::{sym, ExpnKind};
declare_clippy_lint! {
/// ### What it does
@ -41,46 +43,72 @@ declare_clippy_lint! {
declare_lint_pass!(ItemsAfterTestModule => [ITEMS_AFTER_TEST_MODULE]);
fn cfg_test_module<'tcx>(cx: &LateContext<'tcx>, item: &Item<'tcx>) -> bool {
if let ItemKind::Mod(test_mod) = item.kind
&& item.span.hi() == test_mod.spans.inner_span.hi()
&& is_cfg_test(cx.tcx, item.hir_id())
&& !item.span.from_expansion()
&& !is_from_proc_macro(cx, item)
{
true
} else {
false
}
}
impl LateLintPass<'_> for ItemsAfterTestModule {
fn check_mod(&mut self, cx: &LateContext<'_>, _: &Mod<'_>, _: HirId) {
let mut was_test_mod_visited = false;
let mut test_mod_span: Option<Span> = None;
fn check_mod(&mut self, cx: &LateContext<'_>, module: &Mod<'_>, _: HirId) {
let mut items = module.item_ids.iter().map(|&id| cx.tcx.hir().item(id));
let hir = cx.tcx.hir();
let items = hir.items().collect::<Vec<ItemId>>();
let Some((mod_pos, test_mod)) = items.by_ref().enumerate().find(|(_, item)| cfg_test_module(cx, item)) else {
return;
};
for (i, itid) in items.iter().enumerate() {
let item = hir.item(*itid);
let after: Vec<_> = items
.filter(|item| {
// Ignore the generated test main function
!(item.ident.name == sym::main
&& item.span.ctxt().outer_expn_data().kind == ExpnKind::AstPass(AstPass::TestHarness))
})
.collect();
if_chain! {
if was_test_mod_visited;
if i == (items.len() - 3 /* Weird magic number (HIR-translation behaviour) */);
if cx.sess().source_map().lookup_char_pos(item.span.lo()).file.name_hash
== cx.sess().source_map().lookup_char_pos(test_mod_span.unwrap().lo()).file.name_hash; // Will never fail
if !matches!(item.kind, ItemKind::Mod(_));
if !is_in_cfg_test(cx.tcx, itid.hir_id()); // The item isn't in the testing module itself
if !in_external_macro(cx.sess(), item.span);
if !is_from_proc_macro(cx, item);
if let Some(last) = after.last()
&& after.iter().all(|&item| {
!matches!(item.kind, ItemKind::Mod(_))
&& !item.span.from_expansion()
&& !is_from_proc_macro(cx, item)
})
&& !fulfill_or_allowed(cx, ITEMS_AFTER_TEST_MODULE, after.iter().map(|item| item.hir_id()))
{
let def_spans: Vec<_> = std::iter::once(test_mod.owner_id)
.chain(after.iter().map(|item| item.owner_id))
.map(|id| cx.tcx.def_span(id))
.collect();
then {
span_lint_and_help(cx, ITEMS_AFTER_TEST_MODULE, test_mod_span.unwrap().with_hi(item.span.hi()), "items were found after the testing module", None, "move the items to before the testing module was defined");
}};
if let ItemKind::Mod(module) = item.kind && item.span.hi() == module.spans.inner_span.hi() {
// Check that it works the same way, the only I way I've found for #10713
for attr in cx.tcx.get_attrs(item.owner_id.to_def_id(), sym::cfg) {
if_chain! {
if attr.has_name(sym::cfg);
if let Some(mitems) = attr.meta_item_list();
if let [mitem] = &*mitems;
if mitem.has_name(sym::test);
then {
was_test_mod_visited = true;
test_mod_span = Some(item.span);
}
span_lint_hir_and_then(
cx,
ITEMS_AFTER_TEST_MODULE,
test_mod.hir_id(),
def_spans,
"items after a test module",
|diag| {
if let Some(prev) = mod_pos.checked_sub(1)
&& let prev = cx.tcx.hir().item(module.item_ids[prev])
&& let items_span = last.span.with_lo(test_mod.span.hi())
&& let Some(items) = snippet_opt(cx, items_span)
{
diag.multipart_suggestion_with_style(
"move the items to before the test module was defined",
vec![
(prev.span.shrink_to_hi(), items),
(items_span, String::new())
],
Applicability::MachineApplicable,
SuggestionStyle::HideCodeAlways,
);
}
}
}
},
);
}
}
}
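Illustrative sketch (not from the diff) of the pattern the rewritten pass reports, based on the lint message and suggestion above:

#[cfg(test)]
mod tests {
    #[test]
    fn it_works() {}
}

// Both items below are reported as "items after a test module"; the machine-applicable
// suggestion moves them to just before `mod tests`.
fn helper() {}
const LIMIT: usize = 10;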

View File

@ -0,0 +1,249 @@
use clippy_utils::diagnostics::span_lint_and_then;
use clippy_utils::get_parent_as_impl;
use clippy_utils::source::snippet;
use clippy_utils::ty::{implements_trait, make_normalized_projection};
use rustc_ast::Mutability;
use rustc_errors::Applicability;
use rustc_hir::{FnRetTy, ImplItemKind, ImplicitSelfKind, ItemKind, TyKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_middle::ty::{self, Ty};
use rustc_session::{declare_lint_pass, declare_tool_lint};
use rustc_span::{sym, Symbol};
declare_clippy_lint! {
/// ### What it does
/// Looks for `iter` and `iter_mut` methods without an associated `IntoIterator for (&|&mut) Type` implementation.
///
/// ### Why is this bad?
/// It's not bad, but having them is idiomatic and allows the type to be used in for loops directly
/// (`for val in &iter {}`), without having to first call `iter()` or `iter_mut()`.
///
/// ### Example
/// ```rust
/// struct MySlice<'a>(&'a [u8]);
/// impl<'a> MySlice<'a> {
/// pub fn iter(&self) -> std::slice::Iter<'a, u8> {
/// self.0.iter()
/// }
/// }
/// ```
/// Use instead:
/// ```rust
/// struct MySlice<'a>(&'a [u8]);
/// impl<'a> MySlice<'a> {
/// pub fn iter(&self) -> std::slice::Iter<'a, u8> {
/// self.0.iter()
/// }
/// }
/// impl<'a> IntoIterator for &MySlice<'a> {
/// type Item = &'a u8;
/// type IntoIter = std::slice::Iter<'a, u8>;
/// fn into_iter(self) -> Self::IntoIter {
/// self.iter()
/// }
/// }
/// ```
#[clippy::version = "1.74.0"]
pub ITER_WITHOUT_INTO_ITER,
pedantic,
"implementing `iter(_mut)` without an associated `IntoIterator for (&|&mut) Type` impl"
}
declare_clippy_lint! {
/// ### What it does
/// This is the opposite of the `iter_without_into_iter` lint.
/// It looks for `IntoIterator for (&|&mut) Type` implementations without an inherent `iter` or `iter_mut` method.
///
/// ### Why is this bad?
/// It's not bad, but having them is idiomatic and allows the type to be used in iterator chains
/// by just calling `.iter()`, instead of the more awkward `<&Type>::into_iter` or `(&val).into_iter()` syntax
/// in case of ambiguity with another `IntoIterator` impl.
///
/// ### Example
/// ```rust
/// struct MySlice<'a>(&'a [u8]);
/// impl<'a> IntoIterator for &MySlice<'a> {
/// type Item = &'a u8;
/// type IntoIter = std::slice::Iter<'a, u8>;
/// fn into_iter(self) -> Self::IntoIter {
/// self.0.iter()
/// }
/// }
/// ```
/// Use instead:
/// ```rust
/// struct MySlice<'a>(&'a [u8]);
/// impl<'a> MySlice<'a> {
/// pub fn iter(&self) -> std::slice::Iter<'a, u8> {
/// self.into_iter()
/// }
/// }
/// impl<'a> IntoIterator for &MySlice<'a> {
/// type Item = &'a u8;
/// type IntoIter = std::slice::Iter<'a, u8>;
/// fn into_iter(self) -> Self::IntoIter {
/// self.0.iter()
/// }
/// }
/// ```
#[clippy::version = "1.74.0"]
pub INTO_ITER_WITHOUT_ITER,
pedantic,
"implementing `IntoIterator for (&|&mut) Type` without an inherent `iter(_mut)` method"
}
declare_lint_pass!(IterWithoutIntoIter => [ITER_WITHOUT_INTO_ITER, INTO_ITER_WITHOUT_ITER]);
/// Checks if a given type is nameable in a trait (impl).
/// RPIT is stable, but impl Trait in traits is not (yet), so when we have
/// a function such as `fn iter(&self) -> impl IntoIterator`, we can't
/// suggest `type IntoIter = impl IntoIterator`.
fn is_nameable_in_impl_trait(ty: &rustc_hir::Ty<'_>) -> bool {
!matches!(ty.kind, TyKind::OpaqueDef(..))
}
fn type_has_inherent_method(cx: &LateContext<'_>, ty: Ty<'_>, method_name: Symbol) -> bool {
if let Some(ty_did) = ty.ty_adt_def().map(ty::AdtDef::did) {
cx.tcx.inherent_impls(ty_did).iter().any(|&did| {
cx.tcx
.associated_items(did)
.filter_by_name_unhygienic(method_name)
.next()
.is_some_and(|item| item.kind == ty::AssocKind::Fn)
})
} else {
false
}
}
impl LateLintPass<'_> for IterWithoutIntoIter {
fn check_item(&mut self, cx: &LateContext<'_>, item: &rustc_hir::Item<'_>) {
if let ItemKind::Impl(imp) = item.kind
&& let TyKind::Ref(_, self_ty_without_ref) = &imp.self_ty.kind
&& let Some(trait_ref) = imp.of_trait
&& trait_ref.trait_def_id().is_some_and(|did| cx.tcx.is_diagnostic_item(sym::IntoIterator, did))
&& let &ty::Ref(_, ty, mtbl) = cx.tcx.type_of(item.owner_id).instantiate_identity().kind()
&& let expected_method_name = match mtbl {
Mutability::Mut => sym::iter_mut,
Mutability::Not => sym::iter,
}
&& !type_has_inherent_method(cx, ty, expected_method_name)
&& let Some(iter_assoc_span) = imp.items.iter().find_map(|item| {
if item.ident.name == sym!(IntoIter) {
Some(cx.tcx.hir().impl_item(item.id).expect_type().span)
} else {
None
}
})
{
span_lint_and_then(
cx,
INTO_ITER_WITHOUT_ITER,
item.span,
&format!("`IntoIterator` implemented for a reference type without an `{expected_method_name}` method"),
|diag| {
// The suggestion forwards to the `IntoIterator` impl and uses a form of UFCS
// to avoid name ambiguities, as there might be an inherent into_iter method
// that we don't want to call.
let sugg = format!(
"
impl {self_ty_without_ref} {{
fn {expected_method_name}({ref_self}self) -> {iter_ty} {{
<{ref_self}Self as IntoIterator>::into_iter(self)
}}
}}
",
self_ty_without_ref = snippet(cx, self_ty_without_ref.ty.span, ".."),
ref_self = mtbl.ref_prefix_str(),
iter_ty = snippet(cx, iter_assoc_span, ".."),
);
diag.span_suggestion_verbose(
item.span.shrink_to_lo(),
format!("consider implementing `{expected_method_name}`"),
sugg,
// Just like iter_without_into_iter, this suggestion is on a best effort basis
// and requires potentially adding lifetimes or moving them around.
Applicability::Unspecified
);
}
);
}
}
fn check_impl_item(&mut self, cx: &LateContext<'_>, item: &rustc_hir::ImplItem<'_>) {
let item_did = item.owner_id.to_def_id();
let (borrow_prefix, expected_implicit_self) = match item.ident.name {
sym::iter => ("&", ImplicitSelfKind::ImmRef),
sym::iter_mut => ("&mut ", ImplicitSelfKind::MutRef),
_ => return,
};
if let ImplItemKind::Fn(sig, _) = item.kind
&& let FnRetTy::Return(ret) = sig.decl.output
&& is_nameable_in_impl_trait(ret)
&& cx.tcx.generics_of(item_did).params.is_empty()
&& sig.decl.implicit_self == expected_implicit_self
&& sig.decl.inputs.len() == 1
&& let Some(imp) = get_parent_as_impl(cx.tcx, item.hir_id())
&& imp.of_trait.is_none()
&& let sig = cx.tcx.liberate_late_bound_regions(
item_did,
cx.tcx.fn_sig(item_did).instantiate_identity()
)
&& let ref_ty = sig.inputs()[0]
&& let Some(into_iter_did) = cx.tcx.get_diagnostic_item(sym::IntoIterator)
&& let Some(iterator_did) = cx.tcx.get_diagnostic_item(sym::Iterator)
&& let ret_ty = sig.output()
// Order is important here, we need to check that the `fn iter` return type actually implements `IntoIterator`
// *before* normalizing `<_ as IntoIterator>::Item` (otherwise make_normalized_projection ICEs)
&& implements_trait(cx, ret_ty, iterator_did, &[])
&& let Some(iter_ty) = make_normalized_projection(
cx.tcx,
cx.param_env,
iterator_did,
sym!(Item),
[ret_ty],
)
// Only lint if the `IntoIterator` impl doesn't actually exist
&& !implements_trait(cx, ref_ty, into_iter_did, &[])
{
let self_ty_snippet = format!("{borrow_prefix}{}", snippet(cx, imp.self_ty.span, ".."));
span_lint_and_then(
cx,
ITER_WITHOUT_INTO_ITER,
item.span,
&format!("`{}` method without an `IntoIterator` impl for `{self_ty_snippet}`", item.ident),
|diag| {
// Get the lower span of the `impl` block, and insert the suggestion right before it:
// impl X {
// ^ fn iter(&self) -> impl IntoIterator { ... }
// }
let span_behind_impl = cx.tcx
.def_span(cx.tcx.hir().parent_id(item.hir_id()).owner.def_id)
.shrink_to_lo();
let sugg = format!(
"
impl IntoIterator for {self_ty_snippet} {{
type IntoIter = {ret_ty};
type Item = {iter_ty};
fn into_iter(self) -> Self::IntoIter {{
self.iter()
}}
}}
"
);
diag.span_suggestion_verbose(
span_behind_impl,
format!("consider implementing `IntoIterator` for `{self_ty_snippet}`"),
sugg,
// Suggestion is on a best effort basis, might need some adjustments by the user
// such as adding some lifetimes in the associated types, or importing types.
Applicability::Unspecified,
);
});
}
}
}
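For the `MySlice` type from the lint documentation above, the UFCS-based suggestion built in `check_item` would render roughly as the `iter` method below (a sketch; the lifetime on the added `impl` has to be supplied by hand, which is why the applicability is `Unspecified`):

struct MySlice<'a>(&'a [u8]);

impl<'a> IntoIterator for &MySlice<'a> {
    type Item = &'a u8;
    type IntoIter = std::slice::Iter<'a, u8>;
    fn into_iter(self) -> Self::IntoIter {
        self.0.iter()
    }
}

impl<'a> MySlice<'a> {
    fn iter(&self) -> std::slice::Iter<'a, u8> {
        // UFCS ensures the existing `IntoIterator` impl is called, not some
        // unrelated inherent `into_iter` method
        <&Self as IntoIterator>::into_iter(self)
    }
}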

View File

@ -169,6 +169,7 @@ mod invalid_upcast_comparisons;
mod items_after_statements;
mod items_after_test_module;
mod iter_not_returning_iterator;
mod iter_without_into_iter;
mod large_const_arrays;
mod large_enum_variant;
mod large_futures;
@ -190,6 +191,7 @@ mod manual_async_fn;
mod manual_bits;
mod manual_clamp;
mod manual_float_methods;
mod manual_hash_one;
mod manual_is_ascii_check;
mod manual_let_else;
mod manual_main_separator_str;
@ -1119,6 +1121,8 @@ pub fn register_plugins(store: &mut rustc_lint::LintStore, sess: &Session, conf:
msrv(),
))
});
store.register_late_pass(move |_| Box::new(manual_hash_one::ManualHashOne::new(msrv())));
store.register_late_pass(|_| Box::new(iter_without_into_iter::IterWithoutIntoIter));
// add lints here, do not remove this comment, it's used in `new_lint`
}

View File

@ -26,7 +26,7 @@ declare_clippy_lint! {
/// # let x = 1.0f32;
/// if x.is_infinite() {}
/// ```
#[clippy::version = "1.72.0"]
#[clippy::version = "1.73.0"]
pub MANUAL_IS_INFINITE,
style,
"use dedicated method to check if a float is infinite"
@ -51,7 +51,7 @@ declare_clippy_lint! {
/// if x.is_finite() {}
/// if x.is_finite() {}
/// ```
#[clippy::version = "1.72.0"]
#[clippy::version = "1.73.0"]
pub MANUAL_IS_FINITE,
style,
"use dedicated method to check if a float is finite"

View File

@ -0,0 +1,133 @@
use clippy_utils::diagnostics::span_lint_hir_and_then;
use clippy_utils::msrvs::{self, Msrv};
use clippy_utils::source::snippet_opt;
use clippy_utils::visitors::{is_local_used, local_used_once};
use clippy_utils::{is_trait_method, path_to_local_id};
use rustc_errors::Applicability;
use rustc_hir::{BindingAnnotation, ExprKind, Local, Node, PatKind, StmtKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_session::{declare_tool_lint, impl_lint_pass};
use rustc_span::sym;
declare_clippy_lint! {
/// ### What it does
/// Checks for cases where [`BuildHasher::hash_one`] can be used.
///
/// [`BuildHasher::hash_one`]: https://doc.rust-lang.org/std/hash/trait.BuildHasher.html#method.hash_one
///
/// ### Why is this bad?
/// It is more concise to use the `hash_one` method.
///
/// ### Example
/// ```rust
/// use std::hash::{BuildHasher, Hash, Hasher};
/// use std::collections::hash_map::RandomState;
///
/// let s = RandomState::new();
/// let value = vec![1, 2, 3];
///
/// let mut hasher = s.build_hasher();
/// value.hash(&mut hasher);
/// let hash = hasher.finish();
/// ```
/// Use instead:
/// ```rust
/// use std::hash::BuildHasher;
/// use std::collections::hash_map::RandomState;
///
/// let s = RandomState::new();
/// let value = vec![1, 2, 3];
///
/// let hash = s.hash_one(&value);
/// ```
#[clippy::version = "1.74.0"]
pub MANUAL_HASH_ONE,
complexity,
"manual implementations of `BuildHasher::hash_one`"
}
pub struct ManualHashOne {
msrv: Msrv,
}
impl ManualHashOne {
#[must_use]
pub fn new(msrv: Msrv) -> Self {
Self { msrv }
}
}
impl_lint_pass!(ManualHashOne => [MANUAL_HASH_ONE]);
impl LateLintPass<'_> for ManualHashOne {
fn check_local(&mut self, cx: &LateContext<'_>, local: &Local<'_>) {
// `let mut hasher = seg.build_hasher();`
if let PatKind::Binding(BindingAnnotation::MUT, hasher, _, None) = local.pat.kind
&& let Some(init) = local.init
&& !init.span.from_expansion()
&& let ExprKind::MethodCall(seg, build_hasher, [], _) = init.kind
&& seg.ident.name == sym!(build_hasher)
&& let Node::Stmt(local_stmt) = cx.tcx.hir().get_parent(local.hir_id)
&& let Node::Block(block) = cx.tcx.hir().get_parent(local_stmt.hir_id)
&& let mut stmts = block.stmts.iter()
.skip_while(|stmt| stmt.hir_id != local_stmt.hir_id)
.skip(1)
.filter(|&stmt| is_local_used(cx, stmt, hasher))
// `hashed_value.hash(&mut hasher);`
&& let Some(hash_stmt) = stmts.next()
&& let StmtKind::Semi(hash_expr) = hash_stmt.kind
&& !hash_expr.span.from_expansion()
&& let ExprKind::MethodCall(seg, hashed_value, [ref_to_hasher], _) = hash_expr.kind
&& seg.ident.name == sym::hash
&& is_trait_method(cx, hash_expr, sym::Hash)
&& path_to_local_id(ref_to_hasher.peel_borrows(), hasher)
&& let maybe_finish_stmt = stmts.next()
// There should be no more statements referencing `hasher`
&& stmts.next().is_none()
// `hasher.finish()`, may be anywhere in a statement or the trailing expr of the block
&& let Some(path_expr) = local_used_once(cx, (maybe_finish_stmt, block.expr), hasher)
&& let Node::Expr(finish_expr) = cx.tcx.hir().get_parent(path_expr.hir_id)
&& !finish_expr.span.from_expansion()
&& let ExprKind::MethodCall(seg, _, [], _) = finish_expr.kind
&& seg.ident.name == sym!(finish)
&& self.msrv.meets(msrvs::BUILD_HASHER_HASH_ONE)
{
span_lint_hir_and_then(
cx,
MANUAL_HASH_ONE,
finish_expr.hir_id,
finish_expr.span,
"manual implementation of `BuildHasher::hash_one`",
|diag| {
if let Some(build_hasher) = snippet_opt(cx, build_hasher.span)
&& let Some(hashed_value) = snippet_opt(cx, hashed_value.span)
{
diag.multipart_suggestion(
"try",
vec![
(local_stmt.span, String::new()),
(hash_stmt.span, String::new()),
(
finish_expr.span,
// `needless_borrows_for_generic_args` will take care of
// removing the `&` when it isn't needed
format!("{build_hasher}.hash_one(&{hashed_value})")
)
],
Applicability::MachineApplicable,
);
}
},
);
}
}
extract_msrv_attr!(LateContext);
}

View File

@ -136,9 +136,9 @@ fn emit_manual_let_else(
// for this to be machine applicable.
let mut app = Applicability::HasPlaceholders;
let (sn_expr, _) = snippet_with_context(cx, expr.span, span.ctxt(), "", &mut app);
let (sn_else, _) = snippet_with_context(cx, else_body.span, span.ctxt(), "", &mut app);
let (sn_else, else_is_mac_call) = snippet_with_context(cx, else_body.span, span.ctxt(), "", &mut app);
let else_bl = if matches!(else_body.kind, ExprKind::Block(..)) {
let else_bl = if matches!(else_body.kind, ExprKind::Block(..)) && !else_is_mac_call {
sn_else.into_owned()
} else {
format!("{{ {sn_else} }}")

View File

@ -3,6 +3,7 @@ use clippy_utils::is_doc_hidden;
use clippy_utils::msrvs::{self, Msrv};
use clippy_utils::source::snippet_opt;
use rustc_ast::ast::{self, VisibilityKind};
use rustc_ast::attr;
use rustc_data_structures::fx::FxHashSet;
use rustc_errors::Applicability;
use rustc_hir::def::{CtorKind, CtorOf, DefKind, Res};
@ -158,7 +159,8 @@ impl<'tcx> LateLintPass<'tcx> for ManualNonExhaustiveEnum {
let mut iter = def.variants.iter().filter_map(|v| {
(matches!(v.data, hir::VariantData::Unit(_, _))
&& v.ident.as_str().starts_with('_')
&& is_doc_hidden(cx.tcx.hir().attrs(v.hir_id)))
&& is_doc_hidden(cx.tcx.hir().attrs(v.hir_id))
&& !attr::contains_name(cx.tcx.hir().attrs(item.hir_id()), sym::non_exhaustive))
.then_some((v.def_id, v.span))
});
if let Some((id, span)) = iter.next()
@ -198,16 +200,14 @@ impl<'tcx> LateLintPass<'tcx> for ManualNonExhaustiveEnum {
enum_span,
"this seems like a manual implementation of the non-exhaustive pattern",
|diag| {
if !cx.tcx.adt_def(enum_id).is_variant_list_non_exhaustive()
&& let header_span = cx.sess().source_map().span_until_char(enum_span, '{')
&& let Some(snippet) = snippet_opt(cx, header_span)
{
diag.span_suggestion(
header_span,
"add the attribute",
format!("#[non_exhaustive] {snippet}"),
Applicability::Unspecified,
);
let header_span = cx.sess().source_map().span_until_char(enum_span, '{');
if let Some(snippet) = snippet_opt(cx, header_span) {
diag.span_suggestion(
header_span,
"add the attribute",
format!("#[non_exhaustive] {snippet}"),
Applicability::Unspecified,
);
}
diag.span_help(variant_span, "remove this variant");
},

View File

@ -961,7 +961,7 @@ declare_clippy_lint! {
/// _ => todo!(),
/// }
/// ```
#[clippy::version = "1.72.0"]
#[clippy::version = "1.73.0"]
pub REDUNDANT_GUARDS,
complexity,
"checks for unnecessary guards in match expressions"

View File

@ -1,4 +1,4 @@
use clippy_utils::consts::{constant, constant_full_int, miri_to_const, FullInt};
use clippy_utils::consts::{constant, constant_full_int, mir_to_const, FullInt};
use clippy_utils::diagnostics::span_lint_and_note;
use core::cmp::Ordering;
use rustc_hir::{Arm, Expr, PatKind, RangeEnd};
@ -37,14 +37,14 @@ fn all_ranges<'tcx>(cx: &LateContext<'tcx>, arms: &'tcx [Arm<'_>], ty: Ty<'tcx>)
Some(lhs) => constant(cx, cx.typeck_results(), lhs)?,
None => {
let min_val_const = ty.numeric_min_val(cx.tcx)?;
miri_to_const(cx, mir::Const::from_ty_const(min_val_const, cx.tcx))?
mir_to_const(cx, mir::Const::from_ty_const(min_val_const, cx.tcx))?
},
};
let rhs_const = match rhs {
Some(rhs) => constant(cx, cx.typeck_results(), rhs)?,
None => {
let max_val_const = ty.numeric_max_val(cx.tcx)?;
miri_to_const(cx, mir::Const::from_ty_const(max_val_const, cx.tcx))?
mir_to_const(cx, mir::Const::from_ty_const(max_val_const, cx.tcx))?
},
};
let lhs_val = lhs_const.int_value(cx, ty)?;

View File

@ -2970,7 +2970,7 @@ declare_clippy_lint! {
/// assert_eq!((*any_box).type_id(), TypeId::of::<i32>());
/// // ^ dereference first, to call `type_id` on `dyn Any`
/// ```
#[clippy::version = "1.72.0"]
#[clippy::version = "1.73.0"]
pub TYPE_ID_ON_BOX,
suspicious,
"calling `.type_id()` on `Box<dyn Any>`"
@ -3368,6 +3368,7 @@ declare_clippy_lint! {
}
declare_clippy_lint! {
/// ### What it does
/// Looks for calls to [`Stdin::read_line`] to read a line from the standard input
/// into a string, then later attempting to parse this string into a type without first trimming it, which will
/// always fail because the string has a trailing newline in it.
@ -3390,7 +3391,7 @@ declare_clippy_lint! {
/// // ^^^^^^^^^^^ remove the trailing newline
/// assert_eq!(num, 42);
/// ```
#[clippy::version = "1.72.0"]
#[clippy::version = "1.73.0"]
pub READ_LINE_WITHOUT_TRIM,
correctness,
"calling `Stdin::read_line`, then trying to parse it without first trimming"
@ -3418,7 +3419,7 @@ declare_clippy_lint! {
/// # let c = 'c';
/// matches!(c, '\\' | '.' | '+' | '*' | '(' | ')' | '|' | '[' | ']' | '{' | '}' | '^' | '$' | '#' | '&' | '-' | '~');
/// ```
#[clippy::version = "1.72.0"]
#[clippy::version = "1.73.0"]
pub STRING_LIT_CHARS_ANY,
restriction,
"checks for `<string_lit>.chars().any(|i| i == c)`"
@ -3453,7 +3454,7 @@ declare_clippy_lint! {
/// })
/// }
/// ```
#[clippy::version = "1.72.0"]
#[clippy::version = "1.73.0"]
pub FORMAT_COLLECT,
perf,
"`format!`ing every element in a collection, then collecting the strings into a new `String`"
@ -3474,7 +3475,7 @@ declare_clippy_lint! {
/// let y = v.iter().collect::<Vec<_>>();
/// assert_eq!(x, y);
/// ```
#[clippy::version = "1.72.0"]
#[clippy::version = "1.73.0"]
pub ITER_SKIP_ZERO,
correctness,
"disallows `.skip(0)`"
@ -3505,7 +3506,7 @@ declare_clippy_lint! {
/// # let v = vec![];
/// _ = v.into_iter().filter(|i| i % 2 == 0).map(|i| really_expensive_fn(i));
/// ```
#[clippy::version = "1.72.0"]
#[clippy::version = "1.73.0"]
pub FILTER_MAP_BOOL_THEN,
style,
"checks for usage of `bool::then` in `Iterator::filter_map`"

View File

@ -24,7 +24,6 @@ fn get_ty_from_args<'a>(args: Option<&'a [hir::GenericArg<'a>]>, index: usize) -
}
}
#[expect(clippy::too_many_lines)]
pub(super) fn check(
cx: &LateContext<'_>,
expr: &hir::Expr<'_>,
@ -101,11 +100,11 @@ pub(super) fn check(
(expr.span.with_lo(args[0].span.hi()), String::new()),
]),
("None", "unwrap_or_else", _) => match args[0].kind {
hir::ExprKind::Closure(hir::Closure {
body,
..
}) => Some(vec![
(expr.span.with_hi(cx.tcx.hir().body(*body).value.span.lo()), String::new()),
hir::ExprKind::Closure(hir::Closure { body, .. }) => Some(vec![
(
expr.span.with_hi(cx.tcx.hir().body(*body).value.span.lo()),
String::new(),
),
(expr.span.with_lo(args[0].span.hi()), String::new()),
]),
_ => None,

View File

@ -15,6 +15,10 @@ declare_clippy_lint! {
/// A good custom message should be more about why the failure of the assertion is problematic
/// and not what is failed because the assertion already conveys that.
///
/// Although the same reasoning applies to testing functions, this lint ignores them as they would be too noisy.
/// Also, in most cases understanding the test failure would be easier
/// compared to understanding a complex invariant distributed around the codebase.
///
/// ### Known problems
/// This lint cannot check the quality of the custom panic messages.
/// Hence, you can suppress this lint simply by adding placeholder messages
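A short sketch of the distinction this paragraph draws (hypothetical `Config`/`start_server` names, not taken from the lint's tests):

struct Config { validated: bool }

fn start_server(config: &Config) {
    // Restating *what* failed adds nothing over the assertion itself:
    assert!(config.validated, "config is not validated");
    // Explaining *why* the failure is problematic is what the lint asks for:
    assert!(config.validated, "the server must never start with an unvalidated config");
}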

View File

@ -135,10 +135,7 @@ impl<'a, 'tcx> DivergenceVisitor<'a, 'tcx> {
}
fn stmt_might_diverge(stmt: &Stmt<'_>) -> bool {
match stmt.kind {
StmtKind::Item(..) => false,
_ => true,
}
!matches!(stmt.kind, StmtKind::Item(..))
}
impl<'a, 'tcx> Visitor<'tcx> for DivergenceVisitor<'a, 'tcx> {
@ -148,7 +145,7 @@ impl<'a, 'tcx> Visitor<'tcx> for DivergenceVisitor<'a, 'tcx> {
ExprKind::Block(block, ..) => match (block.stmts, block.expr) {
(stmts, Some(e)) => {
if stmts.iter().all(|stmt| !stmt_might_diverge(stmt)) {
self.visit_expr(e)
self.visit_expr(e);
}
},
([first @ .., stmt], None) => {

View File

@ -47,9 +47,9 @@ declare_clippy_lint! {
/// 12 + *y
/// }
/// ```
#[clippy::version = "1.72.0"]
#[clippy::version = "1.73.0"]
pub NEEDLESS_PASS_BY_REF_MUT,
suspicious,
nursery,
"using a `&mut` argument when it's not mutated"
}

View File

@ -102,7 +102,7 @@ declare_clippy_lint! {
/// }
/// }
/// ```
#[clippy::version = "1.72.0"]
#[clippy::version = "1.73.0"]
pub NON_CANONICAL_PARTIAL_ORD_IMPL,
suspicious,
"non-canonical implementation of `PartialOrd` on an `Ord` type"

View File

@ -312,7 +312,7 @@ declare_clippy_lint! {
/// # let status_code = 200;
/// if status_code <= 400 && status_code > 500 {}
/// ```
#[clippy::version = "1.71.0"]
#[clippy::version = "1.73.0"]
pub IMPOSSIBLE_COMPARISONS,
correctness,
"double comparisons that will never evaluate to `true`"
@ -332,7 +332,7 @@ declare_clippy_lint! {
/// # let status_code = 200;
/// if status_code <= 400 && status_code < 500 {}
/// ```
#[clippy::version = "1.71.0"]
#[clippy::version = "1.73.0"]
pub REDUNDANT_COMPARISONS,
correctness,
"double comparisons where one of them can be removed"

View File

@ -75,6 +75,7 @@ impl EarlyLintPass for RawStrings {
if !snippet(cx, expr.span, prefix).trim().starts_with(prefix) {
return;
}
let descr = lit.kind.descr();
if !str.contains(['\\', '"']) {
span_lint_and_then(
@ -89,20 +90,17 @@ impl EarlyLintPass for RawStrings {
let r_pos = expr.span.lo() + BytePos::from_usize(prefix.len() - 1);
let start = start.with_lo(r_pos);
if end.is_empty() {
diag.span_suggestion(
start,
"use a string literal instead",
format!("\"{str}\""),
Applicability::MachineApplicable,
);
} else {
diag.multipart_suggestion(
"try",
vec![(start, String::new()), (end, String::new())],
Applicability::MachineApplicable,
);
let mut remove = vec![(start, String::new())];
// avoid debug ICE from empty suggestions
if !end.is_empty() {
remove.push((end, String::new()));
}
diag.multipart_suggestion_verbose(
format!("use a plain {descr} literal instead"),
remove,
Applicability::MachineApplicable,
);
},
);
if !matches!(cx.get_lint_level(NEEDLESS_RAW_STRINGS), rustc_lint::Allow) {
@ -149,9 +147,9 @@ impl EarlyLintPass for RawStrings {
let (start, end) = hash_spans(expr.span, prefix, req, max);
let message = match max - req {
_ if req == 0 => "remove all the hashes around the literal".to_string(),
1 => "remove one hash from both sides of the literal".to_string(),
n => format!("remove {n} hashes from both sides of the literal"),
_ if req == 0 => format!("remove all the hashes around the {descr} literal"),
1 => format!("remove one hash from both sides of the {descr} literal"),
n => format!("remove {n} hashes from both sides of the {descr} literal"),
};
diag.multipart_suggestion(

View File

@ -3,11 +3,9 @@ use clippy_utils::is_from_proc_macro;
use clippy_utils::ty::needs_ordered_drop;
use rustc_ast::Mutability;
use rustc_hir::def::Res;
use rustc_hir::{
BindingAnnotation, ByRef, Expr, ExprKind, HirId, Local, Node, Pat, PatKind, QPath,
};
use rustc_hir::{BindingAnnotation, ByRef, ExprKind, HirId, Local, Node, Pat, PatKind, QPath};
use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_middle::lint::{in_external_macro, is_from_async_await};
use rustc_middle::lint::in_external_macro;
use rustc_session::{declare_lint_pass, declare_tool_lint};
use rustc_span::symbol::Ident;
use rustc_span::DesugaringKind;
@ -39,7 +37,7 @@ declare_clippy_lint! {
/// // no redefinition with the same name
/// }
/// ```
#[clippy::version = "1.72.0"]
#[clippy::version = "1.73.0"]
pub REDUNDANT_LOCALS,
correctness,
"redundant redefinition of a local binding"
@ -68,21 +66,18 @@ impl<'tcx> LateLintPass<'tcx> for RedundantLocals {
// the local does not change the effect of assignments to the binding. see #11290
if !affects_assignments(cx, mutability, binding_id, local.hir_id);
// the local does not affect the code's drop behavior
if !affects_drop_behavior(cx, binding_id, local.hir_id, expr);
if !needs_ordered_drop(cx, cx.typeck_results().expr_ty(expr));
// the local is user-controlled
if !in_external_macro(cx.sess(), local.span);
if !is_from_proc_macro(cx, expr);
// Async function parameters are lowered into the closure body, so we can't lint them.
// see `lower_maybe_async_body` in `rust_ast_lowering`
if !is_from_async_await(local.span);
then {
span_lint_and_help(
cx,
REDUNDANT_LOCALS,
vec![binding_pat.span, local.span],
"redundant redefinition of a binding",
None,
&format!("remove the redefinition of `{ident}`"),
local.span,
&format!("redundant redefinition of a binding `{ident}`"),
Some(binding_pat.span),
&format!("`{ident}` is initially defined here"),
);
}
}
@ -109,18 +104,3 @@ fn affects_assignments(cx: &LateContext<'_>, mutability: Mutability, bind: HirId
// the binding is mutable and the rebinding is in a different scope than the original binding
mutability == Mutability::Mut && hir.get_enclosing_scope(bind) != hir.get_enclosing_scope(rebind)
}
/// Check if a rebinding of a local affects the code's drop behavior.
fn affects_drop_behavior<'tcx>(
cx: &LateContext<'tcx>,
bind: HirId,
rebind: HirId,
rebind_expr: &Expr<'tcx>,
) -> bool {
let hir = cx.tcx.hir();
// the rebinding is in a different scope than the original binding
// and the type of the binding cares about drop order
hir.get_enclosing_scope(bind) != hir.get_enclosing_scope(rebind)
&& needs_ordered_drop(cx, cx.typeck_results().expr_ty(rebind_expr))
}
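For context, a hypothetical snippet of what `redundant_locals` reports, matching the reworded diagnostic above:

fn count(items: &[i32]) -> usize {
    let len = items.len();
    let len = len; // lint: redundant redefinition of a binding `len`
                   // note: `len` is initially defined here (pointing at the first binding)
    len
}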

View File

@ -1,9 +1,11 @@
use clippy_utils::diagnostics::span_lint_and_sugg;
use clippy_utils::is_from_proc_macro;
use rustc_errors::Applicability;
use rustc_hir::def::Res;
use rustc_hir::def_id::DefId;
use rustc_hir::{HirId, Path, PathSegment};
use rustc_lint::{LateContext, LateLintPass};
use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_middle::lint::in_external_macro;
use rustc_session::{declare_tool_lint, impl_lint_pass};
use rustc_span::symbol::kw;
use rustc_span::{sym, Span};
@ -99,6 +101,8 @@ impl<'tcx> LateLintPass<'tcx> for StdReexports {
if let Res::Def(_, def_id) = path.res
&& let Some(first_segment) = get_first_segment(path)
&& is_stable(cx, def_id)
&& !in_external_macro(cx.sess(), path.span)
&& !is_from_proc_macro(cx, &first_segment.ident)
{
let (lint, used_mod, replace_with) = match first_segment.ident.name {
sym::std => match cx.tcx.crate_name(def_id.krate) {

View File

@ -294,7 +294,7 @@ define_Conf! {
///
/// Suppress lints whenever the suggested change would cause breakage for other crates.
(avoid_breaking_exported_api: bool = true),
/// Lint: MANUAL_SPLIT_ONCE, MANUAL_STR_REPEAT, CLONED_INSTEAD_OF_COPIED, REDUNDANT_FIELD_NAMES, OPTION_MAP_UNWRAP_OR, REDUNDANT_STATIC_LIFETIMES, FILTER_MAP_NEXT, CHECKED_CONVERSIONS, MANUAL_RANGE_CONTAINS, USE_SELF, MEM_REPLACE_WITH_DEFAULT, MANUAL_NON_EXHAUSTIVE, OPTION_AS_REF_DEREF, MAP_UNWRAP_OR, MATCH_LIKE_MATCHES_MACRO, MANUAL_STRIP, MISSING_CONST_FOR_FN, UNNESTED_OR_PATTERNS, FROM_OVER_INTO, PTR_AS_PTR, IF_THEN_SOME_ELSE_NONE, APPROX_CONSTANT, DEPRECATED_CFG_ATTR, INDEX_REFUTABLE_SLICE, MAP_CLONE, BORROW_AS_PTR, MANUAL_BITS, ERR_EXPECT, CAST_ABS_TO_UNSIGNED, UNINLINED_FORMAT_ARGS, MANUAL_CLAMP, MANUAL_LET_ELSE, UNCHECKED_DURATION_SUBTRACTION, COLLAPSIBLE_STR_REPLACE, SEEK_FROM_CURRENT, SEEK_REWIND, UNNECESSARY_LAZY_EVALUATIONS, TRANSMUTE_PTR_TO_REF, ALMOST_COMPLETE_RANGE, NEEDLESS_BORROW, DERIVABLE_IMPLS, MANUAL_IS_ASCII_CHECK, MANUAL_REM_EUCLID, MANUAL_RETAIN, TYPE_REPETITION_IN_BOUNDS, TUPLE_ARRAY_CONVERSIONS, MANUAL_TRY_FOLD.
/// Lint: MANUAL_SPLIT_ONCE, MANUAL_STR_REPEAT, CLONED_INSTEAD_OF_COPIED, REDUNDANT_FIELD_NAMES, OPTION_MAP_UNWRAP_OR, REDUNDANT_STATIC_LIFETIMES, FILTER_MAP_NEXT, CHECKED_CONVERSIONS, MANUAL_RANGE_CONTAINS, USE_SELF, MEM_REPLACE_WITH_DEFAULT, MANUAL_NON_EXHAUSTIVE, OPTION_AS_REF_DEREF, MAP_UNWRAP_OR, MATCH_LIKE_MATCHES_MACRO, MANUAL_STRIP, MISSING_CONST_FOR_FN, UNNESTED_OR_PATTERNS, FROM_OVER_INTO, PTR_AS_PTR, IF_THEN_SOME_ELSE_NONE, APPROX_CONSTANT, DEPRECATED_CFG_ATTR, INDEX_REFUTABLE_SLICE, MAP_CLONE, BORROW_AS_PTR, MANUAL_BITS, ERR_EXPECT, CAST_ABS_TO_UNSIGNED, UNINLINED_FORMAT_ARGS, MANUAL_CLAMP, MANUAL_LET_ELSE, UNCHECKED_DURATION_SUBTRACTION, COLLAPSIBLE_STR_REPLACE, SEEK_FROM_CURRENT, SEEK_REWIND, UNNECESSARY_LAZY_EVALUATIONS, TRANSMUTE_PTR_TO_REF, ALMOST_COMPLETE_RANGE, NEEDLESS_BORROW, DERIVABLE_IMPLS, MANUAL_IS_ASCII_CHECK, MANUAL_REM_EUCLID, MANUAL_RETAIN, TYPE_REPETITION_IN_BOUNDS, TUPLE_ARRAY_CONVERSIONS, MANUAL_TRY_FOLD, MANUAL_HASH_ONE.
///
/// The minimum rust version that the project supports
(msrv: Option<String> = None),
@ -744,3 +744,44 @@ fn calculate_dimensions(fields: &[&str]) -> (usize, Vec<usize>) {
(rows, column_widths)
}
#[cfg(test)]
mod tests {
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use serde::de::IgnoredAny;
use std::fs;
use walkdir::WalkDir;
#[test]
fn configs_are_tested() {
let mut names: FxHashSet<String> = super::metadata::get_configuration_metadata()
.into_iter()
.map(|meta| meta.name.replace('_', "-"))
.collect();
let toml_files = WalkDir::new("../tests")
.into_iter()
.map(Result::unwrap)
.filter(|entry| entry.file_name() == "clippy.toml");
for entry in toml_files {
let file = fs::read_to_string(entry.path()).unwrap();
#[allow(clippy::zero_sized_map_values)]
if let Ok(map) = toml::from_str::<FxHashMap<String, IgnoredAny>>(&file) {
for name in map.keys() {
names.remove(name.as_str());
}
}
}
assert!(
names.remove("allow-one-hash-in-raw-strings"),
"remove this when #11481 is fixed"
);
assert!(
names.is_empty(),
"Configuration variable lacks test: {names:?}\nAdd a test to `tests/ui-toml`"
);
}
}

View File

@ -132,6 +132,7 @@ impl LateLintPass<'_> for WildcardImports {
if self.warn_on_all || !self.check_exceptions(item, use_path.segments);
let used_imports = cx.tcx.names_imported_by_glob_use(item.owner_id.def_id);
if !used_imports.is_empty(); // Already handled by `unused_imports`
if !used_imports.contains(&kw::Underscore);
then {
let mut applicability = Applicability::MachineApplicable;
let import_source_snippet = snippet_with_applicability(cx, use_path.span, "..", &mut applicability);

View File

@ -3,12 +3,15 @@ use clippy_utils::macros::{find_format_args, format_arg_removal_span, root_macro
use clippy_utils::source::{expand_past_previous_comma, snippet_opt};
use clippy_utils::{is_in_cfg_test, is_in_test_function};
use rustc_ast::token::LitKind;
use rustc_ast::{FormatArgPosition, FormatArgs, FormatArgsPiece, FormatOptions, FormatPlaceholder, FormatTrait};
use rustc_ast::{
FormatArgPosition, FormatArgPositionKind, FormatArgs, FormatArgsPiece, FormatOptions, FormatPlaceholder,
FormatTrait,
};
use rustc_errors::Applicability;
use rustc_hir::{Expr, Impl, Item, ItemKind};
use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_session::{declare_tool_lint, impl_lint_pass};
use rustc_span::{sym, BytePos};
use rustc_span::{sym, BytePos, Span};
declare_clippy_lint! {
/// ### What it does
@ -450,6 +453,12 @@ fn check_empty_string(cx: &LateContext<'_>, format_args: &FormatArgs, macro_call
fn check_literal(cx: &LateContext<'_>, format_args: &FormatArgs, name: &str) {
let arg_index = |argument: &FormatArgPosition| argument.index.unwrap_or_else(|pos| pos);
let lint_name = if name.starts_with("write") {
WRITE_LITERAL
} else {
PRINT_LITERAL
};
let mut counts = vec![0u32; format_args.arguments.all_args().len()];
for piece in &format_args.template {
if let FormatArgsPiece::Placeholder(placeholder) = piece {
@ -457,6 +466,12 @@ fn check_literal(cx: &LateContext<'_>, format_args: &FormatArgs, name: &str) {
}
}
let mut suggestion: Vec<(Span, String)> = vec![];
// holds index of replaced positional arguments; used to decrement the index of the remaining
// positional arguments.
let mut replaced_position: Vec<usize> = vec![];
let mut sug_span: Option<Span> = None;
for piece in &format_args.template {
if let FormatArgsPiece::Placeholder(FormatPlaceholder {
argument,
@ -471,9 +486,9 @@ fn check_literal(cx: &LateContext<'_>, format_args: &FormatArgs, name: &str) {
&& let rustc_ast::ExprKind::Lit(lit) = &arg.expr.kind
&& !arg.expr.span.from_expansion()
&& let Some(value_string) = snippet_opt(cx, arg.expr.span)
{
{
let (replacement, replace_raw) = match lit.kind {
LitKind::Str | LitKind::StrRaw(_) => match extract_str_literal(&value_string) {
LitKind::Str | LitKind::StrRaw(_) => match extract_str_literal(&value_string) {
Some(extracted) => extracted,
None => return,
},
@ -493,12 +508,6 @@ fn check_literal(cx: &LateContext<'_>, format_args: &FormatArgs, name: &str) {
_ => continue,
};
let lint = if name.starts_with("write") {
WRITE_LITERAL
} else {
PRINT_LITERAL
};
let Some(format_string_snippet) = snippet_opt(cx, format_args.span) else { continue };
let format_string_is_raw = format_string_snippet.starts_with('r');
@ -519,29 +528,58 @@ fn check_literal(cx: &LateContext<'_>, format_args: &FormatArgs, name: &str) {
},
};
span_lint_and_then(
cx,
lint,
arg.expr.span,
"literal with an empty format string",
|diag| {
if let Some(replacement) = replacement
// `format!("{}", "a")`, `format!("{named}", named = "b")
// ~~~~~ ~~~~~~~~~~~~~
&& let Some(removal_span) = format_arg_removal_span(format_args, index)
{
let replacement = replacement.replace('{', "{{").replace('}', "}}");
diag.multipart_suggestion(
"try",
vec![(*placeholder_span, replacement), (removal_span, String::new())],
Applicability::MachineApplicable,
);
}
},
);
sug_span = Some(sug_span.unwrap_or(arg.expr.span).to(arg.expr.span));
if let Some((_, index)) = positional_arg_piece_span(piece) {
replaced_position.push(index);
}
if let Some(replacement) = replacement
// `format!("{}", "a")`, `format!("{named}", named = "b")
// ~~~~~ ~~~~~~~~~~~~~
&& let Some(removal_span) = format_arg_removal_span(format_args, index) {
let replacement = escape_braces(&replacement, !format_string_is_raw && !replace_raw);
suggestion.push((*placeholder_span, replacement));
suggestion.push((removal_span, String::new()));
}
}
}
// Decrement the indices of the remaining positional arguments by the number of replaced ones
if !suggestion.is_empty() {
for piece in &format_args.template {
if let Some((span, index)) = positional_arg_piece_span(piece)
&& suggestion.iter().all(|(s, _)| *s != span) {
let decrement = replaced_position.iter().filter(|i| **i < index).count();
suggestion.push((span, format!("{{{}}}", index.saturating_sub(decrement))));
}
}
}
if let Some(span) = sug_span {
span_lint_and_then(cx, lint_name, span, "literal with an empty format string", |diag| {
if !suggestion.is_empty() {
diag.multipart_suggestion("try", suggestion, Applicability::MachineApplicable);
}
});
}
}
/// Extract the `Span` and its index from the given `piece`, iff it's a positional argument.
fn positional_arg_piece_span(piece: &FormatArgsPiece) -> Option<(Span, usize)> {
match piece {
FormatArgsPiece::Placeholder(FormatPlaceholder {
argument:
FormatArgPosition {
index: Ok(index),
kind: FormatArgPositionKind::Number,
..
},
span: Some(span),
..
}) => Some((*span, *index)),
_ => None,
}
}
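To illustrate the bookkeeping above (a sketch of the intended suggestion, not output copied from the test suite): when a literal positional argument is inlined, the placeholders that referred to later arguments have their indices shifted down:

fn greet(name: &str) {
    // before: `"Hello"` is a literal with an empty format string
    println!("{0} {1}!", "Hello", name);
    // after the combined suggestion: the literal is inlined and the remaining
    // positional index is decremented from `{1}` to `{0}`
    println!("Hello {0}!", name);
}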
/// Removes the raw marker, `#`s and quotes from a str, and returns if the literal is raw
@ -593,3 +631,47 @@ fn conservative_unescape(literal: &str) -> Result<String, UnescapeErr> {
if err { Err(UnescapeErr::Lint) } else { Ok(unescaped) }
}
/// Replaces `{` with `{{` and `}` with `}}`. If `preserve_unicode_escapes` is `true` the braces in
/// `\u{xxxx}` are left unmodified
#[expect(clippy::match_same_arms)]
fn escape_braces(literal: &str, preserve_unicode_escapes: bool) -> String {
#[derive(Clone, Copy)]
enum State {
Normal,
Backslash,
UnicodeEscape,
}
let mut escaped = String::with_capacity(literal.len());
let mut state = State::Normal;
for ch in literal.chars() {
state = match (ch, state) {
// Escape braces outside of unicode escapes by doubling them up
('{' | '}', State::Normal) => {
escaped.push(ch);
State::Normal
},
// If `preserve_unicode_escapes` isn't enabled stay in `State::Normal`, otherwise:
//
// \u{aaaa} \\ \x01
// ^ ^ ^
('\\', State::Normal) if preserve_unicode_escapes => State::Backslash,
// \u{aaaa}
// ^
('u', State::Backslash) => State::UnicodeEscape,
// \xAA \\
// ^ ^
(_, State::Backslash) => State::Normal,
// \u{aaaa}
// ^
('}', State::UnicodeEscape) => State::Normal,
_ => state,
};
escaped.push(ch);
}
escaped
}
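A quick behavioural sketch of `escape_braces`, following the state machine above (assumed calls, not part of the diff):

#[test]
fn escape_braces_sketch() {
    // plain braces are doubled so they survive being placed back into a format string
    assert_eq!(escape_braces("{x} }", false), "{{x}} }}");
    // with `preserve_unicode_escapes`, braces that belong to `\u{...}` stay single
    assert_eq!(escape_braces(r"\u{1F600} {x}", true), r"\u{1F600} {{x}}");
}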

View File

@ -1,6 +1,6 @@
[package]
name = "clippy_utils"
version = "0.1.74"
version = "0.1.75"
edition = "2021"
publish = false

View File

@ -9,11 +9,12 @@ use rustc_hir::def::{DefKind, Res};
use rustc_hir::{BinOp, BinOpKind, Block, ConstBlock, Expr, ExprKind, HirId, Item, ItemKind, Node, QPath, UnOp};
use rustc_lexer::tokenize;
use rustc_lint::LateContext;
use rustc_middle::mir::interpret::Scalar;
use rustc_middle::mir::interpret::{alloc_range, Scalar};
use rustc_middle::ty::{self, EarlyBinder, FloatTy, GenericArgsRef, List, ScalarInt, Ty, TyCtxt};
use rustc_middle::{bug, mir, span_bug};
use rustc_span::symbol::{Ident, Symbol};
use rustc_span::SyntaxContext;
use rustc_target::abi::Size;
use std::cmp::Ordering::{self, Equal};
use std::hash::{Hash, Hasher};
use std::iter;
@ -403,7 +404,7 @@ impl<'a, 'tcx> ConstEvalLateContext<'a, 'tcx> {
&& adt_def.is_struct()
&& let Some(desired_field) = field_of_struct(*adt_def, self.lcx, *constant, field)
{
miri_to_const(self.lcx, desired_field)
mir_to_const(self.lcx, desired_field)
}
else {
result
@ -483,7 +484,7 @@ impl<'a, 'tcx> ConstEvalLateContext<'a, 'tcx> {
.const_eval_resolve(self.param_env, mir::UnevaluatedConst::new(def_id, args), None)
.ok()
.map(|val| rustc_middle::mir::Const::from_value(val, ty))?;
let result = miri_to_const(self.lcx, result)?;
let result = mir_to_const(self.lcx, result)?;
self.source = ConstantSource::Constant;
Some(result)
},
@ -655,10 +656,14 @@ impl<'a, 'tcx> ConstEvalLateContext<'a, 'tcx> {
}
}
pub fn miri_to_const<'tcx>(lcx: &LateContext<'tcx>, result: mir::Const<'tcx>) -> Option<Constant<'tcx>> {
pub fn mir_to_const<'tcx>(lcx: &LateContext<'tcx>, result: mir::Const<'tcx>) -> Option<Constant<'tcx>> {
use rustc_middle::mir::ConstValue;
match result {
mir::Const::Val(ConstValue::Scalar(Scalar::Int(int)), _) => match result.ty().kind() {
let mir::Const::Val(val, _) = result else {
// We only work on evaluated consts.
return None;
};
match (val, result.ty().kind()) {
(ConstValue::Scalar(Scalar::Int(int)), _) => match result.ty().kind() {
ty::Adt(adt_def, _) if adt_def.is_struct() => Some(Constant::Adt(result)),
ty::Bool => Some(Constant::Bool(int == ScalarInt::TRUE)),
ty::Uint(_) | ty::Int(_) => Some(Constant::Int(int.assert_bits(int.size()))),
@@ -671,42 +676,28 @@ pub fn miri_to_const<'tcx>(lcx: &LateContext<'tcx>, result: mir::Const<'tcx>) ->
ty::RawPtr(_) => Some(Constant::RawPtr(int.assert_bits(int.size()))),
_ => None,
},
mir::Const::Val(cv, _) if matches!(result.ty().kind(), ty::Ref(_, inner_ty, _) if matches!(inner_ty.kind(), ty::Str)) =>
{
let data = cv.try_get_slice_bytes_for_diagnostics(lcx.tcx)?;
(_, ty::Ref(_, inner_ty, _)) if matches!(inner_ty.kind(), ty::Str) => {
let data = val.try_get_slice_bytes_for_diagnostics(lcx.tcx)?;
String::from_utf8(data.to_owned()).ok().map(Constant::Str)
},
mir::Const::Val(ConstValue::Indirect { alloc_id, offset: _ }, _) => {
let alloc = lcx.tcx.global_alloc(alloc_id).unwrap_memory();
match result.ty().kind() {
ty::Adt(adt_def, _) if adt_def.is_struct() => Some(Constant::Adt(result)),
ty::Array(sub_type, len) => match sub_type.kind() {
ty::Float(FloatTy::F32) => match len.try_to_target_usize(lcx.tcx) {
Some(len) => alloc
.inner()
.inspect_with_uninit_and_ptr_outside_interpreter(0..(4 * usize::try_from(len).unwrap()))
.to_owned()
.array_chunks::<4>()
.map(|&chunk| Some(Constant::F32(f32::from_le_bytes(chunk))))
.collect::<Option<Vec<Constant<'tcx>>>>()
.map(Constant::Vec),
_ => None,
},
ty::Float(FloatTy::F64) => match len.try_to_target_usize(lcx.tcx) {
Some(len) => alloc
.inner()
.inspect_with_uninit_and_ptr_outside_interpreter(0..(8 * usize::try_from(len).unwrap()))
.to_owned()
.array_chunks::<8>()
.map(|&chunk| Some(Constant::F64(f64::from_le_bytes(chunk))))
.collect::<Option<Vec<Constant<'tcx>>>>()
.map(Constant::Vec),
_ => None,
},
_ => None,
},
_ => None,
(_, ty::Adt(adt_def, _)) if adt_def.is_struct() => Some(Constant::Adt(result)),
(ConstValue::Indirect { alloc_id, offset }, ty::Array(sub_type, len)) => {
let alloc = lcx.tcx.global_alloc(alloc_id).unwrap_memory().inner();
let len = len.try_to_target_usize(lcx.tcx)?;
let ty::Float(flt) = sub_type.kind() else {
return None;
};
let size = Size::from_bits(flt.bit_width());
let mut res = Vec::new();
for idx in 0..len {
let range = alloc_range(offset + size * idx, size);
let val = alloc.read_scalar(&lcx.tcx, range, /* read_provenance */ false).ok()?;
res.push(match flt {
FloatTy::F32 => Constant::F32(f32::from_bits(val.to_u32().ok()?)),
FloatTy::F64 => Constant::F64(f64::from_bits(val.to_u64().ok()?)),
});
}
Some(Constant::Vec(res))
},
_ => None,
}
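The rewrite above decodes each array element via `read_scalar` and `from_bits` instead of slicing raw little-endian bytes. As a small standalone check (not part of this change), the two decodings agree for any non-NaN bit pattern, since `from_le_bytes` is defined as `from_bits` of the little-endian integer:

fn main() {
    let f32_bytes = 1.5f32.to_le_bytes();
    assert_eq!(
        f32::from_le_bytes(f32_bytes),
        f32::from_bits(u32::from_le_bytes(f32_bytes))
    );

    let f64_bytes = (-0.25f64).to_le_bytes();
    assert_eq!(
        f64::from_le_bytes(f64_bytes),
        f64::from_bits(u64::from_le_bytes(f64_bytes))
    );
}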

View File

@@ -80,7 +80,6 @@ use std::sync::{Mutex, MutexGuard, OnceLock};
use if_chain::if_chain;
use itertools::Itertools;
use rustc_ast::ast::{self, LitKind, RangeLimits};
use rustc_ast::Attribute;
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::unhash::UnhashMap;
use rustc_hir::def::{DefKind, Res};
@@ -113,7 +112,7 @@ use rustc_span::{sym, Span};
use rustc_target::abi::Integer;
use visitors::Visitable;
use crate::consts::{constant, miri_to_const, Constant};
use crate::consts::{constant, mir_to_const, Constant};
use crate::higher::Range;
use crate::ty::{
adt_and_variant_of_res, can_partially_move_ty, expr_sig, is_copy, is_recursively_primitive_type,
@@ -1509,9 +1508,7 @@ pub fn is_range_full(cx: &LateContext<'_>, expr: &Expr<'_>, container_path: Opti
if let rustc_ty::Adt(_, subst) = ty.kind()
&& let bnd_ty = subst.type_at(0)
&& let Some(min_val) = bnd_ty.numeric_min_val(cx.tcx)
&& let const_val = cx.tcx.valtree_to_const_val((bnd_ty, min_val.to_valtree()))
&& let min_const_kind = Const::from_value(const_val, bnd_ty)
&& let Some(min_const) = miri_to_const(cx, min_const_kind)
&& let Some(min_const) = mir_to_const(cx, Const::from_ty_const(min_val, cx.tcx))
&& let Some(start_const) = constant(cx, cx.typeck_results(), start)
{
start_const == min_const
@@ -1525,9 +1522,7 @@ pub fn is_range_full(cx: &LateContext<'_>, expr: &Expr<'_>, container_path: Opti
if let rustc_ty::Adt(_, subst) = ty.kind()
&& let bnd_ty = subst.type_at(0)
&& let Some(max_val) = bnd_ty.numeric_max_val(cx.tcx)
&& let const_val = cx.tcx.valtree_to_const_val((bnd_ty, max_val.to_valtree()))
&& let max_const_kind = Const::from_value(const_val, bnd_ty)
&& let Some(max_const) = miri_to_const(cx, max_const_kind)
&& let Some(max_const) = mir_to_const(cx, Const::from_ty_const(max_val, cx.tcx))
&& let Some(end_const) = constant(cx, cx.typeck_results(), end)
{
end_const == max_const
@@ -2456,11 +2451,12 @@ pub fn is_in_test_function(tcx: TyCtxt<'_>, id: hir::HirId) -> bool {
})
}
/// Checks if the item containing the given `HirId` has `#[cfg(test)]` attribute applied
/// Checks if `id` has a `#[cfg(test)]` attribute applied
///
/// Note: Add `//@compile-flags: --test` to UI tests with a `#[cfg(test)]` function
pub fn is_in_cfg_test(tcx: TyCtxt<'_>, id: hir::HirId) -> bool {
fn is_cfg_test(attr: &Attribute) -> bool {
/// This only checks directly applied attributes; to see if a node is inside a `#[cfg(test)]` parent,
/// use [`is_in_cfg_test`].
pub fn is_cfg_test(tcx: TyCtxt<'_>, id: hir::HirId) -> bool {
tcx.hir().attrs(id).iter().any(|attr| {
if attr.has_name(sym::cfg)
&& let Some(items) = attr.meta_item_list()
&& let [item] = &*items
@@ -2470,11 +2466,14 @@ pub fn is_in_cfg_test(tcx: TyCtxt<'_>, id: hir::HirId) -> bool {
} else {
false
}
}
})
}
/// Checks if any parent node of the given `HirId` has a `#[cfg(test)]` attribute applied
pub fn is_in_cfg_test(tcx: TyCtxt<'_>, id: hir::HirId) -> bool {
tcx.hir()
.parent_iter(id)
.flat_map(|(parent_id, _)| tcx.hir().attrs(parent_id))
.any(is_cfg_test)
.parent_id_iter(id)
.any(|parent_id| is_cfg_test(tcx, parent_id))
}
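To illustrate the intended split between the two helpers, consider a hypothetical target item (not part of this change). Per the doc comments above, `is_cfg_test` should report true only for the node that carries the attribute, while `is_in_cfg_test` should also report true for items nested inside it:

fn main() {}

#[cfg(test)] // `is_cfg_test` is expected to be true for this module...
mod tests {
    #[test]
    fn inside() {} // ...and `is_in_cfg_test` also for this nested function.
}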
/// Checks if the item or any of its parents has a `#[cfg(...)]` attribute applied.

View File

@@ -19,7 +19,7 @@ macro_rules! msrv_aliases {
// names may refer to stabilized feature flags or library items
msrv_aliases! {
1,71,0 { TUPLE_ARRAY_CONVERSIONS }
1,71,0 { TUPLE_ARRAY_CONVERSIONS, BUILD_HASHER_HASH_ONE }
1,70,0 { OPTION_IS_SOME_AND, BINARY_HEAP_RETAIN }
1,68,0 { PATH_MAIN_SEPARATOR_STR }
1,65,0 { LET_ELSE, POINTER_CAST_CONSTNESS }

View File

@@ -62,6 +62,27 @@ where
}
}
}
impl<'tcx, A, B> Visitable<'tcx> for (A, B)
where
A: Visitable<'tcx>,
B: Visitable<'tcx>,
{
fn visit<V: Visitor<'tcx>>(self, visitor: &mut V) {
let (a, b) = self;
a.visit(visitor);
b.visit(visitor);
}
}
impl<'tcx, T> Visitable<'tcx> for Option<T>
where
T: Visitable<'tcx>,
{
fn visit<V: Visitor<'tcx>>(self, visitor: &mut V) {
if let Some(x) = self {
x.visit(visitor);
}
}
}
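The two new impls follow the usual compose-the-visitables shape: a tuple visits both elements, an `Option` visits its contents if present. As an illustration only, the same pattern with a toy trait (plain Rust, not clippy_utils' actual `Visitable`/`Visitor` types):

trait Visit {
    fn visit(&self, out: &mut Vec<String>);
}

impl Visit for &str {
    fn visit(&self, out: &mut Vec<String>) {
        out.push((*self).to_string());
    }
}

impl<A: Visit, B: Visit> Visit for (A, B) {
    fn visit(&self, out: &mut Vec<String>) {
        self.0.visit(out);
        self.1.visit(out);
    }
}

impl<T: Visit> Visit for Option<T> {
    fn visit(&self, out: &mut Vec<String>) {
        if let Some(x) = self {
            x.visit(out);
        }
    }
}

fn main() {
    let mut seen = Vec::new();
    (("a", Some("b")), None::<&str>).visit(&mut seen);
    assert_eq!(seen, ["a", "b"]);
}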
macro_rules! visitable_ref {
($t:ident, $f:ident) => {
impl<'tcx> Visitable<'tcx> for &'tcx $t<'tcx> {
@@ -748,3 +769,26 @@ pub fn contains_break_or_continue(expr: &Expr<'_>) -> bool {
})
.is_some()
}
/// If the local is used exactly once in `visitable`, returns the path expression referencing the
/// given local.
pub fn local_used_once<'tcx>(
cx: &LateContext<'tcx>,
visitable: impl Visitable<'tcx>,
id: HirId,
) -> Option<&'tcx Expr<'tcx>> {
let mut expr = None;
let cf = for_each_expr_with_closures(cx, visitable, |e| {
if path_to_local_id(e, id) && expr.replace(e).is_some() {
ControlFlow::Break(())
} else {
ControlFlow::Continue(())
}
});
if cf.is_some() {
return None;
}
expr
}
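The control flow above implements an exactly-one-match check: record the first use, bail out as soon as a second one appears, and return `None` if there were zero or more than one. A standalone analogue over a slice, with a hypothetical `used_once` helper:

fn used_once<T: PartialEq>(haystack: &[T], needle: &T) -> Option<usize> {
    let mut found = None;
    for (idx, item) in haystack.iter().enumerate() {
        // A second match plays the role of `ControlFlow::Break` above: give up.
        if item == needle && found.replace(idx).is_some() {
            return None;
        }
    }
    // `None` if there were no matches, `Some(idx)` if there was exactly one.
    found
}

fn main() {
    assert_eq!(used_once(&[1, 2, 3], &2), Some(1));
    assert_eq!(used_once(&[2, 2, 3], &2), None);
    assert_eq!(used_once(&[1, 3], &2), None);
}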

View File

@@ -1,6 +1,6 @@
[package]
name = "declare_clippy_lint"
version = "0.1.74"
version = "0.1.75"
edition = "2021"
publish = false

View File

@@ -1,3 +1,3 @@
[toolchain]
channel = "nightly-2023-09-25"
channel = "nightly-2023-10-06"
components = ["cargo", "llvm-tools", "rust-src", "rust-std", "rustc", "rustc-dev", "rustfmt"]

View File

@@ -26,6 +26,8 @@ use std::ops::Deref;
use std::path::Path;
use std::process::exit;
use anstream::println;
/// If a command-line option matches `find_arg`, apply the predicate `pred` to its value; if the
/// predicate returns true, return the value. The argument is assumed to be either `--arg=value` or `--arg value`.
fn arg_value<'a, T: Deref<Target = str>>(
@@ -162,39 +164,15 @@ impl rustc_driver::Callbacks for ClippyCallbacks {
}
}
#[allow(clippy::ignored_unit_patterns)]
fn display_help() {
println!(
"\
Checks a package to catch common mistakes and improve your Rust code.
Usage:
cargo clippy [options] [--] [<opts>...]
Common options:
-h, --help Print this message
--rustc Pass all args to rustc
-V, --version Print version info and exit
For the other options see `cargo check --help`.
To allow or deny a lint from the command line you can use `cargo clippy --`
with:
-W --warn OPT Set lint warnings
-A --allow OPT Set lint allowed
-D --deny OPT Set lint denied
-F --forbid OPT Set lint forbidden
You can use tool lints to allow or deny lints from your code, eg.:
#[allow(clippy::needless_lifetimes)]
"
);
println!("{}", help_message());
}
const BUG_REPORT_URL: &str = "https://github.com/rust-lang/rust-clippy/issues/new?template=ice.yml";
#[allow(clippy::too_many_lines)]
#[allow(clippy::ignored_unit_patterns)]
pub fn main() {
let handler = EarlyErrorHandler::new(ErrorOutputType::default());
@@ -236,6 +214,7 @@ pub fn main() {
if orig_args.iter().any(|a| a == "--version" || a == "-V") {
let version_info = rustc_tools_util::get_version_info!();
println!("{version_info}");
exit(0);
}
@@ -292,3 +271,25 @@ pub fn main() {
}
}))
}
#[must_use]
fn help_message() -> &'static str {
color_print::cstr!(
"Checks a file to catch common mistakes and improve your Rust code.
Run <cyan>clippy-driver</> with the same arguments you use for <cyan>rustc</>
<green,bold>Usage</>:
<cyan,bold>clippy-driver</> <cyan>[OPTIONS] INPUT</>
<green,bold>Common options:</>
<cyan,bold>-h</>, <cyan,bold>--help</> Print this message
<cyan,bold>-V</>, <cyan,bold>--version</> Print version info and exit
<cyan,bold>--rustc</> Pass all arguments to <cyan>rustc</>
<green,bold>Allowing / Denying lints</>
You can use tool lints to allow or deny lints from your code, e.g.:
<yellow,bold>#[allow(clippy::needless_lifetimes)]</>
"
)
}
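`color_print::cstr!` expands its HTML-like tags (`<cyan>`, `<green,bold>`, `</>`, ...) into ANSI escape sequences at compile time and yields a `&'static str`, which is why both help functions can simply return a string that is later printed with `println!`. A minimal sketch, assuming the `color-print` dependency added by this commit:

fn main() {
    // The tags become ANSI escapes embedded in the resulting `&'static str`.
    let banner: &'static str = color_print::cstr!("<green,bold>Usage</>: <cyan>cargo clippy [OPTIONS]</>");
    println!("{banner}");
}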

View File

@@ -6,37 +6,14 @@ use std::env;
use std::path::PathBuf;
use std::process::{self, Command};
const CARGO_CLIPPY_HELP: &str = "Checks a package to catch common mistakes and improve your Rust code.
Usage:
cargo clippy [options] [--] [<opts>...]
Common options:
--no-deps Run Clippy only on the given crate, without linting the dependencies
--fix Automatically apply lint suggestions. This flag implies `--no-deps` and `--all-targets`
-h, --help Print this message
-V, --version Print version info and exit
--explain LINT Print the documentation for a given lint
For the other options see `cargo check --help`.
To allow or deny a lint from the command line you can use `cargo clippy --`
with:
-W --warn OPT Set lint warnings
-A --allow OPT Set lint allowed
-D --deny OPT Set lint denied
-F --forbid OPT Set lint forbidden
You can use tool lints to allow or deny lints from your code, e.g.:
#[allow(clippy::needless_lifetimes)]
";
use anstream::println;
#[allow(clippy::ignored_unit_patterns)]
fn show_help() {
println!("{CARGO_CLIPPY_HELP}");
println!("{}", help_message());
}
#[allow(clippy::ignored_unit_patterns)]
fn show_version() {
let version_info = rustc_tools_util::get_version_info!();
println!("{version_info}");
@@ -168,6 +145,38 @@ where
}
}
#[must_use]
pub fn help_message() -> &'static str {
color_print::cstr!(
"Checks a package to catch common mistakes and improve your Rust code.
<green,bold>Usage</>:
<cyan,bold>cargo clippy</> <cyan>[OPTIONS] [--] [<<ARGS>>...]</>
<green,bold>Common options:</>
<cyan,bold>--no-deps</> Run Clippy only on the given crate, without linting the dependencies
<cyan,bold>--fix</> Automatically apply lint suggestions. This flag implies <cyan>--no-deps</> and <cyan>--all-targets</>
<cyan,bold>-h</>, <cyan,bold>--help</> Print this message
<cyan,bold>-V</>, <cyan,bold>--version</> Print version info and exit
<cyan,bold>--explain [LINT]</> Print the documentation for a given lint
See all options with <cyan,bold>cargo check --help</>.
<green,bold>Allowing / Denying lints</>
To allow or deny a lint from the command line you can use <cyan,bold>cargo clippy --</> with:
<cyan,bold>-W</> / <cyan,bold>--warn</> <cyan>[LINT]</> Set lint warnings
<cyan,bold>-A</> / <cyan,bold>--allow</> <cyan>[LINT]</> Set lint allowed
<cyan,bold>-D</> / <cyan,bold>--deny</> <cyan>[LINT]</> Set lint denied
<cyan,bold>-F</> / <cyan,bold>--forbid</> <cyan>[LINT]</> Set lint forbidden
You can use tool lints to allow or deny lints from your code, e.g.:
<yellow,bold>#[allow(clippy::needless_lifetimes)]</>
"
)
}
#[cfg(test)]
mod tests {
use super::ClippyCmd;

View File

@@ -21,6 +21,18 @@ pub fn derive(_: TokenStream) -> TokenStream {
output
}
#[proc_macro_derive(ImplStructWithStdDisplay)]
pub fn derive_std(_: TokenStream) -> TokenStream {
quote! {
struct A {}
impl ::std::fmt::Display for A {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
write!(f, "A")
}
}
}
}
#[proc_macro_derive(FieldReassignWithDefault)]
pub fn derive_foo(_input: TokenStream) -> TokenStream {
quote! {
@@ -141,3 +153,19 @@ pub fn shadow_derive(_: TokenStream) -> TokenStream {
.into(),
])
}
#[proc_macro_derive(StructIgnoredUnitPattern)]
pub fn derive_ignored_unit_pattern(_: TokenStream) -> TokenStream {
quote! {
struct A;
impl A {
fn a(&self) -> Result<(), ()> {
unimplemented!()
}
pub fn b(&self) {
let _ = self.a().unwrap();
}
}
}
}

Some files were not shown because too many files have changed in this diff.