mirror of https://github.com/rust-lang/rust.git
Rollup merge of #85200 - FabianWolff:issue-84647, r=nikomatsakis
Ignore derived Clone and Debug implementations during dead code analysis

This pull request fixes #84647. Derived implementations of `Clone` and `Debug` always trivially read all fields, so "field is never read" dead code warnings are never triggered. Arguably, though, a user will most likely only be interested in whether _their_ code ever reads those fields, which is the behavior I have implemented here.

Note that implementations of `Clone` and `Debug` are only ignored if they are `#[derive(...)]`d; a custom `impl Clone/Debug for ...` will still be analyzed normally (i.e. if a custom `Clone` implementation uses all fields of the struct, this will continue to suppress dead code warnings about unused fields). This seemed like the least intrusive change to me, although it would be easy to change: just drop the `&& [impl_]item.span.in_derive_expansion()` in the if conditions.

The only thing that I am slightly unsure about is that in #84647, `@matklad` said

> Doesn't seem easy to fix though :(

However, it _was_ pretty straightforward to fix, so did I perhaps overlook something obvious? `@matklad`, could you weigh in on this?
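To illustrate the change, here is a minimal sketch adapted from the UI test added in this PR (struct and field names follow that test but are otherwise illustrative). With this patch, derived impls no longer count as reads of `f`, so the field on `D` is reported; a hand-written impl that actually reads the field, as on `F`, still suppresses the warning:

```rust
// Derived impls are now ignored by dead code analysis, so rustc reports
// "field is never read: `f`" for `D` (dead_code is warn-by-default).
#[derive(Debug, Clone)]
struct D { f: () }

// A custom `Clone` impl that reads the field still counts as a use,
// so no warning is emitted for `F::f`.
struct F { f: () }

impl Clone for F {
    fn clone(&self) -> Self {
        Self { f: self.f }
    }
}

fn main() {
    let _ = D { f: () };
    let _ = F { f: () };
}
```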
This commit is contained in: commit acfe7c4141
@@ -16,7 +16,6 @@ macro_rules! declare_features {
since: $ver,
issue: to_nonzero($issue),
edition: None,
description: concat!($($doc,)*),
}
),+
];
@@ -37,7 +37,6 @@ macro_rules! declare_features {
since: $ver,
issue: to_nonzero($issue),
edition: $edition,
description: concat!($($doc,)*),
}
),+];
@@ -453,6 +453,9 @@ pub const BUILTIN_ATTRIBUTES: &[BuiltinAttribute] = &[
),
// Enumerates "identity-like" conversion methods to suggest on type mismatch.
rustc_attr!(rustc_conversion_suggestion, Normal, template!(Word), INTERNAL_UNSTABLE),
// Prevents field reads in the marked trait or method to be considered
// during dead code analysis.
rustc_attr!(rustc_trivial_field_reads, Normal, template!(Word), INTERNAL_UNSTABLE),

// ==========================================================================
// Internal attributes, Const related:
@@ -51,7 +51,6 @@ pub struct Feature {
pub since: &'static str,
issue: Option<NonZeroU32>,
pub edition: Option<Edition>,
description: &'static str,
}

#[derive(Copy, Clone, Debug)]
@@ -16,7 +16,6 @@ macro_rules! declare_features {
since: $ver,
issue: to_nonzero($issue),
edition: None,
description: concat!($($doc,)*),
}
),+
];
@@ -34,7 +33,6 @@ macro_rules! declare_features {
since: $ver,
issue: to_nonzero($issue),
edition: None,
description: concat!($($doc,)*),
}
),+
];
@@ -62,13 +62,6 @@ fn hash_body(
stable_hasher.finish()
}

/// Represents an entry and its parent `HirId`.
#[derive(Copy, Clone, Debug)]
pub struct Entry<'hir> {
parent: HirId,
node: Node<'hir>,
}

impl<'a, 'hir> NodeCollector<'a, 'hir> {
pub(super) fn root(
sess: &'a Session,
@@ -28,7 +28,6 @@ fn compute_ignored_attr_names() -> FxHashSet<Symbol> {
/// things (e.g., each `DefId`/`DefPath` is only hashed once).
#[derive(Clone)]
pub struct StableHashingContext<'a> {
sess: &'a Session,
definitions: &'a Definitions,
cstore: &'a dyn CrateStore,
pub(super) body_resolver: BodyResolver<'a>,
@@ -78,7 +77,6 @@ impl<'a> StableHashingContext<'a> {
!always_ignore_spans && !sess.opts.debugging_opts.incremental_ignore_spans;

StableHashingContext {
sess,
body_resolver: BodyResolver(krate),
definitions,
cstore,
@@ -900,10 +900,7 @@ fn traverse_candidate<'pat, 'tcx: 'pat, C, T, I>(
struct Binding<'tcx> {
span: Span,
source: Place<'tcx>,
name: Symbol,
var_id: HirId,
var_ty: Ty<'tcx>,
mutability: Mutability,
binding_mode: BindingMode,
}

@@ -176,17 +176,22 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
Ok(())
}

PatKind::Binding { name, mutability, mode, var, ty, ref subpattern, is_primary: _ } => {
PatKind::Binding {
name: _,
mutability: _,
mode,
var,
ty: _,
ref subpattern,
is_primary: _,
} => {
if let Ok(place_resolved) =
match_pair.place.clone().try_upvars_resolved(self.tcx, self.typeck_results)
{
candidate.bindings.push(Binding {
name,
mutability,
span: match_pair.pattern.span,
source: place_resolved.into_place(self.tcx, self.typeck_results),
var_id: var,
var_ty: ty,
binding_mode: mode,
});
}
@@ -118,9 +118,6 @@ struct Scope {
/// the region span of this scope within source code.
region_scope: region::Scope,

/// the span of that region_scope
region_scope_span: Span,

/// set of places to drop when exiting this scope. This starts
/// out empty but grows as variables are declared during the
/// building process. This is a stack, so we always drop from the
@@ -420,7 +417,6 @@ impl<'tcx> Scopes<'tcx> {
self.scopes.push(Scope {
source_scope: vis_scope,
region_scope: region_scope.0,
region_scope_span: region_scope.1.span,
drops: vec![],
moved_locals: vec![],
cached_unwind_block: None,
@@ -263,7 +263,7 @@ impl<'a, 'tcx> Instrumentor<'a, 'tcx> {
}

if let Err(e) = result {
bug!("Error processing: {:?}: {:?}", self.mir_body.source.def_id(), e)
bug!("Error processing: {:?}: {:?}", self.mir_body.source.def_id(), e.message)
};

// Depending on current `debug_options()`, `alert_on_unused_expressions()` could panic, so
@@ -239,7 +239,69 @@ impl<'tcx> MarkSymbolVisitor<'tcx> {
}
}

/// Automatically generated items marked with `rustc_trivial_field_reads`
/// will be ignored for the purposes of dead code analysis (see PR #85200
/// for discussion).
fn should_ignore_item(&self, def_id: DefId) -> bool {
if !self.tcx.has_attr(def_id, sym::automatically_derived)
&& !self
.tcx
.impl_of_method(def_id)
.map_or(false, |impl_id| self.tcx.has_attr(impl_id, sym::automatically_derived))
{
return false;
}

let has_attr = |def_id| self.tcx.has_attr(def_id, sym::rustc_trivial_field_reads);

if has_attr(def_id) {
return true;
}

if let Some(impl_of) = self.tcx.impl_of_method(def_id) {
if has_attr(impl_of) {
return true;
}

if let Some(trait_of) = self.tcx.trait_id_of_impl(impl_of) {
if has_attr(trait_of) {
return true;
}

if let Some(method_ident) = self.tcx.opt_item_name(def_id) {
if let Some(trait_method) = self
.tcx
.associated_items(trait_of)
.find_by_name_and_kind(self.tcx, method_ident, ty::AssocKind::Fn, trait_of)
{
if has_attr(trait_method.def_id) {
return true;
}
}
}
}
} else if let Some(trait_of) = self.tcx.trait_of_item(def_id) {
if has_attr(trait_of) {
return true;
}
}

return false;
}

fn visit_node(&mut self, node: Node<'tcx>) {
if let Some(item_def_id) = match node {
Node::Item(hir::Item { def_id, .. })
| Node::ForeignItem(hir::ForeignItem { def_id, .. })
| Node::TraitItem(hir::TraitItem { def_id, .. })
| Node::ImplItem(hir::ImplItem { def_id, .. }) => Some(def_id.to_def_id()),
_ => None,
} {
if self.should_ignore_item(item_def_id) {
return;
}
}

let had_repr_c = self.repr_has_repr_c;
let had_inherited_pub_visibility = self.inherited_pub_visibility;
let had_pub_visibility = self.pub_visibility;
@@ -1134,6 +1134,7 @@ symbols! {
rustc_synthetic,
rustc_test_marker,
rustc_then_this_would_need,
rustc_trivial_field_reads,
rustc_unsafe_specialization_marker,
rustc_variance,
rustdoc,
@@ -105,6 +105,7 @@
#[stable(feature = "rust1", since = "1.0.0")]
#[lang = "clone"]
#[rustc_diagnostic_item = "Clone"]
#[cfg_attr(not(bootstrap), rustc_trivial_field_reads)]
pub trait Clone: Sized {
/// Returns a copy of the value.
///
@@ -582,6 +582,7 @@ impl Display for Arguments<'_> {
)]
#[doc(alias = "{:?}")]
#[rustc_diagnostic_item = "debug_trait"]
#[cfg_attr(not(bootstrap), rustc_trivial_field_reads)]
pub trait Debug {
/// Formats the value using the given formatter.
///
@@ -653,6 +653,7 @@ mod debug_list {
fn test_formatting_parameters_are_forwarded() {
use std::collections::{BTreeMap, BTreeSet};
#[derive(Debug)]
#[allow(dead_code)]
struct Foo {
bar: u32,
baz: u32,
@@ -468,9 +468,6 @@ struct ProgrammableSink {
// Writes append to this slice
pub buffer: Vec<u8>,

// Flush sets this flag
pub flushed: bool,

// If true, writes will always be an error
pub always_write_error: bool,

@@ -520,7 +517,6 @@ impl Write for ProgrammableSink {
if self.always_flush_error {
Err(io::Error::new(io::ErrorKind::Other, "test - always_flush_error"))
} else {
self.flushed = true;
Ok(())
}
}
@@ -16,6 +16,7 @@ use parser::compiled::{msys_terminfo, parse};
use searcher::get_dbpath_for_term;

/// A parsed terminfo database entry.
#[allow(unused)]
#[derive(Debug)]
pub(crate) struct TermInfo {
/// Names for the terminal
@@ -557,7 +557,6 @@ fn build_macro(
name: Symbol,
import_def_id: Option<DefId>,
) -> clean::ItemKind {
let imported_from = cx.tcx.crate_name(def_id.krate);
match CStore::from_tcx(cx.tcx).load_macro_untracked(def_id, cx.sess()) {
LoadedMacro::MacroDef(item_def, _) => {
if let ast::ItemKind::MacroDef(ref def) = item_def.kind {
@@ -569,7 +568,6 @@ fn build_macro(
def_id,
cx.tcx.visibility(import_def_id.unwrap_or(def_id)),
),
imported_from: Some(imported_from),
})
} else {
unreachable!()
@@ -1859,7 +1859,6 @@ impl Clean<Vec<Item>> for (&hir::Item<'_>, Option<Symbol>) {
}
ItemKind::Macro(ref macro_def) => MacroItem(Macro {
source: display_macro_source(cx, name, &macro_def, def_id, &item.vis),
imported_from: None,
}),
ItemKind::Trait(is_auto, unsafety, ref generics, ref bounds, ref item_ids) => {
let items = item_ids
@@ -2202,7 +2202,6 @@ crate struct ImportSource {
#[derive(Clone, Debug)]
crate struct Macro {
crate source: String,
crate imported_from: Option<Symbol>,
}

#[derive(Clone, Debug)]
@@ -224,7 +224,6 @@ struct AllTypes {
opaque_tys: FxHashSet<ItemEntry>,
statics: FxHashSet<ItemEntry>,
constants: FxHashSet<ItemEntry>,
keywords: FxHashSet<ItemEntry>,
attributes: FxHashSet<ItemEntry>,
derives: FxHashSet<ItemEntry>,
trait_aliases: FxHashSet<ItemEntry>,
@@ -245,7 +244,6 @@ impl AllTypes {
opaque_tys: new_set(100),
statics: new_set(100),
constants: new_set(100),
keywords: new_set(100),
attributes: new_set(100),
derives: new_set(100),
trait_aliases: new_set(100),
@@ -2,6 +2,7 @@

// Test binary_search_by_key lifetime. Issue #34683

#[allow(dead_code)]
#[derive(Debug)]
struct Assignment {
topic: String,
@@ -1,4 +1,5 @@
// run-pass
#![allow(dead_code)]
#![deny(unused_mut)]

#[derive(Debug)]
@@ -3,6 +3,7 @@
// check-pass
#![allow(unreachable_code)]
#![warn(unused)]
#![allow(dead_code)]

#[derive(Debug)]
struct Point {
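The hunk above is typical of the test-suite updates in this rollup: tests whose derived `Debug`/`Clone` impls previously counted as field reads now opt out with `#![allow(dead_code)]`. A self-contained sketch of that pattern (field names are illustrative):

```rust
// Crate-level opt-out used throughout the updated tests.
#![allow(dead_code)]

// Without the allow above, `x` and `y` would now be reported as never read,
// because the derived `Debug` impl no longer counts as a reader.
#[derive(Debug)]
struct Point {
    x: i32,
    y: i32,
}

fn main() {
    let p = Point { x: 1, y: 2 };
    println!("{:?}", p);
}
```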
@@ -1,5 +1,5 @@
warning: value captured by `a` is never read
--> $DIR/liveness.rs:23:9
--> $DIR/liveness.rs:24:9
|
LL | a = 1;
| ^
@@ -13,7 +13,7 @@ LL | #![warn(unused)]
= help: did you mean to capture by reference instead?

warning: unused variable: `a`
--> $DIR/liveness.rs:32:9
--> $DIR/liveness.rs:33:9
|
LL | a += 1;
| ^
@@ -27,7 +27,7 @@ LL | #![warn(unused)]
= help: did you mean to capture by reference instead?

warning: value assigned to `a` is never read
--> $DIR/liveness.rs:52:9
--> $DIR/liveness.rs:53:9
|
LL | a += 1;
| ^
@@ -35,7 +35,7 @@ LL | a += 1;
= help: maybe it is overwritten before being read?

warning: value assigned to `a` is never read
--> $DIR/liveness.rs:76:13
--> $DIR/liveness.rs:77:13
|
LL | a = Some("d1");
| ^
@@ -43,7 +43,7 @@ LL | a = Some("d1");
= help: maybe it is overwritten before being read?

warning: value assigned to `b` is never read
--> $DIR/liveness.rs:84:13
--> $DIR/liveness.rs:85:13
|
LL | b = Some("e1");
| ^
@@ -51,7 +51,7 @@ LL | b = Some("e1");
= help: maybe it is overwritten before being read?

warning: value assigned to `b` is never read
--> $DIR/liveness.rs:86:13
--> $DIR/liveness.rs:87:13
|
LL | b = Some("e2");
| ^
@@ -59,7 +59,7 @@ LL | b = Some("e2");
= help: maybe it is overwritten before being read?

warning: unused variable: `b`
--> $DIR/liveness.rs:84:13
--> $DIR/liveness.rs:85:13
|
LL | b = Some("e1");
| ^
@@ -2,6 +2,7 @@

// check-pass
#![warn(unused)]
#![allow(dead_code)]

#[derive(Debug)]
struct MyStruct {
@@ -1,5 +1,5 @@
warning: value assigned to `a` is never read
--> $DIR/liveness_unintentional_copy.rs:19:9
--> $DIR/liveness_unintentional_copy.rs:20:9
|
LL | a = s;
| ^
@@ -13,7 +13,7 @@ LL | #![warn(unused)]
= help: maybe it is overwritten before being read?

warning: unused variable: `a`
--> $DIR/liveness_unintentional_copy.rs:19:9
--> $DIR/liveness_unintentional_copy.rs:20:9
|
LL | a = s;
| ^
@@ -27,7 +27,7 @@ LL | #![warn(unused)]
= help: did you mean to capture by reference instead?

warning: unused variable: `a`
--> $DIR/liveness_unintentional_copy.rs:35:9
--> $DIR/liveness_unintentional_copy.rs:36:9
|
LL | a += x;
| ^
@@ -1,6 +1,7 @@
// edition:2021
//check-pass
#![warn(unused)]
#![allow(dead_code)]
#![feature(rustc_attrs)]

#[derive(Debug, Clone, Copy)]
@@ -3,6 +3,8 @@

// Test that we can use raw ptrs when using `capture_disjoint_fields`.

#![allow(dead_code)]

#[derive(Debug)]
struct S {
s: String,
@@ -1,4 +1,7 @@
// run-pass

#![allow(dead_code)]

use std::fmt::Debug;

#[derive(Debug)]
src/test/ui/derives/clone-debug-dead-code.rs (new file, 45 lines)
@@ -0,0 +1,45 @@
// Checks that derived implementations of Clone and Debug do not
// contribute to dead code analysis (issue #84647).

#![forbid(dead_code)]

struct A { f: () }
//~^ ERROR: field is never read: `f`

#[derive(Clone)]
struct B { f: () }
//~^ ERROR: field is never read: `f`

#[derive(Debug)]
struct C { f: () }
//~^ ERROR: field is never read: `f`

#[derive(Debug,Clone)]
struct D { f: () }
//~^ ERROR: field is never read: `f`

struct E { f: () }
//~^ ERROR: field is never read: `f`
// Custom impl, still doesn't read f
impl Clone for E {
    fn clone(&self) -> Self {
        Self { f: () }
    }
}

struct F { f: () }
// Custom impl that actually reads f
impl Clone for F {
    fn clone(&self) -> Self {
        Self { f: self.f }
    }
}

fn main() {
    let _ = A { f: () };
    let _ = B { f: () };
    let _ = C { f: () };
    let _ = D { f: () };
    let _ = E { f: () };
    let _ = F { f: () };
}
src/test/ui/derives/clone-debug-dead-code.stderr (new file, 38 lines)
@@ -0,0 +1,38 @@
error: field is never read: `f`
--> $DIR/clone-debug-dead-code.rs:6:12
|
LL | struct A { f: () }
| ^^^^^
|
note: the lint level is defined here
--> $DIR/clone-debug-dead-code.rs:4:11
|
LL | #![forbid(dead_code)]
| ^^^^^^^^^

error: field is never read: `f`
--> $DIR/clone-debug-dead-code.rs:10:12
|
LL | struct B { f: () }
| ^^^^^

error: field is never read: `f`
--> $DIR/clone-debug-dead-code.rs:14:12
|
LL | struct C { f: () }
| ^^^^^

error: field is never read: `f`
--> $DIR/clone-debug-dead-code.rs:18:12
|
LL | struct D { f: () }
| ^^^^^

error: field is never read: `f`
--> $DIR/clone-debug-dead-code.rs:21:12
|
LL | struct E { f: () }
| ^^^^^

error: aborting due to 5 previous errors
@@ -1,6 +1,8 @@
// run-pass
// pretty-expanded FIXME #23616

#![allow(dead_code)]

#[derive(Clone)]
struct S<T> {
foo: (),
@@ -1,6 +1,8 @@
// run-pass
// pretty-expanded FIXME #23616

#![allow(dead_code)]

#[derive(Clone)]
struct S {
_int: isize,
@@ -1,6 +1,8 @@
// run-pass
// pretty-expanded FIXME #23616

#![allow(dead_code)]

#[derive(Clone)]
struct S((), ());

@@ -1,4 +1,7 @@
// run-pass

#![allow(dead_code)]

pub fn main() {
#[derive(Debug)]
struct Foo {
@@ -1,4 +1,7 @@
// run-pass

#![allow(dead_code)]

trait Trait { fn dummy(&self) { } }

#[derive(Debug)]
@@ -1,5 +1,6 @@
// run-pass
#![feature(box_syntax)]
#![allow(dead_code)]

trait T {
fn print(&self);
@@ -1,6 +1,7 @@
// run-pass

#![allow(non_upper_case_globals)]
#![allow(dead_code)]
/*!
* On x86_64-linux-gnu and possibly other platforms, structs get 8-byte "preferred" alignment,
* but their "ABI" alignment (i.e., what actually matters for data layout) is the largest alignment
@@ -1,5 +1,6 @@
// run-pass
#![allow(unused_mut)]
#![allow(dead_code)]
#![feature(box_syntax)]

#[derive(Clone)]
@@ -1,5 +1,6 @@
// run-pass
#![allow(unused_mut)]
#![allow(dead_code)]
#![feature(box_syntax)]

#[derive(Clone)]
@@ -1,5 +1,7 @@
// run-pass

#![allow(dead_code)]

use std::rc::Rc;
use std::ops::Deref;

@@ -1,5 +1,6 @@
// run-pass
#![feature(box_syntax)]
#![allow(dead_code)]

#[derive(Copy, Clone)]
struct Foo {
@@ -1,5 +1,6 @@
// run-pass
#![allow(non_camel_case_types)]
#![allow(dead_code)]

trait noisy {
fn speak(&mut self) -> isize;
@@ -1,5 +1,6 @@
// run-pass
#![allow(non_camel_case_types)]
#![allow(dead_code)]

trait noisy {
fn speak(&mut self);
@@ -1,4 +1,7 @@
// run-pass

#![allow(dead_code)]

#[derive(Debug)]
struct Foo {
x: isize,
@@ -1,5 +1,6 @@
// run-pass
#![allow(non_camel_case_types)]
#![allow(dead_code)]

#[derive(Debug)]
enum a_tag {
@@ -29,36 +29,21 @@ declare_clippy_lint! {
"#[macro_use] is no longer needed"
}

const BRACKETS: &[char] = &['<', '>'];

#[derive(Clone, Debug, PartialEq, Eq)]
struct PathAndSpan {
path: String,
span: Span,
}

/// `MacroRefData` includes the name of the macro
/// and the path from `SourceMap::span_to_filename`.
/// `MacroRefData` includes the name of the macro.
#[derive(Debug, Clone)]
pub struct MacroRefData {
name: String,
path: String,
}

impl MacroRefData {
pub fn new(name: String, callee: Span, cx: &LateContext<'_>) -> Self {
let sm = cx.sess().source_map();
let mut path = sm.filename_for_diagnostics(&sm.span_to_filename(callee)).to_string();

// std lib paths are <::std::module::file type>
// so remove brackets, space and type.
if path.contains('<') {
path = path.replace(BRACKETS, "");
}
if path.contains(' ') {
path = path.split(' ').next().unwrap().to_string();
}
Self { name, path }
pub fn new(name: String) -> Self {
Self { name }
}
}

@@ -78,7 +63,7 @@ impl MacroUseImports {
fn push_unique_macro(&mut self, cx: &LateContext<'_>, span: Span) {
let call_site = span.source_callsite();
let name = snippet(cx, cx.sess().source_map().span_until_char(call_site, '!'), "_");
if let Some(callee) = span.source_callee() {
if let Some(_callee) = span.source_callee() {
if !self.collected.contains(&call_site) {
let name = if name.contains("::") {
name.split("::").last().unwrap().to_string()
@@ -86,7 +71,7 @@ impl MacroUseImports {
name.to_string()
};

self.mac_refs.push(MacroRefData::new(name, callee.def_site, cx));
self.mac_refs.push(MacroRefData::new(name));
self.collected.insert(call_site);
}
}
@@ -95,10 +80,10 @@ impl MacroUseImports {
fn push_unique_macro_pat_ty(&mut self, cx: &LateContext<'_>, span: Span) {
let call_site = span.source_callsite();
let name = snippet(cx, cx.sess().source_map().span_until_char(call_site, '!'), "_");
if let Some(callee) = span.source_callee() {
if let Some(_callee) = span.source_callee() {
if !self.collected.contains(&call_site) {
self.mac_refs
.push(MacroRefData::new(name.to_string(), callee.def_site, cx));
.push(MacroRefData::new(name.to_string()));
self.collected.insert(call_site);
}
}
@@ -3,8 +3,7 @@ use clippy_utils::diagnostics::{span_lint, span_lint_and_help};
use clippy_utils::{match_def_path, paths};
use if_chain::if_chain;
use rustc_ast::ast::{LitKind, StrStyle};
use rustc_data_structures::fx::FxHashSet;
use rustc_hir::{BorrowKind, Expr, ExprKind, HirId};
use rustc_hir::{BorrowKind, Expr, ExprKind};
use rustc_lint::{LateContext, LateLintPass};
use rustc_session::{declare_tool_lint, impl_lint_pass};
use rustc_span::source_map::{BytePos, Span};
@@ -53,10 +52,7 @@ declare_clippy_lint! {
}

#[derive(Clone, Default)]
pub struct Regex {
spans: FxHashSet<Span>,
last: Option<HirId>,
}
pub struct Regex {}

impl_lint_pass!(Regex => [INVALID_REGEX, TRIVIAL_REGEX]);

@@ -1,6 +1,6 @@
// run-rustfix

#![allow(unused_imports)]
#![allow(unused_imports,dead_code)]
#![deny(clippy::default_trait_access)]

use std::default;
@@ -1,6 +1,6 @@
// run-rustfix

#![allow(unused_imports)]
#![allow(unused_imports,dead_code)]
#![deny(clippy::default_trait_access)]

use std::default;
@@ -762,7 +762,6 @@ impl MacroArgKind {
#[derive(Debug, Clone)]
struct ParsedMacroArg {
kind: MacroArgKind,
span: Span,
}

impl ParsedMacroArg {
@@ -780,14 +779,10 @@ impl ParsedMacroArg {
struct MacroArgParser {
/// Either a name of the next metavariable, a separator, or junk.
buf: String,
/// The start position on the current buffer.
lo: BytePos,
/// The first token of the current buffer.
start_tok: Token,
/// `true` if we are parsing a metavariable or a repeat.
is_meta_var: bool,
/// The position of the last token.
hi: BytePos,
/// The last token parsed.
last_tok: Token,
/// Holds the parsed arguments.
@@ -807,8 +802,6 @@ fn last_tok(tt: &TokenTree) -> Token {
impl MacroArgParser {
fn new() -> MacroArgParser {
MacroArgParser {
lo: BytePos(0),
hi: BytePos(0),
buf: String::new(),
is_meta_var: false,
last_tok: Token {
@@ -824,7 +817,6 @@ impl MacroArgParser {
}

fn set_last_tok(&mut self, tok: &TokenTree) {
self.hi = tok.span().hi();
self.last_tok = last_tok(tok);
}

@@ -836,7 +828,6 @@ impl MacroArgParser {
};
self.result.push(ParsedMacroArg {
kind: MacroArgKind::Separator(self.buf.clone(), prefix),
span: mk_sp(self.lo, self.hi),
});
self.buf.clear();
}
@@ -849,7 +840,6 @@ impl MacroArgParser {
};
self.result.push(ParsedMacroArg {
kind: MacroArgKind::Other(self.buf.clone(), prefix),
span: mk_sp(self.lo, self.hi),
});
self.buf.clear();
}
@@ -858,11 +848,10 @@ impl MacroArgParser {
match iter.next() {
Some(TokenTree::Token(Token {
kind: TokenKind::Ident(name, _),
span,
..
})) => {
self.result.push(ParsedMacroArg {
kind: MacroArgKind::MetaVariable(name, self.buf.clone()),
span: mk_sp(self.lo, span.hi()),
});

self.buf.clear();
@@ -873,10 +862,9 @@ impl MacroArgParser {
}
}

fn add_delimited(&mut self, inner: Vec<ParsedMacroArg>, delim: DelimToken, span: Span) {
fn add_delimited(&mut self, inner: Vec<ParsedMacroArg>, delim: DelimToken) {
self.result.push(ParsedMacroArg {
kind: MacroArgKind::Delimited(delim, inner),
span,
});
}

@@ -886,19 +874,15 @@ impl MacroArgParser {
inner: Vec<ParsedMacroArg>,
delim: DelimToken,
iter: &mut Cursor,
span: Span,
) -> Option<()> {
let mut buffer = String::new();
let mut first = true;
let mut lo = span.lo();
let mut hi = span.hi();

// Parse '*', '+' or '?.
for tok in iter {
self.set_last_tok(&tok);
if first {
first = false;
lo = tok.span().lo();
}

match tok {
@@ -918,7 +902,6 @@ impl MacroArgParser {
}
TokenTree::Token(ref t) => {
buffer.push_str(&pprust::token_to_string(&t));
hi = t.span.hi();
}
_ => return None,
}
@@ -930,20 +913,17 @@ impl MacroArgParser {
} else {
Some(Box::new(ParsedMacroArg {
kind: MacroArgKind::Other(buffer, "".to_owned()),
span: mk_sp(lo, hi),
}))
};

self.result.push(ParsedMacroArg {
kind: MacroArgKind::Repeat(delim, inner, another, self.last_tok.clone()),
span: mk_sp(self.lo, self.hi),
});
Some(())
}

fn update_buffer(&mut self, t: &Token) {
if self.buf.is_empty() {
self.lo = t.span.lo();
self.start_tok = t.clone();
} else {
let needs_space = match next_space(&self.last_tok.kind) {
@@ -999,7 +979,6 @@ impl MacroArgParser {

// Start keeping the name of this metavariable in the buffer.
self.is_meta_var = true;
self.lo = span.lo();
self.start_tok = Token {
kind: TokenKind::Dollar,
span,
@@ -1012,7 +991,7 @@ impl MacroArgParser {
self.add_meta_variable(&mut iter)?;
}
TokenTree::Token(ref t) => self.update_buffer(t),
TokenTree::Delimited(delimited_span, delimited, ref tts) => {
TokenTree::Delimited(_delimited_span, delimited, ref tts) => {
if !self.buf.is_empty() {
if next_space(&self.last_tok.kind) == SpaceState::Always {
self.add_separator();
@@ -1022,16 +1001,14 @@ impl MacroArgParser {
}

// Parse the stuff inside delimiters.
let mut parser = MacroArgParser::new();
parser.lo = delimited_span.open.lo();
let parser = MacroArgParser::new();
let delimited_arg = parser.parse(tts.clone())?;

let span = delimited_span.entire();
if self.is_meta_var {
self.add_repeat(delimited_arg, delimited, &mut iter, span)?;
self.add_repeat(delimited_arg, delimited, &mut iter)?;
self.is_meta_var = false;
} else {
self.add_delimited(delimited_arg, delimited, span);
self.add_delimited(delimited_arg, delimited);
}
}
}

@@ -27,7 +27,6 @@ type FileModMap<'ast> = BTreeMap<FileName, Module<'ast>>;
pub(crate) struct Module<'a> {
ast_mod_kind: Option<Cow<'a, ast::ModKind>>,
pub(crate) items: Cow<'a, Vec<rustc_ast::ptr::P<ast::Item>>>,
attrs: Cow<'a, Vec<ast::Attribute>>,
inner_attr: Vec<ast::Attribute>,
pub(crate) span: Span,
}
@@ -46,7 +45,6 @@ impl<'a> Module<'a> {
.collect();
Module {
items: mod_items,
attrs: mod_attrs,
inner_attr,
span: mod_span,
ast_mod_kind,