Auto merge of #105017 - matthiaskrgr:rollup-j0x550l, r=matthiaskrgr

Rollup of 9 pull requests

Successful merges:

 - #104804 (Rename `ast::Lit` as `ast::MetaItemLit`.)
 - #104891 (Add documentation for `has_escaping_bound_vars`)
 - #104933 (interpret: remove PartialOrd from a bunch of types that do not have or need a sensible order)
 - #104936 (Ignore bivariant parameters in test_type_match.)
 - #104954 (make simple check of printf function)
 - #104956 (Avoid ICE if the Clone trait is not found while building error suggestions)
 - #104982 (interpret: get rid of run() function)
 - #104998 (Update my mailmap)
 - #105006 (stricter alignment enforcement for ScalarPair)

Failed merges:

r? `@ghost`
`@rustbot` modify labels: rollup
bors 2022-11-28 20:53:42 +00:00
commit 2585bcea0b
47 changed files with 571 additions and 419 deletions
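Most of the churn below comes from #104804: `ast::Lit` becomes `ast::MetaItemLit`, the `NestedMetaItem::Literal` variant becomes `NestedMetaItem::Lit`, and the `literal()` accessor becomes `lit()`. As a rough, self-contained sketch of the shape of that change — toy stand-in types, not the real `rustc_ast` definitions:

// Toy stand-ins, not the real rustc_ast types.
#[derive(Debug, Clone)]
struct MetaItemLit {
    // The real struct also carries `token_lit`, `kind`, and `span`.
    symbol: String,
}

#[allow(dead_code)]
#[derive(Debug, Clone)]
enum NestedMetaItem {
    MetaItem(String),
    // Previously `Literal(Lit)`, now `Lit(MetaItemLit)`.
    Lit(MetaItemLit),
}

impl NestedMetaItem {
    // Previously `fn literal(&self) -> Option<&Lit>`.
    fn lit(&self) -> Option<&MetaItemLit> {
        match self {
            NestedMetaItem::Lit(lit) => Some(lit),
            _ => None,
        }
    }
}

fn main() {
    let item = NestedMetaItem::Lit(MetaItemLit { symbol: "foo".to_owned() });
    // Call sites change from `item.literal()` to `item.lit()`.
    assert_eq!(item.lit().map(|l| l.symbol.as_str()), Some("foo"));
}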

View File

@ -229,7 +229,7 @@ Jacob <jacob.macritchie@gmail.com>
Jacob Greenfield <xales@naveria.com>
Jacob Pratt <jacob@jhpratt.dev> <the.z.cuber@gmail.com>
Jake Vossen <jake@vossen.dev>
Jakob Degen <jakob@degen.com>
Jakob Degen <jakob.e.degen@gmail.com> <jakob@degen.com>
Jakob Lautrup Nysom <jako3047@gmail.com>
Jakub Adam Wieczorek <jakub.adam.wieczorek@gmail.com>
Jakub Adam Wieczorek <jakub.adam.wieczorek@gmail.com> <jakub.bukaj@yahoo.com>

View File

@ -13,7 +13,7 @@
//! - [`FnDecl`], [`FnHeader`] and [`Param`]: Metadata associated with a function declaration.
//! - [`Generics`], [`GenericParam`], [`WhereClause`]: Metadata associated with generic parameters.
//! - [`EnumDef`] and [`Variant`]: Enum declaration.
//! - [`Lit`] and [`LitKind`]: Literal expressions.
//! - [`MetaItemLit`] and [`LitKind`]: Literal expressions.
//! - [`MacroDef`], [`MacStmtStyle`], [`MacCall`], [`MacDelimiter`]: Macro definition and invocation.
//! - [`Attribute`]: Metadata associated with item.
//! - [`UnOp`], [`BinOp`], and [`BinOpKind`]: Unary and binary operators.
@ -489,7 +489,7 @@ pub enum NestedMetaItem {
/// A literal.
///
/// E.g., `"foo"`, `64`, `true`.
Literal(Lit),
Lit(MetaItemLit),
}
/// A spanned compile-time attribute item.
@ -518,7 +518,7 @@ pub enum MetaItemKind {
/// Name value meta item.
///
/// E.g., `feature = "foo"` as in `#[feature = "foo"]`.
NameValue(Lit),
NameValue(MetaItemLit),
}
/// A block (`{ .. }`).
@ -1599,12 +1599,12 @@ pub enum AttrArgs {
}
// The RHS of an `AttrArgs::Eq` starts out as an expression. Once macro
// expansion is completed, all cases end up either as a literal, which is the
// form used after lowering to HIR, or as an error.
// expansion is completed, all cases end up either as a meta item literal,
// which is the form used after lowering to HIR, or as an error.
#[derive(Clone, Encodable, Decodable, Debug)]
pub enum AttrArgsEq {
Ast(P<Expr>),
Hir(Lit),
Hir(MetaItemLit),
}
impl AttrArgs {
@ -1726,19 +1726,18 @@ pub enum StrStyle {
Raw(u8),
}
/// An AST literal.
/// A literal in a meta item.
#[derive(Clone, Encodable, Decodable, Debug, HashStable_Generic)]
pub struct Lit {
pub struct MetaItemLit {
/// The original literal token as written in source code.
pub token_lit: token::Lit,
/// The "semantic" representation of the literal lowered from the original tokens.
/// Strings are unescaped, hexadecimal forms are eliminated, etc.
/// FIXME: Remove this and only create the semantic representation during lowering to HIR.
pub kind: LitKind,
pub span: Span,
}
/// Same as `Lit`, but restricted to string literals.
/// Similar to `MetaItemLit`, but restricted to string literals.
#[derive(Clone, Copy, Encodable, Decodable, Debug)]
pub struct StrLit {
/// The original literal token as written in source code.
@ -1747,7 +1746,6 @@ pub struct StrLit {
pub suffix: Option<Symbol>,
pub span: Span,
/// The unescaped "semantic" representation of the literal lowered from the original token.
/// FIXME: Remove this and only create the semantic representation during lowering to HIR.
pub symbol_unescaped: Symbol,
}
@ -1783,6 +1781,8 @@ pub enum LitFloatType {
Unsuffixed,
}
/// This type is used within both `ast::MetaItemLit` and `hir::Lit`.
///
/// Note that the entire literal (including the suffix) is considered when
/// deciding the `LitKind`. This means that float literals like `1f32` are
/// classified by this type as `Float`. This is different to `token::LitKind`
@ -3096,9 +3096,9 @@ mod size_asserts {
static_assert_size!(Impl, 184);
static_assert_size!(Item, 184);
static_assert_size!(ItemKind, 112);
static_assert_size!(Lit, 48);
static_assert_size!(LitKind, 24);
static_assert_size!(Local, 72);
static_assert_size!(MetaItemLit, 48);
static_assert_size!(Param, 40);
static_assert_size!(Pat, 88);
static_assert_size!(Path, 24);
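One detail worth calling out in the new `LitKind` note above: the whole token, suffix included, decides the kind, so `1f32` is classified as `Float` even though its digits alone look integral. A toy classifier illustrating just that point (a deliberate simplification, not the real `LitKind::from_token_lit` logic):

#[derive(Debug, PartialEq)]
enum ToyLitKind {
    Int,
    Float,
}

// Classify a literal the way the doc comment describes: the suffix participates.
fn classify(lit: &str) -> ToyLitKind {
    if lit.contains('.') || lit.ends_with("f32") || lit.ends_with("f64") {
        ToyLitKind::Float
    } else {
        ToyLitKind::Int
    }
}

fn main() {
    assert_eq!(classify("1"), ToyLitKind::Int);
    assert_eq!(classify("1f32"), ToyLitKind::Float); // suffix decides the kind
    assert_eq!(classify("1.0"), ToyLitKind::Float);
}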

View File

@ -2,7 +2,7 @@
use crate::ast;
use crate::ast::{AttrArgs, AttrArgsEq, AttrId, AttrItem, AttrKind, AttrStyle, Attribute};
use crate::ast::{DelimArgs, Lit, LitKind};
use crate::ast::{DelimArgs, LitKind, MetaItemLit};
use crate::ast::{MacDelimiter, MetaItem, MetaItemKind, NestedMetaItem};
use crate::ast::{Path, PathSegment};
use crate::ptr::P;
@ -50,10 +50,10 @@ impl NestedMetaItem {
}
}
/// Returns the `Lit` if `self` is a `NestedMetaItem::Literal`s.
pub fn literal(&self) -> Option<&Lit> {
/// Returns the `MetaItemLit` if `self` is a `NestedMetaItem::Lit`.
pub fn lit(&self) -> Option<&MetaItemLit> {
match self {
NestedMetaItem::Literal(lit) => Some(lit),
NestedMetaItem::Lit(lit) => Some(lit),
_ => None,
}
}
@ -78,12 +78,12 @@ impl NestedMetaItem {
}
/// Returns a name and single literal value tuple of the `MetaItem`.
pub fn name_value_literal(&self) -> Option<(Symbol, &Lit)> {
pub fn name_value_literal(&self) -> Option<(Symbol, &MetaItemLit)> {
self.meta_item().and_then(|meta_item| {
meta_item.meta_item_list().and_then(|meta_item_list| {
if meta_item_list.len() == 1
&& let Some(ident) = meta_item.ident()
&& let Some(lit) = meta_item_list[0].literal()
&& let Some(lit) = meta_item_list[0].lit()
{
return Some((ident.name, lit));
}
@ -179,7 +179,7 @@ impl MetaItem {
/// #[attribute(name = "value")]
/// ^^^^^^^^^^^^^^
/// ```
pub fn name_value_literal(&self) -> Option<&Lit> {
pub fn name_value_literal(&self) -> Option<&MetaItemLit> {
match &self.kind {
MetaItemKind::NameValue(v) => Some(v),
_ => None,
@ -334,7 +334,7 @@ pub fn mk_name_value_item_str(ident: Ident, str: Symbol, str_span: Span) -> Meta
}
pub fn mk_name_value_item(ident: Ident, lit_kind: LitKind, lit_span: Span) -> MetaItem {
let lit = Lit::from_lit_kind(lit_kind, lit_span);
let lit = MetaItemLit::from_lit_kind(lit_kind, lit_span);
let span = ident.span.to(lit_span);
MetaItem { path: Path::from_ident(ident), span, kind: MetaItemKind::NameValue(lit) }
}
@ -604,7 +604,7 @@ impl MetaItemKind {
MetaItemKind::name_value_from_tokens(&mut inner_tokens.into_trees())
}
Some(TokenTree::Token(token, _)) => {
Lit::from_token(&token).map(MetaItemKind::NameValue)
MetaItemLit::from_token(&token).map(MetaItemKind::NameValue)
}
_ => None,
}
@ -622,7 +622,7 @@ impl MetaItemKind {
AttrArgs::Eq(_, AttrArgsEq::Ast(expr)) => match expr.kind {
ast::ExprKind::Lit(token_lit) => {
// Turn failures to `None`, we'll get parse errors elsewhere.
Lit::from_token_lit(token_lit, expr.span)
MetaItemLit::from_token_lit(token_lit, expr.span)
.ok()
.map(|lit| MetaItemKind::NameValue(lit))
}
@ -655,14 +655,14 @@ impl NestedMetaItem {
pub fn span(&self) -> Span {
match self {
NestedMetaItem::MetaItem(item) => item.span,
NestedMetaItem::Literal(lit) => lit.span,
NestedMetaItem::Lit(lit) => lit.span,
}
}
fn token_trees(&self) -> Vec<TokenTree> {
match self {
NestedMetaItem::MetaItem(item) => item.token_trees(),
NestedMetaItem::Literal(lit) => {
NestedMetaItem::Lit(lit) => {
vec![TokenTree::Token(lit.to_token(), Spacing::Alone)]
}
}
@ -674,10 +674,10 @@ impl NestedMetaItem {
{
match tokens.peek() {
Some(TokenTree::Token(token, _))
if let Some(lit) = Lit::from_token(token) =>
if let Some(lit) = MetaItemLit::from_token(token) =>
{
tokens.next();
return Some(NestedMetaItem::Literal(lit));
return Some(NestedMetaItem::Lit(lit));
}
Some(TokenTree::Delimited(_, Delimiter::Invisible, inner_tokens)) => {
let inner_tokens = inner_tokens.clone();

View File

@ -628,7 +628,7 @@ pub fn noop_visit_macro_def<T: MutVisitor>(macro_def: &mut MacroDef, vis: &mut T
pub fn noop_visit_meta_list_item<T: MutVisitor>(li: &mut NestedMetaItem, vis: &mut T) {
match li {
NestedMetaItem::MetaItem(mi) => vis.visit_meta_item(mi),
NestedMetaItem::Literal(_lit) => {}
NestedMetaItem::Lit(_lit) => {}
}
}

View File

@ -1,8 +1,7 @@
//! Code related to parsing literals.
use crate::ast::{self, Lit, LitKind};
use crate::ast::{self, LitKind, MetaItemLit};
use crate::token::{self, Token};
use rustc_data_structures::sync::Lrc;
use rustc_lexer::unescape::{byte_from_char, unescape_byte, unescape_char, unescape_literal, Mode};
use rustc_span::symbol::{kw, sym, Symbol};
use rustc_span::Span;
@ -196,33 +195,26 @@ impl LitKind {
}
}
impl Lit {
/// Converts literal token into an AST literal.
pub fn from_token_lit(token_lit: token::Lit, span: Span) -> Result<Lit, LitError> {
Ok(Lit { token_lit, kind: LitKind::from_token_lit(token_lit)?, span })
impl MetaItemLit {
/// Converts a token literal into a meta item literal.
pub fn from_token_lit(token_lit: token::Lit, span: Span) -> Result<MetaItemLit, LitError> {
Ok(MetaItemLit { token_lit, kind: LitKind::from_token_lit(token_lit)?, span })
}
/// Converts an arbitrary token into an AST literal.
pub fn from_token(token: &Token) -> Option<Lit> {
/// Converts an arbitrary token into a meta item literal.
pub fn from_token(token: &Token) -> Option<MetaItemLit> {
token::Lit::from_token(token)
.and_then(|token_lit| Lit::from_token_lit(token_lit, token.span).ok())
.and_then(|token_lit| MetaItemLit::from_token_lit(token_lit, token.span).ok())
}
/// Attempts to recover an AST literal from semantic literal.
/// Attempts to create a meta item literal from a `LitKind`.
/// This function is used when the original token doesn't exist (e.g. the literal is created
/// by an AST-based macro) or unavailable (e.g. from HIR pretty-printing).
pub fn from_lit_kind(kind: LitKind, span: Span) -> Lit {
Lit { token_lit: kind.to_token_lit(), kind, span }
pub fn from_lit_kind(kind: LitKind, span: Span) -> MetaItemLit {
MetaItemLit { token_lit: kind.to_token_lit(), kind, span }
}
/// Recovers an AST literal from a string of bytes produced by `include_bytes!`.
/// This requires ASCII-escaping the string, which can result in poor performance
/// for very large strings of bytes.
pub fn from_included_bytes(bytes: &Lrc<[u8]>, span: Span) -> Lit {
Self::from_lit_kind(LitKind::ByteStr(bytes.clone()), span)
}
/// Losslessly convert an AST literal into a token.
/// Losslessly convert a meta item literal into a token.
pub fn to_token(&self) -> Token {
let kind = match self.token_lit.kind {
token::Bool => token::Ident(self.token_lit.symbol, false),

View File

@ -948,17 +948,12 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
AttrArgs::Eq(eq_span, AttrArgsEq::Ast(expr)) => {
// In valid code the value always ends up as a single literal. Otherwise, a dummy
// literal suffices because the error is handled elsewhere.
let lit = if let ExprKind::Lit(token_lit) = expr.kind {
match Lit::from_token_lit(token_lit, expr.span) {
Ok(lit) => lit,
Err(_err) => Lit {
token_lit: token::Lit::new(token::LitKind::Err, kw::Empty, None),
kind: LitKind::Err,
span: DUMMY_SP,
},
}
let lit = if let ExprKind::Lit(token_lit) = expr.kind
&& let Ok(lit) = MetaItemLit::from_token_lit(token_lit, expr.span)
{
lit
} else {
Lit {
MetaItemLit {
token_lit: token::Lit::new(token::LitKind::Err, kw::Empty, None),
kind: LitKind::Err,
span: DUMMY_SP,

View File

@ -371,7 +371,7 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
}
}
fn print_literal(&mut self, lit: &ast::Lit) {
fn print_meta_item_lit(&mut self, lit: &ast::MetaItemLit) {
self.print_token_literal(lit.token_lit, lit.span)
}
@ -488,7 +488,7 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
self.print_path(&item.path, false, 0);
self.space();
self.word_space("=");
let token_str = self.literal_to_string(lit);
let token_str = self.meta_item_lit_to_string(lit);
self.word(token_str);
}
}
@ -498,7 +498,7 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
fn print_meta_list_item(&mut self, item: &ast::NestedMetaItem) {
match item {
ast::NestedMetaItem::MetaItem(ref mi) => self.print_meta_item(mi),
ast::NestedMetaItem::Literal(ref lit) => self.print_literal(lit),
ast::NestedMetaItem::Lit(ref lit) => self.print_meta_item_lit(lit),
}
}
@ -510,7 +510,7 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
self.print_path(&item.path, false, 0);
self.space();
self.word_space("=");
self.print_literal(value);
self.print_meta_item_lit(value);
}
ast::MetaItemKind::List(ref items) => {
self.print_path(&item.path, false, 0);
@ -825,8 +825,8 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
Self::to_string(|s| s.print_expr(e))
}
fn literal_to_string(&self, lit: &ast::Lit) -> String {
Self::to_string(|s| s.print_literal(lit))
fn meta_item_lit_to_string(&self, lit: &ast::MetaItemLit) -> String {
Self::to_string(|s| s.print_meta_item_lit(lit))
}
fn tt_to_string(&self, tt: &TokenTree) -> String {

View File

@ -328,8 +328,8 @@ impl<'a> State<'a> {
self.print_token_literal(token_lit, expr.span);
}
ast::ExprKind::IncludedBytes(ref bytes) => {
let lit = ast::Lit::from_included_bytes(bytes, expr.span);
self.print_literal(&lit)
let lit = ast::LitKind::ByteStr(bytes.clone()).to_token_lit();
self.print_token_literal(lit, expr.span)
}
ast::ExprKind::Cast(ref expr, ref ty) => {
let prec = AssocOp::As.precedence() as i8;

View File

@ -1,7 +1,7 @@
//! Parsing and validation of builtin attributes
use rustc_ast as ast;
use rustc_ast::{Attribute, Lit, LitKind, MetaItem, MetaItemKind, NestedMetaItem, NodeId};
use rustc_ast::{Attribute, LitKind, MetaItem, MetaItemKind, MetaItemLit, NestedMetaItem, NodeId};
use rustc_ast_pretty::pprust;
use rustc_feature::{find_gated_cfg, is_builtin_attr_name, Features, GatedCfg};
use rustc_macros::HashStable_Generic;
@ -486,7 +486,7 @@ where
continue 'outer;
}
},
NestedMetaItem::Literal(lit) => {
NestedMetaItem::Lit(lit) => {
handle_errors(
&sess.parse_sess,
lit.span,
@ -658,11 +658,11 @@ pub fn eval_condition(
ast::MetaItemKind::List(ref mis) if cfg.name_or_empty() == sym::version => {
try_gate_cfg(sym::version, cfg.span, sess, features);
let (min_version, span) = match &mis[..] {
[NestedMetaItem::Literal(Lit { kind: LitKind::Str(sym, ..), span, .. })] => {
[NestedMetaItem::Lit(MetaItemLit { kind: LitKind::Str(sym, ..), span, .. })] => {
(sym, span)
}
[
NestedMetaItem::Literal(Lit { span, .. })
NestedMetaItem::Lit(MetaItemLit { span, .. })
| NestedMetaItem::MetaItem(MetaItem { span, .. }),
] => {
sess.emit_err(session_diagnostics::ExpectedVersionLiteral { span: *span });
@ -899,7 +899,7 @@ where
continue 'outer;
}
},
NestedMetaItem::Literal(lit) => {
NestedMetaItem::Lit(lit) => {
handle_errors(
&sess.parse_sess,
lit.span,

View File

@ -732,13 +732,15 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
let tcx = self.infcx.tcx;
// Try to find predicates on *generic params* that would allow copying `ty`
let infcx = tcx.infer_ctxt().build();
if infcx
.type_implements_trait(
tcx.lang_items().clone_trait().unwrap(),
[tcx.erase_regions(ty)],
self.param_env,
)
.must_apply_modulo_regions()
if let Some(clone_trait_def) = tcx.lang_items().clone_trait()
&& infcx
.type_implements_trait(
clone_trait_def,
[tcx.erase_regions(ty)],
self.param_env,
)
.must_apply_modulo_regions()
{
err.span_suggestion_verbose(
span.shrink_to_hi(),
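The fix from #104956 above boils down to: do not `unwrap()` the `Clone` lang item, because under `#![no_core]` it may simply not exist; only build the `.clone()` suggestion when it does. A minimal sketch of that defensive pattern, with a toy stand-in for `tcx.lang_items().clone_trait()`:

#[derive(Debug, Clone, Copy)]
struct TraitDefId;

// Stand-in for `tcx.lang_items().clone_trait()`, which is `None` under `#![no_core]`.
fn clone_trait(lang_item_present: bool) -> Option<TraitDefId> {
    lang_item_present.then(|| TraitDefId)
}

fn suggest_cloning(lang_item_present: bool) -> Option<String> {
    // Before the fix this was effectively `clone_trait(..).unwrap()`, which ICEd.
    let _clone_def = clone_trait(lang_item_present)?;
    Some(".clone()".to_owned())
}

fn main() {
    assert_eq!(suggest_cloning(true).as_deref(), Some(".clone()"));
    assert_eq!(suggest_cloning(false), None); // no panic when `Clone` is absent
}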

View File

@ -48,9 +48,9 @@ impl MultiItemModifier for Expander {
.into_iter()
.filter_map(|nested_meta| match nested_meta {
NestedMetaItem::MetaItem(meta) => Some(meta),
NestedMetaItem::Literal(lit) => {
NestedMetaItem::Lit(lit) => {
// Reject `#[derive("Debug")]`.
report_unexpected_literal(sess, &lit);
report_unexpected_meta_item_lit(sess, &lit);
None
}
})
@ -127,7 +127,7 @@ fn report_bad_target(sess: &Session, item: &Annotatable, span: Span) -> bool {
bad_target
}
fn report_unexpected_literal(sess: &Session, lit: &ast::Lit) {
fn report_unexpected_meta_item_lit(sess: &Session, lit: &ast::MetaItemLit) {
let help_msg = match lit.token_lit.kind {
token::Str if rustc_lexer::is_ident(lit.token_lit.symbol.as_str()) => {
format!("try using `#[derive({})]`", lit.token_lit.symbol)

View File

@ -66,7 +66,7 @@ fn eval_body_using_ecx<'mir, 'tcx>(
)?;
// The main interpreter loop.
ecx.run()?;
while ecx.step()? {}
// Intern the result
let intern_kind = if cid.promoted.is_some() {

View File

@ -32,11 +32,6 @@ fn binop_right_homogeneous(op: mir::BinOp) -> bool {
}
impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
pub fn run(&mut self) -> InterpResult<'tcx> {
while self.step()? {}
Ok(())
}
/// Returns `true` as long as there are more things to do.
///
/// This is used by [priroda](https://github.com/oli-obk/priroda)
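#104982 removes the `run()` wrapper shown above; since it was nothing more than `while self.step()? {}`, the caller shown earlier in `eval_body_using_ecx` now writes the loop inline (`while ecx.step()? {}`). A standalone sketch of that step-driver shape, with a toy machine in place of `InterpCx`:

struct ToyMachine {
    remaining: u32,
}

impl ToyMachine {
    /// Returns `Ok(true)` as long as there is more work to do.
    fn step(&mut self) -> Result<bool, String> {
        if self.remaining == 0 {
            return Ok(false);
        }
        self.remaining -= 1;
        Ok(true)
    }
}

fn main() -> Result<(), String> {
    let mut ecx = ToyMachine { remaining: 3 };
    // What used to be hidden behind `ecx.run()?` is now spelled out at the call site.
    while ecx.step()? {}
    assert_eq!(ecx.remaining, 0);
    Ok(())
}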

View File

@ -526,11 +526,8 @@ impl server::TokenStream for Rustc<'_, '_> {
Ok(tokenstream::TokenStream::token_alone(token::Literal(*token_lit), expr.span))
}
ast::ExprKind::IncludedBytes(bytes) => {
let lit = ast::Lit::from_included_bytes(bytes, expr.span);
Ok(tokenstream::TokenStream::token_alone(
token::TokenKind::Literal(lit.token_lit),
expr.span,
))
let lit = ast::LitKind::ByteStr(bytes.clone()).to_token_lit();
Ok(tokenstream::TokenStream::token_alone(token::TokenKind::Literal(lit), expr.span))
}
ast::ExprKind::Unary(ast::UnOp::Neg, e) => match &e.kind {
ast::ExprKind::Lit(token_lit) => match token_lit {

View File

@ -2145,7 +2145,7 @@ fn should_inherit_track_caller(tcx: TyCtxt<'_>, def_id: DefId) -> bool {
}
fn check_link_ordinal(tcx: TyCtxt<'_>, attr: &ast::Attribute) -> Option<u16> {
use rustc_ast::{Lit, LitIntType, LitKind};
use rustc_ast::{LitIntType, LitKind, MetaItemLit};
if !tcx.features().raw_dylib && tcx.sess.target.arch == "x86" {
feature_err(
&tcx.sess.parse_sess,
@ -2158,7 +2158,7 @@ fn check_link_ordinal(tcx: TyCtxt<'_>, attr: &ast::Attribute) -> Option<u16> {
let meta_item_list = attr.meta_item_list();
let meta_item_list = meta_item_list.as_deref();
let sole_meta_list = match meta_item_list {
Some([item]) => item.literal(),
Some([item]) => item.lit(),
Some(_) => {
tcx.sess
.struct_span_err(attr.span, "incorrect number of arguments to `#[link_ordinal]`")
@ -2168,7 +2168,9 @@ fn check_link_ordinal(tcx: TyCtxt<'_>, attr: &ast::Attribute) -> Option<u16> {
}
_ => None,
};
if let Some(Lit { kind: LitKind::Int(ordinal, LitIntType::Unsuffixed), .. }) = sole_meta_list {
if let Some(MetaItemLit { kind: LitKind::Int(ordinal, LitIntType::Unsuffixed), .. }) =
sole_meta_list
{
// According to the table at https://docs.microsoft.com/en-us/windows/win32/debug/pe-format#import-header,
// the ordinal must fit into 16 bits. Similarly, the Ordinal field in COFFShortExport (defined
// in llvm/include/llvm/Object/COFFImportFile.h), which we use to communicate import information

View File

@ -155,14 +155,17 @@ impl<'tcx> TypeRelation<'tcx> for Match<'tcx> {
bug!()
}
#[instrument(level = "trace", skip(self))]
fn relate_with_variance<T: Relate<'tcx>>(
&mut self,
_: ty::Variance,
variance: ty::Variance,
_: ty::VarianceDiagInfo<'tcx>,
a: T,
b: T,
) -> RelateResult<'tcx, T> {
self.relate(a, b)
// Opaque type substs have lifetime parameters.
// We must not check them to be equal, as we never insert anything to make them so.
if variance != ty::Bivariant { self.relate(a, b) } else { Ok(a) }
}
#[instrument(skip(self), level = "debug")]

View File

@ -194,7 +194,7 @@ pub fn parse_check_cfg(specs: Vec<String>) -> CheckCfg {
for val in values {
if let Some(LitKind::Str(s, _)) =
val.literal().map(|lit| &lit.kind)
val.lit().map(|lit| &lit.kind)
{
ident_values.insert(s.to_string());
} else {

View File

@ -462,8 +462,8 @@ impl LateLintPass<'_> for BadOptAccess {
let Some(attr) = cx.tcx.get_attr(field.did, sym::rustc_lint_opt_deny_field_access) &&
let Some(items) = attr.meta_item_list() &&
let Some(item) = items.first() &&
let Some(literal) = item.literal() &&
let ast::LitKind::Str(val, _) = literal.kind
let Some(lit) = item.lit() &&
let ast::LitKind::Str(val, _) = lit.kind
{
cx.struct_span_lint(BAD_OPT_ACCESS, expr.span, val.as_str(), |lint|
lint

View File

@ -36,7 +36,7 @@ pub use init_mask::{InitChunk, InitChunkIter};
/// module provides higher-level access.
// Note: for performance reasons when interning, some of the `Allocation` fields can be partially
// hashed. (see the `Hash` impl below for more details), so the impl is not derived.
#[derive(Clone, Eq, PartialEq, PartialOrd, Ord, TyEncodable, TyDecodable)]
#[derive(Clone, Eq, PartialEq, TyEncodable, TyDecodable)]
#[derive(HashStable)]
pub struct Allocation<Prov: Provenance = AllocId, Extra = ()> {
/// The actual bytes of the allocation.
@ -108,7 +108,7 @@ impl hash::Hash for Allocation {
/// Here things are different because only const allocations are interned. This
/// means that both the inner type (`Allocation`) and the outer type
/// (`ConstAllocation`) are used quite a bit.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, HashStable)]
#[derive(Copy, Clone, PartialEq, Eq, Hash, HashStable)]
#[rustc_pass_by_value]
pub struct ConstAllocation<'tcx>(pub Interned<'tcx, Allocation>);

View File

@ -12,7 +12,7 @@ type Block = u64;
/// is initialized. If it is `false` the byte is uninitialized.
// Note: for performance reasons when interning, some of the `InitMask` fields can be partially
// hashed. (see the `Hash` impl below for more details), so the impl is not derived.
#[derive(Clone, Debug, Eq, PartialEq, PartialOrd, Ord, TyEncodable, TyDecodable)]
#[derive(Clone, Debug, Eq, PartialEq, TyEncodable, TyDecodable)]
#[derive(HashStable)]
pub struct InitMask {
blocks: Vec<Block>,

View File

@ -10,7 +10,7 @@ use super::{alloc_range, AllocError, AllocId, AllocRange, AllocResult, Provenanc
use rustc_serialize::{Decodable, Decoder, Encodable, Encoder};
/// Stores the provenance information of pointers stored in memory.
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
#[derive(HashStable)]
pub struct ProvenanceMap<Prov = AllocId> {
/// Provenance in this map applies from the given offset for an entire pointer-size worth of

View File

@ -173,7 +173,7 @@ impl Provenance for AllocId {
/// Represents a pointer in the Miri engine.
///
/// Pointers are "tagged" with provenance information; typically the `AllocId` they belong to.
#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, TyEncodable, TyDecodable, Hash)]
#[derive(Copy, Clone, Eq, PartialEq, TyEncodable, TyDecodable, Hash)]
#[derive(HashStable)]
pub struct Pointer<Prov = AllocId> {
pub(super) offset: Size, // kept private to avoid accidental misinterpretation (meaning depends on `Prov` type)

View File

@ -28,7 +28,7 @@ pub struct ConstAlloc<'tcx> {
/// Represents a constant value in Rust. `Scalar` and `Slice` are optimizations for
/// array length computations, enum discriminants and the pattern matching logic.
#[derive(Copy, Clone, Debug, Eq, PartialEq, PartialOrd, Ord, TyEncodable, TyDecodable, Hash)]
#[derive(Copy, Clone, Debug, Eq, PartialEq, TyEncodable, TyDecodable, Hash)]
#[derive(HashStable, Lift)]
pub enum ConstValue<'tcx> {
/// Used only for types with `layout::abi::Scalar` ABI.
@ -110,7 +110,7 @@ impl<'tcx> ConstValue<'tcx> {
///
/// These variants would be private if there was a convenient way to achieve that in Rust.
/// Do *not* match on a `Scalar`! Use the various `to_*` methods instead.
#[derive(Clone, Copy, Eq, PartialEq, Ord, PartialOrd, TyEncodable, TyDecodable, Hash)]
#[derive(Clone, Copy, Eq, PartialEq, TyEncodable, TyDecodable, Hash)]
#[derive(HashStable)]
pub enum Scalar<Prov = AllocId> {
/// The raw bytes of a simple value.

View File

@ -1191,8 +1191,9 @@ impl<'tcx> TyCtxt<'tcx> {
debug!("layout_scalar_valid_range: attr={:?}", attr);
if let Some(
&[
ast::NestedMetaItem::Literal(ast::Lit {
kind: ast::LitKind::Int(a, _), ..
ast::NestedMetaItem::Lit(ast::MetaItemLit {
kind: ast::LitKind::Int(a, _),
..
}),
],
) = attr.meta_item_list().as_deref()

View File

@ -72,12 +72,18 @@ pub trait TypeVisitable<'tcx>: fmt::Debug + Clone {
self.visit_with(&mut HasEscapingVarsVisitor { outer_index: binder }).is_break()
}
/// Returns `true` if this `self` has any regions that escape `binder` (and
/// Returns `true` if this type has any regions that escape `binder` (and
/// hence are not bound by it).
fn has_vars_bound_above(&self, binder: ty::DebruijnIndex) -> bool {
self.has_vars_bound_at_or_above(binder.shifted_in(1))
}
/// Returns `true` if this type has regions that are not a part of the type.
/// For example, `for<'a> fn(&'a i32)` returns `false`, while `fn(&'a i32)`
/// would return `true`. The latter can occur when traversing through the
/// former.
///
/// See [`HasEscapingVarsVisitor`] for more information.
fn has_escaping_bound_vars(&self) -> bool {
self.has_vars_bound_at_or_above(ty::INNERMOST)
}
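The new documentation for `has_escaping_bound_vars` (#104891) above can be made concrete with De Bruijn indices, which is how rustc tracks binders (`ty::DebruijnIndex` in the surrounding code). The following self-contained sketch uses a toy term type, not the real `ty` machinery: a variable "escapes" when its index reaches past every binder enclosing it inside the value being inspected.

#[derive(Debug)]
enum Term {
    /// De Bruijn index: 0 refers to the innermost enclosing binder.
    Var(u32),
    /// A binder, playing the role of the `for<'a>` in `for<'a> fn(&'a i32)`.
    Binder(Box<Term>),
}

fn has_vars_bound_at_or_above(term: &Term, depth: u32) -> bool {
    match term {
        Term::Var(idx) => *idx >= depth,
        Term::Binder(inner) => has_vars_bound_at_or_above(inner, depth + 1),
    }
}

/// Analogue of `has_escaping_bound_vars`: is any variable not bound within `term` itself?
fn has_escaping_bound_vars(term: &Term) -> bool {
    has_vars_bound_at_or_above(term, 0)
}

fn main() {
    // Like `for<'a> fn(&'a i32)`: the region is bound inside the value, so it does not escape.
    let closed = Term::Binder(Box::new(Term::Var(0)));
    // Like looking at the inner `fn(&'a i32)` on its own: the variable escapes.
    let escaping = Term::Var(0);
    assert!(!has_escaping_bound_vars(&closed));
    assert!(has_escaping_bound_vars(&escaping));
}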

View File

@ -315,8 +315,9 @@ impl<'a> Parser<'a> {
Ok(attrs)
}
pub(crate) fn parse_unsuffixed_lit(&mut self) -> PResult<'a, ast::Lit> {
let lit = self.parse_ast_lit()?;
// Note: must be unsuffixed.
pub(crate) fn parse_unsuffixed_meta_item_lit(&mut self) -> PResult<'a, ast::MetaItemLit> {
let lit = self.parse_meta_item_lit()?;
debug!("checking if {:?} is unsuffixed", lit);
if !lit.kind.is_unsuffixed() {
@ -391,7 +392,7 @@ impl<'a> Parser<'a> {
pub(crate) fn parse_meta_item_kind(&mut self) -> PResult<'a, ast::MetaItemKind> {
Ok(if self.eat(&token::Eq) {
ast::MetaItemKind::NameValue(self.parse_unsuffixed_lit()?)
ast::MetaItemKind::NameValue(self.parse_unsuffixed_meta_item_lit()?)
} else if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
// Matches `meta_seq = ( COMMASEP(meta_item_inner) )`.
let (list, _) = self.parse_paren_comma_seq(|p| p.parse_meta_item_inner())?;
@ -403,8 +404,8 @@ impl<'a> Parser<'a> {
/// Matches `meta_item_inner : (meta_item | UNSUFFIXED_LIT) ;`.
fn parse_meta_item_inner(&mut self) -> PResult<'a, ast::NestedMetaItem> {
match self.parse_unsuffixed_lit() {
Ok(lit) => return Ok(ast::NestedMetaItem::Literal(lit)),
match self.parse_unsuffixed_meta_item_lit() {
Ok(lit) => return Ok(ast::NestedMetaItem::Lit(lit)),
Err(err) => err.cancel(),
}

View File

@ -33,10 +33,10 @@ use rustc_ast::util::case::Case;
use rustc_ast::util::classify;
use rustc_ast::util::parser::{prec_let_scrutinee_needs_par, AssocOp, Fixity};
use rustc_ast::visit::Visitor;
use rustc_ast::{self as ast, AttrStyle, AttrVec, CaptureBy, ExprField, Lit, UnOp, DUMMY_NODE_ID};
use rustc_ast::{self as ast, AttrStyle, AttrVec, CaptureBy, ExprField, UnOp, DUMMY_NODE_ID};
use rustc_ast::{AnonConst, BinOp, BinOpKind, FnDecl, FnRetTy, MacCall, Param, Ty, TyKind};
use rustc_ast::{Arm, Async, BlockCheckMode, Expr, ExprKind, Label, Movability, RangeLimits};
use rustc_ast::{ClosureBinder, StmtKind};
use rustc_ast::{ClosureBinder, MetaItemLit, StmtKind};
use rustc_ast_pretty::pprust;
use rustc_errors::{
Applicability, Diagnostic, DiagnosticBuilder, ErrorGuaranteed, IntoDiagnostic, PResult,
@ -1631,7 +1631,7 @@ impl<'a> Parser<'a> {
&self,
lifetime: Ident,
err: impl FnOnce(&Self) -> DiagnosticBuilder<'a, ErrorGuaranteed>,
) -> ast::Lit {
) -> ast::MetaItemLit {
if let Some(mut diag) =
self.sess.span_diagnostic.steal_diagnostic(lifetime.span, StashKey::LifetimeIsChar)
{
@ -1653,7 +1653,7 @@ impl<'a> Parser<'a> {
.emit();
}
let name = lifetime.without_first_quote().name;
ast::Lit {
ast::MetaItemLit {
token_lit: token::Lit::new(token::LitKind::Char, name, None),
kind: ast::LitKind::Char(name.as_str().chars().next().unwrap_or('_')),
span: lifetime.span,
@ -1768,8 +1768,8 @@ impl<'a> Parser<'a> {
/// Returns a string literal if the next token is a string literal.
/// In case of error returns `Some(lit)` if the next token is a literal with a wrong kind,
/// and returns `None` if the next token is not literal at all.
pub fn parse_str_lit(&mut self) -> Result<ast::StrLit, Option<Lit>> {
match self.parse_opt_ast_lit() {
pub fn parse_str_lit(&mut self) -> Result<ast::StrLit, Option<MetaItemLit>> {
match self.parse_opt_meta_item_lit() {
Some(lit) => match lit.kind {
ast::LitKind::Str(symbol_unescaped, style) => Ok(ast::StrLit {
style,
@ -1784,7 +1784,7 @@ impl<'a> Parser<'a> {
}
}
fn handle_missing_lit(&mut self) -> PResult<'a, Lit> {
fn handle_missing_lit(&mut self) -> PResult<'a, MetaItemLit> {
if let token::Interpolated(inner) = &self.token.kind {
let expr = match inner.as_ref() {
token::NtExpr(expr) => Some(expr),
@ -1820,8 +1820,8 @@ impl<'a> Parser<'a> {
.or_else(|()| self.handle_missing_lit().map(|lit| (lit.token_lit, lit.span)))
}
pub(super) fn parse_ast_lit(&mut self) -> PResult<'a, Lit> {
self.parse_opt_ast_lit().ok_or(()).or_else(|()| self.handle_missing_lit())
pub(super) fn parse_meta_item_lit(&mut self) -> PResult<'a, MetaItemLit> {
self.parse_opt_meta_item_lit().ok_or(()).or_else(|()| self.handle_missing_lit())
}
fn recover_after_dot(&mut self) -> Option<Token> {
@ -1867,12 +1867,12 @@ impl<'a> Parser<'a> {
/// Matches `lit = true | false | token_lit`.
/// Returns `None` if the next token is not a literal.
pub(super) fn parse_opt_ast_lit(&mut self) -> Option<Lit> {
pub(super) fn parse_opt_meta_item_lit(&mut self) -> Option<MetaItemLit> {
let recovered = self.recover_after_dot();
let token = recovered.as_ref().unwrap_or(&self.token);
match token::Lit::from_token(token) {
Some(token_lit) => {
match Lit::from_token_lit(token_lit, token.span) {
match MetaItemLit::from_token_lit(token_lit, token.span) {
Ok(lit) => {
self.bump();
Some(lit)
@ -1889,7 +1889,10 @@ impl<'a> Parser<'a> {
let suffixless_lit = token::Lit::new(lit.kind, lit.symbol, None);
let symbol = Symbol::intern(&suffixless_lit.to_string());
let lit = token::Lit::new(token::Err, symbol, lit.suffix);
Some(Lit::from_token_lit(lit, span).unwrap_or_else(|_| unreachable!()))
Some(
MetaItemLit::from_token_lit(lit, span)
.unwrap_or_else(|_| unreachable!()),
)
}
}
}

View File

@ -358,7 +358,7 @@ impl<'a> Parser<'a> {
/// report error for `let 1x = 123`
pub fn report_invalid_identifier_error(&mut self) -> PResult<'a, ()> {
if let token::Literal(lit) = self.token.uninterpolate().kind &&
rustc_ast::Lit::from_token(&self.token).is_none() &&
rustc_ast::MetaItemLit::from_token(&self.token).is_none() &&
(lit.kind == token::LitKind::Integer || lit.kind == token::LitKind::Float) &&
self.look_ahead(1, |t| matches!(t.kind, token::Eq) || matches!(t.kind, token::Colon ) ) {
return Err(self.sess.create_err(InvalidIdentiferStartsWithNumber { span: self.token.span }));

View File

@ -51,7 +51,7 @@ pub fn parse_meta<'a>(sess: &'a ParseSess, attr: &Attribute) -> PResult<'a, Meta
}
AttrArgs::Eq(_, AttrArgsEq::Ast(expr)) => {
if let ast::ExprKind::Lit(token_lit) = expr.kind
&& let Ok(lit) = ast::Lit::from_token_lit(token_lit, expr.span)
&& let Ok(lit) = ast::MetaItemLit::from_token_lit(token_lit, expr.span)
{
if token_lit.suffix.is_some() {
let mut err = sess.span_diagnostic.struct_span_err(

View File

@ -8,7 +8,7 @@ use crate::errors::{
self, AttrApplication, DebugVisualizerUnreadable, InvalidAttrAtCrateLevel, ObjectLifetimeErr,
OnlyHasEffectOn, TransparentIncompatible, UnrecognizedReprHint,
};
use rustc_ast::{ast, AttrStyle, Attribute, Lit, LitKind, MetaItemKind, NestedMetaItem};
use rustc_ast::{ast, AttrStyle, Attribute, LitKind, MetaItemKind, MetaItemLit, NestedMetaItem};
use rustc_data_structures::fx::FxHashMap;
use rustc_errors::{fluent, Applicability, MultiSpan};
use rustc_expand::base::resolve_path;
@ -715,7 +715,7 @@ impl CheckAttrVisitor<'_> {
if let Some(values) = meta.meta_item_list() {
let mut errors = 0;
for v in values {
match v.literal() {
match v.lit() {
Some(l) => match l.kind {
LitKind::Str(s, _) => {
if !self.check_doc_alias_value(v, s, hir_id, target, true, aliases) {
@ -1355,7 +1355,7 @@ impl CheckAttrVisitor<'_> {
return false;
};
if matches!(&list[..], &[NestedMetaItem::Literal(Lit { kind: LitKind::Int(..), .. })]) {
if matches!(&list[..], &[NestedMetaItem::Lit(MetaItemLit { kind: LitKind::Int(..), .. })]) {
true
} else {
self.tcx.sess.emit_err(errors::RustcLayoutScalarValidRangeArg { attr_span: attr.span });
@ -1418,7 +1418,7 @@ impl CheckAttrVisitor<'_> {
let arg_count = decl.inputs.len() as u128 + generics.params.len() as u128;
let mut invalid_args = vec![];
for meta in list {
if let Some(LitKind::Int(val, _)) = meta.literal().map(|lit| &lit.kind) {
if let Some(LitKind::Int(val, _)) = meta.lit().map(|lit| &lit.kind) {
if *val >= arg_count {
let span = meta.span();
self.tcx.sess.emit_err(errors::RustcLegacyConstGenericsIndexExceed {

View File

@ -282,6 +282,14 @@ impl<'a: 'ast, 'ast> LateResolutionVisitor<'a, '_, 'ast> {
"you may want to use a bool value instead",
format!("{}", item_typo),
))
// FIXME(vicnenzopalazzo): make the check smarter,
// and maybe expand with levenshtein distance checks
} else if item_str.as_str() == "printf" {
Some((
item_span,
"you may have meant to use the `print` macro",
"print!".to_owned(),
))
} else {
suggestion
};

View File

@ -1989,7 +1989,7 @@ impl<'a> Resolver<'a> {
.find(|a| a.has_name(sym::rustc_legacy_const_generics))?;
let mut ret = Vec::new();
for meta in attr.meta_item_list()? {
match meta.literal()?.kind {
match meta.lit()?.kind {
LitKind::Int(a, _) => ret.push(a as usize),
_ => panic!("invalid arg index"),
}

View File

@ -20,283 +20,293 @@ pub(super) fn sanity_check_layout<'tcx>(
bug!("size is not a multiple of align, in the following layout:\n{layout:#?}");
}
if cfg!(debug_assertions) {
/// Yields non-ZST fields of the type
fn non_zst_fields<'tcx, 'a>(
cx: &'a LayoutCx<'tcx, TyCtxt<'tcx>>,
layout: &'a TyAndLayout<'tcx>,
) -> impl Iterator<Item = (Size, TyAndLayout<'tcx>)> + 'a {
(0..layout.layout.fields().count()).filter_map(|i| {
let field = layout.field(cx, i);
// Also checking `align == 1` here leads to test failures in
// `layout/zero-sized-array-union.rs`, where a type has a zero-size field with
// alignment 4 that still gets ignored during layout computation (which is okay
// since other fields already force alignment 4).
let zst = field.is_zst();
(!zst).then(|| (layout.fields.offset(i), field))
})
}
if !cfg!(debug_assertions) {
// Stop here, the rest is kind of expensive.
return;
}
fn skip_newtypes<'tcx>(
cx: &LayoutCx<'tcx, TyCtxt<'tcx>>,
layout: &TyAndLayout<'tcx>,
) -> TyAndLayout<'tcx> {
if matches!(layout.layout.variants(), Variants::Multiple { .. }) {
// Definitely not a newtype of anything.
return *layout;
}
let mut fields = non_zst_fields(cx, layout);
let Some(first) = fields.next() else {
// No fields here, so this could be a primitive or enum -- either way it's not a newtype around a thing
return *layout
};
if fields.next().is_none() {
let (offset, first) = first;
if offset == Size::ZERO && first.layout.size() == layout.size {
// This is a newtype, so keep recursing.
// FIXME(RalfJung): I don't think it would be correct to do any checks for
// alignment here, so we don't. Is that correct?
return skip_newtypes(cx, &first);
}
}
// No more newtypes here.
*layout
}
/// Yields non-ZST fields of the type
fn non_zst_fields<'tcx, 'a>(
cx: &'a LayoutCx<'tcx, TyCtxt<'tcx>>,
layout: &'a TyAndLayout<'tcx>,
) -> impl Iterator<Item = (Size, TyAndLayout<'tcx>)> + 'a {
(0..layout.layout.fields().count()).filter_map(|i| {
let field = layout.field(cx, i);
// Also checking `align == 1` here leads to test failures in
// `layout/zero-sized-array-union.rs`, where a type has a zero-size field with
// alignment 4 that still gets ignored during layout computation (which is okay
// since other fields already force alignment 4).
let zst = field.is_zst();
(!zst).then(|| (layout.fields.offset(i), field))
})
}
fn check_layout_abi<'tcx>(cx: &LayoutCx<'tcx, TyCtxt<'tcx>>, layout: &TyAndLayout<'tcx>) {
match layout.layout.abi() {
Abi::Scalar(scalar) => {
// No padding in scalars.
let size = scalar.size(cx);
let align = scalar.align(cx).abi;
assert_eq!(
layout.layout.size(),
size,
"size mismatch between ABI and layout in {layout:#?}"
);
assert_eq!(
layout.layout.align().abi,
align,
"alignment mismatch between ABI and layout in {layout:#?}"
);
// Check that this matches the underlying field.
let inner = skip_newtypes(cx, layout);
assert!(
matches!(inner.layout.abi(), Abi::Scalar(_)),
"`Scalar` type {} is newtype around non-`Scalar` type {}",
layout.ty,
inner.ty
);
match inner.layout.fields() {
FieldsShape::Primitive => {
// Fine.
}
FieldsShape::Union(..) => {
// FIXME: I guess we could also check something here? Like, look at all fields?
return;
}
FieldsShape::Arbitrary { .. } => {
// Should be an enum, the only field is the discriminant.
assert!(
inner.ty.is_enum(),
"`Scalar` layout for non-primitive non-enum type {}",
inner.ty
);
assert_eq!(
inner.layout.fields().count(),
1,
"`Scalar` layout for multiple-field type in {inner:#?}",
);
let offset = inner.layout.fields().offset(0);
let field = inner.field(cx, 0);
// The field should be at the right offset, and match the `scalar` layout.
assert_eq!(
offset,
Size::ZERO,
"`Scalar` field at non-0 offset in {inner:#?}",
);
assert_eq!(
field.size, size,
"`Scalar` field with bad size in {inner:#?}",
);
assert_eq!(
field.align.abi, align,
"`Scalar` field with bad align in {inner:#?}",
);
assert!(
matches!(field.abi, Abi::Scalar(_)),
"`Scalar` field with bad ABI in {inner:#?}",
);
}
_ => {
panic!("`Scalar` layout for non-primitive non-enum type {}", inner.ty);
}
fn skip_newtypes<'tcx>(
cx: &LayoutCx<'tcx, TyCtxt<'tcx>>,
layout: &TyAndLayout<'tcx>,
) -> TyAndLayout<'tcx> {
if matches!(layout.layout.variants(), Variants::Multiple { .. }) {
// Definitely not a newtype of anything.
return *layout;
}
let mut fields = non_zst_fields(cx, layout);
let Some(first) = fields.next() else {
// No fields here, so this could be a primitive or enum -- either way it's not a newtype around a thing
return *layout
};
if fields.next().is_none() {
let (offset, first) = first;
if offset == Size::ZERO && first.layout.size() == layout.size {
// This is a newtype, so keep recursing.
// FIXME(RalfJung): I don't think it would be correct to do any checks for
// alignment here, so we don't. Is that correct?
return skip_newtypes(cx, &first);
}
}
// No more newtypes here.
*layout
}
fn check_layout_abi<'tcx>(cx: &LayoutCx<'tcx, TyCtxt<'tcx>>, layout: &TyAndLayout<'tcx>) {
match layout.layout.abi() {
Abi::Scalar(scalar) => {
// No padding in scalars.
let size = scalar.size(cx);
let align = scalar.align(cx).abi;
assert_eq!(
layout.layout.size(),
size,
"size mismatch between ABI and layout in {layout:#?}"
);
assert_eq!(
layout.layout.align().abi,
align,
"alignment mismatch between ABI and layout in {layout:#?}"
);
// Check that this matches the underlying field.
let inner = skip_newtypes(cx, layout);
assert!(
matches!(inner.layout.abi(), Abi::Scalar(_)),
"`Scalar` type {} is newtype around non-`Scalar` type {}",
layout.ty,
inner.ty
);
match inner.layout.fields() {
FieldsShape::Primitive => {
// Fine.
}
}
Abi::ScalarPair(scalar1, scalar2) => {
// Sanity-check scalar pairs. These are a bit more flexible and support
// padding, but we can at least ensure both fields actually fit into the layout
// and the alignment requirement has not been weakened.
let size1 = scalar1.size(cx);
let align1 = scalar1.align(cx).abi;
let size2 = scalar2.size(cx);
let align2 = scalar2.align(cx).abi;
assert!(
layout.layout.align().abi >= cmp::max(align1, align2),
"alignment mismatch between ABI and layout in {layout:#?}",
);
let field2_offset = size1.align_to(align2);
assert!(
layout.layout.size() >= field2_offset + size2,
"size mismatch between ABI and layout in {layout:#?}"
);
// Check that the underlying pair of fields matches.
let inner = skip_newtypes(cx, layout);
assert!(
matches!(inner.layout.abi(), Abi::ScalarPair(..)),
"`ScalarPair` type {} is newtype around non-`ScalarPair` type {}",
layout.ty,
inner.ty
);
if matches!(inner.layout.variants(), Variants::Multiple { .. }) {
// FIXME: ScalarPair for enums is enormously complicated and it is very hard
// to check anything about them.
FieldsShape::Union(..) => {
// FIXME: I guess we could also check something here? Like, look at all fields?
return;
}
match inner.layout.fields() {
FieldsShape::Arbitrary { .. } => {
// Checked below.
}
FieldsShape::Union(..) => {
// FIXME: I guess we could also check something here? Like, look at all fields?
return;
}
_ => {
panic!("`ScalarPair` layout with unexpected field shape in {inner:#?}");
}
FieldsShape::Arbitrary { .. } => {
// Should be an enum, the only field is the discriminant.
assert!(
inner.ty.is_enum(),
"`Scalar` layout for non-primitive non-enum type {}",
inner.ty
);
assert_eq!(
inner.layout.fields().count(),
1,
"`Scalar` layout for multiple-field type in {inner:#?}",
);
let offset = inner.layout.fields().offset(0);
let field = inner.field(cx, 0);
// The field should be at the right offset, and match the `scalar` layout.
assert_eq!(
offset,
Size::ZERO,
"`Scalar` field at non-0 offset in {inner:#?}",
);
assert_eq!(field.size, size, "`Scalar` field with bad size in {inner:#?}",);
assert_eq!(
field.align.abi, align,
"`Scalar` field with bad align in {inner:#?}",
);
assert!(
matches!(field.abi, Abi::Scalar(_)),
"`Scalar` field with bad ABI in {inner:#?}",
);
}
_ => {
panic!("`Scalar` layout for non-primitive non-enum type {}", inner.ty);
}
let mut fields = non_zst_fields(cx, &inner);
let (offset1, field1) = fields.next().unwrap_or_else(|| {
panic!("`ScalarPair` layout for type with not even one non-ZST field: {inner:#?}")
});
let (offset2, field2) = fields.next().unwrap_or_else(|| {
panic!("`ScalarPair` layout for type with less than two non-ZST fields: {inner:#?}")
});
assert!(
fields.next().is_none(),
"`ScalarPair` layout for type with at least three non-ZST fields: {inner:#?}"
);
// The fields might be in opposite order.
let (offset1, field1, offset2, field2) = if offset1 <= offset2 {
(offset1, field1, offset2, field2)
} else {
(offset2, field2, offset1, field1)
};
// The fields should be at the right offset, and match the `scalar` layout.
assert_eq!(
offset1,
Size::ZERO,
"`ScalarPair` first field at non-0 offset in {inner:#?}",
);
assert_eq!(
field1.size, size1,
"`ScalarPair` first field with bad size in {inner:#?}",
);
assert_eq!(
field1.align.abi, align1,
"`ScalarPair` first field with bad align in {inner:#?}",
);
assert!(
matches!(field1.abi, Abi::Scalar(_)),
"`ScalarPair` first field with bad ABI in {inner:#?}",
);
assert_eq!(
offset2, field2_offset,
"`ScalarPair` second field at bad offset in {inner:#?}",
);
assert_eq!(
field2.size, size2,
"`ScalarPair` second field with bad size in {inner:#?}",
);
assert_eq!(
field2.align.abi, align2,
"`ScalarPair` second field with bad align in {inner:#?}",
);
assert!(
matches!(field2.abi, Abi::Scalar(_)),
"`ScalarPair` second field with bad ABI in {inner:#?}",
);
}
Abi::Vector { count, element } => {
// No padding in vectors. Alignment can be strengthened, though.
assert!(
layout.layout.align().abi >= element.align(cx).abi,
"alignment mismatch between ABI and layout in {layout:#?}"
);
let size = element.size(cx) * count;
assert_eq!(
layout.layout.size(),
size.align_to(cx.data_layout().vector_align(size).abi),
"size mismatch between ABI and layout in {layout:#?}"
);
}
Abi::Uninhabited | Abi::Aggregate { .. } => {} // Nothing to check.
}
}
check_layout_abi(cx, layout);
if let Variants::Multiple { variants, .. } = &layout.variants {
for variant in variants.iter() {
// No nested "multiple".
assert!(matches!(variant.variants, Variants::Single { .. }));
// Variants should have the same or a smaller size as the full thing,
// and same for alignment.
if variant.size > layout.size {
bug!(
"Type with size {} bytes has variant with size {} bytes: {layout:#?}",
layout.size.bytes(),
variant.size.bytes(),
)
Abi::ScalarPair(scalar1, scalar2) => {
// Sanity-check scalar pairs. Computing the expected size and alignment is a bit of work.
let size1 = scalar1.size(cx);
let align1 = scalar1.align(cx).abi;
let size2 = scalar2.size(cx);
let align2 = scalar2.align(cx).abi;
let align = cmp::max(align1, align2);
let field2_offset = size1.align_to(align2);
let size = (field2_offset + size2).align_to(align);
assert_eq!(
layout.layout.size(),
size,
"size mismatch between ABI and layout in {layout:#?}"
);
assert_eq!(
layout.layout.align().abi,
align,
"alignment mismatch between ABI and layout in {layout:#?}",
);
// Check that the underlying pair of fields matches.
let inner = skip_newtypes(cx, layout);
assert!(
matches!(inner.layout.abi(), Abi::ScalarPair(..)),
"`ScalarPair` type {} is newtype around non-`ScalarPair` type {}",
layout.ty,
inner.ty
);
if matches!(inner.layout.variants(), Variants::Multiple { .. }) {
// FIXME: ScalarPair for enums is enormously complicated and it is very hard
// to check anything about them.
return;
}
if variant.align.abi > layout.align.abi {
bug!(
"Type with alignment {} bytes has variant with alignment {} bytes: {layout:#?}",
layout.align.abi.bytes(),
variant.align.abi.bytes(),
)
}
// Skip empty variants.
if variant.size == Size::ZERO
|| variant.fields.count() == 0
|| variant.abi.is_uninhabited()
{
// These are never actually accessed anyway, so we can skip the coherence check
// for them. They also fail that check, since they have
// `Aggregate`/`Uninhabited` ABI even when the main type is
// `Scalar`/`ScalarPair`. (Note that sometimes, variants with fields have size
// 0, and sometimes, variants without fields have non-0 size.)
continue;
}
// The top-level ABI and the ABI of the variants should be coherent.
let scalar_coherent = |s1: Scalar, s2: Scalar| {
s1.size(cx) == s2.size(cx) && s1.align(cx) == s2.align(cx)
};
let abi_coherent = match (layout.abi, variant.abi) {
(Abi::Scalar(s1), Abi::Scalar(s2)) => scalar_coherent(s1, s2),
(Abi::ScalarPair(a1, b1), Abi::ScalarPair(a2, b2)) => {
scalar_coherent(a1, a2) && scalar_coherent(b1, b2)
match inner.layout.fields() {
FieldsShape::Arbitrary { .. } => {
// Checked below.
}
FieldsShape::Union(..) => {
// FIXME: I guess we could also check something here? Like, look at all fields?
return;
}
_ => {
panic!("`ScalarPair` layout with unexpected field shape in {inner:#?}");
}
(Abi::Uninhabited, _) => true,
(Abi::Aggregate { .. }, _) => true,
_ => false,
};
if !abi_coherent {
bug!(
"Variant ABI is incompatible with top-level ABI:\nvariant={:#?}\nTop-level: {layout:#?}",
variant
);
}
let mut fields = non_zst_fields(cx, &inner);
let (offset1, field1) = fields.next().unwrap_or_else(|| {
panic!(
"`ScalarPair` layout for type with not even one non-ZST field: {inner:#?}"
)
});
let (offset2, field2) = fields.next().unwrap_or_else(|| {
panic!(
"`ScalarPair` layout for type with less than two non-ZST fields: {inner:#?}"
)
});
assert!(
fields.next().is_none(),
"`ScalarPair` layout for type with at least three non-ZST fields: {inner:#?}"
);
// The fields might be in opposite order.
let (offset1, field1, offset2, field2) = if offset1 <= offset2 {
(offset1, field1, offset2, field2)
} else {
(offset2, field2, offset1, field1)
};
// The fields should be at the right offset, and match the `scalar` layout.
assert_eq!(
offset1,
Size::ZERO,
"`ScalarPair` first field at non-0 offset in {inner:#?}",
);
assert_eq!(
field1.size, size1,
"`ScalarPair` first field with bad size in {inner:#?}",
);
assert_eq!(
field1.align.abi, align1,
"`ScalarPair` first field with bad align in {inner:#?}",
);
assert!(
matches!(field1.abi, Abi::Scalar(_)),
"`ScalarPair` first field with bad ABI in {inner:#?}",
);
assert_eq!(
offset2, field2_offset,
"`ScalarPair` second field at bad offset in {inner:#?}",
);
assert_eq!(
field2.size, size2,
"`ScalarPair` second field with bad size in {inner:#?}",
);
assert_eq!(
field2.align.abi, align2,
"`ScalarPair` second field with bad align in {inner:#?}",
);
assert!(
matches!(field2.abi, Abi::Scalar(_)),
"`ScalarPair` second field with bad ABI in {inner:#?}",
);
}
Abi::Vector { count, element } => {
// No padding in vectors, except possibly for trailing padding to make the size a multiple of align.
let size = element.size(cx) * count;
let align = cx.data_layout().vector_align(size).abi;
let size = size.align_to(align); // needed e.g. for vectors of size 3
assert!(align >= element.align(cx).abi); // just sanity-checking `vector_align`.
assert_eq!(
layout.layout.size(),
size,
"size mismatch between ABI and layout in {layout:#?}"
);
assert_eq!(
layout.layout.align().abi,
align,
"alignment mismatch between ABI and layout in {layout:#?}"
);
// FIXME: Do some kind of check of the inner type, like for Scalar and ScalarPair.
}
Abi::Uninhabited | Abi::Aggregate { .. } => {} // Nothing to check.
}
}
check_layout_abi(cx, layout);
if let Variants::Multiple { variants, .. } = &layout.variants {
for variant in variants.iter() {
// No nested "multiple".
assert!(matches!(variant.variants, Variants::Single { .. }));
// Variants should have the same or a smaller size as the full thing,
// and same for alignment.
if variant.size > layout.size {
bug!(
"Type with size {} bytes has variant with size {} bytes: {layout:#?}",
layout.size.bytes(),
variant.size.bytes(),
)
}
if variant.align.abi > layout.align.abi {
bug!(
"Type with alignment {} bytes has variant with alignment {} bytes: {layout:#?}",
layout.align.abi.bytes(),
variant.align.abi.bytes(),
)
}
// Skip empty variants.
if variant.size == Size::ZERO
|| variant.fields.count() == 0
|| variant.abi.is_uninhabited()
{
// These are never actually accessed anyway, so we can skip the coherence check
// for them. They also fail that check, since they have
// `Aggregate`/`Uninhabited` ABI even when the main type is
// `Scalar`/`ScalarPair`. (Note that sometimes, variants with fields have size
// 0, and sometimes, variants without fields have non-0 size.)
continue;
}
// The top-level ABI and the ABI of the variants should be coherent.
let scalar_coherent =
|s1: Scalar, s2: Scalar| s1.size(cx) == s2.size(cx) && s1.align(cx) == s2.align(cx);
let abi_coherent = match (layout.abi, variant.abi) {
(Abi::Scalar(s1), Abi::Scalar(s2)) => scalar_coherent(s1, s2),
(Abi::ScalarPair(a1, b1), Abi::ScalarPair(a2, b2)) => {
scalar_coherent(a1, a2) && scalar_coherent(b1, b2)
}
(Abi::Uninhabited, _) => true,
(Abi::Aggregate { .. }, _) => true,
_ => false,
};
if !abi_coherent {
bug!(
"Variant ABI is incompatible with top-level ABI:\nvariant={:#?}\nTop-level: {layout:#?}",
variant
);
}
}
}
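The stricter `ScalarPair` check from #105006 above pins the layout's size and alignment to exact values instead of lower bounds: the pair's alignment is the maximum of the two scalar alignments, the second field starts at `size1` rounded up to `align2`, and the total size is the end of the second field rounded up to the pair's alignment. A small standalone sketch of that arithmetic, with plain integers standing in for `Size` and `Align`:

/// Round `offset` up to the next multiple of `align` (`align` must be a power of two).
fn align_to(offset: u64, align: u64) -> u64 {
    (offset + align - 1) & !(align - 1)
}

/// Expected `(size, align)` of a scalar pair, mirroring the assertions above.
fn scalar_pair_layout(size1: u64, align1: u64, size2: u64, align2: u64) -> (u64, u64) {
    let align = align1.max(align2);
    let field2_offset = align_to(size1, align2);
    let size = align_to(field2_offset + size2, align);
    (size, align)
}

fn main() {
    // A pair shaped like (u8, u32): second field at offset 4, three bytes of padding.
    assert_eq!(scalar_pair_layout(1, 1, 4, 4), (8, 4));
    // A pair shaped like (u32, u32): no padding needed.
    assert_eq!(scalar_pair_layout(4, 4, 4, 4), (8, 4));
}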

View File

@ -50,7 +50,7 @@ impl Cfg {
) -> Result<Option<Cfg>, InvalidCfgError> {
match nested_cfg {
NestedMetaItem::MetaItem(ref cfg) => Cfg::parse_without(cfg, exclude),
NestedMetaItem::Literal(ref lit) => {
NestedMetaItem::Lit(ref lit) => {
Err(InvalidCfgError { msg: "unexpected literal", span: lit.span })
}
}

View File

@ -893,7 +893,7 @@ fn clean_fn_decl_legacy_const_generics(func: &mut Function, attrs: &[ast::Attrib
.filter(|a| a.has_name(sym::rustc_legacy_const_generics))
.filter_map(|a| a.meta_item_list())
{
for (pos, literal) in meta_item_list.iter().filter_map(|meta| meta.literal()).enumerate() {
for (pos, literal) in meta_item_list.iter().filter_map(|meta| meta.lit()).enumerate() {
match literal.kind {
ast::LitKind::Int(a, _) => {
let gen = func.generics.params.remove(0);

View File

@ -1305,7 +1305,7 @@ impl Attributes {
for attr in self.other_attrs.lists(sym::doc).filter(|a| a.has_name(sym::alias)) {
if let Some(values) = attr.meta_item_list() {
for l in values {
match l.literal().unwrap().kind {
match l.lit().unwrap().kind {
ast::LitKind::Str(s, _) => {
aliases.insert(s);
}

View File

@ -0,0 +1,66 @@
// check-pass
struct It;
struct Data {
items: Vec<It>,
}
impl Data {
fn new() -> Self {
Self {
items: vec![It, It],
}
}
fn content(&self) -> impl Iterator<Item = &It> {
self.items.iter()
}
}
struct Container<'a> {
name: String,
resolver: Box<dyn Resolver + 'a>,
}
impl<'a> Container<'a> {
fn new<R: Resolver + 'a>(name: &str, resolver: R) -> Self {
Self {
name: name.to_owned(),
resolver: Box::new(resolver),
}
}
}
trait Resolver {}
impl<R: Resolver> Resolver for &R {}
impl Resolver for It {}
fn get<'a>(mut items: impl Iterator<Item = &'a It>) -> impl Resolver + 'a {
items.next().unwrap()
}
fn get2<'a, 'b: 'b>(mut items: impl Iterator<Item = &'a It>) -> impl Resolver + 'a {
items.next().unwrap()
}
fn main() {
let data = Data::new();
let resolver = get(data.content());
let _ = ["a", "b"]
.iter()
.map(|&n| Container::new(n, &resolver))
.map(|c| c.name)
.collect::<Vec<_>>();
let resolver = get2(data.content());
let _ = ["a", "b"]
.iter()
.map(|&n| Container::new(n, &resolver))
.map(|c| c.name)
.collect::<Vec<_>>();
}

View File

@ -0,0 +1,20 @@
// Avoid panicking if the Clone trait is not found while building error suggestions
// See #104870
#![feature(no_core, lang_items)]
#![no_core]
#[lang = "sized"]
trait Sized {}
#[lang = "copy"]
trait Copy {}
fn g<T>(x: T) {}
fn f(x: *mut u8) {
g(x);
g(x); //~ ERROR use of moved value: `x`
}
fn main() {}

View File

@ -0,0 +1,21 @@
error[E0382]: use of moved value: `x`
--> $DIR/missing-clone-for-suggestion.rs:17:7
|
LL | fn f(x: *mut u8) {
| - move occurs because `x` has type `*mut u8`, which does not implement the `Copy` trait
LL | g(x);
| - value moved here
LL | g(x);
| ^ value used here after move
|
note: consider changing this parameter type in function `g` to borrow instead if owning the value isn't necessary
--> $DIR/missing-clone-for-suggestion.rs:13:12
|
LL | fn g<T>(x: T) {}
| - ^ this parameter takes ownership of the value
| |
| in this function
error: aborting due to previous error
For more information about this error, try `rustc --explain E0382`.

View File

@ -0,0 +1,9 @@
// Suggest that the user use the `print!` macro
// instead of calling `printf`.
fn main() {
let x = 4;
printf("%d", x);
//~^ ERROR cannot find function `printf` in this scope
//~| HELP you may have meant to use the `print` macro
}

View File

@ -0,0 +1,14 @@
error[E0425]: cannot find function `printf` in this scope
--> $DIR/seggest_print_over_printf.rs:6:5
|
LL | printf("%d", x);
| ^^^^^^ not found in this scope
|
help: you may have meant to use the `print` macro
|
LL | print!("%d", x);
| ~~~~~~
error: aborting due to previous error
For more information about this error, try `rustc --explain E0425`.

View File

@ -6,7 +6,7 @@ use clippy_utils::msrvs;
use clippy_utils::source::{first_line_of_span, is_present_in_source, snippet_opt, without_block_comments};
use clippy_utils::{extract_msrv_attr, meets_msrv};
use if_chain::if_chain;
use rustc_ast::{AttrKind, AttrStyle, Attribute, Lit, LitKind, MetaItemKind, NestedMetaItem};
use rustc_ast::{AttrKind, AttrStyle, Attribute, LitKind, MetaItemKind, MetaItemLit, NestedMetaItem};
use rustc_errors::Applicability;
use rustc_hir::{
Block, Expr, ExprKind, ImplItem, ImplItemKind, Item, ItemKind, StmtKind, TraitFn, TraitItem, TraitItemKind,
@ -576,7 +576,7 @@ fn check_attrs(cx: &LateContext<'_>, span: Span, name: Symbol, attrs: &[Attribut
}
}
fn check_semver(cx: &LateContext<'_>, span: Span, lit: &Lit) {
fn check_semver(cx: &LateContext<'_>, span: Span, lit: &MetaItemLit) {
if let LitKind::Str(is, _) = lit.kind {
if Version::parse(is.as_str()).is_ok() {
return;

View File

@ -158,7 +158,7 @@ impl ThreadClockSet {
/// Error returned by finding a data race
/// should be elaborated upon.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)]
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
pub struct DataRace;
/// Externally stored memory cell clocks

View File

@ -260,9 +260,7 @@ impl Rewrite for ast::NestedMetaItem {
fn rewrite(&self, context: &RewriteContext<'_>, shape: Shape) -> Option<String> {
match self {
ast::NestedMetaItem::MetaItem(ref meta_item) => meta_item.rewrite(context, shape),
ast::NestedMetaItem::Literal(ref l) => {
rewrite_literal(context, l.token_lit, l.span, shape)
}
ast::NestedMetaItem::Lit(ref l) => rewrite_literal(context, l.token_lit, l.span, shape),
}
}
}
@ -527,14 +525,19 @@ pub(crate) trait MetaVisitor<'ast> {
fn visit_meta_word(&mut self, _meta_item: &'ast ast::MetaItem) {}
fn visit_meta_name_value(&mut self, _meta_item: &'ast ast::MetaItem, _lit: &'ast ast::Lit) {}
fn visit_meta_name_value(
&mut self,
_meta_item: &'ast ast::MetaItem,
_lit: &'ast ast::MetaItemLit,
) {
}
fn visit_nested_meta_item(&mut self, nm: &'ast ast::NestedMetaItem) {
match nm {
ast::NestedMetaItem::MetaItem(ref meta_item) => self.visit_meta_item(meta_item),
ast::NestedMetaItem::Literal(ref lit) => self.visit_literal(lit),
ast::NestedMetaItem::Lit(ref lit) => self.visit_meta_item_lit(lit),
}
}
fn visit_literal(&mut self, _lit: &'ast ast::Lit) {}
fn visit_meta_item_lit(&mut self, _lit: &'ast ast::MetaItemLit) {}
}

View File

@ -84,15 +84,19 @@ impl PathVisitor {
}
impl<'ast> MetaVisitor<'ast> for PathVisitor {
fn visit_meta_name_value(&mut self, meta_item: &'ast ast::MetaItem, lit: &'ast ast::Lit) {
fn visit_meta_name_value(
&mut self,
meta_item: &'ast ast::MetaItem,
lit: &'ast ast::MetaItemLit,
) {
if meta_item.has_name(Symbol::intern("path")) && lit.kind.is_str() {
self.paths.push(lit_to_str(lit));
self.paths.push(meta_item_lit_to_str(lit));
}
}
}
#[cfg(not(windows))]
fn lit_to_str(lit: &ast::Lit) -> String {
fn meta_item_lit_to_str(lit: &ast::MetaItemLit) -> String {
match lit.kind {
ast::LitKind::Str(symbol, ..) => symbol.to_string(),
_ => unreachable!(),
@ -100,7 +104,7 @@ fn lit_to_str(lit: &ast::Lit) -> String {
}
#[cfg(windows)]
fn lit_to_str(lit: &ast::Lit) -> String {
fn meta_item_lit_to_str(lit: &ast::MetaItemLit) -> String {
match lit.kind {
ast::LitKind::Str(symbol, ..) => symbol.as_str().replace("/", "\\"),
_ => unreachable!(),

View File

@ -125,7 +125,7 @@ impl<'a> OverflowableItem<'a> {
OverflowableItem::MacroArg(MacroArg::Keyword(..)) => true,
OverflowableItem::MacroArg(MacroArg::Expr(expr)) => is_simple_expr(expr),
OverflowableItem::NestedMetaItem(nested_meta_item) => match nested_meta_item {
ast::NestedMetaItem::Literal(..) => true,
ast::NestedMetaItem::Lit(..) => true,
ast::NestedMetaItem::MetaItem(ref meta_item) => {
matches!(meta_item.kind, ast::MetaItemKind::Word)
}
@ -169,7 +169,7 @@ impl<'a> OverflowableItem<'a> {
},
OverflowableItem::NestedMetaItem(nested_meta_item) if len == 1 => {
match nested_meta_item {
ast::NestedMetaItem::Literal(..) => false,
ast::NestedMetaItem::Lit(..) => false,
ast::NestedMetaItem::MetaItem(..) => true,
}
}

View File

@ -263,7 +263,7 @@ fn is_skip(meta_item: &MetaItem) -> bool {
fn is_skip_nested(meta_item: &NestedMetaItem) -> bool {
match meta_item {
NestedMetaItem::MetaItem(ref mi) => is_skip(mi),
NestedMetaItem::Literal(_) => false,
NestedMetaItem::Lit(_) => false,
}
}