Auto merge of #3012 - rust-lang:rustup-2023-08-04, r=oli-obk

Automatic sync from rustc
bors 2023-08-04 05:44:54 +00:00
commit 15c05fe25f
226 changed files with 3265 additions and 1684 deletions


@ -494,6 +494,7 @@ Ryan Sullivant <rsulli55@gmail.com>
Ryan Wiedemann <Ryan1729@gmail.com>
S Pradeep Kumar <gohanpra@gmail.com>
Sam Radhakrishnan <sk09idm@gmail.com>
+Samuel Tardieu <sam@rfc1149.net>
Scott McMurray <scottmcm@users.noreply.github.com>
Scott Olson <scott@solson.me> Scott Olson <scott@scott-olson.org>
Sean Gillespie <sean.william.g@gmail.com> swgillespie <sean.william.g@gmail.com>


@ -1,4 +1,5 @@
#![cfg_attr(feature = "nightly", feature(step_trait, rustc_attrs, min_specialization))]
+#![cfg_attr(all(not(bootstrap), feature = "nightly"), allow(internal_features))]
use std::fmt;
#[cfg(feature = "nightly")]
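The same opt-out is added across many compiler crates in this sync. As a rough nightly-only sketch (not taken from the commit itself), this is the lint the new attribute silences when a crate enables rustc-internal features:

// Hedged sketch, assuming a 2023-08 or later nightly where the
// `internal_features` lint exists; the crate setup is illustrative.
#![feature(rustc_attrs)] // an internal feature: triggers the `internal_features` lint
#![allow(internal_features)] // the opt-out the crates above are adding

fn main() {}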


@ -23,6 +23,7 @@
#![deny(unsafe_op_in_unsafe_fn)]
#![deny(rustc::untranslatable_diagnostic)]
#![deny(rustc::diagnostic_outside_of_impl)]
+#![cfg_attr(not(bootstrap), allow(internal_features))]
#![allow(clippy::mut_from_ref)] // Arena allocators are one of the places where this pattern is fine.
use smallvec::SmallVec;


@ -14,7 +14,7 @@
//! - [`Generics`], [`GenericParam`], [`WhereClause`]: Metadata associated with generic parameters.
//! - [`EnumDef`] and [`Variant`]: Enum declaration.
//! - [`MetaItemLit`] and [`LitKind`]: Literal expressions.
-//! - [`MacroDef`], [`MacStmtStyle`], [`MacCall`], [`MacDelimiter`]: Macro definition and invocation.
+//! - [`MacroDef`], [`MacStmtStyle`], [`MacCall`]: Macro definition and invocation.
//! - [`Attribute`]: Metadata associated with item.
//! - [`UnOp`], [`BinOp`], and [`BinOpKind`]: Unary and binary operators.
@ -1693,7 +1693,7 @@ where
#[derive(Clone, Encodable, Decodable, Debug)]
pub struct DelimArgs {
    pub dspan: DelimSpan,
-    pub delim: MacDelimiter,
+    pub delim: Delimiter, // Note: `Delimiter::Invisible` never occurs
    pub tokens: TokenStream,
}
@ -1701,7 +1701,7 @@ impl DelimArgs {
    /// Whether a macro with these arguments needs a semicolon
    /// when used as a standalone item or statement.
    pub fn need_semicolon(&self) -> bool {
-        !matches!(self, DelimArgs { delim: MacDelimiter::Brace, .. })
+        !matches!(self, DelimArgs { delim: Delimiter::Brace, .. })
    }
}
@ -1717,32 +1717,6 @@ where
    }
}
-#[derive(Copy, Clone, PartialEq, Eq, Encodable, Decodable, Debug, HashStable_Generic)]
-pub enum MacDelimiter {
-    Parenthesis,
-    Bracket,
-    Brace,
-}
-impl MacDelimiter {
-    pub fn to_token(self) -> Delimiter {
-        match self {
-            MacDelimiter::Parenthesis => Delimiter::Parenthesis,
-            MacDelimiter::Bracket => Delimiter::Bracket,
-            MacDelimiter::Brace => Delimiter::Brace,
-        }
-    }
-    pub fn from_token(delim: Delimiter) -> Option<MacDelimiter> {
-        match delim {
-            Delimiter::Parenthesis => Some(MacDelimiter::Parenthesis),
-            Delimiter::Bracket => Some(MacDelimiter::Bracket),
-            Delimiter::Brace => Some(MacDelimiter::Brace),
-            Delimiter::Invisible => None,
-        }
-    }
-}
/// Represents a macro definition.
#[derive(Clone, Encodable, Decodable, Debug, HashStable_Generic)]
pub struct MacroDef {
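With `MacDelimiter` removed, `DelimArgs` stores a `token::Delimiter` directly and the old `to_token`/`from_token` conversions disappear. A minimal sketch of the new shape, using only items visible in this diff plus `rustc_span::DUMMY_SP`; it is compiler-internal code, so it only builds inside the rustc tree (or with `rustc_private`):

use rustc_ast::token::Delimiter;
use rustc_ast::tokenstream::{DelimSpan, TokenStream};
use rustc_ast::DelimArgs;
use rustc_span::DUMMY_SP;

// Build the `(...)` argument list of a `foo!(...)` call.
fn parenthesized(tokens: TokenStream) -> DelimArgs {
    DelimArgs {
        dspan: DelimSpan::from_single(DUMMY_SP),
        // `Delimiter::Invisible` never occurs here, per the field comment above.
        delim: Delimiter::Parenthesis,
        tokens,
    }
}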


@ -2,7 +2,7 @@
use crate::ast::{AttrArgs, AttrArgsEq, AttrId, AttrItem, AttrKind, AttrStyle, AttrVec, Attribute};
use crate::ast::{DelimArgs, Expr, ExprKind, LitKind, MetaItemLit};
-use crate::ast::{MacDelimiter, MetaItem, MetaItemKind, NestedMetaItem, NormalAttr};
+use crate::ast::{MetaItem, MetaItemKind, NestedMetaItem, NormalAttr};
use crate::ast::{Path, PathSegment, DUMMY_NODE_ID};
use crate::ptr::P;
use crate::token::{self, CommentKind, Delimiter, Token};
@ -196,7 +196,7 @@ impl AttrItem {
    fn meta_item_list(&self) -> Option<ThinVec<NestedMetaItem>> {
        match &self.args {
-            AttrArgs::Delimited(args) if args.delim == MacDelimiter::Parenthesis => {
+            AttrArgs::Delimited(args) if args.delim == Delimiter::Parenthesis => {
                MetaItemKind::list_from_tokens(args.tokens.clone())
            }
            AttrArgs::Delimited(_) | AttrArgs::Eq(..) | AttrArgs::Empty => None,
@ -402,11 +402,9 @@ impl MetaItemKind {
    fn from_attr_args(args: &AttrArgs) -> Option<MetaItemKind> {
        match args {
            AttrArgs::Empty => Some(MetaItemKind::Word),
-            AttrArgs::Delimited(DelimArgs {
-                dspan: _,
-                delim: MacDelimiter::Parenthesis,
-                tokens,
-            }) => MetaItemKind::list_from_tokens(tokens.clone()).map(MetaItemKind::List),
+            AttrArgs::Delimited(DelimArgs { dspan: _, delim: Delimiter::Parenthesis, tokens }) => {
+                MetaItemKind::list_from_tokens(tokens.clone()).map(MetaItemKind::List)
+            }
            AttrArgs::Delimited(..) => None,
            AttrArgs::Eq(_, AttrArgsEq::Ast(expr)) => match expr.kind {
                ExprKind::Lit(token_lit) => {
@ -578,7 +576,7 @@ pub fn mk_attr_nested_word(
    let path = Path::from_ident(outer_ident);
    let attr_args = AttrArgs::Delimited(DelimArgs {
        dspan: DelimSpan::from_single(span),
-        delim: MacDelimiter::Parenthesis,
+        delim: Delimiter::Parenthesis,
        tokens: inner_tokens,
    });
    mk_attr(g, style, path, attr_args, span)


@ -41,8 +41,6 @@ pub enum BinOpToken {
/// Describes how a sequence of token trees is delimited.
/// Cannot use `proc_macro::Delimiter` directly because this
/// structure should implement some additional traits.
-/// The `None` variant is also renamed to `Invisible` to be
-/// less confusing and better convey the semantics.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
#[derive(Encodable, Decodable, Hash, HashStable_Generic)]
pub enum Delimiter {


@ -476,7 +476,7 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
            Some(MacHeader::Path(&item.path)),
            false,
            None,
-            delim.to_token(),
+            *delim,
            tokens,
            true,
            span,
@ -640,7 +640,7 @@ pub trait PrintState<'a>: std::ops::Deref<Target = pp::Printer> + std::ops::Dere
            Some(MacHeader::Keyword(kw)),
            has_bang,
            Some(*ident),
-            macro_def.body.delim.to_token(),
+            macro_def.body.delim,
            &macro_def.body.tokens.clone(),
            true,
            sp,
@ -1240,7 +1240,7 @@ impl<'a> State<'a> {
            Some(MacHeader::Path(&m.path)),
            true,
            None,
-            m.args.delim.to_token(),
+            m.args.delim,
            &m.args.tokens.clone(),
            true,
            m.span(),


@ -11,6 +11,7 @@
#![feature(trusted_step)]
#![feature(try_blocks)]
#![recursion_limit = "256"]
+#![cfg_attr(not(bootstrap), allow(internal_features))]
#[macro_use]
extern crate rustc_middle;


@ -4,8 +4,9 @@ use crate::edition_panic::use_panic_2021;
use crate::errors;
use rustc_ast::ptr::P;
use rustc_ast::token;
+use rustc_ast::token::Delimiter;
use rustc_ast::tokenstream::{DelimSpan, TokenStream};
-use rustc_ast::{DelimArgs, Expr, ExprKind, MacCall, MacDelimiter, Path, PathSegment, UnOp};
+use rustc_ast::{DelimArgs, Expr, ExprKind, MacCall, Path, PathSegment, UnOp};
use rustc_ast_pretty::pprust;
use rustc_errors::PResult;
use rustc_expand::base::{DummyResult, ExtCtxt, MacEager, MacResult};
@ -58,7 +59,7 @@ pub fn expand_assert<'cx>(
        path: panic_path(),
        args: P(DelimArgs {
            dspan: DelimSpan::from_single(call_site_span),
-            delim: MacDelimiter::Parenthesis,
+            delim: Delimiter::Parenthesis,
            tokens,
        }),
    })),


@ -1,9 +1,10 @@
use rustc_ast::{
    ptr::P,
    token,
+    token::Delimiter,
    tokenstream::{DelimSpan, TokenStream, TokenTree},
-    BinOpKind, BorrowKind, DelimArgs, Expr, ExprKind, ItemKind, MacCall, MacDelimiter, MethodCall,
-    Mutability, Path, PathSegment, Stmt, StructRest, UnOp, UseTree, UseTreeKind, DUMMY_NODE_ID,
+    BinOpKind, BorrowKind, DelimArgs, Expr, ExprKind, ItemKind, MacCall, MethodCall, Mutability,
+    Path, PathSegment, Stmt, StructRest, UnOp, UseTree, UseTreeKind, DUMMY_NODE_ID,
};
use rustc_ast_pretty::pprust;
use rustc_data_structures::fx::FxHashSet;
@ -179,7 +180,7 @@ impl<'cx, 'a> Context<'cx, 'a> {
        path: panic_path,
        args: P(DelimArgs {
            dspan: DelimSpan::from_single(self.span),
-            delim: MacDelimiter::Parenthesis,
+            delim: Delimiter::Parenthesis,
            tokens: initial.into_iter().chain(captures).collect::<TokenStream>(),
        }),
    })),


@ -1,4 +1,5 @@
use rustc_ast::ptr::P;
+use rustc_ast::token::Delimiter;
use rustc_ast::tokenstream::{DelimSpan, TokenStream};
use rustc_ast::*;
use rustc_expand::base::*;
@ -60,7 +61,7 @@ fn expand<'cx>(
        },
        args: P(DelimArgs {
            dspan: DelimSpan::from_single(sp),
-            delim: MacDelimiter::Parenthesis,
+            delim: Delimiter::Parenthesis,
            tokens: tts,
        }),
    })),


@ -117,8 +117,8 @@ pub(super) fn codegen_simd_intrinsic_call<'tcx>(
            });
        }
-        // simd_shuffle32<T, U>(x: T, y: T, idx: [u32; 32]) -> U
-        _ if intrinsic.as_str().starts_with("simd_shuffle") => {
+        // simd_shuffle<T, I, U>(x: T, y: T, idx: I) -> U
+        sym::simd_shuffle => {
            let (x, y, idx) = match args {
                [x, y, idx] => (x, y, idx),
                _ => {
@ -133,36 +133,26 @@ pub(super) fn codegen_simd_intrinsic_call<'tcx>(
                    return;
                }
-            // If this intrinsic is the older "simd_shuffleN" form, simply parse the integer.
-            // If there is no suffix, use the index array length.
-            let n: u16 = if intrinsic == sym::simd_shuffle {
-                // Make sure this is actually an array, since typeck only checks the length-suffixed
-                // version of this intrinsic.
-                let idx_ty = fx.monomorphize(idx.ty(fx.mir, fx.tcx));
-                match idx_ty.kind() {
-                    ty::Array(ty, len) if matches!(ty.kind(), ty::Uint(ty::UintTy::U32)) => len
-                        .try_eval_target_usize(fx.tcx, ty::ParamEnv::reveal_all())
-                        .unwrap_or_else(|| {
-                            span_bug!(span, "could not evaluate shuffle index array length")
-                        })
-                        .try_into()
-                        .unwrap(),
-                    _ => {
-                        fx.tcx.sess.span_err(
-                            span,
-                            format!(
-                                "simd_shuffle index must be an array of `u32`, got `{}`",
-                                idx_ty,
-                            ),
-                        );
-                        // Prevent verifier error
-                        fx.bcx.ins().trap(TrapCode::UnreachableCodeReached);
-                        return;
-                    }
-                }
-            } else {
-                // FIXME remove this case
-                intrinsic.as_str()["simd_shuffle".len()..].parse().unwrap()
-            };
+            // Make sure this is actually an array, since typeck only checks the length-suffixed
+            // version of this intrinsic.
+            let idx_ty = fx.monomorphize(idx.ty(fx.mir, fx.tcx));
+            let n: u16 = match idx_ty.kind() {
+                ty::Array(ty, len) if matches!(ty.kind(), ty::Uint(ty::UintTy::U32)) => len
+                    .try_eval_target_usize(fx.tcx, ty::ParamEnv::reveal_all())
+                    .unwrap_or_else(|| {
+                        span_bug!(span, "could not evaluate shuffle index array length")
+                    })
+                    .try_into()
+                    .unwrap(),
+                _ => {
+                    fx.tcx.sess.span_err(
+                        span,
+                        format!("simd_shuffle index must be an array of `u32`, got `{}`", idx_ty),
+                    );
+                    // Prevent verifier error
+                    fx.bcx.ins().trap(TrapCode::UnreachableCodeReached);
+                    return;
+                }
+            };
            assert_eq!(x.layout(), y.layout());
@ -179,7 +169,7 @@ pub(super) fn codegen_simd_intrinsic_call<'tcx>(
            let indexes = {
                use rustc_middle::mir::interpret::*;
                let idx_const = crate::constant::mir_operand_get_const_val(fx, idx)
-                    .expect("simd_shuffle* idx not const");
+                    .expect("simd_shuffle idx not const");
                let idx_bytes = match idx_const {
                    ConstValue::ByRef { alloc, offset } => {
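On the user-facing side, the length-suffixed `simd_shuffleN` spelling is gone: the index is always a const `[u32; N]` array whose length determines the output width. A hedged nightly sketch using portable SIMD's `simd_swizzle!`, which lowers to this intrinsic (not part of the commit):

#![feature(portable_simd)]
use std::simd::{f32x4, simd_swizzle};

fn main() {
    let a = f32x4::from_array([0.0, 1.0, 2.0, 3.0]);
    let b = f32x4::from_array([4.0, 5.0, 6.0, 7.0]);
    // The shuffle indices are a const array; its length carries the
    // information the old `simd_shuffle4`-style name suffix used to encode.
    let r = simd_swizzle!(a, b, [0, 4, 1, 5]);
    assert_eq!(r.to_array(), [0.0, 4.0, 1.0, 5.0]);
}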


@ -1,11 +1,11 @@
-#[cfg(feature="master")]
-use gccjit::{ComparisonOp, UnaryOp};
use gccjit::ToRValue;
use gccjit::{BinaryOp, RValue, Type};
+#[cfg(feature = "master")]
+use gccjit::{ComparisonOp, UnaryOp};
use rustc_codegen_ssa::base::compare_simd_types;
use rustc_codegen_ssa::common::{IntPredicate, TypeKind};
-#[cfg(feature="master")]
+#[cfg(feature = "master")]
use rustc_codegen_ssa::errors::ExpectedPointerMutability;
use rustc_codegen_ssa::errors::InvalidMonomorphization;
use rustc_codegen_ssa::mir::operand::OperandRef;
@ -19,7 +19,7 @@ use rustc_span::{sym, Span, Symbol};
use rustc_target::abi::Align;
use crate::builder::Builder;
-#[cfg(feature="master")]
+#[cfg(feature = "master")]
use crate::context::CodegenCx;
pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(
@ -57,7 +57,10 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(
    let arg_tys = sig.inputs();
    if name == sym::simd_select_bitmask {
-        require_simd!(arg_tys[1], InvalidMonomorphization::SimdArgument { span, name, ty: arg_tys[1] });
+        require_simd!(
+            arg_tys[1],
+            InvalidMonomorphization::SimdArgument { span, name, ty: arg_tys[1] }
+        );
        let (len, _) = arg_tys[1].simd_size_and_type(bx.tcx());
        let expected_int_bits = (len.max(8) - 1).next_power_of_two();
@ -95,7 +98,8 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(
        // NOTE: since the arguments can be vectors of floats, make sure the mask is a vector of
        // integer.
        let mask_element_type = bx.type_ix(arg1_element_type.get_size() as u64 * 8);
-        let vector_mask_type = bx.context.new_vector_type(mask_element_type, arg1_vector_type.get_num_units() as u64);
+        let vector_mask_type =
+            bx.context.new_vector_type(mask_element_type, arg1_vector_type.get_num_units() as u64);
        let mut elements = vec![];
        let one = bx.context.new_rvalue_one(mask.get_type());
@ -149,38 +153,24 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(
        // compare them as equal, so bitcast.
        // FIXME(antoyo): allow comparing vector types as equal in libgccjit.
        let arg2 = bx.context.new_bitcast(None, args[1].immediate(), arg1.get_type());
-        return Ok(compare_simd_types(
-            bx,
-            arg1,
-            arg2,
-            in_elem,
-            llret_ty,
-            cmp_op,
-        ));
+        return Ok(compare_simd_types(bx, arg1, arg2, in_elem, llret_ty, cmp_op));
    }
-    if let Some(stripped) = name.as_str().strip_prefix("simd_shuffle") {
-        let n: u64 = if stripped.is_empty() {
-            // Make sure this is actually an array, since typeck only checks the length-suffixed
-            // version of this intrinsic.
-            match args[2].layout.ty.kind() {
-                ty::Array(ty, len) if matches!(ty.kind(), ty::Uint(ty::UintTy::U32)) => {
-                    len.try_eval_target_usize(bx.cx.tcx, ty::ParamEnv::reveal_all()).unwrap_or_else(
-                        || span_bug!(span, "could not evaluate shuffle index array length"),
-                    )
-                }
-                _ => return_error!(InvalidMonomorphization::SimdShuffle {
-                    span,
-                    name,
-                    ty: args[2].layout.ty
-                }),
-            }
-        } else {
-            stripped.parse().unwrap_or_else(|_| {
-                span_bug!(span, "bad `simd_shuffle` instruction only caught in codegen?")
-            })
-        };
+    if name == sym::simd_shuffle {
+        // Make sure this is actually an array, since typeck only checks the length-suffixed
+        // version of this intrinsic.
+        let n: u64 = match args[2].layout.ty.kind() {
+            ty::Array(ty, len) if matches!(ty.kind(), ty::Uint(ty::UintTy::U32)) => {
+                len.try_eval_target_usize(bx.cx.tcx, ty::ParamEnv::reveal_all()).unwrap_or_else(
+                    || span_bug!(span, "could not evaluate shuffle index array length"),
+                )
+            }
+            _ => return_error!(InvalidMonomorphization::SimdShuffle {
+                span,
+                name,
+                ty: args[2].layout.ty
+            }),
+        };
        require_simd!(ret_ty, InvalidMonomorphization::SimdReturn { span, name, ty: ret_ty });
        let (out_len, out_ty) = ret_ty.simd_size_and_type(bx.tcx());
@ -202,7 +192,13 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(
    if name == sym::simd_insert {
        require!(
            in_elem == arg_tys[2],
-            InvalidMonomorphization::InsertedType { span, name, in_elem, in_ty, out_ty: arg_tys[2] }
+            InvalidMonomorphization::InsertedType {
+                span,
+                name,
+                in_elem,
+                in_ty,
+                out_ty: arg_tys[2]
+            }
        );
        let vector = args[0].immediate();
        let index = args[1].immediate();
@ -228,7 +224,10 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(
    if name == sym::simd_select {
        let m_elem_ty = in_elem;
        let m_len = in_len;
-        require_simd!(arg_tys[1], InvalidMonomorphization::SimdArgument { span, name, ty: arg_tys[1] });
+        require_simd!(
+            arg_tys[1],
+            InvalidMonomorphization::SimdArgument { span, name, ty: arg_tys[1] }
+        );
        let (v_len, _) = arg_tys[1].simd_size_and_type(bx.tcx());
        require!(
            m_len == v_len,
@ -241,7 +240,7 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(
        return Ok(bx.vector_select(args[0].immediate(), args[1].immediate(), args[2].immediate()));
    }
-    #[cfg(feature="master")]
+    #[cfg(feature = "master")]
    if name == sym::simd_cast || name == sym::simd_as {
        require_simd!(ret_ty, InvalidMonomorphization::SimdReturn { span, name, ty: ret_ty });
        let (out_len, out_elem) = ret_ty.simd_size_and_type(bx.tcx());
@ -267,19 +266,17 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(
            Unsupported,
        }
-        let in_style =
-            match in_elem.kind() {
-                ty::Int(_) | ty::Uint(_) => Style::Int,
-                ty::Float(_) => Style::Float,
-                _ => Style::Unsupported,
-            };
-        let out_style =
-            match out_elem.kind() {
-                ty::Int(_) | ty::Uint(_) => Style::Int,
-                ty::Float(_) => Style::Float,
-                _ => Style::Unsupported,
-            };
+        let in_style = match in_elem.kind() {
+            ty::Int(_) | ty::Uint(_) => Style::Int,
+            ty::Float(_) => Style::Float,
+            _ => Style::Unsupported,
+        };
+        let out_style = match out_elem.kind() {
+            ty::Int(_) | ty::Uint(_) => Style::Int,
+            ty::Float(_) => Style::Float,
+            _ => Style::Unsupported,
+        };
        match (in_style, out_style) {
            (Style::Unsupported, Style::Unsupported) => {
@ -294,7 +291,7 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(
                        out_elem
                    }
                );
-            },
+            }
            _ => return Ok(bx.context.convert_vector(None, args[0].immediate(), llret_ty)),
        }
    }
@ -342,10 +339,13 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(
    let mut shift = 0;
    for i in 0..in_len {
-        let elem = bx.extract_element(vector, bx.context.new_rvalue_from_int(bx.int_type, i as i32));
+        let elem =
+            bx.extract_element(vector, bx.context.new_rvalue_from_int(bx.int_type, i as i32));
        let shifted = elem >> sign_shift;
        let masked = shifted & one;
-        result = result | (bx.context.new_cast(None, masked, result_type) << bx.context.new_rvalue_from_int(result_type, shift));
+        result = result
+            | (bx.context.new_cast(None, masked, result_type)
+                << bx.context.new_rvalue_from_int(result_type, shift));
        shift += 1;
    }
@ -394,46 +394,50 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(
            return Err(());
        }};
    }
    let (elem_ty_str, elem_ty) = if let ty::Float(f) = in_elem.kind() {
        let elem_ty = bx.cx.type_float_from_ty(*f);
        match f.bit_width() {
            32 => ("f", elem_ty),
            64 => ("", elem_ty),
            _ => {
                return_error!(InvalidMonomorphization::FloatingPointVector {
                    span,
                    name,
                    f_ty: *f,
                    in_ty
                });
            }
        }
    } else {
        return_error!(InvalidMonomorphization::FloatingPointType { span, name, in_ty });
    };
    let vec_ty = bx.cx.type_vector(elem_ty, in_len);
    let intr_name = match name {
        sym::simd_ceil => "ceil",
        sym::simd_fabs => "fabs", // TODO(antoyo): pand with 170141183420855150465331762880109871103
        sym::simd_fcos => "cos",
        sym::simd_fexp2 => "exp2",
        sym::simd_fexp => "exp",
        sym::simd_flog10 => "log10",
        sym::simd_flog2 => "log2",
        sym::simd_flog => "log",
        sym::simd_floor => "floor",
        sym::simd_fma => "fma",
        sym::simd_fpowi => "__builtin_powi",
        sym::simd_fpow => "pow",
        sym::simd_fsin => "sin",
        sym::simd_fsqrt => "sqrt",
        sym::simd_round => "round",
        sym::simd_trunc => "trunc",
        _ => return_error!(InvalidMonomorphization::UnrecognizedIntrinsic { span, name }),
    };
    let builtin_name = format!("{}{}", intr_name, elem_ty_str);
    let funcs = bx.cx.functions.borrow();
-    let function = funcs.get(&builtin_name).unwrap_or_else(|| panic!("unable to find builtin function {}", builtin_name));
+    let function = funcs
+        .get(&builtin_name)
+        .unwrap_or_else(|| panic!("unable to find builtin function {}", builtin_name));
    // TODO(antoyo): add platform-specific behavior here for architectures that have these
    // intrinsics as instructions (for instance, gpus)
@ -479,8 +483,12 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(
        return simd_simple_float_intrinsic(name, in_elem, in_ty, in_len, bx, span, args);
    }
-    #[cfg(feature="master")]
-    fn vector_ty<'gcc, 'tcx>(cx: &CodegenCx<'gcc, 'tcx>, elem_ty: Ty<'tcx>, vec_len: u64) -> Type<'gcc> {
+    #[cfg(feature = "master")]
+    fn vector_ty<'gcc, 'tcx>(
+        cx: &CodegenCx<'gcc, 'tcx>,
+        elem_ty: Ty<'tcx>,
+        vec_len: u64,
+    ) -> Type<'gcc> {
        // FIXME: use cx.layout_of(ty).llvm_type() ?
        let elem_ty = match *elem_ty.kind() {
            ty::Int(v) => cx.type_int_from_ty(v),
@ -491,15 +499,22 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(
        cx.type_vector(elem_ty, vec_len)
    }
-    #[cfg(feature="master")]
-    fn gather<'a, 'gcc, 'tcx>(default: RValue<'gcc>, pointers: RValue<'gcc>, mask: RValue<'gcc>, pointer_count: usize, bx: &mut Builder<'a, 'gcc, 'tcx>, in_len: u64, underlying_ty: Ty<'tcx>, invert: bool) -> RValue<'gcc> {
-        let vector_type =
-            if pointer_count > 1 {
-                bx.context.new_vector_type(bx.usize_type, in_len)
-            }
-            else {
-                vector_ty(bx, underlying_ty, in_len)
-            };
+    #[cfg(feature = "master")]
+    fn gather<'a, 'gcc, 'tcx>(
+        default: RValue<'gcc>,
+        pointers: RValue<'gcc>,
+        mask: RValue<'gcc>,
+        pointer_count: usize,
+        bx: &mut Builder<'a, 'gcc, 'tcx>,
+        in_len: u64,
+        underlying_ty: Ty<'tcx>,
+        invert: bool,
+    ) -> RValue<'gcc> {
+        let vector_type = if pointer_count > 1 {
+            bx.context.new_vector_type(bx.usize_type, in_len)
+        } else {
+            vector_ty(bx, underlying_ty, in_len)
+        };
        let elem_type = vector_type.dyncast_vector().expect("vector type").get_element_type();
        let mut values = vec![];
@ -530,13 +545,12 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(
        if invert {
            bx.shuffle_vector(vector, default, mask)
-        }
-        else {
+        } else {
            bx.shuffle_vector(default, vector, mask)
        }
    }
-    #[cfg(feature="master")]
+    #[cfg(feature = "master")]
    if name == sym::simd_gather {
        // simd_gather(values: <N x T>, pointers: <N x *_ T>,
        //             mask: <N x i{M}>) -> <N x T>
@ -546,8 +560,14 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(
        // All types must be simd vector types
        require_simd!(in_ty, InvalidMonomorphization::SimdFirst { span, name, ty: in_ty });
-        require_simd!(arg_tys[1], InvalidMonomorphization::SimdSecond { span, name, ty: arg_tys[1] });
-        require_simd!(arg_tys[2], InvalidMonomorphization::SimdThird { span, name, ty: arg_tys[2] });
+        require_simd!(
+            arg_tys[1],
+            InvalidMonomorphization::SimdSecond { span, name, ty: arg_tys[1] }
+        );
+        require_simd!(
+            arg_tys[2],
+            InvalidMonomorphization::SimdThird { span, name, ty: arg_tys[2] }
+        );
        require_simd!(ret_ty, InvalidMonomorphization::SimdReturn { span, name, ty: ret_ty });
        // Of the same length:
@ -641,10 +661,19 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(
            }
        }
-        return Ok(gather(args[0].immediate(), args[1].immediate(), args[2].immediate(), pointer_count, bx, in_len, underlying_ty, false));
+        return Ok(gather(
+            args[0].immediate(),
+            args[1].immediate(),
+            args[2].immediate(),
+            pointer_count,
+            bx,
+            in_len,
+            underlying_ty,
+            false,
+        ));
    }
-    #[cfg(feature="master")]
+    #[cfg(feature = "master")]
    if name == sym::simd_scatter {
        // simd_scatter(values: <N x T>, pointers: <N x *mut T>,
        //              mask: <N x i{M}>) -> ()
@ -654,8 +683,14 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(
        // All types must be simd vector types
        require_simd!(in_ty, InvalidMonomorphization::SimdFirst { span, name, ty: in_ty });
-        require_simd!(arg_tys[1], InvalidMonomorphization::SimdSecond { span, name, ty: arg_tys[1] });
-        require_simd!(arg_tys[2], InvalidMonomorphization::SimdThird { span, name, ty: arg_tys[2] });
+        require_simd!(
+            arg_tys[1],
+            InvalidMonomorphization::SimdSecond { span, name, ty: arg_tys[1] }
+        );
+        require_simd!(
+            arg_tys[2],
+            InvalidMonomorphization::SimdThird { span, name, ty: arg_tys[2] }
+        );
        // Of the same length:
        let (element_len1, _) = arg_tys[1].simd_size_and_type(bx.tcx());
@ -744,17 +779,24 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(
            }
        }
-        let result = gather(args[0].immediate(), args[1].immediate(), args[2].immediate(), pointer_count, bx, in_len, underlying_ty, true);
+        let result = gather(
+            args[0].immediate(),
+            args[1].immediate(),
+            args[2].immediate(),
+            pointer_count,
+            bx,
+            in_len,
+            underlying_ty,
+            true,
+        );
        let pointers = args[1].immediate();
-        let vector_type =
-            if pointer_count > 1 {
-                bx.context.new_vector_type(bx.usize_type, in_len)
-            }
-            else {
-                vector_ty(bx, underlying_ty, in_len)
-            };
+        let vector_type = if pointer_count > 1 {
+            bx.context.new_vector_type(bx.usize_type, in_len)
+        } else {
+            vector_ty(bx, underlying_ty, in_len)
+        };
        let elem_type = vector_type.dyncast_vector().expect("vector type").get_element_type();
        for i in 0..in_len {
@ -809,11 +851,12 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(
    let rhs = args[1].immediate();
    let is_add = name == sym::simd_saturating_add;
    let ptr_bits = bx.tcx().data_layout.pointer_size.bits() as _;
-    let (signed, elem_width, elem_ty) =
-        match *in_elem.kind() {
-            ty::Int(i) => (true, i.bit_width().unwrap_or(ptr_bits) / 8, bx.cx.type_int_from_ty(i)),
-            ty::Uint(i) => (false, i.bit_width().unwrap_or(ptr_bits) / 8, bx.cx.type_uint_from_ty(i)),
-            _ => {
+    let (signed, elem_width, elem_ty) = match *in_elem.kind() {
+        ty::Int(i) => (true, i.bit_width().unwrap_or(ptr_bits) / 8, bx.cx.type_int_from_ty(i)),
+        ty::Uint(i) => {
+            (false, i.bit_width().unwrap_or(ptr_bits) / 8, bx.cx.type_uint_from_ty(i))
+        }
+        _ => {
            return_error!(InvalidMonomorphization::ExpectedVectorElementType {
                span,
                name,
@ -823,77 +866,82 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(
        }
    };
    let result = match (signed, is_add) {
        (false, true) => {
            let res = lhs + rhs;
            let cmp = bx.context.new_comparison(None, ComparisonOp::LessThan, res, lhs);
            res | cmp
        }
        (true, true) => {
            // Algorithm from: https://codereview.stackexchange.com/questions/115869/saturated-signed-addition
            // TODO(antoyo): improve using conditional operators if possible.
            // TODO(antoyo): dyncast_vector should not require a call to unqualified.
            let arg_type = lhs.get_type().unqualified();
            // TODO(antoyo): convert lhs and rhs to unsigned.
            let sum = lhs + rhs;
            let vector_type = arg_type.dyncast_vector().expect("vector type");
            let unit = vector_type.get_num_units();
            let a = bx.context.new_rvalue_from_int(elem_ty, ((elem_width as i32) << 3) - 1);
            let width = bx.context.new_rvalue_from_vector(None, lhs.get_type(), &vec![a; unit]);

            let xor1 = lhs ^ rhs;
            let xor2 = lhs ^ sum;
            let and =
                bx.context.new_unary_op(None, UnaryOp::BitwiseNegate, arg_type, xor1) & xor2;
            let mask = and >> width;

            let one = bx.context.new_rvalue_one(elem_ty);
            let ones =
                bx.context.new_rvalue_from_vector(None, lhs.get_type(), &vec![one; unit]);
            let shift1 = ones << width;
            let shift2 = sum >> width;
            let mask_min = shift1 ^ shift2;

            let and1 =
                bx.context.new_unary_op(None, UnaryOp::BitwiseNegate, arg_type, mask) & sum;
            let and2 = mask & mask_min;

            and1 + and2
        }
        (false, false) => {
            let res = lhs - rhs;
            let cmp = bx.context.new_comparison(None, ComparisonOp::LessThanEquals, res, lhs);
            res & cmp
        }
        (true, false) => {
            // TODO(antoyo): dyncast_vector should not require a call to unqualified.
            let arg_type = lhs.get_type().unqualified();
            // TODO(antoyo): this uses the same algorithm from saturating add, but add the
            // negative of the right operand. Find a proper subtraction algorithm.
            let rhs = bx.context.new_unary_op(None, UnaryOp::Minus, arg_type, rhs);

            // TODO(antoyo): convert lhs and rhs to unsigned.
            let sum = lhs + rhs;
            let vector_type = arg_type.dyncast_vector().expect("vector type");
            let unit = vector_type.get_num_units();
            let a = bx.context.new_rvalue_from_int(elem_ty, ((elem_width as i32) << 3) - 1);
            let width = bx.context.new_rvalue_from_vector(None, lhs.get_type(), &vec![a; unit]);

            let xor1 = lhs ^ rhs;
            let xor2 = lhs ^ sum;
            let and =
                bx.context.new_unary_op(None, UnaryOp::BitwiseNegate, arg_type, xor1) & xor2;
            let mask = and >> width;

            let one = bx.context.new_rvalue_one(elem_ty);
            let ones =
                bx.context.new_rvalue_from_vector(None, lhs.get_type(), &vec![one; unit]);
            let shift1 = ones << width;
            let shift2 = sum >> width;
            let mask_min = shift1 ^ shift2;

            let and1 =
                bx.context.new_unary_op(None, UnaryOp::BitwiseNegate, arg_type, mask) & sum;
            let and2 = mask & mask_min;

            and1 + and2
        }
    };

    return Ok(result);
}
@ -968,7 +1016,6 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(
                1.0
            );
    macro_rules! minmax_red {
        ($name:ident: $int_red:ident, $float_red:ident) => {
            if name == sym::$name {
@ -979,13 +1026,13 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(
                return match in_elem.kind() {
                    ty::Int(_) | ty::Uint(_) => Ok(bx.$int_red(args[0].immediate())),
                    ty::Float(_) => Ok(bx.$float_red(args[0].immediate())),
                    _ => return_error!(InvalidMonomorphization::UnsupportedSymbol {
                        span,
                        name,
                        symbol: sym::$name,
                        in_ty,
                        in_elem,
                        ret_ty
                    }),
                };
            }
@ -1025,7 +1072,15 @@ pub fn generic_simd_intrinsic<'a, 'gcc, 'tcx>(
            return match in_elem.kind() {
                ty::Int(_) | ty::Uint(_) => {
                    let r = bx.vector_reduce_op(input, $op);
-                    Ok(if !$boolean { r } else { bx.icmp(IntPredicate::IntNE, r, bx.context.new_rvalue_zero(r.get_type())) })
+                    Ok(if !$boolean {
+                        r
+                    } else {
+                        bx.icmp(
+                            IntPredicate::IntNE,
+                            r,
+                            bx.context.new_rvalue_zero(r.get_type()),
+                        )
+                    })
                }
                _ => return_error!(InvalidMonomorphization::UnsupportedSymbol {
                    span,


@ -1020,28 +1020,20 @@ fn generic_simd_intrinsic<'ll, 'tcx>(
    ));
    }
-    if let Some(stripped) = name.as_str().strip_prefix("simd_shuffle") {
-        // If this intrinsic is the older "simd_shuffleN" form, simply parse the integer.
-        // If there is no suffix, use the index array length.
-        let n: u64 = if stripped.is_empty() {
-            // Make sure this is actually an array, since typeck only checks the length-suffixed
-            // version of this intrinsic.
-            match args[2].layout.ty.kind() {
-                ty::Array(ty, len) if matches!(ty.kind(), ty::Uint(ty::UintTy::U32)) => {
-                    len.try_eval_target_usize(bx.cx.tcx, ty::ParamEnv::reveal_all()).unwrap_or_else(
-                        || span_bug!(span, "could not evaluate shuffle index array length"),
-                    )
-                }
-                _ => return_error!(InvalidMonomorphization::SimdShuffle {
-                    span,
-                    name,
-                    ty: args[2].layout.ty
-                }),
-            }
-        } else {
-            stripped.parse().unwrap_or_else(|_| {
-                span_bug!(span, "bad `simd_shuffle` instruction only caught in codegen?")
-            })
-        };
+    if name == sym::simd_shuffle {
+        // Make sure this is actually an array, since typeck only checks the length-suffixed
+        // version of this intrinsic.
+        let n: u64 = match args[2].layout.ty.kind() {
+            ty::Array(ty, len) if matches!(ty.kind(), ty::Uint(ty::UintTy::U32)) => {
+                len.try_eval_target_usize(bx.cx.tcx, ty::ParamEnv::reveal_all()).unwrap_or_else(
+                    || span_bug!(span, "could not evaluate shuffle index array length"),
+                )
+            }
+            _ => return_error!(InvalidMonomorphization::SimdShuffle {
+                span,
+                name,
+                ty: args[2].layout.ty
+            }),
+        };
        require_simd!(ret_ty, InvalidMonomorphization::SimdReturn { span, name, ty: ret_ty });


@ -3,9 +3,6 @@ name = "rustc_codegen_ssa"
version = "0.0.0"
edition = "2021"
-[lib]
-test = false
[dependencies]
ar_archive_writer = "0.1.3"
bitflags = "1.2.1"


@ -862,11 +862,11 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
                .iter()
                .enumerate()
                .map(|(i, arg)| {
-                    // The indices passed to simd_shuffle* in the
+                    // The indices passed to simd_shuffle in the
                    // third argument must be constant. This is
                    // checked by const-qualification, which also
                    // promotes any complex rvalues to constants.
-                    if i == 2 && intrinsic.as_str().starts_with("simd_shuffle") {
+                    if i == 2 && intrinsic == sym::simd_shuffle {
                        if let mir::Operand::Constant(constant) = arg {
                            let (llval, ty) = self.simd_shuffle_indices(&bx, constant);
                            return OperandRef {


@ -15,9 +15,6 @@ const_eval_await_non_const =
    cannot convert `{$ty}` into a future in {const_eval_const_context}s
const_eval_bounds_check_failed =
    indexing out of bounds: the len is {$len} but the index is {$index}
-const_eval_box_to_mut = {$front_matter}: encountered a box pointing to mutable memory in a constant
-const_eval_box_to_static = {$front_matter}: encountered a box pointing to a static variable in a constant
-const_eval_box_to_uninhabited = {$front_matter}: encountered a box pointing to uninhabited type {$ty}
const_eval_call_nonzero_intrinsic =
    `{$name}` called on 0
@ -41,18 +38,12 @@ const_eval_const_context = {$kind ->
const_eval_copy_nonoverlapping_overlapping =
    `copy_nonoverlapping` called on overlapping ranges
-const_eval_dangling_box_no_provenance = {$front_matter}: encountered a dangling box ({$pointer} has no provenance)
-const_eval_dangling_box_out_of_bounds = {$front_matter}: encountered a dangling box (going beyond the bounds of its allocation)
-const_eval_dangling_box_use_after_free = {$front_matter}: encountered a dangling box (use-after-free)
const_eval_dangling_int_pointer =
    {$bad_pointer_message}: {$pointer} is a dangling pointer (it has no provenance)
const_eval_dangling_null_pointer =
    {$bad_pointer_message}: null pointer is a dangling pointer (it has no provenance)
const_eval_dangling_ptr_in_final = encountered dangling pointer in final constant
-const_eval_dangling_ref_no_provenance = {$front_matter}: encountered a dangling reference ({$pointer} has no provenance)
-const_eval_dangling_ref_out_of_bounds = {$front_matter}: encountered a dangling reference (going beyond the bounds of its allocation)
-const_eval_dangling_ref_use_after_free = {$front_matter}: encountered a dangling reference (use-after-free)
const_eval_dead_local =
    accessing a dead local variable
const_eval_dealloc_immutable =
@ -105,7 +96,6 @@ const_eval_error = {$error_kind ->
const_eval_exact_div_has_remainder =
    exact_div: {$a} cannot be divided by {$b} without remainder
-const_eval_expected_non_ptr = {$front_matter}: encountered `{$value}`, but expected plain (non-pointer) bytes
const_eval_fn_ptr_call =
    function pointers need an RFC before allowed to be called in {const_eval_const_context}s
const_eval_for_loop_into_iter_non_const =
@ -156,8 +146,6 @@ const_eval_invalid_align_details =
const_eval_invalid_bool =
    interpreting an invalid 8-bit value as a bool: 0x{$value}
-const_eval_invalid_box_meta = {$front_matter}: encountered invalid box metadata: total size is bigger than largest supported object
-const_eval_invalid_box_slice_meta = {$front_matter}: encountered invalid box metadata: slice is bigger than largest supported object
const_eval_invalid_char =
    interpreting an invalid 32-bit value as a char: 0x{$value}
const_eval_invalid_dealloc =
@ -168,16 +156,12 @@ const_eval_invalid_dealloc =
        *[other] {""}
    }
-const_eval_invalid_enum_tag = {$front_matter}: encountered {$value}, but expected a valid enum tag
-const_eval_invalid_fn_ptr = {$front_matter}: encountered {$value}, but expected a function pointer
const_eval_invalid_function_pointer =
    using {$pointer} as function pointer but it does not point to a function
const_eval_invalid_meta =
    invalid metadata in wide pointer: total size is bigger than largest supported object
const_eval_invalid_meta_slice =
    invalid metadata in wide pointer: slice is bigger than largest supported object
-const_eval_invalid_ref_meta = {$front_matter}: encountered invalid reference metadata: total size is bigger than largest supported object
-const_eval_invalid_ref_slice_meta = {$front_matter}: encountered invalid reference metadata: slice is bigger than largest supported object
const_eval_invalid_str =
    this string is not valid UTF-8: {$err}
const_eval_invalid_tag =
@ -189,14 +173,10 @@ const_eval_invalid_uninit_bytes =
    reading memory at {$alloc}{$access}, but memory is uninitialized at {$uninit}, and this operation requires initialized memory
const_eval_invalid_uninit_bytes_unknown =
    using uninitialized data, but this operation requires initialized memory
-const_eval_invalid_value = constructing invalid value
-const_eval_invalid_value_with_path = constructing invalid value at {$path}
-## The `front_matter`s here refer to either `middle_invalid_value` or `middle_invalid_value_with_path`.
const_eval_invalid_vtable_pointer =
    using {$pointer} as vtable pointer but it does not point to a vtable
-const_eval_invalid_vtable_ptr = {$front_matter}: encountered {$value}, but expected a vtable pointer
const_eval_live_drop =
    destructor of `{$dropped_ty}` cannot be evaluated at compile-time
@ -218,14 +198,13 @@ const_eval_max_num_nodes_in_const = maximum number of nodes exceeded in constant
const_eval_memory_access_test = memory access failed
const_eval_memory_exhausted =
    tried to allocate more memory than available to compiler
const_eval_modified_global =
    modifying a static's initial value from another static's initializer
const_eval_mut_deref =
    mutation through a reference is not allowed in {const_eval_const_context}s
-const_eval_mutable_ref_in_const = {$front_matter}: encountered mutable reference in a `const`
-const_eval_never_val = {$front_matter}: encountered a value of the never type `!`
const_eval_non_const_fmt_macro_call =
    cannot call non-const formatting macro in {const_eval_const_context}s
@ -241,10 +220,6 @@ const_eval_noreturn_asm_returned =
const_eval_not_enough_caller_args =
    calling a function with fewer arguments than it requires
-const_eval_null_box = {$front_matter}: encountered a null box
-const_eval_null_fn_ptr = {$front_matter}: encountered a null function pointer
-const_eval_null_ref = {$front_matter}: encountered a null reference
-const_eval_nullable_ptr_out_of_range = {$front_matter}: encountered a potentially null pointer, but expected something that cannot possibly fail to be {$in_range}
const_eval_nullary_intrinsic_fail =
    could not evaluate nullary intrinsic
@ -257,7 +232,6 @@ const_eval_offset_from_underflow =
const_eval_operator_non_const =
    cannot call non-const operator in {const_eval_const_context}s
-const_eval_out_of_range = {$front_matter}: encountered {$value}, but expected something {$in_range}
const_eval_overflow =
    overflow executing `{$name}`
@ -287,7 +261,6 @@ const_eval_ptr_as_bytes_1 =
    this code performed an operation that depends on the underlying bytes representing a pointer
const_eval_ptr_as_bytes_2 =
    the absolute address of a pointer is not known at compile-time, so such operations are not supported
-const_eval_ptr_out_of_range = {$front_matter}: encountered a pointer, but expected something that cannot possibly fail to be {$in_range}
const_eval_question_branch_non_const =
    `?` cannot determine the branch of `{$ty}` in {const_eval_const_context}s
@ -315,8 +288,8 @@ const_eval_raw_ptr_to_int =
const_eval_read_extern_static =
    cannot read from extern static ({$did})
-const_eval_read_pointer_as_bytes =
-    unable to turn pointer into raw bytes
+const_eval_read_pointer_as_int =
+    unable to turn pointer into integer
const_eval_realloc_or_alloc_with_offset =
    {$kind ->
        [dealloc] deallocating
@ -324,9 +297,6 @@ const_eval_realloc_or_alloc_with_offset =
        *[other] {""}
    } {$ptr} which does not point to the beginning of an object
-const_eval_ref_to_mut = {$front_matter}: encountered a reference pointing to mutable memory in a constant
-const_eval_ref_to_static = {$front_matter}: encountered a reference pointing to a static variable in a constant
-const_eval_ref_to_uninhabited = {$front_matter}: encountered a reference pointing to uninhabited type {$ty}
const_eval_remainder_by_zero =
    calculating the remainder with a divisor of zero
const_eval_remainder_overflow =
@ -363,8 +333,6 @@ const_eval_transient_mut_borrow_raw = raw mutable references are not allowed in
const_eval_try_block_from_output_non_const =
    `try` block cannot convert `{$ty}` to the result in {const_eval_const_context}s
-const_eval_unaligned_box = {$front_matter}: encountered an unaligned box (required {$required_bytes} byte alignment but found {$found_bytes})
-const_eval_unaligned_ref = {$front_matter}: encountered an unaligned reference (required {$required_bytes} byte alignment but found {$found_bytes})
const_eval_unallowed_fn_pointer_call = function pointer calls are not allowed in {const_eval_const_context}s
const_eval_unallowed_heap_allocations =
@ -408,29 +376,14 @@ const_eval_undefined_behavior =
const_eval_undefined_behavior_note =
    The rules on what exactly is undefined behavior aren't clear, so this check might be overzealous. Please open an issue on the rustc repository if you believe it should not be considered undefined behavior.
-const_eval_uninhabited_enum_tag = {$front_matter}: encountered an uninhabited enum variant
const_eval_uninhabited_enum_variant_read =
    read discriminant of an uninhabited enum variant
const_eval_uninhabited_enum_variant_written =
    writing discriminant of an uninhabited enum variant
-const_eval_uninhabited_val = {$front_matter}: encountered a value of uninhabited type `{$ty}`
-const_eval_uninit = {$front_matter}: encountered uninitialized bytes
-const_eval_uninit_bool = {$front_matter}: encountered uninitialized memory, but expected a boolean
-const_eval_uninit_box = {$front_matter}: encountered uninitialized memory, but expected a box
-const_eval_uninit_char = {$front_matter}: encountered uninitialized memory, but expected a unicode scalar value
-const_eval_uninit_enum_tag = {$front_matter}: encountered uninitialized bytes, but expected a valid enum tag
-const_eval_uninit_float = {$front_matter}: encountered uninitialized memory, but expected a floating point number
-const_eval_uninit_fn_ptr = {$front_matter}: encountered uninitialized memory, but expected a function pointer
-const_eval_uninit_init_scalar = {$front_matter}: encountered uninitialized memory, but expected initialized scalar value
-const_eval_uninit_int = {$front_matter}: encountered uninitialized memory, but expected an integer
-const_eval_uninit_raw_ptr = {$front_matter}: encountered uninitialized memory, but expected a raw pointer
-const_eval_uninit_ref = {$front_matter}: encountered uninitialized memory, but expected a reference
-const_eval_uninit_str = {$front_matter}: encountered uninitialized data in `str`
const_eval_unreachable = entering unreachable code
const_eval_unreachable_unwind =
    unwinding past a stack frame that does not allow unwinding
-const_eval_unsafe_cell = {$front_matter}: encountered `UnsafeCell` in a `const`
const_eval_unsigned_offset_from_overflow =
    `ptr_offset_from_unsigned` called when first pointer has smaller offset than second: {$a_offset} < {$b_offset}
@ -453,8 +406,63 @@ const_eval_unwind_past_top =
const_eval_upcast_mismatch = const_eval_upcast_mismatch =
upcast on a pointer whose vtable does not match its type upcast on a pointer whose vtable does not match its type
## The `front_matter`s here refer to either `const_eval_validation_front_matter_invalid_value` or `const_eval_validation_front_matter_invalid_value_with_path`.
## (We'd love to sort this differently to make that more clear but tidy won't let us...)
const_eval_validation_box_to_mut = {$front_matter}: encountered a box pointing to mutable memory in a constant
const_eval_validation_box_to_static = {$front_matter}: encountered a box pointing to a static variable in a constant
const_eval_validation_box_to_uninhabited = {$front_matter}: encountered a box pointing to uninhabited type {$ty}
const_eval_validation_dangling_box_no_provenance = {$front_matter}: encountered a dangling box ({$pointer} has no provenance)
const_eval_validation_dangling_box_out_of_bounds = {$front_matter}: encountered a dangling box (going beyond the bounds of its allocation)
const_eval_validation_dangling_box_use_after_free = {$front_matter}: encountered a dangling box (use-after-free)
const_eval_validation_dangling_ref_no_provenance = {$front_matter}: encountered a dangling reference ({$pointer} has no provenance)
const_eval_validation_dangling_ref_out_of_bounds = {$front_matter}: encountered a dangling reference (going beyond the bounds of its allocation)
const_eval_validation_dangling_ref_use_after_free = {$front_matter}: encountered a dangling reference (use-after-free)
const_eval_validation_expected_bool = expected a boolean
const_eval_validation_expected_box = expected a box
const_eval_validation_expected_char = expected a unicode scalar value
const_eval_validation_expected_enum_tag = expected a valid enum tag
const_eval_validation_expected_float = expected a floating point number
const_eval_validation_expected_fn_ptr = expected a function pointer
const_eval_validation_expected_init_scalar = expected initialized scalar value
const_eval_validation_expected_int = expected an integer
const_eval_validation_expected_raw_ptr = expected a raw pointer
const_eval_validation_expected_ref = expected a reference
const_eval_validation_expected_str = expected a string
const_eval_validation_front_matter_invalid_value = constructing invalid value
const_eval_validation_front_matter_invalid_value_with_path = constructing invalid value at {$path}
const_eval_validation_invalid_bool = {$front_matter}: encountered {$value}, but expected a boolean const_eval_validation_invalid_bool = {$front_matter}: encountered {$value}, but expected a boolean
const_eval_validation_invalid_box_meta = {$front_matter}: encountered invalid box metadata: total size is bigger than largest supported object
const_eval_validation_invalid_box_slice_meta = {$front_matter}: encountered invalid box metadata: slice is bigger than largest supported object
const_eval_validation_invalid_char = {$front_matter}: encountered {$value}, but expected a valid unicode scalar value (in `0..=0x10FFFF` but not in `0xD800..=0xDFFF`) const_eval_validation_invalid_char = {$front_matter}: encountered {$value}, but expected a valid unicode scalar value (in `0..=0x10FFFF` but not in `0xD800..=0xDFFF`)
const_eval_validation_invalid_enum_tag = {$front_matter}: encountered {$value}, but expected a valid enum tag
const_eval_validation_invalid_fn_ptr = {$front_matter}: encountered {$value}, but expected a function pointer
const_eval_validation_invalid_ref_meta = {$front_matter}: encountered invalid reference metadata: total size is bigger than largest supported object
const_eval_validation_invalid_ref_slice_meta = {$front_matter}: encountered invalid reference metadata: slice is bigger than largest supported object
const_eval_validation_invalid_vtable_ptr = {$front_matter}: encountered {$value}, but expected a vtable pointer
const_eval_validation_mutable_ref_in_const = {$front_matter}: encountered mutable reference in a `const`
const_eval_validation_never_val = {$front_matter}: encountered a value of the never type `!`
const_eval_validation_null_box = {$front_matter}: encountered a null box
const_eval_validation_null_fn_ptr = {$front_matter}: encountered a null function pointer
const_eval_validation_null_ref = {$front_matter}: encountered a null reference
const_eval_validation_nullable_ptr_out_of_range = {$front_matter}: encountered a potentially null pointer, but expected something that cannot possibly fail to be {$in_range}
const_eval_validation_out_of_range = {$front_matter}: encountered {$value}, but expected something {$in_range}
const_eval_validation_partial_pointer = {$front_matter}: encountered a partial pointer or a mix of pointers
const_eval_validation_pointer_as_int = {$front_matter}: encountered a pointer, but {$expected}
const_eval_validation_ptr_out_of_range = {$front_matter}: encountered a pointer, but expected something that cannot possibly fail to be {$in_range}
const_eval_validation_ref_to_mut = {$front_matter}: encountered a reference pointing to mutable memory in a constant
const_eval_validation_ref_to_static = {$front_matter}: encountered a reference pointing to a static variable in a constant
const_eval_validation_ref_to_uninhabited = {$front_matter}: encountered a reference pointing to uninhabited type {$ty}
const_eval_validation_unaligned_box = {$front_matter}: encountered an unaligned box (required {$required_bytes} byte alignment but found {$found_bytes})
const_eval_validation_unaligned_ref = {$front_matter}: encountered an unaligned reference (required {$required_bytes} byte alignment but found {$found_bytes})
const_eval_validation_uninhabited_enum_variant = {$front_matter}: encountered an uninhabited enum variant
const_eval_validation_uninhabited_val = {$front_matter}: encountered a value of uninhabited type `{$ty}`
const_eval_validation_uninit = {$front_matter}: encountered uninitialized memory, but {$expected}
const_eval_validation_unsafe_cell = {$front_matter}: encountered `UnsafeCell` in a `const`
const_eval_write_to_read_only = const_eval_write_to_read_only =
writing to {$allocation} which is read-only writing to {$allocation} which is read-only
const_eval_zst_pointer_out_of_bounds = const_eval_zst_pointer_out_of_bounds =
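Editor's note (not part of the diff): the `{$front_matter}` placeholder in the validation messages above is filled from one of the two `front_matter` messages, and the specific message is appended after it. A loose sketch of how a rendered diagnostic composes; the path and values are made up, and the real rendering goes through the Fluent bundle rather than `format!`:

```rust
fn main() {
    // Front matter, e.g. `const_eval_validation_front_matter_invalid_value_with_path`
    // with a hypothetical `path` argument.
    let front_matter = format!("constructing invalid value at {}", ".field[0]");
    // Tail, e.g. `const_eval_validation_uninit` with `{$expected}` already resolved.
    let expected = "expected an integer";
    let rendered = format!("{front_matter}: encountered uninitialized memory, but {expected}");
    assert_eq!(
        rendered,
        "constructing invalid value at .field[0]: encountered uninitialized memory, but expected an integer"
    );
}
```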
View File
@ -513,7 +513,7 @@ impl<'a> ReportErrorExt for UndefinedBehaviorInfo<'a> {
ScalarSizeMismatch(_) => const_eval_scalar_size_mismatch, ScalarSizeMismatch(_) => const_eval_scalar_size_mismatch,
UninhabitedEnumVariantWritten(_) => const_eval_uninhabited_enum_variant_written, UninhabitedEnumVariantWritten(_) => const_eval_uninhabited_enum_variant_written,
UninhabitedEnumVariantRead(_) => const_eval_uninhabited_enum_variant_read, UninhabitedEnumVariantRead(_) => const_eval_uninhabited_enum_variant_read,
Validation(e) => e.diagnostic_message(), ValidationError(e) => e.diagnostic_message(),
Custom(x) => (x.msg)(), Custom(x) => (x.msg)(),
} }
} }
@ -587,13 +587,13 @@ impl<'a> ReportErrorExt for UndefinedBehaviorInfo<'a> {
InvalidUninitBytes(Some((alloc, info))) => { InvalidUninitBytes(Some((alloc, info))) => {
builder.set_arg("alloc", alloc); builder.set_arg("alloc", alloc);
builder.set_arg("access", info.access); builder.set_arg("access", info.access);
builder.set_arg("uninit", info.uninit); builder.set_arg("uninit", info.bad);
} }
ScalarSizeMismatch(info) => { ScalarSizeMismatch(info) => {
builder.set_arg("target_size", info.target_size); builder.set_arg("target_size", info.target_size);
builder.set_arg("data_size", info.data_size); builder.set_arg("data_size", info.data_size);
} }
Validation(e) => e.add_args(handler, builder), ValidationError(e) => e.add_args(handler, builder),
Custom(custom) => { Custom(custom) => {
(custom.add_args)(&mut |name, value| { (custom.add_args)(&mut |name, value| {
builder.set_arg(name, value); builder.set_arg(name, value);
@ -608,74 +608,72 @@ impl<'tcx> ReportErrorExt for ValidationErrorInfo<'tcx> {
use crate::fluent_generated::*; use crate::fluent_generated::*;
use rustc_middle::mir::interpret::ValidationErrorKind::*; use rustc_middle::mir::interpret::ValidationErrorKind::*;
match self.kind { match self.kind {
PtrToUninhabited { ptr_kind: PointerKind::Box, .. } => const_eval_box_to_uninhabited, PtrToUninhabited { ptr_kind: PointerKind::Box, .. } => {
PtrToUninhabited { ptr_kind: PointerKind::Ref, .. } => const_eval_ref_to_uninhabited, const_eval_validation_box_to_uninhabited
}
PtrToUninhabited { ptr_kind: PointerKind::Ref, .. } => {
const_eval_validation_ref_to_uninhabited
}
PtrToStatic { ptr_kind: PointerKind::Box } => const_eval_box_to_static, PtrToStatic { ptr_kind: PointerKind::Box } => const_eval_validation_box_to_static,
PtrToStatic { ptr_kind: PointerKind::Ref } => const_eval_ref_to_static, PtrToStatic { ptr_kind: PointerKind::Ref } => const_eval_validation_ref_to_static,
PtrToMut { ptr_kind: PointerKind::Box } => const_eval_box_to_mut, PtrToMut { ptr_kind: PointerKind::Box } => const_eval_validation_box_to_mut,
PtrToMut { ptr_kind: PointerKind::Ref } => const_eval_ref_to_mut, PtrToMut { ptr_kind: PointerKind::Ref } => const_eval_validation_ref_to_mut,
ExpectedNonPtr { .. } => const_eval_expected_non_ptr, PointerAsInt { .. } => const_eval_validation_pointer_as_int,
MutableRefInConst => const_eval_mutable_ref_in_const, PartialPointer => const_eval_validation_partial_pointer,
NullFnPtr => const_eval_null_fn_ptr, MutableRefInConst => const_eval_validation_mutable_ref_in_const,
NeverVal => const_eval_never_val, NullFnPtr => const_eval_validation_null_fn_ptr,
NullablePtrOutOfRange { .. } => const_eval_nullable_ptr_out_of_range, NeverVal => const_eval_validation_never_val,
PtrOutOfRange { .. } => const_eval_ptr_out_of_range, NullablePtrOutOfRange { .. } => const_eval_validation_nullable_ptr_out_of_range,
OutOfRange { .. } => const_eval_out_of_range, PtrOutOfRange { .. } => const_eval_validation_ptr_out_of_range,
UnsafeCell => const_eval_unsafe_cell, OutOfRange { .. } => const_eval_validation_out_of_range,
UninhabitedVal { .. } => const_eval_uninhabited_val, UnsafeCell => const_eval_validation_unsafe_cell,
InvalidEnumTag { .. } => const_eval_invalid_enum_tag, UninhabitedVal { .. } => const_eval_validation_uninhabited_val,
UninhabitedEnumTag => const_eval_uninhabited_enum_tag, InvalidEnumTag { .. } => const_eval_validation_invalid_enum_tag,
UninitEnumTag => const_eval_uninit_enum_tag, UninhabitedEnumVariant => const_eval_validation_uninhabited_enum_variant,
UninitStr => const_eval_uninit_str, Uninit { .. } => const_eval_validation_uninit,
Uninit { expected: ExpectedKind::Bool } => const_eval_uninit_bool, InvalidVTablePtr { .. } => const_eval_validation_invalid_vtable_ptr,
Uninit { expected: ExpectedKind::Reference } => const_eval_uninit_ref,
Uninit { expected: ExpectedKind::Box } => const_eval_uninit_box,
Uninit { expected: ExpectedKind::RawPtr } => const_eval_uninit_raw_ptr,
Uninit { expected: ExpectedKind::InitScalar } => const_eval_uninit_init_scalar,
Uninit { expected: ExpectedKind::Char } => const_eval_uninit_char,
Uninit { expected: ExpectedKind::Float } => const_eval_uninit_float,
Uninit { expected: ExpectedKind::Int } => const_eval_uninit_int,
Uninit { expected: ExpectedKind::FnPtr } => const_eval_uninit_fn_ptr,
UninitVal => const_eval_uninit,
InvalidVTablePtr { .. } => const_eval_invalid_vtable_ptr,
InvalidMetaSliceTooLarge { ptr_kind: PointerKind::Box } => { InvalidMetaSliceTooLarge { ptr_kind: PointerKind::Box } => {
const_eval_invalid_box_slice_meta const_eval_validation_invalid_box_slice_meta
} }
InvalidMetaSliceTooLarge { ptr_kind: PointerKind::Ref } => { InvalidMetaSliceTooLarge { ptr_kind: PointerKind::Ref } => {
const_eval_invalid_ref_slice_meta const_eval_validation_invalid_ref_slice_meta
} }
InvalidMetaTooLarge { ptr_kind: PointerKind::Box } => const_eval_invalid_box_meta, InvalidMetaTooLarge { ptr_kind: PointerKind::Box } => {
InvalidMetaTooLarge { ptr_kind: PointerKind::Ref } => const_eval_invalid_ref_meta, const_eval_validation_invalid_box_meta
UnalignedPtr { ptr_kind: PointerKind::Ref, .. } => const_eval_unaligned_ref, }
UnalignedPtr { ptr_kind: PointerKind::Box, .. } => const_eval_unaligned_box, InvalidMetaTooLarge { ptr_kind: PointerKind::Ref } => {
const_eval_validation_invalid_ref_meta
}
UnalignedPtr { ptr_kind: PointerKind::Ref, .. } => const_eval_validation_unaligned_ref,
UnalignedPtr { ptr_kind: PointerKind::Box, .. } => const_eval_validation_unaligned_box,
NullPtr { ptr_kind: PointerKind::Box } => const_eval_null_box, NullPtr { ptr_kind: PointerKind::Box } => const_eval_validation_null_box,
NullPtr { ptr_kind: PointerKind::Ref } => const_eval_null_ref, NullPtr { ptr_kind: PointerKind::Ref } => const_eval_validation_null_ref,
DanglingPtrNoProvenance { ptr_kind: PointerKind::Box, .. } => { DanglingPtrNoProvenance { ptr_kind: PointerKind::Box, .. } => {
const_eval_dangling_box_no_provenance const_eval_validation_dangling_box_no_provenance
} }
DanglingPtrNoProvenance { ptr_kind: PointerKind::Ref, .. } => { DanglingPtrNoProvenance { ptr_kind: PointerKind::Ref, .. } => {
const_eval_dangling_ref_no_provenance const_eval_validation_dangling_ref_no_provenance
} }
DanglingPtrOutOfBounds { ptr_kind: PointerKind::Box } => { DanglingPtrOutOfBounds { ptr_kind: PointerKind::Box } => {
const_eval_dangling_box_out_of_bounds const_eval_validation_dangling_box_out_of_bounds
} }
DanglingPtrOutOfBounds { ptr_kind: PointerKind::Ref } => { DanglingPtrOutOfBounds { ptr_kind: PointerKind::Ref } => {
const_eval_dangling_ref_out_of_bounds const_eval_validation_dangling_ref_out_of_bounds
} }
DanglingPtrUseAfterFree { ptr_kind: PointerKind::Box } => { DanglingPtrUseAfterFree { ptr_kind: PointerKind::Box } => {
const_eval_dangling_box_use_after_free const_eval_validation_dangling_box_use_after_free
} }
DanglingPtrUseAfterFree { ptr_kind: PointerKind::Ref } => { DanglingPtrUseAfterFree { ptr_kind: PointerKind::Ref } => {
const_eval_dangling_ref_use_after_free const_eval_validation_dangling_ref_use_after_free
} }
InvalidBool { .. } => const_eval_validation_invalid_bool, InvalidBool { .. } => const_eval_validation_invalid_bool,
InvalidChar { .. } => const_eval_validation_invalid_char, InvalidChar { .. } => const_eval_validation_invalid_char,
InvalidFnPtr { .. } => const_eval_invalid_fn_ptr, InvalidFnPtr { .. } => const_eval_validation_invalid_fn_ptr,
} }
} }
@ -683,13 +681,21 @@ impl<'tcx> ReportErrorExt for ValidationErrorInfo<'tcx> {
use crate::fluent_generated as fluent; use crate::fluent_generated as fluent;
use rustc_middle::mir::interpret::ValidationErrorKind::*; use rustc_middle::mir::interpret::ValidationErrorKind::*;
if let PointerAsInt { .. } | PartialPointer = self.kind {
err.help(fluent::const_eval_ptr_as_bytes_1);
err.help(fluent::const_eval_ptr_as_bytes_2);
}
let message = if let Some(path) = self.path { let message = if let Some(path) = self.path {
handler.eagerly_translate_to_string( handler.eagerly_translate_to_string(
fluent::const_eval_invalid_value_with_path, fluent::const_eval_validation_front_matter_invalid_value_with_path,
[("path".into(), DiagnosticArgValue::Str(path.into()))].iter().map(|(a, b)| (a, b)), [("path".into(), DiagnosticArgValue::Str(path.into()))].iter().map(|(a, b)| (a, b)),
) )
} else { } else {
handler.eagerly_translate_to_string(fluent::const_eval_invalid_value, [].into_iter()) handler.eagerly_translate_to_string(
fluent::const_eval_validation_front_matter_invalid_value,
[].into_iter(),
)
}; };
err.set_arg("front_matter", message); err.set_arg("front_matter", message);
@ -729,8 +735,24 @@ impl<'tcx> ReportErrorExt for ValidationErrorInfo<'tcx> {
PtrToUninhabited { ty, .. } | UninhabitedVal { ty } => { PtrToUninhabited { ty, .. } | UninhabitedVal { ty } => {
err.set_arg("ty", ty); err.set_arg("ty", ty);
} }
ExpectedNonPtr { value } PointerAsInt { expected } | Uninit { expected } => {
| InvalidEnumTag { value } let msg = match expected {
ExpectedKind::Reference => fluent::const_eval_validation_expected_ref,
ExpectedKind::Box => fluent::const_eval_validation_expected_box,
ExpectedKind::RawPtr => fluent::const_eval_validation_expected_raw_ptr,
ExpectedKind::InitScalar => fluent::const_eval_validation_expected_init_scalar,
ExpectedKind::Bool => fluent::const_eval_validation_expected_bool,
ExpectedKind::Char => fluent::const_eval_validation_expected_char,
ExpectedKind::Float => fluent::const_eval_validation_expected_float,
ExpectedKind::Int => fluent::const_eval_validation_expected_int,
ExpectedKind::FnPtr => fluent::const_eval_validation_expected_fn_ptr,
ExpectedKind::EnumTag => fluent::const_eval_validation_expected_enum_tag,
ExpectedKind::Str => fluent::const_eval_validation_expected_str,
};
let msg = handler.eagerly_translate_to_string(msg, [].into_iter());
err.set_arg("expected", msg);
}
InvalidEnumTag { value }
| InvalidVTablePtr { value } | InvalidVTablePtr { value }
| InvalidBool { value } | InvalidBool { value }
| InvalidChar { value } | InvalidChar { value }
@ -758,15 +780,12 @@ impl<'tcx> ReportErrorExt for ValidationErrorInfo<'tcx> {
| NullFnPtr | NullFnPtr
| NeverVal | NeverVal
| UnsafeCell | UnsafeCell
| UninitEnumTag
| UninitStr
| Uninit { .. }
| UninitVal
| InvalidMetaSliceTooLarge { .. } | InvalidMetaSliceTooLarge { .. }
| InvalidMetaTooLarge { .. } | InvalidMetaTooLarge { .. }
| DanglingPtrUseAfterFree { .. } | DanglingPtrUseAfterFree { .. }
| DanglingPtrOutOfBounds { .. } | DanglingPtrOutOfBounds { .. }
| UninhabitedEnumTag => {} | UninhabitedEnumVariant
| PartialPointer => {}
} }
} }
} }
@ -776,9 +795,9 @@ impl ReportErrorExt for UnsupportedOpInfo {
use crate::fluent_generated::*; use crate::fluent_generated::*;
match self { match self {
UnsupportedOpInfo::Unsupported(s) => s.clone().into(), UnsupportedOpInfo::Unsupported(s) => s.clone().into(),
UnsupportedOpInfo::PartialPointerOverwrite(_) => const_eval_partial_pointer_overwrite, UnsupportedOpInfo::OverwritePartialPointer(_) => const_eval_partial_pointer_overwrite,
UnsupportedOpInfo::PartialPointerCopy(_) => const_eval_partial_pointer_copy, UnsupportedOpInfo::ReadPartialPointer(_) => const_eval_partial_pointer_copy,
UnsupportedOpInfo::ReadPointerAsBytes => const_eval_read_pointer_as_bytes, UnsupportedOpInfo::ReadPointerAsInt(_) => const_eval_read_pointer_as_int,
UnsupportedOpInfo::ThreadLocalStatic(_) => const_eval_thread_local_static, UnsupportedOpInfo::ThreadLocalStatic(_) => const_eval_thread_local_static,
UnsupportedOpInfo::ReadExternStatic(_) => const_eval_read_extern_static, UnsupportedOpInfo::ReadExternStatic(_) => const_eval_read_extern_static,
} }
@ -787,13 +806,16 @@ impl ReportErrorExt for UnsupportedOpInfo {
use crate::fluent_generated::*; use crate::fluent_generated::*;
use UnsupportedOpInfo::*; use UnsupportedOpInfo::*;
if let ReadPointerAsBytes | PartialPointerOverwrite(_) | PartialPointerCopy(_) = self { if let ReadPointerAsInt(_) | OverwritePartialPointer(_) | ReadPartialPointer(_) = self {
builder.help(const_eval_ptr_as_bytes_1); builder.help(const_eval_ptr_as_bytes_1);
builder.help(const_eval_ptr_as_bytes_2); builder.help(const_eval_ptr_as_bytes_2);
} }
match self { match self {
Unsupported(_) | ReadPointerAsBytes => {} // `ReadPointerAsInt(Some(info))` is never printed anyway, it only serves as an error to
PartialPointerOverwrite(ptr) | PartialPointerCopy(ptr) => { // be further processed by validity checking which then turns it into something nice to
// print. So it's not worth the effort of having diagnostics that can print the `info`.
Unsupported(_) | ReadPointerAsInt(_) => {}
OverwritePartialPointer(ptr) | ReadPartialPointer(ptr) => {
builder.set_arg("ptr", ptr); builder.set_arg("ptr", ptr);
} }
ThreadLocalStatic(did) | ReadExternStatic(did) => { ThreadLocalStatic(did) | ReadExternStatic(did) => {
View File
@ -1,4 +1,4 @@
//! Intrinsics and other functions that the miri engine executes without //! Intrinsics and other functions that the interpreter executes without
//! looking at their MIR. Intrinsics/functions supported here are shared by CTFE //! looking at their MIR. Intrinsics/functions supported here are shared by CTFE
//! and miri. //! and miri.
View File
@ -91,7 +91,7 @@ impl<'tcx, Other> FnVal<'tcx, Other> {
// `Memory` has to depend on the `Machine` because some of its operations // `Memory` has to depend on the `Machine` because some of its operations
// (e.g., `get`) call a `Machine` hook. // (e.g., `get`) call a `Machine` hook.
pub struct Memory<'mir, 'tcx, M: Machine<'mir, 'tcx>> { pub struct Memory<'mir, 'tcx, M: Machine<'mir, 'tcx>> {
/// Allocations local to this instance of the miri engine. The kind /// Allocations local to this instance of the interpreter. The kind
/// helps ensure that the same mechanism is used for allocation and /// helps ensure that the same mechanism is used for allocation and
/// deallocation. When an allocation is not found here, it is a /// deallocation. When an allocation is not found here, it is a
/// global and looked up in the `tcx` for read access. Some machines may /// global and looked up in the `tcx` for read access. Some machines may
View File
@ -25,13 +25,17 @@ use rustc_target::abi::{
use std::hash::Hash; use std::hash::Hash;
// for the validation errors
use super::UndefinedBehaviorInfo::*;
use super::{ use super::{
AllocId, CheckInAllocMsg, GlobalAlloc, ImmTy, Immediate, InterpCx, InterpResult, MPlaceTy, AllocId, CheckInAllocMsg, GlobalAlloc, ImmTy, Immediate, InterpCx, InterpResult, MPlaceTy,
Machine, MemPlaceMeta, OpTy, Pointer, Projectable, Scalar, ValueVisitor, Machine, MemPlaceMeta, OpTy, Pointer, Projectable, Scalar, ValueVisitor,
}; };
// for the validation errors
use super::InterpError::UndefinedBehavior as Ub;
use super::InterpError::Unsupported as Unsup;
use super::UndefinedBehaviorInfo::*;
use super::UnsupportedOpInfo::*;
macro_rules! throw_validation_failure { macro_rules! throw_validation_failure {
($where:expr, $kind: expr) => {{ ($where:expr, $kind: expr) => {{
let where_ = &$where; let where_ = &$where;
@ -43,7 +47,7 @@ macro_rules! throw_validation_failure {
None None
}; };
throw_ub!(Validation(ValidationErrorInfo { path, kind: $kind })) throw_ub!(ValidationError(ValidationErrorInfo { path, kind: $kind }))
}}; }};
} }
@ -85,16 +89,16 @@ macro_rules! try_validation {
Ok(x) => x, Ok(x) => x,
// We catch the error and turn it into a validation failure. We are okay with // We catch the error and turn it into a validation failure. We are okay with
// allocation here as this can only slow down builds that fail anyway. // allocation here as this can only slow down builds that fail anyway.
Err(e) => match e.into_parts() { Err(e) => match e.kind() {
$( $(
(InterpError::UndefinedBehavior($($p)|+), _) => $($p)|+ =>
throw_validation_failure!( throw_validation_failure!(
$where, $where,
$kind $kind
) )
),+, ),+,
#[allow(unreachable_patterns)] #[allow(unreachable_patterns)]
(e, rest) => Err::<!, _>($crate::interpret::InterpErrorInfo::from_parts(e, rest))?, _ => Err::<!, _>(e)?,
} }
} }
}}; }};
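Editor's note (not part of the diff): a minimal, self-contained sketch of the matching shape the reworked `try_validation!` arms rely on. The enums below are stand-ins, not the real interpreter types from `rustc_middle::mir::interpret`; the point is only that the `Ub`/`Unsup` aliases let callers match directly on the error kind.

```rust
// Stand-in enums; the real ones live in rustc_middle::mir::interpret.
enum UndefinedBehaviorInfo {
    InvalidUninitBytes,
}
enum UnsupportedOpInfo {
    ReadPointerAsInt,
}
enum InterpError {
    UndefinedBehavior(UndefinedBehaviorInfo),
    Unsupported(UnsupportedOpInfo),
}
// The same aliasing trick as in the diff, applied to the stand-in enum.
use crate::InterpError::{UndefinedBehavior as Ub, Unsupported as Unsup};

fn classify(err: &InterpError) -> &'static str {
    match err {
        Ub(UndefinedBehaviorInfo::InvalidUninitBytes) => "uninitialized bytes",
        Unsup(UnsupportedOpInfo::ReadPointerAsInt) => "pointer read as integer",
    }
}

fn main() {
    println!("{}", classify(&Ub(UndefinedBehaviorInfo::InvalidUninitBytes)));
    println!("{}", classify(&Unsup(UnsupportedOpInfo::ReadPointerAsInt)));
}
```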
@ -294,7 +298,13 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValidityVisitor<'rt, 'mir, '
Ok(try_validation!( Ok(try_validation!(
self.ecx.read_immediate(op), self.ecx.read_immediate(op),
self.path, self.path,
InvalidUninitBytes(None) => Uninit { expected } Ub(InvalidUninitBytes(None)) =>
Uninit { expected },
// The `Unsup` cases can only occur during CTFE
Unsup(ReadPointerAsInt(_)) =>
PointerAsInt { expected },
Unsup(ReadPartialPointer(_)) =>
PartialPointer,
)) ))
} }
@ -319,8 +329,8 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValidityVisitor<'rt, 'mir, '
let (_ty, _trait) = try_validation!( let (_ty, _trait) = try_validation!(
self.ecx.get_ptr_vtable(vtable), self.ecx.get_ptr_vtable(vtable),
self.path, self.path,
DanglingIntPointer(..) | Ub(DanglingIntPointer(..) | InvalidVTablePointer(..)) =>
InvalidVTablePointer(..) => InvalidVTablePtr { value: format!("{vtable}") } InvalidVTablePtr { value: format!("{vtable}") }
); );
// FIXME: check if the type/trait match what ty::Dynamic says? // FIXME: check if the type/trait match what ty::Dynamic says?
} }
@ -356,7 +366,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValidityVisitor<'rt, 'mir, '
let size_and_align = try_validation!( let size_and_align = try_validation!(
self.ecx.size_and_align_of_mplace(&place), self.ecx.size_and_align_of_mplace(&place),
self.path, self.path,
InvalidMeta(msg) => match msg { Ub(InvalidMeta(msg)) => match msg {
InvalidMetaKind::SliceTooBig => InvalidMetaSliceTooLarge { ptr_kind }, InvalidMetaKind::SliceTooBig => InvalidMetaSliceTooLarge { ptr_kind },
InvalidMetaKind::TooBig => InvalidMetaTooLarge { ptr_kind }, InvalidMetaKind::TooBig => InvalidMetaTooLarge { ptr_kind },
} }
@ -375,23 +385,23 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValidityVisitor<'rt, 'mir, '
CheckInAllocMsg::InboundsTest, // will anyway be replaced by validity message CheckInAllocMsg::InboundsTest, // will anyway be replaced by validity message
), ),
self.path, self.path,
AlignmentCheckFailed { required, has } => UnalignedPtr { Ub(AlignmentCheckFailed { required, has }) => UnalignedPtr {
ptr_kind, ptr_kind,
required_bytes: required.bytes(), required_bytes: required.bytes(),
found_bytes: has.bytes() found_bytes: has.bytes()
}, },
DanglingIntPointer(0, _) => NullPtr { ptr_kind }, Ub(DanglingIntPointer(0, _)) => NullPtr { ptr_kind },
DanglingIntPointer(i, _) => DanglingPtrNoProvenance { Ub(DanglingIntPointer(i, _)) => DanglingPtrNoProvenance {
ptr_kind, ptr_kind,
// FIXME this says "null pointer" when null but we need translate // FIXME this says "null pointer" when null but we need translate
pointer: format!("{}", Pointer::<Option<AllocId>>::from_addr_invalid(i)) pointer: format!("{}", Pointer::<Option<AllocId>>::from_addr_invalid(*i))
}, },
PointerOutOfBounds { .. } => DanglingPtrOutOfBounds { Ub(PointerOutOfBounds { .. }) => DanglingPtrOutOfBounds {
ptr_kind ptr_kind
}, },
// This cannot happen during const-eval (because interning already detects // This cannot happen during const-eval (because interning already detects
// dangling pointers), but it can happen in Miri. // dangling pointers), but it can happen in Miri.
PointerUseAfterFree(..) => DanglingPtrUseAfterFree { Ub(PointerUseAfterFree(..)) => DanglingPtrUseAfterFree {
ptr_kind, ptr_kind,
}, },
); );
@ -477,7 +487,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValidityVisitor<'rt, 'mir, '
try_validation!( try_validation!(
value.to_bool(), value.to_bool(),
self.path, self.path,
InvalidBool(..) => ValidationErrorKind::InvalidBool { Ub(InvalidBool(..)) => ValidationErrorKind::InvalidBool {
value: format!("{value:x}"), value: format!("{value:x}"),
} }
); );
@ -488,7 +498,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValidityVisitor<'rt, 'mir, '
try_validation!( try_validation!(
value.to_char(), value.to_char(),
self.path, self.path,
InvalidChar(..) => ValidationErrorKind::InvalidChar { Ub(InvalidChar(..)) => ValidationErrorKind::InvalidChar {
value: format!("{value:x}"), value: format!("{value:x}"),
} }
); );
@ -497,7 +507,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValidityVisitor<'rt, 'mir, '
ty::Float(_) | ty::Int(_) | ty::Uint(_) => { ty::Float(_) | ty::Int(_) | ty::Uint(_) => {
// NOTE: Keep this in sync with the array optimization for int/float // NOTE: Keep this in sync with the array optimization for int/float
// types below! // types below!
let value = self.read_scalar( self.read_scalar(
value, value,
if matches!(ty.kind(), ty::Float(..)) { if matches!(ty.kind(), ty::Float(..)) {
ExpectedKind::Float ExpectedKind::Float
@ -505,14 +515,6 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValidityVisitor<'rt, 'mir, '
ExpectedKind::Int ExpectedKind::Int
}, },
)?; )?;
// As a special exception we *do* match on a `Scalar` here, since we truly want
// to know its underlying representation (and *not* cast it to an integer).
if matches!(value, Scalar::Ptr(..)) {
throw_validation_failure!(
self.path,
ExpectedNonPtr { value: format!("{value:x}") }
)
}
Ok(true) Ok(true)
} }
ty::RawPtr(..) => { ty::RawPtr(..) => {
@ -546,10 +548,8 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValidityVisitor<'rt, 'mir, '
let _fn = try_validation!( let _fn = try_validation!(
self.ecx.get_ptr_fn(ptr), self.ecx.get_ptr_fn(ptr),
self.path, self.path,
DanglingIntPointer(..) | Ub(DanglingIntPointer(..) | InvalidFunctionPointer(..)) =>
InvalidFunctionPointer(..) => InvalidFnPtr { InvalidFnPtr { value: format!("{ptr}") },
value: format!("{ptr}"),
},
); );
// FIXME: Check if the signature matches // FIXME: Check if the signature matches
} else { } else {
@ -657,11 +657,12 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValueVisitor<'mir, 'tcx, M>
Ok(try_validation!( Ok(try_validation!(
this.ecx.read_discriminant(op), this.ecx.read_discriminant(op),
this.path, this.path,
InvalidTag(val) => InvalidEnumTag { Ub(InvalidTag(val)) => InvalidEnumTag {
value: format!("{val:x}"), value: format!("{val:x}"),
}, },
UninhabitedEnumVariantRead(_) => UninhabitedEnumTag, Ub(UninhabitedEnumVariantRead(_)) => UninhabitedEnumVariant,
InvalidUninitBytes(None) => UninitEnumTag, // Uninit / bad provenance are not possible since the field was already previously
// checked at its integer type.
)) ))
}) })
} }
@ -740,7 +741,8 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValueVisitor<'mir, 'tcx, M>
try_validation!( try_validation!(
self.ecx.read_bytes_ptr_strip_provenance(mplace.ptr, Size::from_bytes(len)), self.ecx.read_bytes_ptr_strip_provenance(mplace.ptr, Size::from_bytes(len)),
self.path, self.path,
InvalidUninitBytes(..) => { UninitStr }, Ub(InvalidUninitBytes(..)) => Uninit { expected: ExpectedKind::Str },
Unsup(ReadPointerAsInt(_)) => PointerAsInt { expected: ExpectedKind::Str }
); );
} }
ty::Array(tys, ..) | ty::Slice(tys) ty::Array(tys, ..) | ty::Slice(tys)
@ -752,6 +754,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValueVisitor<'mir, 'tcx, M>
if matches!(tys.kind(), ty::Int(..) | ty::Uint(..) | ty::Float(..)) if matches!(tys.kind(), ty::Int(..) | ty::Uint(..) | ty::Float(..))
=> =>
{ {
let expected = if tys.is_integral() { ExpectedKind::Int } else { ExpectedKind::Float };
// Optimized handling for arrays of integer/float type. // Optimized handling for arrays of integer/float type.
// This is the length of the array/slice. // This is the length of the array/slice.
@ -770,7 +773,7 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValueVisitor<'mir, 'tcx, M>
Left(mplace) => mplace, Left(mplace) => mplace,
Right(imm) => match *imm { Right(imm) => match *imm {
Immediate::Uninit => Immediate::Uninit =>
throw_validation_failure!(self.path, UninitVal), throw_validation_failure!(self.path, Uninit { expected }),
Immediate::Scalar(..) | Immediate::ScalarPair(..) => Immediate::Scalar(..) | Immediate::ScalarPair(..) =>
bug!("arrays/slices can never have Scalar/ScalarPair layout"), bug!("arrays/slices can never have Scalar/ScalarPair layout"),
} }
@ -796,17 +799,21 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValueVisitor<'mir, 'tcx, M>
// For some errors we might be able to provide extra information. // For some errors we might be able to provide extra information.
// (This custom logic does not fit the `try_validation!` macro.) // (This custom logic does not fit the `try_validation!` macro.)
match err.kind() { match err.kind() {
err_ub!(InvalidUninitBytes(Some((_alloc_id, access)))) => { Ub(InvalidUninitBytes(Some((_alloc_id, access)))) | Unsup(ReadPointerAsInt(Some((_alloc_id, access)))) => {
// Some byte was uninitialized, determine which // Some byte was uninitialized, determine which
// element that byte belongs to so we can // element that byte belongs to so we can
// provide an index. // provide an index.
let i = usize::try_from( let i = usize::try_from(
access.uninit.start.bytes() / layout.size.bytes(), access.bad.start.bytes() / layout.size.bytes(),
) )
.unwrap(); .unwrap();
self.path.push(PathElem::ArrayElem(i)); self.path.push(PathElem::ArrayElem(i));
throw_validation_failure!(self.path, UninitVal) if matches!(err.kind(), Ub(InvalidUninitBytes(_))) {
throw_validation_failure!(self.path, Uninit { expected })
} else {
throw_validation_failure!(self.path, PointerAsInt { expected })
}
} }
// Propagate upwards (that will also check for unexpected errors). // Propagate upwards (that will also check for unexpected errors).
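Editor's note (not part of the diff): the index computation in the hunk above maps the first bad byte back to an array element. A tiny worked example with made-up numbers standing in for `access.bad.start.bytes()` and `layout.size.bytes()`:

```rust
fn main() {
    // Hypothetical values: 4-byte elements, first bad byte at offset 10.
    let elem_size: u64 = 4; // stand-in for layout.size.bytes()
    let bad_start: u64 = 10; // stand-in for access.bad.start.bytes()
    // Integer division attributes the error to element 2 (bytes 8..12).
    let i = usize::try_from(bad_start / elem_size).unwrap();
    assert_eq!(i, 2);
}
```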
@ -892,17 +899,22 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
// Run it. // Run it.
match visitor.visit_value(&op) { match visitor.visit_value(&op) {
Ok(()) => Ok(()), Ok(()) => Ok(()),
// Pass through validation failures. // Pass through validation failures and "invalid program" issues.
Err(err) if matches!(err.kind(), err_ub!(Validation { .. })) => Err(err), Err(err)
// Complain about any other kind of UB error -- those are bad because we'd like to if matches!(
err.kind(),
err_ub!(ValidationError { .. }) | InterpError::InvalidProgram(_)
) =>
{
Err(err)
}
// Complain about any other kind of error -- those are bad because we'd like to
// report them in a way that shows *where* in the value the issue lies. // report them in a way that shows *where* in the value the issue lies.
Err(err) if matches!(err.kind(), InterpError::UndefinedBehavior(_)) => { Err(err) => {
let (err, backtrace) = err.into_parts(); let (err, backtrace) = err.into_parts();
backtrace.print_backtrace(); backtrace.print_backtrace();
bug!("Unexpected Undefined Behavior error during validation: {err:?}"); bug!("Unexpected Undefined Behavior error during validation: {err:?}");
} }
// Pass through everything else.
Err(err) => Err(err),
} }
} }
View File
@ -37,6 +37,7 @@
#![allow(rustc::potential_query_instability)] #![allow(rustc::potential_query_instability)]
#![deny(rustc::untranslatable_diagnostic)] #![deny(rustc::untranslatable_diagnostic)]
#![deny(rustc::diagnostic_outside_of_impl)] #![deny(rustc::diagnostic_outside_of_impl)]
#![cfg_attr(not(bootstrap), allow(internal_features))]
#![deny(unsafe_op_in_unsafe_fn)] #![deny(unsafe_op_in_unsafe_fn)]
#[macro_use] #[macro_use]
View File
@ -4,6 +4,7 @@ Erroneous code example:
```compile_fail,E0092 ```compile_fail,E0092
#![feature(intrinsics)] #![feature(intrinsics)]
#![allow(internal_features)]
extern "rust-intrinsic" { extern "rust-intrinsic" {
fn atomic_foo(); // error: unrecognized atomic operation fn atomic_foo(); // error: unrecognized atomic operation
@ -17,6 +18,7 @@ functions are defined in `compiler/rustc_codegen_llvm/src/intrinsic.rs` and in
``` ```
#![feature(intrinsics)] #![feature(intrinsics)]
#![allow(internal_features)]
extern "rust-intrinsic" { extern "rust-intrinsic" {
fn atomic_fence_seqcst(); // ok! fn atomic_fence_seqcst(); // ok!
View File
@ -4,6 +4,7 @@ Erroneous code example:
```compile_fail,E0093 ```compile_fail,E0093
#![feature(intrinsics)] #![feature(intrinsics)]
#![allow(internal_features)]
extern "rust-intrinsic" { extern "rust-intrinsic" {
fn foo(); // error: unrecognized intrinsic function: `foo` fn foo(); // error: unrecognized intrinsic function: `foo`
@ -22,6 +23,7 @@ functions are defined in `compiler/rustc_codegen_llvm/src/intrinsic.rs` and in
``` ```
#![feature(intrinsics)] #![feature(intrinsics)]
#![allow(internal_features)]
extern "rust-intrinsic" { extern "rust-intrinsic" {
fn atomic_fence_seqcst(); // ok! fn atomic_fence_seqcst(); // ok!
View File
@ -4,6 +4,7 @@ Erroneous code example:
```compile_fail,E0094 ```compile_fail,E0094
#![feature(intrinsics)] #![feature(intrinsics)]
#![allow(internal_features)]
extern "rust-intrinsic" { extern "rust-intrinsic" {
#[rustc_safe_intrinsic] #[rustc_safe_intrinsic]
@ -18,6 +19,7 @@ Example:
``` ```
#![feature(intrinsics)] #![feature(intrinsics)]
#![allow(internal_features)]
extern "rust-intrinsic" { extern "rust-intrinsic" {
#[rustc_safe_intrinsic] #[rustc_safe_intrinsic]
View File
@ -8,6 +8,7 @@ Erroneous code example:
```compile_fail ```compile_fail
// NOTE: this feature is perma-unstable and should *only* be used for // NOTE: this feature is perma-unstable and should *only* be used for
// testing purposes. // testing purposes.
#![allow(internal_features)]
#![feature(rustc_attrs)] #![feature(rustc_attrs)]
#[rustc_variance] #[rustc_variance]
View File
@ -5,6 +5,7 @@ used. Erroneous code examples:
```compile_fail ```compile_fail
#![feature(intrinsics)] #![feature(intrinsics)]
#![allow(internal_features)]
extern "rust-intrinsic" { extern "rust-intrinsic" {
#[rustc_safe_intrinsic] #[rustc_safe_intrinsic]
@ -41,6 +42,7 @@ For the first code example, please check the function definition. Example:
``` ```
#![feature(intrinsics)] #![feature(intrinsics)]
#![allow(internal_features)]
extern "rust-intrinsic" { extern "rust-intrinsic" {
#[rustc_safe_intrinsic] #[rustc_safe_intrinsic]
View File
@ -5,6 +5,7 @@ compiled:
```compile_fail,E0230 ```compile_fail,E0230
#![feature(rustc_attrs)] #![feature(rustc_attrs)]
#![allow(internal_features)]
#[rustc_on_unimplemented = "error on `{Self}` with params `<{A},{B}>`"] // error #[rustc_on_unimplemented = "error on `{Self}` with params `<{A},{B}>`"] // error
trait BadAnnotation<A> {} trait BadAnnotation<A> {}
View File
@ -5,6 +5,7 @@ compiled:
```compile_fail,E0231 ```compile_fail,E0231
#![feature(rustc_attrs)] #![feature(rustc_attrs)]
#![allow(internal_features)]
#[rustc_on_unimplemented = "error on `{Self}` with params `<{A},{}>`"] // error! #[rustc_on_unimplemented = "error on `{Self}` with params `<{A},{}>`"] // error!
trait BadAnnotation<A> {} trait BadAnnotation<A> {}
View File
@ -5,6 +5,7 @@ compiled:
```compile_fail,E0232 ```compile_fail,E0232
#![feature(rustc_attrs)] #![feature(rustc_attrs)]
#![allow(internal_features)]
#[rustc_on_unimplemented(lorem="")] // error! #[rustc_on_unimplemented(lorem="")] // error!
trait BadAnnotation {} trait BadAnnotation {}
View File
@ -4,6 +4,7 @@ Erroneous code example:
```compile_fail,E0264 ```compile_fail,E0264
#![feature(lang_items)] #![feature(lang_items)]
#![allow(internal_features)]
extern "C" { extern "C" {
#[lang = "cake"] // error: unknown external lang item: `cake` #[lang = "cake"] // error: unknown external lang item: `cake`
@ -16,6 +17,7 @@ A list of available external lang items is available in
``` ```
#![feature(lang_items)] #![feature(lang_items)]
#![allow(internal_features)]
extern "C" { extern "C" {
#[lang = "panic_impl"] // ok! #[lang = "panic_impl"] // ok!
View File
@ -16,7 +16,7 @@ extern "platform-intrinsic" {
The `simd_shuffle` function needs the length of the array passed as The `simd_shuffle` function needs the length of the array passed as
last parameter in its name. Example: last parameter in its name. Example:
``` ```ignore (no longer compiles)
#![feature(platform_intrinsics)] #![feature(platform_intrinsics)]
extern "platform-intrinsic" { extern "platform-intrinsic" {
View File
@ -4,6 +4,7 @@ Erroneous code example:
```compile_fail,E0539 ```compile_fail,E0539
#![feature(staged_api)] #![feature(staged_api)]
#![allow(internal_features)]
#![stable(since = "1.0.0", feature = "test")] #![stable(since = "1.0.0", feature = "test")]
#[deprecated(note)] // error! #[deprecated(note)] // error!
@ -28,6 +29,7 @@ To fix these issues you need to give required key-value pairs.
``` ```
#![feature(staged_api)] #![feature(staged_api)]
#![allow(internal_features)]
#![stable(since = "1.0.0", feature = "test")] #![stable(since = "1.0.0", feature = "test")]
#[deprecated(since = "1.39.0", note = "reason")] // ok! #[deprecated(since = "1.39.0", note = "reason")] // ok!
View File
@ -4,6 +4,7 @@ Erroneous code example:
```compile_fail,E0542 ```compile_fail,E0542
#![feature(staged_api)] #![feature(staged_api)]
#![allow(internal_features)]
#![stable(since = "1.0.0", feature = "test")] #![stable(since = "1.0.0", feature = "test")]
#[stable(feature = "_stable_fn")] // invalid #[stable(feature = "_stable_fn")] // invalid
@ -23,6 +24,7 @@ To fix this issue, you need to provide the `since` field. Example:
``` ```
#![feature(staged_api)] #![feature(staged_api)]
#![allow(internal_features)]
#![stable(since = "1.0.0", feature = "test")] #![stable(since = "1.0.0", feature = "test")]
#[stable(feature = "_stable_fn", since = "1.0.0")] // ok! #[stable(feature = "_stable_fn", since = "1.0.0")] // ok!
View File
@ -4,6 +4,7 @@ Erroneous code example:
```compile_fail,E0543 ```compile_fail,E0543
#![feature(staged_api)] #![feature(staged_api)]
#![allow(internal_features)]
#![stable(since = "1.0.0", feature = "test")] #![stable(since = "1.0.0", feature = "test")]
#[stable(since = "0.1.0", feature = "_deprecated_fn")] #[stable(since = "0.1.0", feature = "_deprecated_fn")]
@ -17,6 +18,7 @@ To fix this issue, you need to provide the `note` field. Example:
``` ```
#![feature(staged_api)] #![feature(staged_api)]
#![allow(internal_features)]
#![stable(since = "1.0.0", feature = "test")] #![stable(since = "1.0.0", feature = "test")]
#[stable(since = "0.1.0", feature = "_deprecated_fn")] #[stable(since = "0.1.0", feature = "_deprecated_fn")]
View File
@ -4,6 +4,7 @@ Erroneous code example:
```compile_fail,E0544 ```compile_fail,E0544
#![feature(staged_api)] #![feature(staged_api)]
#![allow(internal_features)]
#![stable(since = "1.0.0", feature = "rust1")] #![stable(since = "1.0.0", feature = "rust1")]
#[stable(feature = "rust1", since = "1.0.0")] #[stable(feature = "rust1", since = "1.0.0")]
@ -15,6 +16,7 @@ To fix this issue, ensure that each item has at most one stability attribute.
``` ```
#![feature(staged_api)] #![feature(staged_api)]
#![allow(internal_features)]
#![stable(since = "1.0.0", feature = "rust1")] #![stable(since = "1.0.0", feature = "rust1")]
#[stable(feature = "test", since = "2.0.0")] // ok! #[stable(feature = "test", since = "2.0.0")] // ok!
View File
@ -4,6 +4,7 @@ Erroneous code example:
```compile_fail,E0545 ```compile_fail,E0545
#![feature(staged_api)] #![feature(staged_api)]
#![allow(internal_features)]
#![stable(since = "1.0.0", feature = "test")] #![stable(since = "1.0.0", feature = "test")]
#[unstable(feature = "_unstable_fn", issue = "0")] // invalid #[unstable(feature = "_unstable_fn", issue = "0")] // invalid
@ -18,6 +19,7 @@ Example:
``` ```
#![feature(staged_api)] #![feature(staged_api)]
#![allow(internal_features)]
#![stable(since = "1.0.0", feature = "test")] #![stable(since = "1.0.0", feature = "test")]
#[unstable(feature = "_unstable_fn", issue = "none")] // ok! #[unstable(feature = "_unstable_fn", issue = "none")] // ok!
View File
@ -4,6 +4,7 @@ Erroneous code example:
```compile_fail,E0546 ```compile_fail,E0546
#![feature(staged_api)] #![feature(staged_api)]
#![allow(internal_features)]
#![stable(since = "1.0.0", feature = "test")] #![stable(since = "1.0.0", feature = "test")]
#[unstable(issue = "none")] // invalid #[unstable(issue = "none")] // invalid
@ -17,6 +18,7 @@ To fix this issue, you need to provide the `feature` field. Example:
``` ```
#![feature(staged_api)] #![feature(staged_api)]
#![allow(internal_features)]
#![stable(since = "1.0.0", feature = "test")] #![stable(since = "1.0.0", feature = "test")]
#[unstable(feature = "unstable_fn", issue = "none")] // ok! #[unstable(feature = "unstable_fn", issue = "none")] // ok!
View File
@ -4,6 +4,7 @@ Erroneous code example:
```compile_fail,E0547 ```compile_fail,E0547
#![feature(staged_api)] #![feature(staged_api)]
#![allow(internal_features)]
#![stable(since = "1.0.0", feature = "test")] #![stable(since = "1.0.0", feature = "test")]
#[unstable(feature = "_unstable_fn")] // invalid #[unstable(feature = "_unstable_fn")] // invalid
@ -17,6 +18,7 @@ To fix this issue, you need to provide the `issue` field. Example:
``` ```
#![feature(staged_api)] #![feature(staged_api)]
#![allow(internal_features)]
#![stable(since = "1.0.0", feature = "test")] #![stable(since = "1.0.0", feature = "test")]
#[unstable(feature = "_unstable_fn", issue = "none")] // ok! #[unstable(feature = "_unstable_fn", issue = "none")] // ok!
View File
@ -5,6 +5,7 @@ Erroneous code example:
```compile_fail,E0549 ```compile_fail,E0549
#![feature(staged_api)] #![feature(staged_api)]
#![allow(internal_features)]
#![stable(since = "1.0.0", feature = "test")] #![stable(since = "1.0.0", feature = "test")]
#[deprecated( #[deprecated(
@ -19,6 +20,7 @@ Example:
``` ```
#![feature(staged_api)] #![feature(staged_api)]
#![allow(internal_features)]
#![stable(since = "1.0.0", feature = "test")] #![stable(since = "1.0.0", feature = "test")]
#[stable(since = "1.0.0", feature = "test")] #[stable(since = "1.0.0", feature = "test")]
View File
@ -4,6 +4,8 @@ Erroneous code example:
```compile_fail,E0622 ```compile_fail,E0622
#![feature(intrinsics)] #![feature(intrinsics)]
#![allow(internal_features)]
extern "rust-intrinsic" { extern "rust-intrinsic" {
pub static breakpoint: fn(); // error: intrinsic must be a function pub static breakpoint: fn(); // error: intrinsic must be a function
} }
@ -17,6 +19,8 @@ error, just declare a function. Example:
```no_run ```no_run
#![feature(intrinsics)] #![feature(intrinsics)]
#![allow(internal_features)]
extern "rust-intrinsic" { extern "rust-intrinsic" {
pub fn breakpoint(); // ok! pub fn breakpoint(); // ok!
} }
View File
@ -5,6 +5,7 @@ Erroneous code example:
```compile_fail,E0773 ```compile_fail,E0773
#![feature(decl_macro)] #![feature(decl_macro)]
#![feature(rustc_attrs)] #![feature(rustc_attrs)]
#![allow(internal_features)]
#[rustc_builtin_macro] #[rustc_builtin_macro]
pub macro test($item:item) { pub macro test($item:item) {
@ -24,6 +25,7 @@ To fix the issue, remove the duplicate declaration:
``` ```
#![feature(decl_macro)] #![feature(decl_macro)]
#![feature(rustc_attrs)] #![feature(rustc_attrs)]
#![allow(internal_features)]
#[rustc_builtin_macro] #[rustc_builtin_macro]
pub macro test($item:item) { pub macro test($item:item) {
View File
@ -10,6 +10,7 @@ Erroneous code example:
// used outside of the compiler and standard library. // used outside of the compiler and standard library.
#![feature(rustc_attrs)] #![feature(rustc_attrs)]
#![feature(staged_api)] #![feature(staged_api)]
#![allow(internal_features)]
#![unstable(feature = "foo_module", reason = "...", issue = "123")] #![unstable(feature = "foo_module", reason = "...", issue = "123")]
View File
@ -4,6 +4,7 @@
#![feature(type_alias_impl_trait)] #![feature(type_alias_impl_trait)]
#![deny(rustc::untranslatable_diagnostic)] #![deny(rustc::untranslatable_diagnostic)]
#![deny(rustc::diagnostic_outside_of_impl)] #![deny(rustc::diagnostic_outside_of_impl)]
#![cfg_attr(not(bootstrap), allow(internal_features))]
#[macro_use] #[macro_use]
extern crate tracing; extern crate tracing;
View File
@ -15,6 +15,7 @@
#![feature(box_patterns)] #![feature(box_patterns)]
#![feature(error_reporter)] #![feature(error_reporter)]
#![allow(incomplete_features)] #![allow(incomplete_features)]
#![cfg_attr(not(bootstrap), allow(internal_features))]
#[macro_use] #[macro_use]
extern crate rustc_macros; extern crate rustc_macros;
View File
@ -11,6 +11,7 @@
#![feature(try_blocks)] #![feature(try_blocks)]
#![recursion_limit = "256"] #![recursion_limit = "256"]
#![deny(rustc::untranslatable_diagnostic)] #![deny(rustc::untranslatable_diagnostic)]
#![cfg_attr(not(bootstrap), allow(internal_features))]
#[macro_use] #[macro_use]
extern crate rustc_macros; extern crate rustc_macros;
View File
@ -2,6 +2,7 @@ use crate::expand::{AstFragment, AstFragmentKind};
use rustc_ast as ast; use rustc_ast as ast;
use rustc_ast::mut_visit::*; use rustc_ast::mut_visit::*;
use rustc_ast::ptr::P; use rustc_ast::ptr::P;
use rustc_ast::token::Delimiter;
use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::fx::FxHashMap;
use rustc_span::source_map::DUMMY_SP; use rustc_span::source_map::DUMMY_SP;
use rustc_span::symbol::Ident; use rustc_span::symbol::Ident;
@ -18,7 +19,7 @@ pub fn placeholder(
path: ast::Path { span: DUMMY_SP, segments: ThinVec::new(), tokens: None }, path: ast::Path { span: DUMMY_SP, segments: ThinVec::new(), tokens: None },
args: P(ast::DelimArgs { args: P(ast::DelimArgs {
dspan: ast::tokenstream::DelimSpan::dummy(), dspan: ast::tokenstream::DelimSpan::dummy(),
delim: ast::MacDelimiter::Parenthesis, delim: Delimiter::Parenthesis,
tokens: ast::tokenstream::TokenStream::new(Vec::new()), tokens: ast::tokenstream::TokenStream::new(Vec::new()),
}), }),
View File

@ -16,12 +16,22 @@ macro_rules! set {
}}; }};
} }
#[derive(PartialEq)]
enum FeatureStatus {
Default,
Incomplete,
Internal,
}
macro_rules! declare_features { macro_rules! declare_features {
(__status_to_bool active) => { (__status_to_enum active) => {
false FeatureStatus::Default
}; };
(__status_to_bool incomplete) => { (__status_to_enum incomplete) => {
true FeatureStatus::Incomplete
};
(__status_to_enum internal) => {
FeatureStatus::Internal
}; };
($( ($(
$(#[doc = $doc:tt])* ($status:ident, $feature:ident, $ver:expr, $issue:expr, $edition:expr), $(#[doc = $doc:tt])* ($status:ident, $feature:ident, $ver:expr, $issue:expr, $edition:expr),
@ -83,7 +93,7 @@ macro_rules! declare_features {
pub fn incomplete(&self, feature: Symbol) -> bool { pub fn incomplete(&self, feature: Symbol) -> bool {
match feature { match feature {
$( $(
sym::$feature => declare_features!(__status_to_bool $status), sym::$feature => declare_features!(__status_to_enum $status) == FeatureStatus::Incomplete,
)* )*
// accepted and removed features aren't in this file but are never incomplete // accepted and removed features aren't in this file but are never incomplete
_ if self.declared_lang_features.iter().any(|f| f.0 == feature) => false, _ if self.declared_lang_features.iter().any(|f| f.0 == feature) => false,
@ -91,6 +101,22 @@ macro_rules! declare_features {
_ => panic!("`{}` was not listed in `declare_features`", feature), _ => panic!("`{}` was not listed in `declare_features`", feature),
} }
} }
/// Some features are internal to the compiler and standard library and should not
/// be used in normal projects. We warn the user about these
/// to alert them.
pub fn internal(&self, feature: Symbol) -> bool {
match feature {
$(
sym::$feature => declare_features!(__status_to_enum $status) == FeatureStatus::Internal,
)*
// accepted and removed features aren't in this file but are never internal
// (a removed feature might have been internal, but it doesn't matter anymore)
_ if self.declared_lang_features.iter().any(|f| f.0 == feature) => false,
_ if self.declared_lib_features.iter().any(|f| f.0 == feature) => false,
_ => panic!("`{}` was not listed in `declare_features`", feature),
}
}
} }
}; };
} }
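Editor's note (not part of the diff): a minimal sketch of the mechanism the hunk above switches to. The macro and names here are hypothetical stand-ins for `__status_to_enum`, not the real `declare_features!`; status identifiers are mapped to an enum at expansion time, and the derived `PartialEq` is what makes the `== FeatureStatus::Incomplete` / `== FeatureStatus::Internal` comparisons in the generated methods work.

```rust
#[derive(PartialEq)]
enum FeatureStatus {
    Default,
    Incomplete,
    Internal,
}

// Hypothetical stand-in for the `__status_to_enum` arms.
macro_rules! status_to_enum {
    (active) => { FeatureStatus::Default };
    (incomplete) => { FeatureStatus::Incomplete };
    (internal) => { FeatureStatus::Internal };
}

fn main() {
    // The generated `incomplete()`/`internal()` methods boil down to comparisons like these.
    assert!(status_to_enum!(incomplete) == FeatureStatus::Incomplete);
    assert!(status_to_enum!(internal) == FeatureStatus::Internal);
    assert!(status_to_enum!(active) != FeatureStatus::Internal);
}
```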
@ -137,29 +163,29 @@ declare_features! (
/// Allows using the `vectorcall` ABI. /// Allows using the `vectorcall` ABI.
(active, abi_vectorcall, "1.7.0", None, None), (active, abi_vectorcall, "1.7.0", None, None),
/// Allows using `#![needs_allocator]`, an implementation detail of `#[global_allocator]`. /// Allows using `#![needs_allocator]`, an implementation detail of `#[global_allocator]`.
(active, allocator_internals, "1.20.0", None, None), (internal, allocator_internals, "1.20.0", None, None),
/// Allows using `#[allow_internal_unsafe]`. This is an /// Allows using `#[allow_internal_unsafe]`. This is an
/// attribute on `macro_rules!` and can't use the attribute handling /// attribute on `macro_rules!` and can't use the attribute handling
/// below (it has to be checked before expansion possibly makes /// below (it has to be checked before expansion possibly makes
/// macros disappear). /// macros disappear).
(active, allow_internal_unsafe, "1.0.0", None, None), (internal, allow_internal_unsafe, "1.0.0", None, None),
/// Allows using `#[allow_internal_unstable]`. This is an /// Allows using `#[allow_internal_unstable]`. This is an
/// attribute on `macro_rules!` and can't use the attribute handling /// attribute on `macro_rules!` and can't use the attribute handling
/// below (it has to be checked before expansion possibly makes /// below (it has to be checked before expansion possibly makes
/// macros disappear). /// macros disappear).
(active, allow_internal_unstable, "1.0.0", None, None), (internal, allow_internal_unstable, "1.0.0", None, None),
/// Allows using anonymous lifetimes in argument-position impl-trait. /// Allows using anonymous lifetimes in argument-position impl-trait.
(active, anonymous_lifetime_in_impl_trait, "1.63.0", None, None), (active, anonymous_lifetime_in_impl_trait, "1.63.0", None, None),
/// Allows identifying the `compiler_builtins` crate. /// Allows identifying the `compiler_builtins` crate.
(active, compiler_builtins, "1.13.0", None, None), (internal, compiler_builtins, "1.13.0", None, None),
/// Allows writing custom MIR /// Allows writing custom MIR
(active, custom_mir, "1.65.0", None, None), (internal, custom_mir, "1.65.0", None, None),
/// Outputs useful `assert!` messages /// Outputs useful `assert!` messages
(active, generic_assert, "1.63.0", None, None), (active, generic_assert, "1.63.0", None, None),
/// Allows using the `rust-intrinsic`'s "ABI". /// Allows using the `rust-intrinsic`'s "ABI".
(active, intrinsics, "1.0.0", None, None), (internal, intrinsics, "1.0.0", None, None),
/// Allows using `#[lang = ".."]` attribute for linking items to special compiler logic. /// Allows using `#[lang = ".."]` attribute for linking items to special compiler logic.
(active, lang_items, "1.0.0", None, None), (internal, lang_items, "1.0.0", None, None),
/// Allows `#[link(..., cfg(..))]`; perma-unstable per #37406 /// Allows `#[link(..., cfg(..))]`; perma-unstable per #37406
(active, link_cfg, "1.14.0", None, None), (active, link_cfg, "1.14.0", None, None),
/// Allows the `multiple_supertrait_upcastable` lint. /// Allows the `multiple_supertrait_upcastable` lint.
@ -167,22 +193,22 @@ declare_features! (
/// Allow negative trait bounds. This is an internal-only feature for testing the trait solver! /// Allow negative trait bounds. This is an internal-only feature for testing the trait solver!
(incomplete, negative_bounds, "1.71.0", None, None), (incomplete, negative_bounds, "1.71.0", None, None),
/// Allows using `#[omit_gdb_pretty_printer_section]`. /// Allows using `#[omit_gdb_pretty_printer_section]`.
(active, omit_gdb_pretty_printer_section, "1.5.0", None, None), (internal, omit_gdb_pretty_printer_section, "1.5.0", None, None),
/// Allows using `#[prelude_import]` on glob `use` items. /// Allows using `#[prelude_import]` on glob `use` items.
(active, prelude_import, "1.2.0", None, None), (internal, prelude_import, "1.2.0", None, None),
/// Used to identify crates that contain the profiler runtime. /// Used to identify crates that contain the profiler runtime.
(active, profiler_runtime, "1.18.0", None, None), (internal, profiler_runtime, "1.18.0", None, None),
/// Allows using `rustc_*` attributes (RFC 572). /// Allows using `rustc_*` attributes (RFC 572).
(active, rustc_attrs, "1.0.0", None, None), (internal, rustc_attrs, "1.0.0", None, None),
/// Allows using the `#[stable]` and `#[unstable]` attributes. /// Allows using the `#[stable]` and `#[unstable]` attributes.
(active, staged_api, "1.0.0", None, None), (internal, staged_api, "1.0.0", None, None),
/// Added for testing E0705; perma-unstable. /// Added for testing E0705; perma-unstable.
(active, test_2018_feature, "1.31.0", None, Some(Edition::Edition2018)), (internal, test_2018_feature, "1.31.0", None, Some(Edition::Edition2018)),
/// Added for testing unstable lints; perma-unstable. /// Added for testing unstable lints; perma-unstable.
(active, test_unstable_lint, "1.60.0", None, None), (internal, test_unstable_lint, "1.60.0", None, None),
/// Allows non-`unsafe` —and thus, unsound— access to `Pin` constructions. /// Allows non-`unsafe` —and thus, unsound— access to `Pin` constructions.
/// Marked `incomplete` since perma-unstable and unsound. /// Marked `internal` since perma-unstable and unsound.
(incomplete, unsafe_pin_internals, "1.60.0", None, None), (internal, unsafe_pin_internals, "1.60.0", None, None),
/// Use for stable + negative coherence and strict coherence depending on trait's /// Use for stable + negative coherence and strict coherence depending on trait's
/// rustc_strict_coherence value. /// rustc_strict_coherence value.
(active, with_negative_coherence, "1.60.0", None, None), (active, with_negative_coherence, "1.60.0", None, None),
@ -216,19 +242,19 @@ declare_features! (
/// Allows using the `#[linkage = ".."]` attribute. /// Allows using the `#[linkage = ".."]` attribute.
(active, linkage, "1.0.0", Some(29603), None), (active, linkage, "1.0.0", Some(29603), None),
/// Allows declaring with `#![needs_panic_runtime]` that a panic runtime is needed. /// Allows declaring with `#![needs_panic_runtime]` that a panic runtime is needed.
(active, needs_panic_runtime, "1.10.0", Some(32837), None), (internal, needs_panic_runtime, "1.10.0", Some(32837), None),
/// Allows using `+bundled,+whole-archive` native libs. /// Allows using `+bundled,+whole-archive` native libs.
(active, packed_bundled_libs, "1.69.0", Some(108081), None), (active, packed_bundled_libs, "1.69.0", Some(108081), None),
/// Allows using the `#![panic_runtime]` attribute. /// Allows using the `#![panic_runtime]` attribute.
(active, panic_runtime, "1.10.0", Some(32837), None), (internal, panic_runtime, "1.10.0", Some(32837), None),
/// Allows using `#[rustc_allow_const_fn_unstable]`. /// Allows using `#[rustc_allow_const_fn_unstable]`.
/// This is an attribute on `const fn` for the same /// This is an attribute on `const fn` for the same
/// purpose as `#[allow_internal_unstable]`. /// purpose as `#[allow_internal_unstable]`.
(active, rustc_allow_const_fn_unstable, "1.49.0", Some(69399), None), (internal, rustc_allow_const_fn_unstable, "1.49.0", Some(69399), None),
/// Allows using compiler's own crates. /// Allows using compiler's own crates.
(active, rustc_private, "1.0.0", Some(27812), None), (active, rustc_private, "1.0.0", Some(27812), None),
/// Allows using internal rustdoc features like `doc(keyword)`. /// Allows using internal rustdoc features like `doc(keyword)`.
(active, rustdoc_internals, "1.58.0", Some(90418), None), (internal, rustdoc_internals, "1.58.0", Some(90418), None),
/// Allows using the `rustdoc::missing_doc_code_examples` lint /// Allows using the `rustdoc::missing_doc_code_examples` lint
(active, rustdoc_missing_doc_code_examples, "1.31.0", Some(101730), None), (active, rustdoc_missing_doc_code_examples, "1.31.0", Some(101730), None),
/// Allows using `#[start]` on a function indicating that it is the program entrypoint. /// Allows using `#[start]` on a function indicating that it is the program entrypoint.

View File

@ -13,6 +13,7 @@
#![recursion_limit = "256"] #![recursion_limit = "256"]
#![deny(rustc::untranslatable_diagnostic)] #![deny(rustc::untranslatable_diagnostic)]
#![deny(rustc::diagnostic_outside_of_impl)] #![deny(rustc::diagnostic_outside_of_impl)]
#![cfg_attr(not(bootstrap), allow(internal_features))]
#[macro_use] #[macro_use]
extern crate rustc_macros; extern crate rustc_macros;

View File

@ -567,20 +567,6 @@ pub fn check_platform_intrinsic_type(tcx: TyCtxt<'_>, it: &hir::ForeignItem<'_>)
| sym::simd_reduce_min_nanless | sym::simd_reduce_min_nanless
| sym::simd_reduce_max_nanless => (2, vec![param(0)], param(1)), | sym::simd_reduce_max_nanless => (2, vec![param(0)], param(1)),
sym::simd_shuffle => (3, vec![param(0), param(0), param(1)], param(2)), sym::simd_shuffle => (3, vec![param(0), param(0), param(1)], param(2)),
name if name.as_str().starts_with("simd_shuffle") => {
match name.as_str()["simd_shuffle".len()..].parse() {
Ok(n) => {
let params = vec![param(0), param(0), Ty::new_array(tcx, tcx.types.u32, n)];
(2, params, param(1))
}
Err(_) => {
let msg =
format!("unrecognized platform-specific intrinsic function: `{name}`");
tcx.sess.struct_span_err(it.span, msg).emit();
return;
}
}
}
_ => { _ => {
let msg = format!("unrecognized platform-specific intrinsic function: `{name}`"); let msg = format!("unrecognized platform-specific intrinsic function: `{name}`");
tcx.sess.struct_span_err(it.span, msg).emit(); tcx.sess.struct_span_err(it.span, msg).emit();

View File

@ -41,7 +41,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
// #55810: Type check patterns first so we get types for all bindings. // #55810: Type check patterns first so we get types for all bindings.
let scrut_span = scrut.span.find_ancestor_inside(expr.span).unwrap_or(scrut.span); let scrut_span = scrut.span.find_ancestor_inside(expr.span).unwrap_or(scrut.span);
for arm in arms { for arm in arms {
self.check_pat_top(&arm.pat, scrutinee_ty, Some(scrut_span), Some(scrut)); self.check_pat_top(&arm.pat, scrutinee_ty, Some(scrut_span), Some(scrut), None);
} }
// Now typecheck the blocks. // Now typecheck the blocks.
@ -136,15 +136,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
&cause, &cause,
Some(&arm.body), Some(&arm.body),
arm_ty, arm_ty,
Some(&mut |err| { |err| self.suggest_removing_semicolon_for_coerce(err, expr, arm_ty, prior_arm),
self.suggest_removing_semicolon_for_coerce(
err,
expr,
orig_expected,
arm_ty,
prior_arm,
)
}),
false, false,
); );
@ -181,7 +173,6 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
&self, &self,
diag: &mut Diagnostic, diag: &mut Diagnostic,
expr: &hir::Expr<'tcx>, expr: &hir::Expr<'tcx>,
expectation: Expectation<'tcx>,
arm_ty: Ty<'tcx>, arm_ty: Ty<'tcx>,
prior_arm: Option<(Option<hir::HirId>, Ty<'tcx>, Span)>, prior_arm: Option<(Option<hir::HirId>, Ty<'tcx>, Span)>,
) { ) {
@ -195,7 +186,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
let hir::ExprKind::Block(block, _) = body.value.kind else { let hir::ExprKind::Block(block, _) = body.value.kind else {
return; return;
}; };
let Some(hir::Stmt { kind: hir::StmtKind::Semi(last_expr), .. }) = let Some(hir::Stmt { kind: hir::StmtKind::Semi(last_expr), span: semi_span, .. }) =
block.innermost_block().stmts.last() block.innermost_block().stmts.last()
else { else {
return; return;
@ -212,9 +203,6 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
else { else {
return; return;
}; };
let Expectation::IsLast(stmt) = expectation else {
return;
};
let can_coerce_to_return_ty = match self.ret_coercion.as_ref() { let can_coerce_to_return_ty = match self.ret_coercion.as_ref() {
Some(ret_coercion) => { Some(ret_coercion) => {
@ -231,7 +219,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
return; return;
} }
let semi_span = expr.span.shrink_to_hi().with_hi(stmt.hi()); let semi_span = expr.span.shrink_to_hi().with_hi(semi_span.hi());
let mut ret_span: MultiSpan = semi_span.into(); let mut ret_span: MultiSpan = semi_span.into();
ret_span.push_span_label( ret_span.push_span_label(
expr.span, expr.span,
@ -279,7 +267,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
coercion.coerce_forced_unit( coercion.coerce_forced_unit(
self, self,
&cause, &cause,
&mut |err| { |err| {
if let Some((span, msg)) = &ret_reason { if let Some((span, msg)) = &ret_reason {
err.span_label(*span, msg.clone()); err.span_label(*span, msg.clone());
} else if let ExprKind::Block(block, _) = &then_expr.kind } else if let ExprKind::Block(block, _) = &then_expr.kind

View File

@ -89,7 +89,7 @@ pub(super) fn check_fn<'a, 'tcx>(
for (idx, (param_ty, param)) in inputs_fn.chain(maybe_va_list).zip(body.params).enumerate() { for (idx, (param_ty, param)) in inputs_fn.chain(maybe_va_list).zip(body.params).enumerate() {
// Check the pattern. // Check the pattern.
let ty_span = try { inputs_hir?.get(idx)?.span }; let ty_span = try { inputs_hir?.get(idx)?.span };
fcx.check_pat_top(&param.pat, param_ty, ty_span, None); fcx.check_pat_top(&param.pat, param_ty, ty_span, None, None);
// Check that argument is Sized. // Check that argument is Sized.
if !params_can_be_unsized { if !params_can_be_unsized {

View File

@ -1418,7 +1418,7 @@ impl<'tcx, 'exprs, E: AsCoercionSite> CoerceMany<'tcx, 'exprs, E> {
expression: &'tcx hir::Expr<'tcx>, expression: &'tcx hir::Expr<'tcx>,
expression_ty: Ty<'tcx>, expression_ty: Ty<'tcx>,
) { ) {
self.coerce_inner(fcx, cause, Some(expression), expression_ty, None, false) self.coerce_inner(fcx, cause, Some(expression), expression_ty, |_| {}, false)
} }
/// Indicates that one of the inputs is a "forced unit". This /// Indicates that one of the inputs is a "forced unit". This
@ -1437,7 +1437,7 @@ impl<'tcx, 'exprs, E: AsCoercionSite> CoerceMany<'tcx, 'exprs, E> {
&mut self, &mut self,
fcx: &FnCtxt<'a, 'tcx>, fcx: &FnCtxt<'a, 'tcx>,
cause: &ObligationCause<'tcx>, cause: &ObligationCause<'tcx>,
augment_error: &mut dyn FnMut(&mut Diagnostic), augment_error: impl FnOnce(&mut Diagnostic),
label_unit_as_expected: bool, label_unit_as_expected: bool,
) { ) {
self.coerce_inner( self.coerce_inner(
@ -1445,7 +1445,7 @@ impl<'tcx, 'exprs, E: AsCoercionSite> CoerceMany<'tcx, 'exprs, E> {
cause, cause,
None, None,
Ty::new_unit(fcx.tcx), Ty::new_unit(fcx.tcx),
Some(augment_error), augment_error,
label_unit_as_expected, label_unit_as_expected,
) )
} }
@ -1460,7 +1460,7 @@ impl<'tcx, 'exprs, E: AsCoercionSite> CoerceMany<'tcx, 'exprs, E> {
cause: &ObligationCause<'tcx>, cause: &ObligationCause<'tcx>,
expression: Option<&'tcx hir::Expr<'tcx>>, expression: Option<&'tcx hir::Expr<'tcx>>,
mut expression_ty: Ty<'tcx>, mut expression_ty: Ty<'tcx>,
augment_error: Option<&mut dyn FnMut(&mut Diagnostic)>, augment_error: impl FnOnce(&mut Diagnostic),
label_expression_as_expected: bool, label_expression_as_expected: bool,
) { ) {
// Incorporate whatever type inference information we have // Incorporate whatever type inference information we have
@ -1639,9 +1639,7 @@ impl<'tcx, 'exprs, E: AsCoercionSite> CoerceMany<'tcx, 'exprs, E> {
} }
} }
if let Some(augment_error) = augment_error { augment_error(&mut err);
augment_error(&mut err);
}
let is_insufficiently_polymorphic = let is_insufficiently_polymorphic =
matches!(coercion_error, TypeError::RegionsInsufficientlyPolymorphic(..)); matches!(coercion_error, TypeError::RegionsInsufficientlyPolymorphic(..));

View File

@ -21,8 +21,6 @@ pub enum Expectation<'tcx> {
/// This rvalue expression will be wrapped in `&` or `Box` and coerced /// This rvalue expression will be wrapped in `&` or `Box` and coerced
/// to `&Ty` or `Box<Ty>`, respectively. `Ty` is `[A]` or `Trait`. /// to `&Ty` or `Box<Ty>`, respectively. `Ty` is `[A]` or `Trait`.
ExpectRvalueLikeUnsized(Ty<'tcx>), ExpectRvalueLikeUnsized(Ty<'tcx>),
IsLast(Span),
} }
impl<'a, 'tcx> Expectation<'tcx> { impl<'a, 'tcx> Expectation<'tcx> {
@ -88,13 +86,12 @@ impl<'a, 'tcx> Expectation<'tcx> {
ExpectCastableToType(t) => ExpectCastableToType(fcx.resolve_vars_if_possible(t)), ExpectCastableToType(t) => ExpectCastableToType(fcx.resolve_vars_if_possible(t)),
ExpectHasType(t) => ExpectHasType(fcx.resolve_vars_if_possible(t)), ExpectHasType(t) => ExpectHasType(fcx.resolve_vars_if_possible(t)),
ExpectRvalueLikeUnsized(t) => ExpectRvalueLikeUnsized(fcx.resolve_vars_if_possible(t)), ExpectRvalueLikeUnsized(t) => ExpectRvalueLikeUnsized(fcx.resolve_vars_if_possible(t)),
IsLast(sp) => IsLast(sp),
} }
} }
pub(super) fn to_option(self, fcx: &FnCtxt<'a, 'tcx>) -> Option<Ty<'tcx>> { pub(super) fn to_option(self, fcx: &FnCtxt<'a, 'tcx>) -> Option<Ty<'tcx>> {
match self.resolve(fcx) { match self.resolve(fcx) {
NoExpectation | IsLast(_) => None, NoExpectation => None,
ExpectCastableToType(ty) | ExpectHasType(ty) | ExpectRvalueLikeUnsized(ty) => Some(ty), ExpectCastableToType(ty) | ExpectHasType(ty) | ExpectRvalueLikeUnsized(ty) => Some(ty),
} }
} }
@ -106,9 +103,7 @@ impl<'a, 'tcx> Expectation<'tcx> {
pub(super) fn only_has_type(self, fcx: &FnCtxt<'a, 'tcx>) -> Option<Ty<'tcx>> { pub(super) fn only_has_type(self, fcx: &FnCtxt<'a, 'tcx>) -> Option<Ty<'tcx>> {
match self { match self {
ExpectHasType(ty) => Some(fcx.resolve_vars_if_possible(ty)), ExpectHasType(ty) => Some(fcx.resolve_vars_if_possible(ty)),
NoExpectation | ExpectCastableToType(_) | ExpectRvalueLikeUnsized(_) | IsLast(_) => { NoExpectation | ExpectCastableToType(_) | ExpectRvalueLikeUnsized(_) => None,
None
}
} }
} }

View File

@ -60,28 +60,13 @@ use rustc_trait_selection::traits::ObligationCtxt;
use rustc_trait_selection::traits::{self, ObligationCauseCode}; use rustc_trait_selection::traits::{self, ObligationCauseCode};
impl<'a, 'tcx> FnCtxt<'a, 'tcx> { impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
fn check_expr_eq_type(&self, expr: &'tcx hir::Expr<'tcx>, expected: Ty<'tcx>) {
let ty = self.check_expr_with_hint(expr, expected);
self.demand_eqtype(expr.span, expected, ty);
}
pub fn check_expr_has_type_or_error( pub fn check_expr_has_type_or_error(
&self, &self,
expr: &'tcx hir::Expr<'tcx>, expr: &'tcx hir::Expr<'tcx>,
expected: Ty<'tcx>, expected_ty: Ty<'tcx>,
extend_err: impl FnMut(&mut Diagnostic), extend_err: impl FnOnce(&mut Diagnostic),
) -> Ty<'tcx> { ) -> Ty<'tcx> {
self.check_expr_meets_expectation_or_error(expr, ExpectHasType(expected), extend_err) let mut ty = self.check_expr_with_expectation(expr, ExpectHasType(expected_ty));
}
fn check_expr_meets_expectation_or_error(
&self,
expr: &'tcx hir::Expr<'tcx>,
expected: Expectation<'tcx>,
mut extend_err: impl FnMut(&mut Diagnostic),
) -> Ty<'tcx> {
let expected_ty = expected.to_option(&self).unwrap_or(self.tcx.types.bool);
let mut ty = self.check_expr_with_expectation(expr, expected);
// While we don't allow *arbitrary* coercions here, we *do* allow // While we don't allow *arbitrary* coercions here, we *do* allow
// coercions from ! to `expected`. // coercions from ! to `expected`.
@ -341,9 +326,10 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
} }
ExprKind::Cast(e, t) => self.check_expr_cast(e, t, expr), ExprKind::Cast(e, t) => self.check_expr_cast(e, t, expr),
ExprKind::Type(e, t) => { ExprKind::Type(e, t) => {
let ty = self.to_ty_saving_user_provided_ty(&t); let ascribed_ty = self.to_ty_saving_user_provided_ty(&t);
self.check_expr_eq_type(&e, ty); let ty = self.check_expr_with_hint(e, ascribed_ty);
ty self.demand_eqtype(e.span, ascribed_ty, ty);
ascribed_ty
} }
ExprKind::If(cond, then_expr, opt_else_expr) => { ExprKind::If(cond, then_expr, opt_else_expr) => {
self.check_then_else(cond, then_expr, opt_else_expr, expr.span, expected) self.check_then_else(cond, then_expr, opt_else_expr, expr.span, expected)
@ -666,7 +652,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
coerce.coerce_forced_unit( coerce.coerce_forced_unit(
self, self,
&cause, &cause,
&mut |mut err| { |mut err| {
self.suggest_mismatched_types_on_tail( self.suggest_mismatched_types_on_tail(
&mut err, expr, ty, e_ty, target_id, &mut err, expr, ty, e_ty, target_id,
); );
@ -762,7 +748,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
coercion.coerce_forced_unit( coercion.coerce_forced_unit(
self, self,
&cause, &cause,
&mut |db| { |db| {
let span = fn_decl.output.span(); let span = fn_decl.output.span();
if let Ok(snippet) = self.tcx.sess.source_map().span_to_snippet(span) { if let Ok(snippet) = self.tcx.sess.source_map().span_to_snippet(span) {
db.span_label( db.span_label(
@ -774,7 +760,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
true, true,
); );
} else { } else {
coercion.coerce_forced_unit(self, &cause, &mut |_| (), true); coercion.coerce_forced_unit(self, &cause, |_| (), true);
} }
} }
self.tcx.types.never self.tcx.types.never

View File

@ -1463,11 +1463,11 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
}; };
// Type check the pattern. Override if necessary to avoid knock-on errors. // Type check the pattern. Override if necessary to avoid knock-on errors.
self.check_pat_top(&decl.pat, decl_ty, ty_span, origin_expr); self.check_pat_top(&decl.pat, decl_ty, ty_span, origin_expr, Some(decl.origin));
let pat_ty = self.node_ty(decl.pat.hir_id); let pat_ty = self.node_ty(decl.pat.hir_id);
self.overwrite_local_ty_if_err(decl.hir_id, decl.pat, pat_ty); self.overwrite_local_ty_if_err(decl.hir_id, decl.pat, pat_ty);
if let Some(blk) = decl.els { if let Some(blk) = decl.origin.try_get_else() {
let previous_diverges = self.diverges.get(); let previous_diverges = self.diverges.get();
let else_ty = self.check_block_with_expected(blk, NoExpectation); let else_ty = self.check_block_with_expected(blk, NoExpectation);
let cause = self.cause(blk.span, ObligationCauseCode::LetElse); let cause = self.cause(blk.span, ObligationCauseCode::LetElse);
@ -1485,7 +1485,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
self.check_decl(local.into()); self.check_decl(local.into());
} }
pub fn check_stmt(&self, stmt: &'tcx hir::Stmt<'tcx>, is_last: bool) { pub fn check_stmt(&self, stmt: &'tcx hir::Stmt<'tcx>) {
// Don't do all the complex logic below for `DeclItem`. // Don't do all the complex logic below for `DeclItem`.
match stmt.kind { match stmt.kind {
hir::StmtKind::Item(..) => return, hir::StmtKind::Item(..) => return,
@ -1512,14 +1512,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
}); });
} }
hir::StmtKind::Semi(ref expr) => { hir::StmtKind::Semi(ref expr) => {
// All of this is equivalent to calling `check_expr`, but it is inlined out here self.check_expr(expr);
// in order to capture the fact that this `match` is the last statement in its
// function. This is done for better suggestions to remove the `;`.
let expectation = match expr.kind {
hir::ExprKind::Match(..) if is_last => IsLast(stmt.span),
_ => NoExpectation,
};
self.check_expr_with_expectation(expr, expectation);
} }
} }
@ -1570,8 +1563,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
let ctxt = BreakableCtxt { coerce: Some(coerce), may_break: false }; let ctxt = BreakableCtxt { coerce: Some(coerce), may_break: false };
let (ctxt, ()) = self.with_breakable_ctxt(blk.hir_id, ctxt, || { let (ctxt, ()) = self.with_breakable_ctxt(blk.hir_id, ctxt, || {
for (pos, s) in blk.stmts.iter().enumerate() { for s in blk.stmts {
self.check_stmt(s, blk.stmts.len() - 1 == pos); self.check_stmt(s);
} }
// check the tail expression **without** holding the // check the tail expression **without** holding the
@ -1594,9 +1587,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
&cause, &cause,
Some(tail_expr), Some(tail_expr),
tail_expr_ty, tail_expr_ty,
Some(&mut |diag: &mut Diagnostic| { |diag| {
self.suggest_block_to_brackets(diag, blk, tail_expr_ty, ty_for_diagnostic); self.suggest_block_to_brackets(diag, blk, tail_expr_ty, ty_for_diagnostic);
}), },
false, false,
); );
} else { } else {
@ -1633,7 +1626,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
coerce.coerce_forced_unit( coerce.coerce_forced_unit(
self, self,
&self.misc(sp), &self.misc(sp),
&mut |err| { |err| {
if let Some(expected_ty) = expected.only_has_type(self) { if let Some(expected_ty) = expected.only_has_type(self) {
if blk.stmts.is_empty() && blk.expr.is_none() { if blk.stmts.is_empty() && blk.expr.is_none() {
self.suggest_boxing_when_appropriate( self.suggest_boxing_when_appropriate(

View File

@ -994,14 +994,19 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
}; };
let ty = self.normalize(expr.span, ty); let ty = self.normalize(expr.span, ty);
if self.can_coerce(found, ty) { if self.can_coerce(found, ty) {
err.multipart_suggestion( if let Some(node) = self.tcx.hir().find(fn_id)
"you might have meant to return this value", && let Some(owner_node) = node.as_owner()
vec![ && let Some(span) = expr.span.find_ancestor_inside(owner_node.span())
(expr.span.shrink_to_lo(), "return ".to_string()), {
(expr.span.shrink_to_hi(), ";".to_string()), err.multipart_suggestion(
], "you might have meant to return this value",
Applicability::MaybeIncorrect, vec![
); (span.shrink_to_lo(), "return ".to_string()),
(span.shrink_to_hi(), ";".to_string()),
],
Applicability::MaybeIncorrect,
);
}
} }
} }
} }
@ -1185,10 +1190,16 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
), ),
)) ))
{ {
let mut span = expr.span;
while expr.span.eq_ctxt(span) && let Some(parent_callsite) = span.parent_callsite()
{
span = parent_callsite;
}
let sugg = if expr.precedence().order() >= PREC_POSTFIX { let sugg = if expr.precedence().order() >= PREC_POSTFIX {
vec![(expr.span.shrink_to_hi(), ".into()".to_owned())] vec![(span.shrink_to_hi(), ".into()".to_owned())]
} else { } else {
vec![(expr.span.shrink_to_lo(), "(".to_owned()), (expr.span.shrink_to_hi(), ").into()".to_owned())] vec![(span.shrink_to_lo(), "(".to_owned()), (span.shrink_to_hi(), ").into()".to_owned())]
}; };
diag.multipart_suggestion( diag.multipart_suggestion(
format!("call `Into::into` on this expression to convert `{expr_ty}` into `{expected_ty}`"), format!("call `Into::into` on this expression to convert `{expr_ty}` into `{expected_ty}`"),

View File

@ -9,6 +9,26 @@ use rustc_span::def_id::LocalDefId;
use rustc_span::Span; use rustc_span::Span;
use rustc_trait_selection::traits; use rustc_trait_selection::traits;
/// Provides context for checking patterns in declarations. More specifically, this
/// allows us to infer the array type (including its length) when the pattern is irrefutable.
/// See issue #76342.
#[derive(Debug, Copy, Clone)]
pub(super) enum DeclOrigin<'a> {
// from an `if let` expression
LetExpr,
// from `let x = ..`
LocalDecl { els: Option<&'a hir::Block<'a>> },
}
impl<'a> DeclOrigin<'a> {
pub(super) fn try_get_else(&self) -> Option<&'a hir::Block<'a>> {
match self {
Self::LocalDecl { els } => *els,
Self::LetExpr => None,
}
}
}
/// A declaration is an abstraction of [hir::Local] and [hir::Let]. /// A declaration is an abstraction of [hir::Local] and [hir::Let].
/// ///
/// It must have a hir_id, as this is how we connect gather_locals to the check functions. /// It must have a hir_id, as this is how we connect gather_locals to the check functions.
@ -18,20 +38,20 @@ pub(super) struct Declaration<'a> {
pub ty: Option<&'a hir::Ty<'a>>, pub ty: Option<&'a hir::Ty<'a>>,
pub span: Span, pub span: Span,
pub init: Option<&'a hir::Expr<'a>>, pub init: Option<&'a hir::Expr<'a>>,
pub els: Option<&'a hir::Block<'a>>, pub origin: DeclOrigin<'a>,
} }
impl<'a> From<&'a hir::Local<'a>> for Declaration<'a> { impl<'a> From<&'a hir::Local<'a>> for Declaration<'a> {
fn from(local: &'a hir::Local<'a>) -> Self { fn from(local: &'a hir::Local<'a>) -> Self {
let hir::Local { hir_id, pat, ty, span, init, els, source: _ } = *local; let hir::Local { hir_id, pat, ty, span, init, els, source: _ } = *local;
Declaration { hir_id, pat, ty, span, init, els } Declaration { hir_id, pat, ty, span, init, origin: DeclOrigin::LocalDecl { els } }
} }
} }
impl<'a> From<&'a hir::Let<'a>> for Declaration<'a> { impl<'a> From<&'a hir::Let<'a>> for Declaration<'a> {
fn from(let_expr: &'a hir::Let<'a>) -> Self { fn from(let_expr: &'a hir::Let<'a>) -> Self {
let hir::Let { hir_id, pat, ty, span, init } = *let_expr; let hir::Let { hir_id, pat, ty, span, init } = *let_expr;
Declaration { hir_id, pat, ty, span, init: Some(init), els: None } Declaration { hir_id, pat, ty, span, init: Some(init), origin: DeclOrigin::LetExpr }
} }
} }

View File

@ -1,3 +1,4 @@
use crate::gather_locals::DeclOrigin;
use crate::{errors, FnCtxt, RawTy}; use crate::{errors, FnCtxt, RawTy};
use rustc_ast as ast; use rustc_ast as ast;
use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::fx::FxHashMap;
@ -77,6 +78,13 @@ struct TopInfo<'tcx> {
span: Option<Span>, span: Option<Span>,
} }
#[derive(Copy, Clone)]
struct PatInfo<'tcx, 'a> {
binding_mode: BindingMode,
top_info: TopInfo<'tcx>,
decl_origin: Option<DeclOrigin<'a>>,
}
impl<'tcx> FnCtxt<'_, 'tcx> { impl<'tcx> FnCtxt<'_, 'tcx> {
fn pattern_cause(&self, ti: TopInfo<'tcx>, cause_span: Span) -> ObligationCause<'tcx> { fn pattern_cause(&self, ti: TopInfo<'tcx>, cause_span: Span) -> ObligationCause<'tcx> {
let code = let code =
@ -135,15 +143,17 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
/// ///
/// Otherwise, `Some(span)` represents the span of a type expression /// Otherwise, `Some(span)` represents the span of a type expression
/// which originated the `expected` type. /// which originated the `expected` type.
pub fn check_pat_top( pub(crate) fn check_pat_top(
&self, &self,
pat: &'tcx Pat<'tcx>, pat: &'tcx Pat<'tcx>,
expected: Ty<'tcx>, expected: Ty<'tcx>,
span: Option<Span>, span: Option<Span>,
origin_expr: Option<&'tcx hir::Expr<'tcx>>, origin_expr: Option<&'tcx hir::Expr<'tcx>>,
decl_origin: Option<DeclOrigin<'tcx>>,
) { ) {
let info = TopInfo { expected, origin_expr, span }; let info = TopInfo { expected, origin_expr, span };
self.check_pat(pat, expected, INITIAL_BM, info); let pat_info = PatInfo { binding_mode: INITIAL_BM, top_info: info, decl_origin };
self.check_pat(pat, expected, pat_info);
} }
/// Type check the given `pat` against the `expected` type /// Type check the given `pat` against the `expected` type
@ -151,14 +161,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
/// ///
/// Outside of this module, `check_pat_top` should always be used. /// Outside of this module, `check_pat_top` should always be used.
/// Conversely, inside this module, `check_pat_top` should never be used. /// Conversely, inside this module, `check_pat_top` should never be used.
#[instrument(level = "debug", skip(self, ti))] #[instrument(level = "debug", skip(self, pat_info))]
fn check_pat( fn check_pat(&self, pat: &'tcx Pat<'tcx>, expected: Ty<'tcx>, pat_info: PatInfo<'tcx, '_>) {
&self, let PatInfo { binding_mode: def_bm, top_info: ti, .. } = pat_info;
pat: &'tcx Pat<'tcx>,
expected: Ty<'tcx>,
def_bm: BindingMode,
ti: TopInfo<'tcx>,
) {
let path_res = match &pat.kind { let path_res = match &pat.kind {
PatKind::Path(qpath) => { PatKind::Path(qpath) => {
Some(self.resolve_ty_and_res_fully_qualified_call(qpath, pat.hir_id, pat.span)) Some(self.resolve_ty_and_res_fully_qualified_call(qpath, pat.hir_id, pat.span))
@ -167,38 +172,38 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
}; };
let adjust_mode = self.calc_adjust_mode(pat, path_res.map(|(res, ..)| res)); let adjust_mode = self.calc_adjust_mode(pat, path_res.map(|(res, ..)| res));
let (expected, def_bm) = self.calc_default_binding_mode(pat, expected, def_bm, adjust_mode); let (expected, def_bm) = self.calc_default_binding_mode(pat, expected, def_bm, adjust_mode);
let pat_info =
PatInfo { binding_mode: def_bm, top_info: ti, decl_origin: pat_info.decl_origin };
let ty = match pat.kind { let ty = match pat.kind {
PatKind::Wild => expected, PatKind::Wild => expected,
PatKind::Lit(lt) => self.check_pat_lit(pat.span, lt, expected, ti), PatKind::Lit(lt) => self.check_pat_lit(pat.span, lt, expected, ti),
PatKind::Range(lhs, rhs, _) => self.check_pat_range(pat.span, lhs, rhs, expected, ti), PatKind::Range(lhs, rhs, _) => self.check_pat_range(pat.span, lhs, rhs, expected, ti),
PatKind::Binding(ba, var_id, _, sub) => { PatKind::Binding(ba, var_id, _, sub) => {
self.check_pat_ident(pat, ba, var_id, sub, expected, def_bm, ti) self.check_pat_ident(pat, ba, var_id, sub, expected, pat_info)
} }
PatKind::TupleStruct(ref qpath, subpats, ddpos) => { PatKind::TupleStruct(ref qpath, subpats, ddpos) => {
self.check_pat_tuple_struct(pat, qpath, subpats, ddpos, expected, def_bm, ti) self.check_pat_tuple_struct(pat, qpath, subpats, ddpos, expected, pat_info)
} }
PatKind::Path(ref qpath) => { PatKind::Path(ref qpath) => {
self.check_pat_path(pat, qpath, path_res.unwrap(), expected, ti) self.check_pat_path(pat, qpath, path_res.unwrap(), expected, ti)
} }
PatKind::Struct(ref qpath, fields, has_rest_pat) => { PatKind::Struct(ref qpath, fields, has_rest_pat) => {
self.check_pat_struct(pat, qpath, fields, has_rest_pat, expected, def_bm, ti) self.check_pat_struct(pat, qpath, fields, has_rest_pat, expected, pat_info)
} }
PatKind::Or(pats) => { PatKind::Or(pats) => {
for pat in pats { for pat in pats {
self.check_pat(pat, expected, def_bm, ti); self.check_pat(pat, expected, pat_info);
} }
expected expected
} }
PatKind::Tuple(elements, ddpos) => { PatKind::Tuple(elements, ddpos) => {
self.check_pat_tuple(pat.span, elements, ddpos, expected, def_bm, ti) self.check_pat_tuple(pat.span, elements, ddpos, expected, pat_info)
}
PatKind::Box(inner) => self.check_pat_box(pat.span, inner, expected, def_bm, ti),
PatKind::Ref(inner, mutbl) => {
self.check_pat_ref(pat, inner, mutbl, expected, def_bm, ti)
} }
PatKind::Box(inner) => self.check_pat_box(pat.span, inner, expected, pat_info),
PatKind::Ref(inner, mutbl) => self.check_pat_ref(pat, inner, mutbl, expected, pat_info),
PatKind::Slice(before, slice, after) => { PatKind::Slice(before, slice, after) => {
self.check_pat_slice(pat.span, before, slice, after, expected, def_bm, ti) self.check_pat_slice(pat.span, before, slice, after, expected, pat_info)
} }
}; };
@ -580,9 +585,10 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
var_id: HirId, var_id: HirId,
sub: Option<&'tcx Pat<'tcx>>, sub: Option<&'tcx Pat<'tcx>>,
expected: Ty<'tcx>, expected: Ty<'tcx>,
def_bm: BindingMode, pat_info: PatInfo<'tcx, '_>,
ti: TopInfo<'tcx>,
) -> Ty<'tcx> { ) -> Ty<'tcx> {
let PatInfo { binding_mode: def_bm, top_info: ti, .. } = pat_info;
// Determine the binding mode... // Determine the binding mode...
let bm = match ba { let bm = match ba {
hir::BindingAnnotation::NONE => def_bm, hir::BindingAnnotation::NONE => def_bm,
@ -620,7 +626,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
} }
if let Some(p) = sub { if let Some(p) = sub {
self.check_pat(p, expected, def_bm, ti); self.check_pat(p, expected, pat_info);
} }
local_ty local_ty
@ -843,8 +849,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
fields: &'tcx [hir::PatField<'tcx>], fields: &'tcx [hir::PatField<'tcx>],
has_rest_pat: bool, has_rest_pat: bool,
expected: Ty<'tcx>, expected: Ty<'tcx>,
def_bm: BindingMode, pat_info: PatInfo<'tcx, '_>,
ti: TopInfo<'tcx>,
) -> Ty<'tcx> { ) -> Ty<'tcx> {
// Resolve the path and check the definition for errors. // Resolve the path and check the definition for errors.
let (variant, pat_ty) = match self.check_struct_path(qpath, pat.hir_id) { let (variant, pat_ty) = match self.check_struct_path(qpath, pat.hir_id) {
@ -852,18 +857,17 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
Err(guar) => { Err(guar) => {
let err = Ty::new_error(self.tcx, guar); let err = Ty::new_error(self.tcx, guar);
for field in fields { for field in fields {
let ti = ti; self.check_pat(field.pat, err, pat_info);
self.check_pat(field.pat, err, def_bm, ti);
} }
return err; return err;
} }
}; };
// Type-check the path. // Type-check the path.
self.demand_eqtype_pat(pat.span, expected, pat_ty, ti); self.demand_eqtype_pat(pat.span, expected, pat_ty, pat_info.top_info);
// Type-check subpatterns. // Type-check subpatterns.
if self.check_struct_pat_fields(pat_ty, &pat, variant, fields, has_rest_pat, def_bm, ti) { if self.check_struct_pat_fields(pat_ty, &pat, variant, fields, has_rest_pat, pat_info) {
pat_ty pat_ty
} else { } else {
Ty::new_misc_error(self.tcx) Ty::new_misc_error(self.tcx)
@ -1029,13 +1033,17 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
subpats: &'tcx [Pat<'tcx>], subpats: &'tcx [Pat<'tcx>],
ddpos: hir::DotDotPos, ddpos: hir::DotDotPos,
expected: Ty<'tcx>, expected: Ty<'tcx>,
def_bm: BindingMode, pat_info: PatInfo<'tcx, '_>,
ti: TopInfo<'tcx>,
) -> Ty<'tcx> { ) -> Ty<'tcx> {
let PatInfo { binding_mode: def_bm, top_info: ti, decl_origin } = pat_info;
let tcx = self.tcx; let tcx = self.tcx;
let on_error = |e| { let on_error = |e| {
for pat in subpats { for pat in subpats {
self.check_pat(pat, Ty::new_error(tcx, e), def_bm, ti); self.check_pat(
pat,
Ty::new_error(tcx, e),
PatInfo { binding_mode: def_bm, top_info: ti, decl_origin },
);
} }
}; };
let report_unexpected_res = |res: Res| { let report_unexpected_res = |res: Res| {
@ -1101,7 +1109,11 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
for (i, subpat) in subpats.iter().enumerate_and_adjust(variant.fields.len(), ddpos) { for (i, subpat) in subpats.iter().enumerate_and_adjust(variant.fields.len(), ddpos) {
let field = &variant.fields[FieldIdx::from_usize(i)]; let field = &variant.fields[FieldIdx::from_usize(i)];
let field_ty = self.field_ty(subpat.span, field, args); let field_ty = self.field_ty(subpat.span, field, args);
self.check_pat(subpat, field_ty, def_bm, ti); self.check_pat(
subpat,
field_ty,
PatInfo { binding_mode: def_bm, top_info: ti, decl_origin },
);
self.tcx.check_stability( self.tcx.check_stability(
variant.fields[FieldIdx::from_usize(i)].did, variant.fields[FieldIdx::from_usize(i)].did,
@ -1285,8 +1297,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
elements: &'tcx [Pat<'tcx>], elements: &'tcx [Pat<'tcx>],
ddpos: hir::DotDotPos, ddpos: hir::DotDotPos,
expected: Ty<'tcx>, expected: Ty<'tcx>,
def_bm: BindingMode, pat_info: PatInfo<'tcx, '_>,
ti: TopInfo<'tcx>,
) -> Ty<'tcx> { ) -> Ty<'tcx> {
let tcx = self.tcx; let tcx = self.tcx;
let mut expected_len = elements.len(); let mut expected_len = elements.len();
@ -1307,18 +1318,20 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
}); });
let element_tys = tcx.mk_type_list_from_iter(element_tys_iter); let element_tys = tcx.mk_type_list_from_iter(element_tys_iter);
let pat_ty = Ty::new_tup(tcx, element_tys); let pat_ty = Ty::new_tup(tcx, element_tys);
if let Some(mut err) = self.demand_eqtype_pat_diag(span, expected, pat_ty, ti) { if let Some(mut err) =
self.demand_eqtype_pat_diag(span, expected, pat_ty, pat_info.top_info)
{
let reported = err.emit(); let reported = err.emit();
// Walk subpatterns with an expected type of `err` in this case to silence // Walk subpatterns with an expected type of `err` in this case to silence
// further errors being emitted when using the bindings. #50333 // further errors being emitted when using the bindings. #50333
let element_tys_iter = (0..max_len).map(|_| Ty::new_error(tcx, reported)); let element_tys_iter = (0..max_len).map(|_| Ty::new_error(tcx, reported));
for (_, elem) in elements.iter().enumerate_and_adjust(max_len, ddpos) { for (_, elem) in elements.iter().enumerate_and_adjust(max_len, ddpos) {
self.check_pat(elem, Ty::new_error(tcx, reported), def_bm, ti); self.check_pat(elem, Ty::new_error(tcx, reported), pat_info);
} }
Ty::new_tup_from_iter(tcx, element_tys_iter) Ty::new_tup_from_iter(tcx, element_tys_iter)
} else { } else {
for (i, elem) in elements.iter().enumerate_and_adjust(max_len, ddpos) { for (i, elem) in elements.iter().enumerate_and_adjust(max_len, ddpos) {
self.check_pat(elem, element_tys[i], def_bm, ti); self.check_pat(elem, element_tys[i], pat_info);
} }
pat_ty pat_ty
} }
@ -1331,8 +1344,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
variant: &'tcx ty::VariantDef, variant: &'tcx ty::VariantDef,
fields: &'tcx [hir::PatField<'tcx>], fields: &'tcx [hir::PatField<'tcx>],
has_rest_pat: bool, has_rest_pat: bool,
def_bm: BindingMode, pat_info: PatInfo<'tcx, '_>,
ti: TopInfo<'tcx>,
) -> bool { ) -> bool {
let tcx = self.tcx; let tcx = self.tcx;
@ -1379,7 +1391,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
} }
}; };
self.check_pat(field.pat, field_ty, def_bm, ti); self.check_pat(field.pat, field_ty, pat_info);
} }
let mut unmentioned_fields = variant let mut unmentioned_fields = variant
@ -1937,8 +1949,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
span: Span, span: Span,
inner: &'tcx Pat<'tcx>, inner: &'tcx Pat<'tcx>,
expected: Ty<'tcx>, expected: Ty<'tcx>,
def_bm: BindingMode, pat_info: PatInfo<'tcx, '_>,
ti: TopInfo<'tcx>,
) -> Ty<'tcx> { ) -> Ty<'tcx> {
let tcx = self.tcx; let tcx = self.tcx;
let (box_ty, inner_ty) = match self.check_dereferenceable(span, expected, inner) { let (box_ty, inner_ty) = match self.check_dereferenceable(span, expected, inner) {
@ -1950,7 +1961,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
span: inner.span, span: inner.span,
}); });
let box_ty = Ty::new_box(tcx, inner_ty); let box_ty = Ty::new_box(tcx, inner_ty);
self.demand_eqtype_pat(span, expected, box_ty, ti); self.demand_eqtype_pat(span, expected, box_ty, pat_info.top_info);
(box_ty, inner_ty) (box_ty, inner_ty)
} }
Err(guar) => { Err(guar) => {
@ -1958,7 +1969,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
(err, err) (err, err)
} }
}; };
self.check_pat(inner, inner_ty, def_bm, ti); self.check_pat(inner, inner_ty, pat_info);
box_ty box_ty
} }
@ -1969,8 +1980,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
inner: &'tcx Pat<'tcx>, inner: &'tcx Pat<'tcx>,
mutbl: hir::Mutability, mutbl: hir::Mutability,
expected: Ty<'tcx>, expected: Ty<'tcx>,
def_bm: BindingMode, pat_info: PatInfo<'tcx, '_>,
ti: TopInfo<'tcx>,
) -> Ty<'tcx> { ) -> Ty<'tcx> {
let tcx = self.tcx; let tcx = self.tcx;
let expected = self.shallow_resolve(expected); let expected = self.shallow_resolve(expected);
@ -1992,7 +2002,12 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
}); });
let ref_ty = self.new_ref_ty(pat.span, mutbl, inner_ty); let ref_ty = self.new_ref_ty(pat.span, mutbl, inner_ty);
debug!("check_pat_ref: demanding {:?} = {:?}", expected, ref_ty); debug!("check_pat_ref: demanding {:?} = {:?}", expected, ref_ty);
let err = self.demand_eqtype_pat_diag(pat.span, expected, ref_ty, ti); let err = self.demand_eqtype_pat_diag(
pat.span,
expected,
ref_ty,
pat_info.top_info,
);
// Look for a case like `fn foo(&foo: u32)` and suggest // Look for a case like `fn foo(&foo: u32)` and suggest
// `fn foo(foo: &u32)` // `fn foo(foo: &u32)`
@ -2009,7 +2024,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
(err, err) (err, err)
} }
}; };
self.check_pat(inner, inner_ty, def_bm, ti); self.check_pat(inner, inner_ty, pat_info);
ref_ty ref_ty
} }
@ -2020,6 +2035,62 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
Ty::new_ref(self.tcx, region, mt) Ty::new_ref(self.tcx, region, mt)
} }
fn try_resolve_slice_ty_to_array_ty(
&self,
before: &'tcx [Pat<'tcx>],
slice: Option<&'tcx Pat<'tcx>>,
span: Span,
) -> Option<Ty<'tcx>> {
if slice.is_some() {
return None;
}
let tcx = self.tcx;
let len = before.len();
let ty_var_origin =
TypeVariableOrigin { kind: TypeVariableOriginKind::TypeInference, span };
let inner_ty = self.next_ty_var(ty_var_origin);
Some(Ty::new_array(tcx, inner_ty, len.try_into().unwrap()))
}
/// Used to determine whether we can infer the expected type in the slice pattern to be an array type.
/// This is only possible in an irrefutable pattern. If we allowed this in refutable
/// patterns, we would e.g. fail to report ambiguity in the following situation:
///
/// ```ignore(rust)
/// struct Zeroes;
/// const ARR: [usize; 2] = [0; 2];
/// const ARR2: [usize; 2] = [2; 2];
///
/// impl Into<&'static [usize; 2]> for Zeroes {
/// fn into(self) -> &'static [usize; 2] {
/// &ARR
/// }
/// }
///
/// impl Into<&'static [usize]> for Zeroes {
/// fn into(self) -> &'static [usize] {
/// &ARR2
/// }
/// }
///
/// fn main() {
/// let &[a, b]: &[usize] = Zeroes.into() else {
/// ..
/// };
/// }
/// ```
///
/// If we're in an irrefutable pattern we prefer the array impl candidate given that
/// the slice impl candidate would be rejected anyway (if no ambiguity existed).
fn pat_is_irrefutable(&self, decl_origin: Option<DeclOrigin<'_>>) -> bool {
match decl_origin {
Some(DeclOrigin::LocalDecl { els: None }) => true,
Some(DeclOrigin::LocalDecl { els: Some(_) } | DeclOrigin::LetExpr) | None => false,
}
}
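
A minimal sketch of the user-visible effect of this inference, assuming a toolchain that includes this change; the `Zeroes` type and its `Into` impl are illustrative and not taken from the diff:

```rust
struct Zeroes;

impl Into<[usize; 3]> for Zeroes {
    fn into(self) -> [usize; 3] {
        [0; 3]
    }
}

fn main() {
    // Irrefutable binding, fixed length, no `..` rest pattern: the expected type
    // is resolved to `[_; 3]`, so the `into()` call below is no longer ambiguous.
    let [a, b, c] = Zeroes.into();
    println!("{a} {b} {c}");
}
```
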
/// Type check a slice pattern. /// Type check a slice pattern.
/// ///
/// Syntactically, these look like `[pat_0, ..., pat_n]`. /// Syntactically, these look like `[pat_0, ..., pat_n]`.
@ -2037,10 +2108,24 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
slice: Option<&'tcx Pat<'tcx>>, slice: Option<&'tcx Pat<'tcx>>,
after: &'tcx [Pat<'tcx>], after: &'tcx [Pat<'tcx>],
expected: Ty<'tcx>, expected: Ty<'tcx>,
def_bm: BindingMode, pat_info: PatInfo<'tcx, '_>,
ti: TopInfo<'tcx>,
) -> Ty<'tcx> { ) -> Ty<'tcx> {
let expected = self.try_structurally_resolve_type(span, expected);
// If the pattern is irrefutable and `expected` is an infer ty, we try to equate it
// to an array if the given pattern allows it. See issue #76342
if self.pat_is_irrefutable(pat_info.decl_origin) && expected.is_ty_var() {
if let Some(resolved_arr_ty) =
self.try_resolve_slice_ty_to_array_ty(before, slice, span)
{
debug!(?resolved_arr_ty);
self.demand_eqtype(span, expected, resolved_arr_ty);
}
}
let expected = self.structurally_resolve_type(span, expected); let expected = self.structurally_resolve_type(span, expected);
debug!(?expected);
let (element_ty, opt_slice_ty, inferred) = match *expected.kind() { let (element_ty, opt_slice_ty, inferred) = match *expected.kind() {
// An array, so we might have something like `let [a, b, c] = [0, 1, 2];`. // An array, so we might have something like `let [a, b, c] = [0, 1, 2];`.
ty::Array(element_ty, len) => { ty::Array(element_ty, len) => {
@ -2055,10 +2140,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
ty::Slice(element_ty) => (element_ty, Some(expected), expected), ty::Slice(element_ty) => (element_ty, Some(expected), expected),
// The expected type must be an array or slice, but was neither, so error. // The expected type must be an array or slice, but was neither, so error.
_ => { _ => {
let guar = expected let guar = expected.error_reported().err().unwrap_or_else(|| {
.error_reported() self.error_expected_array_or_slice(span, expected, pat_info.top_info)
.err() });
.unwrap_or_else(|| self.error_expected_array_or_slice(span, expected, ti));
let err = Ty::new_error(self.tcx, guar); let err = Ty::new_error(self.tcx, guar);
(err, Some(err), err) (err, Some(err), err)
} }
@ -2066,15 +2150,15 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
// Type check all the patterns before `slice`. // Type check all the patterns before `slice`.
for elt in before { for elt in before {
self.check_pat(elt, element_ty, def_bm, ti); self.check_pat(elt, element_ty, pat_info);
} }
// Type check the `slice`, if present, against its expected type. // Type check the `slice`, if present, against its expected type.
if let Some(slice) = slice { if let Some(slice) = slice {
self.check_pat(slice, opt_slice_ty.unwrap(), def_bm, ti); self.check_pat(slice, opt_slice_ty.unwrap(), pat_info);
} }
// Type check the elements after `slice`, if present. // Type check the elements after `slice`, if present.
for elt in after { for elt in after {
self.check_pat(elt, element_ty, def_bm, ti); self.check_pat(elt, element_ty, pat_info);
} }
inferred inferred
} }

View File

@ -6,6 +6,7 @@
//! //!
//! ``` //! ```
//! # #![feature(rustc_attrs)] //! # #![feature(rustc_attrs)]
//! # #![allow(internal_features)]
//! #![rustc_partition_reused(module="spike", cfg="rpass2")] //! #![rustc_partition_reused(module="spike", cfg="rpass2")]
//! #![rustc_partition_codegened(module="spike-x", cfg="rpass2")] //! #![rustc_partition_codegened(module="spike-x", cfg="rpass2")]
//! ``` //! ```

View File

@ -12,6 +12,7 @@
test test
) )
)] )]
#![cfg_attr(all(not(bootstrap), feature = "nightly"), allow(internal_features))]
#[cfg(feature = "nightly")] #[cfg(feature = "nightly")]
pub mod bit_set; pub mod bit_set;

View File

@ -764,7 +764,7 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
Some(ty) if expected == ty => { Some(ty) if expected == ty => {
let source_map = self.tcx.sess.source_map(); let source_map = self.tcx.sess.source_map();
err.span_suggestion( err.span_suggestion(
source_map.end_point(cause.span), source_map.end_point(cause.span()),
"try removing this `?`", "try removing this `?`",
"", "",
Applicability::MachineApplicable, Applicability::MachineApplicable,

View File

@ -72,6 +72,9 @@ lint_builtin_incomplete_features = the feature `{$name}` is incomplete and may n
.note = see issue #{$n} <https://github.com/rust-lang/rust/issues/{$n}> for more information .note = see issue #{$n} <https://github.com/rust-lang/rust/issues/{$n}> for more information
.help = consider using `min_{$name}` instead, which is more stable and complete .help = consider using `min_{$name}` instead, which is more stable and complete
lint_builtin_internal_features = the feature `{$name}` is internal to the compiler or standard library
.note = using it is strongly discouraged
lint_builtin_keyword_idents = `{$kw}` is a keyword in the {$next} edition lint_builtin_keyword_idents = `{$kw}` is a keyword in the {$next} edition
.suggestion = you can use a raw identifier to stay compatible .suggestion = you can use a raw identifier to stay compatible
@ -213,9 +216,6 @@ lint_expectation = this lint expectation is unfulfilled
.note = the `unfulfilled_lint_expectations` lint can't be expected and will always produce this message .note = the `unfulfilled_lint_expectations` lint can't be expected and will always produce this message
.rationale = {$rationale} .rationale = {$rationale}
lint_fn_null_check = function pointers are not nullable, so checking them for null will always return false
.help = wrap the function pointer inside an `Option` and use `Option::is_none` to check for null pointer value
lint_for_loops_over_fallibles = lint_for_loops_over_fallibles =
for loop over {$article} `{$ty}`. This is more readably written as an `if let` statement for loop over {$article} `{$ty}`. This is more readably written as an `if let` statement
.suggestion = consider using `if let` to clear intent .suggestion = consider using `if let` to clear intent
@ -454,6 +454,13 @@ lint_path_statement_drop = path statement drops value
lint_path_statement_no_effect = path statement with no effect lint_path_statement_no_effect = path statement with no effect
lint_ptr_null_checks_fn_ptr = function pointers are not nullable, so checking them for null will always return false
.help = wrap the function pointer inside an `Option` and use `Option::is_none` to check for null pointer value
.label = expression has type `{$orig_ty}`
lint_ptr_null_checks_ref = references are not nullable, so checking them for null will always return false
.label = expression has type `{$orig_ty}`
lint_query_instability = using `{$query}` can result in unstable query results lint_query_instability = using `{$query}` can result in unstable query results
.note = if you believe this case to be fine, allow this lint and add a comment explaining your rationale .note = if you believe this case to be fine, allow this lint and add a comment explaining your rationale
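
For context, a short sketch (not taken from the diff) of the kind of code the new `lint_ptr_null_checks_ref` message is about: a reference cast to a raw pointer can never be null, so the comparison is useless.

```rust
fn main() {
    let x = 0u32;
    let r: &u32 = &x;
    // A reference is never null, so this condition is always false; on a toolchain
    // with this change it is flagged by the `useless_ptr_null_checks` lint.
    if (r as *const u32).is_null() {
        println!("never printed");
    }
}
```
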

View File

@ -28,8 +28,8 @@ use crate::{
BuiltinClashingExternSub, BuiltinConstNoMangle, BuiltinDeprecatedAttrLink, BuiltinClashingExternSub, BuiltinConstNoMangle, BuiltinDeprecatedAttrLink,
BuiltinDeprecatedAttrLinkSuggestion, BuiltinDeprecatedAttrUsed, BuiltinDerefNullptr, BuiltinDeprecatedAttrLinkSuggestion, BuiltinDeprecatedAttrUsed, BuiltinDerefNullptr,
BuiltinEllipsisInclusiveRangePatternsLint, BuiltinExplicitOutlives, BuiltinEllipsisInclusiveRangePatternsLint, BuiltinExplicitOutlives,
BuiltinExplicitOutlivesSuggestion, BuiltinIncompleteFeatures, BuiltinExplicitOutlivesSuggestion, BuiltinFeatureIssueNote, BuiltinIncompleteFeatures,
BuiltinIncompleteFeaturesHelp, BuiltinIncompleteFeaturesNote, BuiltinKeywordIdents, BuiltinIncompleteFeaturesHelp, BuiltinInternalFeatures, BuiltinKeywordIdents,
BuiltinMissingCopyImpl, BuiltinMissingDebugImpl, BuiltinMissingDoc, BuiltinMissingCopyImpl, BuiltinMissingDebugImpl, BuiltinMissingDoc,
BuiltinMutablesTransmutes, BuiltinNoMangleGeneric, BuiltinNonShorthandFieldPatterns, BuiltinMutablesTransmutes, BuiltinNoMangleGeneric, BuiltinNonShorthandFieldPatterns,
BuiltinSpecialModuleNameUsed, BuiltinTrivialBounds, BuiltinTypeAliasGenericBounds, BuiltinSpecialModuleNameUsed, BuiltinTrivialBounds, BuiltinTypeAliasGenericBounds,
@ -2301,12 +2301,36 @@ declare_lint! {
"incomplete features that may function improperly in some or all cases" "incomplete features that may function improperly in some or all cases"
} }
declare_lint! {
/// The `internal_features` lint detects unstable features enabled with
/// the [`feature` attribute] that are internal to the compiler or standard
/// library.
///
/// [`feature` attribute]: https://doc.rust-lang.org/nightly/unstable-book/
///
/// ### Example
///
/// ```rust,compile_fail
/// #![feature(rustc_attrs)]
/// ```
///
/// {{produces}}
///
/// ### Explanation
///
/// These features are an implementation detail of the compiler and standard
/// library and are not supposed to be used in user code.
pub INTERNAL_FEATURES,
Deny,
"internal features are not supposed to be used"
}
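
Since the lint is deny-by-default, crates that deliberately enable internal features (as several crates in this diff do via `cfg_attr`) need to opt out; a minimal nightly-only sketch:

```rust
// Requires a nightly toolchain; `rustc_attrs` is one of the feature gates
// now marked `internal` above.
#![feature(rustc_attrs)]
#![allow(internal_features)] // acknowledge the new deny-by-default lint

fn main() {}
```
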
declare_lint_pass!( declare_lint_pass!(
/// Check for used feature gates in `INCOMPLETE_FEATURES` in `rustc_feature/src/active.rs`. /// Check for used feature gates in `INCOMPLETE_FEATURES` in `rustc_feature/src/active.rs`.
IncompleteFeatures => [INCOMPLETE_FEATURES] IncompleteInternalFeatures => [INCOMPLETE_FEATURES, INTERNAL_FEATURES]
); );
impl EarlyLintPass for IncompleteFeatures { impl EarlyLintPass for IncompleteInternalFeatures {
fn check_crate(&mut self, cx: &EarlyContext<'_>, _: &ast::Crate) { fn check_crate(&mut self, cx: &EarlyContext<'_>, _: &ast::Crate) {
let features = cx.sess().features_untracked(); let features = cx.sess().features_untracked();
features features
@ -2314,17 +2338,26 @@ impl EarlyLintPass for IncompleteFeatures {
.iter() .iter()
.map(|(name, span, _)| (name, span)) .map(|(name, span, _)| (name, span))
.chain(features.declared_lib_features.iter().map(|(name, span)| (name, span))) .chain(features.declared_lib_features.iter().map(|(name, span)| (name, span)))
.filter(|(&name, _)| features.incomplete(name)) .filter(|(&name, _)| features.incomplete(name) || features.internal(name))
.for_each(|(&name, &span)| { .for_each(|(&name, &span)| {
let note = rustc_feature::find_feature_issue(name, GateIssue::Language) let note = rustc_feature::find_feature_issue(name, GateIssue::Language)
.map(|n| BuiltinIncompleteFeaturesNote { n }); .map(|n| BuiltinFeatureIssueNote { n });
let help =
HAS_MIN_FEATURES.contains(&name).then_some(BuiltinIncompleteFeaturesHelp); if features.incomplete(name) {
cx.emit_spanned_lint( let help =
INCOMPLETE_FEATURES, HAS_MIN_FEATURES.contains(&name).then_some(BuiltinIncompleteFeaturesHelp);
span, cx.emit_spanned_lint(
BuiltinIncompleteFeatures { name, note, help }, INCOMPLETE_FEATURES,
); span,
BuiltinIncompleteFeatures { name, note, help },
);
} else {
cx.emit_spanned_lint(
INTERNAL_FEATURES,
span,
BuiltinInternalFeatures { name, note },
);
}
}); });
} }
} }

View File

@ -1,112 +0,0 @@
use crate::{lints::FnNullCheckDiag, LateContext, LateLintPass, LintContext};
use rustc_ast::LitKind;
use rustc_hir::{BinOpKind, Expr, ExprKind, TyKind};
use rustc_session::{declare_lint, declare_lint_pass};
use rustc_span::sym;
declare_lint! {
/// The `incorrect_fn_null_checks` lint checks for expressions that check whether a
/// function pointer is null.
///
/// ### Example
///
/// ```rust
/// # fn test() {}
/// let fn_ptr: fn() = /* somehow obtained nullable function pointer */
/// # test;
///
/// if (fn_ptr as *const ()).is_null() { /* ... */ }
/// ```
///
/// {{produces}}
///
/// ### Explanation
///
/// Function pointers are assumed to be non-null, so checking them for null will always
/// return false.
INCORRECT_FN_NULL_CHECKS,
Warn,
"incorrect checking of null function pointer"
}
declare_lint_pass!(IncorrectFnNullChecks => [INCORRECT_FN_NULL_CHECKS]);
fn is_fn_ptr_cast(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool {
let mut expr = expr.peel_blocks();
let mut had_at_least_one_cast = false;
while let ExprKind::Cast(cast_expr, cast_ty) = expr.kind
&& let TyKind::Ptr(_) = cast_ty.kind {
expr = cast_expr.peel_blocks();
had_at_least_one_cast = true;
}
had_at_least_one_cast && cx.typeck_results().expr_ty_adjusted(expr).is_fn()
}
impl<'tcx> LateLintPass<'tcx> for IncorrectFnNullChecks {
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
match expr.kind {
// Catching:
// <*<const/mut> <ty>>::is_null(fn_ptr as *<const/mut> <ty>)
ExprKind::Call(path, [arg])
if let ExprKind::Path(ref qpath) = path.kind
&& let Some(def_id) = cx.qpath_res(qpath, path.hir_id).opt_def_id()
&& matches!(
cx.tcx.get_diagnostic_name(def_id),
Some(sym::ptr_const_is_null | sym::ptr_is_null)
)
&& is_fn_ptr_cast(cx, arg) =>
{
cx.emit_spanned_lint(INCORRECT_FN_NULL_CHECKS, expr.span, FnNullCheckDiag)
}
// Catching:
// (fn_ptr as *<const/mut> <ty>).is_null()
ExprKind::MethodCall(_, receiver, _, _)
if let Some(def_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id)
&& matches!(
cx.tcx.get_diagnostic_name(def_id),
Some(sym::ptr_const_is_null | sym::ptr_is_null)
)
&& is_fn_ptr_cast(cx, receiver) =>
{
cx.emit_spanned_lint(INCORRECT_FN_NULL_CHECKS, expr.span, FnNullCheckDiag)
}
ExprKind::Binary(op, left, right) if matches!(op.node, BinOpKind::Eq) => {
let to_check: &Expr<'_>;
if is_fn_ptr_cast(cx, left) {
to_check = right;
} else if is_fn_ptr_cast(cx, right) {
to_check = left;
} else {
return;
}
match to_check.kind {
// Catching:
// (fn_ptr as *<const/mut> <ty>) == (0 as <ty>)
ExprKind::Cast(cast_expr, _)
if let ExprKind::Lit(spanned) = cast_expr.kind
&& let LitKind::Int(v, _) = spanned.node && v == 0 =>
{
cx.emit_spanned_lint(INCORRECT_FN_NULL_CHECKS, expr.span, FnNullCheckDiag)
},
// Catching:
// (fn_ptr as *<const/mut> <ty>) == std::ptr::null()
ExprKind::Call(path, [])
if let ExprKind::Path(ref qpath) = path.kind
&& let Some(def_id) = cx.qpath_res(qpath, path.hir_id).opt_def_id()
&& let Some(diag_item) = cx.tcx.get_diagnostic_name(def_id)
&& (diag_item == sym::ptr_null || diag_item == sym::ptr_null_mut) =>
{
cx.emit_spanned_lint(INCORRECT_FN_NULL_CHECKS, expr.span, FnNullCheckDiag)
},
_ => {},
}
}
_ => {}
}
}
}

View File

@ -40,6 +40,7 @@
#![recursion_limit = "256"] #![recursion_limit = "256"]
#![deny(rustc::untranslatable_diagnostic)] #![deny(rustc::untranslatable_diagnostic)]
#![deny(rustc::diagnostic_outside_of_impl)] #![deny(rustc::diagnostic_outside_of_impl)]
#![cfg_attr(not(bootstrap), allow(internal_features))]
#[macro_use] #[macro_use]
extern crate rustc_middle; extern crate rustc_middle;
@ -57,7 +58,6 @@ mod early;
mod enum_intrinsics_non_enums; mod enum_intrinsics_non_enums;
mod errors; mod errors;
mod expect; mod expect;
mod fn_null_check;
mod for_loops_over_fallibles; mod for_loops_over_fallibles;
pub mod hidden_unicode_codepoints; pub mod hidden_unicode_codepoints;
mod internal; mod internal;
@ -76,6 +76,7 @@ mod noop_method_call;
mod opaque_hidden_inferred_bound; mod opaque_hidden_inferred_bound;
mod pass_by_value; mod pass_by_value;
mod passes; mod passes;
mod ptr_nulls;
mod redundant_semicolon; mod redundant_semicolon;
mod reference_casting; mod reference_casting;
mod traits; mod traits;
@ -102,7 +103,6 @@ use builtin::*;
use deref_into_dyn_supertrait::*; use deref_into_dyn_supertrait::*;
use drop_forget_useless::*; use drop_forget_useless::*;
use enum_intrinsics_non_enums::EnumIntrinsicsNonEnums; use enum_intrinsics_non_enums::EnumIntrinsicsNonEnums;
use fn_null_check::*;
use for_loops_over_fallibles::*; use for_loops_over_fallibles::*;
use hidden_unicode_codepoints::*; use hidden_unicode_codepoints::*;
use internal::*; use internal::*;
@ -117,6 +117,7 @@ use nonstandard_style::*;
use noop_method_call::*; use noop_method_call::*;
use opaque_hidden_inferred_bound::*; use opaque_hidden_inferred_bound::*;
use pass_by_value::*; use pass_by_value::*;
use ptr_nulls::*;
use redundant_semicolon::*; use redundant_semicolon::*;
use reference_casting::*; use reference_casting::*;
use traits::*; use traits::*;
@ -173,7 +174,7 @@ early_lint_methods!(
WhileTrue: WhileTrue, WhileTrue: WhileTrue,
NonAsciiIdents: NonAsciiIdents, NonAsciiIdents: NonAsciiIdents,
HiddenUnicodeCodepoints: HiddenUnicodeCodepoints, HiddenUnicodeCodepoints: HiddenUnicodeCodepoints,
IncompleteFeatures: IncompleteFeatures, IncompleteInternalFeatures: IncompleteInternalFeatures,
RedundantSemicolons: RedundantSemicolons, RedundantSemicolons: RedundantSemicolons,
UnusedDocComment: UnusedDocComment, UnusedDocComment: UnusedDocComment,
UnexpectedCfgs: UnexpectedCfgs, UnexpectedCfgs: UnexpectedCfgs,
@ -227,7 +228,7 @@ late_lint_methods!(
// Depends on types used in type definitions // Depends on types used in type definitions
MissingCopyImplementations: MissingCopyImplementations, MissingCopyImplementations: MissingCopyImplementations,
// Depends on referenced function signatures in expressions // Depends on referenced function signatures in expressions
IncorrectFnNullChecks: IncorrectFnNullChecks, PtrNullChecks: PtrNullChecks,
MutableTransmutes: MutableTransmutes, MutableTransmutes: MutableTransmutes,
TypeAliasBounds: TypeAliasBounds, TypeAliasBounds: TypeAliasBounds,
TrivialConstraints: TrivialConstraints, TrivialConstraints: TrivialConstraints,

View File

@ -405,18 +405,27 @@ pub struct BuiltinExplicitOutlivesSuggestion {
pub struct BuiltinIncompleteFeatures { pub struct BuiltinIncompleteFeatures {
pub name: Symbol, pub name: Symbol,
#[subdiagnostic] #[subdiagnostic]
pub note: Option<BuiltinIncompleteFeaturesNote>, pub note: Option<BuiltinFeatureIssueNote>,
#[subdiagnostic] #[subdiagnostic]
pub help: Option<BuiltinIncompleteFeaturesHelp>, pub help: Option<BuiltinIncompleteFeaturesHelp>,
} }
#[derive(LintDiagnostic)]
#[diag(lint_builtin_internal_features)]
#[note]
pub struct BuiltinInternalFeatures {
pub name: Symbol,
#[subdiagnostic]
pub note: Option<BuiltinFeatureIssueNote>,
}
#[derive(Subdiagnostic)] #[derive(Subdiagnostic)]
#[help(lint_help)] #[help(lint_help)]
pub struct BuiltinIncompleteFeaturesHelp; pub struct BuiltinIncompleteFeaturesHelp;
#[derive(Subdiagnostic)] #[derive(Subdiagnostic)]
#[note(lint_note)] #[note(lint_note)]
pub struct BuiltinIncompleteFeaturesNote { pub struct BuiltinFeatureIssueNote {
pub n: NonZeroU32, pub n: NonZeroU32,
} }
@ -613,11 +622,23 @@ pub struct ExpectationNote {
pub rationale: Symbol, pub rationale: Symbol,
} }
// fn_null_check.rs // ptr_nulls.rs
#[derive(LintDiagnostic)] #[derive(LintDiagnostic)]
#[diag(lint_fn_null_check)] pub enum PtrNullChecksDiag<'a> {
#[help] #[diag(lint_ptr_null_checks_fn_ptr)]
pub struct FnNullCheckDiag; #[help(lint_help)]
FnPtr {
orig_ty: Ty<'a>,
#[label]
label: Span,
},
#[diag(lint_ptr_null_checks_ref)]
Ref {
orig_ty: Ty<'a>,
#[label]
label: Span,
},
}
// for_loops_over_fallibles.rs // for_loops_over_fallibles.rs
#[derive(LintDiagnostic)] #[derive(LintDiagnostic)]

View File

@ -0,0 +1,146 @@
use crate::{lints::PtrNullChecksDiag, LateContext, LateLintPass, LintContext};
use rustc_ast::LitKind;
use rustc_hir::{BinOpKind, Expr, ExprKind, TyKind};
use rustc_session::{declare_lint, declare_lint_pass};
use rustc_span::sym;
declare_lint! {
/// The `useless_ptr_null_checks` lint checks for useless null checks against pointers
/// obtained from non-null types.
///
/// ### Example
///
/// ```rust
/// # fn test() {}
/// let fn_ptr: fn() = /* somehow obtained nullable function pointer */
/// # test;
///
/// if (fn_ptr as *const ()).is_null() { /* ... */ }
/// ```
///
/// {{produces}}
///
/// ### Explanation
///
/// Function pointers and references are assumed to be non-null, so checking them for null
/// will always return false.
USELESS_PTR_NULL_CHECKS,
Warn,
"useless checking of non-null-typed pointer"
}
declare_lint_pass!(PtrNullChecks => [USELESS_PTR_NULL_CHECKS]);
/// This function detects and returns the original expression from a series of consecutive casts,
/// i.e. `(my_fn as *const _ as *mut _).cast_mut()` would return the expression for `my_fn`.
fn ptr_cast_chain<'a>(cx: &'a LateContext<'_>, mut e: &'a Expr<'a>) -> Option<&'a Expr<'a>> {
let mut had_at_least_one_cast = false;
loop {
e = e.peel_blocks();
e = if let ExprKind::Cast(expr, t) = e.kind
&& let TyKind::Ptr(_) = t.kind {
had_at_least_one_cast = true;
expr
} else if let ExprKind::MethodCall(_, expr, [], _) = e.kind
&& let Some(def_id) = cx.typeck_results().type_dependent_def_id(e.hir_id)
&& matches!(cx.tcx.get_diagnostic_name(def_id), Some(sym::ptr_cast | sym::ptr_cast_mut)) {
had_at_least_one_cast = true;
expr
} else if let ExprKind::Call(path, [arg]) = e.kind
&& let ExprKind::Path(ref qpath) = path.kind
&& let Some(def_id) = cx.qpath_res(qpath, path.hir_id).opt_def_id()
&& matches!(cx.tcx.get_diagnostic_name(def_id), Some(sym::ptr_from_ref | sym::ptr_from_mut)) {
had_at_least_one_cast = true;
arg
} else if had_at_least_one_cast {
return Some(e);
} else {
return None;
};
}
}
fn incorrect_check<'a>(cx: &LateContext<'a>, expr: &Expr<'_>) -> Option<PtrNullChecksDiag<'a>> {
let expr = ptr_cast_chain(cx, expr)?;
let orig_ty = cx.typeck_results().expr_ty(expr);
if orig_ty.is_fn() {
Some(PtrNullChecksDiag::FnPtr { orig_ty, label: expr.span })
} else if orig_ty.is_ref() {
Some(PtrNullChecksDiag::Ref { orig_ty, label: expr.span })
} else {
None
}
}
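For illustration only (not part of this commit), a minimal standalone sketch of the cast chains `ptr_cast_chain` peels and `incorrect_check` then classifies; the names `my_fn` and `x` are made up for the example:

fn my_fn() {}

fn main() {
    // Each link is a form the walker recognizes: an `as`-cast to a raw
    // pointer, or a `cast`/`cast_mut` method call (plus `ptr::from_ref`
    // and `ptr::from_mut`, not shown here).
    let f: fn() = my_fn;
    let p = (f as *const ()).cast_mut();
    let x = 0u8;
    let q = (&x as *const u8).cast::<()>();
    // Peeling either chain ends at a value whose type is a fn pointer or a
    // reference, which is exactly what `incorrect_check` reports on.
    assert!(!p.is_null() && !q.is_null());
}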
impl<'tcx> LateLintPass<'tcx> for PtrNullChecks {
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
match expr.kind {
// Catching:
// <*<const/mut> <ty>>::is_null(fn_ptr as *<const/mut> <ty>)
ExprKind::Call(path, [arg])
if let ExprKind::Path(ref qpath) = path.kind
&& let Some(def_id) = cx.qpath_res(qpath, path.hir_id).opt_def_id()
&& matches!(
cx.tcx.get_diagnostic_name(def_id),
Some(sym::ptr_const_is_null | sym::ptr_is_null)
)
&& let Some(diag) = incorrect_check(cx, arg) =>
{
cx.emit_spanned_lint(USELESS_PTR_NULL_CHECKS, expr.span, diag)
}
// Catching:
// (fn_ptr as *<const/mut> <ty>).is_null()
ExprKind::MethodCall(_, receiver, _, _)
if let Some(def_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id)
&& matches!(
cx.tcx.get_diagnostic_name(def_id),
Some(sym::ptr_const_is_null | sym::ptr_is_null)
)
&& let Some(diag) = incorrect_check(cx, receiver) =>
{
cx.emit_spanned_lint(USELESS_PTR_NULL_CHECKS, expr.span, diag)
}
ExprKind::Binary(op, left, right) if matches!(op.node, BinOpKind::Eq) => {
let to_check: &Expr<'_>;
let diag: PtrNullChecksDiag<'_>;
if let Some(ddiag) = incorrect_check(cx, left) {
to_check = right;
diag = ddiag;
} else if let Some(ddiag) = incorrect_check(cx, right) {
to_check = left;
diag = ddiag;
} else {
return;
}
match to_check.kind {
// Catching:
// (fn_ptr as *<const/mut> <ty>) == (0 as <ty>)
ExprKind::Cast(cast_expr, _)
if let ExprKind::Lit(spanned) = cast_expr.kind
&& let LitKind::Int(v, _) = spanned.node && v == 0 =>
{
cx.emit_spanned_lint(USELESS_PTR_NULL_CHECKS, expr.span, diag)
},
// Catching:
// (fn_ptr as *<const/mut> <ty>) == std::ptr::null()
ExprKind::Call(path, [])
if let ExprKind::Path(ref qpath) = path.kind
&& let Some(def_id) = cx.qpath_res(qpath, path.hir_id).opt_def_id()
&& let Some(diag_item) = cx.tcx.get_diagnostic_name(def_id)
&& (diag_item == sym::ptr_null || diag_item == sym::ptr_null_mut) =>
{
cx.emit_spanned_lint(USELESS_PTR_NULL_CHECKS, expr.span, diag)
},
_ => {},
}
}
_ => {}
}
}
}
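For illustration only (not part of this commit), a hedged sketch of the expression shapes the `check_expr` arms above catch; `callback` is a made-up name:

fn callback() {}

fn main() {
    let fn_ptr: fn() = callback;
    let p = fn_ptr as *const ();
    // Shape 1: path-call form, `<*const T>::is_null(p)`.
    let _a = <*const ()>::is_null(p);
    // Shape 2: method-call form, `p.is_null()`.
    let _b = p.is_null();
    // Shape 3: `==` against a zero-literal cast or against `ptr::null()`.
    let _c = p == (0 as *const ());
    let _d = p == std::ptr::null();
    // All four checks evaluate to false, since `fn_ptr` can never be null,
    // so `useless_ptr_null_checks` fires on each of them.
}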

View File

@ -3925,7 +3925,6 @@ declare_lint! {
/// ///
/// // in crate B /// // in crate B
/// #![feature(non_exhaustive_omitted_patterns_lint)] /// #![feature(non_exhaustive_omitted_patterns_lint)]
///
/// match Bar::A { /// match Bar::A {
/// Bar::A => {}, /// Bar::A => {},
/// #[warn(non_exhaustive_omitted_patterns)] /// #[warn(non_exhaustive_omitted_patterns)]

View File

@ -7,6 +7,7 @@
#![allow(rustc::default_hash_types)] #![allow(rustc::default_hash_types)]
#![deny(rustc::untranslatable_diagnostic)] #![deny(rustc::untranslatable_diagnostic)]
#![deny(rustc::diagnostic_outside_of_impl)] #![deny(rustc::diagnostic_outside_of_impl)]
#![cfg_attr(not(bootstrap), allow(internal_features))]
#![recursion_limit = "128"] #![recursion_limit = "128"]
use synstructure::decl_derive; use synstructure::decl_derive;

View File

@ -64,6 +64,7 @@
#![feature(macro_metavar_expr)] #![feature(macro_metavar_expr)]
#![recursion_limit = "512"] #![recursion_limit = "512"]
#![allow(rustc::potential_query_instability)] #![allow(rustc::potential_query_instability)]
#![cfg_attr(not(bootstrap), allow(internal_features))]
#[macro_use] #[macro_use]
extern crate bitflags; extern crate bitflags;

View File

@ -18,9 +18,9 @@ use rustc_span::DUMMY_SP;
use rustc_target::abi::{Align, HasDataLayout, Size}; use rustc_target::abi::{Align, HasDataLayout, Size};
use super::{ use super::{
read_target_uint, write_target_uint, AllocId, InterpError, InterpResult, Pointer, Provenance, read_target_uint, write_target_uint, AllocId, BadBytesAccess, InterpError, InterpResult,
ResourceExhaustionInfo, Scalar, ScalarSizeMismatch, UndefinedBehaviorInfo, UninitBytesAccess, Pointer, PointerArithmetic, Provenance, ResourceExhaustionInfo, Scalar, ScalarSizeMismatch,
UnsupportedOpInfo, UndefinedBehaviorInfo, UnsupportedOpInfo,
}; };
use crate::ty; use crate::ty;
use init_mask::*; use init_mask::*;
@ -173,13 +173,13 @@ pub enum AllocError {
/// A scalar had the wrong size. /// A scalar had the wrong size.
ScalarSizeMismatch(ScalarSizeMismatch), ScalarSizeMismatch(ScalarSizeMismatch),
/// Encountered a pointer where we needed raw bytes. /// Encountered a pointer where we needed raw bytes.
ReadPointerAsBytes, ReadPointerAsInt(Option<BadBytesAccess>),
/// Partially overwriting a pointer. /// Partially overwriting a pointer.
PartialPointerOverwrite(Size), OverwritePartialPointer(Size),
/// Partially copying a pointer. /// Partially copying a pointer.
PartialPointerCopy(Size), ReadPartialPointer(Size),
/// Using uninitialized data where it is not allowed. /// Using uninitialized data where it is not allowed.
InvalidUninitBytes(Option<UninitBytesAccess>), InvalidUninitBytes(Option<BadBytesAccess>),
} }
pub type AllocResult<T = ()> = Result<T, AllocError>; pub type AllocResult<T = ()> = Result<T, AllocError>;
@ -196,12 +196,14 @@ impl AllocError {
ScalarSizeMismatch(s) => { ScalarSizeMismatch(s) => {
InterpError::UndefinedBehavior(UndefinedBehaviorInfo::ScalarSizeMismatch(s)) InterpError::UndefinedBehavior(UndefinedBehaviorInfo::ScalarSizeMismatch(s))
} }
ReadPointerAsBytes => InterpError::Unsupported(UnsupportedOpInfo::ReadPointerAsBytes), ReadPointerAsInt(info) => InterpError::Unsupported(
PartialPointerOverwrite(offset) => InterpError::Unsupported( UnsupportedOpInfo::ReadPointerAsInt(info.map(|b| (alloc_id, b))),
UnsupportedOpInfo::PartialPointerOverwrite(Pointer::new(alloc_id, offset)),
), ),
PartialPointerCopy(offset) => InterpError::Unsupported( OverwritePartialPointer(offset) => InterpError::Unsupported(
UnsupportedOpInfo::PartialPointerCopy(Pointer::new(alloc_id, offset)), UnsupportedOpInfo::OverwritePartialPointer(Pointer::new(alloc_id, offset)),
),
ReadPartialPointer(offset) => InterpError::Unsupported(
UnsupportedOpInfo::ReadPartialPointer(Pointer::new(alloc_id, offset)),
), ),
InvalidUninitBytes(info) => InterpError::UndefinedBehavior( InvalidUninitBytes(info) => InterpError::UndefinedBehavior(
UndefinedBehaviorInfo::InvalidUninitBytes(info.map(|b| (alloc_id, b))), UndefinedBehaviorInfo::InvalidUninitBytes(info.map(|b| (alloc_id, b))),
@ -433,14 +435,26 @@ impl<Prov: Provenance, Extra, Bytes: AllocBytes> Allocation<Prov, Extra, Bytes>
range: AllocRange, range: AllocRange,
) -> AllocResult<&[u8]> { ) -> AllocResult<&[u8]> {
self.init_mask.is_range_initialized(range).map_err(|uninit_range| { self.init_mask.is_range_initialized(range).map_err(|uninit_range| {
AllocError::InvalidUninitBytes(Some(UninitBytesAccess { AllocError::InvalidUninitBytes(Some(BadBytesAccess {
access: range, access: range,
uninit: uninit_range, bad: uninit_range,
})) }))
})?; })?;
if !Prov::OFFSET_IS_ADDR { if !Prov::OFFSET_IS_ADDR {
if !self.provenance.range_empty(range, cx) { if !self.provenance.range_empty(range, cx) {
return Err(AllocError::ReadPointerAsBytes); // Find the provenance.
let (offset, _prov) = self
.provenance
.range_get_ptrs(range, cx)
.first()
.copied()
.expect("there must be provenance somewhere here");
let start = offset.max(range.start); // the pointer might begin before `range`!
let end = (offset + cx.pointer_size()).min(range.end()); // the pointer might end after `range`!
return Err(AllocError::ReadPointerAsInt(Some(BadBytesAccess {
access: range,
bad: AllocRange::from(start..end),
})));
} }
} }
Ok(self.get_bytes_unchecked(range)) Ok(self.get_bytes_unchecked(range))
@ -536,23 +550,25 @@ impl<Prov: Provenance, Extra, Bytes: AllocBytes> Allocation<Prov, Extra, Bytes>
// Now use this provenance. // Now use this provenance.
let ptr = Pointer::new(prov, Size::from_bytes(bits)); let ptr = Pointer::new(prov, Size::from_bytes(bits));
return Ok(Scalar::from_maybe_pointer(ptr, cx)); return Ok(Scalar::from_maybe_pointer(ptr, cx));
} else {
// Without OFFSET_IS_ADDR, the only remaining case we can handle is total absence of
// provenance.
if self.provenance.range_empty(range, cx) {
return Ok(Scalar::from_uint(bits, range.size));
}
// Else we have mixed provenance, that doesn't work.
return Err(AllocError::ReadPartialPointer(range.start));
} }
} else { } else {
// We are *not* reading a pointer. // We are *not* reading a pointer.
// If we can just ignore provenance, do exactly that. // If we can just ignore provenance or there is none, that's easy.
if Prov::OFFSET_IS_ADDR { if Prov::OFFSET_IS_ADDR || self.provenance.range_empty(range, cx) {
// We just strip provenance. // We just strip provenance.
return Ok(Scalar::from_uint(bits, range.size)); return Ok(Scalar::from_uint(bits, range.size));
} }
// There is some provenance and we don't have OFFSET_IS_ADDR. This doesn't work.
return Err(AllocError::ReadPointerAsInt(None));
} }
// Fallback path for when we cannot treat provenance bytewise or ignore it.
assert!(!Prov::OFFSET_IS_ADDR);
if !self.provenance.range_empty(range, cx) {
return Err(AllocError::ReadPointerAsBytes);
}
// There is no provenance, we can just return the bits.
Ok(Scalar::from_uint(bits, range.size))
} }
/// Writes a *non-ZST* scalar. /// Writes a *non-ZST* scalar.

View File

@ -66,7 +66,11 @@ impl<Prov: Provenance> ProvenanceMap<Prov> {
/// Returns all ptr-sized provenance in the given range. /// Returns all ptr-sized provenance in the given range.
/// If the range has length 0, returns provenance that crosses the edge between `start-1` and /// If the range has length 0, returns provenance that crosses the edge between `start-1` and
/// `start`. /// `start`.
fn range_get_ptrs(&self, range: AllocRange, cx: &impl HasDataLayout) -> &[(Size, Prov)] { pub(super) fn range_get_ptrs(
&self,
range: AllocRange,
cx: &impl HasDataLayout,
) -> &[(Size, Prov)] {
// We have to go back `pointer_size - 1` bytes, as that one would still overlap with // We have to go back `pointer_size - 1` bytes, as that one would still overlap with
// the beginning of this range. // the beginning of this range.
let adjusted_start = Size::from_bytes( let adjusted_start = Size::from_bytes(
@ -158,7 +162,7 @@ impl<Prov: Provenance> ProvenanceMap<Prov> {
if first < start { if first < start {
if !Prov::OFFSET_IS_ADDR { if !Prov::OFFSET_IS_ADDR {
// We can't split up the provenance into less than a pointer. // We can't split up the provenance into less than a pointer.
return Err(AllocError::PartialPointerOverwrite(first)); return Err(AllocError::OverwritePartialPointer(first));
} }
// Insert the remaining part in the bytewise provenance. // Insert the remaining part in the bytewise provenance.
let prov = self.ptrs[&first]; let prov = self.ptrs[&first];
@ -171,7 +175,7 @@ impl<Prov: Provenance> ProvenanceMap<Prov> {
let begin_of_last = last - cx.data_layout().pointer_size; let begin_of_last = last - cx.data_layout().pointer_size;
if !Prov::OFFSET_IS_ADDR { if !Prov::OFFSET_IS_ADDR {
// We can't split up the provenance into less than a pointer. // We can't split up the provenance into less than a pointer.
return Err(AllocError::PartialPointerOverwrite(begin_of_last)); return Err(AllocError::OverwritePartialPointer(begin_of_last));
} }
// Insert the remaining part in the bytewise provenance. // Insert the remaining part in the bytewise provenance.
let prov = self.ptrs[&begin_of_last]; let prov = self.ptrs[&begin_of_last];
@ -246,10 +250,10 @@ impl<Prov: Provenance> ProvenanceMap<Prov> {
if !Prov::OFFSET_IS_ADDR { if !Prov::OFFSET_IS_ADDR {
// There can't be any bytewise provenance, and we cannot split up the begin/end overlap. // There can't be any bytewise provenance, and we cannot split up the begin/end overlap.
if let Some(entry) = begin_overlap { if let Some(entry) = begin_overlap {
return Err(AllocError::PartialPointerCopy(entry.0)); return Err(AllocError::ReadPartialPointer(entry.0));
} }
if let Some(entry) = end_overlap { if let Some(entry) = end_overlap {
return Err(AllocError::PartialPointerCopy(entry.0)); return Err(AllocError::ReadPartialPointer(entry.0));
} }
debug_assert!(self.bytes.is_none()); debug_assert!(self.bytes.is_none());
} else { } else {

View File

@ -134,10 +134,6 @@ impl InterpErrorBacktrace {
} }
impl<'tcx> InterpErrorInfo<'tcx> { impl<'tcx> InterpErrorInfo<'tcx> {
pub fn from_parts(kind: InterpError<'tcx>, backtrace: InterpErrorBacktrace) -> Self {
Self(Box::new(InterpErrorInfoInner { kind, backtrace }))
}
pub fn into_parts(self) -> (InterpError<'tcx>, InterpErrorBacktrace) { pub fn into_parts(self) -> (InterpError<'tcx>, InterpErrorBacktrace) {
let InterpErrorInfo(box InterpErrorInfoInner { kind, backtrace }) = self; let InterpErrorInfo(box InterpErrorInfoInner { kind, backtrace }) = self;
(kind, backtrace) (kind, backtrace)
@ -155,7 +151,7 @@ impl<'tcx> InterpErrorInfo<'tcx> {
} }
fn print_backtrace(backtrace: &Backtrace) { fn print_backtrace(backtrace: &Backtrace) {
eprintln!("\n\nAn error occurred in miri:\n{backtrace}"); eprintln!("\n\nAn error occurred in the MIR interpreter:\n{backtrace}");
} }
impl From<ErrorGuaranteed> for InterpErrorInfo<'_> { impl From<ErrorGuaranteed> for InterpErrorInfo<'_> {
@ -226,13 +222,13 @@ impl IntoDiagnosticArg for InvalidMetaKind {
} }
} }
/// Details of an access to uninitialized bytes where it is not allowed. /// Details of an access to uninitialized bytes / bad pointer bytes where it is not allowed.
#[derive(Debug, Clone, Copy)] #[derive(Debug, Clone, Copy)]
pub struct UninitBytesAccess { pub struct BadBytesAccess {
/// Range of the original memory access. /// Range of the original memory access.
pub access: AllocRange, pub access: AllocRange,
/// Range of the uninit memory that was encountered. (Might not be maximal.) /// Range of the bad memory that was encountered. (Might not be maximal.)
pub uninit: AllocRange, pub bad: AllocRange,
} }
/// Information about a size mismatch. /// Information about a size mismatch.
@ -316,7 +312,7 @@ pub enum UndefinedBehaviorInfo<'a> {
/// Using a string that is not valid UTF-8, /// Using a string that is not valid UTF-8,
InvalidStr(std::str::Utf8Error), InvalidStr(std::str::Utf8Error),
/// Using uninitialized data where it is not allowed. /// Using uninitialized data where it is not allowed.
InvalidUninitBytes(Option<(AllocId, UninitBytesAccess)>), InvalidUninitBytes(Option<(AllocId, BadBytesAccess)>),
/// Working with a local that is not currently live. /// Working with a local that is not currently live.
DeadLocal, DeadLocal,
/// Data size is not equal to target size. /// Data size is not equal to target size.
@ -326,7 +322,7 @@ pub enum UndefinedBehaviorInfo<'a> {
/// An uninhabited enum variant is projected. /// An uninhabited enum variant is projected.
UninhabitedEnumVariantRead(VariantIdx), UninhabitedEnumVariantRead(VariantIdx),
/// Validation error. /// Validation error.
Validation(ValidationErrorInfo<'a>), ValidationError(ValidationErrorInfo<'a>),
// FIXME(fee1-dead) these should all be actual variants of the enum instead of dynamically // FIXME(fee1-dead) these should all be actual variants of the enum instead of dynamically
// dispatched // dispatched
/// A custom (free-form) error, created by `err_ub_custom!`. /// A custom (free-form) error, created by `err_ub_custom!`.
@ -368,6 +364,8 @@ pub enum ExpectedKind {
Float, Float,
Int, Int,
FnPtr, FnPtr,
EnumTag,
Str,
} }
impl From<PointerKind> for ExpectedKind { impl From<PointerKind> for ExpectedKind {
@ -381,10 +379,11 @@ impl From<PointerKind> for ExpectedKind {
#[derive(Debug)] #[derive(Debug)]
pub enum ValidationErrorKind<'tcx> { pub enum ValidationErrorKind<'tcx> {
PointerAsInt { expected: ExpectedKind },
PartialPointer,
PtrToUninhabited { ptr_kind: PointerKind, ty: Ty<'tcx> }, PtrToUninhabited { ptr_kind: PointerKind, ty: Ty<'tcx> },
PtrToStatic { ptr_kind: PointerKind }, PtrToStatic { ptr_kind: PointerKind },
PtrToMut { ptr_kind: PointerKind }, PtrToMut { ptr_kind: PointerKind },
ExpectedNonPtr { value: String },
MutableRefInConst, MutableRefInConst,
NullFnPtr, NullFnPtr,
NeverVal, NeverVal,
@ -394,11 +393,8 @@ pub enum ValidationErrorKind<'tcx> {
UnsafeCell, UnsafeCell,
UninhabitedVal { ty: Ty<'tcx> }, UninhabitedVal { ty: Ty<'tcx> },
InvalidEnumTag { value: String }, InvalidEnumTag { value: String },
UninhabitedEnumTag, UninhabitedEnumVariant,
UninitEnumTag,
UninitStr,
Uninit { expected: ExpectedKind }, Uninit { expected: ExpectedKind },
UninitVal,
InvalidVTablePtr { value: String }, InvalidVTablePtr { value: String },
InvalidMetaSliceTooLarge { ptr_kind: PointerKind }, InvalidMetaSliceTooLarge { ptr_kind: PointerKind },
InvalidMetaTooLarge { ptr_kind: PointerKind }, InvalidMetaTooLarge { ptr_kind: PointerKind },
@ -426,12 +422,12 @@ pub enum UnsupportedOpInfo {
// //
/// Overwriting parts of a pointer; without knowing absolute addresses, the resulting state /// Overwriting parts of a pointer; without knowing absolute addresses, the resulting state
/// cannot be represented by the CTFE interpreter. /// cannot be represented by the CTFE interpreter.
PartialPointerOverwrite(Pointer<AllocId>), OverwritePartialPointer(Pointer<AllocId>),
/// Attempting to `copy` parts of a pointer to somewhere else; without knowing absolute /// Attempting to read or copy parts of a pointer to somewhere else; without knowing absolute
/// addresses, the resulting state cannot be represented by the CTFE interpreter. /// addresses, the resulting state cannot be represented by the CTFE interpreter.
PartialPointerCopy(Pointer<AllocId>), ReadPartialPointer(Pointer<AllocId>),
/// Encountered a pointer where we needed raw bytes. /// Encountered a pointer where we needed an integer.
ReadPointerAsBytes, ReadPointerAsInt(Option<(AllocId, BadBytesAccess)>),
/// Accessing thread local statics /// Accessing thread local statics
ThreadLocalStatic(DefId), ThreadLocalStatic(DefId),
/// Accessing an unsupported extern static. /// Accessing an unsupported extern static.
@ -497,7 +493,7 @@ impl InterpError<'_> {
matches!( matches!(
self, self,
InterpError::Unsupported(UnsupportedOpInfo::Unsupported(_)) InterpError::Unsupported(UnsupportedOpInfo::Unsupported(_))
| InterpError::UndefinedBehavior(UndefinedBehaviorInfo::Validation { .. }) | InterpError::UndefinedBehavior(UndefinedBehaviorInfo::ValidationError { .. })
| InterpError::UndefinedBehavior(UndefinedBehaviorInfo::Ub(_)) | InterpError::UndefinedBehavior(UndefinedBehaviorInfo::Ub(_))
) )
} }

View File

@ -142,11 +142,11 @@ use crate::ty::GenericArgKind;
use crate::ty::{self, Instance, Ty, TyCtxt}; use crate::ty::{self, Instance, Ty, TyCtxt};
pub use self::error::{ pub use self::error::{
struct_error, CheckInAllocMsg, ErrorHandled, EvalToAllocationRawResult, EvalToConstValueResult, struct_error, BadBytesAccess, CheckInAllocMsg, ErrorHandled, EvalToAllocationRawResult,
EvalToValTreeResult, ExpectedKind, InterpError, InterpErrorInfo, InterpResult, InvalidMetaKind, EvalToConstValueResult, EvalToValTreeResult, ExpectedKind, InterpError, InterpErrorInfo,
InvalidProgramInfo, MachineStopType, PointerKind, ReportedErrorInfo, ResourceExhaustionInfo, InterpResult, InvalidMetaKind, InvalidProgramInfo, MachineStopType, PointerKind,
ScalarSizeMismatch, UndefinedBehaviorInfo, UninitBytesAccess, UnsupportedOpInfo, ReportedErrorInfo, ResourceExhaustionInfo, ScalarSizeMismatch, UndefinedBehaviorInfo,
ValidationErrorInfo, ValidationErrorKind, UnsupportedOpInfo, ValidationErrorInfo, ValidationErrorKind,
}; };
pub use self::value::{get_slice_bytes, ConstAlloc, ConstValue, Scalar}; pub use self::value::{get_slice_bytes, ConstAlloc, ConstValue, Scalar};
@ -609,7 +609,7 @@ impl<'tcx> TyCtxt<'tcx> {
/// Panics in case the `AllocId` is dangling. Since that is impossible for `AllocId`s in /// Panics in case the `AllocId` is dangling. Since that is impossible for `AllocId`s in
/// constants (as all constants must pass interning and validation that check for dangling /// constants (as all constants must pass interning and validation that check for dangling
/// ids), this function is frequently used throughout rustc, but should not be used within /// ids), this function is frequently used throughout rustc, but should not be used within
/// the miri engine. /// the interpreter.
pub fn global_alloc(self, id: AllocId) -> GlobalAlloc<'tcx> { pub fn global_alloc(self, id: AllocId) -> GlobalAlloc<'tcx> {
match self.try_get_global_alloc(id) { match self.try_get_global_alloc(id) {
Some(alloc) => alloc, Some(alloc) => alloc,

View File

@ -378,15 +378,16 @@ impl<'tcx, Prov: Provenance> Scalar<Prov> {
#[inline] #[inline]
pub fn to_bits(self, target_size: Size) -> InterpResult<'tcx, u128> { pub fn to_bits(self, target_size: Size) -> InterpResult<'tcx, u128> {
assert_ne!(target_size.bytes(), 0, "you should never look at the bits of a ZST"); assert_ne!(target_size.bytes(), 0, "you should never look at the bits of a ZST");
self.try_to_int().map_err(|_| err_unsup!(ReadPointerAsBytes))?.to_bits(target_size).map_err( self.try_to_int()
|size| { .map_err(|_| err_unsup!(ReadPointerAsInt(None)))?
.to_bits(target_size)
.map_err(|size| {
err_ub!(ScalarSizeMismatch(ScalarSizeMismatch { err_ub!(ScalarSizeMismatch(ScalarSizeMismatch {
target_size: target_size.bytes(), target_size: target_size.bytes(),
data_size: size.bytes(), data_size: size.bytes(),
})) }))
.into() .into()
}, })
)
} }
#[inline(always)] #[inline(always)]

View File

@ -223,19 +223,29 @@ impl<'tcx> Inliner<'tcx> {
return Err("failed to normalize return type"); return Err("failed to normalize return type");
} }
if callsite.fn_sig.abi() == Abi::RustCall { if callsite.fn_sig.abi() == Abi::RustCall {
let (arg_tuple, skipped_args) = match &args[..] { // FIXME: Don't inline user-written `extern "rust-call"` functions,
[arg_tuple] => (arg_tuple, 0), // since this is generally perf-negative on rustc, and we hope that
[_, arg_tuple] => (arg_tuple, 1), // LLVM will inline these functions instead.
if callee_body.spread_arg.is_some() {
return Err("do not inline user-written rust-call functions");
}
let (self_arg, arg_tuple) = match &args[..] {
[arg_tuple] => (None, arg_tuple),
[self_arg, arg_tuple] => (Some(self_arg), arg_tuple),
_ => bug!("Expected `rust-call` to have 1 or 2 args"), _ => bug!("Expected `rust-call` to have 1 or 2 args"),
}; };
let self_arg_ty =
self_arg.map(|self_arg| self_arg.ty(&caller_body.local_decls, self.tcx));
let arg_tuple_ty = arg_tuple.ty(&caller_body.local_decls, self.tcx); let arg_tuple_ty = arg_tuple.ty(&caller_body.local_decls, self.tcx);
let ty::Tuple(arg_tuple_tys) = arg_tuple_ty.kind() else { let ty::Tuple(arg_tuple_tys) = *arg_tuple_ty.kind() else {
bug!("Closure arguments are not passed as a tuple"); bug!("Closure arguments are not passed as a tuple");
}; };
for (arg_ty, input) in for (arg_ty, input) in
arg_tuple_tys.iter().zip(callee_body.args_iter().skip(skipped_args)) self_arg_ty.into_iter().chain(arg_tuple_tys).zip(callee_body.args_iter())
{ {
let input_type = callee_body.local_decls[input].ty; let input_type = callee_body.local_decls[input].ty;
if !util::is_subtype(self.tcx, self.param_env, input_type, arg_ty) { if !util::is_subtype(self.tcx, self.param_env, input_type, arg_ty) {
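For illustration only (not part of this commit): why a `rust-call` callee sees an optional self argument plus a single tuple, which is what the inlining change above now maps argument-by-argument onto the callee's locals.

fn main() {
    // `add(2, 3)` is lowered through the "rust-call" ABI as, conceptually,
    // `Fn::call(&add, (2, 3))`: the closure itself as the self argument and
    // the declared parameters packed into one tuple. The change above chains
    // that self argument with the flattened tuple element types when checking
    // them against the callee's locals.
    let add = |a: u32, b: u32| a + b;
    assert_eq!(add(2, 3), 5);
}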

View File

@ -305,7 +305,7 @@ impl<'tcx> MirPass<'tcx> for LowerIntrinsics {
terminator.kind = TerminatorKind::Unreachable; terminator.kind = TerminatorKind::Unreachable;
} }
} }
_ if intrinsic_name.as_str().starts_with("simd_shuffle") => { sym::simd_shuffle => {
validate_simd_shuffle(tcx, args, terminator.source_info.span); validate_simd_shuffle(tcx, args, terminator.source_info.span);
} }
_ => {} _ => {}

View File

@ -384,7 +384,7 @@ fn collect_items_rec<'tcx>(
if let Ok(alloc) = tcx.eval_static_initializer(def_id) { if let Ok(alloc) = tcx.eval_static_initializer(def_id) {
for &id in alloc.inner().provenance().ptrs().values() { for &id in alloc.inner().provenance().ptrs().values() {
collect_miri(tcx, id, &mut used_items); collect_alloc(tcx, id, &mut used_items);
} }
} }
@ -1331,8 +1331,8 @@ fn create_mono_items_for_default_impls<'tcx>(
} }
} }
/// Scans the miri alloc in order to find function calls, closures, and drop-glue. /// Scans the CTFE alloc in order to find function calls, closures, and drop-glue.
fn collect_miri<'tcx>(tcx: TyCtxt<'tcx>, alloc_id: AllocId, output: &mut MonoItems<'tcx>) { fn collect_alloc<'tcx>(tcx: TyCtxt<'tcx>, alloc_id: AllocId, output: &mut MonoItems<'tcx>) {
match tcx.global_alloc(alloc_id) { match tcx.global_alloc(alloc_id) {
GlobalAlloc::Static(def_id) => { GlobalAlloc::Static(def_id) => {
assert!(!tcx.is_thread_local_static(def_id)); assert!(!tcx.is_thread_local_static(def_id));
@ -1346,7 +1346,7 @@ fn collect_miri<'tcx>(tcx: TyCtxt<'tcx>, alloc_id: AllocId, output: &mut MonoIte
trace!("collecting {:?} with {:#?}", alloc_id, alloc); trace!("collecting {:?} with {:#?}", alloc_id, alloc);
for &inner in alloc.inner().provenance().ptrs().values() { for &inner in alloc.inner().provenance().ptrs().values() {
rustc_data_structures::stack::ensure_sufficient_stack(|| { rustc_data_structures::stack::ensure_sufficient_stack(|| {
collect_miri(tcx, inner, output); collect_alloc(tcx, inner, output);
}); });
} }
} }
@ -1358,7 +1358,7 @@ fn collect_miri<'tcx>(tcx: TyCtxt<'tcx>, alloc_id: AllocId, output: &mut MonoIte
} }
GlobalAlloc::VTable(ty, trait_ref) => { GlobalAlloc::VTable(ty, trait_ref) => {
let alloc_id = tcx.vtable_allocation((ty, trait_ref)); let alloc_id = tcx.vtable_allocation((ty, trait_ref));
collect_miri(tcx, alloc_id, output) collect_alloc(tcx, alloc_id, output)
} }
} }
} }
@ -1381,10 +1381,10 @@ fn collect_const_value<'tcx>(
output: &mut MonoItems<'tcx>, output: &mut MonoItems<'tcx>,
) { ) {
match value { match value {
ConstValue::Scalar(Scalar::Ptr(ptr, _size)) => collect_miri(tcx, ptr.provenance, output), ConstValue::Scalar(Scalar::Ptr(ptr, _size)) => collect_alloc(tcx, ptr.provenance, output),
ConstValue::Slice { data: alloc, start: _, end: _ } | ConstValue::ByRef { alloc, .. } => { ConstValue::Slice { data: alloc, start: _, end: _ } | ConstValue::ByRef { alloc, .. } => {
for &id in alloc.inner().provenance().ptrs().values() { for &id in alloc.inner().provenance().ptrs().values() {
collect_miri(tcx, id, output); collect_alloc(tcx, id, output);
} }
} }
_ => {} _ => {}

View File

@ -23,6 +23,8 @@ parse_async_block_in_2015 = `async` blocks are only allowed in Rust 2018 or late
parse_async_fn_in_2015 = `async fn` is not permitted in Rust 2015 parse_async_fn_in_2015 = `async fn` is not permitted in Rust 2015
.label = to use `async fn`, switch to Rust 2018 or later .label = to use `async fn`, switch to Rust 2018 or later
parse_async_move_block_in_2015 = `async move` blocks are only allowed in Rust 2018 or later
parse_async_move_order_incorrect = the order of `move` and `async` is incorrect parse_async_move_order_incorrect = the order of `move` and `async` is incorrect
.suggestion = try switching the order .suggestion = try switching the order
@ -459,6 +461,12 @@ parse_loop_else = `{$loop_kind}...else` loops are not supported
.note = consider moving this `else` clause to a separate `if` statement and use a `bool` variable to control if it should run .note = consider moving this `else` clause to a separate `if` statement and use a `bool` variable to control if it should run
.loop_keyword = `else` is attached to this loop .loop_keyword = `else` is attached to this loop
parse_macro_expands_to_adt_field = macros cannot expand to {$adt_ty} fields
parse_macro_expands_to_enum_variant = macros cannot expand to enum variants
parse_macro_expands_to_match_arm = macros cannot expand to match arms
parse_macro_invocation_visibility = can't qualify macro invocation with `pub` parse_macro_invocation_visibility = can't qualify macro invocation with `pub`
.suggestion = remove the visibility .suggestion = remove the visibility
.help = try adjusting the macro to put `{$vis}` inside the invocation .help = try adjusting the macro to put `{$vis}` inside the invocation
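For illustration only (not part of this commit), deliberately ill-formed inputs that are still rejected but now carry the new notes instead of a bare unexpected-token error:

macro_rules! variants { () => { A, B }; }
macro_rules! field { () => { x: u32 }; }

enum E {
    variants!(), // error, with note: macros cannot expand to enum variants
}

struct S {
    field!(), // error, with note: macros cannot expand to struct fields
}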

View File

@ -1434,6 +1434,13 @@ pub(crate) struct AsyncBlockIn2015 {
pub span: Span, pub span: Span,
} }
#[derive(Diagnostic)]
#[diag(parse_async_move_block_in_2015)]
pub(crate) struct AsyncMoveBlockIn2015 {
#[primary_span]
pub span: Span,
}
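For illustration only (not part of this commit): the construct this new diagnostic targets. The snippet is ordinary code on edition 2018 and later, but under `--edition 2015` the parser now reports the dedicated "`async move` blocks are only allowed in Rust 2018 or later" error instead of a generic expected-token message:

fn main() {
    // Fine on edition 2018+; rejected with the new error on edition 2015.
    let _fut = async move { 1 };
}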
#[derive(Diagnostic)] #[derive(Diagnostic)]
#[diag(parse_self_argument_pointer)] #[diag(parse_self_argument_pointer)]
pub(crate) struct SelfArgumentPointer { pub(crate) struct SelfArgumentPointer {
@ -1808,6 +1815,12 @@ pub struct UnknownPrefix<'a> {
pub sugg: Option<UnknownPrefixSugg>, pub sugg: Option<UnknownPrefixSugg>,
} }
#[derive(Subdiagnostic)]
#[note(parse_macro_expands_to_adt_field)]
pub struct MacroExpandsToAdtField<'a> {
pub adt_ty: &'a str,
}
#[derive(Subdiagnostic)] #[derive(Subdiagnostic)]
pub enum UnknownPrefixSugg { pub enum UnknownPrefixSugg {
#[suggestion( #[suggestion(

View File

@ -8,6 +8,7 @@
#![feature(never_type)] #![feature(never_type)]
#![feature(rustc_attrs)] #![feature(rustc_attrs)]
#![recursion_limit = "256"] #![recursion_limit = "256"]
#![cfg_attr(not(bootstrap), allow(internal_features))]
#[macro_use] #[macro_use]
extern crate tracing; extern crate tracing;

View File

@ -36,7 +36,7 @@ impl<'a> Parser<'a> {
pub(super) fn parse_outer_attributes(&mut self) -> PResult<'a, AttrWrapper> { pub(super) fn parse_outer_attributes(&mut self) -> PResult<'a, AttrWrapper> {
let mut outer_attrs = ast::AttrVec::new(); let mut outer_attrs = ast::AttrVec::new();
let mut just_parsed_doc_comment = false; let mut just_parsed_doc_comment = false;
let start_pos = self.token_cursor.num_next_calls; let start_pos = self.num_bump_calls;
loop { loop {
let attr = if self.check(&token::Pound) { let attr = if self.check(&token::Pound) {
let prev_outer_attr_sp = outer_attrs.last().map(|attr| attr.span); let prev_outer_attr_sp = outer_attrs.last().map(|attr| attr.span);
@ -277,7 +277,7 @@ impl<'a> Parser<'a> {
pub(crate) fn parse_inner_attributes(&mut self) -> PResult<'a, ast::AttrVec> { pub(crate) fn parse_inner_attributes(&mut self) -> PResult<'a, ast::AttrVec> {
let mut attrs = ast::AttrVec::new(); let mut attrs = ast::AttrVec::new();
loop { loop {
let start_pos: u32 = self.token_cursor.num_next_calls.try_into().unwrap(); let start_pos: u32 = self.num_bump_calls.try_into().unwrap();
// Only try to parse if it is an inner attribute (has `!`). // Only try to parse if it is an inner attribute (has `!`).
let attr = if self.check(&token::Pound) && self.look_ahead(1, |t| t == &token::Not) { let attr = if self.check(&token::Pound) && self.look_ahead(1, |t| t == &token::Not) {
Some(self.parse_attribute(InnerAttrPolicy::Permitted)?) Some(self.parse_attribute(InnerAttrPolicy::Permitted)?)
@ -298,7 +298,7 @@ impl<'a> Parser<'a> {
None None
}; };
if let Some(attr) = attr { if let Some(attr) = attr {
let end_pos: u32 = self.token_cursor.num_next_calls.try_into().unwrap(); let end_pos: u32 = self.num_bump_calls.try_into().unwrap();
// If we are currently capturing tokens, mark the location of this inner attribute. // If we are currently capturing tokens, mark the location of this inner attribute.
// If capturing ends up creating a `LazyAttrTokenStream`, we will include // If capturing ends up creating a `LazyAttrTokenStream`, we will include
// this replace range with it, removing the inner attribute from the final // this replace range with it, removing the inner attribute from the final

View File

@ -213,6 +213,7 @@ impl<'a> Parser<'a> {
let start_token = (self.token.clone(), self.token_spacing); let start_token = (self.token.clone(), self.token_spacing);
let cursor_snapshot = self.token_cursor.clone(); let cursor_snapshot = self.token_cursor.clone();
let start_pos = self.num_bump_calls;
let has_outer_attrs = !attrs.attrs.is_empty(); let has_outer_attrs = !attrs.attrs.is_empty();
let prev_capturing = std::mem::replace(&mut self.capture_state.capturing, Capturing::Yes); let prev_capturing = std::mem::replace(&mut self.capture_state.capturing, Capturing::Yes);
@ -273,8 +274,7 @@ impl<'a> Parser<'a> {
let replace_ranges_end = self.capture_state.replace_ranges.len(); let replace_ranges_end = self.capture_state.replace_ranges.len();
let cursor_snapshot_next_calls = cursor_snapshot.num_next_calls; let mut end_pos = self.num_bump_calls;
let mut end_pos = self.token_cursor.num_next_calls;
let mut captured_trailing = false; let mut captured_trailing = false;
@ -301,12 +301,12 @@ impl<'a> Parser<'a> {
// then extend the range of captured tokens to include it, since the parser // then extend the range of captured tokens to include it, since the parser
// was not actually bumped past it. When the `LazyAttrTokenStream` gets converted // was not actually bumped past it. When the `LazyAttrTokenStream` gets converted
// into an `AttrTokenStream`, we will create the proper token. // into an `AttrTokenStream`, we will create the proper token.
if self.token_cursor.break_last_token { if self.break_last_token {
assert!(!captured_trailing, "Cannot set break_last_token and have trailing token"); assert!(!captured_trailing, "Cannot set break_last_token and have trailing token");
end_pos += 1; end_pos += 1;
} }
let num_calls = end_pos - cursor_snapshot_next_calls; let num_calls = end_pos - start_pos;
// If we have no attributes, then we will never need to // If we have no attributes, then we will never need to
// use any replace ranges. // use any replace ranges.
@ -316,7 +316,7 @@ impl<'a> Parser<'a> {
// Grab any replace ranges that occur *inside* the current AST node. // Grab any replace ranges that occur *inside* the current AST node.
// We will perform the actual replacement when we convert the `LazyAttrTokenStream` // We will perform the actual replacement when we convert the `LazyAttrTokenStream`
// to an `AttrTokenStream`. // to an `AttrTokenStream`.
let start_calls: u32 = cursor_snapshot_next_calls.try_into().unwrap(); let start_calls: u32 = start_pos.try_into().unwrap();
self.capture_state.replace_ranges[replace_ranges_start..replace_ranges_end] self.capture_state.replace_ranges[replace_ranges_start..replace_ranges_end]
.iter() .iter()
.cloned() .cloned()
@ -331,7 +331,7 @@ impl<'a> Parser<'a> {
start_token, start_token,
num_calls, num_calls,
cursor_snapshot, cursor_snapshot,
break_last_token: self.token_cursor.break_last_token, break_last_token: self.break_last_token,
replace_ranges, replace_ranges,
}); });
@ -359,14 +359,10 @@ impl<'a> Parser<'a> {
// with a `FlatToken::AttrTarget`. If this AST node is inside an item // with a `FlatToken::AttrTarget`. If this AST node is inside an item
// that has `#[derive]`, then this will allow us to cfg-expand this // that has `#[derive]`, then this will allow us to cfg-expand this
// AST node. // AST node.
let start_pos = let start_pos = if has_outer_attrs { attrs.start_pos } else { start_pos };
if has_outer_attrs { attrs.start_pos } else { cursor_snapshot_next_calls };
let new_tokens = vec![(FlatToken::AttrTarget(attr_data), Spacing::Alone)]; let new_tokens = vec![(FlatToken::AttrTarget(attr_data), Spacing::Alone)];
assert!( assert!(!self.break_last_token, "Should not have unglued last token with cfg attr");
!self.token_cursor.break_last_token,
"Should not have unglued last token with cfg attr"
);
let range: Range<u32> = (start_pos.try_into().unwrap())..(end_pos.try_into().unwrap()); let range: Range<u32> = (start_pos.try_into().unwrap())..(end_pos.try_into().unwrap());
self.capture_state.replace_ranges.push((range, new_tokens)); self.capture_state.replace_ranges.push((range, new_tokens));
self.capture_state.replace_ranges.extend(inner_attr_replace_ranges); self.capture_state.replace_ranges.extend(inner_attr_replace_ranges);
@ -464,6 +460,6 @@ mod size_asserts {
use rustc_data_structures::static_assert_size; use rustc_data_structures::static_assert_size;
// tidy-alphabetical-start // tidy-alphabetical-start
static_assert_size!(AttrWrapper, 16); static_assert_size!(AttrWrapper, 16);
static_assert_size!(LazyAttrTokenStreamImpl, 120); static_assert_size!(LazyAttrTokenStreamImpl, 104);
// tidy-alphabetical-end // tidy-alphabetical-end
} }

View File

@ -4,10 +4,11 @@ use super::{
TokenExpectType, TokenType, TokenExpectType, TokenType,
}; };
use crate::errors::{ use crate::errors::{
AmbiguousPlus, AttributeOnParamType, BadQPathStage2, BadTypePlus, BadTypePlusSub, ColonAsSemi, AmbiguousPlus, AsyncMoveBlockIn2015, AttributeOnParamType, BadQPathStage2, BadTypePlus,
ComparisonOperatorsCannotBeChained, ComparisonOperatorsCannotBeChainedSugg, BadTypePlusSub, ColonAsSemi, ComparisonOperatorsCannotBeChained,
ConstGenericWithoutBraces, ConstGenericWithoutBracesSugg, DocCommentDoesNotDocumentAnything, ComparisonOperatorsCannotBeChainedSugg, ConstGenericWithoutBraces,
DocCommentOnParamType, DoubleColonInBound, ExpectedIdentifier, ExpectedSemi, ExpectedSemiSugg, ConstGenericWithoutBracesSugg, DocCommentDoesNotDocumentAnything, DocCommentOnParamType,
DoubleColonInBound, ExpectedIdentifier, ExpectedSemi, ExpectedSemiSugg,
GenericParamsWithoutAngleBrackets, GenericParamsWithoutAngleBracketsSugg, GenericParamsWithoutAngleBrackets, GenericParamsWithoutAngleBracketsSugg,
HelpIdentifierStartsWithNumber, InInTypo, IncorrectAwait, IncorrectSemicolon, HelpIdentifierStartsWithNumber, InInTypo, IncorrectAwait, IncorrectSemicolon,
IncorrectUseOfAwait, ParenthesesInForHead, ParenthesesInForHeadSugg, IncorrectUseOfAwait, ParenthesesInForHead, ParenthesesInForHeadSugg,
@ -573,6 +574,12 @@ impl<'a> Parser<'a> {
return Err(self.sess.create_err(UseEqInstead { span: self.token.span })); return Err(self.sess.create_err(UseEqInstead { span: self.token.span }));
} }
if self.token.is_keyword(kw::Move) && self.prev_token.is_keyword(kw::Async) {
// The 2015 edition is in use because parsing of `async move` has failed.
let span = self.prev_token.span.to(self.token.span);
return Err(self.sess.create_err(AsyncMoveBlockIn2015 { span }));
}
let expect = tokens_to_string(&expected); let expect = tokens_to_string(&expected);
let actual = super::token_descr(&self.token); let actual = super::token_descr(&self.token);
let (msg_exp, (label_sp, label_exp)) = if expected.len() > 1 { let (msg_exp, (label_sp, label_exp)) = if expected.len() > 1 {
@ -2634,6 +2641,7 @@ impl<'a> Parser<'a> {
pub(crate) fn maybe_recover_unexpected_comma( pub(crate) fn maybe_recover_unexpected_comma(
&mut self, &mut self,
lo: Span, lo: Span,
is_mac_invoc: bool,
rt: CommaRecoveryMode, rt: CommaRecoveryMode,
) -> PResult<'a, ()> { ) -> PResult<'a, ()> {
if self.token != token::Comma { if self.token != token::Comma {
@ -2654,24 +2662,28 @@ impl<'a> Parser<'a> {
let seq_span = lo.to(self.prev_token.span); let seq_span = lo.to(self.prev_token.span);
let mut err = self.struct_span_err(comma_span, "unexpected `,` in pattern"); let mut err = self.struct_span_err(comma_span, "unexpected `,` in pattern");
if let Ok(seq_snippet) = self.span_to_snippet(seq_span) { if let Ok(seq_snippet) = self.span_to_snippet(seq_span) {
err.multipart_suggestion( if is_mac_invoc {
format!( err.note(fluent::parse_macro_expands_to_match_arm);
"try adding parentheses to match on a tuple{}", } else {
if let CommaRecoveryMode::LikelyTuple = rt { "" } else { "..." }, err.multipart_suggestion(
), format!(
vec![ "try adding parentheses to match on a tuple{}",
(seq_span.shrink_to_lo(), "(".to_string()), if let CommaRecoveryMode::LikelyTuple = rt { "" } else { "..." },
(seq_span.shrink_to_hi(), ")".to_string()), ),
], vec![
Applicability::MachineApplicable, (seq_span.shrink_to_lo(), "(".to_string()),
); (seq_span.shrink_to_hi(), ")".to_string()),
if let CommaRecoveryMode::EitherTupleOrPipe = rt { ],
err.span_suggestion(
seq_span,
"...or a vertical bar to match on multiple alternatives",
seq_snippet.replace(',', " |"),
Applicability::MachineApplicable, Applicability::MachineApplicable,
); );
if let CommaRecoveryMode::EitherTupleOrPipe = rt {
err.span_suggestion(
seq_span,
"...or a vertical bar to match on multiple alternatives",
seq_snippet.replace(',', " |"),
Applicability::MachineApplicable,
);
}
} }
} }
Err(err) Err(err)

View File

@ -1167,7 +1167,7 @@ impl<'a> Parser<'a> {
DestructuredFloat::TrailingDot(sym, sym_span, dot_span) => { DestructuredFloat::TrailingDot(sym, sym_span, dot_span) => {
assert!(suffix.is_none()); assert!(suffix.is_none());
// Analogous to `Self::break_and_eat` // Analogous to `Self::break_and_eat`
self.token_cursor.break_last_token = true; self.break_last_token = true;
// This might work, in cases like `1. 2`, and might not, // This might work, in cases like `1. 2`, and might not,
// in cases like `offset_of!(Ty, 1.)`. It depends on what comes // in cases like `offset_of!(Ty, 1.)`. It depends on what comes
// after the float-like token, and therefore we have to make // after the float-like token, and therefore we have to make
@ -2599,7 +2599,7 @@ impl<'a> Parser<'a> {
// Recover from missing expression in `for` loop // Recover from missing expression in `for` loop
if matches!(expr.kind, ExprKind::Block(..)) if matches!(expr.kind, ExprKind::Block(..))
&& !matches!(self.token.kind, token::OpenDelim(token::Delimiter::Brace)) && !matches!(self.token.kind, token::OpenDelim(Delimiter::Brace))
&& self.may_recover() && self.may_recover()
{ {
self.sess self.sess

View File

@ -1,20 +1,20 @@
use crate::errors;
use super::diagnostics::{dummy_arg, ConsumeClosingDelim}; use super::diagnostics::{dummy_arg, ConsumeClosingDelim};
use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign}; use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
use super::{AttrWrapper, FollowedByType, ForceCollect, Parser, PathStyle, TrailingToken}; use super::{AttrWrapper, FollowedByType, ForceCollect, Parser, PathStyle, TrailingToken};
use crate::errors::{self, MacroExpandsToAdtField};
use crate::fluent_generated as fluent;
use ast::StaticItem; use ast::StaticItem;
use rustc_ast::ast::*; use rustc_ast::ast::*;
use rustc_ast::ptr::P; use rustc_ast::ptr::P;
use rustc_ast::token::{self, Delimiter, TokenKind}; use rustc_ast::token::{self, Delimiter, TokenKind};
use rustc_ast::tokenstream::{DelimSpan, TokenStream, TokenTree}; use rustc_ast::tokenstream::{DelimSpan, TokenStream, TokenTree};
use rustc_ast::util::case::Case; use rustc_ast::util::case::Case;
use rustc_ast::MacCall;
use rustc_ast::{self as ast, AttrVec, Attribute, DUMMY_NODE_ID}; use rustc_ast::{self as ast, AttrVec, Attribute, DUMMY_NODE_ID};
use rustc_ast::{Async, Const, Defaultness, IsAuto, Mutability, Unsafe, UseTree, UseTreeKind}; use rustc_ast::{Async, Const, Defaultness, IsAuto, Mutability, Unsafe, UseTree, UseTreeKind};
use rustc_ast::{BindingAnnotation, Block, FnDecl, FnSig, Param, SelfKind}; use rustc_ast::{BindingAnnotation, Block, FnDecl, FnSig, Param, SelfKind};
use rustc_ast::{EnumDef, FieldDef, Generics, TraitRef, Ty, TyKind, Variant, VariantData}; use rustc_ast::{EnumDef, FieldDef, Generics, TraitRef, Ty, TyKind, Variant, VariantData};
use rustc_ast::{FnHeader, ForeignItem, Path, PathSegment, Visibility, VisibilityKind}; use rustc_ast::{FnHeader, ForeignItem, Path, PathSegment, Visibility, VisibilityKind};
use rustc_ast::{MacCall, MacDelimiter};
use rustc_ast_pretty::pprust; use rustc_ast_pretty::pprust;
use rustc_errors::{ use rustc_errors::{
struct_span_err, Applicability, DiagnosticBuilder, ErrorGuaranteed, IntoDiagnostic, PResult, struct_span_err, Applicability, DiagnosticBuilder, ErrorGuaranteed, IntoDiagnostic, PResult,
@ -1450,6 +1450,17 @@ impl<'a> Parser<'a> {
} }
let ident = this.parse_field_ident("enum", vlo)?; let ident = this.parse_field_ident("enum", vlo)?;
if this.token == token::Not {
if let Err(mut err) = this.unexpected::<()>() {
err.note(fluent::parse_macro_expands_to_enum_variant).emit();
}
this.bump();
this.parse_delim_args()?;
return Ok((None, TrailingToken::MaybeComma));
}
let struct_def = if this.check(&token::OpenDelim(Delimiter::Brace)) { let struct_def = if this.check(&token::OpenDelim(Delimiter::Brace)) {
// Parse a struct variant. // Parse a struct variant.
let (fields, recovered) = let (fields, recovered) =
@ -1477,7 +1488,7 @@ impl<'a> Parser<'a> {
Ok((Some(vr), TrailingToken::MaybeComma)) Ok((Some(vr), TrailingToken::MaybeComma))
}, },
).map_err(|mut err|{ ).map_err(|mut err| {
err.help("enum variants can be `Variant`, `Variant = <integer>`, `Variant(Type, ..., TypeN)` or `Variant { fields: Types }`"); err.help("enum variants can be `Variant`, `Variant = <integer>`, `Variant(Type, ..., TypeN)` or `Variant { fields: Types }`");
err err
}) })
@ -1687,7 +1698,8 @@ impl<'a> Parser<'a> {
self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| { self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
let lo = this.token.span; let lo = this.token.span;
let vis = this.parse_visibility(FollowedByType::No)?; let vis = this.parse_visibility(FollowedByType::No)?;
Ok((this.parse_single_struct_field(adt_ty, lo, vis, attrs)?, TrailingToken::None)) this.parse_single_struct_field(adt_ty, lo, vis, attrs)
.map(|field| (field, TrailingToken::None))
}) })
} }
@ -1821,8 +1833,8 @@ impl<'a> Parser<'a> {
"field names and their types are separated with `:`", "field names and their types are separated with `:`",
":", ":",
Applicability::MachineApplicable, Applicability::MachineApplicable,
); )
err.emit(); .emit();
} else { } else {
return Err(err); return Err(err);
} }
@ -1839,6 +1851,23 @@ impl<'a> Parser<'a> {
attrs: AttrVec, attrs: AttrVec,
) -> PResult<'a, FieldDef> { ) -> PResult<'a, FieldDef> {
let name = self.parse_field_ident(adt_ty, lo)?; let name = self.parse_field_ident(adt_ty, lo)?;
// Parse the macro invocation and recover
if self.token.kind == token::Not {
if let Err(mut err) = self.unexpected::<FieldDef>() {
err.subdiagnostic(MacroExpandsToAdtField { adt_ty }).emit();
self.bump();
self.parse_delim_args()?;
return Ok(FieldDef {
span: DUMMY_SP,
ident: None,
vis,
id: DUMMY_NODE_ID,
ty: self.mk_ty(DUMMY_SP, TyKind::Err),
attrs,
is_placeholder: false,
});
}
}
self.expect_field_ty_separator()?; self.expect_field_ty_separator()?;
let ty = self.parse_ty()?; let ty = self.parse_ty()?;
if self.token.kind == token::Colon && self.look_ahead(1, |tok| tok.kind != token::Colon) { if self.token.kind == token::Colon && self.look_ahead(1, |tok| tok.kind != token::Colon) {
@ -1968,7 +1997,7 @@ impl<'a> Parser<'a> {
let arrow = TokenTree::token_alone(token::FatArrow, pspan.between(bspan)); // `=>` let arrow = TokenTree::token_alone(token::FatArrow, pspan.between(bspan)); // `=>`
let tokens = TokenStream::new(vec![params, arrow, body]); let tokens = TokenStream::new(vec![params, arrow, body]);
let dspan = DelimSpan::from_pair(pspan.shrink_to_lo(), bspan.shrink_to_hi()); let dspan = DelimSpan::from_pair(pspan.shrink_to_lo(), bspan.shrink_to_hi());
P(DelimArgs { dspan, delim: MacDelimiter::Brace, tokens }) P(DelimArgs { dspan, delim: Delimiter::Brace, tokens })
} else { } else {
return self.unexpected(); return self.unexpected();
}; };

View File

@ -25,7 +25,7 @@ use rustc_ast::util::case::Case;
use rustc_ast::AttrId; use rustc_ast::AttrId;
use rustc_ast::DUMMY_NODE_ID; use rustc_ast::DUMMY_NODE_ID;
use rustc_ast::{self as ast, AnonConst, Const, DelimArgs, Extern}; use rustc_ast::{self as ast, AnonConst, Const, DelimArgs, Extern};
use rustc_ast::{Async, AttrArgs, AttrArgsEq, Expr, ExprKind, MacDelimiter, Mutability, StrLit}; use rustc_ast::{Async, AttrArgs, AttrArgsEq, Expr, ExprKind, Mutability, StrLit};
use rustc_ast::{HasAttrs, HasTokens, Unsafe, Visibility, VisibilityKind}; use rustc_ast::{HasAttrs, HasTokens, Unsafe, Visibility, VisibilityKind};
use rustc_ast_pretty::pprust; use rustc_ast_pretty::pprust;
use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::fx::FxHashMap;
@ -135,9 +135,24 @@ pub struct Parser<'a> {
pub capture_cfg: bool, pub capture_cfg: bool,
restrictions: Restrictions, restrictions: Restrictions,
expected_tokens: Vec<TokenType>, expected_tokens: Vec<TokenType>,
// Important: This must only be advanced from `bump` to ensure that
// `token_cursor.num_next_calls` is updated properly.
token_cursor: TokenCursor, token_cursor: TokenCursor,
// The number of calls to `bump`, i.e. the position in the token stream.
num_bump_calls: usize,
// During parsing we may sometimes need to 'unglue' a glued token into two
// component tokens (e.g. '>>' into '>' and '>'), so the parser can consume
// them one at a time. This process bypasses the normal capturing mechanism
// (e.g. `num_bump_calls` will not be incremented), since the 'unglued'
// tokens do not exist in the original `TokenStream`.
//
// If we end up consuming both unglued tokens, this is not an issue. We'll
// end up capturing the single 'glued' token.
//
// However, sometimes we may want to capture just the first 'unglued'
// token. For example, capturing the `Vec<u8>` in `Option<Vec<u8>>`
// requires us to unglue the trailing `>>` token. The `break_last_token`
// field is used to track this token. It gets appended to the captured
// stream when we evaluate a `LazyAttrTokenStream`.
break_last_token: bool,
/// This field is used to keep track of how many left angle brackets we have seen. This is /// This field is used to keep track of how many left angle brackets we have seen. This is
/// required in order to detect extra leading left angle brackets (`<` characters) and error /// required in order to detect extra leading left angle brackets (`<` characters) and error
/// appropriately. /// appropriately.
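For illustration only (not part of this commit), the classic input the relocated `break_last_token` flag exists for; `last_byte` is a made-up name:

// The trailing `>>` below is lexed as one token, but the parser must
// "unglue" it into two `>` tokens to close both generic argument lists.
// When only the first half has been consumed while tokens are being
// captured for attributes, the (now parser-level) `break_last_token`
// flag records that fact.
fn last_byte(x: Option<Vec<u8>>) -> Option<u8> {
    x.and_then(|v| v.last().copied())
}

fn main() {
    assert_eq!(last_byte(Some(vec![1, 2, 3])), Some(3));
}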
@ -161,7 +176,7 @@ pub struct Parser<'a> {
// This type is used a lot, e.g. it's cloned when matching many declarative macro rules with nonterminals. Make sure // This type is used a lot, e.g. it's cloned when matching many declarative macro rules with nonterminals. Make sure
// it doesn't unintentionally get bigger. // it doesn't unintentionally get bigger.
#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))] #[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
rustc_data_structures::static_assert_size!(Parser<'_>, 272); rustc_data_structures::static_assert_size!(Parser<'_>, 264);
/// Stores span information about a closure. /// Stores span information about a closure.
#[derive(Clone)] #[derive(Clone)]
@ -223,32 +238,6 @@ struct TokenCursor {
// tokens are in `stack[n-1]`. `stack[0]` (when present) has no delimiters // tokens are in `stack[n-1]`. `stack[0]` (when present) has no delimiters
// because it's the outermost token stream which never has delimiters. // because it's the outermost token stream which never has delimiters.
stack: Vec<(TokenTreeCursor, Delimiter, DelimSpan)>, stack: Vec<(TokenTreeCursor, Delimiter, DelimSpan)>,
// Counts the number of calls to `{,inlined_}next`.
num_next_calls: usize,
// During parsing, we may sometimes need to 'unglue' a
// glued token into two component tokens
// (e.g. '>>' into '>' and '>), so that the parser
// can consume them one at a time. This process
// bypasses the normal capturing mechanism
// (e.g. `num_next_calls` will not be incremented),
// since the 'unglued' tokens due not exist in
// the original `TokenStream`.
//
// If we end up consuming both unglued tokens,
// then this is not an issue - we'll end up
// capturing the single 'glued' token.
//
// However, in certain circumstances, we may
// want to capture just the first 'unglued' token.
// For example, capturing the `Vec<u8>`
// in `Option<Vec<u8>>` requires us to unglue
// the trailing `>>` token. The `break_last_token`
// field is used to track this token - it gets
// appended to the captured stream when
// we evaluate a `LazyAttrTokenStream`.
break_last_token: bool,
} }
impl TokenCursor { impl TokenCursor {
@ -399,12 +388,9 @@ impl<'a> Parser<'a> {
capture_cfg: false, capture_cfg: false,
restrictions: Restrictions::empty(), restrictions: Restrictions::empty(),
expected_tokens: Vec::new(), expected_tokens: Vec::new(),
token_cursor: TokenCursor { token_cursor: TokenCursor { tree_cursor: stream.into_trees(), stack: Vec::new() },
tree_cursor: stream.into_trees(), num_bump_calls: 0,
stack: Vec::new(), break_last_token: false,
num_next_calls: 0,
break_last_token: false,
},
unmatched_angle_bracket_count: 0, unmatched_angle_bracket_count: 0,
max_angle_bracket_count: 0, max_angle_bracket_count: 0,
last_unexpected_token_span: None, last_unexpected_token_span: None,
@ -707,7 +693,7 @@ impl<'a> Parser<'a> {
// If we consume any additional tokens, then this token // If we consume any additional tokens, then this token
// is not needed (we'll capture the entire 'glued' token), // is not needed (we'll capture the entire 'glued' token),
// and `bump` will set this field to `None` // and `bump` will set this field to `None`
self.token_cursor.break_last_token = true; self.break_last_token = true;
// Use the spacing of the glued token as the spacing // Use the spacing of the glued token as the spacing
// of the unglued second token. // of the unglued second token.
self.bump_with((Token::new(second, second_span), self.token_spacing)); self.bump_with((Token::new(second, second_span), self.token_spacing));
@ -1049,11 +1035,11 @@ impl<'a> Parser<'a> {
// Note: destructuring here would give nicer code, but it was found in #96210 to be slower // Note: destructuring here would give nicer code, but it was found in #96210 to be slower
// than `.0`/`.1` access. // than `.0`/`.1` access.
let mut next = self.token_cursor.inlined_next(); let mut next = self.token_cursor.inlined_next();
self.token_cursor.num_next_calls += 1; self.num_bump_calls += 1;
// We've retrieved a token from the underlying // We've retrieved a token from the underlying
// cursor, so we no longer need to worry about // cursor, so we no longer need to worry about
// an unglued token. See `break_and_eat` for more details // an unglued token. See `break_and_eat` for more details
self.token_cursor.break_last_token = false; self.break_last_token = false;
if next.0.span.is_dummy() { if next.0.span.is_dummy() {
// Tweak the location for better diagnostics, but keep syntactic context intact. // Tweak the location for better diagnostics, but keep syntactic context intact.
let fallback_span = self.token.span; let fallback_span = self.token.span;
@ -1230,12 +1216,10 @@ impl<'a> Parser<'a> {
|| self.check(&token::OpenDelim(Delimiter::Brace)); || self.check(&token::OpenDelim(Delimiter::Brace));
delimited.then(|| { delimited.then(|| {
// We've confirmed above that there is a delimiter so unwrapping is OK.
let TokenTree::Delimited(dspan, delim, tokens) = self.parse_token_tree() else { let TokenTree::Delimited(dspan, delim, tokens) = self.parse_token_tree() else {
unreachable!() unreachable!()
}; };
DelimArgs { dspan, delim, tokens }
DelimArgs { dspan, delim: MacDelimiter::from_token(delim).unwrap(), tokens }
}) })
} }
@ -1251,12 +1235,11 @@ impl<'a> Parser<'a> {
} }
/// Parses a single token tree from the input. /// Parses a single token tree from the input.
pub(crate) fn parse_token_tree(&mut self) -> TokenTree { pub fn parse_token_tree(&mut self) -> TokenTree {
match self.token.kind { match self.token.kind {
token::OpenDelim(..) => { token::OpenDelim(..) => {
// Grab the tokens within the delimiters. // Grab the tokens within the delimiters.
let tree_cursor = &self.token_cursor.tree_cursor; let stream = self.token_cursor.tree_cursor.stream.clone();
let stream = tree_cursor.stream.clone();
let (_, delim, span) = *self.token_cursor.stack.last().unwrap(); let (_, delim, span) = *self.token_cursor.stack.last().unwrap();
// Advance the token cursor through the entire delimited // Advance the token cursor through the entire delimited
@ -1287,15 +1270,6 @@ impl<'a> Parser<'a> {
} }
} }
/// Parses a stream of tokens into a list of `TokenTree`s, up to EOF.
pub fn parse_all_token_trees(&mut self) -> PResult<'a, Vec<TokenTree>> {
let mut tts = Vec::new();
while self.token != token::Eof {
tts.push(self.parse_token_tree());
}
Ok(tts)
}
pub fn parse_tokens(&mut self) -> TokenStream { pub fn parse_tokens(&mut self) -> TokenStream {
let mut result = Vec::new(); let mut result = Vec::new();
loop { loop {
@ -1455,7 +1429,7 @@ impl<'a> Parser<'a> {
} }
pub fn approx_token_stream_pos(&self) -> usize { pub fn approx_token_stream_pos(&self) -> usize {
self.token_cursor.num_next_calls self.num_bump_calls
} }
} }


@ -142,7 +142,11 @@ impl<'a> Parser<'a> {
// Parse the first pattern (`p_0`). // Parse the first pattern (`p_0`).
let mut first_pat = self.parse_pat_no_top_alt(expected, syntax_loc)?; let mut first_pat = self.parse_pat_no_top_alt(expected, syntax_loc)?;
if rc == RecoverComma::Yes { if rc == RecoverComma::Yes {
self.maybe_recover_unexpected_comma(first_pat.span, rt)?; self.maybe_recover_unexpected_comma(
first_pat.span,
matches!(first_pat.kind, PatKind::MacCall(_)),
rt,
)?;
} }
// If the next token is not a `|`, // If the next token is not a `|`,
@ -184,7 +188,7 @@ impl<'a> Parser<'a> {
err err
})?; })?;
if rc == RecoverComma::Yes { if rc == RecoverComma::Yes {
self.maybe_recover_unexpected_comma(pat.span, rt)?; self.maybe_recover_unexpected_comma(pat.span, false, rt)?;
} }
pats.push(pat); pats.push(pat);
} }


@ -193,10 +193,9 @@ impl<'a> Parser<'a> {
/// At this point, the `!` token after the path has already been eaten. /// At this point, the `!` token after the path has already been eaten.
fn parse_stmt_mac(&mut self, lo: Span, attrs: AttrVec, path: ast::Path) -> PResult<'a, Stmt> { fn parse_stmt_mac(&mut self, lo: Span, attrs: AttrVec, path: ast::Path) -> PResult<'a, Stmt> {
let args = self.parse_delim_args()?; let args = self.parse_delim_args()?;
let delim = args.delim.to_token();
let hi = self.prev_token.span; let hi = self.prev_token.span;
let style = match delim { let style = match args.delim {
Delimiter::Brace => MacStmtStyle::Braces, Delimiter::Brace => MacStmtStyle::Braces,
_ => MacStmtStyle::NoBraces, _ => MacStmtStyle::NoBraces,
}; };
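The statement-macro hunk above now matches on `args.delim` directly instead of converting it first. A simplified sketch of the delimiter-to-statement-style mapping, with locally defined enums standing in for the rustc ones:

// Simplified sketch of mapping a macro call's delimiter to its statement
// style; these enums only mirror the shape of the rustc definitions.
#[derive(Clone, Copy, Debug, PartialEq)]
enum Delimiter {
    Parenthesis,
    Brace,
    Bracket,
}

#[derive(Debug, PartialEq)]
enum MacStmtStyle {
    Braces,
    NoBraces,
}

fn stmt_style(delim: Delimiter) -> MacStmtStyle {
    match delim {
        Delimiter::Brace => MacStmtStyle::Braces,
        _ => MacStmtStyle::NoBraces,
    }
}

fn main() {
    // A bracket- or paren-delimited macro statement keeps its trailing `;`...
    assert_eq!(stmt_style(Delimiter::Bracket), MacStmtStyle::NoBraces);
    // ...while a brace-delimited one stands on its own.
    assert_eq!(stmt_style(Delimiter::Brace), MacStmtStyle::Braces);
}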


@ -2,9 +2,10 @@
use crate::{errors, parse_in}; use crate::{errors, parse_in};
use rustc_ast::token::Delimiter;
use rustc_ast::tokenstream::DelimSpan; use rustc_ast::tokenstream::DelimSpan;
use rustc_ast::MetaItemKind; use rustc_ast::MetaItemKind;
use rustc_ast::{self as ast, AttrArgs, AttrArgsEq, Attribute, DelimArgs, MacDelimiter, MetaItem}; use rustc_ast::{self as ast, AttrArgs, AttrArgsEq, Attribute, DelimArgs, MetaItem};
use rustc_ast_pretty::pprust; use rustc_ast_pretty::pprust;
use rustc_errors::{Applicability, FatalError, PResult}; use rustc_errors::{Applicability, FatalError, PResult};
use rustc_feature::{AttributeTemplate, BuiltinAttribute, BUILTIN_ATTRIBUTE_MAP}; use rustc_feature::{AttributeTemplate, BuiltinAttribute, BUILTIN_ATTRIBUTE_MAP};
@ -84,8 +85,8 @@ pub fn parse_meta<'a>(sess: &'a ParseSess, attr: &Attribute) -> PResult<'a, Meta
}) })
} }
pub fn check_meta_bad_delim(sess: &ParseSess, span: DelimSpan, delim: MacDelimiter) { pub fn check_meta_bad_delim(sess: &ParseSess, span: DelimSpan, delim: Delimiter) {
if let ast::MacDelimiter::Parenthesis = delim { if let Delimiter::Parenthesis = delim {
return; return;
} }
sess.emit_err(errors::MetaBadDelim { sess.emit_err(errors::MetaBadDelim {
@ -94,8 +95,8 @@ pub fn check_meta_bad_delim(sess: &ParseSess, span: DelimSpan, delim: MacDelimit
}); });
} }
pub fn check_cfg_attr_bad_delim(sess: &ParseSess, span: DelimSpan, delim: MacDelimiter) { pub fn check_cfg_attr_bad_delim(sess: &ParseSess, span: DelimSpan, delim: Delimiter) {
if let ast::MacDelimiter::Parenthesis = delim { if let Delimiter::Parenthesis = delim {
return; return;
} }
sess.emit_err(errors::CfgAttrBadDelim { sess.emit_err(errors::CfgAttrBadDelim {
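Both delimiter checks above now take the token-level `Delimiter` directly. The following sketch keeps only the shape of the check, with a local `Delimiter` enum and a plain `Result` standing in for `ParseSess` and the real diagnostics machinery:

// Minimal sketch of the "meta lists must use parentheses" check; this is not
// the rustc API, just the same control flow in stand-alone form.
#[derive(Clone, Copy, Debug, PartialEq)]
enum Delimiter {
    Parenthesis,
    Brace,
    Bracket,
    Invisible,
}

fn check_meta_bad_delim(delim: Delimiter) -> Result<(), String> {
    if let Delimiter::Parenthesis = delim {
        return Ok(());
    }
    Err(format!("wrong meta list delimiters: {delim:?}, expected parentheses"))
}

fn main() {
    assert!(check_meta_bad_delim(Delimiter::Parenthesis).is_ok());
    assert!(check_meta_bad_delim(Delimiter::Brace).is_err());
    assert!(check_meta_bad_delim(Delimiter::Bracket).is_err());
}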


@ -11,6 +11,7 @@
#![allow(rustc::potential_query_instability, unused_parens)] #![allow(rustc::potential_query_instability, unused_parens)]
#![deny(rustc::untranslatable_diagnostic)] #![deny(rustc::untranslatable_diagnostic)]
#![deny(rustc::diagnostic_outside_of_impl)] #![deny(rustc::diagnostic_outside_of_impl)]
#![cfg_attr(not(bootstrap), allow(internal_features))]
#[macro_use] #[macro_use]
extern crate rustc_middle; extern crate rustc_middle;


@ -18,6 +18,7 @@
#![recursion_limit = "256"] #![recursion_limit = "256"]
#![allow(rustdoc::private_intra_doc_links)] #![allow(rustdoc::private_intra_doc_links)]
#![allow(rustc::potential_query_instability)] #![allow(rustc::potential_query_instability)]
#![cfg_attr(not(bootstrap), allow(internal_features))]
#[macro_use] #[macro_use]
extern crate tracing; extern crate tracing;


@ -10,6 +10,7 @@
#![allow(rustc::potential_query_instability)] #![allow(rustc::potential_query_instability)]
#![deny(rustc::untranslatable_diagnostic)] #![deny(rustc::untranslatable_diagnostic)]
#![deny(rustc::diagnostic_outside_of_impl)] #![deny(rustc::diagnostic_outside_of_impl)]
#![cfg_attr(not(bootstrap), allow(internal_features))]
#[macro_use] #[macro_use]
extern crate rustc_macros; extern crate rustc_macros;


@ -238,8 +238,9 @@ fn find_best_match_for_name_impl(
} }
fn find_match_by_sorted_words(iter_names: &[Symbol], lookup: &str) -> Option<Symbol> { fn find_match_by_sorted_words(iter_names: &[Symbol], lookup: &str) -> Option<Symbol> {
let lookup_sorted_by_words = sort_by_words(lookup);
iter_names.iter().fold(None, |result, candidate| { iter_names.iter().fold(None, |result, candidate| {
if sort_by_words(candidate.as_str()) == sort_by_words(lookup) { if sort_by_words(candidate.as_str()) == lookup_sorted_by_words {
Some(*candidate) Some(*candidate)
} else { } else {
result result
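The hunk above hoists the normalization of `lookup` out of the fold so it runs once rather than once per candidate. A stand-alone version of the same pattern is sketched below; the body of `sort_by_words` is an assumption made only so the example runs, not the actual rustc implementation.

// Stand-alone version of the loop-invariant hoisting pattern.
fn sort_by_words(name: &str) -> String {
    // Assumed normalization: split on `_`, sort the words, and rejoin.
    let mut words: Vec<&str> = name.split('_').collect();
    words.sort_unstable();
    words.join("_")
}

fn find_match_by_sorted_words<'a>(iter_names: &[&'a str], lookup: &str) -> Option<&'a str> {
    // Computed once; it does not depend on the loop variable.
    let lookup_sorted_by_words = sort_by_words(lookup);
    iter_names.iter().fold(None, |result, candidate| {
        if sort_by_words(candidate) == lookup_sorted_by_words {
            Some(*candidate)
        } else {
            result
        }
    })
}

fn main() {
    let names = ["sorted_words_find", "unrelated_name"];
    assert_eq!(
        find_match_by_sorted_words(&names, "find_sorted_words"),
        Some("sorted_words_find")
    );
    assert_eq!(find_match_by_sorted_words(&names, "nothing_like_it"), None);
}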


@ -23,6 +23,7 @@
#![feature(round_char_boundary)] #![feature(round_char_boundary)]
#![deny(rustc::untranslatable_diagnostic)] #![deny(rustc::untranslatable_diagnostic)]
#![deny(rustc::diagnostic_outside_of_impl)] #![deny(rustc::diagnostic_outside_of_impl)]
#![cfg_attr(not(bootstrap), allow(internal_features))]
#[macro_use] #[macro_use]
extern crate rustc_macros; extern crate rustc_macros;


@ -1155,8 +1155,10 @@ symbols! {
profiler_builtins, profiler_builtins,
profiler_runtime, profiler_runtime,
ptr, ptr,
ptr_cast,
ptr_cast_mut, ptr_cast_mut,
ptr_const_is_null, ptr_const_is_null,
ptr_from_mut,
ptr_from_ref, ptr_from_ref,
ptr_guaranteed_cmp, ptr_guaranteed_cmp,
ptr_is_null, ptr_is_null,


@ -628,7 +628,8 @@ impl<'tcx> Printer<'tcx> for &mut SymbolMangler<'tcx> {
valtree, ty valtree, ty
) )
}); });
let s = std::str::from_utf8(slice).expect("non utf8 str from miri"); let s = std::str::from_utf8(slice)
.expect("non utf8 str from MIR interpreter");
self.push("e"); self.push("e");


@ -19,6 +19,7 @@
#![feature(step_trait)] #![feature(step_trait)]
#![deny(rustc::untranslatable_diagnostic)] #![deny(rustc::untranslatable_diagnostic)]
#![deny(rustc::diagnostic_outside_of_impl)] #![deny(rustc::diagnostic_outside_of_impl)]
#![cfg_attr(not(bootstrap), allow(internal_features))]
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};


@ -987,9 +987,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
let target = self.infcx.shallow_resolve(target); let target = self.infcx.shallow_resolve(target);
debug!(?source, ?target, "confirm_builtin_unsize_candidate"); debug!(?source, ?target, "confirm_builtin_unsize_candidate");
let mut nested = vec![]; Ok(match (source.kind(), target.kind()) {
let src;
match (source.kind(), target.kind()) {
// Trait+Kx+'a -> Trait+Ky+'b (auto traits and lifetime subtyping). // Trait+Kx+'a -> Trait+Ky+'b (auto traits and lifetime subtyping).
(&ty::Dynamic(ref data_a, r_a, dyn_a), &ty::Dynamic(ref data_b, r_b, dyn_b)) (&ty::Dynamic(ref data_a, r_a, dyn_a), &ty::Dynamic(ref data_b, r_b, dyn_b))
if dyn_a == dyn_b => if dyn_a == dyn_b =>
@ -1016,16 +1014,15 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
// Require that the traits involved in this upcast are **equal**; // Require that the traits involved in this upcast are **equal**;
// only the **lifetime bound** is changed. // only the **lifetime bound** is changed.
let InferOk { obligations, .. } = self let InferOk { mut obligations, .. } = self
.infcx .infcx
.at(&obligation.cause, obligation.param_env) .at(&obligation.cause, obligation.param_env)
.sup(DefineOpaqueTypes::No, target, source_trait) .sup(DefineOpaqueTypes::No, target, source_trait)
.map_err(|_| Unimplemented)?; .map_err(|_| Unimplemented)?;
nested.extend(obligations);
// Register one obligation for 'a: 'b. // Register one obligation for 'a: 'b.
let outlives = ty::OutlivesPredicate(r_a, r_b); let outlives = ty::OutlivesPredicate(r_a, r_b);
nested.push(Obligation::with_depth( obligations.push(Obligation::with_depth(
tcx, tcx,
obligation.cause.clone(), obligation.cause.clone(),
obligation.recursion_depth + 1, obligation.recursion_depth + 1,
@ -1033,7 +1030,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
obligation.predicate.rebind(outlives), obligation.predicate.rebind(outlives),
)); ));
src = BuiltinImplSource::Misc; ImplSource::Builtin(BuiltinImplSource::Misc, obligations)
} }
// `T` -> `Trait` // `T` -> `Trait`
@ -1059,11 +1056,10 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
// words, if the object type is `Foo + Send`, this would create an obligation for // words, if the object type is `Foo + Send`, this would create an obligation for
// the `Send` check.) // the `Send` check.)
// - Projection predicates // - Projection predicates
nested.extend( let mut nested: Vec<_> = data
data.iter().map(|predicate| { .iter()
predicate_to_obligation(predicate.with_self_ty(tcx, source)) .map(|predicate| predicate_to_obligation(predicate.with_self_ty(tcx, source)))
}), .collect();
);
// We can only make objects from sized types. // We can only make objects from sized types.
let tr = ty::TraitRef::from_lang_item( let tr = ty::TraitRef::from_lang_item(
@ -1081,7 +1077,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
ty::Binder::dummy(ty::ClauseKind::TypeOutlives(outlives)).to_predicate(tcx), ty::Binder::dummy(ty::ClauseKind::TypeOutlives(outlives)).to_predicate(tcx),
)); ));
src = BuiltinImplSource::Misc; ImplSource::Builtin(BuiltinImplSource::Misc, nested)
} }
// `[T; n]` -> `[T]` // `[T; n]` -> `[T]`
@ -1091,9 +1087,8 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
.at(&obligation.cause, obligation.param_env) .at(&obligation.cause, obligation.param_env)
.eq(DefineOpaqueTypes::No, b, a) .eq(DefineOpaqueTypes::No, b, a)
.map_err(|_| Unimplemented)?; .map_err(|_| Unimplemented)?;
nested.extend(obligations);
src = BuiltinImplSource::Misc; ImplSource::Builtin(BuiltinImplSource::Misc, obligations)
} }
// `Struct<T>` -> `Struct<U>` // `Struct<T>` -> `Struct<U>`
@ -1106,6 +1101,8 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
let tail_field = def.non_enum_variant().tail(); let tail_field = def.non_enum_variant().tail();
let tail_field_ty = tcx.type_of(tail_field.did); let tail_field_ty = tcx.type_of(tail_field.did);
let mut nested = vec![];
// Extract `TailField<T>` and `TailField<U>` from `Struct<T>` and `Struct<U>`, // Extract `TailField<T>` and `TailField<U>` from `Struct<T>` and `Struct<U>`,
// normalizing in the process, since `type_of` returns something directly from // normalizing in the process, since `type_of` returns something directly from
// astconv (which means it's un-normalized). // astconv (which means it's un-normalized).
@ -1151,7 +1148,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
); );
nested.push(tail_unsize_obligation); nested.push(tail_unsize_obligation);
src = BuiltinImplSource::Misc; ImplSource::Builtin(BuiltinImplSource::Misc, nested)
} }
// `(.., T)` -> `(.., U)` // `(.., T)` -> `(.., U)`
@ -1166,27 +1163,24 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> {
// last element is equal to the target. // last element is equal to the target.
let new_tuple = let new_tuple =
Ty::new_tup_from_iter(tcx, a_mid.iter().copied().chain(iter::once(b_last))); Ty::new_tup_from_iter(tcx, a_mid.iter().copied().chain(iter::once(b_last)));
let InferOk { obligations, .. } = self let InferOk { mut obligations, .. } = self
.infcx .infcx
.at(&obligation.cause, obligation.param_env) .at(&obligation.cause, obligation.param_env)
.eq(DefineOpaqueTypes::No, target, new_tuple) .eq(DefineOpaqueTypes::No, target, new_tuple)
.map_err(|_| Unimplemented)?; .map_err(|_| Unimplemented)?;
nested.extend(obligations);
// Add a nested `T: Unsize<U>` predicate. // Add a nested `T: Unsize<U>` predicate.
let last_unsize_obligation = obligation.with( let last_unsize_obligation = obligation.with(
tcx, tcx,
ty::TraitRef::new(tcx, obligation.predicate.def_id(), [a_last, b_last]), ty::TraitRef::new(tcx, obligation.predicate.def_id(), [a_last, b_last]),
); );
nested.push(last_unsize_obligation); obligations.push(last_unsize_obligation);
src = BuiltinImplSource::TupleUnsizing; ImplSource::Builtin(BuiltinImplSource::TupleUnsizing, obligations)
} }
_ => bug!("source: {source}, target: {target}"), _ => bug!("source: {source}, target: {target}"),
}; })
Ok(ImplSource::Builtin(src, nested))
} }
fn confirm_const_destruct_candidate( fn confirm_const_destruct_candidate(
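The preceding hunks reshape `confirm_builtin_unsize_candidate` so that the match is an expression and each successful arm yields its own `ImplSource::Builtin(...)` value, rather than the arms pushing into shared `nested`/`src` locals that are assembled afterwards. A tiny sketch of that shape with made-up types (this is not the trait-selection API):

// Made-up types illustrating the refactor's shape: each arm builds the
// complete result instead of mutating shared locals combined at the end.
#[derive(Debug, PartialEq)]
enum Source {
    Misc,
    TupleUnsizing,
}

#[derive(Debug, PartialEq)]
struct Confirmation {
    source: Source,
    nested: Vec<&'static str>,
}

fn confirm(kind: &str) -> Result<Confirmation, String> {
    Ok(match kind {
        "trait-upcast" => {
            let mut nested = vec!["subtype obligation"];
            nested.push("outlives obligation");
            Confirmation { source: Source::Misc, nested }
        }
        "tuple" => {
            let mut nested = vec!["equate obligation"];
            nested.push("last-element unsize obligation");
            Confirmation { source: Source::TupleUnsizing, nested }
        }
        other => return Err(format!("unexpected kind: {other}")),
    })
}

fn main() {
    assert_eq!(confirm("trait-upcast").unwrap().source, Source::Misc);
    assert_eq!(
        confirm("tuple").unwrap().nested,
        vec!["equate obligation", "last-element unsize obligation"]
    );
    assert!(confirm("array").is_err());
}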

Some files were not shown because too many files have changed in this diff.