Mirror of https://github.com/rust-lang/rust.git, synced 2024-11-27 17:24:06 +00:00
Auto merge of #3197 - RalfJung:rustup, r=RalfJung
Rustup; also move some fail tests into suitable subdirectories.
This commit is contained in:
commit d368c4043c

.github/workflows/ci.yml (vendored): 6 changes
@@ -56,7 +56,7 @@ jobs:
- name: mingw-check-tidy
os: ubuntu-20.04-4core-16gb
env: {}
- name: x86_64-gnu-llvm-15
- name: x86_64-gnu-llvm-16
env:
ENABLE_GCC_CODEGEN: "1"
os: ubuntu-20.04-16core-64gb
@@ -302,10 +302,6 @@ jobs:
env:
RUST_BACKTRACE: 1
os: ubuntu-20.04-8core-32gb
- name: x86_64-gnu-llvm-15
env:
RUST_BACKTRACE: 1
os: ubuntu-20.04-8core-32gb
- name: x86_64-gnu-nopt
os: ubuntu-20.04-4core-16gb
env: {}
.mailmap: 3 changes
@@ -575,7 +575,8 @@ Val Markovic <val@markovic.io>
Valerii Lashmanov <vflashm@gmail.com>
Vitali Haravy <HumaneProgrammer@gmail.com> Vitali Haravy <humaneprogrammer@gmail.com>
Vitaly Shukela <vi0oss@gmail.com>
Waffle Maybe <waffle.lapkin@gmail.com>
Waffle Lapkin <waffle.lapkin@gmail.com>
Waffle Lapkin <waffle.lapkin@tasking.com>
Wesley Wiser <wwiser@gmail.com> <wesleywiser@microsoft.com>
whitequark <whitequark@whitequark.org>
William Ting <io@williamting.com> <william.h.ting@gmail.com>
@@ -645,6 +645,7 @@ impl Pat {
// These patterns do not contain subpatterns, skip.
PatKind::Wild
| PatKind::Rest
| PatKind::Never
| PatKind::Lit(_)
| PatKind::Range(..)
| PatKind::Ident(..)
@@ -795,6 +796,9 @@ pub enum PatKind {
/// only one rest pattern may occur in the pattern sequences.
Rest,

// A never pattern `!`
Never,

/// Parentheses in patterns used for grouping (i.e., `(PAT)`).
Paren(P<Pat>),
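The new `PatKind::Never` variant is the AST node for the experimental `!` ("never") pattern that the rest of this diff threads through the visitors, the lowering code, and the pretty printer. As a rough illustration of the gap it targets, here is a stable-Rust sketch (ordinary syntax only, no gated `!` pattern; `Void` and `unwrap_ok` are invented names) of how matching on an uninhabited error type has to be spelled out today:

    // Illustrative sketch in stable Rust: extracting a value from a `Result`
    // whose error type is uninhabited still needs an explicit arm. The `!`
    // pattern behind the `never_patterns` gate added in this diff is meant to
    // let such arms state their impossibility directly.
    enum Void {}

    fn unwrap_ok<T>(res: Result<T, Void>) -> T {
        match res {
            Ok(value) => value,
            // `v` has an uninhabited type, so this inner match has no arms.
            Err(v) => match v {},
        }
    }

    fn main() {
        let res: Result<u32, Void> = Ok(7);
        println!("{}", unwrap_ok(res));
    }
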
@@ -817,7 +821,7 @@ pub enum BorrowKind {
Raw,
}

#[derive(Clone, PartialEq, Encodable, Decodable, Debug, Copy)]
#[derive(Clone, Copy, Debug, PartialEq, Encodable, Decodable, HashStable_Generic)]
pub enum BinOpKind {
/// The `+` operator (addition)
Add,
@@ -858,9 +862,9 @@ pub enum BinOpKind {
}

impl BinOpKind {
pub fn to_string(&self) -> &'static str {
pub fn as_str(&self) -> &'static str {
use BinOpKind::*;
match *self {
match self {
Add => "+",
Sub => "-",
Mul => "*",
@@ -881,19 +885,25 @@ impl BinOpKind {
Gt => ">",
}
}
pub fn lazy(&self) -> bool {

pub fn is_lazy(&self) -> bool {
matches!(self, BinOpKind::And | BinOpKind::Or)
}

pub fn is_comparison(&self) -> bool {
use BinOpKind::*;
// Note for developers: please keep this as is;
// Note for developers: please keep this match exhaustive;
// we want compilation to fail if another variant is added.
match *self {
Eq | Lt | Le | Ne | Gt | Ge => true,
And | Or | Add | Sub | Mul | Div | Rem | BitXor | BitAnd | BitOr | Shl | Shr => false,
}
}

/// Returns `true` if the binary operator takes its arguments by value.
pub fn is_by_value(self) -> bool {
!self.is_comparison()
}
}

pub type BinOp = Spanned<BinOpKind>;
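The hunk above renames `BinOpKind::to_string` to `as_str`, adds `is_lazy`, and rewords the comment to stress that `is_comparison` stays an exhaustive match. A minimal standalone sketch of the same API shape (a toy `MiniBinOp`, not rustc's type) shows why returning `&'static str` and omitting a wildcard arm are useful:

    // Minimal stand-in for the pattern used above; this is not rustc's
    // `BinOpKind`, just an illustration of the same API shape.
    #[allow(dead_code)]
    #[derive(Clone, Copy)]
    enum MiniBinOp {
        Add,
        And,
        Or,
        Eq,
    }

    impl MiniBinOp {
        // Returning `&'static str` avoids allocating, unlike a
        // `to_string`-style helper that hands back an owned `String`.
        fn as_str(self) -> &'static str {
            match self {
                MiniBinOp::Add => "+",
                MiniBinOp::And => "&&",
                MiniBinOp::Or => "||",
                MiniBinOp::Eq => "==",
            }
        }

        fn is_lazy(self) -> bool {
            matches!(self, MiniBinOp::And | MiniBinOp::Or)
        }

        // Deliberately no `_ =>` arm: adding a variant to `MiniBinOp` makes
        // this stop compiling until the new case is classified, which is the
        // property the comment in the diff asks for.
        fn is_comparison(self) -> bool {
            match self {
                MiniBinOp::Eq => true,
                MiniBinOp::Add | MiniBinOp::And | MiniBinOp::Or => false,
            }
        }
    }

    fn main() {
        let op = MiniBinOp::And;
        println!("{} lazy={} cmp={}", op.as_str(), op.is_lazy(), op.is_comparison());
    }
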
@@ -901,7 +911,7 @@ pub type BinOp = Spanned<BinOpKind>;
/// Unary operator.
///
/// Note that `&data` is not an operator, it's an `AddrOf` expression.
#[derive(Clone, Encodable, Decodable, Debug, Copy)]
#[derive(Clone, Copy, Debug, PartialEq, Encodable, Decodable, HashStable_Generic)]
pub enum UnOp {
/// The `*` operator for dereferencing
Deref,
@@ -912,13 +922,18 @@ pub enum UnOp {
}

impl UnOp {
pub fn to_string(op: UnOp) -> &'static str {
match op {
pub fn as_str(&self) -> &'static str {
match self {
UnOp::Deref => "*",
UnOp::Not => "!",
UnOp::Neg => "-",
}
}

/// Returns `true` if the unary operator takes its argument by value.
pub fn is_by_value(self) -> bool {
matches!(self, Self::Neg | Self::Not)
}
}

/// A statement
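`UnOp` gets the same treatment as `BinOpKind`, and both enums additionally derive `PartialEq` and `HashStable_Generic`. One payoff shows up later in this diff, where `lower_binop` stops translating between an AST and an HIR operator enum and simply passes the value through. A hedged toy sketch of that idea (module and type names below are invented, not the compiler's):

    // Sketch of why the extra derives matter: once the AST operator type is
    // `Copy + PartialEq` (and, in rustc, `HashStable_Generic`), a later IR can
    // store it directly instead of defining a parallel enum and converting.
    mod ast {
        #[allow(dead_code)]
        #[derive(Clone, Copy, Debug, PartialEq)]
        pub enum BinOpKind {
            Add,
            Sub,
        }
    }

    mod hir {
        // Reuse the AST type instead of mirroring it, as the `lower_binop`
        // hunk further down in this diff does for the real compiler.
        pub type BinOpKind = crate::ast::BinOpKind;
    }

    fn lower(op: ast::BinOpKind) -> hir::BinOpKind {
        // No many-armed match needed; the value is passed through unchanged.
        op
    }

    fn main() {
        assert_eq!(lower(ast::BinOpKind::Add), ast::BinOpKind::Add);
    }
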
@@ -1249,7 +1249,7 @@ pub fn noop_visit_pat<T: MutVisitor>(pat: &mut P<Pat>, vis: &mut T) {
let Pat { id, kind, span, tokens } = pat.deref_mut();
vis.visit_id(id);
match kind {
PatKind::Wild | PatKind::Rest => {}
PatKind::Wild | PatKind::Rest | PatKind::Never => {}
PatKind::Ident(_binding_mode, ident, sub) => {
vis.visit_ident(ident);
visit_opt(sub, |sub| vis.visit_pat(sub));
@@ -756,6 +756,11 @@ impl Token {
)
}

/// Returns `true` if the token is the integer literal.
pub fn is_integer_lit(&self) -> bool {
matches!(self.kind, Literal(Lit { kind: LitKind::Integer, .. }))
}

/// Returns `true` if the token is a non-raw identifier for which `pred` holds.
pub fn is_non_raw_ident_where(&self, pred: impl FnOnce(Ident) -> bool) -> bool {
match self.ident() {
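`Token::is_integer_lit` is a one-line helper built on `matches!` with a struct pattern. A self-contained toy version of the same idiom (the `Token`, `Lit`, and kind types below are simplified stand-ins, not rustc's):

    // Toy version of the `is_integer_lit` helper: `matches!` with a struct
    // pattern and `..` ignores the fields we don't care about.
    #[allow(dead_code)]
    struct Lit {
        kind: LitKind,
        symbol: &'static str,
    }

    #[allow(dead_code)]
    enum LitKind {
        Integer,
        Float,
    }

    enum TokenKind {
        Literal(Lit),
        Ident(&'static str),
    }

    struct Token {
        kind: TokenKind,
    }

    impl Token {
        fn is_integer_lit(&self) -> bool {
            matches!(self.kind, TokenKind::Literal(Lit { kind: LitKind::Integer, .. }))
        }
    }

    fn main() {
        let tok = Token { kind: TokenKind::Literal(Lit { kind: LitKind::Integer, symbol: "42" }) };
        assert!(tok.is_integer_lit());
        let tok = Token { kind: TokenKind::Ident("x") };
        assert!(!tok.is_integer_lit());
    }
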
@@ -559,7 +559,7 @@ pub fn walk_pat<'a, V: Visitor<'a>>(visitor: &mut V, pattern: &'a Pat) {
walk_list!(visitor, visit_expr, lower_bound);
walk_list!(visitor, visit_expr, upper_bound);
}
PatKind::Wild | PatKind::Rest => {}
PatKind::Wild | PatKind::Rest | PatKind::Never => {}
PatKind::Tuple(elems) | PatKind::Slice(elems) | PatKind::Or(elems) => {
walk_list!(visitor, visit_pat, elems);
}
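Both AST visitors now treat `PatKind::Never` as a leaf with no subpatterns, mirroring `Wild` and `Rest`. A toy walker shows the shape of that recursion (the `MiniPat` type is invented; the real visitors use `walk_list!` and the `Visitor` trait):

    // Toy pattern walker: leaf kinds contribute nothing to recurse into,
    // compound kinds recurse into their element lists, and the match stays
    // exhaustive so adding a variant forces every walker to be revisited.
    enum MiniPat {
        Wild,
        Never,
        Tuple(Vec<MiniPat>),
    }

    fn count_leaves(pat: &MiniPat) -> usize {
        match pat {
            // Leaf patterns: nothing to recurse into.
            MiniPat::Wild | MiniPat::Never => 1,
            MiniPat::Tuple(elems) => elems.iter().map(count_leaves).sum(),
        }
    }

    fn main() {
        let pat = MiniPat::Tuple(vec![MiniPat::Wild, MiniPat::Never]);
        assert_eq!(count_leaves(&pat), 2);
    }
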
@ -228,6 +228,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
||||
parent_def_id.def_id,
|
||||
node_id,
|
||||
DefPathData::AnonConst,
|
||||
DefKind::AnonConst,
|
||||
*op_sp,
|
||||
);
|
||||
let anon_const = AnonConst { id: node_id, value: P(expr) };
|
||||
|
@ -12,7 +12,7 @@ use rustc_ast::ptr::P as AstP;
|
||||
use rustc_ast::*;
|
||||
use rustc_data_structures::stack::ensure_sufficient_stack;
|
||||
use rustc_hir as hir;
|
||||
use rustc_hir::def::Res;
|
||||
use rustc_hir::def::{DefKind, Res};
|
||||
use rustc_hir::definitions::DefPathData;
|
||||
use rustc_session::errors::report_lit_error;
|
||||
use rustc_span::source_map::{respan, Spanned};
|
||||
@ -72,7 +72,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
||||
let kind = match &e.kind {
|
||||
ExprKind::Array(exprs) => hir::ExprKind::Array(self.lower_exprs(exprs)),
|
||||
ExprKind::ConstBlock(c) => {
|
||||
let c = self.with_new_scopes(|this| hir::ConstBlock {
|
||||
let c = self.with_new_scopes(c.value.span, |this| hir::ConstBlock {
|
||||
def_id: this.local_def_id(c.id),
|
||||
hir_id: this.lower_node_id(c.id),
|
||||
body: this.lower_const_body(c.value.span, Some(&c.value)),
|
||||
@ -189,7 +189,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
||||
None,
|
||||
e.span,
|
||||
hir::CoroutineSource::Block,
|
||||
|this| this.with_new_scopes(|this| this.lower_block_expr(block)),
|
||||
|this| this.with_new_scopes(e.span, |this| this.lower_block_expr(block)),
|
||||
),
|
||||
ExprKind::Await(expr, await_kw_span) => self.lower_expr_await(*await_kw_span, expr),
|
||||
ExprKind::Closure(box Closure {
|
||||
@ -323,7 +323,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
||||
None,
|
||||
e.span,
|
||||
hir::CoroutineSource::Block,
|
||||
|this| this.with_new_scopes(|this| this.lower_block_expr(block)),
|
||||
|this| this.with_new_scopes(e.span, |this| this.lower_block_expr(block)),
|
||||
),
|
||||
ExprKind::Yield(opt_expr) => self.lower_expr_yield(e.span, opt_expr.as_deref()),
|
||||
ExprKind::Err => hir::ExprKind::Err(
|
||||
@ -350,30 +350,8 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
||||
}
|
||||
}
|
||||
|
||||
fn lower_binop(&mut self, b: BinOp) -> hir::BinOp {
|
||||
Spanned {
|
||||
node: match b.node {
|
||||
BinOpKind::Add => hir::BinOpKind::Add,
|
||||
BinOpKind::Sub => hir::BinOpKind::Sub,
|
||||
BinOpKind::Mul => hir::BinOpKind::Mul,
|
||||
BinOpKind::Div => hir::BinOpKind::Div,
|
||||
BinOpKind::Rem => hir::BinOpKind::Rem,
|
||||
BinOpKind::And => hir::BinOpKind::And,
|
||||
BinOpKind::Or => hir::BinOpKind::Or,
|
||||
BinOpKind::BitXor => hir::BinOpKind::BitXor,
|
||||
BinOpKind::BitAnd => hir::BinOpKind::BitAnd,
|
||||
BinOpKind::BitOr => hir::BinOpKind::BitOr,
|
||||
BinOpKind::Shl => hir::BinOpKind::Shl,
|
||||
BinOpKind::Shr => hir::BinOpKind::Shr,
|
||||
BinOpKind::Eq => hir::BinOpKind::Eq,
|
||||
BinOpKind::Lt => hir::BinOpKind::Lt,
|
||||
BinOpKind::Le => hir::BinOpKind::Le,
|
||||
BinOpKind::Ne => hir::BinOpKind::Ne,
|
||||
BinOpKind::Ge => hir::BinOpKind::Ge,
|
||||
BinOpKind::Gt => hir::BinOpKind::Gt,
|
||||
},
|
||||
span: self.lower_span(b.span),
|
||||
}
|
||||
fn lower_binop(&mut self, b: BinOp) -> BinOp {
|
||||
Spanned { node: b.node, span: self.lower_span(b.span) }
|
||||
}
|
||||
|
||||
fn lower_legacy_const_generics(
|
||||
@ -395,7 +373,13 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
||||
let node_id = self.next_node_id();
|
||||
|
||||
// Add a definition for the in-band const def.
|
||||
self.create_def(parent_def_id.def_id, node_id, DefPathData::AnonConst, f.span);
|
||||
self.create_def(
|
||||
parent_def_id.def_id,
|
||||
node_id,
|
||||
DefPathData::AnonConst,
|
||||
DefKind::AnonConst,
|
||||
f.span,
|
||||
);
|
||||
|
||||
let anon_const = AnonConst { id: node_id, value: arg };
|
||||
generic_args.push(AngleBracketedArg::Arg(GenericArg::Const(anon_const)));
|
||||
@ -524,7 +508,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
||||
this.mark_span_with_reason(
|
||||
DesugaringKind::TryBlock,
|
||||
expr.span,
|
||||
this.allow_try_trait.clone(),
|
||||
Some(this.allow_try_trait.clone()),
|
||||
),
|
||||
expr,
|
||||
)
|
||||
@ -532,7 +516,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
||||
let try_span = this.mark_span_with_reason(
|
||||
DesugaringKind::TryBlock,
|
||||
this.tcx.sess.source_map().end_point(body.span),
|
||||
this.allow_try_trait.clone(),
|
||||
Some(this.allow_try_trait.clone()),
|
||||
);
|
||||
|
||||
(try_span, this.expr_unit(try_span))
|
||||
@ -612,8 +596,11 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
||||
let output = ret_ty.unwrap_or_else(|| hir::FnRetTy::DefaultReturn(self.lower_span(span)));
|
||||
|
||||
// Resume argument type: `ResumeTy`
|
||||
let unstable_span =
|
||||
self.mark_span_with_reason(DesugaringKind::Async, span, self.allow_gen_future.clone());
|
||||
let unstable_span = self.mark_span_with_reason(
|
||||
DesugaringKind::Async,
|
||||
span,
|
||||
Some(self.allow_gen_future.clone()),
|
||||
);
|
||||
let resume_ty = hir::QPath::LangItem(hir::LangItem::ResumeTy, unstable_span);
|
||||
let input_ty = hir::Ty {
|
||||
hir_id: self.next_id(),
|
||||
@ -735,7 +722,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
||||
let unstable_span = self.mark_span_with_reason(
|
||||
DesugaringKind::Async,
|
||||
span,
|
||||
self.allow_gen_future.clone(),
|
||||
Some(self.allow_gen_future.clone()),
|
||||
);
|
||||
self.lower_attrs(
|
||||
inner_hir_id,
|
||||
@ -772,17 +759,17 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
||||
match self.coroutine_kind {
|
||||
Some(hir::CoroutineKind::Async(_)) => {}
|
||||
Some(hir::CoroutineKind::Coroutine) | Some(hir::CoroutineKind::Gen(_)) | None => {
|
||||
self.tcx.sess.emit_err(AwaitOnlyInAsyncFnAndBlocks {
|
||||
return hir::ExprKind::Err(self.tcx.sess.emit_err(AwaitOnlyInAsyncFnAndBlocks {
|
||||
await_kw_span,
|
||||
item_span: self.current_item,
|
||||
});
|
||||
}));
|
||||
}
|
||||
}
|
||||
let span = self.mark_span_with_reason(DesugaringKind::Await, await_kw_span, None);
|
||||
let gen_future_span = self.mark_span_with_reason(
|
||||
DesugaringKind::Await,
|
||||
full_span,
|
||||
self.allow_gen_future.clone(),
|
||||
Some(self.allow_gen_future.clone()),
|
||||
);
|
||||
let expr = self.lower_expr_mut(expr);
|
||||
let expr_hir_id = expr.hir_id;
|
||||
@ -935,9 +922,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
||||
) -> hir::ExprKind<'hir> {
|
||||
let (binder_clause, generic_params) = self.lower_closure_binder(binder);
|
||||
|
||||
let (body_id, coroutine_option) = self.with_new_scopes(move |this| {
|
||||
let prev = this.current_item;
|
||||
this.current_item = Some(fn_decl_span);
|
||||
let (body_id, coroutine_option) = self.with_new_scopes(fn_decl_span, move |this| {
|
||||
let mut coroutine_kind = None;
|
||||
let body_id = this.lower_fn_body(decl, |this| {
|
||||
let e = this.lower_expr_mut(body);
|
||||
@ -946,7 +931,6 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
||||
});
|
||||
let coroutine_option =
|
||||
this.coroutine_movability_for_fn(decl, fn_decl_span, coroutine_kind, movability);
|
||||
this.current_item = prev;
|
||||
(body_id, coroutine_option)
|
||||
});
|
||||
|
||||
@ -1032,7 +1016,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
||||
let outer_decl =
|
||||
FnDecl { inputs: decl.inputs.clone(), output: FnRetTy::Default(fn_decl_span) };
|
||||
|
||||
let body = self.with_new_scopes(|this| {
|
||||
let body = self.with_new_scopes(fn_decl_span, |this| {
|
||||
// FIXME(cramertj): allow `async` non-`move` closures with arguments.
|
||||
if capture_clause == CaptureBy::Ref && !decl.inputs.is_empty() {
|
||||
this.tcx.sess.emit_err(AsyncNonMoveClosureNotSupported { fn_decl_span });
|
||||
@ -1054,7 +1038,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
||||
async_ret_ty,
|
||||
body.span,
|
||||
hir::CoroutineSource::Closure,
|
||||
|this| this.with_new_scopes(|this| this.lower_expr_mut(body)),
|
||||
|this| this.with_new_scopes(fn_decl_span, |this| this.lower_expr_mut(body)),
|
||||
);
|
||||
let hir_id = this.lower_node_id(inner_closure_id);
|
||||
this.maybe_forward_track_caller(body.span, closure_hir_id, hir_id);
|
||||
@ -1494,7 +1478,9 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
||||
match self.coroutine_kind {
|
||||
Some(hir::CoroutineKind::Gen(_)) => {}
|
||||
Some(hir::CoroutineKind::Async(_)) => {
|
||||
self.tcx.sess.emit_err(AsyncCoroutinesNotSupported { span });
|
||||
return hir::ExprKind::Err(
|
||||
self.tcx.sess.emit_err(AsyncCoroutinesNotSupported { span }),
|
||||
);
|
||||
}
|
||||
Some(hir::CoroutineKind::Coroutine) | None => {
|
||||
if !self.tcx.features().coroutines {
|
||||
@ -1640,13 +1626,13 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
||||
let unstable_span = self.mark_span_with_reason(
|
||||
DesugaringKind::QuestionMark,
|
||||
span,
|
||||
self.allow_try_trait.clone(),
|
||||
Some(self.allow_try_trait.clone()),
|
||||
);
|
||||
let try_span = self.tcx.sess.source_map().end_point(span);
|
||||
let try_span = self.mark_span_with_reason(
|
||||
DesugaringKind::QuestionMark,
|
||||
try_span,
|
||||
self.allow_try_trait.clone(),
|
||||
Some(self.allow_try_trait.clone()),
|
||||
);
|
||||
|
||||
// `Try::branch(<expr>)`
|
||||
@ -1739,7 +1725,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
||||
let unstable_span = self.mark_span_with_reason(
|
||||
DesugaringKind::YeetExpr,
|
||||
span,
|
||||
self.allow_try_trait.clone(),
|
||||
Some(self.allow_try_trait.clone()),
|
||||
);
|
||||
|
||||
let from_yeet_expr = self.wrap_in_try_constructor(
|
||||
|
@ -10,7 +10,7 @@ use rustc_middle::ty::TyCtxt;
|
||||
use rustc_span::{Span, DUMMY_SP};
|
||||
|
||||
/// A visitor that walks over the HIR and collects `Node`s into a HIR map.
|
||||
pub(super) struct NodeCollector<'a, 'hir> {
|
||||
struct NodeCollector<'a, 'hir> {
|
||||
tcx: TyCtxt<'hir>,
|
||||
|
||||
bodies: &'a SortedMap<ItemLocalId, &'hir Body<'hir>>,
|
||||
|
@ -7,7 +7,6 @@ use hir::definitions::DefPathData;
|
||||
use rustc_ast::ptr::P;
|
||||
use rustc_ast::visit::AssocCtxt;
|
||||
use rustc_ast::*;
|
||||
use rustc_data_structures::sorted_map::SortedMap;
|
||||
use rustc_errors::ErrorGuaranteed;
|
||||
use rustc_hir as hir;
|
||||
use rustc_hir::def::{DefKind, Res};
|
||||
@ -55,42 +54,7 @@ impl<'a, 'hir> ItemLowerer<'a, 'hir> {
|
||||
owner: NodeId,
|
||||
f: impl FnOnce(&mut LoweringContext<'_, 'hir>) -> hir::OwnerNode<'hir>,
|
||||
) {
|
||||
let allow_gen_future = Some(if self.tcx.features().async_fn_track_caller {
|
||||
[sym::gen_future, sym::closure_track_caller][..].into()
|
||||
} else {
|
||||
[sym::gen_future][..].into()
|
||||
});
|
||||
let mut lctx = LoweringContext {
|
||||
// Pseudo-globals.
|
||||
tcx: self.tcx,
|
||||
resolver: self.resolver,
|
||||
arena: self.tcx.hir_arena,
|
||||
|
||||
// HirId handling.
|
||||
bodies: Vec::new(),
|
||||
attrs: SortedMap::default(),
|
||||
children: Vec::default(),
|
||||
current_hir_id_owner: hir::CRATE_OWNER_ID,
|
||||
item_local_id_counter: hir::ItemLocalId::new(0),
|
||||
node_id_to_local_id: Default::default(),
|
||||
trait_map: Default::default(),
|
||||
|
||||
// Lowering state.
|
||||
catch_scope: None,
|
||||
loop_scope: None,
|
||||
is_in_loop_condition: false,
|
||||
is_in_trait_impl: false,
|
||||
is_in_dyn_type: false,
|
||||
coroutine_kind: None,
|
||||
task_context: None,
|
||||
current_item: None,
|
||||
impl_trait_defs: Vec::new(),
|
||||
impl_trait_bounds: Vec::new(),
|
||||
allow_try_trait: Some([sym::try_trait_v2, sym::yeet_desugar_details][..].into()),
|
||||
allow_gen_future,
|
||||
generics_def_id_map: Default::default(),
|
||||
host_param_id: None,
|
||||
};
|
||||
let mut lctx = LoweringContext::new(self.tcx, self.resolver);
|
||||
lctx.with_hir_id_owner(owner, |lctx| f(lctx));
|
||||
|
||||
for (def_id, info) in lctx.children {
|
||||
@ -136,39 +100,9 @@ impl<'a, 'hir> ItemLowerer<'a, 'hir> {
|
||||
|
||||
fn lower_assoc_item(&mut self, item: &AssocItem, ctxt: AssocCtxt) {
|
||||
let def_id = self.resolver.node_id_to_def_id[&item.id];
|
||||
|
||||
let parent_id = self.tcx.local_parent(def_id);
|
||||
let parent_hir = self.lower_node(parent_id).unwrap();
|
||||
self.with_lctx(item.id, |lctx| {
|
||||
// Evaluate with the lifetimes in `params` in-scope.
|
||||
// This is used to track which lifetimes have already been defined,
|
||||
// and which need to be replicated when lowering an async fn.
|
||||
|
||||
match parent_hir.node().expect_item().kind {
|
||||
hir::ItemKind::Impl(impl_) => {
|
||||
lctx.is_in_trait_impl = impl_.of_trait.is_some();
|
||||
}
|
||||
hir::ItemKind::Trait(_, _, generics, _, _) if lctx.tcx.features().effects => {
|
||||
lctx.host_param_id = generics
|
||||
.params
|
||||
.iter()
|
||||
.find(|param| {
|
||||
parent_hir
|
||||
.attrs
|
||||
.get(param.hir_id.local_id)
|
||||
.iter()
|
||||
.any(|attr| attr.has_name(sym::rustc_host))
|
||||
})
|
||||
.map(|param| param.def_id);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
||||
match ctxt {
|
||||
AssocCtxt::Trait => hir::OwnerNode::TraitItem(lctx.lower_trait_item(item)),
|
||||
AssocCtxt::Impl => hir::OwnerNode::ImplItem(lctx.lower_impl_item(item)),
|
||||
}
|
||||
})
|
||||
self.with_lctx(item.id, |lctx| lctx.lower_assoc_item(item, ctxt, parent_hir))
|
||||
}
|
||||
|
||||
fn lower_foreign_item(&mut self, item: &ForeignItem) {
|
||||
@ -268,9 +202,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
||||
body,
|
||||
..
|
||||
}) => {
|
||||
self.with_new_scopes(|this| {
|
||||
this.current_item = Some(ident.span);
|
||||
|
||||
self.with_new_scopes(ident.span, |this| {
|
||||
// Note: we don't need to change the return type from `T` to
|
||||
// `impl Future<Output = T>` here because lower_body
|
||||
// only cares about the input argument patterns in the function
|
||||
@ -480,7 +412,9 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
||||
}
|
||||
ItemKind::MacroDef(MacroDef { body, macro_rules }) => {
|
||||
let body = P(self.lower_delim_args(body));
|
||||
let macro_kind = self.resolver.decl_macro_kind(self.local_def_id(id));
|
||||
let DefKind::Macro(macro_kind) = self.tcx.def_kind(self.local_def_id(id)) else {
|
||||
unreachable!()
|
||||
};
|
||||
let macro_def = self.arena.alloc(ast::MacroDef { body, macro_rules: *macro_rules });
|
||||
hir::ItemKind::Macro(macro_def, macro_kind)
|
||||
}
|
||||
@ -609,6 +543,42 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
||||
}
|
||||
}
|
||||
|
||||
fn lower_assoc_item(
|
||||
&mut self,
|
||||
item: &AssocItem,
|
||||
ctxt: AssocCtxt,
|
||||
parent_hir: &'hir hir::OwnerInfo<'hir>,
|
||||
) -> hir::OwnerNode<'hir> {
|
||||
// Evaluate with the lifetimes in `params` in-scope.
|
||||
// This is used to track which lifetimes have already been defined,
|
||||
// and which need to be replicated when lowering an async fn.
|
||||
|
||||
match parent_hir.node().expect_item().kind {
|
||||
hir::ItemKind::Impl(impl_) => {
|
||||
self.is_in_trait_impl = impl_.of_trait.is_some();
|
||||
}
|
||||
hir::ItemKind::Trait(_, _, generics, _, _) if self.tcx.features().effects => {
|
||||
self.host_param_id = generics
|
||||
.params
|
||||
.iter()
|
||||
.find(|param| {
|
||||
parent_hir
|
||||
.attrs
|
||||
.get(param.hir_id.local_id)
|
||||
.iter()
|
||||
.any(|attr| attr.has_name(sym::rustc_host))
|
||||
})
|
||||
.map(|param| param.def_id);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
||||
match ctxt {
|
||||
AssocCtxt::Trait => hir::OwnerNode::TraitItem(self.lower_trait_item(item)),
|
||||
AssocCtxt::Impl => hir::OwnerNode::ImplItem(self.lower_impl_item(item)),
|
||||
}
|
||||
}
|
||||
|
||||
fn lower_foreign_item(&mut self, i: &ForeignItem) -> &'hir hir::ForeignItem<'hir> {
|
||||
let hir_id = self.lower_node_id(i.id);
|
||||
let owner_id = hir_id.expect_owner();
|
||||
@ -865,7 +835,6 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
||||
},
|
||||
),
|
||||
AssocItemKind::Fn(box Fn { sig, generics, body, .. }) => {
|
||||
self.current_item = Some(i.span);
|
||||
let asyncness = sig.header.asyncness;
|
||||
let body_id = self.lower_maybe_async_body(
|
||||
i.span,
|
||||
@ -1399,6 +1368,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
||||
self.local_def_id(parent_node_id),
|
||||
param_node_id,
|
||||
DefPathData::TypeNs(sym::host),
|
||||
DefKind::ConstParam,
|
||||
span,
|
||||
);
|
||||
self.host_param_id = Some(def_id);
|
||||
@ -1457,8 +1427,13 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
||||
|
||||
if let Some((span, hir_id, def_id)) = host_param_parts {
|
||||
let const_node_id = self.next_node_id();
|
||||
let anon_const: LocalDefId =
|
||||
self.create_def(def_id, const_node_id, DefPathData::AnonConst, span);
|
||||
let anon_const = self.create_def(
|
||||
def_id,
|
||||
const_node_id,
|
||||
DefPathData::AnonConst,
|
||||
DefKind::AnonConst,
|
||||
span,
|
||||
);
|
||||
|
||||
let const_id = self.next_id();
|
||||
let const_expr_id = self.next_id();
|
||||
|
@ -35,7 +35,6 @@
|
||||
#![doc(rust_logo)]
|
||||
#![feature(box_patterns)]
|
||||
#![feature(let_chains)]
|
||||
#![feature(never_type)]
|
||||
#![recursion_limit = "256"]
|
||||
#![deny(rustc::untranslatable_diagnostic)]
|
||||
#![deny(rustc::diagnostic_outside_of_impl)]
|
||||
@ -67,7 +66,6 @@ use rustc_middle::{
|
||||
ty::{ResolverAstLowering, TyCtxt},
|
||||
};
|
||||
use rustc_session::parse::{add_feature_diagnostics, feature_err};
|
||||
use rustc_span::hygiene::MacroKind;
|
||||
use rustc_span::symbol::{kw, sym, Ident, Symbol};
|
||||
use rustc_span::{DesugaringKind, Span, DUMMY_SP};
|
||||
use smallvec::SmallVec;
|
||||
@ -133,8 +131,8 @@ struct LoweringContext<'a, 'hir> {
|
||||
/// NodeIds that are lowered inside the current HIR owner.
|
||||
node_id_to_local_id: FxHashMap<NodeId, hir::ItemLocalId>,
|
||||
|
||||
allow_try_trait: Option<Lrc<[Symbol]>>,
|
||||
allow_gen_future: Option<Lrc<[Symbol]>>,
|
||||
allow_try_trait: Lrc<[Symbol]>,
|
||||
allow_gen_future: Lrc<[Symbol]>,
|
||||
|
||||
/// Mapping from generics `def_id`s to TAIT generics `def_id`s.
|
||||
/// For each captured lifetime (e.g., 'a), we create a new lifetime parameter that is a generic
|
||||
@ -145,6 +143,46 @@ struct LoweringContext<'a, 'hir> {
|
||||
host_param_id: Option<LocalDefId>,
|
||||
}
|
||||
|
||||
impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
||||
fn new(tcx: TyCtxt<'hir>, resolver: &'a mut ResolverAstLowering) -> Self {
|
||||
Self {
|
||||
// Pseudo-globals.
|
||||
tcx,
|
||||
resolver: resolver,
|
||||
arena: tcx.hir_arena,
|
||||
|
||||
// HirId handling.
|
||||
bodies: Vec::new(),
|
||||
attrs: SortedMap::default(),
|
||||
children: Vec::default(),
|
||||
current_hir_id_owner: hir::CRATE_OWNER_ID,
|
||||
item_local_id_counter: hir::ItemLocalId::new(0),
|
||||
node_id_to_local_id: Default::default(),
|
||||
trait_map: Default::default(),
|
||||
|
||||
// Lowering state.
|
||||
catch_scope: None,
|
||||
loop_scope: None,
|
||||
is_in_loop_condition: false,
|
||||
is_in_trait_impl: false,
|
||||
is_in_dyn_type: false,
|
||||
coroutine_kind: None,
|
||||
task_context: None,
|
||||
current_item: None,
|
||||
impl_trait_defs: Vec::new(),
|
||||
impl_trait_bounds: Vec::new(),
|
||||
allow_try_trait: [sym::try_trait_v2, sym::yeet_desugar_details].into(),
|
||||
allow_gen_future: if tcx.features().async_fn_track_caller {
|
||||
[sym::gen_future, sym::closure_track_caller].into()
|
||||
} else {
|
||||
[sym::gen_future].into()
|
||||
},
|
||||
generics_def_id_map: Default::default(),
|
||||
host_param_id: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
trait ResolverAstLoweringExt {
|
||||
fn legacy_const_generic_args(&self, expr: &Expr) -> Option<Vec<usize>>;
|
||||
fn get_partial_res(&self, id: NodeId) -> Option<PartialRes>;
|
||||
@ -153,7 +191,6 @@ trait ResolverAstLoweringExt {
|
||||
fn get_lifetime_res(&self, id: NodeId) -> Option<LifetimeRes>;
|
||||
fn take_extra_lifetime_params(&mut self, id: NodeId) -> Vec<(Ident, NodeId, LifetimeRes)>;
|
||||
fn remap_extra_lifetime_params(&mut self, from: NodeId, to: NodeId);
|
||||
fn decl_macro_kind(&self, def_id: LocalDefId) -> MacroKind;
|
||||
}
|
||||
|
||||
impl ResolverAstLoweringExt for ResolverAstLowering {
|
||||
@ -217,10 +254,6 @@ impl ResolverAstLoweringExt for ResolverAstLowering {
|
||||
let lifetimes = self.extra_lifetime_params_map.remove(&from).unwrap_or_default();
|
||||
self.extra_lifetime_params_map.insert(to, lifetimes);
|
||||
}
|
||||
|
||||
fn decl_macro_kind(&self, def_id: LocalDefId) -> MacroKind {
|
||||
self.builtin_macro_kinds.get(&def_id).copied().unwrap_or(MacroKind::Bang)
|
||||
}
|
||||
}
|
||||
|
||||
/// Context of `impl Trait` in code, which determines whether it is allowed in an HIR subtree,
|
||||
@ -467,6 +500,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
||||
parent: LocalDefId,
|
||||
node_id: ast::NodeId,
|
||||
data: DefPathData,
|
||||
def_kind: DefKind,
|
||||
span: Span,
|
||||
) -> LocalDefId {
|
||||
debug_assert_ne!(node_id, ast::DUMMY_NODE_ID);
|
||||
@ -478,7 +512,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
||||
self.tcx.hir().def_key(self.local_def_id(node_id)),
|
||||
);
|
||||
|
||||
let def_id = self.tcx.at(span).create_def(parent, data).def_id();
|
||||
let def_id = self.tcx.at(span).create_def(parent, data, def_kind).def_id();
|
||||
|
||||
debug!("create_def: def_id_to_node_id[{:?}] <-> {:?}", def_id, node_id);
|
||||
self.resolver.node_id_to_def_id.insert(node_id, def_id);
|
||||
@ -780,6 +814,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
||||
self.current_hir_id_owner.def_id,
|
||||
param,
|
||||
DefPathData::LifetimeNs(kw::UnderscoreLifetime),
|
||||
DefKind::LifetimeParam,
|
||||
ident.span,
|
||||
);
|
||||
debug!(?_def_id);
|
||||
@ -843,7 +878,10 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
||||
result
|
||||
}
|
||||
|
||||
fn with_new_scopes<T>(&mut self, f: impl FnOnce(&mut Self) -> T) -> T {
|
||||
fn with_new_scopes<T>(&mut self, scope_span: Span, f: impl FnOnce(&mut Self) -> T) -> T {
|
||||
let current_item = self.current_item;
|
||||
self.current_item = Some(scope_span);
|
||||
|
||||
let was_in_loop_condition = self.is_in_loop_condition;
|
||||
self.is_in_loop_condition = false;
|
||||
|
||||
@ -855,6 +893,8 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
||||
|
||||
self.is_in_loop_condition = was_in_loop_condition;
|
||||
|
||||
self.current_item = current_item;
|
||||
|
||||
ret
|
||||
}
|
||||
|
||||
@ -1192,6 +1232,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
||||
parent_def_id.def_id,
|
||||
node_id,
|
||||
DefPathData::AnonConst,
|
||||
DefKind::AnonConst,
|
||||
span,
|
||||
);
|
||||
|
||||
@ -1203,7 +1244,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
||||
tokens: None,
|
||||
};
|
||||
|
||||
let ct = self.with_new_scopes(|this| hir::AnonConst {
|
||||
let ct = self.with_new_scopes(span, |this| hir::AnonConst {
|
||||
def_id,
|
||||
hir_id: this.lower_node_id(node_id),
|
||||
body: this.lower_const_body(path_expr.span, Some(&path_expr)),
|
||||
@ -1429,6 +1470,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
||||
self.current_hir_id_owner.def_id,
|
||||
*def_node_id,
|
||||
DefPathData::TypeNs(ident.name),
|
||||
DefKind::TyParam,
|
||||
span,
|
||||
);
|
||||
let (param, bounds, path) = self.lower_universal_param_and_bounds(
|
||||
@ -1582,6 +1624,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
||||
self.current_hir_id_owner.def_id,
|
||||
opaque_ty_node_id,
|
||||
DefPathData::ImplTrait,
|
||||
DefKind::OpaqueTy,
|
||||
opaque_ty_span,
|
||||
);
|
||||
debug!(?opaque_ty_def_id);
|
||||
@ -1636,6 +1679,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
||||
opaque_ty_def_id,
|
||||
duplicated_lifetime_node_id,
|
||||
DefPathData::LifetimeNs(lifetime.ident.name),
|
||||
DefKind::LifetimeParam,
|
||||
lifetime.ident.span,
|
||||
);
|
||||
captured_to_synthesized_mapping.insert(old_def_id, duplicated_lifetime_def_id);
|
||||
@ -2207,7 +2251,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
||||
}
|
||||
|
||||
fn lower_anon_const(&mut self, c: &AnonConst) -> hir::AnonConst {
|
||||
self.with_new_scopes(|this| hir::AnonConst {
|
||||
self.with_new_scopes(c.value.span, |this| hir::AnonConst {
|
||||
def_id: this.local_def_id(c.id),
|
||||
hir_id: this.lower_node_id(c.id),
|
||||
body: this.lower_const_body(c.value.span, Some(&c.value)),
|
||||
@ -2505,8 +2549,13 @@ impl<'hir> GenericArgsCtor<'hir> {
|
||||
});
|
||||
lcx.attrs.insert(hir_id.local_id, std::slice::from_ref(attr));
|
||||
|
||||
let def_id =
|
||||
lcx.create_def(lcx.current_hir_id_owner.def_id, id, DefPathData::AnonConst, span);
|
||||
let def_id = lcx.create_def(
|
||||
lcx.current_hir_id_owner.def_id,
|
||||
id,
|
||||
DefPathData::AnonConst,
|
||||
DefKind::AnonConst,
|
||||
span,
|
||||
);
|
||||
lcx.children.push((def_id, hir::MaybeOwner::NonOwner(hir_id)));
|
||||
self.args.push(hir::GenericArg::Const(hir::ConstArg {
|
||||
value: hir::AnonConst { def_id, hir_id, body },
|
||||
|
@@ -108,7 +108,7 @@ impl<'ast> Visitor<'ast> for LifetimeCollectVisitor<'ast> {
}
}

pub fn lifetimes_in_bounds(
pub(crate) fn lifetimes_in_bounds(
resolver: &ResolverAstLowering,
bounds: &GenericBounds,
) -> Vec<Lifetime> {
@@ -18,12 +18,13 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
self.arena.alloc(self.lower_pat_mut(pattern))
}

pub(crate) fn lower_pat_mut(&mut self, mut pattern: &Pat) -> hir::Pat<'hir> {
fn lower_pat_mut(&mut self, mut pattern: &Pat) -> hir::Pat<'hir> {
ensure_sufficient_stack(|| {
// loop here to avoid recursion
let node = loop {
match &pattern.kind {
PatKind::Wild => break hir::PatKind::Wild,
PatKind::Never => break hir::PatKind::Never,
PatKind::Ident(binding_mode, ident, sub) => {
let lower_sub = |this: &mut Self| sub.as_ref().map(|s| this.lower_pat(s));
break self.lower_pat_ident(pattern, *binding_mode, *ident, lower_sub);
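`lower_pat_mut` keeps its existing structure: a `loop` that peels nested patterns iteratively (together with `ensure_sufficient_stack`) instead of recursing, so deeply nested input does not grow the call stack one frame per level. A small standalone sketch of that looping idiom (a toy `MiniPat`, without the stack-growing helper):

    // Sketch of the "loop here to avoid recursion" idea from `lower_pat_mut`:
    // unwrap nesting (e.g. parenthesized patterns) iteratively rather than
    // recursively.
    enum MiniPat {
        Wild,
        Paren(Box<MiniPat>),
    }

    fn strip_parens(mut pat: &MiniPat) -> &MiniPat {
        loop {
            match pat {
                // Step into the nested pattern and keep looping.
                MiniPat::Paren(inner) => pat = &**inner,
                // Anything else ends the loop and is returned as the result.
                other => break other,
            }
        }
    }

    fn main() {
        // (((_)))
        let pat = MiniPat::Paren(Box::new(MiniPat::Paren(Box::new(MiniPat::Paren(
            Box::new(MiniPat::Wild),
        )))));
        assert!(matches!(strip_parens(&pat), MiniPat::Wild));
    }
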
@@ -555,6 +555,7 @@ pub fn check_crate(krate: &ast::Crate, sess: &Session, features: &Features) {
gate_all!(explicit_tail_calls, "`become` expression is experimental");
gate_all!(generic_const_items, "generic const items are experimental");
gate_all!(unnamed_fields, "unnamed fields are not yet fully implemented");
gate_all!(never_patterns, "`!` patterns are experimental");

if !visitor.features.negative_bounds {
for &span in spans.get(&sym::negative_bounds).iter().copied().flatten() {
@@ -151,7 +151,7 @@ pub fn print_crate<'a>(
/// Note: some old proc macros parse pretty-printed output, so changes here can
/// break old code. For example:
/// - #63896: `#[allow(unused,` must be printed rather than `#[allow(unused ,`
/// - #73345: `#[allow(unused)] must be printed rather than `# [allow(unused)]
/// - #73345: `#[allow(unused)]` must be printed rather than `# [allow(unused)]`
///
fn space_between(tt1: &TokenTree, tt2: &TokenTree) -> bool {
use token::*;
@@ -1343,6 +1343,7 @@ impl<'a> State<'a> {
is that it doesn't matter */
match &pat.kind {
PatKind::Wild => self.word("_"),
PatKind::Never => self.word("!"),
PatKind::Ident(BindingAnnotation(by_ref, mutbl), ident, sub) => {
if *by_ref == ByRef::Yes {
self.word_nbsp("ref");
@@ -255,12 +255,12 @@ impl<'a> State<'a> {

self.print_expr_maybe_paren(lhs, left_prec);
self.space();
self.word_space(op.node.to_string());
self.word_space(op.node.as_str());
self.print_expr_maybe_paren(rhs, right_prec)
}

fn print_expr_unary(&mut self, op: ast::UnOp, expr: &ast::Expr) {
self.word(ast::UnOp::to_string(op));
self.word(op.as_str());
self.print_expr_maybe_paren(expr, parser::PREC_PREFIX)
}

@@ -470,7 +470,7 @@ impl<'a> State<'a> {
let prec = AssocOp::Assign.precedence() as i8;
self.print_expr_maybe_paren(lhs, prec + 1);
self.space();
self.word(op.node.to_string());
self.word(op.node.as_str());
self.word_space("=");
self.print_expr_maybe_paren(rhs, prec);
}
@@ -985,7 +985,7 @@ pub fn parse_repr_attr(sess: &Session, attr: &Attribute) -> Vec<ReprAttr> {
Ok(literal) => acc.push(ReprPacked(literal)),
Err(message) => literal_error = Some(message),
};
} else if matches!(name, sym::C | sym::simd | sym::transparent)
} else if matches!(name, sym::Rust | sym::C | sym::simd | sym::transparent)
|| int_type_of_word(name).is_some()
{
recognised = true;
@@ -1018,7 +1018,7 @@ pub fn parse_repr_attr(sess: &Session, attr: &Attribute) -> Vec<ReprAttr> {
});
} else if matches!(
meta_item.name_or_empty(),
sym::C | sym::simd | sym::transparent
sym::Rust | sym::C | sym::simd | sym::transparent
) || int_type_of_word(meta_item.name_or_empty()).is_some()
{
recognised = true;
@@ -1043,7 +1043,7 @@ pub fn parse_repr_attr(sess: &Session, attr: &Attribute) -> Vec<ReprAttr> {
);
} else if matches!(
meta_item.name_or_empty(),
sym::C | sym::simd | sym::transparent
sym::Rust | sym::C | sym::simd | sym::transparent
) || int_type_of_word(meta_item.name_or_empty()).is_some()
{
recognised = true;
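These hunks make the attribute parser accept `Rust` as a representation hint alongside `C`, `simd`, and `transparent`. `#[repr(Rust)]` simply names the default layout, so the change is about recognising the spelling rather than altering behaviour; a small sketch of what is accepted on a toolchain that includes this change:

    // `#[repr(Rust)]` names the default (unspecified) layout explicitly.
    // After the parser change above it is recognised rather than reported as
    // an unrecognised representation hint.
    #[repr(Rust)]
    struct Point {
        x: i32,
        y: i32,
    }

    // For contrast, `#[repr(C)]` requests the C layout with fields kept in
    // declaration order.
    #[repr(C)]
    struct CPoint {
        x: i32,
        y: i32,
    }

    fn main() {
        let p = Point { x: 1, y: 2 };
        let c = CPoint { x: p.x, y: p.y };
        println!("{} {}", c.x, c.y);
    }
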
@@ -2352,7 +2352,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
Applicability::MaybeIncorrect,
);
} else {
err.note("the result of `format_args!` can only be assigned directly if no placeholders in it's arguments are used");
err.note("the result of `format_args!` can only be assigned directly if no placeholders in its arguments are used");
err.note("to learn more, visit <https://doc.rust-lang.org/std/macro.format_args.html>");
}
suggested = true;
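The wording fix above ("it's" to "its") sits in a real diagnostic about `format_args!`: with placeholders, the macro borrows temporaries for its arguments, so its result has to be consumed in the same expression. A hedged sketch of the two cases the note distinguishes:

    use std::fmt::Arguments;

    fn print_args(args: Arguments<'_>) {
        println!("{args}");
    }

    fn main() {
        // Fine: no placeholders, so nothing is borrowed from temporaries and
        // the `Arguments` value can be passed around directly.
        print_args(format_args!("hello"));

        // With placeholders the arguments are captured by reference, so the
        // result must be used within the same expression:
        print_args(format_args!("x = {}", 40 + 2));

        // Assigning it to a variable first, e.g.
        //     let args = format_args!("x = {}", 40 + 2);
        //     print_args(args);
        // is rejected because the borrowed temporaries do not live long
        // enough, which is the situation the corrected note above explains.
    }
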
@ -42,6 +42,7 @@ use rustc_target::abi::FieldIdx;
|
||||
use smallvec::SmallVec;
|
||||
use std::cell::RefCell;
|
||||
use std::collections::BTreeMap;
|
||||
use std::marker::PhantomData;
|
||||
use std::ops::Deref;
|
||||
use std::rc::Rc;
|
||||
|
||||
@ -100,7 +101,7 @@ use renumber::RegionCtxt;
|
||||
rustc_fluent_macro::fluent_messages! { "../messages.ftl" }
|
||||
|
||||
/// Associate some local constants with the `'tcx` lifetime
|
||||
struct TyCtxtConsts<'tcx>(TyCtxt<'tcx>);
|
||||
struct TyCtxtConsts<'tcx>(PhantomData<&'tcx ()>);
|
||||
impl<'tcx> TyCtxtConsts<'tcx> {
|
||||
const DEREF_PROJECTION: &'tcx [PlaceElem<'tcx>; 1] = &[ProjectionElem::Deref];
|
||||
}
|
||||
|
@ -672,30 +672,22 @@ fn report_missing_placeholders(
|
||||
if explained.contains(&sub) {
|
||||
continue;
|
||||
}
|
||||
explained.insert(sub.clone());
|
||||
explained.insert(sub);
|
||||
|
||||
if !found_foreign {
|
||||
found_foreign = true;
|
||||
show_doc_note = true;
|
||||
}
|
||||
|
||||
if let Some(inner_sp) = pos {
|
||||
let sp = fmt_span.from_inner(inner_sp);
|
||||
let sp = fmt_span.from_inner(pos);
|
||||
|
||||
if success {
|
||||
suggestions.push((sp, trn));
|
||||
} else {
|
||||
diag.span_note(
|
||||
sp,
|
||||
format!("format specifiers use curly braces, and {}", trn),
|
||||
);
|
||||
}
|
||||
if success {
|
||||
suggestions.push((sp, trn));
|
||||
} else {
|
||||
if success {
|
||||
diag.help(format!("`{}` should be written as `{}`", sub, trn));
|
||||
} else {
|
||||
diag.note(format!("`{}` should use curly braces, and {}", sub, trn));
|
||||
}
|
||||
diag.span_note(
|
||||
sp,
|
||||
format!("format specifiers use curly braces, and {}", trn),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -19,10 +19,10 @@ pub(crate) mod printf {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn position(&self) -> Option<InnerSpan> {
|
||||
pub fn position(&self) -> InnerSpan {
|
||||
match self {
|
||||
Substitution::Format(fmt) => Some(fmt.position),
|
||||
&Substitution::Escape((start, end)) => Some(InnerSpan::new(start, end)),
|
||||
Substitution::Format(fmt) => fmt.position,
|
||||
&Substitution::Escape((start, end)) => InnerSpan::new(start, end),
|
||||
}
|
||||
}
|
||||
|
||||
@ -302,10 +302,9 @@ pub(crate) mod printf {
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
let (mut sub, tail) = parse_next_substitution(self.s)?;
|
||||
self.s = tail;
|
||||
if let Some(InnerSpan { start, end }) = sub.position() {
|
||||
sub.set_position(start + self.pos, end + self.pos);
|
||||
self.pos += end;
|
||||
}
|
||||
let InnerSpan { start, end } = sub.position();
|
||||
sub.set_position(start + self.pos, end + self.pos);
|
||||
self.pos += end;
|
||||
Some(sub)
|
||||
}
|
||||
|
||||
@ -629,9 +628,9 @@ pub mod shell {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn position(&self) -> Option<InnerSpan> {
|
||||
pub fn position(&self) -> InnerSpan {
|
||||
let (Self::Ordinal(_, pos) | Self::Name(_, pos) | Self::Escape(pos)) = self;
|
||||
Some(InnerSpan::new(pos.0, pos.1))
|
||||
InnerSpan::new(pos.0, pos.1)
|
||||
}
|
||||
|
||||
pub fn set_position(&mut self, start: usize, end: usize) {
|
||||
@ -664,10 +663,9 @@ pub mod shell {
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
let (mut sub, tail) = parse_next_substitution(self.s)?;
|
||||
self.s = tail;
|
||||
if let Some(InnerSpan { start, end }) = sub.position() {
|
||||
sub.set_position(start + self.pos, end + self.pos);
|
||||
self.pos += end;
|
||||
}
|
||||
let InnerSpan { start, end } = sub.position();
|
||||
sub.set_position(start + self.pos, end + self.pos);
|
||||
self.pos += end;
|
||||
Some(sub)
|
||||
}
|
||||
|
||||
|
@ -4,7 +4,7 @@ use rustc_codegen_ssa::traits::*;
|
||||
use rustc_hir::def_id::DefId;
|
||||
use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags;
|
||||
use rustc_middle::ty::{self, TyCtxt};
|
||||
use rustc_session::config::OptLevel;
|
||||
use rustc_session::config::{FunctionReturn, OptLevel};
|
||||
use rustc_span::symbol::sym;
|
||||
use rustc_target::spec::abi::Abi;
|
||||
use rustc_target::spec::{FramePointer, SanitizerSet, StackProbeType, StackProtector};
|
||||
@ -118,6 +118,15 @@ pub fn frame_pointer_type_attr<'ll>(cx: &CodegenCx<'ll, '_>) -> Option<&'ll Attr
|
||||
Some(llvm::CreateAttrStringValue(cx.llcx, "frame-pointer", attr_value))
|
||||
}
|
||||
|
||||
fn function_return_attr<'ll>(cx: &CodegenCx<'ll, '_>) -> Option<&'ll Attribute> {
|
||||
let function_return_attr = match cx.sess().opts.unstable_opts.function_return {
|
||||
FunctionReturn::Keep => return None,
|
||||
FunctionReturn::ThunkExtern => AttributeKind::FnRetThunkExtern,
|
||||
};
|
||||
|
||||
Some(function_return_attr.create_attr(cx.llcx))
|
||||
}
|
||||
|
||||
/// Tell LLVM what instrument function to insert.
|
||||
#[inline]
|
||||
fn instrument_function_attr<'ll>(cx: &CodegenCx<'ll, '_>) -> SmallVec<[&'ll Attribute; 4]> {
|
||||
@ -331,8 +340,9 @@ pub fn from_fn_attrs<'ll, 'tcx>(
|
||||
to_add.push(llvm::CreateAttrString(cx.llcx, "use-sample-profile"));
|
||||
}
|
||||
|
||||
// FIXME: none of these three functions interact with source level attributes.
|
||||
// FIXME: none of these functions interact with source level attributes.
|
||||
to_add.extend(frame_pointer_type_attr(cx));
|
||||
to_add.extend(function_return_attr(cx));
|
||||
to_add.extend(instrument_function_attr(cx));
|
||||
to_add.extend(nojumptables_attr(cx));
|
||||
to_add.extend(probestack_attr(cx));
|
||||
|
@ -134,18 +134,6 @@ pub unsafe fn create_module<'ll>(
|
||||
|
||||
let mut target_data_layout = sess.target.data_layout.to_string();
|
||||
let llvm_version = llvm_util::get_version();
|
||||
if llvm_version < (16, 0, 0) {
|
||||
if sess.target.arch == "s390x" {
|
||||
// LLVM 16 data layout changed to always set 64-bit vector alignment,
|
||||
// which is conditional in earlier LLVM versions.
|
||||
// https://reviews.llvm.org/D131158 for the discussion.
|
||||
target_data_layout = target_data_layout.replace("-v128:64", "");
|
||||
} else if sess.target.arch == "riscv64" {
|
||||
// LLVM 16 introduced this change so as to produce more efficient code.
|
||||
// See https://reviews.llvm.org/D116735 for the discussion.
|
||||
target_data_layout = target_data_layout.replace("-n32:64-", "-n64-");
|
||||
}
|
||||
}
|
||||
if llvm_version < (17, 0, 0) {
|
||||
if sess.target.arch.starts_with("powerpc") {
|
||||
// LLVM 17 specifies function pointer alignment for ppc:
|
||||
|
@ -200,6 +200,7 @@ pub enum AttributeKind {
|
||||
AllocatedPointer = 38,
|
||||
AllocAlign = 39,
|
||||
SanitizeSafeStack = 40,
|
||||
FnRetThunkExtern = 41,
|
||||
}
|
||||
|
||||
/// LLVMIntPredicate
|
||||
|
@ -44,7 +44,7 @@ use tempfile::Builder as TempFileBuilder;
|
||||
use itertools::Itertools;
|
||||
use std::cell::OnceCell;
|
||||
use std::collections::BTreeSet;
|
||||
use std::ffi::OsString;
|
||||
use std::ffi::{OsStr, OsString};
|
||||
use std::fs::{read, File, OpenOptions};
|
||||
use std::io::{BufWriter, Write};
|
||||
use std::ops::Deref;
|
||||
@ -2527,7 +2527,7 @@ fn add_native_libs_from_crate(
|
||||
NativeLibKind::WasmImportModule => {}
|
||||
NativeLibKind::LinkArg => {
|
||||
if link_static {
|
||||
cmd.arg(name);
|
||||
cmd.linker_arg(OsStr::new(name), verbatim);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -196,6 +196,14 @@ pub trait Linker {
|
||||
fn add_no_exec(&mut self) {}
|
||||
fn add_as_needed(&mut self) {}
|
||||
fn reset_per_library_state(&mut self) {}
|
||||
fn linker_arg(&mut self, arg: &OsStr, verbatim: bool) {
|
||||
self.linker_args(&[arg], verbatim);
|
||||
}
|
||||
fn linker_args(&mut self, args: &[&OsStr], _verbatim: bool) {
|
||||
args.into_iter().for_each(|a| {
|
||||
self.cmd().arg(a);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
impl dyn Linker + '_ {
|
||||
@ -223,38 +231,12 @@ pub struct GccLinker<'a> {
|
||||
}
|
||||
|
||||
impl<'a> GccLinker<'a> {
|
||||
/// Passes an argument directly to the linker.
|
||||
///
|
||||
/// When the linker is not ld-like such as when using a compiler as a linker, the argument is
|
||||
/// prepended by `-Wl,`.
|
||||
fn linker_arg(&mut self, arg: impl AsRef<OsStr>) -> &mut Self {
|
||||
self.linker_args(&[arg]);
|
||||
self
|
||||
fn linker_arg(&mut self, arg: impl AsRef<OsStr>) {
|
||||
Linker::linker_arg(self, arg.as_ref(), false);
|
||||
}
|
||||
|
||||
/// Passes a series of arguments directly to the linker.
|
||||
///
|
||||
/// When the linker is ld-like, the arguments are simply appended to the command. When the
|
||||
/// linker is not ld-like such as when using a compiler as a linker, the arguments are joined by
|
||||
/// commas to form an argument that is then prepended with `-Wl`. In this situation, only a
|
||||
/// single argument is appended to the command to ensure that the order of the arguments is
|
||||
/// preserved by the compiler.
|
||||
fn linker_args(&mut self, args: &[impl AsRef<OsStr>]) -> &mut Self {
|
||||
if self.is_ld {
|
||||
args.into_iter().for_each(|a| {
|
||||
self.cmd.arg(a);
|
||||
});
|
||||
} else {
|
||||
if !args.is_empty() {
|
||||
let mut s = OsString::from("-Wl");
|
||||
for a in args {
|
||||
s.push(",");
|
||||
s.push(a);
|
||||
}
|
||||
self.cmd.arg(s);
|
||||
}
|
||||
}
|
||||
self
|
||||
fn linker_args(&mut self, args: &[impl AsRef<OsStr>]) {
|
||||
let args_vec: Vec<&OsStr> = args.iter().map(|x| x.as_ref()).collect();
|
||||
Linker::linker_args(self, &args_vec, false);
|
||||
}
|
||||
|
||||
fn takes_hints(&self) -> bool {
|
||||
@ -361,6 +343,30 @@ impl<'a> GccLinker<'a> {
|
||||
}
|
||||
|
||||
impl<'a> Linker for GccLinker<'a> {
|
||||
/// Passes a series of arguments directly to the linker.
|
||||
///
|
||||
/// When the linker is ld-like, the arguments are simply appended to the command. When the
|
||||
/// linker is not ld-like such as when using a compiler as a linker, the arguments are joined by
|
||||
/// commas to form an argument that is then prepended with `-Wl`. In this situation, only a
|
||||
/// single argument is appended to the command to ensure that the order of the arguments is
|
||||
/// preserved by the compiler.
|
||||
fn linker_args(&mut self, args: &[&OsStr], verbatim: bool) {
|
||||
if self.is_ld || verbatim {
|
||||
args.into_iter().for_each(|a| {
|
||||
self.cmd.arg(a);
|
||||
});
|
||||
} else {
|
||||
if !args.is_empty() {
|
||||
let mut s = OsString::from("-Wl");
|
||||
for a in args {
|
||||
s.push(",");
|
||||
s.push(a);
|
||||
}
|
||||
self.cmd.arg(s);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn cmd(&mut self) -> &mut Command {
|
||||
&mut self.cmd
|
||||
}
|
||||
@ -531,7 +537,7 @@ impl<'a> Linker for GccLinker<'a> {
|
||||
self.linker_arg("-force_load");
|
||||
self.linker_arg(&lib);
|
||||
} else {
|
||||
self.linker_arg("--whole-archive").cmd.arg(lib);
|
||||
self.linker_args(&[OsString::from("--whole-archive"), lib.into()]);
|
||||
self.linker_arg("--no-whole-archive");
|
||||
}
|
||||
}
|
||||
@ -1302,6 +1308,8 @@ impl<'a> Linker for WasmLd<'a> {
|
||||
}
|
||||
|
||||
fn optimize(&mut self) {
|
||||
// The -O flag is, as of late 2023, only used for merging of strings and debuginfo, and
|
||||
// only differentiates -O0 and -O1. It does not apply to LTO.
|
||||
self.cmd.arg(match self.sess.opts.optimize {
|
||||
OptLevel::No => "-O0",
|
||||
OptLevel::Less => "-O1",
|
||||
@ -1354,7 +1362,31 @@ impl<'a> Linker for WasmLd<'a> {
|
||||
fn subsystem(&mut self, _subsystem: &str) {}
|
||||
|
||||
fn linker_plugin_lto(&mut self) {
|
||||
// Do nothing for now
|
||||
match self.sess.opts.cg.linker_plugin_lto {
|
||||
LinkerPluginLto::Disabled => {
|
||||
// Nothing to do
|
||||
}
|
||||
LinkerPluginLto::LinkerPluginAuto => {
|
||||
self.push_linker_plugin_lto_args();
|
||||
}
|
||||
LinkerPluginLto::LinkerPlugin(_) => {
|
||||
self.push_linker_plugin_lto_args();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> WasmLd<'a> {
|
||||
fn push_linker_plugin_lto_args(&mut self) {
|
||||
let opt_level = match self.sess.opts.optimize {
|
||||
config::OptLevel::No => "O0",
|
||||
config::OptLevel::Less => "O1",
|
||||
config::OptLevel::Default => "O2",
|
||||
config::OptLevel::Aggressive => "O3",
|
||||
// wasm-ld only handles integer LTO opt levels. Use O2
|
||||
config::OptLevel::Size | config::OptLevel::SizeMin => "O2",
|
||||
};
|
||||
self.cmd.arg(&format!("--lto-{opt_level}"));
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -216,6 +216,7 @@ impl CodegenResults {
|
||||
sess: &Session,
|
||||
rlink_file: &Path,
|
||||
codegen_results: &CodegenResults,
|
||||
outputs: &OutputFilenames,
|
||||
) -> Result<usize, io::Error> {
|
||||
let mut encoder = FileEncoder::new(rlink_file)?;
|
||||
encoder.emit_raw_bytes(RLINK_MAGIC);
|
||||
@ -224,10 +225,14 @@ impl CodegenResults {
|
||||
encoder.emit_raw_bytes(&RLINK_VERSION.to_be_bytes());
|
||||
encoder.emit_str(sess.cfg_version);
|
||||
Encodable::encode(codegen_results, &mut encoder);
|
||||
Encodable::encode(outputs, &mut encoder);
|
||||
encoder.finish().map_err(|(_path, err)| err)
|
||||
}
|
||||
|
||||
pub fn deserialize_rlink(sess: &Session, data: Vec<u8>) -> Result<Self, CodegenErrors> {
|
||||
pub fn deserialize_rlink(
|
||||
sess: &Session,
|
||||
data: Vec<u8>,
|
||||
) -> Result<(Self, OutputFilenames), CodegenErrors> {
|
||||
// The Decodable machinery is not used here because it panics if the input data is invalid
|
||||
// and because its internal representation may change.
|
||||
if !data.starts_with(RLINK_MAGIC) {
|
||||
@ -256,6 +261,7 @@ impl CodegenResults {
|
||||
}
|
||||
|
||||
let codegen_results = CodegenResults::decode(&mut decoder);
|
||||
Ok(codegen_results)
|
||||
let outputs = OutputFilenames::decode(&mut decoder);
|
||||
Ok((codegen_results, outputs))
|
||||
}
|
||||
}
|
||||
|
@ -7,12 +7,11 @@ use std::assert_matches::assert_matches;
|
||||
use either::{Either, Left, Right};
|
||||
|
||||
use rustc_ast::Mutability;
|
||||
use rustc_index::IndexSlice;
|
||||
use rustc_middle::mir;
|
||||
use rustc_middle::ty;
|
||||
use rustc_middle::ty::layout::{LayoutOf, TyAndLayout};
|
||||
use rustc_middle::ty::Ty;
|
||||
use rustc_target::abi::{Abi, Align, FieldIdx, HasDataLayout, Size, FIRST_VARIANT};
|
||||
use rustc_target::abi::{Abi, Align, HasDataLayout, Size};
|
||||
|
||||
use super::{
|
||||
alloc_range, mir_assign_valid_types, AllocId, AllocRef, AllocRefMut, CheckAlignMsg, ImmTy,
|
||||
@ -977,34 +976,6 @@ where
|
||||
Ok(self.ptr_with_meta_to_mplace(ptr.into(), MemPlaceMeta::Meta(meta), layout))
|
||||
}
|
||||
|
||||
/// Writes the aggregate to the destination.
|
||||
#[instrument(skip(self), level = "trace")]
|
||||
pub fn write_aggregate(
|
||||
&mut self,
|
||||
kind: &mir::AggregateKind<'tcx>,
|
||||
operands: &IndexSlice<FieldIdx, mir::Operand<'tcx>>,
|
||||
dest: &PlaceTy<'tcx, M::Provenance>,
|
||||
) -> InterpResult<'tcx> {
|
||||
self.write_uninit(dest)?;
|
||||
let (variant_index, variant_dest, active_field_index) = match *kind {
|
||||
mir::AggregateKind::Adt(_, variant_index, _, _, active_field_index) => {
|
||||
let variant_dest = self.project_downcast(dest, variant_index)?;
|
||||
(variant_index, variant_dest, active_field_index)
|
||||
}
|
||||
_ => (FIRST_VARIANT, dest.clone(), None),
|
||||
};
|
||||
if active_field_index.is_some() {
|
||||
assert_eq!(operands.len(), 1);
|
||||
}
|
||||
for (field_index, operand) in operands.iter_enumerated() {
|
||||
let field_index = active_field_index.unwrap_or(field_index);
|
||||
let field_dest = self.project_field(&variant_dest, field_index.as_usize())?;
|
||||
let op = self.eval_operand(operand, Some(field_dest.layout))?;
|
||||
self.copy_op(&op, &field_dest, /*allow_transmute*/ false)?;
|
||||
}
|
||||
self.write_discriminant(variant_index, dest)
|
||||
}
|
||||
|
||||
pub fn raw_const_to_mplace(
|
||||
&self,
|
||||
raw: mir::ConstAlloc<'tcx>,
|
||||
|
@ -4,11 +4,12 @@
|
||||
|
||||
use either::Either;
|
||||
|
||||
use rustc_index::IndexSlice;
|
||||
use rustc_middle::mir;
|
||||
use rustc_middle::mir::interpret::{InterpResult, Scalar};
|
||||
use rustc_middle::ty::layout::LayoutOf;
|
||||
use rustc_target::abi::{FieldIdx, FIRST_VARIANT};
|
||||
|
||||
use super::{ImmTy, InterpCx, Machine, Projectable};
|
||||
use super::{ImmTy, InterpCx, InterpResult, Machine, PlaceTy, Projectable, Scalar};
|
||||
use crate::util;
|
||||
|
||||
impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
|
||||
@ -187,34 +188,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
|
||||
}
|
||||
|
||||
Repeat(ref operand, _) => {
|
||||
let src = self.eval_operand(operand, None)?;
|
||||
assert!(src.layout.is_sized());
|
||||
let dest = self.force_allocation(&dest)?;
|
||||
let length = dest.len(self)?;
|
||||
|
||||
if length == 0 {
|
||||
// Nothing to copy... but let's still make sure that `dest` as a place is valid.
|
||||
self.get_place_alloc_mut(&dest)?;
|
||||
} else {
|
||||
// Write the src to the first element.
|
||||
let first = self.project_index(&dest, 0)?;
|
||||
self.copy_op(&src, &first, /*allow_transmute*/ false)?;
|
||||
|
||||
// This is performance-sensitive code for big static/const arrays! So we
|
||||
// avoid writing each operand individually and instead just make many copies
|
||||
// of the first element.
|
||||
let elem_size = first.layout.size;
|
||||
let first_ptr = first.ptr();
|
||||
let rest_ptr = first_ptr.offset(elem_size, self)?;
|
||||
// No alignment requirement since `copy_op` above already checked it.
|
||||
self.mem_copy_repeatedly(
|
||||
first_ptr,
|
||||
rest_ptr,
|
||||
elem_size,
|
||||
length - 1,
|
||||
/*nonoverlapping:*/ true,
|
||||
)?;
|
||||
}
|
||||
self.write_repeat(operand, &dest)?;
|
||||
}
|
||||
|
||||
Len(place) => {
|
||||
@ -307,6 +281,73 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Writes the aggregate to the destination.
|
||||
#[instrument(skip(self), level = "trace")]
|
||||
fn write_aggregate(
|
||||
&mut self,
|
||||
kind: &mir::AggregateKind<'tcx>,
|
||||
operands: &IndexSlice<FieldIdx, mir::Operand<'tcx>>,
|
||||
dest: &PlaceTy<'tcx, M::Provenance>,
|
||||
) -> InterpResult<'tcx> {
|
||||
self.write_uninit(dest)?; // make sure all the padding ends up as uninit
|
||||
let (variant_index, variant_dest, active_field_index) = match *kind {
|
||||
mir::AggregateKind::Adt(_, variant_index, _, _, active_field_index) => {
|
||||
let variant_dest = self.project_downcast(dest, variant_index)?;
|
||||
(variant_index, variant_dest, active_field_index)
|
||||
}
|
||||
_ => (FIRST_VARIANT, dest.clone(), None),
|
||||
};
|
||||
if active_field_index.is_some() {
|
||||
assert_eq!(operands.len(), 1);
|
||||
}
|
||||
for (field_index, operand) in operands.iter_enumerated() {
|
||||
let field_index = active_field_index.unwrap_or(field_index);
|
||||
let field_dest = self.project_field(&variant_dest, field_index.as_usize())?;
|
||||
let op = self.eval_operand(operand, Some(field_dest.layout))?;
|
||||
self.copy_op(&op, &field_dest, /*allow_transmute*/ false)?;
|
||||
}
|
||||
self.write_discriminant(variant_index, dest)
|
||||
}
|
||||
|
||||
/// Repeats `operand` into the destination. `dest` must have array type, and that type
|
||||
/// determines how often `operand` is repeated.
|
||||
fn write_repeat(
|
||||
&mut self,
|
||||
operand: &mir::Operand<'tcx>,
|
||||
dest: &PlaceTy<'tcx, M::Provenance>,
|
||||
) -> InterpResult<'tcx> {
|
||||
let src = self.eval_operand(operand, None)?;
|
||||
assert!(src.layout.is_sized());
|
||||
let dest = self.force_allocation(&dest)?;
|
||||
let length = dest.len(self)?;
|
||||
|
||||
if length == 0 {
|
||||
// Nothing to copy... but let's still make sure that `dest` as a place is valid.
|
||||
self.get_place_alloc_mut(&dest)?;
|
||||
} else {
|
||||
// Write the src to the first element.
|
||||
let first = self.project_index(&dest, 0)?;
|
||||
self.copy_op(&src, &first, /*allow_transmute*/ false)?;
|
||||
|
||||
// This is performance-sensitive code for big static/const arrays! So we
|
||||
// avoid writing each operand individually and instead just make many copies
|
||||
// of the first element.
|
||||
let elem_size = first.layout.size;
|
||||
let first_ptr = first.ptr();
|
||||
let rest_ptr = first_ptr.offset(elem_size, self)?;
|
||||
// No alignment requirement since `copy_op` above already checked it.
|
||||
self.mem_copy_repeatedly(
|
||||
first_ptr,
|
||||
rest_ptr,
|
||||
elem_size,
|
||||
length - 1,
|
||||
/*nonoverlapping:*/ true,
|
||||
)?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Evaluate the given terminator. Will also adjust the stack frame and statement position accordingly.
|
||||
fn terminator(&mut self, terminator: &mir::Terminator<'tcx>) -> InterpResult<'tcx> {
|
||||
info!("{:?}", terminator.kind);
|
||||
|
@ -492,7 +492,7 @@ where
|
||||
let returned_walk =
|
||||
return_value.take().into_iter().map(|walk| (*successor_node, Some(walk)));
|
||||
|
||||
let successor_walk = successors.by_ref().map(|successor_node| {
|
||||
let successor_walk = successors.map(|successor_node| {
|
||||
debug!(?node, ?successor_node);
|
||||
(successor_node, self.inspect_node(successor_node))
|
||||
});
|
||||
|
@ -401,9 +401,7 @@ fn run_compiler(
|
||||
Ok(())
|
||||
})?;
|
||||
|
||||
// Make sure the `output_filenames` query is run for its side
|
||||
// effects of writing the dep-info and reporting errors.
|
||||
queries.global_ctxt()?.enter(|tcx| tcx.output_filenames(()));
|
||||
queries.write_dep_info()?;
|
||||
} else {
|
||||
let krate = queries.parse()?;
|
||||
pretty::print(
|
||||
@ -431,9 +429,7 @@ fn run_compiler(
|
||||
return early_exit();
|
||||
}
|
||||
|
||||
// Make sure the `output_filenames` query is run for its side
|
||||
// effects of writing the dep-info and reporting errors.
|
||||
queries.global_ctxt()?.enter(|tcx| tcx.output_filenames(()));
|
||||
queries.write_dep_info()?;
|
||||
|
||||
if sess.opts.output_types.contains_key(&OutputType::DepInfo)
|
||||
&& sess.opts.output_types.len() == 1
|
||||
@ -648,12 +644,11 @@ fn show_md_content_with_pager(content: &str, color: ColorConfig) {
|
||||
fn process_rlink(sess: &Session, compiler: &interface::Compiler) {
|
||||
assert!(sess.opts.unstable_opts.link_only);
|
||||
if let Input::File(file) = &sess.io.input {
|
||||
let outputs = compiler.build_output_filenames(sess, &[]);
|
||||
let rlink_data = fs::read(file).unwrap_or_else(|err| {
|
||||
sess.emit_fatal(RlinkUnableToRead { err });
|
||||
});
|
||||
let codegen_results = match CodegenResults::deserialize_rlink(sess, rlink_data) {
|
||||
Ok(codegen) => codegen,
|
||||
let (codegen_results, outputs) = match CodegenResults::deserialize_rlink(sess, rlink_data) {
|
||||
Ok((codegen, outputs)) => (codegen, outputs),
|
||||
Err(err) => {
|
||||
match err {
|
||||
CodegenErrors::WrongFileType => sess.emit_fatal(RLinkWrongFileType),
|
||||
|
@ -653,3 +653,4 @@ E0795: include_str!("./error_codes/E0795.md"),
|
||||
// E0721, // `await` keyword
|
||||
// E0723, // unstable feature in `const` context
|
||||
// E0738, // Removed; errored on `#[track_caller] fn`s in `extern "Rust" { ... }`.
|
||||
// E0744, // merged into E0728
|
||||
|
@ -1,8 +1,10 @@
|
||||
#### Note: this error code is no longer emitted by the compiler.
|
||||
|
||||
An unsupported expression was used inside a const context.
|
||||
|
||||
Erroneous code example:
|
||||
|
||||
```compile_fail,edition2018,E0744
|
||||
```ignore (removed error code)
|
||||
const _: i32 = {
|
||||
async { 0 }.await
|
||||
};
|
||||
|
@ -50,6 +50,8 @@ macro_rules! declare_features {
|
||||
}),+
|
||||
];
|
||||
|
||||
const NUM_FEATURES: usize = UNSTABLE_FEATURES.len();
|
||||
|
||||
/// A set of features to be used by later passes.
|
||||
#[derive(Clone, Default, Debug)]
|
||||
pub struct Features {
|
||||
@ -82,8 +84,14 @@ macro_rules! declare_features {
|
||||
self.declared_features.insert(symbol);
|
||||
}
|
||||
|
||||
pub fn walk_feature_fields(&self, mut f: impl FnMut(&str, bool)) {
|
||||
$(f(stringify!($feature), self.$feature);)+
|
||||
/// This is intended for hashing the set of active features.
|
||||
///
|
||||
/// The expectation is that this produces much smaller code than other alternatives.
|
||||
///
|
||||
/// Note that the total feature count is pretty small, so this is not a huge array.
|
||||
#[inline]
|
||||
pub fn all_features(&self) -> [u8; NUM_FEATURES] {
|
||||
[$(self.$feature as u8),+]
|
||||
}
|
||||
|
||||
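The `all_features` accessor above turns the feature set into a small fixed-size `[u8; NUM_FEATURES]`, so hashing the active features becomes a hash of one contiguous buffer instead of a per-field callback walk. A self-contained sketch of that strategy with an invented two-feature struct:

```rust
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

struct MiniFeatures {
    never_patterns: bool,
    never_type: bool,
}

impl MiniFeatures {
    // One byte per flag, in a fixed order, mirroring the macro-generated array.
    fn all_features(&self) -> [u8; 2] {
        [self.never_patterns as u8, self.never_type as u8]
    }
}

fn main() {
    let features = MiniFeatures { never_patterns: true, never_type: false };
    let mut hasher = DefaultHasher::new();
    features.all_features().hash(&mut hasher);
    println!("feature-set fingerprint: {:x}", hasher.finish());
}
```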
/// Is the given feature explicitly declared, i.e. named in a
|
||||
@ -147,7 +155,7 @@ macro_rules! declare_features {
|
||||
// was set.
|
||||
//
|
||||
// Note that the features are grouped into internal/user-facing and then
|
||||
// sorted by version inside those groups. This is enforced with tidy.
|
||||
// sorted alphabetically inside those groups. This is enforced with tidy.
|
||||
//
|
||||
// N.B., `tools/tidy/src/features.rs` parses this information directly out of the
|
||||
// source, so take care when modifying it.
|
||||
@ -512,6 +520,8 @@ declare_features! (
|
||||
(unstable, native_link_modifiers_as_needed, "1.53.0", Some(81490), None),
|
||||
/// Allow negative trait implementations.
|
||||
(unstable, negative_impls, "1.44.0", Some(68318), None),
|
||||
/// Allows the `!` pattern.
|
||||
(incomplete, never_patterns, "CURRENT_RUSTC_VERSION", Some(118155), None),
|
||||
/// Allows the `!` type. Does not imply 'exhaustive_patterns' (below) any more.
|
||||
(unstable, never_type, "1.13.0", Some(35121), None),
|
||||
/// Allows diverging expressions to fall back to `!` rather than `()`.
|
||||
|
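For context on the `never_patterns` gate declared above: today an uninhabited payload has to be dispatched with an empty `match`, which is the gap the `!` pattern is meant to close. A stable-Rust sketch of the status quo, with the intended never-pattern spelling noted in a comment (the feature is marked incomplete, so its final form may differ):

```rust
enum Void {}

fn unwrap_or_void(res: Result<u32, Void>) -> u32 {
    match res {
        Ok(n) => n,
        // With `#![feature(never_patterns)]` this arm is intended to be
        // writable as `Err(!)` with no body; on stable we match the
        // uninhabited payload with an empty `match` instead.
        Err(v) => match v {},
    }
}

fn main() {
    assert_eq!(unwrap_or_void(Ok(7)), 7);
}
```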
@ -7,8 +7,8 @@ use crate::LangItem;
|
||||
use rustc_ast as ast;
|
||||
use rustc_ast::util::parser::ExprPrecedence;
|
||||
use rustc_ast::{Attribute, FloatTy, IntTy, Label, LitKind, TraitObjectSyntax, UintTy};
|
||||
pub use rustc_ast::{BindingAnnotation, BorrowKind, ByRef, ImplPolarity, IsAuto};
|
||||
pub use rustc_ast::{CaptureBy, Movability, Mutability};
|
||||
pub use rustc_ast::{BinOp, BinOpKind, BindingAnnotation, BorrowKind, ByRef, CaptureBy};
|
||||
pub use rustc_ast::{ImplPolarity, IsAuto, Movability, Mutability, UnOp};
|
||||
use rustc_ast::{InlineAsmOptions, InlineAsmTemplatePiece};
|
||||
use rustc_data_structures::fingerprint::Fingerprint;
|
||||
use rustc_data_structures::fx::FxHashMap;
|
||||
@ -1002,7 +1002,7 @@ impl<'hir> Pat<'hir> {
|
||||
|
||||
use PatKind::*;
|
||||
match self.kind {
|
||||
Wild | Lit(_) | Range(..) | Binding(.., None) | Path(_) => true,
|
||||
Wild | Never | Lit(_) | Range(..) | Binding(.., None) | Path(_) => true,
|
||||
Box(s) | Ref(s, _) | Binding(.., Some(s)) => s.walk_short_(it),
|
||||
Struct(_, fields, _) => fields.iter().all(|field| field.pat.walk_short_(it)),
|
||||
TupleStruct(_, s, _) | Tuple(s, _) | Or(s) => s.iter().all(|p| p.walk_short_(it)),
|
||||
@ -1029,7 +1029,7 @@ impl<'hir> Pat<'hir> {
|
||||
|
||||
use PatKind::*;
|
||||
match self.kind {
|
||||
Wild | Lit(_) | Range(..) | Binding(.., None) | Path(_) => {}
|
||||
Wild | Never | Lit(_) | Range(..) | Binding(.., None) | Path(_) => {}
|
||||
Box(s) | Ref(s, _) | Binding(.., Some(s)) => s.walk_(it),
|
||||
Struct(_, fields, _) => fields.iter().for_each(|field| field.pat.walk_(it)),
|
||||
TupleStruct(_, s, _) | Tuple(s, _) | Or(s) => s.iter().for_each(|p| p.walk_(it)),
|
||||
@ -1142,6 +1142,9 @@ pub enum PatKind<'hir> {
|
||||
/// Invariant: `pats.len() >= 2`.
|
||||
Or(&'hir [Pat<'hir>]),
|
||||
|
||||
/// A never pattern `!`.
|
||||
Never,
|
||||
|
||||
/// A path pattern for a unit struct/variant or a (maybe-associated) constant.
|
||||
Path(QPath<'hir>),
|
||||
|
||||
@ -1174,155 +1177,6 @@ pub enum PatKind<'hir> {
|
||||
Slice(&'hir [Pat<'hir>], Option<&'hir Pat<'hir>>, &'hir [Pat<'hir>]),
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Debug, HashStable_Generic)]
|
||||
pub enum BinOpKind {
|
||||
/// The `+` operator (addition).
|
||||
Add,
|
||||
/// The `-` operator (subtraction).
|
||||
Sub,
|
||||
/// The `*` operator (multiplication).
|
||||
Mul,
|
||||
/// The `/` operator (division).
|
||||
Div,
|
||||
/// The `%` operator (modulus).
|
||||
Rem,
|
||||
/// The `&&` operator (logical and).
|
||||
And,
|
||||
/// The `||` operator (logical or).
|
||||
Or,
|
||||
/// The `^` operator (bitwise xor).
|
||||
BitXor,
|
||||
/// The `&` operator (bitwise and).
|
||||
BitAnd,
|
||||
/// The `|` operator (bitwise or).
|
||||
BitOr,
|
||||
/// The `<<` operator (shift left).
|
||||
Shl,
|
||||
/// The `>>` operator (shift right).
|
||||
Shr,
|
||||
/// The `==` operator (equality).
|
||||
Eq,
|
||||
/// The `<` operator (less than).
|
||||
Lt,
|
||||
/// The `<=` operator (less than or equal to).
|
||||
Le,
|
||||
/// The `!=` operator (not equal to).
|
||||
Ne,
|
||||
/// The `>=` operator (greater than or equal to).
|
||||
Ge,
|
||||
/// The `>` operator (greater than).
|
||||
Gt,
|
||||
}
|
||||
|
||||
impl BinOpKind {
|
||||
pub fn as_str(self) -> &'static str {
|
||||
match self {
|
||||
BinOpKind::Add => "+",
|
||||
BinOpKind::Sub => "-",
|
||||
BinOpKind::Mul => "*",
|
||||
BinOpKind::Div => "/",
|
||||
BinOpKind::Rem => "%",
|
||||
BinOpKind::And => "&&",
|
||||
BinOpKind::Or => "||",
|
||||
BinOpKind::BitXor => "^",
|
||||
BinOpKind::BitAnd => "&",
|
||||
BinOpKind::BitOr => "|",
|
||||
BinOpKind::Shl => "<<",
|
||||
BinOpKind::Shr => ">>",
|
||||
BinOpKind::Eq => "==",
|
||||
BinOpKind::Lt => "<",
|
||||
BinOpKind::Le => "<=",
|
||||
BinOpKind::Ne => "!=",
|
||||
BinOpKind::Ge => ">=",
|
||||
BinOpKind::Gt => ">",
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_lazy(self) -> bool {
|
||||
matches!(self, BinOpKind::And | BinOpKind::Or)
|
||||
}
|
||||
|
||||
pub fn is_comparison(self) -> bool {
|
||||
match self {
|
||||
BinOpKind::Eq
|
||||
| BinOpKind::Lt
|
||||
| BinOpKind::Le
|
||||
| BinOpKind::Ne
|
||||
| BinOpKind::Gt
|
||||
| BinOpKind::Ge => true,
|
||||
BinOpKind::And
|
||||
| BinOpKind::Or
|
||||
| BinOpKind::Add
|
||||
| BinOpKind::Sub
|
||||
| BinOpKind::Mul
|
||||
| BinOpKind::Div
|
||||
| BinOpKind::Rem
|
||||
| BinOpKind::BitXor
|
||||
| BinOpKind::BitAnd
|
||||
| BinOpKind::BitOr
|
||||
| BinOpKind::Shl
|
||||
| BinOpKind::Shr => false,
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns `true` if the binary operator takes its arguments by value.
|
||||
pub fn is_by_value(self) -> bool {
|
||||
!self.is_comparison()
|
||||
}
|
||||
}
|
||||
|
||||
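The helpers above follow a deliberate pattern: `is_comparison` is an exhaustive `match` so that adding an operator variant forces the classification to be revisited, while `is_lazy` can use `matches!` because new variants default to "not lazy". A tiny self-contained mirror of that pattern with an invented `MiniOp` enum:

```rust
#[derive(Clone, Copy)]
enum MiniOp {
    Add,
    Eq,
    And,
}

impl MiniOp {
    fn is_lazy(self) -> bool {
        matches!(self, MiniOp::And)
    }

    fn is_comparison(self) -> bool {
        // Exhaustive on purpose: a new variant breaks the build until classified.
        match self {
            MiniOp::Eq => true,
            MiniOp::Add | MiniOp::And => false,
        }
    }

    /// Non-comparison operators take their operands by value.
    fn is_by_value(self) -> bool {
        !self.is_comparison()
    }
}

fn main() {
    assert!(MiniOp::And.is_lazy());
    assert!(MiniOp::Eq.is_comparison());
    assert!(MiniOp::Add.is_by_value());
}
```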
impl Into<ast::BinOpKind> for BinOpKind {
|
||||
fn into(self) -> ast::BinOpKind {
|
||||
match self {
|
||||
BinOpKind::Add => ast::BinOpKind::Add,
|
||||
BinOpKind::Sub => ast::BinOpKind::Sub,
|
||||
BinOpKind::Mul => ast::BinOpKind::Mul,
|
||||
BinOpKind::Div => ast::BinOpKind::Div,
|
||||
BinOpKind::Rem => ast::BinOpKind::Rem,
|
||||
BinOpKind::And => ast::BinOpKind::And,
|
||||
BinOpKind::Or => ast::BinOpKind::Or,
|
||||
BinOpKind::BitXor => ast::BinOpKind::BitXor,
|
||||
BinOpKind::BitAnd => ast::BinOpKind::BitAnd,
|
||||
BinOpKind::BitOr => ast::BinOpKind::BitOr,
|
||||
BinOpKind::Shl => ast::BinOpKind::Shl,
|
||||
BinOpKind::Shr => ast::BinOpKind::Shr,
|
||||
BinOpKind::Eq => ast::BinOpKind::Eq,
|
||||
BinOpKind::Lt => ast::BinOpKind::Lt,
|
||||
BinOpKind::Le => ast::BinOpKind::Le,
|
||||
BinOpKind::Ne => ast::BinOpKind::Ne,
|
||||
BinOpKind::Ge => ast::BinOpKind::Ge,
|
||||
BinOpKind::Gt => ast::BinOpKind::Gt,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub type BinOp = Spanned<BinOpKind>;
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Debug, HashStable_Generic)]
|
||||
pub enum UnOp {
|
||||
/// The `*` operator (dereferencing).
|
||||
Deref,
|
||||
/// The `!` operator (logical negation).
|
||||
Not,
|
||||
/// The `-` operator (negation).
|
||||
Neg,
|
||||
}
|
||||
|
||||
impl UnOp {
|
||||
pub fn as_str(self) -> &'static str {
|
||||
match self {
|
||||
Self::Deref => "*",
|
||||
Self::Not => "!",
|
||||
Self::Neg => "-",
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns `true` if the unary operator takes its argument by value.
|
||||
pub fn is_by_value(self) -> bool {
|
||||
matches!(self, Self::Neg | Self::Not)
|
||||
}
|
||||
}
|
||||
|
||||
/// A statement.
|
||||
#[derive(Debug, Clone, Copy, HashStable_Generic)]
|
||||
pub struct Stmt<'hir> {
|
||||
|
@ -660,7 +660,7 @@ pub fn walk_pat<'v, V: Visitor<'v>>(visitor: &mut V, pattern: &'v Pat<'v>) {
|
||||
walk_list!(visitor, visit_expr, lower_bound);
|
||||
walk_list!(visitor, visit_expr, upper_bound);
|
||||
}
|
||||
PatKind::Wild => (),
|
||||
PatKind::Never | PatKind::Wild => (),
|
||||
PatKind::Slice(prepatterns, ref slice_pattern, postpatterns) => {
|
||||
walk_list!(visitor, visit_pat, prepatterns);
|
||||
walk_list!(visitor, visit_pat, slice_pattern);
|
||||
|
@ -26,12 +26,13 @@ use rustc_hir::def::{CtorOf, DefKind, Namespace, Res};
|
||||
use rustc_hir::def_id::{DefId, LocalDefId};
|
||||
use rustc_hir::intravisit::{walk_generics, Visitor as _};
|
||||
use rustc_hir::{GenericArg, GenericArgs, OpaqueTyOrigin};
|
||||
use rustc_infer::infer::{InferCtxt, InferOk, TyCtxtInferExt};
|
||||
use rustc_infer::infer::{InferCtxt, TyCtxtInferExt};
|
||||
use rustc_infer::traits::ObligationCause;
|
||||
use rustc_middle::middle::stability::AllowUnstable;
|
||||
use rustc_middle::ty::GenericParamDefKind;
|
||||
use rustc_middle::ty::{
|
||||
self, Const, GenericArgKind, GenericArgsRef, IsSuggestable, Ty, TyCtxt, TypeVisitableExt,
|
||||
self, Const, GenericArgKind, GenericArgsRef, IsSuggestable, ParamEnv, Ty, TyCtxt,
|
||||
TypeVisitableExt,
|
||||
};
|
||||
use rustc_session::lint::builtin::AMBIGUOUS_ASSOCIATED_ITEMS;
|
||||
use rustc_span::edit_distance::find_best_match_for_name;
|
||||
@ -39,8 +40,7 @@ use rustc_span::symbol::{kw, Ident, Symbol};
|
||||
use rustc_span::{sym, BytePos, Span, DUMMY_SP};
|
||||
use rustc_target::spec::abi;
|
||||
use rustc_trait_selection::traits::wf::object_region_bounds;
|
||||
use rustc_trait_selection::traits::{self, NormalizeExt, ObligationCtxt};
|
||||
use rustc_type_ir::fold::{TypeFoldable, TypeFolder, TypeSuperFoldable};
|
||||
use rustc_trait_selection::traits::{self, ObligationCtxt};
|
||||
|
||||
use std::fmt::Display;
|
||||
use std::slice;
|
||||
@ -1606,133 +1606,110 @@ impl<'o, 'tcx> dyn AstConv<'tcx> + 'o {
|
||||
// FIXME(inherent_associated_types): Acquiring the ParamEnv this early leads to cycle errors
|
||||
// when inside of an ADT (#108491) or where clause.
|
||||
let param_env = tcx.param_env(block.owner);
|
||||
let cause = ObligationCause::misc(span, block.owner.def_id);
|
||||
|
||||
let mut universes = if self_ty.has_escaping_bound_vars() {
|
||||
vec![None; self_ty.outer_exclusive_binder().as_usize()]
|
||||
} else {
|
||||
vec![]
|
||||
};
|
||||
|
||||
let (impl_, (assoc_item, def_scope)) =
|
||||
crate::traits::project::with_replaced_escaping_bound_vars(
|
||||
infcx,
|
||||
&mut universes,
|
||||
self_ty,
|
||||
|self_ty| {
|
||||
self.select_inherent_assoc_type_candidates(
|
||||
infcx, name, span, self_ty, param_env, candidates,
|
||||
)
|
||||
},
|
||||
)?;
|
||||
|
||||
self.check_assoc_ty(assoc_item, name, def_scope, block, span);
|
||||
|
||||
// FIXME(fmease): Currently creating throwaway `parent_args` to please
|
||||
// `create_args_for_associated_item`. Modify the latter instead (or sth. similar) to
|
||||
// not require the parent args logic.
|
||||
let parent_args = ty::GenericArgs::identity_for_item(tcx, impl_);
|
||||
let args = self.create_args_for_associated_item(span, assoc_item, segment, parent_args);
|
||||
let args = tcx.mk_args_from_iter(
|
||||
std::iter::once(ty::GenericArg::from(self_ty))
|
||||
.chain(args.into_iter().skip(parent_args.len())),
|
||||
);
|
||||
|
||||
let ty = Ty::new_alias(tcx, ty::Inherent, ty::AliasTy::new(tcx, assoc_item, args));
|
||||
|
||||
Ok(Some((ty, assoc_item)))
|
||||
}
|
||||
|
||||
fn select_inherent_assoc_type_candidates(
|
||||
&self,
|
||||
infcx: &InferCtxt<'tcx>,
|
||||
name: Ident,
|
||||
span: Span,
|
||||
self_ty: Ty<'tcx>,
|
||||
param_env: ParamEnv<'tcx>,
|
||||
candidates: Vec<(DefId, (DefId, DefId))>,
|
||||
) -> Result<(DefId, (DefId, DefId)), ErrorGuaranteed> {
|
||||
let tcx = self.tcx();
|
||||
let mut fulfillment_errors = Vec::new();
|
||||
let mut applicable_candidates: Vec<_> = infcx.probe(|_| {
|
||||
// Regions are not considered during selection.
|
||||
let self_ty = self_ty
|
||||
.fold_with(&mut BoundVarEraser { tcx, universe: infcx.create_next_universe() });
|
||||
|
||||
struct BoundVarEraser<'tcx> {
|
||||
tcx: TyCtxt<'tcx>,
|
||||
universe: ty::UniverseIndex,
|
||||
}
|
||||
let applicable_candidates: Vec<_> = candidates
|
||||
.iter()
|
||||
.copied()
|
||||
.filter(|&(impl_, _)| {
|
||||
infcx.probe(|_| {
|
||||
let ocx = ObligationCtxt::new(infcx);
|
||||
let self_ty = ocx.normalize(&ObligationCause::dummy(), param_env, self_ty);
|
||||
|
||||
// FIXME(non_lifetime_binders): Don't assign the same universe to each placeholder.
|
||||
impl<'tcx> TypeFolder<TyCtxt<'tcx>> for BoundVarEraser<'tcx> {
|
||||
fn interner(&self) -> TyCtxt<'tcx> {
|
||||
self.tcx
|
||||
}
|
||||
let impl_args = infcx.fresh_args_for_item(span, impl_);
|
||||
let impl_ty = tcx.type_of(impl_).instantiate(tcx, impl_args);
|
||||
let impl_ty = ocx.normalize(&ObligationCause::dummy(), param_env, impl_ty);
|
||||
|
||||
fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> {
|
||||
// FIXME(@lcnr): This is broken, erasing bound regions
|
||||
// impacts selection as it results in different types.
|
||||
if r.is_bound() { self.tcx.lifetimes.re_erased } else { r }
|
||||
}
|
||||
|
||||
fn fold_ty(&mut self, ty: Ty<'tcx>) -> Ty<'tcx> {
|
||||
match *ty.kind() {
|
||||
ty::Bound(_, bv) => Ty::new_placeholder(
|
||||
self.tcx,
|
||||
ty::PlaceholderType { universe: self.universe, bound: bv },
|
||||
),
|
||||
_ => ty.super_fold_with(self),
|
||||
// Check that the self types can be related.
|
||||
if ocx.eq(&ObligationCause::dummy(), param_env, impl_ty, self_ty).is_err() {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
fn fold_const(
|
||||
&mut self,
|
||||
ct: ty::Const<'tcx>,
|
||||
) -> <TyCtxt<'tcx> as rustc_type_ir::Interner>::Const {
|
||||
assert!(!ct.ty().has_escaping_bound_vars());
|
||||
// Check whether the impl imposes obligations we have to worry about.
|
||||
let impl_bounds = tcx.predicates_of(impl_).instantiate(tcx, impl_args);
|
||||
let impl_bounds =
|
||||
ocx.normalize(&ObligationCause::dummy(), param_env, impl_bounds);
|
||||
let impl_obligations = traits::predicates_for_generics(
|
||||
|_, _| ObligationCause::dummy(),
|
||||
param_env,
|
||||
impl_bounds,
|
||||
);
|
||||
ocx.register_obligations(impl_obligations);
|
||||
|
||||
match ct.kind() {
|
||||
ty::ConstKind::Bound(_, bv) => ty::Const::new_placeholder(
|
||||
self.tcx,
|
||||
ty::PlaceholderConst { universe: self.universe, bound: bv },
|
||||
ct.ty(),
|
||||
),
|
||||
_ => ct.super_fold_with(self),
|
||||
let mut errors = ocx.select_where_possible();
|
||||
if !errors.is_empty() {
|
||||
fulfillment_errors.append(&mut errors);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let InferOk { value: self_ty, obligations } =
|
||||
infcx.at(&cause, param_env).normalize(self_ty);
|
||||
|
||||
candidates
|
||||
.iter()
|
||||
.copied()
|
||||
.filter(|&(impl_, _)| {
|
||||
infcx.probe(|_| {
|
||||
let ocx = ObligationCtxt::new(infcx);
|
||||
ocx.register_obligations(obligations.clone());
|
||||
|
||||
let impl_args = infcx.fresh_args_for_item(span, impl_);
|
||||
let impl_ty = tcx.type_of(impl_).instantiate(tcx, impl_args);
|
||||
let impl_ty = ocx.normalize(&cause, param_env, impl_ty);
|
||||
|
||||
// Check that the self types can be related.
|
||||
if ocx.eq(&ObligationCause::dummy(), param_env, impl_ty, self_ty).is_err() {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Check whether the impl imposes obligations we have to worry about.
|
||||
let impl_bounds = tcx.predicates_of(impl_).instantiate(tcx, impl_args);
|
||||
let impl_bounds = ocx.normalize(&cause, param_env, impl_bounds);
|
||||
let impl_obligations = traits::predicates_for_generics(
|
||||
|_, _| cause.clone(),
|
||||
param_env,
|
||||
impl_bounds,
|
||||
);
|
||||
ocx.register_obligations(impl_obligations);
|
||||
|
||||
let mut errors = ocx.select_where_possible();
|
||||
if !errors.is_empty() {
|
||||
fulfillment_errors.append(&mut errors);
|
||||
return false;
|
||||
}
|
||||
|
||||
true
|
||||
})
|
||||
true
|
||||
})
|
||||
.collect()
|
||||
});
|
||||
})
|
||||
.collect();
|
||||
|
||||
if applicable_candidates.len() > 1 {
|
||||
return Err(self.complain_about_ambiguous_inherent_assoc_type(
|
||||
match &applicable_candidates[..] {
|
||||
&[] => Err(self.complain_about_inherent_assoc_type_not_found(
|
||||
name,
|
||||
self_ty,
|
||||
candidates,
|
||||
fulfillment_errors,
|
||||
span,
|
||||
)),
|
||||
|
||||
&[applicable_candidate] => Ok(applicable_candidate),
|
||||
|
||||
&[_, ..] => Err(self.complain_about_ambiguous_inherent_assoc_type(
|
||||
name,
|
||||
applicable_candidates.into_iter().map(|(_, (candidate, _))| candidate).collect(),
|
||||
span,
|
||||
));
|
||||
)),
|
||||
}
|
||||
|
||||
if let Some((impl_, (assoc_item, def_scope))) = applicable_candidates.pop() {
|
||||
self.check_assoc_ty(assoc_item, name, def_scope, block, span);
|
||||
|
||||
// FIXME(fmease): Currently creating throwaway `parent_args` to please
|
||||
// `create_args_for_associated_item`. Modify the latter instead (or sth. similar) to
|
||||
// not require the parent args logic.
|
||||
let parent_args = ty::GenericArgs::identity_for_item(tcx, impl_);
|
||||
let args = self.create_args_for_associated_item(span, assoc_item, segment, parent_args);
|
||||
let args = tcx.mk_args_from_iter(
|
||||
std::iter::once(ty::GenericArg::from(self_ty))
|
||||
.chain(args.into_iter().skip(parent_args.len())),
|
||||
);
|
||||
|
||||
let ty = Ty::new_alias(tcx, ty::Inherent, ty::AliasTy::new(tcx, assoc_item, args));
|
||||
|
||||
return Ok(Some((ty, assoc_item)));
|
||||
}
|
||||
|
||||
Err(self.complain_about_inherent_assoc_type_not_found(
|
||||
name,
|
||||
self_ty,
|
||||
candidates,
|
||||
fulfillment_errors,
|
||||
span,
|
||||
))
|
||||
}
|
||||
|
||||
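The selection above probes each candidate impl in isolation, filters out those whose obligations cannot be satisfied, and then requires exactly one survivor, reporting "not found" and "ambiguous" as distinct errors. A simplified, non-rustc model of that flow (all names invented for illustration):

```rust
#[derive(Debug, PartialEq)]
enum SelectionError {
    NotFound,
    Ambiguous(usize),
}

fn select_unique<T: Copy>(
    candidates: &[T],
    mut applies: impl FnMut(T) -> bool,
) -> Result<T, SelectionError> {
    // Probe each candidate independently and keep only the applicable ones.
    let applicable: Vec<T> = candidates.iter().copied().filter(|&c| applies(c)).collect();
    match applicable.as_slice() {
        [] => Err(SelectionError::NotFound),
        [only] => Ok(*only),
        many => Err(SelectionError::Ambiguous(many.len())),
    }
}

fn main() {
    assert_eq!(select_unique(&[1, 2, 3], |c| c == 2), Ok(2));
    assert_eq!(select_unique(&[1, 2, 3], |c| c > 1), Err(SelectionError::Ambiguous(2)));
    let no_candidates: [i32; 0] = [];
    assert_eq!(select_unique(&no_candidates, |_| true), Err(SelectionError::NotFound));
}
```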
fn lookup_assoc_ty(
|
||||
|
@ -662,6 +662,7 @@ fn resolve_local<'tcx>(
|
||||
PatKind::Ref(_, _)
|
||||
| PatKind::Binding(hir::BindingAnnotation(hir::ByRef::No, _), ..)
|
||||
| PatKind::Wild
|
||||
| PatKind::Never
|
||||
| PatKind::Path(_)
|
||||
| PatKind::Lit(_)
|
||||
| PatKind::Range(_, _, _) => false,
|
||||
|
@ -1724,6 +1724,7 @@ impl<'a> State<'a> {
|
||||
// is that it doesn't matter
|
||||
match pat.kind {
|
||||
PatKind::Wild => self.word("_"),
|
||||
PatKind::Never => self.word("!"),
|
||||
PatKind::Binding(BindingAnnotation(by_ref, mutbl), _, ident, sub) => {
|
||||
if by_ref == ByRef::Yes {
|
||||
self.word_nbsp("ref");
|
||||
|
@ -139,7 +139,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
||||
&cause,
|
||||
Some(arm.body),
|
||||
arm_ty,
|
||||
|err| self.suggest_removing_semicolon_for_coerce(err, expr, arm_ty, prior_arm),
|
||||
|err| {
|
||||
self.explain_never_type_coerced_to_unit(err, arm, arm_ty, prior_arm, expr);
|
||||
},
|
||||
false,
|
||||
);
|
||||
|
||||
@ -177,6 +179,38 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
||||
coercion.complete(self)
|
||||
}
|
||||
|
||||
fn explain_never_type_coerced_to_unit(
|
||||
&self,
|
||||
err: &mut Diagnostic,
|
||||
arm: &hir::Arm<'tcx>,
|
||||
arm_ty: Ty<'tcx>,
|
||||
prior_arm: Option<(Option<hir::HirId>, Ty<'tcx>, Span)>,
|
||||
expr: &hir::Expr<'tcx>,
|
||||
) {
|
||||
if let hir::ExprKind::Block(block, _) = arm.body.kind
|
||||
&& let Some(expr) = block.expr
|
||||
&& let arm_tail_ty = self.node_ty(expr.hir_id)
|
||||
&& arm_tail_ty.is_never()
|
||||
&& !arm_ty.is_never()
|
||||
{
|
||||
err.span_label(
|
||||
expr.span,
|
||||
format!(
|
||||
"this expression is of type `!`, but it is coerced to `{arm_ty}` due to its \
|
||||
surrounding expression",
|
||||
),
|
||||
);
|
||||
self.suggest_mismatched_types_on_tail(
|
||||
err,
|
||||
expr,
|
||||
arm_ty,
|
||||
prior_arm.map_or(arm_tail_ty, |(_, ty, _)| ty),
|
||||
expr.hir_id,
|
||||
);
|
||||
}
|
||||
self.suggest_removing_semicolon_for_coerce(err, expr, arm_ty, prior_arm)
|
||||
}
|
||||
|
||||
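The new `explain_never_type_coerced_to_unit` note targets user code of roughly this shape, where a diverging arm tail has type `!` and is coerced to the match's type; the commented variant with a trailing semicolon is what actually triggers the label and the remove-the-semicolon suggestion (illustrative example, not taken from the test suite):

```rust
fn describe(flag: bool) -> u32 {
    match flag {
        true => 1,
        false => {
            // Writing `panic!("unsupported");` here (note the semicolon) would make
            // this arm `()` and trigger the "this expression is of type `!`, but it
            // is coerced to ..." label plus the remove-the-semicolon suggestion.
            panic!("unsupported")
        }
    }
}

fn main() {
    assert_eq!(describe(true), 1);
}
```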
fn suggest_removing_semicolon_for_coerce(
|
||||
&self,
|
||||
diag: &mut Diagnostic,
|
||||
|
@ -6,9 +6,8 @@ use crate::errors;
|
||||
use rustc_ast::util::parser::PREC_POSTFIX;
|
||||
use rustc_errors::{Applicability, Diagnostic, ErrorGuaranteed, StashKey};
|
||||
use rustc_hir as hir;
|
||||
use rustc_hir::def::{self, CtorKind, DefKind, Namespace, Res};
|
||||
use rustc_hir::def::{self, CtorKind, Namespace, Res};
|
||||
use rustc_hir::def_id::DefId;
|
||||
use rustc_hir::HirId;
|
||||
use rustc_hir_analysis::autoderef::Autoderef;
|
||||
use rustc_infer::{
|
||||
infer,
|
||||
@ -373,7 +372,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
||||
) -> Ty<'tcx> {
|
||||
let (fn_sig, def_id) = match *callee_ty.kind() {
|
||||
ty::FnDef(def_id, args) => {
|
||||
self.enforce_context_effects(call_expr.hir_id, call_expr.span, def_id, args);
|
||||
self.enforce_context_effects(call_expr.span, def_id, args);
|
||||
let fn_sig = self.tcx.fn_sig(def_id).instantiate(self.tcx, args);
|
||||
|
||||
// Unit testing: function items annotated with
|
||||
@ -770,7 +769,6 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
||||
#[tracing::instrument(level = "debug", skip(self, span))]
|
||||
pub(super) fn enforce_context_effects(
|
||||
&self,
|
||||
call_expr_hir: HirId,
|
||||
span: Span,
|
||||
callee_did: DefId,
|
||||
callee_args: GenericArgsRef<'tcx>,
|
||||
@ -781,38 +779,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
||||
let generics = tcx.generics_of(callee_did);
|
||||
let Some(host_effect_index) = generics.host_effect_index else { return };
|
||||
|
||||
// if the callee does have the param, we need to equate the param to some const
|
||||
// value no matter whether the effects feature is enabled in the local crate,
|
||||
// because inference will fail if we don't.
|
||||
let mut host_always_on =
|
||||
!tcx.features().effects || tcx.sess.opts.unstable_opts.unleash_the_miri_inside_of_you;
|
||||
|
||||
// Compute the constness required by the context.
|
||||
let context = tcx.hir().enclosing_body_owner(call_expr_hir);
|
||||
let const_context = tcx.hir().body_const_context(context);
|
||||
|
||||
let kind = tcx.def_kind(context.to_def_id());
|
||||
debug_assert_ne!(kind, DefKind::ConstParam);
|
||||
|
||||
if tcx.has_attr(context.to_def_id(), sym::rustc_do_not_const_check) {
|
||||
trace!("do not const check this context");
|
||||
host_always_on = true;
|
||||
}
|
||||
|
||||
let effect = match const_context {
|
||||
_ if host_always_on => tcx.consts.true_,
|
||||
Some(hir::ConstContext::Static(_) | hir::ConstContext::Const { .. }) => {
|
||||
tcx.consts.false_
|
||||
}
|
||||
Some(hir::ConstContext::ConstFn) => {
|
||||
let host_idx = tcx
|
||||
.generics_of(context)
|
||||
.host_effect_index
|
||||
.expect("ConstContext::Maybe must have host effect param");
|
||||
ty::GenericArgs::identity_for_item(tcx, context).const_at(host_idx)
|
||||
}
|
||||
None => tcx.consts.true_,
|
||||
};
|
||||
let effect = tcx.expected_const_effect_param_for_body(self.body_id);
|
||||
|
||||
trace!(?effect, ?generics, ?callee_args);
|
||||
|
||||
@ -845,7 +812,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
||||
expected,
|
||||
);
|
||||
|
||||
self.write_method_call(call_expr.hir_id, method_callee);
|
||||
self.write_method_call_and_enforce_effects(call_expr.hir_id, call_expr.span, method_callee);
|
||||
output_type
|
||||
}
|
||||
}
|
||||
@ -895,7 +862,11 @@ impl<'a, 'tcx> DeferredCallResolution<'tcx> {
|
||||
adjustments.extend(autoref);
|
||||
fcx.apply_adjustments(self.callee_expr, adjustments);
|
||||
|
||||
fcx.write_method_call(self.call_expr.hir_id, method_callee);
|
||||
fcx.write_method_call_and_enforce_effects(
|
||||
self.call_expr.hir_id,
|
||||
self.call_expr.span,
|
||||
method_callee,
|
||||
);
|
||||
}
|
||||
None => {
|
||||
// This can happen if `#![no_core]` is used and the `fn/fn_mut/fn_once`
|
||||
|
@ -1715,6 +1715,7 @@ impl<'tcx, 'exprs, E: AsCoercionSite> CoerceMany<'tcx, 'exprs, E> {
|
||||
// label pointing out the cause for the type coercion will be wrong
|
||||
// as prior return coercions would not be relevant (#57664).
|
||||
let fn_decl = if let (Some(expr), Some(blk_id)) = (expression, blk_id) {
|
||||
fcx.suggest_missing_semicolon(&mut err, expr, expected, false);
|
||||
let pointing_at_return_type =
|
||||
fcx.suggest_mismatched_types_on_tail(&mut err, expr, expected, found, blk_id);
|
||||
if let (Some(cond_expr), true, false) = (
|
||||
|
@ -626,7 +626,7 @@ pub struct SuggestConvertViaMethod<'tcx> {
|
||||
pub span: Span,
|
||||
#[suggestion_part(code = "")]
|
||||
pub borrow_removal_span: Option<Span>,
|
||||
pub sugg: &'static str,
|
||||
pub sugg: String,
|
||||
pub expected: Ty<'tcx>,
|
||||
pub found: Ty<'tcx>,
|
||||
}
|
||||
|
@ -663,8 +663,11 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
||||
coerce.coerce_forced_unit(
|
||||
self,
|
||||
&cause,
|
||||
|err| {
|
||||
self.suggest_mismatched_types_on_tail(err, expr, ty, e_ty, target_id);
|
||||
|mut err| {
|
||||
self.suggest_missing_semicolon(&mut err, expr, e_ty, false);
|
||||
self.suggest_mismatched_types_on_tail(
|
||||
&mut err, expr, ty, e_ty, target_id,
|
||||
);
|
||||
let error = Some(Sorts(ExpectedFound { expected: ty, found: e_ty }));
|
||||
self.annotate_loop_expected_due_to_inference(err, expr, error);
|
||||
if let Some(val) = ty_kind_suggestion(ty) {
|
||||
@ -1312,9 +1315,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
||||
Ok(method) => {
|
||||
// We could add a "consider `foo::<params>`" suggestion here, but I wasn't able to
|
||||
// trigger this codepath causing `structurally_resolve_type` to emit an error.
|
||||
|
||||
self.enforce_context_effects(expr.hir_id, expr.span, method.def_id, method.args);
|
||||
self.write_method_call(expr.hir_id, method);
|
||||
self.write_method_call_and_enforce_effects(expr.hir_id, expr.span, method);
|
||||
Ok(method)
|
||||
}
|
||||
Err(error) => {
|
||||
|
@ -401,12 +401,17 @@ impl<'a, 'tcx> ExprUseVisitor<'a, 'tcx> {
|
||||
mc.cat_pattern(discr_place.clone(), pat, |place, pat| {
|
||||
match &pat.kind {
|
||||
PatKind::Binding(.., opt_sub_pat) => {
|
||||
// If the opt_sub_pat is None, than the binding does not count as
|
||||
// If the opt_sub_pat is None, then the binding does not count as
|
||||
// a wildcard for the purpose of borrowing discr.
|
||||
if opt_sub_pat.is_none() {
|
||||
needs_to_be_read = true;
|
||||
}
|
||||
}
|
||||
PatKind::Never => {
|
||||
// A never pattern reads the value.
|
||||
// FIXME(never_patterns): does this do what I expect?
|
||||
needs_to_be_read = true;
|
||||
}
|
||||
PatKind::Path(qpath) => {
|
||||
// A `Path` pattern is just a name like `Foo`. This is either a
|
||||
// named constant or else it refers to an ADT variant
|
||||
|
@ -159,7 +159,13 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
||||
}
|
||||
|
||||
#[instrument(level = "debug", skip(self))]
|
||||
pub fn write_method_call(&self, hir_id: hir::HirId, method: MethodCallee<'tcx>) {
|
||||
pub fn write_method_call_and_enforce_effects(
|
||||
&self,
|
||||
hir_id: hir::HirId,
|
||||
span: Span,
|
||||
method: MethodCallee<'tcx>,
|
||||
) {
|
||||
self.enforce_context_effects(span, method.def_id, method.args);
|
||||
self.write_resolution(hir_id, Ok((DefKind::AssocFn, method.def_id)));
|
||||
self.write_args(hir_id, method.args);
|
||||
}
|
||||
|
@ -282,12 +282,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
||||
span: provided_arg.span,
|
||||
});
|
||||
} else {
|
||||
self.enforce_context_effects(
|
||||
provided_arg.hir_id,
|
||||
provided_arg.span,
|
||||
def_id,
|
||||
args,
|
||||
)
|
||||
self.enforce_context_effects(provided_arg.span, def_id, args)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
|
@ -72,7 +72,6 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
||||
blk_id: hir::HirId,
|
||||
) -> bool {
|
||||
let expr = expr.peel_drop_temps();
|
||||
self.suggest_missing_semicolon(err, expr, expected, false);
|
||||
let mut pointing_at_return_type = false;
|
||||
if let hir::ExprKind::Break(..) = expr.kind {
|
||||
// `break` type mismatches provide better context for tail `loop` expressions.
|
||||
@ -443,12 +442,22 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
||||
expected,
|
||||
)
|
||||
});
|
||||
|
||||
let prefix_wrap = |sugg: &str| {
|
||||
if let Some(name) = self.tcx.hir().maybe_get_struct_pattern_shorthand_field(expr) {
|
||||
format!(": {}{}", name, sugg)
|
||||
} else {
|
||||
sugg.to_string()
|
||||
}
|
||||
};
|
||||
|
||||
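`prefix_wrap` adapts a method-call suggestion for struct-pattern shorthand fields: in `Foo { bar }` the suggested code must become `: bar.as_ref()` rather than a bare `.as_ref()` to stay valid. A stand-alone sketch, with an `Option<&str>` standing in for `maybe_get_struct_pattern_shorthand_field`:

```rust
fn prefix_wrap(shorthand_field: Option<&str>, sugg: &str) -> String {
    match shorthand_field {
        // Shorthand field: spell out `field: field.method()`.
        Some(name) => format!(": {}{}", name, sugg),
        None => sugg.to_string(),
    }
}

fn main() {
    assert_eq!(prefix_wrap(None, ".as_ref()"), ".as_ref()");
    assert_eq!(prefix_wrap(Some("bar"), ".as_ref()"), ": bar.as_ref()");
}
```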
// FIXME: This could/should be extended to suggest `as_mut` and `as_deref_mut`,
|
||||
// but those checks need to be a bit more delicate and the benefit is diminishing.
|
||||
if self.can_eq(self.param_env, found_ty_inner, peeled) && error_tys_equate_as_ref {
|
||||
let sugg = prefix_wrap(".as_ref()");
|
||||
err.subdiagnostic(errors::SuggestConvertViaMethod {
|
||||
span: expr.span.shrink_to_hi(),
|
||||
sugg: ".as_ref()",
|
||||
sugg,
|
||||
expected,
|
||||
found,
|
||||
borrow_removal_span,
|
||||
@ -459,9 +468,10 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
||||
&& self.can_eq(self.param_env, deref_ty, peeled)
|
||||
&& error_tys_equate_as_ref
|
||||
{
|
||||
let sugg = prefix_wrap(".as_deref()");
|
||||
err.subdiagnostic(errors::SuggestConvertViaMethod {
|
||||
span: expr.span.shrink_to_hi(),
|
||||
sugg: ".as_deref()",
|
||||
sugg,
|
||||
expected,
|
||||
found,
|
||||
borrow_removal_span,
|
||||
@ -475,10 +485,11 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
||||
self.can_eq(self.param_env, found, expected)
|
||||
})
|
||||
{
|
||||
let sugg = prefix_wrap(".map(|x| x.as_str())");
|
||||
err.span_suggestion_verbose(
|
||||
expr.span.shrink_to_hi(),
|
||||
fluent::hir_typeck_convert_to_str,
|
||||
".map(|x| x.as_str())",
|
||||
sugg,
|
||||
Applicability::MachineApplicable,
|
||||
);
|
||||
return true;
|
||||
@ -629,12 +640,20 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
||||
err.help("use `Box::pin`");
|
||||
}
|
||||
_ => {
|
||||
let prefix = if let Some(name) =
|
||||
self.tcx.hir().maybe_get_struct_pattern_shorthand_field(expr)
|
||||
{
|
||||
format!("{}: ", name)
|
||||
} else {
|
||||
String::new()
|
||||
};
|
||||
let suggestion = vec![
|
||||
(expr.span.shrink_to_lo(), format!("{prefix}Box::pin(")),
|
||||
(expr.span.shrink_to_hi(), ")".to_string()),
|
||||
];
|
||||
err.multipart_suggestion(
|
||||
"you need to pin and box this expression",
|
||||
vec![
|
||||
(expr.span.shrink_to_lo(), "Box::pin(".to_string()),
|
||||
(expr.span.shrink_to_hi(), ")".to_string()),
|
||||
],
|
||||
suggestion,
|
||||
Applicability::MaybeIncorrect,
|
||||
);
|
||||
}
|
||||
@ -1215,7 +1234,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
||||
span = parent_callsite;
|
||||
}
|
||||
|
||||
let sugg = if expr.precedence().order() >= PREC_POSTFIX {
|
||||
let mut sugg = if expr.precedence().order() >= PREC_POSTFIX {
|
||||
vec![(span.shrink_to_hi(), ".into()".to_owned())]
|
||||
} else {
|
||||
vec![
|
||||
@ -1223,6 +1242,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
||||
(span.shrink_to_hi(), ").into()".to_owned()),
|
||||
]
|
||||
};
|
||||
if let Some(name) = self.tcx.hir().maybe_get_struct_pattern_shorthand_field(expr) {
|
||||
sugg.insert(0, (expr.span.shrink_to_lo(), format!("{}: ", name)));
|
||||
}
|
||||
diag.multipart_suggestion(
|
||||
format!("call `Into::into` on this expression to convert `{expr_ty}` into `{expected_ty}`"),
|
||||
sugg,
|
||||
@ -1812,6 +1834,12 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
||||
".expect(\"REASON\")",
|
||||
)
|
||||
};
|
||||
|
||||
let sugg = match self.tcx.hir().maybe_get_struct_pattern_shorthand_field(expr) {
|
||||
Some(ident) => format!(": {ident}{sugg}"),
|
||||
None => sugg.to_string(),
|
||||
};
|
||||
|
||||
err.span_suggestion_verbose(
|
||||
expr.span.shrink_to_hi(),
|
||||
msg,
|
||||
|
@ -438,9 +438,13 @@ fn fatally_break_rust(tcx: TyCtxt<'_>) {
|
||||
}
|
||||
}
|
||||
|
||||
/// `expected` here is the expected number of explicit generic arguments on the trait.
|
||||
fn has_expected_num_generic_args(tcx: TyCtxt<'_>, trait_did: DefId, expected: usize) -> bool {
|
||||
let generics = tcx.generics_of(trait_did);
|
||||
generics.count() == expected + if generics.has_self { 1 } else { 0 }
|
||||
generics.count()
|
||||
== expected
|
||||
+ if generics.has_self { 1 } else { 0 }
|
||||
+ if generics.host_effect_index.is_some() { 1 } else { 0 }
|
||||
}
|
||||
|
||||
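The updated count above is: explicit generic arguments, plus one for `Self` when present, plus one for the hidden host-effect const parameter when const-trait desugaring adds it. A small arithmetic sketch of that rule, where the booleans stand in for `generics.has_self` and `generics.host_effect_index.is_some()`:

```rust
fn expected_generic_count(explicit: usize, has_self: bool, has_host_effect: bool) -> usize {
    explicit + usize::from(has_self) + usize::from(has_host_effect)
}

fn main() {
    // e.g. a trait like `Index<Idx>`: one explicit parameter plus `Self`.
    assert_eq!(expected_generic_count(1, true, false), 2);
    // The same trait under const-trait desugaring gains the host effect param.
    assert_eq!(expected_generic_count(1, true, true), 3);
}
```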
pub fn provide(providers: &mut Providers) {
|
||||
|
@ -766,6 +766,7 @@ impl<'a, 'tcx> MemCategorizationContext<'a, 'tcx> {
|
||||
| PatKind::Binding(.., None)
|
||||
| PatKind::Lit(..)
|
||||
| PatKind::Range(..)
|
||||
| PatKind::Never
|
||||
| PatKind::Wild => {
|
||||
// always ok
|
||||
}
|
||||
|
@ -291,7 +291,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
||||
.push(autoref);
|
||||
}
|
||||
}
|
||||
self.write_method_call(expr.hir_id, method);
|
||||
self.write_method_call_and_enforce_effects(expr.hir_id, expr.span, method);
|
||||
|
||||
method.sig.output()
|
||||
}
|
||||
@ -781,7 +781,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
||||
assert!(op.is_by_value());
|
||||
match self.lookup_op_method(operand_ty, None, Op::Unary(op, ex.span), expected) {
|
||||
Ok(method) => {
|
||||
self.write_method_call(ex.hir_id, method);
|
||||
self.write_method_call_and_enforce_effects(ex.hir_id, ex.span, method);
|
||||
method.sig.output()
|
||||
}
|
||||
Err(errors) => {
|
||||
|
@ -178,6 +178,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
||||
|
||||
let ty = match pat.kind {
|
||||
PatKind::Wild => expected,
|
||||
// FIXME(never_patterns): check the type is uninhabited. If that is not possible within
|
||||
// typeck, do that in a later phase.
|
||||
PatKind::Never => expected,
|
||||
PatKind::Lit(lt) => self.check_pat_lit(pat.span, lt, expected, ti),
|
||||
PatKind::Range(lhs, rhs, _) => self.check_pat_range(pat.span, lhs, rhs, expected, ti),
|
||||
PatKind::Binding(ba, var_id, _, sub) => {
|
||||
@ -287,9 +290,11 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
||||
| PatKind::Box(_)
|
||||
| PatKind::Range(..)
|
||||
| PatKind::Slice(..) => AdjustMode::Peel,
|
||||
// A never pattern behaves somewhat like a literal or unit variant.
|
||||
PatKind::Never => AdjustMode::Peel,
|
||||
// String and byte-string literals result in types `&str` and `&[u8]` respectively.
|
||||
// All other literals result in non-reference types.
|
||||
// As a result, we allow `if let 0 = &&0 {}` but not `if let "foo" = &&"foo {}`.
|
||||
// As a result, we allow `if let 0 = &&0 {}` but not `if let "foo" = &&"foo" {}`.
|
||||
//
|
||||
// Call `resolve_vars_if_possible` here for inline const blocks.
|
||||
PatKind::Lit(lt) => match self.resolve_vars_if_possible(self.check_expr(lt)).kind() {
|
||||
@ -743,6 +748,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
||||
| PatKind::Slice(..) => "binding",
|
||||
|
||||
PatKind::Wild
|
||||
| PatKind::Never
|
||||
| PatKind::Binding(..)
|
||||
| PatKind::Path(..)
|
||||
| PatKind::Box(..)
|
||||
|
@ -38,7 +38,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
||||
span_bug!(expr.span, "input to deref is not a ref?");
|
||||
}
|
||||
let ty = self.make_overloaded_place_return_type(method).ty;
|
||||
self.write_method_call(expr.hir_id, method);
|
||||
self.write_method_call_and_enforce_effects(expr.hir_id, expr.span, method);
|
||||
Some(ty)
|
||||
}
|
||||
|
||||
@ -179,7 +179,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
||||
}
|
||||
self.apply_adjustments(base_expr, adjustments);
|
||||
|
||||
self.write_method_call(expr.hir_id, method);
|
||||
self.write_method_call_and_enforce_effects(expr.hir_id, expr.span, method);
|
||||
|
||||
return Some((input_ty, self.make_overloaded_place_return_type(method).ty));
|
||||
}
|
||||
@ -404,7 +404,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
|
||||
None => return,
|
||||
};
|
||||
debug!("convert_place_op_to_mutable: method={:?}", method);
|
||||
self.write_method_call(expr.hir_id, method);
|
||||
self.write_method_call_and_enforce_effects(expr.hir_id, expr.span, method);
|
||||
|
||||
let ty::Ref(region, _, hir::Mutability::Mut) = method.sig.inputs()[0].kind() else {
|
||||
span_bug!(expr.span, "input to mutable place op is not a mut ref?");
|
||||
|
@ -23,14 +23,14 @@ mod newtype;
|
||||
/// The impls provided by default are Clone, Copy, PartialEq, Eq, and Hash.
|
||||
///
|
||||
/// Accepted attributes for customization:
|
||||
/// - #[derive(HashStable_Generic)]/#[derive(HashStable)]: derives
|
||||
/// - `#[derive(HashStable_Generic)]`/`#[derive(HashStable)]`: derives
|
||||
/// `HashStable`, as normal.
|
||||
/// - #[encodable]: derives `Encodable`/`Decodable`.
|
||||
/// - #[orderable]: derives `PartialOrd`/`Ord`, plus step-related methods.
|
||||
/// - #[debug_format = "Foo({})"]: derives `Debug` with particular output.
|
||||
/// - #[max = 0xFFFF_FFFD]: specifies the max value, which allows niche
|
||||
/// - `#[encodable]`: derives `Encodable`/`Decodable`.
|
||||
/// - `#[orderable]`: derives `PartialOrd`/`Ord`, plus step-related methods.
|
||||
/// - `#[debug_format = "Foo({})"]`: derives `Debug` with particular output.
|
||||
/// - `#[max = 0xFFFF_FFFD]`: specifies the max value, which allows niche
|
||||
/// optimizations. The default max value is 0xFFFF_FF00.
|
||||
/// - #[gate_rustc_only]: makes parts of the generated code nightly-only.
|
||||
/// - `#[gate_rustc_only]`: makes parts of the generated code nightly-only.
|
||||
#[proc_macro]
|
||||
#[cfg_attr(
|
||||
feature = "nightly",
|
||||
|
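As a rough illustration of what the documented attributes buy, here is a hand-written approximation of the kind of index newtype the macro generates; the names, the `MAX_AS_U32` bound, and the expansion details are assumptions for illustration, not the macro's exact output:

```rust
#[derive(Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct FieldIdx {
    private: u32,
}

impl FieldIdx {
    /// Upper bound that leaves room for niche optimizations (cf. `#[max = ...]`).
    pub const MAX_AS_U32: u32 = 0xFFFF_FF00;

    pub const fn from_u32(value: u32) -> Self {
        assert!(value <= Self::MAX_AS_U32);
        Self { private: value }
    }

    pub const fn as_u32(self) -> u32 {
        self.private
    }
}

impl std::fmt::Debug for FieldIdx {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // Mirrors the `#[debug_format = "FieldIdx({})"]` customization.
        write!(f, "FieldIdx({})", self.private)
    }
}

fn main() {
    let idx = FieldIdx::from_u32(3);
    println!("{idx:?}, raw = {}", idx.as_u32());
}
```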
@ -448,7 +448,11 @@ impl<'tcx> ToTrace<'tcx> for ty::TraitRef<'tcx> {
|
||||
) -> TypeTrace<'tcx> {
|
||||
TypeTrace {
|
||||
cause: cause.clone(),
|
||||
values: TraitRefs(ExpectedFound::new(a_is_expected, a, b)),
|
||||
values: PolyTraitRefs(ExpectedFound::new(
|
||||
a_is_expected,
|
||||
ty::Binder::dummy(a),
|
||||
ty::Binder::dummy(b),
|
||||
)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1667,9 +1667,7 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
|
||||
.report(diag);
|
||||
(false, Mismatch::Fixed("signature"))
|
||||
}
|
||||
ValuePairs::TraitRefs(_) | ValuePairs::PolyTraitRefs(_) => {
|
||||
(false, Mismatch::Fixed("trait"))
|
||||
}
|
||||
ValuePairs::PolyTraitRefs(_) => (false, Mismatch::Fixed("trait")),
|
||||
ValuePairs::Aliases(infer::ExpectedFound { expected, .. }) => {
|
||||
(false, Mismatch::Fixed(self.tcx.def_descr(expected.def_id)))
|
||||
}
|
||||
@ -2219,18 +2217,6 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> {
|
||||
infer::Aliases(exp_found) => self.expected_found_str(exp_found),
|
||||
infer::ExistentialTraitRef(exp_found) => self.expected_found_str(exp_found),
|
||||
infer::ExistentialProjection(exp_found) => self.expected_found_str(exp_found),
|
||||
infer::TraitRefs(exp_found) => {
|
||||
let pretty_exp_found = ty::error::ExpectedFound {
|
||||
expected: exp_found.expected.print_only_trait_path(),
|
||||
found: exp_found.found.print_only_trait_path(),
|
||||
};
|
||||
match self.expected_found_str(pretty_exp_found) {
|
||||
Some((expected, found, _, _)) if expected == found => {
|
||||
self.expected_found_str(exp_found)
|
||||
}
|
||||
ret => ret,
|
||||
}
|
||||
}
|
||||
infer::PolyTraitRefs(exp_found) => {
|
||||
let pretty_exp_found = ty::error::ExpectedFound {
|
||||
expected: exp_found.expected.print_only_trait_path(),
|
||||
|
@ -197,11 +197,6 @@ impl<'tcx> NiceRegionError<'_, 'tcx> {
|
||||
value_pairs: &ValuePairs<'tcx>,
|
||||
) -> Option<DiagnosticBuilder<'tcx, ErrorGuaranteed>> {
|
||||
let (expected_args, found_args, trait_def_id) = match value_pairs {
|
||||
ValuePairs::TraitRefs(ExpectedFound { expected, found })
|
||||
if expected.def_id == found.def_id =>
|
||||
{
|
||||
(expected.args, found.args, expected.def_id)
|
||||
}
|
||||
ValuePairs::PolyTraitRefs(ExpectedFound { expected, found })
|
||||
if expected.def_id() == found.def_id() =>
|
||||
{
|
||||
|
@ -384,7 +384,6 @@ pub enum ValuePairs<'tcx> {
|
||||
Regions(ExpectedFound<ty::Region<'tcx>>),
|
||||
Terms(ExpectedFound<ty::Term<'tcx>>),
|
||||
Aliases(ExpectedFound<ty::AliasTy<'tcx>>),
|
||||
TraitRefs(ExpectedFound<ty::TraitRef<'tcx>>),
|
||||
PolyTraitRefs(ExpectedFound<ty::PolyTraitRef<'tcx>>),
|
||||
PolySigs(ExpectedFound<ty::PolyFnSig<'tcx>>),
|
||||
ExistentialTraitRef(ExpectedFound<ty::PolyExistentialTraitRef<'tcx>>),
|
||||
|
@ -101,12 +101,19 @@ pub fn report_object_safety_error<'tcx>(
|
||||
to be resolvable dynamically; for more information visit \
|
||||
<https://doc.rust-lang.org/reference/items/traits.html#object-safety>",
|
||||
);
|
||||
|
||||
// Only provide the help if its a local trait, otherwise it's not actionable.
|
||||
if trait_span.is_some() {
|
||||
let mut reported_violations: Vec<_> = reported_violations.into_iter().collect();
|
||||
reported_violations.sort();
|
||||
for violation in reported_violations {
|
||||
// Only provide the help if its a local trait, otherwise it's not actionable.
|
||||
violation.solution(&mut err);
|
||||
|
||||
let mut potential_solutions: Vec<_> =
|
||||
reported_violations.into_iter().map(|violation| violation.solution()).collect();
|
||||
potential_solutions.sort();
|
||||
// Allows us to skip suggesting that the same item should be moved to another trait multiple times.
|
||||
potential_solutions.dedup();
|
||||
for solution in potential_solutions {
|
||||
solution.add_to(&mut err);
|
||||
}
|
||||
}
|
||||
|
||||
|
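The rewritten object-safety reporting collects every violation's suggested fix, then sorts and deduplicates before emitting, so the same "move this item to another trait" hint is not repeated. A small sketch of that sort-then-dedup step with an invented `Solution` type:

```rust
#[derive(PartialEq, Eq, PartialOrd, Ord)]
enum Solution {
    MoveItemToAnotherTrait(String),
    AddSizedBound(String),
}

fn dedup_solutions(mut solutions: Vec<Solution>) -> Vec<Solution> {
    // `dedup` only removes *adjacent* duplicates, so sorting first is required.
    solutions.sort();
    solutions.dedup();
    solutions
}

fn main() {
    let raw = vec![
        Solution::MoveItemToAnotherTrait("f".into()),
        Solution::AddSizedBound("Self".into()),
        Solution::MoveItemToAnotherTrait("f".into()),
    ];
    // Two distinct suggestions remain; the duplicate "move `f`" hint is dropped.
    assert_eq!(dedup_solutions(raw).len(), 2);
}
```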
@ -1,7 +1,7 @@
|
||||
use crate::util;
|
||||
|
||||
use rustc_ast::token;
|
||||
use rustc_ast::{self as ast, LitKind, MetaItemKind};
|
||||
use rustc_ast::{LitKind, MetaItemKind};
|
||||
use rustc_codegen_ssa::traits::CodegenBackend;
|
||||
use rustc_data_structures::defer;
|
||||
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
|
||||
@ -15,9 +15,7 @@ use rustc_middle::{bug, ty};
|
||||
use rustc_parse::maybe_new_parser_from_source_str;
|
||||
use rustc_query_impl::QueryCtxt;
|
||||
use rustc_query_system::query::print_query_stack;
|
||||
use rustc_session::config::{
|
||||
self, Cfg, CheckCfg, ExpectedValues, Input, OutFileName, OutputFilenames,
|
||||
};
|
||||
use rustc_session::config::{self, Cfg, CheckCfg, ExpectedValues, Input, OutFileName};
|
||||
use rustc_session::filesearch::sysroot_candidates;
|
||||
use rustc_session::parse::ParseSess;
|
||||
use rustc_session::{lint, CompilerIO, EarlyErrorHandler, Session};
|
||||
@ -43,16 +41,6 @@ pub struct Compiler {
|
||||
pub(crate) override_queries: Option<fn(&Session, &mut Providers)>,
|
||||
}
|
||||
|
||||
impl Compiler {
|
||||
pub fn build_output_filenames(
|
||||
&self,
|
||||
sess: &Session,
|
||||
attrs: &[ast::Attribute],
|
||||
) -> OutputFilenames {
|
||||
util::build_output_filenames(attrs, sess)
|
||||
}
|
||||
}
|
||||
|
||||
/// Converts strings provided as `--cfg [cfgspec]` into a `Cfg`.
|
||||
pub(crate) fn parse_cfg(handler: &EarlyErrorHandler, cfgs: Vec<String>) -> Cfg {
|
||||
cfgs.into_iter()
|
||||
|
@ -39,7 +39,7 @@ use std::any::Any;
|
||||
use std::ffi::OsString;
|
||||
use std::io::{self, BufWriter, Write};
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::sync::{Arc, LazyLock};
|
||||
use std::sync::LazyLock;
|
||||
use std::{env, fs, iter};
|
||||
|
||||
pub fn parse<'a>(sess: &'a Session) -> PResult<'a, ast::Crate> {
|
||||
@ -553,13 +553,17 @@ fn resolver_for_lowering<'tcx>(
|
||||
tcx.arena.alloc(Steal::new((untracked_resolver_for_lowering, Lrc::new(krate))))
|
||||
}
|
||||
|
||||
fn output_filenames(tcx: TyCtxt<'_>, (): ()) -> Arc<OutputFilenames> {
|
||||
pub(crate) fn write_dep_info(tcx: TyCtxt<'_>) {
|
||||
// Make sure name resolution and macro expansion is run for
|
||||
// the side-effect of providing a complete set of all
|
||||
// accessed files and env vars.
|
||||
let _ = tcx.resolver_for_lowering(());
|
||||
|
||||
let sess = tcx.sess;
|
||||
let _timer = sess.timer("prepare_outputs");
|
||||
let (_, krate) = &*tcx.resolver_for_lowering(()).borrow();
|
||||
let _timer = sess.timer("write_dep_info");
|
||||
let crate_name = tcx.crate_name(LOCAL_CRATE);
|
||||
|
||||
let outputs = util::build_output_filenames(&krate.attrs, sess);
|
||||
let outputs = tcx.output_filenames(());
|
||||
let output_paths =
|
||||
generated_output_paths(tcx, &outputs, sess.io.output_file.is_some(), crate_name);
|
||||
|
||||
@ -596,15 +600,12 @@ fn output_filenames(tcx: TyCtxt<'_>, (): ()) -> Arc<OutputFilenames> {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
outputs.into()
|
||||
}
|
||||
|
||||
pub static DEFAULT_QUERY_PROVIDERS: LazyLock<Providers> = LazyLock::new(|| {
|
||||
let providers = &mut Providers::default();
|
||||
providers.analysis = analysis;
|
||||
providers.hir_crate = rustc_ast_lowering::lower_to_hir;
|
||||
providers.output_filenames = output_filenames;
|
||||
providers.resolver_for_lowering = resolver_for_lowering;
|
||||
providers.early_lint_checks = early_lint_checks;
|
||||
proc_macro_decls::provide(providers);
|
||||
|
@ -8,6 +8,7 @@ use rustc_codegen_ssa::CodegenResults;
|
||||
use rustc_data_structures::steal::Steal;
|
||||
use rustc_data_structures::svh::Svh;
|
||||
use rustc_data_structures::sync::{AppendOnlyIndexVec, FreezeLock, OnceLock, WorkerLocal};
|
||||
use rustc_hir::def::DefKind;
|
||||
use rustc_hir::def_id::{StableCrateId, CRATE_DEF_ID, LOCAL_CRATE};
|
||||
use rustc_hir::definitions::Definitions;
|
||||
use rustc_incremental::setup_dep_graph;
|
||||
@ -85,7 +86,6 @@ pub struct Queries<'tcx> {
|
||||
hir_arena: WorkerLocal<rustc_hir::Arena<'tcx>>,
|
||||
|
||||
parse: Query<ast::Crate>,
|
||||
pre_configure: Query<(ast::Crate, ast::AttrVec)>,
|
||||
// This just points to what's in `gcx_cell`.
|
||||
gcx: Query<&'tcx GlobalCtxt<'tcx>>,
|
||||
}
|
||||
@ -98,7 +98,6 @@ impl<'tcx> Queries<'tcx> {
|
||||
arena: WorkerLocal::new(|_| Arena::default()),
|
||||
hir_arena: WorkerLocal::new(|_| rustc_hir::Arena::default()),
|
||||
parse: Default::default(),
|
||||
pre_configure: Default::default(),
|
||||
gcx: Default::default(),
|
||||
}
|
||||
}
|
||||
@ -113,12 +112,12 @@ impl<'tcx> Queries<'tcx> {
|
||||
})
|
||||
}
|
||||
|
||||
#[deprecated = "pre_configure may be made private in the future. If you need it please open an issue with your use case."]
|
||||
pub fn pre_configure(&self) -> Result<QueryResult<'_, (ast::Crate, ast::AttrVec)>> {
|
||||
self.pre_configure.compute(|| {
|
||||
pub fn global_ctxt(&'tcx self) -> Result<QueryResult<'_, &'tcx GlobalCtxt<'tcx>>> {
|
||||
self.gcx.compute(|| {
|
||||
let sess = &self.compiler.sess;
|
||||
|
||||
let mut krate = self.parse()?.steal();
|
||||
|
||||
let sess = &self.compiler.sess;
|
||||
rustc_builtin_macros::cmdline_attrs::inject(
|
||||
&mut krate,
|
||||
&sess.parse_sess,
|
||||
@ -127,15 +126,6 @@ impl<'tcx> Queries<'tcx> {
|
||||
|
||||
let pre_configured_attrs =
|
||||
rustc_expand::config::pre_configure_attrs(sess, &krate.attrs);
|
||||
Ok((krate, pre_configured_attrs))
|
||||
})
|
||||
}
|
||||
|
||||
pub fn global_ctxt(&'tcx self) -> Result<QueryResult<'_, &'tcx GlobalCtxt<'tcx>>> {
|
||||
self.gcx.compute(|| {
|
||||
let sess = &self.compiler.sess;
|
||||
#[allow(deprecated)]
|
||||
let (krate, pre_configured_attrs) = self.pre_configure()?.steal();
|
||||
|
||||
// parse `#[crate_name]` even if `--crate-name` was passed, to make sure it matches.
|
||||
let crate_name = find_crate_name(sess, &pre_configured_attrs);
|
||||
@ -146,6 +136,7 @@ impl<'tcx> Queries<'tcx> {
|
||||
sess.opts.cg.metadata.clone(),
|
||||
sess.cfg_version,
|
||||
);
|
||||
let outputs = util::build_output_filenames(&pre_configured_attrs, sess);
|
||||
let dep_graph = setup_dep_graph(sess, crate_name, stable_crate_id)?;
|
||||
|
||||
let cstore = FreezeLock::new(Box::new(CStore::new(
|
||||
@ -180,11 +171,22 @@ impl<'tcx> Queries<'tcx> {
|
||||
crate_name,
|
||||
)));
|
||||
feed.crate_for_resolver(tcx.arena.alloc(Steal::new((krate, pre_configured_attrs))));
|
||||
feed.output_filenames(Arc::new(outputs));
|
||||
|
||||
let feed = tcx.feed_local_def_id(CRATE_DEF_ID);
|
||||
feed.def_kind(DefKind::Mod);
|
||||
});
|
||||
Ok(qcx)
|
||||
})
|
||||
}
|
||||
|
||||
pub fn write_dep_info(&'tcx self) -> Result<()> {
|
||||
self.global_ctxt()?.enter(|tcx| {
|
||||
passes::write_dep_info(tcx);
|
||||
});
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Check for the `#[rustc_error]` annotation, which forces an error in codegen. This is used
|
||||
/// to write UI tests that actually test that compilation succeeds without reporting
|
||||
/// an error.
|
||||
@ -284,8 +286,13 @@ impl Linker {
|
||||
|
||||
if sess.opts.unstable_opts.no_link {
|
||||
let rlink_file = self.output_filenames.with_extension(config::RLINK_EXT);
|
||||
CodegenResults::serialize_rlink(sess, &rlink_file, &codegen_results)
|
||||
.map_err(|error| sess.emit_fatal(FailedWritingFile { path: &rlink_file, error }))?;
|
||||
CodegenResults::serialize_rlink(
|
||||
sess,
|
||||
&rlink_file,
|
||||
&codegen_results,
|
||||
&*self.output_filenames,
|
||||
)
|
||||
.map_err(|error| sess.emit_fatal(FailedWritingFile { path: &rlink_file, error }))?;
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
|
@ -5,10 +5,11 @@ use rustc_errors::{emitter::HumanReadableErrorType, registry, ColorConfig};
|
||||
use rustc_session::config::{
|
||||
build_configuration, build_session_options, rustc_optgroups, BranchProtection, CFGuard, Cfg,
|
||||
DebugInfo, DumpMonoStatsFormat, ErrorOutputType, ExternEntry, ExternLocation, Externs,
|
||||
InliningThreshold, Input, InstrumentCoverage, InstrumentXRay, LinkSelfContained,
|
||||
LinkerPluginLto, LocationDetail, LtoCli, MirSpanview, OomStrategy, Options, OutFileName,
|
||||
OutputType, OutputTypes, PAuthKey, PacRet, Passes, Polonius, ProcMacroExecutionStrategy, Strip,
|
||||
SwitchWithOptPath, SymbolManglingVersion, TraitSolver, WasiExecModel,
|
||||
FunctionReturn, InliningThreshold, Input, InstrumentCoverage, InstrumentXRay,
|
||||
LinkSelfContained, LinkerPluginLto, LocationDetail, LtoCli, MirSpanview, OomStrategy, Options,
|
||||
OutFileName, OutputType, OutputTypes, PAuthKey, PacRet, Passes, Polonius,
|
||||
ProcMacroExecutionStrategy, Strip, SwitchWithOptPath, SymbolManglingVersion, TraitSolver,
|
||||
WasiExecModel,
|
||||
};
|
||||
use rustc_session::lint::Level;
|
||||
use rustc_session::search_paths::SearchPath;
|
||||
@ -758,6 +759,7 @@ fn test_unstable_options_tracking_hash() {
|
||||
tracked!(flatten_format_args, false);
|
||||
tracked!(force_unstable_if_unmarked, true);
|
||||
tracked!(fuel, Some(("abc".to_string(), 99)));
|
||||
tracked!(function_return, FunctionReturn::ThunkExtern);
|
||||
tracked!(function_sections, Some(false));
|
||||
tracked!(human_readable_cgu_names, true);
|
||||
tracked!(incremental_ignore_spans, true);
|
||||
|
@ -126,11 +126,8 @@ pub(crate) fn run_in_thread_pool_with_globals<F: FnOnce() -> R + Send, R: Send>(
|
||||
.deadlock_handler(|| {
|
||||
// On deadlock, creates a new thread and forwards information in thread
|
||||
// locals to it. The new thread runs the deadlock handler.
|
||||
let query_map = FromDyn::from(tls::with(|tcx| {
|
||||
QueryCtxt::new(tcx)
|
||||
.try_collect_active_jobs()
|
||||
.expect("active jobs shouldn't be locked in deadlock handler")
|
||||
}));
|
||||
let query_map =
|
||||
FromDyn::from(tls::with(|tcx| QueryCtxt::new(tcx).collect_active_jobs()));
|
||||
let registry = rayon_core::Registry::current();
|
||||
thread::spawn(move || deadlock(query_map.into_inner(), ®istry));
|
||||
});
|
||||
|
@ -656,7 +656,7 @@ trait UnusedDelimLint {
|
||||
) -> bool {
|
||||
if followed_by_else {
|
||||
match inner.kind {
|
||||
ast::ExprKind::Binary(op, ..) if op.node.lazy() => return true,
|
||||
ast::ExprKind::Binary(op, ..) if op.node.is_lazy() => return true,
|
||||
_ if classify::expr_trailing_brace(inner).is_some() => return true,
|
||||
_ => {}
|
||||
}
|
||||
@ -1016,7 +1016,7 @@ impl UnusedDelimLint for UnusedParens {
|
||||
rustc_span::source_map::Spanned { node, .. },
|
||||
_,
|
||||
_,
|
||||
) if node.lazy()))
|
||||
) if node.is_lazy()))
|
||||
{
|
||||
self.emit_unused_delims_expr(cx, value, ctx, left_pos, right_pos, is_kw)
|
||||
}
|
||||
@ -1154,7 +1154,7 @@ impl EarlyLintPass for UnusedParens {
|
||||
// Do not lint on `(..)` as that will result in the other arms being useless.
|
||||
Paren(_)
|
||||
// The other cases do not contain sub-patterns.
|
||||
| Wild | Rest | Lit(..) | MacCall(..) | Range(..) | Ident(.., None) | Path(..) => {},
|
||||
| Wild | Never | Rest | Lit(..) | MacCall(..) | Range(..) | Ident(.., None) | Path(..) => {},
|
||||
// These are list-like patterns; parens can always be removed.
|
||||
TupleStruct(_, _, ps) | Tuple(ps) | Slice(ps) | Or(ps) => for p in ps {
|
||||
self.check_unused_parens_pat(cx, p, false, false, keep_space);
|
||||
|
@ -42,11 +42,7 @@
|
||||
#include "llvm/IR/IRPrintingPasses.h"
|
||||
#include "llvm/Linker/Linker.h"
|
||||
|
||||
#if LLVM_VERSION_GE(16, 0)
|
||||
#include "llvm/TargetParser/Triple.h"
|
||||
#else
|
||||
#include "llvm/ADT/Triple.h"
|
||||
#endif
|
||||
|
||||
extern "C" void LLVMRustSetLastError(const char *);
|
||||
|
||||
@ -93,6 +89,7 @@ enum LLVMRustAttribute {
|
||||
AllocatedPointer = 38,
|
||||
AllocAlign = 39,
|
||||
SanitizeSafeStack = 40,
|
||||
FnRetThunkExtern = 41,
|
||||
};
|
||||
|
||||
typedef struct OpaqueRustString *RustStringRef;
|
||||
|
@ -204,11 +204,7 @@ enum class LLVMRustCodeModel {
|
||||
None,
|
||||
};
|
||||
|
||||
#if LLVM_VERSION_LT(16, 0)
|
||||
static Optional<CodeModel::Model>
|
||||
#else
|
||||
static std::optional<CodeModel::Model>
|
||||
#endif
|
||||
fromRust(LLVMRustCodeModel Model) {
|
||||
switch (Model) {
|
||||
case LLVMRustCodeModel::Tiny:
|
||||
@ -222,11 +218,7 @@ fromRust(LLVMRustCodeModel Model) {
|
||||
case LLVMRustCodeModel::Large:
|
||||
return CodeModel::Large;
|
||||
case LLVMRustCodeModel::None:
|
||||
#if LLVM_VERSION_LT(16, 0)
|
||||
return None;
|
||||
#else
|
||||
return std::nullopt;
|
||||
#endif
|
||||
default:
|
||||
report_fatal_error("Bad CodeModel.");
|
||||
}
|
||||
@ -452,7 +444,6 @@ extern "C" LLVMTargetMachineRef LLVMRustCreateTargetMachine(
|
||||
if (OutputObjFile) {
|
||||
Options.ObjectFilenameForDebug = OutputObjFile;
|
||||
}
|
||||
#if LLVM_VERSION_GE(16, 0)
|
||||
if (!strcmp("zlib", DebugInfoCompression) && llvm::compression::zlib::isAvailable()) {
|
||||
Options.CompressDebugSections = DebugCompressionType::Zlib;
|
||||
} else if (!strcmp("zstd", DebugInfoCompression) && llvm::compression::zstd::isAvailable()) {
|
||||
@ -460,7 +451,6 @@ extern "C" LLVMTargetMachineRef LLVMRustCreateTargetMachine(
|
||||
} else if (!strcmp("none", DebugInfoCompression)) {
|
||||
Options.CompressDebugSections = DebugCompressionType::None;
|
||||
}
|
||||
#endif
|
||||
|
||||
Options.RelaxELFRelocations = RelaxELFRelocations;
|
||||
Options.UseInitArray = UseInitArray;
|
||||
@ -734,22 +724,14 @@ LLVMRustOptimize(
bool DebugPassManager = false;
PassInstrumentationCallbacks PIC;
#if LLVM_VERSION_LT(16, 0)
StandardInstrumentations SI(DebugPassManager);
#else
StandardInstrumentations SI(TheModule->getContext(), DebugPassManager);
#endif
SI.registerCallbacks(PIC);
if (LlvmSelfProfiler){
LLVMSelfProfileInitializeCallbacks(PIC,LlvmSelfProfiler,BeforePassCallback,AfterPassCallback);
}
#if LLVM_VERSION_LT(16, 0)
Optional<PGOOptions> PGOOpt;
#else
std::optional<PGOOptions> PGOOpt;
#endif
#if LLVM_VERSION_GE(17, 0)
auto FS = vfs::getRealFileSystem();
#endif
@ -882,12 +864,7 @@ LLVMRustOptimize(
/*EagerChecks=*/true);
OptimizerLastEPCallbacks.push_back(
[Options](ModulePassManager &MPM, OptimizationLevel Level) {
#if LLVM_VERSION_LT(16, 0)
MPM.addPass(ModuleMemorySanitizerPass(Options));
MPM.addPass(createModuleToFunctionPassAdaptor(MemorySanitizerPass(Options)));
#else
MPM.addPass(MemorySanitizerPass(Options));
#endif
}
);
}
@ -912,11 +889,7 @@ LLVMRustOptimize(
/*UseAfterScope=*/true,
AsanDetectStackUseAfterReturnMode::Runtime,
};
#if LLVM_VERSION_LT(16, 0)
MPM.addPass(ModuleAddressSanitizerPass(opts));
#else
MPM.addPass(AddressSanitizerPass(opts));
#endif
}
);
}
@ -14,18 +14,13 @@
|
||||
#include "llvm/Remarks/RemarkSerializer.h"
|
||||
#include "llvm/Remarks/RemarkFormat.h"
|
||||
#include "llvm/Support/ToolOutputFile.h"
|
||||
#if LLVM_VERSION_GE(16, 0)
|
||||
#include "llvm/Support/ModRef.h"
|
||||
#endif
|
||||
#include "llvm/Object/Archive.h"
|
||||
#include "llvm/Object/COFFImportFile.h"
|
||||
#include "llvm/Object/ObjectFile.h"
|
||||
#include "llvm/Pass.h"
|
||||
#include "llvm/Bitcode/BitcodeWriter.h"
|
||||
#include "llvm/Support/Signals.h"
|
||||
#if LLVM_VERSION_LT(16, 0)
|
||||
#include "llvm/ADT/Optional.h"
|
||||
#endif
|
||||
|
||||
#include <iostream>
|
||||
|
||||
@ -283,6 +278,8 @@ static Attribute::AttrKind fromRust(LLVMRustAttribute Kind) {
|
||||
return Attribute::AllocAlign;
|
||||
case SanitizeSafeStack:
|
||||
return Attribute::SafeStack;
|
||||
case FnRetThunkExtern:
|
||||
return Attribute::FnRetThunkExtern;
|
||||
}
|
||||
report_fatal_error("bad AttributeKind");
|
||||
}
|
||||
@ -347,13 +344,7 @@ extern "C" LLVMAttributeRef LLVMRustCreateUWTableAttr(LLVMContextRef C, bool Asy
|
||||
}
|
||||
|
||||
extern "C" LLVMAttributeRef LLVMRustCreateAllocSizeAttr(LLVMContextRef C, uint32_t ElementSizeArg) {
|
||||
return wrap(Attribute::getWithAllocSizeArgs(*unwrap(C), ElementSizeArg,
|
||||
#if LLVM_VERSION_LT(16, 0)
|
||||
None
|
||||
#else
|
||||
std::nullopt
|
||||
#endif
|
||||
));
|
||||
return wrap(Attribute::getWithAllocSizeArgs(*unwrap(C), ElementSizeArg, std::nullopt));
|
||||
}
|
||||
|
||||
// These values **must** match ffi::AllocKindFlags.
|
||||
@ -416,7 +407,6 @@ enum class LLVMRustMemoryEffects {
|
||||
|
||||
extern "C" LLVMAttributeRef LLVMRustCreateMemoryEffectsAttr(LLVMContextRef C,
|
||||
LLVMRustMemoryEffects Effects) {
|
||||
#if LLVM_VERSION_GE(16, 0)
|
||||
switch (Effects) {
|
||||
case LLVMRustMemoryEffects::None:
|
||||
return wrap(Attribute::getWithMemoryEffects(*unwrap(C), MemoryEffects::none()));
|
||||
@ -428,18 +418,6 @@ extern "C" LLVMAttributeRef LLVMRustCreateMemoryEffectsAttr(LLVMContextRef C,
|
||||
default:
|
||||
report_fatal_error("bad MemoryEffects.");
|
||||
}
|
||||
#else
|
||||
switch (Effects) {
|
||||
case LLVMRustMemoryEffects::None:
|
||||
return wrap(Attribute::get(*unwrap(C), Attribute::ReadNone));
|
||||
case LLVMRustMemoryEffects::ReadOnly:
|
||||
return wrap(Attribute::get(*unwrap(C), Attribute::ReadOnly));
|
||||
case LLVMRustMemoryEffects::InaccessibleMemOnly:
|
||||
return wrap(Attribute::get(*unwrap(C), Attribute::InaccessibleMemOnly));
|
||||
default:
|
||||
report_fatal_error("bad MemoryEffects.");
|
||||
}
|
||||
#endif
|
||||
}
|
||||
|
||||
// Enable a fast-math flag
|
||||
@ -726,18 +704,10 @@ enum class LLVMRustChecksumKind {
|
||||
SHA256,
|
||||
};
|
||||
|
||||
#if LLVM_VERSION_LT(16, 0)
|
||||
static Optional<DIFile::ChecksumKind> fromRust(LLVMRustChecksumKind Kind) {
|
||||
#else
|
||||
static std::optional<DIFile::ChecksumKind> fromRust(LLVMRustChecksumKind Kind) {
|
||||
#endif
|
||||
switch (Kind) {
|
||||
case LLVMRustChecksumKind::None:
|
||||
#if LLVM_VERSION_LT(16, 0)
|
||||
return None;
|
||||
#else
|
||||
return std::nullopt;
|
||||
#endif
|
||||
case LLVMRustChecksumKind::MD5:
|
||||
return DIFile::ChecksumKind::CSK_MD5;
|
||||
case LLVMRustChecksumKind::SHA1:
|
||||
@ -810,17 +780,8 @@ extern "C" LLVMMetadataRef LLVMRustDIBuilderCreateFile(
|
||||
const char *Directory, size_t DirectoryLen, LLVMRustChecksumKind CSKind,
|
||||
const char *Checksum, size_t ChecksumLen) {
|
||||
|
||||
#if LLVM_VERSION_LT(16, 0)
|
||||
Optional<DIFile::ChecksumKind> llvmCSKind = fromRust(CSKind);
|
||||
#else
|
||||
std::optional<DIFile::ChecksumKind> llvmCSKind = fromRust(CSKind);
|
||||
#endif
|
||||
|
||||
#if LLVM_VERSION_LT(16, 0)
|
||||
Optional<DIFile::ChecksumInfo<StringRef>> CSInfo{};
|
||||
#else
|
||||
std::optional<DIFile::ChecksumInfo<StringRef>> CSInfo{};
|
||||
#endif
|
||||
if (llvmCSKind)
|
||||
CSInfo.emplace(*llvmCSKind, StringRef{Checksum, ChecksumLen});
|
||||
return wrap(Builder->createFile(StringRef(Filename, FilenameLen),
|
||||
@ -2053,17 +2014,9 @@ extern "C" bool LLVMRustIsNonGVFunctionPointerTy(LLVMValueRef V) {
|
||||
}
|
||||
|
||||
extern "C" bool LLVMRustLLVMHasZlibCompressionForDebugSymbols() {
|
||||
#if LLVM_VERSION_GE(16, 0)
|
||||
return llvm::compression::zlib::isAvailable();
|
||||
#else
|
||||
return false;
|
||||
#endif
|
||||
}
|
||||
|
||||
extern "C" bool LLVMRustLLVMHasZstdCompressionForDebugSymbols() {
|
||||
#if LLVM_VERSION_GE(16, 0)
|
||||
return llvm::compression::zstd::isAvailable();
|
||||
#else
|
||||
return false;
|
||||
#endif
|
||||
}
|
||||
|
@ -9,7 +9,7 @@ use rustc_data_structures::svh::Svh;
|
||||
use rustc_data_structures::sync::{par_for_each_in, try_par_for_each_in, DynSend, DynSync};
|
||||
use rustc_hir::def::{DefKind, Res};
|
||||
use rustc_hir::def_id::{DefId, LocalDefId, LocalModDefId, LOCAL_CRATE};
|
||||
use rustc_hir::definitions::{DefKey, DefPath, DefPathData, DefPathHash};
|
||||
use rustc_hir::definitions::{DefKey, DefPath, DefPathHash};
|
||||
use rustc_hir::intravisit::{self, Visitor};
|
||||
use rustc_hir::*;
|
||||
use rustc_index::Idx;
|
||||
@ -168,98 +168,6 @@ impl<'hir> Map<'hir> {
|
||||
self.tcx.definitions_untracked().def_path_hash(def_id)
|
||||
}
|
||||
|
||||
/// Do not call this function directly. The query should be called.
|
||||
pub(super) fn def_kind(self, local_def_id: LocalDefId) -> DefKind {
|
||||
let hir_id = self.tcx.local_def_id_to_hir_id(local_def_id);
|
||||
let node = match self.find(hir_id) {
|
||||
Some(node) => node,
|
||||
None => match self.def_key(local_def_id).disambiguated_data.data {
|
||||
// FIXME: Some anonymous constants produced by `#[rustc_legacy_const_generics]`
|
||||
// do not have corresponding HIR nodes, but they are still anonymous constants.
|
||||
DefPathData::AnonConst => return DefKind::AnonConst,
|
||||
_ => bug!("no HIR node for def id {local_def_id:?}"),
|
||||
},
|
||||
};
|
||||
match node {
|
||||
Node::Item(item) => match item.kind {
|
||||
ItemKind::Static(_, mt, _) => DefKind::Static(mt),
|
||||
ItemKind::Const(..) => DefKind::Const,
|
||||
ItemKind::Fn(..) => DefKind::Fn,
|
||||
ItemKind::Macro(_, macro_kind) => DefKind::Macro(macro_kind),
|
||||
ItemKind::Mod(..) => DefKind::Mod,
|
||||
ItemKind::OpaqueTy(..) => DefKind::OpaqueTy,
|
||||
ItemKind::TyAlias(..) => DefKind::TyAlias,
|
||||
ItemKind::Enum(..) => DefKind::Enum,
|
||||
ItemKind::Struct(..) => DefKind::Struct,
|
||||
ItemKind::Union(..) => DefKind::Union,
|
||||
ItemKind::Trait(..) => DefKind::Trait,
|
||||
ItemKind::TraitAlias(..) => DefKind::TraitAlias,
|
||||
ItemKind::ExternCrate(_) => DefKind::ExternCrate,
|
||||
ItemKind::Use(..) => DefKind::Use,
|
||||
ItemKind::ForeignMod { .. } => DefKind::ForeignMod,
|
||||
ItemKind::GlobalAsm(..) => DefKind::GlobalAsm,
|
||||
ItemKind::Impl(impl_) => DefKind::Impl { of_trait: impl_.of_trait.is_some() },
|
||||
},
|
||||
Node::ForeignItem(item) => match item.kind {
|
||||
ForeignItemKind::Fn(..) => DefKind::Fn,
|
||||
ForeignItemKind::Static(_, mt) => DefKind::Static(mt),
|
||||
ForeignItemKind::Type => DefKind::ForeignTy,
|
||||
},
|
||||
Node::TraitItem(item) => match item.kind {
|
||||
TraitItemKind::Const(..) => DefKind::AssocConst,
|
||||
TraitItemKind::Fn(..) => DefKind::AssocFn,
|
||||
TraitItemKind::Type(..) => DefKind::AssocTy,
|
||||
},
|
||||
Node::ImplItem(item) => match item.kind {
|
||||
ImplItemKind::Const(..) => DefKind::AssocConst,
|
||||
ImplItemKind::Fn(..) => DefKind::AssocFn,
|
||||
ImplItemKind::Type(..) => DefKind::AssocTy,
|
||||
},
|
||||
Node::Variant(_) => DefKind::Variant,
|
||||
Node::Ctor(variant_data) => {
|
||||
let ctor_of = match self.find_parent(hir_id) {
|
||||
Some(Node::Item(..)) => def::CtorOf::Struct,
|
||||
Some(Node::Variant(..)) => def::CtorOf::Variant,
|
||||
_ => unreachable!(),
|
||||
};
|
||||
match variant_data.ctor_kind() {
|
||||
Some(kind) => DefKind::Ctor(ctor_of, kind),
|
||||
None => bug!("constructor node without a constructor"),
|
||||
}
|
||||
}
|
||||
Node::AnonConst(_) => DefKind::AnonConst,
|
||||
Node::ConstBlock(_) => DefKind::InlineConst,
|
||||
Node::Field(_) => DefKind::Field,
|
||||
Node::Expr(expr) => match expr.kind {
|
||||
ExprKind::Closure(_) => DefKind::Closure,
|
||||
_ => bug!("def_kind: unsupported node: {}", self.node_to_string(hir_id)),
|
||||
},
|
||||
Node::GenericParam(param) => match param.kind {
|
||||
GenericParamKind::Lifetime { .. } => DefKind::LifetimeParam,
|
||||
GenericParamKind::Type { .. } => DefKind::TyParam,
|
||||
GenericParamKind::Const { .. } => DefKind::ConstParam,
|
||||
},
|
||||
Node::Crate(_) => DefKind::Mod,
|
||||
Node::Stmt(_)
|
||||
| Node::PathSegment(_)
|
||||
| Node::Ty(_)
|
||||
| Node::TypeBinding(_)
|
||||
| Node::Infer(_)
|
||||
| Node::TraitRef(_)
|
||||
| Node::Pat(_)
|
||||
| Node::PatField(_)
|
||||
| Node::ExprField(_)
|
||||
| Node::Local(_)
|
||||
| Node::Param(_)
|
||||
| Node::Arm(_)
|
||||
| Node::Lifetime(_)
|
||||
| Node::Block(_) => span_bug!(
|
||||
self.span(hir_id),
|
||||
"unexpected node with def id {local_def_id:?}: {node:?}"
|
||||
),
|
||||
}
|
||||
}
|
||||
|
||||
/// Finds the id of the parent node to this one.
|
||||
///
|
||||
/// If calling repeatedly and iterating over parents, prefer [`Map::parent_iter`].
|
||||
|
@ -202,7 +202,6 @@ pub fn provide(providers: &mut Providers) {
|
||||
span_bug!(hir.span(hir_id), "fn_arg_names: unexpected item {:?}", def_id);
|
||||
}
|
||||
};
|
||||
providers.def_kind = |tcx, def_id| tcx.hir().def_kind(def_id);
|
||||
providers.all_local_trait_impls = |tcx, ()| &tcx.resolutions(()).trait_impls;
|
||||
providers.expn_that_defined =
|
||||
|tcx, id| tcx.resolutions(()).expn_that_defined.get(&id).copied().unwrap_or(ExpnId::root());
|
||||
|
@ -27,7 +27,7 @@ pub enum UnsafetyViolationKind {
|
||||
UnsafeFn,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, TyEncodable, TyDecodable, HashStable, Debug)]
|
||||
#[derive(Clone, PartialEq, TyEncodable, TyDecodable, HashStable, Debug)]
|
||||
pub enum UnsafetyViolationDetails {
|
||||
CallToUnsafeFunction,
|
||||
UseOfInlineAssembly,
|
||||
@ -39,10 +39,17 @@ pub enum UnsafetyViolationDetails {
|
||||
AccessToUnionField,
|
||||
MutationOfLayoutConstrainedField,
|
||||
BorrowOfLayoutConstrainedField,
|
||||
CallToFunctionWith,
|
||||
CallToFunctionWith {
|
||||
/// Target features enabled in callee's `#[target_feature]` but missing in
|
||||
/// caller's `#[target_feature]`.
|
||||
missing: Vec<Symbol>,
|
||||
/// Target features in `missing` that are enabled at compile time
|
||||
/// (e.g., with `-C target-feature`).
|
||||
build_enabled: Vec<Symbol>,
|
||||
},
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, TyEncodable, TyDecodable, HashStable, Debug)]
|
||||
#[derive(Clone, PartialEq, TyEncodable, TyDecodable, HashStable, Debug)]
|
||||
pub struct UnsafetyViolation {
|
||||
pub source_info: SourceInfo,
|
||||
pub lint_root: hir::HirId,
|
||||
|
@ -635,7 +635,12 @@ impl<'tcx> Pat<'tcx> {
|
||||
|
||||
use PatKind::*;
|
||||
match &self.kind {
|
||||
Wild | Range(..) | Binding { subpattern: None, .. } | Constant { .. } | Error(_) => {}
|
||||
Wild
|
||||
| Never
|
||||
| Range(..)
|
||||
| Binding { subpattern: None, .. }
|
||||
| Constant { .. }
|
||||
| Error(_) => {}
|
||||
AscribeUserType { subpattern, .. }
|
||||
| Binding { subpattern: Some(subpattern), .. }
|
||||
| Deref { subpattern }
|
||||
@ -809,6 +814,9 @@ pub enum PatKind<'tcx> {
|
||||
pats: Box<[Box<Pat<'tcx>>]>,
|
||||
},
|
||||
|
||||
/// A never pattern `!`.
|
||||
Never,
|
||||
|
||||
/// An error has been encountered during lowering. We probably shouldn't report more lints
|
||||
/// related to this pattern.
|
||||
Error(ErrorGuaranteed),
|
||||
@ -1069,6 +1077,7 @@ impl<'tcx> fmt::Display for Pat<'tcx> {
|
||||
|
||||
match self.kind {
|
||||
PatKind::Wild => write!(f, "_"),
|
||||
PatKind::Never => write!(f, "!"),
|
||||
PatKind::AscribeUserType { ref subpattern, .. } => write!(f, "{subpattern}: _"),
|
||||
PatKind::Binding { mutability, name, mode, ref subpattern, .. } => {
|
||||
let is_mut = match mode {
|
||||
|
@ -227,7 +227,7 @@ pub fn walk_pat<'a, 'tcx: 'a, V: Visitor<'a, 'tcx>>(visitor: &mut V, pat: &Pat<'
|
||||
is_primary: _,
|
||||
name: _,
|
||||
} => visitor.visit_pat(subpattern),
|
||||
Binding { .. } | Wild | Error(_) => {}
|
||||
Binding { .. } | Wild | Never | Error(_) => {}
|
||||
Variant { subpatterns, adt_def: _, args: _, variant_index: _ } | Leaf { subpatterns } => {
|
||||
for subpattern in subpatterns {
|
||||
visitor.visit_pat(&subpattern.pattern);
|
||||
|
@ -832,50 +832,31 @@ impl ObjectSafetyViolation {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn solution(&self, err: &mut Diagnostic) {
|
||||
pub fn solution(&self) -> ObjectSafetyViolationSolution {
|
||||
match self {
|
||||
ObjectSafetyViolation::SizedSelf(_)
|
||||
| ObjectSafetyViolation::SupertraitSelf(_)
|
||||
| ObjectSafetyViolation::SupertraitNonLifetimeBinder(..) => {}
|
||||
| ObjectSafetyViolation::SupertraitNonLifetimeBinder(..) => {
|
||||
ObjectSafetyViolationSolution::None
|
||||
}
|
||||
ObjectSafetyViolation::Method(
|
||||
name,
|
||||
MethodViolationCode::StaticMethod(Some((add_self_sugg, make_sized_sugg))),
|
||||
_,
|
||||
) => {
|
||||
err.span_suggestion(
|
||||
add_self_sugg.1,
|
||||
format!(
|
||||
"consider turning `{name}` into a method by giving it a `&self` argument"
|
||||
),
|
||||
add_self_sugg.0.to_string(),
|
||||
Applicability::MaybeIncorrect,
|
||||
);
|
||||
err.span_suggestion(
|
||||
make_sized_sugg.1,
|
||||
format!(
|
||||
"alternatively, consider constraining `{name}` so it does not apply to \
|
||||
trait objects"
|
||||
),
|
||||
make_sized_sugg.0.to_string(),
|
||||
Applicability::MaybeIncorrect,
|
||||
);
|
||||
}
|
||||
) => ObjectSafetyViolationSolution::AddSelfOrMakeSized {
|
||||
name: *name,
|
||||
add_self_sugg: add_self_sugg.clone(),
|
||||
make_sized_sugg: make_sized_sugg.clone(),
|
||||
},
|
||||
ObjectSafetyViolation::Method(
|
||||
name,
|
||||
MethodViolationCode::UndispatchableReceiver(Some(span)),
|
||||
_,
|
||||
) => {
|
||||
err.span_suggestion(
|
||||
*span,
|
||||
format!("consider changing method `{name}`'s `self` parameter to be `&self`"),
|
||||
"&Self",
|
||||
Applicability::MachineApplicable,
|
||||
);
|
||||
}
|
||||
) => ObjectSafetyViolationSolution::ChangeToRefSelf(*name, *span),
|
||||
ObjectSafetyViolation::AssocConst(name, _)
|
||||
| ObjectSafetyViolation::GAT(name, _)
|
||||
| ObjectSafetyViolation::Method(name, ..) => {
|
||||
err.help(format!("consider moving `{name}` to another trait"));
|
||||
ObjectSafetyViolationSolution::MoveToAnotherTrait(*name)
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -899,6 +880,60 @@ impl ObjectSafetyViolation {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
|
||||
pub enum ObjectSafetyViolationSolution {
|
||||
None,
|
||||
AddSelfOrMakeSized {
|
||||
name: Symbol,
|
||||
add_self_sugg: (String, Span),
|
||||
make_sized_sugg: (String, Span),
|
||||
},
|
||||
ChangeToRefSelf(Symbol, Span),
|
||||
MoveToAnotherTrait(Symbol),
|
||||
}
|
||||
|
||||
impl ObjectSafetyViolationSolution {
|
||||
pub fn add_to(self, err: &mut Diagnostic) {
|
||||
match self {
|
||||
ObjectSafetyViolationSolution::None => {}
|
||||
ObjectSafetyViolationSolution::AddSelfOrMakeSized {
|
||||
name,
|
||||
add_self_sugg,
|
||||
make_sized_sugg,
|
||||
} => {
|
||||
err.span_suggestion(
|
||||
add_self_sugg.1,
|
||||
format!(
|
||||
"consider turning `{name}` into a method by giving it a `&self` argument"
|
||||
),
|
||||
add_self_sugg.0,
|
||||
Applicability::MaybeIncorrect,
|
||||
);
|
||||
err.span_suggestion(
|
||||
make_sized_sugg.1,
|
||||
format!(
|
||||
"alternatively, consider constraining `{name}` so it does not apply to \
|
||||
trait objects"
|
||||
),
|
||||
make_sized_sugg.0,
|
||||
Applicability::MaybeIncorrect,
|
||||
);
|
||||
}
|
||||
ObjectSafetyViolationSolution::ChangeToRefSelf(name, span) => {
|
||||
err.span_suggestion(
|
||||
span,
|
||||
format!("consider changing method `{name}`'s `self` parameter to be `&self`"),
|
||||
"&Self",
|
||||
Applicability::MachineApplicable,
|
||||
);
|
||||
}
|
||||
ObjectSafetyViolationSolution::MoveToAnotherTrait(name) => {
|
||||
err.help(format!("consider moving `{name}` to another trait"));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Reasons a method might not be object-safe.
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Hash, HashStable, PartialOrd, Ord)]
|
||||
pub enum MethodViolationCode {
|
||||
|
@ -501,6 +501,9 @@ impl<'tcx> TyCtxt<'tcx> {
|
||||
pub fn feed_local_crate(self) -> TyCtxtFeed<'tcx, CrateNum> {
|
||||
TyCtxtFeed { tcx: self, key: LOCAL_CRATE }
|
||||
}
|
||||
pub fn feed_local_def_id(self, key: LocalDefId) -> TyCtxtFeed<'tcx, LocalDefId> {
|
||||
TyCtxtFeed { tcx: self, key }
|
||||
}
|
||||
|
||||
/// In order to break cycles involving `AnonConst`, we need to set the expected type by side
|
||||
/// effect. However, we do not want this as a general capability, so this interface restricts
|
||||
@ -973,6 +976,7 @@ impl<'tcx> TyCtxtAt<'tcx> {
|
||||
self,
|
||||
parent: LocalDefId,
|
||||
data: hir::definitions::DefPathData,
|
||||
def_kind: DefKind,
|
||||
) -> TyCtxtFeed<'tcx, LocalDefId> {
|
||||
// This function modifies `self.definitions` using a side-effect.
|
||||
// We need to ensure that these side effects are re-run by the incr. comp. engine.
|
||||
@ -997,6 +1001,7 @@ impl<'tcx> TyCtxtAt<'tcx> {
|
||||
let key = self.untracked.definitions.write().create_def(parent, data);
|
||||
|
||||
let feed = TyCtxtFeed { tcx: self.tcx, key };
|
||||
feed.def_kind(def_kind);
|
||||
feed.def_span(self.span);
|
||||
feed
|
||||
}
|
||||
|
@ -202,9 +202,6 @@ pub struct ResolverAstLowering {
|
||||
pub def_id_to_node_id: IndexVec<LocalDefId, ast::NodeId>,
|
||||
|
||||
pub trait_map: NodeMap<Vec<hir::TraitCandidate>>,
|
||||
/// A small map keeping true kinds of built-in macros that appear to be fn-like on
|
||||
/// the surface (`macro` items in libcore), but are actually attributes or derives.
|
||||
pub builtin_macro_kinds: FxHashMap<LocalDefId, MacroKind>,
|
||||
/// List functions and methods for which lifetime elision was successful.
|
||||
pub lifetime_elision_allowed: FxHashSet<ast::NodeId>,
|
||||
|
||||
|
@ -283,7 +283,7 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write {
|
||||
/// from at least one local module, and returns `true`. If the crate defining `def_id` is
|
||||
/// declared with an `extern crate`, the path is guaranteed to use the `extern crate`.
|
||||
fn try_print_visible_def_path(&mut self, def_id: DefId) -> Result<bool, PrintError> {
|
||||
if NO_VISIBLE_PATH.with(|flag| flag.get()) {
|
||||
if with_no_visible_paths() {
|
||||
return Ok(false);
|
||||
}
|
||||
|
||||
@ -367,7 +367,7 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write {
|
||||
|
||||
/// Try to see if this path can be trimmed to a unique symbol name.
|
||||
fn try_print_trimmed_def_path(&mut self, def_id: DefId) -> Result<bool, PrintError> {
|
||||
if FORCE_TRIMMED_PATH.with(|flag| flag.get()) {
|
||||
if with_forced_trimmed_paths() {
|
||||
let trimmed = self.force_print_trimmed_def_path(def_id)?;
|
||||
if trimmed {
|
||||
return Ok(true);
|
||||
@ -375,8 +375,8 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write {
|
||||
}
|
||||
if !self.tcx().sess.opts.unstable_opts.trim_diagnostic_paths
|
||||
|| matches!(self.tcx().sess.opts.trimmed_def_paths, TrimmedDefPaths::Never)
|
||||
|| NO_TRIMMED_PATH.with(|flag| flag.get())
|
||||
|| SHOULD_PREFIX_WITH_CRATE.with(|flag| flag.get())
|
||||
|| with_no_trimmed_paths()
|
||||
|| with_crate_prefix()
|
||||
{
|
||||
return Ok(false);
|
||||
}
|
||||
@ -861,7 +861,7 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write {
|
||||
p!("@", print_def_path(did.to_def_id(), args));
|
||||
} else {
|
||||
let span = self.tcx().def_span(did);
|
||||
let preference = if FORCE_TRIMMED_PATH.with(|flag| flag.get()) {
|
||||
let preference = if with_forced_trimmed_paths() {
|
||||
FileNameDisplayPreference::Short
|
||||
} else {
|
||||
FileNameDisplayPreference::Remapped
|
||||
@ -1102,7 +1102,7 @@ pub trait PrettyPrinter<'tcx>: Printer<'tcx> + fmt::Write {
|
||||
write!(self, "Sized")?;
|
||||
}
|
||||
|
||||
if !FORCE_TRIMMED_PATH.with(|flag| flag.get()) {
|
||||
if !with_forced_trimmed_paths() {
|
||||
for re in lifetimes {
|
||||
write!(self, " + ")?;
|
||||
self.print_region(re)?;
|
||||
@ -1886,7 +1886,7 @@ impl<'tcx> Printer<'tcx> for FmtPrinter<'_, 'tcx> {
|
||||
// available, and filename/line-number is mostly uninteresting.
|
||||
let use_types = !def_id.is_local() || {
|
||||
// Otherwise, use filename/line-number if forced.
|
||||
let force_no_types = FORCE_IMPL_FILENAME_LINE.with(|f| f.get());
|
||||
let force_no_types = with_forced_impl_filename_line();
|
||||
!force_no_types
|
||||
};
|
||||
|
||||
@ -1951,7 +1951,7 @@ impl<'tcx> Printer<'tcx> for FmtPrinter<'_, 'tcx> {
|
||||
if cnum == LOCAL_CRATE {
|
||||
if self.tcx.sess.at_least_rust_2018() {
|
||||
// We add the `crate::` keyword on Rust 2018, only when desired.
|
||||
if SHOULD_PREFIX_WITH_CRATE.with(|flag| flag.get()) {
|
||||
if with_crate_prefix() {
|
||||
write!(self, "{}", kw::Crate)?;
|
||||
self.empty_path = false;
|
||||
}
|
||||
@ -2154,7 +2154,7 @@ impl<'tcx> PrettyPrinter<'tcx> for FmtPrinter<'_, 'tcx> {
|
||||
return true;
|
||||
}
|
||||
|
||||
if FORCE_TRIMMED_PATH.with(|flag| flag.get()) {
|
||||
if with_forced_trimmed_paths() {
|
||||
return false;
|
||||
}
|
||||
|
||||
@ -2437,7 +2437,7 @@ impl<'tcx> FmtPrinter<'_, 'tcx> {
|
||||
} else {
|
||||
let tcx = self.tcx;
|
||||
|
||||
let trim_path = FORCE_TRIMMED_PATH.with(|flag| flag.get());
|
||||
let trim_path = with_forced_trimmed_paths();
|
||||
// Closure used in `RegionFolder` to create names for anonymous late-bound
|
||||
// regions. We use two `DebruijnIndex`es (one for the currently folded
|
||||
// late-bound region and the other for the binder level) to determine
|
||||
|
@ -779,6 +779,57 @@ impl<'tcx> TyCtxt<'tcx> {
|
||||
// the language.
|
||||
|| self.extern_crate(key.as_def_id()).is_some_and(|e| e.is_direct())
|
||||
}
|
||||
|
||||
pub fn expected_const_effect_param_for_body(self, def_id: LocalDefId) -> ty::Const<'tcx> {
|
||||
// if the callee does have the param, we need to equate the param to some const
|
||||
// value no matter whether the effects feature is enabled in the local crate,
|
||||
// because inference will fail if we don't.
|
||||
let mut host_always_on =
|
||||
!self.features().effects || self.sess.opts.unstable_opts.unleash_the_miri_inside_of_you;
|
||||
|
||||
// Compute the constness required by the context.
|
||||
let const_context = self.hir().body_const_context(def_id);
|
||||
|
||||
let kind = self.def_kind(def_id);
|
||||
debug_assert_ne!(kind, DefKind::ConstParam);
|
||||
|
||||
if self.has_attr(def_id, sym::rustc_do_not_const_check) {
|
||||
trace!("do not const check this context");
|
||||
host_always_on = true;
|
||||
}
|
||||
|
||||
match const_context {
|
||||
_ if host_always_on => self.consts.true_,
|
||||
Some(hir::ConstContext::Static(_) | hir::ConstContext::Const { .. }) => {
|
||||
self.consts.false_
|
||||
}
|
||||
Some(hir::ConstContext::ConstFn) => {
|
||||
let host_idx = self
|
||||
.generics_of(def_id)
|
||||
.host_effect_index
|
||||
.expect("ConstContext::Maybe must have host effect param");
|
||||
ty::GenericArgs::identity_for_item(self, def_id).const_at(host_idx)
|
||||
}
|
||||
None => self.consts.true_,
|
||||
}
|
||||
}
|
||||
|
||||
/// Constructs generic args for an item, optionally appending a const effect param type
|
||||
pub fn with_opt_const_effect_param(
|
||||
self,
|
||||
caller_def_id: LocalDefId,
|
||||
callee_def_id: DefId,
|
||||
args: impl IntoIterator<Item: Into<ty::GenericArg<'tcx>>>,
|
||||
) -> ty::GenericArgsRef<'tcx> {
|
||||
let generics = self.generics_of(callee_def_id);
|
||||
assert_eq!(generics.parent, None);
|
||||
|
||||
let opt_const_param = generics.host_effect_index.is_some().then(|| {
|
||||
ty::GenericArg::from(self.expected_const_effect_param_for_body(caller_def_id))
|
||||
});
|
||||
|
||||
self.mk_args_from_iter(args.into_iter().map(|arg| arg.into()).chain(opt_const_param))
|
||||
}
|
||||
}
|
||||
|
||||
struct OpaqueTypeExpander<'tcx> {
|
||||
|
@ -30,12 +30,32 @@ mir_build_borrow_of_moved_value = borrow of moved value
|
||||
|
||||
mir_build_call_to_fn_with_requires_unsafe =
|
||||
call to function `{$function}` with `#[target_feature]` is unsafe and requires unsafe block
|
||||
.note = can only be called if the required target features are available
|
||||
.help = in order for the call to be safe, the context requires the following additional target {$missing_target_features_count ->
|
||||
[1] feature
|
||||
*[count] features
|
||||
}: {$missing_target_features}
|
||||
.note = the {$build_target_features} target {$build_target_features_count ->
|
||||
[1] feature
|
||||
*[count] features
|
||||
} being enabled in the build configuration does not remove the requirement to list {$build_target_features_count ->
|
||||
[1] it
|
||||
*[count] them
|
||||
} in `#[target_feature]`
|
||||
.label = call to function with `#[target_feature]`
|
||||
|
||||
mir_build_call_to_fn_with_requires_unsafe_unsafe_op_in_unsafe_fn_allowed =
|
||||
call to function `{$function}` with `#[target_feature]` is unsafe and requires unsafe function or block
|
||||
.note = can only be called if the required target features are available
|
||||
.help = in order for the call to be safe, the context requires the following additional target {$missing_target_features_count ->
|
||||
[1] feature
|
||||
*[count] features
|
||||
}: {$missing_target_features}
|
||||
.note = the {$build_target_features} target {$build_target_features_count ->
|
||||
[1] feature
|
||||
*[count] features
|
||||
} being enabled in the build configuration does not remove the requirement to list {$build_target_features_count ->
|
||||
[1] it
|
||||
*[count] them
|
||||
} in `#[target_feature]`
|
||||
.label = call to function with `#[target_feature]`
|
||||
|
||||
mir_build_call_to_unsafe_fn_requires_unsafe =
|
||||
@ -330,7 +350,17 @@ mir_build_unsafe_op_in_unsafe_fn_borrow_of_layout_constrained_field_requires_uns
|
||||
|
||||
mir_build_unsafe_op_in_unsafe_fn_call_to_fn_with_requires_unsafe =
|
||||
call to function `{$function}` with `#[target_feature]` is unsafe and requires unsafe block (error E0133)
|
||||
.note = can only be called if the required target features are available
|
||||
.help = in order for the call to be safe, the context requires the following additional target {$missing_target_features_count ->
|
||||
[1] feature
|
||||
*[count] features
|
||||
}: {$missing_target_features}
|
||||
.note = the {$build_target_features} target {$build_target_features_count ->
|
||||
[1] feature
|
||||
*[count] features
|
||||
} being enabled in the build configuration does not remove the requirement to list {$build_target_features_count ->
|
||||
[1] it
|
||||
*[count] them
|
||||
} in `#[target_feature]`
|
||||
.label = call to function with `#[target_feature]`
|
||||
|
||||
mir_build_unsafe_op_in_unsafe_fn_call_to_unsafe_fn_requires_unsafe =
|
||||
|
@ -827,6 +827,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
|
||||
PatKind::Constant { .. }
|
||||
| PatKind::Range { .. }
|
||||
| PatKind::Wild
|
||||
| PatKind::Never
|
||||
| PatKind::Error(_) => {}
|
||||
|
||||
PatKind::Deref { ref subpattern } => {
|
||||
|
@ -194,6 +194,12 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
PatKind::Never => {
|
||||
// A never pattern acts like a load from the place.
|
||||
// FIXME(never_patterns): load from the place
|
||||
Ok(())
|
||||
}
|
||||
|
||||
PatKind::Constant { .. } => {
|
||||
// FIXME normalize patterns when possible
|
||||
Err(match_pair)
|
||||
|
@ -75,6 +75,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
|
||||
| PatKind::Array { .. }
|
||||
| PatKind::Wild
|
||||
| PatKind::Binding { .. }
|
||||
| PatKind::Never
|
||||
| PatKind::Leaf { .. }
|
||||
| PatKind::Deref { .. }
|
||||
| PatKind::Error(_) => self.error_simplifiable(match_pair),
|
||||
@ -107,6 +108,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
|
||||
PatKind::Slice { .. }
|
||||
| PatKind::Array { .. }
|
||||
| PatKind::Wild
|
||||
| PatKind::Never
|
||||
| PatKind::Or { .. }
|
||||
| PatKind::Binding { .. }
|
||||
| PatKind::AscribeUserType { .. }
|
||||
@ -494,7 +496,12 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
|
||||
}
|
||||
|
||||
let eq_def_id = self.tcx.require_lang_item(LangItem::PartialEq, Some(source_info.span));
|
||||
let method = trait_method(self.tcx, eq_def_id, sym::eq, [ty, ty]);
|
||||
let method = trait_method(
|
||||
self.tcx,
|
||||
eq_def_id,
|
||||
sym::eq,
|
||||
self.tcx.with_opt_const_effect_param(self.def_id, eq_def_id, [ty, ty]),
|
||||
);
|
||||
|
||||
let bool_ty = self.tcx.types.bool;
|
||||
let eq_result = self.temp(bool_ty, source_info.span);
|
||||
|
@ -1,7 +1,10 @@
|
||||
use std::borrow::Cow;
|
||||
|
||||
use crate::build::ExprCategory;
|
||||
use crate::errors::*;
|
||||
use rustc_middle::thir::visit::{self, Visitor};
|
||||
|
||||
use rustc_errors::DiagnosticArgValue;
|
||||
use rustc_hir as hir;
|
||||
use rustc_middle::mir::BorrowKind;
|
||||
use rustc_middle::thir::*;
|
||||
@ -247,8 +250,9 @@ impl<'a, 'tcx> Visitor<'a, 'tcx> for UnsafetyVisitor<'a, 'tcx> {
|
||||
self.requires_unsafe(pat.span, AccessToUnionField);
|
||||
return; // we can return here since this already requires unsafe
|
||||
}
|
||||
// wildcard doesn't take anything
|
||||
// wildcard/never don't take anything
|
||||
PatKind::Wild |
|
||||
PatKind::Never |
|
||||
// these just wrap other patterns
|
||||
PatKind::Or { .. } |
|
||||
PatKind::InlineConstant { .. } |
|
||||
@ -392,15 +396,29 @@ impl<'a, 'tcx> Visitor<'a, 'tcx> for UnsafetyVisitor<'a, 'tcx> {
|
||||
// the call requires `unsafe`. Don't check this on wasm
|
||||
// targets, though. For more information on wasm see the
|
||||
// is_like_wasm check in hir_analysis/src/collect.rs
|
||||
let callee_features = &self.tcx.codegen_fn_attrs(func_did).target_features;
|
||||
if !self.tcx.sess.target.options.is_like_wasm
|
||||
&& !self
|
||||
.tcx
|
||||
.codegen_fn_attrs(func_did)
|
||||
.target_features
|
||||
&& !callee_features
|
||||
.iter()
|
||||
.all(|feature| self.body_target_features.contains(feature))
|
||||
{
|
||||
self.requires_unsafe(expr.span, CallToFunctionWith(func_did));
|
||||
let missing: Vec<_> = callee_features
|
||||
.iter()
|
||||
.copied()
|
||||
.filter(|feature| !self.body_target_features.contains(feature))
|
||||
.collect();
|
||||
let build_enabled = self
|
||||
.tcx
|
||||
.sess
|
||||
.target_features
|
||||
.iter()
|
||||
.copied()
|
||||
.filter(|feature| missing.contains(feature))
|
||||
.collect();
|
||||
self.requires_unsafe(
|
||||
expr.span,
|
||||
CallToFunctionWith { function: func_did, missing, build_enabled },
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -526,7 +544,7 @@ struct UnusedUnsafeWarning {
|
||||
enclosing_unsafe: Option<UnusedUnsafeEnclosing>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, PartialEq)]
|
||||
#[derive(Clone, PartialEq)]
|
||||
enum UnsafeOpKind {
|
||||
CallToUnsafeFunction(Option<DefId>),
|
||||
UseOfInlineAssembly,
|
||||
@ -537,7 +555,15 @@ enum UnsafeOpKind {
|
||||
AccessToUnionField,
|
||||
MutationOfLayoutConstrainedField,
|
||||
BorrowOfLayoutConstrainedField,
|
||||
CallToFunctionWith(DefId),
|
||||
CallToFunctionWith {
|
||||
function: DefId,
|
||||
/// Target features enabled in callee's `#[target_feature]` but missing in
|
||||
/// caller's `#[target_feature]`.
|
||||
missing: Vec<Symbol>,
|
||||
/// Target features in `missing` that are enabled at compile time
|
||||
/// (e.g., with `-C target-feature`).
|
||||
build_enabled: Vec<Symbol>,
|
||||
},
|
||||
}
|
||||
|
||||
use UnsafeOpKind::*;
|
||||
@ -658,13 +684,22 @@ impl UnsafeOpKind {
|
||||
unsafe_not_inherited_note,
|
||||
},
|
||||
),
|
||||
CallToFunctionWith(did) => tcx.emit_spanned_lint(
|
||||
CallToFunctionWith { function, missing, build_enabled } => tcx.emit_spanned_lint(
|
||||
UNSAFE_OP_IN_UNSAFE_FN,
|
||||
hir_id,
|
||||
span,
|
||||
UnsafeOpInUnsafeFnCallToFunctionWithRequiresUnsafe {
|
||||
span,
|
||||
function: &with_no_trimmed_paths!(tcx.def_path_str(*did)),
|
||||
function: &with_no_trimmed_paths!(tcx.def_path_str(*function)),
|
||||
missing_target_features: DiagnosticArgValue::StrListSepByAnd(
|
||||
missing.iter().map(|feature| Cow::from(feature.as_str())).collect(),
|
||||
),
|
||||
missing_target_features_count: missing.len(),
|
||||
note: if build_enabled.is_empty() { None } else { Some(()) },
|
||||
build_target_features: DiagnosticArgValue::StrListSepByAnd(
|
||||
build_enabled.iter().map(|feature| Cow::from(feature.as_str())).collect(),
|
||||
),
|
||||
build_target_features_count: build_enabled.len(),
|
||||
unsafe_not_inherited_note,
|
||||
},
|
||||
),
|
||||
@ -821,18 +856,38 @@ impl UnsafeOpKind {
|
||||
unsafe_not_inherited_note,
|
||||
});
|
||||
}
|
||||
CallToFunctionWith(did) if unsafe_op_in_unsafe_fn_allowed => {
|
||||
CallToFunctionWith { function, missing, build_enabled }
|
||||
if unsafe_op_in_unsafe_fn_allowed =>
|
||||
{
|
||||
tcx.sess.emit_err(CallToFunctionWithRequiresUnsafeUnsafeOpInUnsafeFnAllowed {
|
||||
span,
|
||||
missing_target_features: DiagnosticArgValue::StrListSepByAnd(
|
||||
missing.iter().map(|feature| Cow::from(feature.as_str())).collect(),
|
||||
),
|
||||
missing_target_features_count: missing.len(),
|
||||
note: if build_enabled.is_empty() { None } else { Some(()) },
|
||||
build_target_features: DiagnosticArgValue::StrListSepByAnd(
|
||||
build_enabled.iter().map(|feature| Cow::from(feature.as_str())).collect(),
|
||||
),
|
||||
build_target_features_count: build_enabled.len(),
|
||||
unsafe_not_inherited_note,
|
||||
function: &tcx.def_path_str(*did),
|
||||
function: &tcx.def_path_str(*function),
|
||||
});
|
||||
}
|
||||
CallToFunctionWith(did) => {
|
||||
CallToFunctionWith { function, missing, build_enabled } => {
|
||||
tcx.sess.emit_err(CallToFunctionWithRequiresUnsafe {
|
||||
span,
|
||||
missing_target_features: DiagnosticArgValue::StrListSepByAnd(
|
||||
missing.iter().map(|feature| Cow::from(feature.as_str())).collect(),
|
||||
),
|
||||
missing_target_features_count: missing.len(),
|
||||
note: if build_enabled.is_empty() { None } else { Some(()) },
|
||||
build_target_features: DiagnosticArgValue::StrListSepByAnd(
|
||||
build_enabled.iter().map(|feature| Cow::from(feature.as_str())).collect(),
|
||||
),
|
||||
build_target_features_count: build_enabled.len(),
|
||||
unsafe_not_inherited_note,
|
||||
function: &tcx.def_path_str(*did),
|
||||
function: &tcx.def_path_str(*function),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
@ -2,6 +2,7 @@ use crate::{
|
||||
fluent_generated as fluent,
|
||||
thir::pattern::{deconstruct_pat::WitnessPat, MatchCheckCtxt},
|
||||
};
|
||||
use rustc_errors::DiagnosticArgValue;
|
||||
use rustc_errors::{
|
||||
error_code, AddToDiagnostic, Applicability, Diagnostic, DiagnosticBuilder, ErrorGuaranteed,
|
||||
Handler, IntoDiagnostic, MultiSpan, SubdiagnosticMessage,
|
||||
@ -124,11 +125,17 @@ pub struct UnsafeOpInUnsafeFnBorrowOfLayoutConstrainedFieldRequiresUnsafe {
|
||||
|
||||
#[derive(LintDiagnostic)]
|
||||
#[diag(mir_build_unsafe_op_in_unsafe_fn_call_to_fn_with_requires_unsafe)]
|
||||
#[note]
|
||||
#[help]
|
||||
pub struct UnsafeOpInUnsafeFnCallToFunctionWithRequiresUnsafe<'a> {
|
||||
#[label]
|
||||
pub span: Span,
|
||||
pub function: &'a str,
|
||||
pub missing_target_features: DiagnosticArgValue<'a>,
|
||||
pub missing_target_features_count: usize,
|
||||
#[note]
|
||||
pub note: Option<()>,
|
||||
pub build_target_features: DiagnosticArgValue<'a>,
|
||||
pub build_target_features_count: usize,
|
||||
#[subdiagnostic]
|
||||
pub unsafe_not_inherited_note: Option<UnsafeNotInheritedLintNote>,
|
||||
}
|
||||
@ -369,24 +376,36 @@ pub struct BorrowOfLayoutConstrainedFieldRequiresUnsafeUnsafeOpInUnsafeFnAllowed
|
||||
|
||||
#[derive(Diagnostic)]
|
||||
#[diag(mir_build_call_to_fn_with_requires_unsafe, code = "E0133")]
|
||||
#[note]
|
||||
#[help]
|
||||
pub struct CallToFunctionWithRequiresUnsafe<'a> {
|
||||
#[primary_span]
|
||||
#[label]
|
||||
pub span: Span,
|
||||
pub function: &'a str,
|
||||
pub missing_target_features: DiagnosticArgValue<'a>,
|
||||
pub missing_target_features_count: usize,
|
||||
#[note]
|
||||
pub note: Option<()>,
|
||||
pub build_target_features: DiagnosticArgValue<'a>,
|
||||
pub build_target_features_count: usize,
|
||||
#[subdiagnostic]
|
||||
pub unsafe_not_inherited_note: Option<UnsafeNotInheritedNote>,
|
||||
}
|
||||
|
||||
#[derive(Diagnostic)]
|
||||
#[diag(mir_build_call_to_fn_with_requires_unsafe_unsafe_op_in_unsafe_fn_allowed, code = "E0133")]
|
||||
#[note]
|
||||
#[help]
|
||||
pub struct CallToFunctionWithRequiresUnsafeUnsafeOpInUnsafeFnAllowed<'a> {
|
||||
#[primary_span]
|
||||
#[label]
|
||||
pub span: Span,
|
||||
pub function: &'a str,
|
||||
pub missing_target_features: DiagnosticArgValue<'a>,
|
||||
pub missing_target_features_count: usize,
|
||||
#[note]
|
||||
pub note: Option<()>,
|
||||
pub build_target_features: DiagnosticArgValue<'a>,
|
||||
pub build_target_features_count: usize,
|
||||
#[subdiagnostic]
|
||||
pub unsafe_not_inherited_note: Option<UnsafeNotInheritedNote>,
|
||||
}
|
||||
|
@ -258,18 +258,26 @@ impl<'tcx> ConstToPat<'tcx> {
|
||||
|
||||
#[instrument(level = "trace", skip(self), ret)]
|
||||
fn type_has_partial_eq_impl(&self, ty: Ty<'tcx>) -> bool {
|
||||
let tcx = self.tcx();
|
||||
// double-check there even *is* a semantic `PartialEq` to dispatch to.
|
||||
//
|
||||
// (If there isn't, then we can safely issue a hard
|
||||
// error, because that's never worked, due to compiler
|
||||
// using `PartialEq::eq` in this scenario in the past.)
|
||||
let partial_eq_trait_id =
|
||||
self.tcx().require_lang_item(hir::LangItem::PartialEq, Some(self.span));
|
||||
let partial_eq_trait_id = tcx.require_lang_item(hir::LangItem::PartialEq, Some(self.span));
|
||||
let partial_eq_obligation = Obligation::new(
|
||||
self.tcx(),
|
||||
tcx,
|
||||
ObligationCause::dummy(),
|
||||
self.param_env,
|
||||
ty::TraitRef::new(self.tcx(), partial_eq_trait_id, [ty, ty]),
|
||||
ty::TraitRef::new(
|
||||
tcx,
|
||||
partial_eq_trait_id,
|
||||
tcx.with_opt_const_effect_param(
|
||||
tcx.hir().enclosing_body_owner(self.id),
|
||||
partial_eq_trait_id,
|
||||
[ty, ty],
|
||||
),
|
||||
),
|
||||
);
|
||||
|
||||
// This *could* accept a type that isn't actually `PartialEq`, because region bounds get
|
||||
|
@ -1557,6 +1557,12 @@ impl<'p, 'tcx> DeconstructedPat<'p, 'tcx> {
|
||||
let pats = expand_or_pat(pat);
|
||||
fields = Fields::from_iter(cx, pats.into_iter().map(mkpat));
|
||||
}
|
||||
PatKind::Never => {
|
||||
// FIXME(never_patterns): handle `!` in exhaustiveness. This is a sane default
|
||||
// in the meantime.
|
||||
ctor = Wildcard;
|
||||
fields = Fields::empty();
|
||||
}
|
||||
PatKind::Error(_) => {
|
||||
ctor = Opaque(OpaqueId::new());
|
||||
fields = Fields::empty();
|
||||
|
@ -251,6 +251,8 @@ impl<'a, 'tcx> PatCtxt<'a, 'tcx> {
|
||||
let kind = match pat.kind {
|
||||
hir::PatKind::Wild => PatKind::Wild,
|
||||
|
||||
hir::PatKind::Never => PatKind::Never,
|
||||
|
||||
hir::PatKind::Lit(value) => self.lower_lit(value),
|
||||
|
||||
hir::PatKind::Range(ref lo_expr, ref hi_expr, end) => {
|
||||
|
@ -31,8 +31,8 @@ const INDENT: &str = " ";
|
||||
|
||||
macro_rules! print_indented {
|
||||
($writer:ident, $s:expr, $indent_lvl:expr) => {
|
||||
let indent = (0..$indent_lvl).map(|_| INDENT).collect::<Vec<_>>().concat();
|
||||
writeln!($writer, "{}{}", indent, $s).expect("unable to write to ThirPrinter");
|
||||
$writer.indent($indent_lvl);
|
||||
writeln!($writer, "{}", $s).expect("unable to write to ThirPrinter");
|
||||
};
|
||||
}
|
||||
|
||||
@ -48,6 +48,12 @@ impl<'a, 'tcx> ThirPrinter<'a, 'tcx> {
|
||||
Self { thir, fmt: String::new() }
|
||||
}
|
||||
|
||||
fn indent(&mut self, level: usize) {
|
||||
for _ in 0..level {
|
||||
self.fmt.push_str(INDENT);
|
||||
}
|
||||
}
|
||||
|
||||
fn print(&mut self) {
|
||||
print_indented!(self, "params: [", 0);
|
||||
for param in self.thir.params.iter() {
|
||||
@ -636,6 +642,9 @@ impl<'a, 'tcx> ThirPrinter<'a, 'tcx> {
|
||||
PatKind::Wild => {
|
||||
print_indented!(self, "Wild", depth_lvl + 1);
|
||||
}
|
||||
PatKind::Never => {
|
||||
print_indented!(self, "Never", depth_lvl + 1);
|
||||
}
|
||||
PatKind::AscribeUserType { ascription, subpattern } => {
|
||||
print_indented!(self, "AscribeUserType: {", depth_lvl + 1);
|
||||
print_indented!(self, format!("ascription: {:?}", ascription), depth_lvl + 2);
|
||||
|
@ -42,8 +42,19 @@ mir_transform_requires_unsafe = {$details} is unsafe and requires unsafe {$op_in
|
||||
}
|
||||
.not_inherited = items do not inherit unsafety from separate enclosing items
|
||||
|
||||
mir_transform_target_feature_call_help = in order for the call to be safe, the context requires the following additional target {$missing_target_features_count ->
|
||||
[1] feature
|
||||
*[count] features
|
||||
}: {$missing_target_features}
|
||||
|
||||
mir_transform_target_feature_call_label = call to function with `#[target_feature]`
|
||||
mir_transform_target_feature_call_note = can only be called if the required target features are available
|
||||
mir_transform_target_feature_call_note = the {$build_target_features} target {$build_target_features_count ->
|
||||
[1] feature
|
||||
*[count] features
|
||||
} being enabled in the build configuration does not remove the requirement to list {$build_target_features_count ->
|
||||
[1] it
|
||||
*[count] them
|
||||
} in `#[target_feature]`
|
||||
|
||||
mir_transform_unaligned_packed_ref = reference to packed field is unaligned
|
||||
.note = packed structs are only aligned by one byte, and many modern architectures penalize unaligned field accesses
|
||||
|
@ -287,19 +287,20 @@ impl<'tcx> UnsafetyChecker<'_, 'tcx> {
|
||||
.safety;
|
||||
match safety {
|
||||
// `unsafe` blocks are required in safe code
|
||||
Safety::Safe => violations.into_iter().for_each(|&violation| {
|
||||
Safety::Safe => violations.into_iter().for_each(|violation| {
|
||||
match violation.kind {
|
||||
UnsafetyViolationKind::General => {}
|
||||
UnsafetyViolationKind::UnsafeFn => {
|
||||
bug!("`UnsafetyViolationKind::UnsafeFn` in an `Safe` context")
|
||||
}
|
||||
}
|
||||
if !self.violations.contains(&violation) {
|
||||
self.violations.push(violation)
|
||||
if !self.violations.contains(violation) {
|
||||
self.violations.push(violation.clone())
|
||||
}
|
||||
}),
|
||||
// With the RFC 2585, no longer allow `unsafe` operations in `unsafe fn`s
|
||||
Safety::FnUnsafe => violations.into_iter().for_each(|&(mut violation)| {
|
||||
Safety::FnUnsafe => violations.into_iter().for_each(|violation| {
|
||||
let mut violation = violation.clone();
|
||||
violation.kind = UnsafetyViolationKind::UnsafeFn;
|
||||
if !self.violations.contains(&violation) {
|
||||
self.violations.push(violation)
|
||||
@ -367,9 +368,22 @@ impl<'tcx> UnsafetyChecker<'_, 'tcx> {
|
||||
|
||||
// Is `callee_features` a subset of `calling_features`?
|
||||
if !callee_features.iter().all(|feature| self_features.contains(feature)) {
|
||||
let missing: Vec<_> = callee_features
|
||||
.iter()
|
||||
.copied()
|
||||
.filter(|feature| !self_features.contains(feature))
|
||||
.collect();
|
||||
let build_enabled = self
|
||||
.tcx
|
||||
.sess
|
||||
.target_features
|
||||
.iter()
|
||||
.copied()
|
||||
.filter(|feature| missing.contains(feature))
|
||||
.collect();
|
||||
self.require_unsafe(
|
||||
UnsafetyViolationKind::General,
|
||||
UnsafetyViolationDetails::CallToFunctionWith,
|
||||
UnsafetyViolationDetails::CallToFunctionWith { missing, build_enabled },
|
||||
)
|
||||
}
|
||||
}
|
||||
@ -385,7 +399,7 @@ pub(crate) fn provide(providers: &mut Providers) {
|
||||
enum Context {
|
||||
Safe,
|
||||
/// in an `unsafe fn`
|
||||
UnsafeFn(HirId),
|
||||
UnsafeFn,
|
||||
/// in a *used* `unsafe` block
|
||||
/// (i.e. a block without unused-unsafe warning)
|
||||
UnsafeBlock(HirId),
|
||||
@ -407,7 +421,7 @@ impl<'tcx> intravisit::Visitor<'tcx> for UnusedUnsafeVisitor<'_, 'tcx> {
|
||||
};
|
||||
let unused_unsafe = match (self.context, used) {
|
||||
(_, false) => UnusedUnsafe::Unused,
|
||||
(Context::Safe, true) | (Context::UnsafeFn(_), true) => {
|
||||
(Context::Safe, true) | (Context::UnsafeFn, true) => {
|
||||
let previous_context = self.context;
|
||||
self.context = Context::UnsafeBlock(block.hir_id);
|
||||
intravisit::walk_block(self, block);
|
||||
@ -454,7 +468,7 @@ fn check_unused_unsafe(
|
||||
let body = tcx.hir().body(body_id);
|
||||
let hir_id = tcx.local_def_id_to_hir_id(def_id);
|
||||
let context = match tcx.hir().fn_sig_by_hir_id(hir_id) {
|
||||
Some(sig) if sig.header.unsafety == hir::Unsafety::Unsafe => Context::UnsafeFn(hir_id),
|
||||
Some(sig) if sig.header.unsafety == hir::Unsafety::Unsafe => Context::UnsafeFn,
|
||||
_ => Context::Safe,
|
||||
};
|
||||
|
||||
@ -528,8 +542,9 @@ pub fn check_unsafety(tcx: TyCtxt<'_>, def_id: LocalDefId) {
|
||||
// Only suggest wrapping the entire function body in an unsafe block once
|
||||
let mut suggest_unsafe_block = true;
|
||||
|
||||
for &UnsafetyViolation { source_info, lint_root, kind, details } in violations.iter() {
|
||||
let details = errors::RequiresUnsafeDetail { violation: details, span: source_info.span };
|
||||
for &UnsafetyViolation { source_info, lint_root, kind, ref details } in violations.iter() {
|
||||
let details =
|
||||
errors::RequiresUnsafeDetail { violation: details.clone(), span: source_info.span };
|
||||
|
||||
match kind {
|
||||
UnsafetyViolationKind::General => {
|
||||
|
@ -439,6 +439,7 @@ impl<'mir, 'tcx> ConstPropagator<'mir, 'tcx> {
|
||||
|
||||
// FIXME we need to revisit this for #67176
|
||||
if rvalue.has_param() {
|
||||
trace!("skipping, has param");
|
||||
return None;
|
||||
}
|
||||
if !rvalue
|
||||
@ -707,7 +708,11 @@ impl<'tcx> Visitor<'tcx> for ConstPropagator<'_, 'tcx> {
|
||||
fn visit_assign(&mut self, place: &Place<'tcx>, rvalue: &Rvalue<'tcx>, location: Location) {
|
||||
self.super_assign(place, rvalue, location);
|
||||
|
||||
let Some(()) = self.check_rvalue(rvalue) else { return };
|
||||
let Some(()) = self.check_rvalue(rvalue) else {
|
||||
trace!("rvalue check failed, removing const");
|
||||
Self::remove_const(&mut self.ecx, place.local);
|
||||
return;
|
||||
};
|
||||
|
||||
match self.ecx.machine.can_const_prop[place.local] {
|
||||
// Do nothing if the place is indirect.
|
||||
|
@ -51,7 +51,6 @@
|
||||
//! Otherwise it drops all the values in scope at the last suspension point.
|
||||
|
||||
use crate::abort_unwinding_calls;
|
||||
use crate::add_call_guards;
|
||||
use crate::deref_separator::deref_finder;
|
||||
use crate::errors;
|
||||
use crate::pass_manager as pm;
|
||||
@ -1168,18 +1167,9 @@ fn create_coroutine_drop_shim<'tcx>(
|
||||
simplify::remove_dead_blocks(&mut body);
|
||||
|
||||
// Update the body's def to become the drop glue.
|
||||
// This needs to be updated before the AbortUnwindingCalls pass.
|
||||
let coroutine_instance = body.source.instance;
|
||||
let drop_in_place = tcx.require_lang_item(LangItem::DropInPlace, None);
|
||||
let drop_instance = InstanceDef::DropGlue(drop_in_place, Some(coroutine_ty));
|
||||
body.source.instance = drop_instance;
|
||||
|
||||
pm::run_passes_no_validate(
|
||||
tcx,
|
||||
&mut body,
|
||||
&[&abort_unwinding_calls::AbortUnwindingCalls, &add_call_guards::CriticalCallEdges],
|
||||
None,
|
||||
);
|
||||
|
||||
// Temporary change MirSource to coroutine's instance so that dump_mir produces more sensible
|
||||
// filename.
|
||||
|
@ -1,6 +1,8 @@
|
||||
use std::borrow::Cow;
|
||||
|
||||
use rustc_errors::{
|
||||
Applicability, DecorateLint, DiagnosticBuilder, DiagnosticMessage, EmissionGuarantee, Handler,
|
||||
IntoDiagnostic,
|
||||
Applicability, DecorateLint, DiagnosticArgValue, DiagnosticBuilder, DiagnosticMessage,
|
||||
EmissionGuarantee, Handler, IntoDiagnostic,
|
||||
};
|
||||
use rustc_macros::{Diagnostic, LintDiagnostic, Subdiagnostic};
|
||||
use rustc_middle::mir::{AssertKind, UnsafetyViolationDetails};
|
||||
@ -9,6 +11,8 @@ use rustc_session::lint::{self, Lint};
|
||||
use rustc_span::def_id::DefId;
|
||||
use rustc_span::Span;
|
||||
|
||||
use crate::fluent_generated as fluent;
|
||||
|
||||
#[derive(LintDiagnostic)]
|
||||
pub(crate) enum ConstMutate {
|
||||
#[diag(mir_transform_const_modify)]
|
||||
@ -61,72 +65,105 @@ pub(crate) struct RequiresUnsafe {
|
||||
impl<'sess, G: EmissionGuarantee> IntoDiagnostic<'sess, G> for RequiresUnsafe {
|
||||
#[track_caller]
|
||||
fn into_diagnostic(self, handler: &'sess Handler) -> DiagnosticBuilder<'sess, G> {
|
||||
let mut diag =
|
||||
handler.struct_diagnostic(crate::fluent_generated::mir_transform_requires_unsafe);
|
||||
let mut diag = handler.struct_diagnostic(fluent::mir_transform_requires_unsafe);
|
||||
diag.code(rustc_errors::DiagnosticId::Error("E0133".to_string()));
|
||||
diag.set_span(self.span);
|
||||
diag.span_label(self.span, self.details.label());
|
||||
diag.note(self.details.note());
|
||||
let desc = handler.eagerly_translate_to_string(self.details.label(), [].into_iter());
|
||||
diag.set_arg("details", desc);
|
||||
diag.set_arg("op_in_unsafe_fn_allowed", self.op_in_unsafe_fn_allowed);
|
||||
self.details.add_subdiagnostics(&mut diag);
|
||||
if let Some(sp) = self.enclosing {
|
||||
diag.span_label(sp, crate::fluent_generated::mir_transform_not_inherited);
|
||||
diag.span_label(sp, fluent::mir_transform_not_inherited);
|
||||
}
|
||||
diag
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone)]
|
||||
#[derive(Clone)]
|
||||
pub(crate) struct RequiresUnsafeDetail {
|
||||
pub span: Span,
|
||||
pub violation: UnsafetyViolationDetails,
|
||||
}
|
||||
|
||||
impl RequiresUnsafeDetail {
|
||||
fn note(self) -> DiagnosticMessage {
|
||||
fn add_subdiagnostics<G: EmissionGuarantee>(&self, diag: &mut DiagnosticBuilder<'_, G>) {
|
||||
use UnsafetyViolationDetails::*;
|
||||
match self.violation {
|
||||
CallToUnsafeFunction => crate::fluent_generated::mir_transform_call_to_unsafe_note,
|
||||
UseOfInlineAssembly => crate::fluent_generated::mir_transform_use_of_asm_note,
|
||||
InitializingTypeWith => {
|
||||
crate::fluent_generated::mir_transform_initializing_valid_range_note
|
||||
CallToUnsafeFunction => {
|
||||
diag.note(fluent::mir_transform_call_to_unsafe_note);
|
||||
}
|
||||
UseOfInlineAssembly => {
|
||||
diag.note(fluent::mir_transform_use_of_asm_note);
|
||||
}
|
||||
InitializingTypeWith => {
|
||||
diag.note(fluent::mir_transform_initializing_valid_range_note);
|
||||
}
|
||||
CastOfPointerToInt => {
|
||||
diag.note(fluent::mir_transform_const_ptr2int_note);
|
||||
}
|
||||
UseOfMutableStatic => {
|
||||
diag.note(fluent::mir_transform_use_of_static_mut_note);
|
||||
}
|
||||
UseOfExternStatic => {
|
||||
diag.note(fluent::mir_transform_use_of_extern_static_note);
|
||||
}
|
||||
DerefOfRawPointer => {
|
||||
diag.note(fluent::mir_transform_deref_ptr_note);
|
||||
}
|
||||
AccessToUnionField => {
|
||||
diag.note(fluent::mir_transform_union_access_note);
|
||||
}
|
||||
CastOfPointerToInt => crate::fluent_generated::mir_transform_const_ptr2int_note,
|
||||
UseOfMutableStatic => crate::fluent_generated::mir_transform_use_of_static_mut_note,
|
||||
UseOfExternStatic => crate::fluent_generated::mir_transform_use_of_extern_static_note,
|
||||
DerefOfRawPointer => crate::fluent_generated::mir_transform_deref_ptr_note,
|
||||
AccessToUnionField => crate::fluent_generated::mir_transform_union_access_note,
|
||||
MutationOfLayoutConstrainedField => {
|
||||
crate::fluent_generated::mir_transform_mutation_layout_constrained_note
|
||||
diag.note(fluent::mir_transform_mutation_layout_constrained_note);
|
||||
}
|
||||
BorrowOfLayoutConstrainedField => {
|
||||
crate::fluent_generated::mir_transform_mutation_layout_constrained_borrow_note
|
||||
diag.note(fluent::mir_transform_mutation_layout_constrained_borrow_note);
|
||||
}
|
||||
CallToFunctionWith { ref missing, ref build_enabled } => {
|
||||
diag.help(fluent::mir_transform_target_feature_call_help);
|
||||
diag.set_arg(
|
||||
"missing_target_features",
|
||||
DiagnosticArgValue::StrListSepByAnd(
|
||||
missing.iter().map(|feature| Cow::from(feature.as_str())).collect(),
|
||||
),
|
||||
);
|
||||
diag.set_arg("missing_target_features_count", missing.len());
|
||||
if !build_enabled.is_empty() {
|
||||
diag.note(fluent::mir_transform_target_feature_call_note);
|
||||
diag.set_arg(
|
||||
"build_target_features",
|
||||
DiagnosticArgValue::StrListSepByAnd(
|
||||
build_enabled
|
||||
.iter()
|
||||
.map(|feature| Cow::from(feature.as_str()))
|
||||
.collect(),
|
||||
),
|
||||
);
|
||||
diag.set_arg("build_target_features_count", build_enabled.len());
|
||||
}
|
||||
}
|
||||
CallToFunctionWith => crate::fluent_generated::mir_transform_target_feature_call_note,
|
||||
}
|
||||
}
|
||||
|
||||
fn label(self) -> DiagnosticMessage {
fn label(&self) -> DiagnosticMessage {
use UnsafetyViolationDetails::*;
match self.violation {
CallToUnsafeFunction => crate::fluent_generated::mir_transform_call_to_unsafe_label,
UseOfInlineAssembly => crate::fluent_generated::mir_transform_use_of_asm_label,
InitializingTypeWith => {
crate::fluent_generated::mir_transform_initializing_valid_range_label
}
CastOfPointerToInt => crate::fluent_generated::mir_transform_const_ptr2int_label,
UseOfMutableStatic => crate::fluent_generated::mir_transform_use_of_static_mut_label,
UseOfExternStatic => crate::fluent_generated::mir_transform_use_of_extern_static_label,
DerefOfRawPointer => crate::fluent_generated::mir_transform_deref_ptr_label,
AccessToUnionField => crate::fluent_generated::mir_transform_union_access_label,
CallToUnsafeFunction => fluent::mir_transform_call_to_unsafe_label,
UseOfInlineAssembly => fluent::mir_transform_use_of_asm_label,
InitializingTypeWith => fluent::mir_transform_initializing_valid_range_label,
CastOfPointerToInt => fluent::mir_transform_const_ptr2int_label,
UseOfMutableStatic => fluent::mir_transform_use_of_static_mut_label,
UseOfExternStatic => fluent::mir_transform_use_of_extern_static_label,
DerefOfRawPointer => fluent::mir_transform_deref_ptr_label,
AccessToUnionField => fluent::mir_transform_union_access_label,
MutationOfLayoutConstrainedField => {
crate::fluent_generated::mir_transform_mutation_layout_constrained_label
fluent::mir_transform_mutation_layout_constrained_label
}
BorrowOfLayoutConstrainedField => {
crate::fluent_generated::mir_transform_mutation_layout_constrained_borrow_label
fluent::mir_transform_mutation_layout_constrained_borrow_label
}
CallToFunctionWith => crate::fluent_generated::mir_transform_target_feature_call_label,
CallToFunctionWith { .. } => fluent::mir_transform_target_feature_call_label,
}
}
}
@ -151,12 +188,12 @@ impl<'a> DecorateLint<'a, ()> for UnsafeOpInUnsafeFn {
let desc = handler.eagerly_translate_to_string(self.details.label(), [].into_iter());
diag.set_arg("details", desc);
diag.span_label(self.details.span, self.details.label());
diag.note(self.details.note());
self.details.add_subdiagnostics(diag);

if let Some((start, end, fn_sig)) = self.suggest_unsafe_block {
diag.span_note(fn_sig, crate::fluent_generated::mir_transform_note);
diag.span_note(fn_sig, fluent::mir_transform_note);
diag.tool_only_multipart_suggestion(
crate::fluent_generated::mir_transform_suggestion,
fluent::mir_transform_suggestion,
vec![(start, " unsafe {".into()), (end, "}".into())],
Applicability::MaybeIncorrect,
);
@ -166,7 +203,7 @@ impl<'a> DecorateLint<'a, ()> for UnsafeOpInUnsafeFn {
}

fn msg(&self) -> DiagnosticMessage {
crate::fluent_generated::mir_transform_unsafe_op_in_unsafe_fn
fluent::mir_transform_unsafe_op_in_unsafe_fn
}
}
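For readers unfamiliar with the lint being decorated here, a minimal sketch (assumed names, not taken from this diff) of what `unsafe_op_in_unsafe_fn` flags and what the tool-only suggestion wraps in `unsafe { ... }`:

    #![warn(unsafe_op_in_unsafe_fn)]

    /// Under this lint an `unsafe fn` body no longer acts as one big unsafe block:
    /// the raw-pointer dereference must be wrapped explicitly, as the suggestion does.
    unsafe fn read(ptr: *const u32) -> u32 {
        unsafe { *ptr }
    }

    fn main() {
        let x = 7_u32;
        // SAFETY: `&x` is a valid, aligned pointer for the duration of the call.
        let y = unsafe { read(&x) };
        assert_eq!(y, 7);
    }
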
@ -193,12 +230,8 @@ impl<'a, P: std::fmt::Debug> DecorateLint<'a, ()> for AssertLint<P> {

fn msg(&self) -> DiagnosticMessage {
match self {
AssertLint::ArithmeticOverflow(..) => {
crate::fluent_generated::mir_transform_arithmetic_overflow
}
AssertLint::UnconditionalPanic(..) => {
crate::fluent_generated::mir_transform_operation_will_panic
}
AssertLint::ArithmeticOverflow(..) => fluent::mir_transform_arithmetic_overflow,
AssertLint::UnconditionalPanic(..) => fluent::mir_transform_operation_will_panic,
}
}
}
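As a reminder of what these two lint messages correspond to, a small sketch (illustrative values, not from this diff) of code the compiler rejects at compile time and the usual workarounds:

    fn main() {
        let x: u8 = 255;
        // `let y = x + 1;` is a compile-time-known overflow (`arithmetic_overflow`).
        let y = x.wrapping_add(1); // explicit wrapping sidesteps the lint
        println!("{y}");

        let v = [1, 2, 3];
        // `let z = v[3];` always panics, which `unconditional_panic` reports.
        let z = v.get(3); // returns `None` instead of panicking
        println!("{z:?}");
    }
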
@ -255,11 +288,11 @@ impl<'a> DecorateLint<'a, ()> for MustNotSupend<'_, '_> {
self,
diag: &'b mut rustc_errors::DiagnosticBuilder<'a, ()>,
) -> &'b mut rustc_errors::DiagnosticBuilder<'a, ()> {
diag.span_label(self.yield_sp, crate::fluent_generated::_subdiag::label);
diag.span_label(self.yield_sp, fluent::_subdiag::label);
if let Some(reason) = self.reason {
diag.subdiagnostic(reason);
}
diag.span_help(self.src_sp, crate::fluent_generated::_subdiag::help);
diag.span_help(self.src_sp, fluent::_subdiag::help);
diag.set_arg("pre", self.pre);
diag.set_arg("def_path", self.tcx.def_path_str(self.def_id));
diag.set_arg("post", self.post);
@ -267,7 +300,7 @@ impl<'a> DecorateLint<'a, ()> for MustNotSupend<'_, '_> {
}

fn msg(&self) -> rustc_errors::DiagnosticMessage {
crate::fluent_generated::mir_transform_must_not_suspend
fluent::mir_transform_must_not_suspend
}
}
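For context on the `must_not_suspend` decoration above, a hedged sketch of the shape it reports: a value whose type carries `#[must_not_suspend]` stays live across an `.await` point. The lint and the attribute are nightly-only, and using a `MutexGuard` here is purely an illustration.

    use std::sync::Mutex;

    async fn tick() {}

    async fn holds_guard_across_await(m: &Mutex<i32>) {
        let guard = m.lock().unwrap();
        // The guard is still alive across this suspend point, which is the
        // pattern `must_not_suspend` is designed to flag.
        tick().await;
        drop(guard);
    }

    fn main() {
        // Constructing the future is enough to type-check the async fn;
        // no executor is required for this sketch.
        let m = Mutex::new(0);
        let _fut = holds_guard_across_await(&m);
    }
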
@ -74,11 +74,13 @@ fn make_shim<'tcx>(tcx: TyCtxt<'tcx>, instance: ty::InstanceDef<'tcx>) -> Body<'
let mut body = EarlyBinder::bind(body.clone()).instantiate(tcx, args);
debug!("make_shim({:?}) = {:?}", instance, body);

// Run empty passes to mark phase change and perform validation.
pm::run_passes(
tcx,
&mut body,
&[],
&[
&abort_unwinding_calls::AbortUnwindingCalls,
&add_call_guards::CriticalCallEdges,
],
Some(MirPhase::Runtime(RuntimePhase::Optimized)),
);

@ -492,9 +492,13 @@ parse_match_arm_body_without_braces = `match` arm body without braces
} with a body
.suggestion_use_comma_not_semicolon = replace `;` with `,` to end a `match` arm expression

parse_maybe_comparison = you might have meant to compare for equality

parse_maybe_fn_typo_with_impl = you might have meant to write `impl` instead of `fn`
.suggestion = replace `fn` with `impl` here

parse_maybe_missing_let = you might have meant to continue the let-chain
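The two new messages above back the condition recovery added later in this commit; a minimal sketch (hypothetical snippet, not from the diff) of the code shape they describe and a form that compiles:

    fn main() {
        let x = 5;
        // `if x = 5 { ... }` is an assignment, not a comparison; the parser now
        // points at it with "you might have meant to compare for equality" and
        // suggests `==`. The let-chain variant (`if let Some(n) = opt && m = f()`)
        // gets "you might have meant to continue the let-chain" instead.
        if x == 5 {
            println!("x is five");
        }
    }
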
parse_maybe_recover_from_bad_qpath_stage_2 =
missing angle brackets in associated item path
.suggestion = types that don't start with an identifier need to be surrounded with angle brackets in qualified paths
@ -721,6 +725,9 @@ parse_sugg_wrap_pattern_in_parens = wrap the pattern in parentheses
parse_switch_mut_let_order =
switch the order of `mut` and `let`

parse_switch_ref_box_order = switch the order of `ref` and `box`
.suggestion = swap them

parse_ternary_operator = Rust has no ternary operator
.help = use an `if-else` expression instead
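The `parse_ternary_operator` message has a dedicated recovery path; a short sketch (illustrative values) of the rejected C-style form and the `if-else` expression the help text points to:

    fn main() {
        let x = 5;
        // `let label = x > 3 ? "big" : "small";` is rejected with
        // "Rust has no ternary operator"; an `if` expression is the replacement.
        let label = if x > 3 { "big" } else { "small" };
        println!("{label}");
    }
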
@ -770,6 +777,9 @@ parse_unexpected_lifetime_in_pattern = unexpected lifetime `{$symbol}` in patter
parse_unexpected_parentheses_in_for_head = unexpected parentheses surrounding `for` loop head
.suggestion = remove parentheses in `for` loop

parse_unexpected_parentheses_in_match_arm_pattern = unexpected parentheses surrounding `match` arm pattern
.suggestion = remove parentheses surrounding the pattern

parse_unexpected_self_in_generic_parameters = unexpected keyword `Self` in generic parameters
.note = you cannot use `Self` as a generic parameter because it is reserved for associated items
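These messages pair with the parser recovery rewritten later in this diff (`parse_for_head` and `parse_match_arm_pat_and_guard`); a minimal sketch (hypothetical loop) of the `for` case and the accepted spelling the suggestion produces:

    fn main() {
        let values = vec![1, 2, 3];
        // `for (v in &values) { ... }` trips "unexpected parentheses surrounding
        // `for` loop head"; the machine-applicable suggestion removes them.
        for v in &values {
            println!("{v}");
        }
    }
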
@ -137,6 +137,14 @@ pub(crate) enum InvalidVariableDeclarationSub {
UseLetNotVar(#[primary_span] Span),
}

#[derive(Diagnostic)]
#[diag(parse_switch_ref_box_order)]
pub(crate) struct SwitchRefBoxOrder {
#[primary_span]
#[suggestion(applicability = "machine-applicable", code = "box ref")]
pub span: Span,
}

#[derive(Diagnostic)]
#[diag(parse_invalid_comparison_operator)]
pub(crate) struct InvalidComparisonOperator {
@ -407,6 +415,32 @@ pub(crate) struct ExpectedExpressionFoundLet {
pub span: Span,
#[subdiagnostic]
pub reason: ForbiddenLetReason,
#[subdiagnostic]
pub missing_let: Option<MaybeMissingLet>,
#[subdiagnostic]
pub comparison: Option<MaybeComparison>,
}

#[derive(Subdiagnostic, Clone, Copy)]
#[multipart_suggestion(
parse_maybe_missing_let,
applicability = "maybe-incorrect",
style = "verbose"
)]
pub(crate) struct MaybeMissingLet {
#[suggestion_part(code = "let ")]
pub span: Span,
}

#[derive(Subdiagnostic, Clone, Copy)]
#[multipart_suggestion(
parse_maybe_comparison,
applicability = "maybe-incorrect",
style = "verbose"
)]
pub(crate) struct MaybeComparison {
#[suggestion_part(code = "=")]
pub span: Span,
}

#[derive(Diagnostic)]
@ -1241,12 +1275,28 @@ pub(crate) struct ParenthesesInForHead {
#[derive(Subdiagnostic)]
#[multipart_suggestion(parse_suggestion, applicability = "machine-applicable")]
pub(crate) struct ParenthesesInForHeadSugg {
#[suggestion_part(code = "{left_snippet}")]
#[suggestion_part(code = " ")]
pub left: Span,
pub left_snippet: String,
#[suggestion_part(code = "{right_snippet}")]
#[suggestion_part(code = " ")]
pub right: Span,
}

#[derive(Diagnostic)]
#[diag(parse_unexpected_parentheses_in_match_arm_pattern)]
pub(crate) struct ParenthesesInMatchPat {
#[primary_span]
pub span: Vec<Span>,
#[subdiagnostic]
pub sugg: ParenthesesInMatchPatSugg,
}

#[derive(Subdiagnostic)]
#[multipart_suggestion(parse_suggestion, applicability = "machine-applicable")]
pub(crate) struct ParenthesesInMatchPatSugg {
#[suggestion_part(code = "")]
pub left: Span,
#[suggestion_part(code = "")]
pub right: Span,
pub right_snippet: String,
}

#[derive(Diagnostic)]

@ -11,12 +11,12 @@ use crate::errors::{
DoubleColonInBound, ExpectedIdentifier, ExpectedSemi, ExpectedSemiSugg,
GenericParamsWithoutAngleBrackets, GenericParamsWithoutAngleBracketsSugg,
HelpIdentifierStartsWithNumber, InInTypo, IncorrectAwait, IncorrectSemicolon,
IncorrectUseOfAwait, ParenthesesInForHead, ParenthesesInForHeadSugg,
PatternMethodParamWithoutBody, QuestionMarkInType, QuestionMarkInTypeSugg, SelfParamNotFirst,
StructLiteralBodyWithoutPath, StructLiteralBodyWithoutPathSugg, StructLiteralNeedingParens,
StructLiteralNeedingParensSugg, SuggAddMissingLetStmt, SuggEscapeIdentifier, SuggRemoveComma,
TernaryOperator, UnexpectedConstInGenericParam, UnexpectedConstParamDeclaration,
UnexpectedConstParamDeclarationSugg, UnmatchedAngleBrackets, UseEqInstead, WrapType,
IncorrectUseOfAwait, PatternMethodParamWithoutBody, QuestionMarkInType, QuestionMarkInTypeSugg,
SelfParamNotFirst, StructLiteralBodyWithoutPath, StructLiteralBodyWithoutPathSugg,
StructLiteralNeedingParens, StructLiteralNeedingParensSugg, SuggAddMissingLetStmt,
SuggEscapeIdentifier, SuggRemoveComma, TernaryOperator, UnexpectedConstInGenericParam,
UnexpectedConstParamDeclaration, UnexpectedConstParamDeclarationSugg, UnmatchedAngleBrackets,
UseEqInstead, WrapType,
};

use crate::fluent_generated as fluent;
@ -1994,56 +1994,6 @@ impl<'a> Parser<'a> {
}
}

/// Recovers a situation like `for ( $pat in $expr )`
/// and suggest writing `for $pat in $expr` instead.
///
/// This should be called before parsing the `$block`.
pub(super) fn recover_parens_around_for_head(
&mut self,
pat: P<Pat>,
begin_paren: Option<Span>,
) -> P<Pat> {
match (&self.token.kind, begin_paren) {
(token::CloseDelim(Delimiter::Parenthesis), Some(begin_par_sp)) => {
self.bump();

let sm = self.sess.source_map();
let left = begin_par_sp;
let right = self.prev_token.span;
let left_snippet = if let Ok(snip) = sm.span_to_prev_source(left)
&& !snip.ends_with(' ')
{
" ".to_string()
} else {
"".to_string()
};

let right_snippet = if let Ok(snip) = sm.span_to_next_source(right)
&& !snip.starts_with(' ')
{
" ".to_string()
} else {
"".to_string()
};

self.sess.emit_err(ParenthesesInForHead {
span: vec![left, right],
// With e.g. `for (x) in y)` this would replace `(x) in y)`
// with `x) in y)` which is syntactically invalid.
// However, this is prevented before we get here.
sugg: ParenthesesInForHeadSugg { left, right, left_snippet, right_snippet },
});

// Unwrap `(pat)` into `pat` to avoid the `unused_parens` lint.
pat.and_then(|pat| match pat.kind {
PatKind::Paren(pat) => pat,
_ => P(pat),
})
}
_ => pat,
}
}

pub(super) fn recover_seq_parse_error(
&mut self,
delim: Delimiter,

@ -1,3 +1,4 @@
// ignore-tidy-filelength
use super::diagnostics::SnapshotParser;
use super::pat::{CommaRecoveryMode, Expected, RecoverColon, RecoverComma};
use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
@ -9,7 +10,7 @@ use super::{
use crate::errors;
use crate::maybe_recover_from_interpolated_ty_qpath;
use ast::mut_visit::{noop_visit_expr, MutVisitor};
use ast::{GenBlockKind, Path, PathSegment};
use ast::{GenBlockKind, Pat, Path, PathSegment};
use core::mem;
use rustc_ast::ptr::P;
use rustc_ast::token::{self, Delimiter, Token, TokenKind};
@ -2477,7 +2478,7 @@ impl<'a> Parser<'a> {
let mut cond =
self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL | Restrictions::ALLOW_LET, None)?;

CondChecker { parser: self, forbid_let_reason: None }.visit_expr(&mut cond);
CondChecker::new(self).visit_expr(&mut cond);

if let ExprKind::Let(_, _, _, None) = cond.kind {
// Remove the last feature gating of a `let` expression since it's stable.
@ -2493,6 +2494,8 @@ impl<'a> Parser<'a> {
let err = errors::ExpectedExpressionFoundLet {
span: self.token.span,
reason: ForbiddenLetReason::OtherForbidden,
missing_let: None,
comparison: None,
};
if self.prev_token.kind == token::BinOp(token::Or) {
// This was part of a closure; let that part of the parser recover.
@ -2606,30 +2609,72 @@ impl<'a> Parser<'a> {
}
}

/// Parses `for <src_pat> in <src_expr> <src_loop_block>` (`for` token already eaten).
fn parse_expr_for(&mut self, opt_label: Option<Label>, lo: Span) -> PResult<'a, P<Expr>> {
// Record whether we are about to parse `for (`.
// This is used below for recovery in case of `for ( $stuff ) $block`
// in which case we will suggest `for $stuff $block`.
let begin_paren = match self.token.kind {
token::OpenDelim(Delimiter::Parenthesis) => Some(self.token.span),
_ => None,
fn parse_for_head(&mut self) -> PResult<'a, (P<Pat>, P<Expr>)> {
let begin_paren = if self.token.kind == token::OpenDelim(Delimiter::Parenthesis) {
// Record whether we are about to parse `for (`.
// This is used below for recovery in case of `for ( $stuff ) $block`
// in which case we will suggest `for $stuff $block`.
let start_span = self.token.span;
let left = self.prev_token.span.between(self.look_ahead(1, |t| t.span));
Some((start_span, left))
} else {
None
};
// Try to parse the pattern `for ($PAT) in $EXPR`.
let pat = match (
self.parse_pat_allow_top_alt(
None,
RecoverComma::Yes,
RecoverColon::Yes,
CommaRecoveryMode::LikelyTuple,
),
begin_paren,
) {
(Ok(pat), _) => pat, // Happy path.
(Err(err), Some((start_span, left))) if self.eat_keyword(kw::In) => {
// We know for sure we have seen `for ($SOMETHING in`. In the happy path this would
// happen right before the return of this method.
let expr = match self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None) {
Ok(expr) => expr,
Err(expr_err) => {
// We don't know what followed the `in`, so cancel and bubble up the
// original error.
expr_err.cancel();
return Err(err);
}
};
return if self.token.kind == token::CloseDelim(Delimiter::Parenthesis) {
// We know for sure we have seen `for ($SOMETHING in $EXPR)`, so we recover the
// parser state and emit a targeted suggestion.
let span = vec![start_span, self.token.span];
let right = self.prev_token.span.between(self.look_ahead(1, |t| t.span));
self.bump(); // )
err.cancel();
self.sess.emit_err(errors::ParenthesesInForHead {
span,
// With e.g. `for (x) in y)` this would replace `(x) in y)`
// with `x) in y)` which is syntactically invalid.
// However, this is prevented before we get here.
sugg: errors::ParenthesesInForHeadSugg { left, right },
});
Ok((self.mk_pat(start_span.to(right), ast::PatKind::Wild), expr))
} else {
Err(err) // Some other error, bubble up.
};
}
(Err(err), _) => return Err(err), // Some other error, bubble up.
};

let pat = self.parse_pat_allow_top_alt(
None,
RecoverComma::Yes,
RecoverColon::Yes,
CommaRecoveryMode::LikelyTuple,
)?;
if !self.eat_keyword(kw::In) {
self.error_missing_in_for_loop();
}
self.check_for_for_in_in_typo(self.prev_token.span);
let expr = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?;
Ok((pat, expr))
}

let pat = self.recover_parens_around_for_head(pat, begin_paren);

/// Parses `for <src_pat> in <src_expr> <src_loop_block>` (`for` token already eaten).
fn parse_expr_for(&mut self, opt_label: Option<Label>, lo: Span) -> PResult<'a, P<Expr>> {
let (pat, expr) = self.parse_for_head()?;
// Recover from missing expression in `for` loop
if matches!(expr.kind, ExprKind::Block(..))
&& !matches!(self.token.kind, token::OpenDelim(Delimiter::Brace))
@ -2850,47 +2895,10 @@ impl<'a> Parser<'a> {
}

pub(super) fn parse_arm(&mut self) -> PResult<'a, Arm> {
// Used to check the `let_chains` and `if_let_guard` features mostly by scanning
// `&&` tokens.
fn check_let_expr(expr: &Expr) -> (bool, bool) {
match &expr.kind {
ExprKind::Binary(BinOp { node: BinOpKind::And, .. }, lhs, rhs) => {
let lhs_rslt = check_let_expr(lhs);
let rhs_rslt = check_let_expr(rhs);
(lhs_rslt.0 || rhs_rslt.0, false)
}
ExprKind::Let(..) => (true, true),
_ => (false, true),
}
}
let attrs = self.parse_outer_attributes()?;
self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
let lo = this.token.span;
let pat = this.parse_pat_allow_top_alt(
None,
RecoverComma::Yes,
RecoverColon::Yes,
CommaRecoveryMode::EitherTupleOrPipe,
)?;
let guard = if this.eat_keyword(kw::If) {
let if_span = this.prev_token.span;
let mut cond = this.parse_match_guard_condition()?;

CondChecker { parser: this, forbid_let_reason: None }.visit_expr(&mut cond);

let (has_let_expr, does_not_have_bin_op) = check_let_expr(&cond);
if has_let_expr {
if does_not_have_bin_op {
// Remove the last feature gating of a `let` expression since it's stable.
this.sess.gated_spans.ungate_last(sym::let_chains, cond.span);
}
let span = if_span.to(cond.span);
this.sess.gated_spans.gate(sym::if_let_guard, span);
}
Some(cond)
} else {
None
};
let (pat, guard) = this.parse_match_arm_pat_and_guard()?;
let arrow_span = this.token.span;
if let Err(mut err) = this.expect(&token::FatArrow) {
// We might have a `=>` -> `=` or `->` typo (issue #89396).
@ -3020,6 +3028,90 @@ impl<'a> Parser<'a> {
})
}

fn parse_match_arm_guard(&mut self) -> PResult<'a, Option<P<Expr>>> {
// Used to check the `let_chains` and `if_let_guard` features mostly by scanning
// `&&` tokens.
fn check_let_expr(expr: &Expr) -> (bool, bool) {
match &expr.kind {
ExprKind::Binary(BinOp { node: BinOpKind::And, .. }, lhs, rhs) => {
let lhs_rslt = check_let_expr(lhs);
let rhs_rslt = check_let_expr(rhs);
(lhs_rslt.0 || rhs_rslt.0, false)
}
ExprKind::Let(..) => (true, true),
_ => (false, true),
}
}
if !self.eat_keyword(kw::If) {
// No match arm guard present.
return Ok(None);
}

let if_span = self.prev_token.span;
let mut cond = self.parse_match_guard_condition()?;

CondChecker::new(self).visit_expr(&mut cond);

let (has_let_expr, does_not_have_bin_op) = check_let_expr(&cond);
if has_let_expr {
if does_not_have_bin_op {
// Remove the last feature gating of a `let` expression since it's stable.
self.sess.gated_spans.ungate_last(sym::let_chains, cond.span);
}
let span = if_span.to(cond.span);
self.sess.gated_spans.gate(sym::if_let_guard, span);
}
Ok(Some(cond))
}

fn parse_match_arm_pat_and_guard(&mut self) -> PResult<'a, (P<Pat>, Option<P<Expr>>)> {
if self.token.kind == token::OpenDelim(Delimiter::Parenthesis) {
// Detect and recover from `($pat if $cond) => $arm`.
let left = self.token.span;
match self.parse_pat_allow_top_alt(
None,
RecoverComma::Yes,
RecoverColon::Yes,
CommaRecoveryMode::EitherTupleOrPipe,
) {
Ok(pat) => Ok((pat, self.parse_match_arm_guard()?)),
Err(err)
if let prev_sp = self.prev_token.span
&& let true = self.eat_keyword(kw::If) =>
{
// We know for certain we've found `($pat if` so far.
let mut cond = match self.parse_match_guard_condition() {
Ok(cond) => cond,
Err(cond_err) => {
cond_err.cancel();
return Err(err);
}
};
err.cancel();
CondChecker::new(self).visit_expr(&mut cond);
self.eat_to_tokens(&[&token::CloseDelim(Delimiter::Parenthesis)]);
self.expect(&token::CloseDelim(Delimiter::Parenthesis))?;
let right = self.prev_token.span;
self.sess.emit_err(errors::ParenthesesInMatchPat {
span: vec![left, right],
sugg: errors::ParenthesesInMatchPatSugg { left, right },
});
Ok((self.mk_pat(left.to(prev_sp), ast::PatKind::Wild), Some(cond)))
}
Err(err) => Err(err),
}
} else {
// Regular parser flow:
let pat = self.parse_pat_allow_top_alt(
None,
RecoverComma::Yes,
RecoverColon::Yes,
CommaRecoveryMode::EitherTupleOrPipe,
)?;
Ok((pat, self.parse_match_arm_guard()?))
}
}
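A small sketch (hypothetical match, not from the diff) of the `($pat if $cond) => ...` shape that `parse_match_arm_pat_and_guard` now detects, alongside the accepted form its suggestion produces:

    fn main() {
        let n = 3;
        match n {
            // `(x if x > 2) => ...` is recovered with "unexpected parentheses
            // surrounding `match` arm pattern" and a suggestion to drop them.
            x if x > 2 => println!("big: {x}"),
            _ => println!("small"),
        }
    }
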
fn parse_match_guard_condition(&mut self) -> PResult<'a, P<Expr>> {
self.parse_expr_res(Restrictions::ALLOW_LET | Restrictions::IN_IF_GUARD, None).map_err(
|mut err| {
@ -3552,6 +3644,14 @@ pub(crate) enum ForbiddenLetReason {
struct CondChecker<'a> {
parser: &'a Parser<'a>,
forbid_let_reason: Option<ForbiddenLetReason>,
missing_let: Option<errors::MaybeMissingLet>,
comparison: Option<errors::MaybeComparison>,
}

impl<'a> CondChecker<'a> {
fn new(parser: &'a Parser<'a>) -> Self {
CondChecker { parser, forbid_let_reason: None, missing_let: None, comparison: None }
}
}

impl MutVisitor for CondChecker<'_> {
@ -3562,11 +3662,13 @@ impl MutVisitor for CondChecker<'_> {
match e.kind {
ExprKind::Let(_, _, _, ref mut is_recovered @ None) => {
if let Some(reason) = self.forbid_let_reason {
*is_recovered = Some(
self.parser
.sess
.emit_err(errors::ExpectedExpressionFoundLet { span, reason }),
);
*is_recovered =
Some(self.parser.sess.emit_err(errors::ExpectedExpressionFoundLet {
span,
reason,
missing_let: self.missing_let,
comparison: self.comparison,
}));
} else {
self.parser.sess.gated_spans.gate(sym::let_chains, span);
}
@ -3590,9 +3692,28 @@ impl MutVisitor for CondChecker<'_> {
noop_visit_expr(e, self);
self.forbid_let_reason = forbid_let_reason;
}
ExprKind::Assign(ref lhs, _, span) => {
let forbid_let_reason = self.forbid_let_reason;
self.forbid_let_reason = Some(OtherForbidden);
let missing_let = self.missing_let;
if let ExprKind::Binary(_, _, rhs) = &lhs.kind
&& let ExprKind::Path(_, _)
| ExprKind::Struct(_)
| ExprKind::Call(_, _)
| ExprKind::Array(_) = rhs.kind
{
self.missing_let =
Some(errors::MaybeMissingLet { span: rhs.span.shrink_to_lo() });
}
let comparison = self.comparison;
self.comparison = Some(errors::MaybeComparison { span: span.shrink_to_hi() });
noop_visit_expr(e, self);
self.forbid_let_reason = forbid_let_reason;
self.missing_let = missing_let;
self.comparison = comparison;
}
ExprKind::Unary(_, _)
| ExprKind::Await(_, _)
| ExprKind::Assign(_, _, _)
| ExprKind::AssignOp(_, _, _)
| ExprKind::Range(_, _, _)
| ExprKind::Try(_)
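To make the new `Assign` arm concrete: it records candidate suggestions whenever a condition turns out to be an assignment whose left-hand side is itself a binary expression ending in a path, call, struct, or array. A hedged sketch (made-up identifiers) of such a condition and a form that compiles:

    fn compute() -> i32 {
        41
    }

    fn main() {
        let flag = true;
        // `if flag && value = compute() { ... }` parses as `(flag && value) = compute()`,
        // the shape matched above; the recorded suggestions are to insert `let`
        // before `value` (continue the let-chain) or to write `==` instead of `=`.
        if flag && compute() == 41 {
            println!("comparison form compiles");
        }
    }
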
@ -1415,8 +1415,8 @@ impl<'a> Parser<'a> {
self.bump();
(thin_vec![], false)
} else {
self.parse_delim_comma_seq(Delimiter::Brace, |p| p.parse_enum_variant()).map_err(
|mut err| {
self.parse_delim_comma_seq(Delimiter::Brace, |p| p.parse_enum_variant(id.span))
.map_err(|mut err| {
err.span_label(id.span, "while parsing this enum");
if self.token == token::Colon {
let snapshot = self.create_snapshot_for_diagnostic();
@ -1436,20 +1436,22 @@ impl<'a> Parser<'a> {
}
self.restore_snapshot(snapshot);
}
self.recover_stmt();
self.eat_to_tokens(&[&token::CloseDelim(Delimiter::Brace)]);
self.bump(); // }
err
},
)?
})?
};

let enum_definition = EnumDef { variants: variants.into_iter().flatten().collect() };
Ok((id, ItemKind::Enum(enum_definition, generics)))
}

fn parse_enum_variant(&mut self) -> PResult<'a, Option<Variant>> {
fn parse_enum_variant(&mut self, span: Span) -> PResult<'a, Option<Variant>> {
self.recover_diff_marker();
let variant_attrs = self.parse_outer_attributes()?;
self.recover_diff_marker();
let help = "enum variants can be `Variant`, `Variant = <integer>`, \
`Variant(Type, ..., TypeN)` or `Variant { fields: Types }`";
self.collect_tokens_trailing_token(
variant_attrs,
ForceCollect::No,
@ -1476,10 +1478,39 @@ impl<'a> Parser<'a> {
let struct_def = if this.check(&token::OpenDelim(Delimiter::Brace)) {
// Parse a struct variant.
let (fields, recovered) =
this.parse_record_struct_body("struct", ident.span, false)?;
match this.parse_record_struct_body("struct", ident.span, false) {
Ok((fields, recovered)) => (fields, recovered),
Err(mut err) => {
if this.token == token::Colon {
// We handle `enum` to `struct` suggestion in the caller.
return Err(err);
}
this.eat_to_tokens(&[&token::CloseDelim(Delimiter::Brace)]);
this.bump(); // }
err.span_label(span, "while parsing this enum");
err.help(help);
err.emit();
(thin_vec![], true)
}
};
VariantData::Struct(fields, recovered)
} else if this.check(&token::OpenDelim(Delimiter::Parenthesis)) {
VariantData::Tuple(this.parse_tuple_struct_body()?, DUMMY_NODE_ID)
let body = match this.parse_tuple_struct_body() {
Ok(body) => body,
Err(mut err) => {
if this.token == token::Colon {
// We handle `enum` to `struct` suggestion in the caller.
return Err(err);
}
this.eat_to_tokens(&[&token::CloseDelim(Delimiter::Parenthesis)]);
this.bump(); // )
err.span_label(span, "while parsing this enum");
err.help(help);
err.emit();
thin_vec![]
}
};
VariantData::Tuple(body, DUMMY_NODE_ID)
} else {
VariantData::Unit(DUMMY_NODE_ID)
};
@ -1500,8 +1531,9 @@ impl<'a> Parser<'a> {

Ok((Some(vr), TrailingToken::MaybeComma))
},
).map_err(|mut err| {
err.help("enum variants can be `Variant`, `Variant = <integer>`, `Variant(Type, ..., TypeN)` or `Variant { fields: Types }`");
)
.map_err(|mut err| {
err.help(help);
err
})
}
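The `help` string reused throughout this recovery lists the four accepted variant shapes; a compiling sketch (illustrative enums) of each one:

    #[allow(dead_code)]
    enum Message {
        Quit,                    // `Variant`
        Write(String),           // `Variant(Type, ..., TypeN)`
        Move { x: i32, y: i32 }, // `Variant { fields: Types }`
    }

    #[allow(dead_code)]
    #[repr(u8)]
    enum Status {
        Ok = 0, // `Variant = <integer>`
        Error = 1,
    }

    fn main() {
        let _ = Message::Write(String::from("hi"));
        let _ = Status::Ok;
    }
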
@ -875,6 +875,9 @@ impl<'a> Parser<'a> {
if self.token == token::Colon {
// we will try to recover in `maybe_recover_struct_lit_bad_delims`
return Err(expect_err);
} else if let [token::CloseDelim(Delimiter::Parenthesis)] = kets
{
return Err(expect_err);
} else {
expect_err.emit();
break;

Some files were not shown because too many files have changed in this diff.