Mirror of https://github.com/rust-lang/rust.git (synced 2024-11-22 06:44:35 +00:00)
Auto merge of #97111 - JohnTitor:rollup-x3vjf6u, r=JohnTitor
Rollup of 7 pull requests

Successful merges:

 - #96329 (Add a couple tests for #90887 fixes)
 - #97009 (Allow `unused_macro_rules` in path tests)
 - #97075 (Add regression test for #81804)
 - #97079 (Change `Successors` to `impl Iterator<Item = BasicBlock>`)
 - #97080 (remove the `RelateResultCompare` trait)
 - #97093 (Migrate `maybe_recover_from_bad_type_plus` diagnostic)
 - #97102 (Update function pointer call error message)

Failed merges:

r? `@ghost`
`@rustbot` modify labels: rollup
This commit is contained in:
commit 3655175a75
@@ -199,7 +199,7 @@ impl<'tcx> OutOfScopePrecomputer<'_, 'tcx> {
             // Add successor BBs to the work list, if necessary.
             let bb_data = &self.body[bb];
             debug_assert!(hi == bb_data.statements.len());
-            for &succ_bb in bb_data.terminator().successors() {
+            for succ_bb in bb_data.terminator().successors() {
                 if !self.visited.insert(succ_bb) {
                     if succ_bb == location.block && first_lo > 0 {
                         // `succ_bb` has been seen before. If it wasn't
@@ -467,7 +467,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
                 block
                     .terminator()
                     .successors()
-                    .map(|bb| Location { statement_index: 0, block: *bb })
+                    .map(|bb| Location { statement_index: 0, block: bb })
                     .filter(|s| visited_locations.insert(*s))
                     .map(|s| {
                         if self.is_back_edge(location, s) {
@@ -526,7 +526,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> {
                 }
             } else {
                 for bb in block.terminator().successors() {
-                    let successor = Location { statement_index: 0, block: *bb };
+                    let successor = Location { statement_index: 0, block: bb };

                     if !visited_locations.contains(&successor)
                         && self.find_loop_head_dfs(successor, loop_head, visited_locations)
@@ -67,8 +67,8 @@ impl<'cx, 'tcx> UseFinder<'cx, 'tcx> {
                     block_data
                         .terminator()
                         .successors()
-                        .filter(|&bb| Some(&Some(*bb)) != block_data.terminator().unwind())
-                        .map(|&bb| Location { statement_index: 0, block: bb }),
+                        .filter(|&bb| Some(&Some(bb)) != block_data.terminator().unwind())
+                        .map(|bb| Location { statement_index: 0, block: bb }),
                 );
             }
         }
@@ -108,7 +108,7 @@ fn populate_polonius_move_facts(
                 // We are at the terminator of an init that has a panic path,
                 // and where the init should not happen on panic

-                for &successor in block_data.terminator().successors() {
+                for successor in block_data.terminator().successors() {
                     if body[successor].is_cleanup {
                         continue;
                     }
@@ -328,7 +328,7 @@ pub fn cleanup_kinds(mir: &mir::Body<'_>) -> IndexVec<mir::BasicBlock, CleanupKi
            bb, data, result[bb], funclet
        );

-        for &succ in data.terminator().successors() {
+        for succ in data.terminator().successors() {
            let kind = result[succ];
            debug!("cleanup_kinds: propagating {:?} to {:?}/{:?}", funclet, succ, kind);
            match kind {
@@ -89,7 +89,10 @@ impl<'tcx> NonConstOp<'tcx> for FnCallIndirect {
         ccx: &ConstCx<'_, 'tcx>,
         span: Span,
     ) -> DiagnosticBuilder<'tcx, ErrorGuaranteed> {
-        ccx.tcx.sess.struct_span_err(span, "function pointers are not allowed in const fn")
+        ccx.tcx.sess.struct_span_err(
+            span,
+            &format!("function pointer calls are not allowed in {}s", ccx.const_kind()),
+        )
     }
 }

@@ -5,3 +5,12 @@ parser-struct-literal-body-without-path =
 parser-maybe-report-ambiguous-plus =
     ambiguous `+` in a type
     .suggestion = use parentheses to disambiguate
+
+parser-maybe-recover-from-bad-type-plus =
+    expected a path on the left-hand side of `+`, not `{$ty}`
+
+parser-add-paren = try adding parentheses
+
+parser-forgot-paren = perhaps you forgot parentheses?
+
+parser-expect-path = expected a path
@@ -776,21 +776,6 @@ pub trait ConstEquateRelation<'tcx>: TypeRelation<'tcx> {
     fn const_equate_obligation(&mut self, a: ty::Const<'tcx>, b: ty::Const<'tcx>);
 }

-pub trait RelateResultCompare<'tcx, T> {
-    fn compare<F>(&self, t: T, f: F) -> RelateResult<'tcx, T>
-    where
-        F: FnOnce() -> TypeError<'tcx>;
-}
-
-impl<'tcx, T: Clone + PartialEq> RelateResultCompare<'tcx, T> for RelateResult<'tcx, T> {
-    fn compare<F>(&self, t: T, f: F) -> RelateResult<'tcx, T>
-    where
-        F: FnOnce() -> TypeError<'tcx>,
-    {
-        self.clone().and_then(|s| if s == t { self.clone() } else { Err(f()) })
-    }
-}
-
 pub fn const_unification_error<'tcx>(
     a_is_expected: bool,
     (a, b): (ty::Const<'tcx>, ty::Const<'tcx>),
@@ -859,7 +859,7 @@ where

     delegate: &'me mut D,

-    /// After we generalize this type, we are going to relative it to
+    /// After we generalize this type, we are going to relate it to
     /// some other type. What will be the variance at this point?
     ambient_variance: ty::Variance,

@@ -24,7 +24,7 @@ pub fn mir_fn_to_generic_graph<'tcx>(tcx: TyCtxt<'tcx>, body: &Body<'_>) -> Grap
         let terminator = body[source].terminator();
         let labels = terminator.kind.fmt_successor_labels();

-        for (&target, label) in terminator.successors().zip(labels) {
+        for (target, label) in terminator.successors().zip(labels) {
             let src = node(def_id, source);
             let trg = node(def_id, target);
             edges.push(Edge::new(src, trg, label.to_string()));
@@ -1355,10 +1355,7 @@ pub enum InlineAsmOperand<'tcx> {
 /// Type for MIR `Assert` terminator error messages.
 pub type AssertMessage<'tcx> = AssertKind<Operand<'tcx>>;

-// FIXME: Change `Successors` to `impl Iterator<Item = BasicBlock>`.
-#[allow(rustc::pass_by_value)]
-pub type Successors<'a> =
-    iter::Chain<option::IntoIter<&'a BasicBlock>, slice::Iter<'a, BasicBlock>>;
+pub type Successors<'a> = impl Iterator<Item = BasicBlock> + 'a;
 pub type SuccessorsMut<'a> =
     iter::Chain<option::IntoIter<&'a mut BasicBlock>, slice::IterMut<'a, BasicBlock>>;

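Most of the one-line edits elsewhere in this diff are mechanical fallout from this alias change: `successors()` now yields `BasicBlock` by value rather than `&BasicBlock`, so call sites drop the `&`/`*` patterns and the `.copied()`/`.cloned()` adapters. A minimal standalone sketch of the call-site difference (the `BasicBlock` newtype below is a stand-in for illustration, not the rustc type):

```rust
// Hypothetical stand-in for rustc's `BasicBlock` index type; it is `Copy`,
// which is what makes yielding it by value cheap and convenient.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
struct BasicBlock(u32);

// Before: the old `Successors<'a>` alias was an iterator over `&BasicBlock`.
fn successors_by_ref(blocks: &[BasicBlock]) -> impl Iterator<Item = &BasicBlock> + '_ {
    blocks.iter()
}

// After: the new alias is `impl Iterator<Item = BasicBlock> + 'a`, i.e. by value.
fn successors_by_value(blocks: &[BasicBlock]) -> impl Iterator<Item = BasicBlock> + '_ {
    blocks.iter().copied()
}

fn main() {
    let blocks = [BasicBlock(0), BasicBlock(1)];

    // Old call-site shape: destructure the reference (or write `*bb` / `.copied()`).
    for &bb in successors_by_ref(&blocks) {
        println!("{:?}", bb);
    }

    // New call-site shape: bind the block directly.
    for bb in successors_by_value(&blocks) {
        println!("{:?}", bb);
    }
}
```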
@@ -3434,13 +3431,13 @@ impl<'tcx> graph::WithStartNode for Body<'tcx> {
 impl<'tcx> graph::WithSuccessors for Body<'tcx> {
     #[inline]
     fn successors(&self, node: Self::Node) -> <Self as GraphSuccessors<'_>>::Iter {
-        self.basic_blocks[node].terminator().successors().cloned()
+        self.basic_blocks[node].terminator().successors()
     }
 }

 impl<'a, 'b> graph::GraphSuccessors<'b> for Body<'a> {
     type Item = BasicBlock;
-    type Iter = iter::Cloned<Successors<'b>>;
+    type Iter = Successors<'b>;
 }

 impl<'tcx, 'graph> graph::GraphPredecessors<'graph> for Body<'tcx> {
@@ -166,9 +166,7 @@ impl<'tcx> MirPatch<'tcx> {
                 // get terminator's targets and apply the statement to all of them.
                 if loc.statement_index > body[loc.block].statements.len() {
                     let term = body[loc.block].terminator();
-                    let successors = term.successors().clone();
-
-                    for i in successors {
+                    for i in term.successors() {
                         stmts_and_targets
                             .push((Statement { source_info, kind: stmt.clone() }, i.clone()));
                     }
@@ -43,7 +43,7 @@ impl PredecessorCache {
         let mut preds = IndexVec::from_elem(SmallVec::new(), basic_blocks);
         for (bb, data) in basic_blocks.iter_enumerated() {
             if let Some(term) = &data.terminator {
-                for &succ in term.successors() {
+                for succ in term.successors() {
                     preds[succ].push(bb);
                 }
             }
@@ -416,32 +416,36 @@ impl<'tcx> TerminatorKind<'tcx> {
             | Return
             | Unreachable
             | Call { destination: None, cleanup: None, .. }
-            | InlineAsm { destination: None, cleanup: None, .. } => None.into_iter().chain(&[]),
-            Goto { target: ref t }
-            | Call { destination: None, cleanup: Some(ref t), .. }
-            | Call { destination: Some((_, ref t)), cleanup: None, .. }
-            | Yield { resume: ref t, drop: None, .. }
-            | DropAndReplace { target: ref t, unwind: None, .. }
-            | Drop { target: ref t, unwind: None, .. }
-            | Assert { target: ref t, cleanup: None, .. }
-            | FalseUnwind { real_target: ref t, unwind: None }
-            | InlineAsm { destination: Some(ref t), cleanup: None, .. }
-            | InlineAsm { destination: None, cleanup: Some(ref t), .. } => {
-                Some(t).into_iter().chain(&[])
+            | InlineAsm { destination: None, cleanup: None, .. } => {
+                None.into_iter().chain((&[]).into_iter().copied())
             }
-            Call { destination: Some((_, ref t)), cleanup: Some(ref u), .. }
-            | Yield { resume: ref t, drop: Some(ref u), .. }
-            | DropAndReplace { target: ref t, unwind: Some(ref u), .. }
-            | Drop { target: ref t, unwind: Some(ref u), .. }
-            | Assert { target: ref t, cleanup: Some(ref u), .. }
-            | FalseUnwind { real_target: ref t, unwind: Some(ref u) }
-            | InlineAsm { destination: Some(ref t), cleanup: Some(ref u), .. } => {
-                Some(t).into_iter().chain(slice::from_ref(u))
+            Goto { target: t }
+            | Call { destination: None, cleanup: Some(t), .. }
+            | Call { destination: Some((_, t)), cleanup: None, .. }
+            | Yield { resume: t, drop: None, .. }
+            | DropAndReplace { target: t, unwind: None, .. }
+            | Drop { target: t, unwind: None, .. }
+            | Assert { target: t, cleanup: None, .. }
+            | FalseUnwind { real_target: t, unwind: None }
+            | InlineAsm { destination: Some(t), cleanup: None, .. }
+            | InlineAsm { destination: None, cleanup: Some(t), .. } => {
+                Some(t).into_iter().chain((&[]).into_iter().copied())
             }
-            SwitchInt { ref targets, .. } => None.into_iter().chain(&targets.targets),
-            FalseEdge { ref real_target, ref imaginary_target } => {
-                Some(real_target).into_iter().chain(slice::from_ref(imaginary_target))
+            Call { destination: Some((_, t)), cleanup: Some(ref u), .. }
+            | Yield { resume: t, drop: Some(ref u), .. }
+            | DropAndReplace { target: t, unwind: Some(ref u), .. }
+            | Drop { target: t, unwind: Some(ref u), .. }
+            | Assert { target: t, cleanup: Some(ref u), .. }
+            | FalseUnwind { real_target: t, unwind: Some(ref u) }
+            | InlineAsm { destination: Some(t), cleanup: Some(ref u), .. } => {
+                Some(t).into_iter().chain(slice::from_ref(u).into_iter().copied())
             }
+            SwitchInt { ref targets, .. } => {
+                None.into_iter().chain(targets.targets.iter().copied())
+            }
+            FalseEdge { real_target, ref imaginary_target } => Some(real_target)
+                .into_iter()
+                .chain(slice::from_ref(imaginary_target).into_iter().copied()),
         }
     }

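One non-obvious detail in the rewritten match above: because `Successors<'a>` is a single opaque alias, every arm has to evaluate to the same concrete iterator type, which is why even the zero- and one-successor arms are padded out with `chain((&[]).into_iter().copied())`. A standalone sketch of that shape (illustrative only; `Term` and its variants are invented here, not rustc's `TerminatorKind`):

```rust
// Invented stand-in, not rustc's `TerminatorKind`. All match arms must
// produce the same concrete iterator type, so arms with zero or one
// successor are padded with an (empty) slice iterator plus `.copied()`
// to line up with the arm that chains a real slice.
enum Term {
    Return,                             // no successors
    Goto(u32),                          // exactly one successor
    Call { target: u32, cleanup: u32 }, // one successor plus an unwind target
}

fn successors(term: &Term) -> impl Iterator<Item = u32> + '_ {
    match *term {
        Term::Return => None.into_iter().chain((&[]).into_iter().copied()),
        Term::Goto(t) => Some(t).into_iter().chain((&[]).into_iter().copied()),
        Term::Call { target, ref cleanup } => {
            Some(target).into_iter().chain(std::slice::from_ref(cleanup).into_iter().copied())
        }
    }
}

fn main() {
    assert_eq!(successors(&Term::Return).count(), 0);
    assert_eq!(successors(&Term::Goto(3)).collect::<Vec<_>>(), [3]);
    assert_eq!(
        successors(&Term::Call { target: 1, cleanup: 2 }).collect::<Vec<_>>(),
        [1, 2]
    );
}
```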
@@ -180,7 +180,7 @@ impl<'a, 'tcx> Postorder<'a, 'tcx> {
         // two iterations yield `C` and finally `A` for a final traversal of [E, D, B, C, A]
         loop {
             let bb = if let Some(&mut (_, ref mut iter)) = self.visit_stack.last_mut() {
-                if let Some(&bb) = iter.next() {
+                if let Some(bb) = iter.next() {
                     bb
                 } else {
                     break;
@@ -125,7 +125,7 @@ where
     }

     fn target(&self, edge: &Self::Edge) -> Self::Node {
-        self.body[edge.source].terminator().successors().nth(edge.index).copied().unwrap()
+        self.body[edge.source].terminator().successors().nth(edge.index).unwrap()
     }
 }

@@ -701,7 +701,7 @@ pub(super) fn dump_coverage_graphviz<'tcx>(
             edge_labels.retain(|label| label != "unreachable");
             let edge_counters = from_terminator
                 .successors()
-                .map(|&successor_bb| graphviz_data.get_edge_counter(from_bcb, successor_bb));
+                .map(|successor_bb| graphviz_data.get_edge_counter(from_bcb, successor_bb));
             iter::zip(&edge_labels, edge_counters)
                 .map(|(label, some_counter)| {
                     if let Some(counter) = some_counter {
@@ -484,17 +484,17 @@ fn bcb_filtered_successors<'a, 'tcx>(
     body: &'tcx &'a mir::Body<'tcx>,
     term_kind: &'tcx TerminatorKind<'tcx>,
 ) -> Box<dyn Iterator<Item = BasicBlock> + 'a> {
-    let mut successors = term_kind.successors();
     Box::new(
         match &term_kind {
             // SwitchInt successors are never unwind, and all of them should be traversed.
-            TerminatorKind::SwitchInt { .. } => successors,
+            TerminatorKind::SwitchInt { ref targets, .. } => {
+                None.into_iter().chain(targets.all_targets().into_iter().copied())
+            }
             // For all other kinds, return only the first successor, if any, and ignore unwinds.
             // NOTE: `chain(&[])` is required to coerce the `option::iter` (from
             // `next().into_iter()`) into the `mir::Successors` aliased type.
-            _ => successors.next().into_iter().chain(&[]),
+            _ => term_kind.successors().next().into_iter().chain((&[]).into_iter().copied()),
         }
-        .copied()
         .filter(move |&successor| body[successor].terminator().kind != TerminatorKind::Unreachable),
     )
 }
@@ -449,7 +449,7 @@ impl<'tcx> Inliner<'tcx> {
                 }

                 if !is_drop {
-                    for &succ in term.successors() {
+                    for succ in term.successors() {
                         work_list.push(succ);
                     }
                 }
@@ -65,7 +65,7 @@ impl RemoveNoopLandingPads {
             | TerminatorKind::SwitchInt { .. }
             | TerminatorKind::FalseEdge { .. }
             | TerminatorKind::FalseUnwind { .. } => {
-                terminator.successors().all(|&succ| nop_landing_pads.contains(succ))
+                terminator.successors().all(|succ| nop_landing_pads.contains(succ))
             }
             TerminatorKind::GeneratorDrop
             | TerminatorKind::Yield { .. }
@@ -81,7 +81,7 @@ impl<'a, 'tcx> CfgSimplifier<'a, 'tcx> {

         for (_, data) in traversal::preorder(body) {
             if let Some(ref term) = data.terminator {
-                for &tgt in term.successors() {
+                for tgt in term.successors() {
                     pred_count[tgt] += 1;
                 }
             }
@@ -235,8 +235,8 @@ impl<'a, 'tcx> CfgSimplifier<'a, 'tcx> {
         };

         let first_succ = {
-            if let Some(&first_succ) = terminator.successors().next() {
-                if terminator.successors().all(|s| *s == first_succ) {
+            if let Some(first_succ) = terminator.successors().next() {
+                if terminator.successors().all(|s| s == first_succ) {
                     let count = terminator.successors().count();
                     self.pred_count[first_succ] -= (count - 1) as u32;
                     first_succ
@@ -21,7 +21,7 @@ use rustc_errors::{pluralize, struct_span_err, Diagnostic, EmissionGuarantee, Er
 use rustc_errors::{
     Applicability, DiagnosticBuilder, DiagnosticMessage, Handler, MultiSpan, PResult,
 };
-use rustc_macros::SessionDiagnostic;
+use rustc_macros::{SessionDiagnostic, SessionSubdiagnostic};
 use rustc_span::source_map::Spanned;
 use rustc_span::symbol::{kw, Ident};
 use rustc_span::{Span, SpanSnippetError, DUMMY_SP};
@@ -252,6 +252,40 @@ struct AmbiguousPlus {
     pub span: Span,
 }

+#[derive(SessionDiagnostic)]
+#[error(code = "E0178", slug = "parser-maybe-recover-from-bad-type-plus")]
+struct BadTypePlus<'a> {
+    pub ty: String,
+    #[primary_span]
+    pub span: Span,
+    #[subdiagnostic]
+    pub sub: BadTypePlusSub<'a>,
+}
+
+#[derive(SessionSubdiagnostic, Clone, Copy)]
+pub enum BadTypePlusSub<'a> {
+    #[suggestion(
+        slug = "parser-add-paren",
+        code = "{sum_with_parens}",
+        applicability = "machine-applicable"
+    )]
+    AddParen {
+        sum_with_parens: &'a str,
+        #[primary_span]
+        span: Span,
+    },
+    #[label(slug = "parser-forgot-paren")]
+    ForgotParen {
+        #[primary_span]
+        span: Span,
+    },
+    #[label(slug = "parser-expect-path")]
+    ExpectPath {
+        #[primary_span]
+        span: Span,
+    },
+}
+
 // SnapshotParser is used to create a snapshot of the parser
 // without causing duplicate errors being emitted when the `Parser`
 // is dropped.
@@ -1255,17 +1289,11 @@ impl<'a> Parser<'a> {
         let bounds = self.parse_generic_bounds(None)?;
         let sum_span = ty.span.to(self.prev_token.span);

-        let mut err = struct_span_err!(
-            self.sess.span_diagnostic,
-            sum_span,
-            E0178,
-            "expected a path on the left-hand side of `+`, not `{}`",
-            pprust::ty_to_string(ty)
-        );
+        let sum_with_parens: String;

-        match ty.kind {
+        let sub = match ty.kind {
             TyKind::Rptr(ref lifetime, ref mut_ty) => {
-                let sum_with_parens = pprust::to_string(|s| {
+                sum_with_parens = pprust::to_string(|s| {
                     s.s.word("&");
                     s.print_opt_lifetime(lifetime);
                     s.print_mutability(mut_ty.mutbl, false);
@@ -1274,21 +1302,15 @@ impl<'a> Parser<'a> {
                     s.print_type_bounds(" +", &bounds);
                     s.pclose()
                 });
-                err.span_suggestion(
-                    sum_span,
-                    "try adding parentheses",
-                    sum_with_parens,
-                    Applicability::MachineApplicable,
-                );
+
+                BadTypePlusSub::AddParen { sum_with_parens: &sum_with_parens, span: sum_span }
             }
-            TyKind::Ptr(..) | TyKind::BareFn(..) => {
-                err.span_label(sum_span, "perhaps you forgot parentheses?");
-            }
-            _ => {
-                err.span_label(sum_span, "expected a path");
-            }
-        }
-        err.emit();
+            TyKind::Ptr(..) | TyKind::BareFn(..) => BadTypePlusSub::ForgotParen { span: sum_span },
+            _ => BadTypePlusSub::ExpectPath { span: sum_span },
+        };
+
+        self.sess.emit_err(BadTypePlus { ty: pprust::ty_to_string(ty), span: sum_span, sub });
+
         Ok(())
     }

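The parser change above is the #97093 migration pattern in miniature: the imperative `struct_span_err!` + `span_suggestion`/`span_label` + `err.emit()` sequence becomes a plain data description of the error (`BadTypePlus` plus a `BadTypePlusSub` variant, wired to the fluent slugs added in parser.ftl earlier in this diff), handed to a single `emit_err` call. A standalone analogue of that shape (this does not use rustc's real `SessionDiagnostic` machinery; the types and the `emit_err` function below are invented for illustration):

```rust
// Describe the diagnostic as data, mirroring the structs added in this diff.
enum BadTypePlusSub {
    AddParen { sum_with_parens: String },
    ForgotParen,
    ExpectPath,
}

struct BadTypePlus {
    ty: String,
    sub: BadTypePlusSub,
}

// Invented stand-in for `Session::emit_err`: turn the data into user-facing text.
// In rustc, the derive macro instead looks the wording up via the fluent slugs.
fn emit_err(diag: BadTypePlus) {
    println!(
        "error[E0178]: expected a path on the left-hand side of `+`, not `{}`",
        diag.ty
    );
    match diag.sub {
        BadTypePlusSub::AddParen { sum_with_parens } => {
            println!("help: try adding parentheses: `{}`", sum_with_parens)
        }
        BadTypePlusSub::ForgotParen => println!("note: perhaps you forgot parentheses?"),
        BadTypePlusSub::ExpectPath => println!("note: expected a path"),
    }
}

fn main() {
    // The parser would fill these fields in from the parsed type and its span.
    emit_err(BadTypePlus {
        ty: "&Copy + 'static".to_string(),
        sub: BadTypePlusSub::AddParen { sum_with_parens: "&(Copy + 'static)".to_string() },
    });
}
```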
@@ -7,6 +7,7 @@ use crate::rc::Rc;
 use crate::sync::Arc;
 use core::hint::black_box;

+#[allow(unknown_lints, unused_macro_rules)]
 macro_rules! t (
     ($path:expr, iter: $iter:expr) => (
         {
src/test/ui/consts/const-fn-ptr.rs (new file, 16 lines)
@@ -0,0 +1,16 @@
+const fn make_fn_ptr() -> fn() {
+    || {}
+}
+
+static STAT: () = make_fn_ptr()();
+//~^ ERROR function pointer
+
+const CONST: () = make_fn_ptr()();
+//~^ ERROR function pointer
+
+const fn call_ptr() {
+    make_fn_ptr()();
+    //~^ ERROR function pointer
+}
+
+fn main() {}
src/test/ui/consts/const-fn-ptr.stderr (new file, 20 lines)
@@ -0,0 +1,20 @@
+error: function pointer calls are not allowed in statics
+  --> $DIR/const-fn-ptr.rs:5:19
+   |
+LL | static STAT: () = make_fn_ptr()();
+   |                   ^^^^^^^^^^^^^^^
+
+error: function pointer calls are not allowed in constants
+  --> $DIR/const-fn-ptr.rs:8:19
+   |
+LL | const CONST: () = make_fn_ptr()();
+   |                   ^^^^^^^^^^^^^^^
+
+error: function pointer calls are not allowed in constant functions
+  --> $DIR/const-fn-ptr.rs:12:5
+   |
+LL |     make_fn_ptr()();
+   |     ^^^^^^^^^^^^^^^
+
+error: aborting due to 3 previous errors
+
@@ -7,7 +7,7 @@ LL | const fn foo() { (||{})() }
    = note: closures need an RFC before allowed to be called in constant functions
    = note: calls in constant functions are limited to constant functions, tuple structs and tuple variants

-error: function pointers are not allowed in const fn
+error: function pointer calls are not allowed in constant functions
   --> $DIR/issue-56164.rs:7:5
    |
 LL |     input()
@@ -11,3 +11,20 @@ where
 fn main() {
     foo::<Vec<u32>>(vec![]);
 }
+
+mod another {
+    use std::ops::Deref;
+
+    fn test<T, TDeref>()
+    where
+        T: Deref<Target = TDeref>,
+        TDeref: ?Sized,
+        for<'a> &'a TDeref: IntoIterator,
+        for<'a> <&'a TDeref as IntoIterator>::IntoIter: Clone,
+    {
+    }
+
+    fn main() {
+        test::<Vec<u8>, _>();
+    }
+}
@@ -0,0 +1,31 @@
+// check-pass
+
+trait Variable<'a> {
+    type Type;
+}
+
+impl Variable<'_> for () {
+    type Type = ();
+}
+
+fn check<F, T>(_: F)
+where
+    F: Fn(T), // <- if removed, all fn_* then require type annotations
+    F: for<'a> Fn(<T as Variable<'a>>::Type),
+    T: for<'a> Variable<'a>,
+{
+}
+
+fn test(arg: impl Fn(())) {
+    fn fn_1(_: ()) {}
+    let fn_2 = |_: ()| ();
+    let fn_3 = |a| fn_1(a);
+    let fn_4 = arg;
+
+    check(fn_1); // Error
+    check(fn_2); // Ok
+    check(fn_3); // Ok
+    check(fn_4); // Error
+}
+
+fn main() {}
src/test/ui/parser/issue-81804.rs (new file, 9 lines)
@@ -0,0 +1,9 @@
+// error-pattern: this file contains an unclosed delimiter
+// error-pattern: this file contains an unclosed delimiter
+// error-pattern: expected pattern, found `=`
+// error-pattern: expected one of `)`, `,`, `->`, `where`, or `{`, found `]`
+// error-pattern: expected item, found `]`
+
+fn main() {}
+
+fn p([=(}
src/test/ui/parser/issue-81804.stderr (new file, 41 lines)
@@ -0,0 +1,41 @@
+error: this file contains an unclosed delimiter
+  --> $DIR/issue-81804.rs:9:11
+   |
+LL | fn p([=(}
+   |      -- ^
+   |      ||
+   |      |unclosed delimiter
+   |      unclosed delimiter
+
+error: this file contains an unclosed delimiter
+  --> $DIR/issue-81804.rs:9:11
+   |
+LL | fn p([=(}
+   |      -- ^
+   |      ||
+   |      |unclosed delimiter
+   |      unclosed delimiter
+
+error: expected pattern, found `=`
+  --> $DIR/issue-81804.rs:9:7
+   |
+LL | fn p([=(}
+   |       ^ expected pattern
+
+error: expected one of `)`, `,`, `->`, `where`, or `{`, found `]`
+  --> $DIR/issue-81804.rs:9:8
+   |
+LL | fn p([=(}
+   |      ^  -^
+   |      |  |
+   |      |  help: `)` may belong here
+   |      unclosed delimiter
+
+error: expected item, found `]`
+  --> $DIR/issue-81804.rs:9:11
+   |
+LL | fn p([=(}
+   |           ^ expected item
+
+error: aborting due to 5 previous errors
+
@@ -114,7 +114,7 @@ impl<'tcx> LateLintPass<'tcx> for RedundantClone {
             }

             // Give up on loops
-            if terminator.successors().any(|s| *s == bb) {
+            if terminator.successors().any(|s| s == bb) {
                 continue;
             }

@@ -440,7 +440,7 @@ fn visit_clone_usage(cloned: mir::Local, clone: mir::Local, mir: &mir::Body<'_>,
             // Short-circuit
             if (usage.cloned_used && usage.clone_consumed_or_mutated) ||
                 // Give up on loops
-                tdata.terminator().successors().any(|s| *s == bb)
+                tdata.terminator().successors().any(|s| s == bb)
             {
                 return CloneUsage {
                     cloned_used: true,