//! Validates the MIR to ensure that invariants are upheld.

use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_index::bit_set::BitSet;
use rustc_index::IndexVec;
use rustc_infer::traits::Reveal;
use rustc_middle::mir::interpret::Scalar;
use rustc_middle::mir::visit::{NonUseContext, PlaceContext, Visitor};
use rustc_middle::mir::*;
use rustc_middle::ty::{self, InstanceDef, ParamEnv, Ty, TyCtxt, TypeVisitableExt, Variance};
use rustc_target::abi::{Size, FIRST_VARIANT};
use rustc_target::spec::abi::Abi;

use crate::util::is_within_packed;
use crate::util::relate_types;

#[derive(Copy, Clone, Debug, PartialEq, Eq)]
enum EdgeKind {
    Unwind,
    Normal,
}

pub struct Validator {
    /// Describes at which point in the pipeline this validation is happening.
    pub when: String,
    /// The phase for which we are upholding the dialect. If the given phase forbids a specific
    /// element, this validator will now emit errors if that specific element is encountered.
    /// Note that phases that change the dialect cause all *following* phases to check the
    /// invariants of the new dialect. A phase that changes dialects never checks the new
    /// invariants itself.
    pub mir_phase: MirPhase,
}

impl<'tcx> MirPass<'tcx> for Validator {
    fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
        // FIXME(JakobDegen): These bodies are never instantiated during codegen anyway, so it's
        // not terribly important that they pass the validator. However, I think other passes
        // might still see them, in which case they might be surprised. It would probably be
        // better if we didn't put this through the MIR pipeline at all.
        if matches!(body.source.instance, InstanceDef::Intrinsic(..) | InstanceDef::Virtual(..)) {
            return;
        }
        let def_id = body.source.def_id();
        let mir_phase = self.mir_phase;
        let param_env = match mir_phase.reveal() {
            Reveal::UserFacing => tcx.param_env(def_id),
            Reveal::All => tcx.param_env_reveal_all_normalized(def_id),
        };

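        // Determine whether this body is permitted to unwind: before `AbortUnwindingCalls`
        // has run we conservatively assume that it can, afterwards we consult the ABI of
        // the function.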
        let can_unwind = if mir_phase <= MirPhase::Runtime(RuntimePhase::Initial) {
            // In this case `AbortUnwindingCalls` has not yet been executed.
            true
        } else if !tcx.def_kind(def_id).is_fn_like() {
            true
        } else {
            let body_ty = tcx.type_of(def_id).skip_binder();
            let body_abi = match body_ty.kind() {
                ty::FnDef(..) => body_ty.fn_sig(tcx).abi(),
                ty::Closure(..) => Abi::RustCall,
                ty::Coroutine(..) => Abi::Rust,
                _ => {
                    span_bug!(body.span, "unexpected body ty: {:?} phase {:?}", body_ty, mir_phase)
                }
            };

            ty::layout::fn_can_unwind(tcx, Some(def_id), body_abi)
        };

        let mut cfg_checker = CfgChecker {
            when: &self.when,
            body,
            tcx,
            mir_phase,
            unwind_edge_count: 0,
            reachable_blocks: traversal::reachable_as_bitset(body),
            value_cache: FxHashSet::default(),
            can_unwind,
        };
        cfg_checker.visit_body(body);
        cfg_checker.check_cleanup_control_flow();

        // Also run the TypeChecker.
        for (location, msg) in validate_types(tcx, self.mir_phase, param_env, body) {
            cfg_checker.fail(location, msg);
        }

        if let MirPhase::Runtime(_) = body.phase {
            if let ty::InstanceDef::Item(_) = body.source.instance {
                if body.has_free_regions() {
                    cfg_checker.fail(
                        Location::START,
                        format!("Free regions in optimized {} MIR", body.phase.name()),
                    );
                }
            }
        }
    }
}

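/// Checks the control-flow-graph invariants of a body: every edge must respect the
/// cleanup discipline, cleanup control flow must form a forest, and the locals and source
/// scopes that the MIR refers to must actually exist.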
struct CfgChecker<'a, 'tcx> {
    when: &'a str,
    body: &'a Body<'tcx>,
    tcx: TyCtxt<'tcx>,
    mir_phase: MirPhase,
    unwind_edge_count: usize,
    reachable_blocks: BitSet<BasicBlock>,
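    // Scratch set used to detect duplicate values in `SwitchInt` terminators.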
    value_cache: FxHashSet<u128>,
    // If `false`, then the MIR must not contain `UnwindAction::Continue` or
    // `TerminatorKind::Resume`.
    can_unwind: bool,
}

impl<'a, 'tcx> CfgChecker<'a, 'tcx> {
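    /// Records a validation failure for the statement or terminator at `location`.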
    #[track_caller]
    fn fail(&self, location: Location, msg: impl AsRef<str>) {
        let span = self.body.source_info(location).span;
        // We use `span_delayed_bug` as we might see broken MIR when other errors have already
        // occurred.
        self.tcx.dcx().span_delayed_bug(
            span,
            format!(
                "broken MIR in {:?} ({}) at {:?}:\n{}",
                self.body.source.instance,
                self.when,
                location,
                msg.as_ref()
            ),
        );
    }

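    /// Checks that an `edge_kind` edge from the block at `location` to `bb` is valid: the
    /// target block must exist, the start block must not acquire predecessors, and cleanup
    /// and non-cleanup blocks may only be connected in the permitted directions.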
    fn check_edge(&mut self, location: Location, bb: BasicBlock, edge_kind: EdgeKind) {
        if bb == START_BLOCK {
            self.fail(location, "start block must not have predecessors")
        }
        if let Some(bb) = self.body.basic_blocks.get(bb) {
            let src = self.body.basic_blocks.get(location.block).unwrap();
            match (src.is_cleanup, bb.is_cleanup, edge_kind) {
                // Non-cleanup blocks can jump to non-cleanup blocks along non-unwind edges
                (false, false, EdgeKind::Normal)
                // Cleanup blocks can jump to cleanup blocks along non-unwind edges
                | (true, true, EdgeKind::Normal) => {}
                // Non-cleanup blocks can jump to cleanup blocks along unwind edges
                (false, true, EdgeKind::Unwind) => {
                    self.unwind_edge_count += 1;
                }
                // All other jumps are invalid
                _ => {
                    self.fail(
                        location,
                        format!(
                            "{:?} edge to {:?} violates unwind invariants (cleanup {:?} -> {:?})",
                            edge_kind, bb, src.is_cleanup, bb.is_cleanup,
                        )
                    )
                }
            }
        } else {
            self.fail(location, format!("encountered jump to invalid basic block {bb:?}"))
        }
    }

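    /// Checks that cleanup control flow forms a forest: after contracting every chain of
    /// dominating cleanup blocks into a single node, each node may funnel into at most one
    /// other node, and the resulting parent relation must be acyclic.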
    fn check_cleanup_control_flow(&self) {
        if self.unwind_edge_count <= 1 {
            return;
        }
        let doms = self.body.basic_blocks.dominators();
        let mut post_contract_node = FxHashMap::default();
        // Reusing the allocation across invocations of the closure
        let mut dom_path = vec![];
        let mut get_post_contract_node = |mut bb| {
            let root = loop {
                if let Some(root) = post_contract_node.get(&bb) {
                    break *root;
                }
                let parent = doms.immediate_dominator(bb).unwrap();
                dom_path.push(bb);
                if !self.body.basic_blocks[parent].is_cleanup {
                    break bb;
                }
                bb = parent;
            };
            for bb in dom_path.drain(..) {
                post_contract_node.insert(bb, root);
            }
            root
        };

        let mut parent = IndexVec::from_elem(None, &self.body.basic_blocks);
        for (bb, bb_data) in self.body.basic_blocks.iter_enumerated() {
            if !bb_data.is_cleanup || !self.reachable_blocks.contains(bb) {
                continue;
            }
            let bb = get_post_contract_node(bb);
            for s in bb_data.terminator().successors() {
                let s = get_post_contract_node(s);
                if s == bb {
                    continue;
                }
                let parent = &mut parent[bb];
                match parent {
                    None => {
                        *parent = Some(s);
                    }
                    Some(e) if *e == s => (),
                    Some(e) => self.fail(
                        Location { block: bb, statement_index: 0 },
                        format!(
                            "Cleanup control flow violation: The blocks dominated by {:?} have edges to both {:?} and {:?}",
                            bb, s, *e
                        )
                    ),
                }
            }
        }

        // Check for cycles
        let mut stack = FxHashSet::default();
        for i in 0..parent.len() {
            let mut bb = BasicBlock::from_usize(i);
            stack.clear();
            stack.insert(bb);
            loop {
                let Some(parent) = parent[bb].take() else { break };
                let no_cycle = stack.insert(parent);
                if !no_cycle {
                    self.fail(
                        Location { block: bb, statement_index: 0 },
                        format!(
                            "Cleanup control flow violation: Cycle involving edge {bb:?} -> {parent:?}",
                        ),
                    );
                    break;
                }
                bb = parent;
            }
        }
    }

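    /// Checks the unwind edge of the terminator at `location`: cleanup blocks must not
    /// unwind again, and `UnwindAction::Continue` is only legal in functions that can
    /// actually unwind.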
    fn check_unwind_edge(&mut self, location: Location, unwind: UnwindAction) {
        let is_cleanup = self.body.basic_blocks[location.block].is_cleanup;
        match unwind {
            UnwindAction::Cleanup(unwind) => {
                if is_cleanup {
                    self.fail(location, "`UnwindAction::Cleanup` in cleanup block");
                }
                self.check_edge(location, unwind, EdgeKind::Unwind);
            }
            UnwindAction::Continue => {
                if is_cleanup {
                    self.fail(location, "`UnwindAction::Continue` in cleanup block");
                }

                if !self.can_unwind {
                    self.fail(location, "`UnwindAction::Continue` in no-unwind function");
                }
            }
            UnwindAction::Terminate(UnwindTerminateReason::InCleanup) => {
                if !is_cleanup {
                    self.fail(
                        location,
                        "`UnwindAction::Terminate(InCleanup)` in a non-cleanup block",
                    );
                }
            }
            // These are allowed everywhere.
            UnwindAction::Unreachable | UnwindAction::Terminate(UnwindTerminateReason::Abi) => (),
        }
    }

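    /// Returns `true` if the return edge of a `Call` terminator is a critical edge: the
    /// call also has a cleanup or terminate unwind successor, so the source block has
    /// multiple successors, while the return target has multiple predecessors.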
    fn is_critical_call_edge(&self, target: Option<BasicBlock>, unwind: UnwindAction) -> bool {
        let Some(target) = target else { return false };
        matches!(unwind, UnwindAction::Cleanup(_) | UnwindAction::Terminate(_))
            && self.body.basic_blocks.predecessors()[target].len() > 1
    }
}

impl<'a, 'tcx> Visitor<'tcx> for CfgChecker<'a, 'tcx> {
    fn visit_local(&mut self, local: Local, _context: PlaceContext, location: Location) {
        if self.body.local_decls.get(local).is_none() {
            self.fail(
                location,
                format!("local {local:?} has no corresponding declaration in `body.local_decls`"),
            );
        }
    }

    fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) {
        match &statement.kind {
            StatementKind::AscribeUserType(..) => {
                if self.mir_phase >= MirPhase::Runtime(RuntimePhase::Initial) {
                    self.fail(
                        location,
                        "`AscribeUserType` should have been removed after drop lowering phase",
                    );
                }
            }
            StatementKind::FakeRead(..) => {
                if self.mir_phase >= MirPhase::Runtime(RuntimePhase::Initial) {
                    self.fail(
                        location,
                        "`FakeRead` should have been removed after drop lowering phase",
                    );
                }
            }
            StatementKind::SetDiscriminant { .. } => {
                if self.mir_phase < MirPhase::Runtime(RuntimePhase::Initial) {
                    self.fail(location, "`SetDiscriminant` is not allowed until deaggregation");
                }
            }
            StatementKind::Deinit(..) => {
                if self.mir_phase < MirPhase::Runtime(RuntimePhase::Initial) {
                    self.fail(location, "`Deinit` is not allowed until deaggregation");
                }
            }
            StatementKind::Retag(kind, _) => {
                // FIXME(JakobDegen) The validator should check that `self.mir_phase <
                // DropsLowered`. However, this causes ICEs with generation of drop shims, which
                // seem to fail to set their `MirPhase` correctly.
                if matches!(kind, RetagKind::Raw | RetagKind::TwoPhase) {
                    self.fail(location, format!("explicit `{kind:?}` is forbidden"));
                }
            }
            StatementKind::Assign(..)
            | StatementKind::StorageLive(_)
            | StatementKind::StorageDead(_)
            | StatementKind::Intrinsic(_)
            | StatementKind::Coverage(_)
            | StatementKind::ConstEvalCounter
            | StatementKind::PlaceMention(..)
            | StatementKind::Nop => {}
        }

        self.super_statement(statement, location);
    }

    fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) {
        match &terminator.kind {
            TerminatorKind::Goto { target } => {
                self.check_edge(location, *target, EdgeKind::Normal);
            }
            TerminatorKind::SwitchInt { targets, discr: _ } => {
                for (_, target) in targets.iter() {
                    self.check_edge(location, target, EdgeKind::Normal);
                }
                self.check_edge(location, targets.otherwise(), EdgeKind::Normal);

                self.value_cache.clear();
                self.value_cache.extend(targets.iter().map(|(value, _)| value));
                let has_duplicates = targets.iter().len() != self.value_cache.len();
                if has_duplicates {
                    self.fail(
                        location,
                        format!(
                            "duplicated values in `SwitchInt` terminator: {:?}",
                            terminator.kind,
                        ),
                    );
                }
            }
            TerminatorKind::Drop { target, unwind, .. } => {
                self.check_edge(location, *target, EdgeKind::Normal);
                self.check_unwind_edge(location, *unwind);
            }
            TerminatorKind::Call { args, destination, target, unwind, .. } => {
                if let Some(target) = target {
                    self.check_edge(location, *target, EdgeKind::Normal);
                }
                self.check_unwind_edge(location, *unwind);

                // The code generation assumes that there are no critical call edges. The
                // assumption is used to simplify inserting code that should be executed along
                // the return edge from the call. FIXME(tmiasko): Since this is a strictly code
                // generation concern, the code generation should be responsible for handling it.
                if self.mir_phase >= MirPhase::Runtime(RuntimePhase::Optimized)
                    && self.is_critical_call_edge(*target, *unwind)
                {
                    self.fail(
                        location,
                        format!(
                            "encountered critical edge in `Call` terminator {:?}",
                            terminator.kind,
                        ),
                    );
                }

                // The call destination place and `Operand::Move` place used as an argument
                // might be passed by reference to the callee. Consequently they cannot be
                // packed.
                if is_within_packed(self.tcx, &self.body.local_decls, *destination).is_some() {
                    // This is bad! The callee will expect the memory to be aligned.
                    self.fail(
                        location,
                        format!(
                            "encountered packed place in `Call` terminator destination: {:?}",
                            terminator.kind,
                        ),
                    );
                }
                for arg in args {
                    if let Operand::Move(place) = &arg.node {
                        if is_within_packed(self.tcx, &self.body.local_decls, *place).is_some() {
                            // This is bad! The callee will expect the memory to be aligned.
                            self.fail(
                                location,
                                format!(
                                    "encountered `Move` of a packed place in `Call` terminator: {:?}",
                                    terminator.kind,
                                ),
                            );
                        }
                    }
                }
            }
            TerminatorKind::Assert { target, unwind, .. } => {
                self.check_edge(location, *target, EdgeKind::Normal);
                self.check_unwind_edge(location, *unwind);
            }
            TerminatorKind::Yield { resume, drop, .. } => {
                if self.body.coroutine.is_none() {
                    self.fail(location, "`Yield` cannot appear outside coroutine bodies");
                }
                if self.mir_phase >= MirPhase::Runtime(RuntimePhase::Initial) {
                    self.fail(location, "`Yield` should have been replaced by coroutine lowering");
                }
                self.check_edge(location, *resume, EdgeKind::Normal);
                if let Some(drop) = drop {
                    self.check_edge(location, *drop, EdgeKind::Normal);
                }
            }
            TerminatorKind::FalseEdge { real_target, imaginary_target } => {
                if self.mir_phase >= MirPhase::Runtime(RuntimePhase::Initial) {
                    self.fail(
                        location,
                        "`FalseEdge` should have been removed after drop elaboration",
                    );
                }
                self.check_edge(location, *real_target, EdgeKind::Normal);
                self.check_edge(location, *imaginary_target, EdgeKind::Normal);
            }
            TerminatorKind::FalseUnwind { real_target, unwind } => {
                if self.mir_phase >= MirPhase::Runtime(RuntimePhase::Initial) {
                    self.fail(
                        location,
                        "`FalseUnwind` should have been removed after drop elaboration",
                    );
                }
                self.check_edge(location, *real_target, EdgeKind::Normal);
                self.check_unwind_edge(location, *unwind);
            }
            TerminatorKind::InlineAsm { destination, unwind, .. } => {
                if let Some(destination) = destination {
                    self.check_edge(location, *destination, EdgeKind::Normal);
                }
                self.check_unwind_edge(location, *unwind);
            }
            TerminatorKind::CoroutineDrop => {
                if self.body.coroutine.is_none() {
                    self.fail(location, "`CoroutineDrop` cannot appear outside coroutine bodies");
                }
                if self.mir_phase >= MirPhase::Runtime(RuntimePhase::Initial) {
                    self.fail(
                        location,
                        "`CoroutineDrop` should have been replaced by coroutine lowering",
                    );
                }
            }
            TerminatorKind::UnwindResume => {
                let bb = location.block;
                if !self.body.basic_blocks[bb].is_cleanup {
                    self.fail(location, "Cannot `UnwindResume` from non-cleanup basic block")
                }
                if !self.can_unwind {
                    self.fail(location, "Cannot `UnwindResume` in a function that cannot unwind")
                }
            }
            TerminatorKind::UnwindTerminate(_) => {
                let bb = location.block;
                if !self.body.basic_blocks[bb].is_cleanup {
                    self.fail(location, "Cannot `UnwindTerminate` from non-cleanup basic block")
                }
            }
            TerminatorKind::Return => {
                let bb = location.block;
                if self.body.basic_blocks[bb].is_cleanup {
                    self.fail(location, "Cannot `Return` from cleanup basic block")
                }
            }
            TerminatorKind::Unreachable => {}
        }

        self.super_terminator(terminator, location);
    }

    fn visit_source_scope(&mut self, scope: SourceScope) {
        if self.body.source_scopes.get(scope).is_none() {
            self.tcx.dcx().span_delayed_bug(
                self.body.span,
                format!(
                    "broken MIR in {:?} ({}):\ninvalid source scope {:?}",
                    self.body.source.instance, self.when, scope,
                ),
            );
        }
    }
}

/// A faster version of the validation pass that only checks those things which may break when
/// instantiating any generic parameters.
pub fn validate_types<'tcx>(
    tcx: TyCtxt<'tcx>,
    mir_phase: MirPhase,
    param_env: ty::ParamEnv<'tcx>,
    body: &Body<'tcx>,
) -> Vec<(Location, String)> {
    let mut type_checker = TypeChecker { body, tcx, param_env, mir_phase, failures: Vec::new() };
    type_checker.visit_body(body);
    type_checker.failures
}

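/// Checks that the types appearing in a body are consistent with their uses: operands,
/// places, projections, rvalues, and terminators must all be applied at types for which
/// they are defined.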
struct TypeChecker<'a, 'tcx> {
    body: &'a Body<'tcx>,
    tcx: TyCtxt<'tcx>,
    param_env: ParamEnv<'tcx>,
    mir_phase: MirPhase,
    failures: Vec<(Location, String)>,
}

impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
    fn fail(&mut self, location: Location, msg: impl Into<String>) {
        self.failures.push((location, msg.into()));
    }

    /// Check if src can be assigned into dest.
    /// This is not precise, it will accept some incorrect assignments.
    fn mir_assign_valid_types(&self, src: Ty<'tcx>, dest: Ty<'tcx>) -> bool {
        // Fast path before we normalize.
        if src == dest {
            // Equal types, all is good.
            return true;
        }

        // We sometimes have to use `defining_opaque_types` for subtyping
        // to succeed here and figuring out how exactly that should work
        // is annoying. It is harmless enough to just not validate anything
        // in that case. We still check this after analysis as all opaque
        // types have been revealed at this point.
        if (src, dest).has_opaque_types() {
            return true;
        }

        // After borrowck subtyping should be fully explicit via
        // `Subtype` projections.
        let variance = if self.mir_phase >= MirPhase::Runtime(RuntimePhase::Initial) {
            Variance::Invariant
        } else {
            Variance::Covariant
        };

        crate::util::relate_types(self.tcx, self.param_env, variance, src, dest)
    }
}

impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> {
    fn visit_operand(&mut self, operand: &Operand<'tcx>, location: Location) {
        // This check is somewhat expensive, so only run it when -Zvalidate-mir is passed.
        if self.tcx.sess.opts.unstable_opts.validate_mir
            && self.mir_phase < MirPhase::Runtime(RuntimePhase::Initial)
        {
            // `Operand::Copy` is only supposed to be used with `Copy` types.
            if let Operand::Copy(place) = operand {
                let ty = place.ty(&self.body.local_decls, self.tcx).ty;

                if !ty.is_copy_modulo_regions(self.tcx, self.param_env) {
                    self.fail(location, format!("`Operand::Copy` with non-`Copy` type {ty}"));
                }
            }
        }

        self.super_operand(operand, location);
    }

    fn visit_projection_elem(
        &mut self,
        place_ref: PlaceRef<'tcx>,
        elem: PlaceElem<'tcx>,
        context: PlaceContext,
        location: Location,
    ) {
        match elem {
            ProjectionElem::OpaqueCast(ty)
                if self.mir_phase >= MirPhase::Runtime(RuntimePhase::Initial) =>
            {
                self.fail(
                    location,
                    format!("explicit opaque type cast to `{ty}` after `RevealAll`"),
                )
            }
            ProjectionElem::Index(index) => {
                let index_ty = self.body.local_decls[index].ty;
                if index_ty != self.tcx.types.usize {
                    self.fail(location, format!("bad index ({index_ty:?} != usize)"))
                }
            }
            ProjectionElem::Deref
                if self.mir_phase >= MirPhase::Runtime(RuntimePhase::PostCleanup) =>
            {
                let base_ty = place_ref.ty(&self.body.local_decls, self.tcx).ty;

                if base_ty.is_box() {
                    self.fail(
                        location,
                        format!("{base_ty:?} dereferenced after ElaborateBoxDerefs"),
                    )
                }
            }
            ProjectionElem::Field(f, ty) => {
                let parent_ty = place_ref.ty(&self.body.local_decls, self.tcx);
                let fail_out_of_bounds = |this: &mut Self, location| {
                    this.fail(location, format!("Out of bounds field {f:?} for {parent_ty:?}"));
                };
                let check_equal = |this: &mut Self, location, f_ty| {
                    if !this.mir_assign_valid_types(ty, f_ty) {
                        this.fail(
                            location,
                            format!(
                                "Field projection `{place_ref:?}.{f:?}` specified type `{ty:?}`, but actual type is `{f_ty:?}`"
                            )
                        )
                    }
                };

                let kind = match parent_ty.ty.kind() {
                    &ty::Alias(ty::Opaque, ty::AliasTy { def_id, args, .. }) => {
                        self.tcx.type_of(def_id).instantiate(self.tcx, args).kind()
                    }
                    kind => kind,
                };

                match kind {
                    ty::Tuple(fields) => {
                        let Some(f_ty) = fields.get(f.as_usize()) else {
                            fail_out_of_bounds(self, location);
                            return;
                        };
                        check_equal(self, location, *f_ty);
                    }
                    ty::Adt(adt_def, args) => {
                        let var = parent_ty.variant_index.unwrap_or(FIRST_VARIANT);
                        let Some(field) = adt_def.variant(var).fields.get(f) else {
                            fail_out_of_bounds(self, location);
                            return;
                        };
                        check_equal(self, location, field.ty(self.tcx, args));
                    }
                    ty::Closure(_, args) => {
                        let args = args.as_closure();
                        let Some(&f_ty) = args.upvar_tys().get(f.as_usize()) else {
                            fail_out_of_bounds(self, location);
                            return;
                        };
                        check_equal(self, location, f_ty);
                    }
                    ty::CoroutineClosure(_, args) => {
                        let args = args.as_coroutine_closure();
                        let Some(&f_ty) = args.upvar_tys().get(f.as_usize()) else {
                            fail_out_of_bounds(self, location);
                            return;
                        };
                        check_equal(self, location, f_ty);
                    }
                    &ty::Coroutine(def_id, args) => {
                        let f_ty = if let Some(var) = parent_ty.variant_index {
                            let gen_body = if def_id == self.body.source.def_id() {
                                self.body
                            } else {
                                self.tcx.optimized_mir(def_id)
                            };

                            let Some(layout) = gen_body.coroutine_layout() else {
                                self.fail(
                                    location,
                                    format!("No coroutine layout for {parent_ty:?}"),
                                );
                                return;
                            };

                            let Some(&local) = layout.variant_fields[var].get(f) else {
                                fail_out_of_bounds(self, location);
                                return;
                            };

                            let Some(f_ty) = layout.field_tys.get(local) else {
                                self.fail(
                                    location,
                                    format!("Out of bounds local {local:?} for {parent_ty:?}"),
                                );
                                return;
                            };

                            ty::EarlyBinder::bind(f_ty.ty).instantiate(self.tcx, args)
                        } else {
                            let Some(&f_ty) = args.as_coroutine().prefix_tys().get(f.index())
                            else {
                                fail_out_of_bounds(self, location);
                                return;
                            };

                            f_ty
                        };

                        check_equal(self, location, f_ty);
                    }
                    _ => {
                        self.fail(location, format!("{:?} does not have fields", parent_ty.ty));
                    }
                }
            }
            ProjectionElem::Subtype(ty) => {
                if !relate_types(
                    self.tcx,
                    self.param_env,
                    Variance::Covariant,
                    ty,
                    place_ref.ty(&self.body.local_decls, self.tcx).ty,
                ) {
                    self.fail(
                        location,
                        format!(
                            "Failed subtyping {ty:#?} and {:#?}",
                            place_ref.ty(&self.body.local_decls, self.tcx).ty
                        ),
                    )
                }
            }
            _ => {}
        }
        self.super_projection_elem(place_ref, elem, context, location);
    }

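    /// Checks that debuginfo is well-formed: composite fragments may only project fields,
    /// and places recorded in debuginfo may only use projections that are representable
    /// there.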
    fn visit_var_debug_info(&mut self, debuginfo: &VarDebugInfo<'tcx>) {
        if let Some(box VarDebugInfoFragment { ty, ref projection }) = debuginfo.composite {
            if ty.is_union() || ty.is_enum() {
                self.fail(
                    START_BLOCK.start_location(),
                    format!("invalid type {ty:?} in debuginfo for {:?}", debuginfo.name),
                );
            }
            if projection.is_empty() {
                self.fail(
                    START_BLOCK.start_location(),
                    format!("invalid empty projection in debuginfo for {:?}", debuginfo.name),
                );
            }
            if projection.iter().any(|p| !matches!(p, PlaceElem::Field(..))) {
                self.fail(
                    START_BLOCK.start_location(),
                    format!(
                        "illegal projection {:?} in debuginfo for {:?}",
                        projection, debuginfo.name
                    ),
                );
            }
        }
        match debuginfo.value {
            VarDebugInfoContents::Const(_) => {}
            VarDebugInfoContents::Place(place) => {
                if place.projection.iter().any(|p| !p.can_use_in_debuginfo()) {
                    self.fail(
                        START_BLOCK.start_location(),
                        format!("illegal place {:?} in debuginfo for {:?}", place, debuginfo.name),
                    );
                }
            }
        }
        self.super_var_debug_info(debuginfo);
    }

    fn visit_place(&mut self, place: &Place<'tcx>, cntxt: PlaceContext, location: Location) {
        // Set off any `bug!`s in the type computation code
        let _ = place.ty(&self.body.local_decls, self.tcx);

        if self.mir_phase >= MirPhase::Runtime(RuntimePhase::Initial)
            && place.projection.len() > 1
            && cntxt != PlaceContext::NonUse(NonUseContext::VarDebugInfo)
            && place.projection[1..].contains(&ProjectionElem::Deref)
        {
            self.fail(location, format!("{place:?} has deref at the wrong place"));
        }

        self.super_place(place, cntxt, location);
    }

    fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) {
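        // Helper: fail unless the type `$t` matches the type pattern `$typat`, using
        // `$text` as the error message format string.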
        macro_rules! check_kinds {
            ($t:expr, $text:literal, $typat:pat) => {
                if !matches!(($t).kind(), $typat) {
                    self.fail(location, format!($text, $t));
                }
            };
        }
        match rvalue {
            Rvalue::Use(_) | Rvalue::CopyForDeref(_) => {}
            Rvalue::Aggregate(kind, fields) => match **kind {
                AggregateKind::Tuple => {}
                AggregateKind::Array(dest) => {
                    for src in fields {
                        if !self.mir_assign_valid_types(src.ty(self.body, self.tcx), dest) {
                            self.fail(location, "array field has the wrong type");
                        }
                    }
                }
                AggregateKind::Adt(def_id, idx, args, _, Some(field)) => {
                    let adt_def = self.tcx.adt_def(def_id);
                    assert!(adt_def.is_union());
                    assert_eq!(idx, FIRST_VARIANT);
                    let dest_ty = self.tcx.normalize_erasing_regions(
                        self.param_env,
                        adt_def.non_enum_variant().fields[field].ty(self.tcx, args),
                    );
                    if fields.len() == 1 {
                        let src_ty = fields.raw[0].ty(self.body, self.tcx);
                        if !self.mir_assign_valid_types(src_ty, dest_ty) {
                            self.fail(location, "union field has the wrong type");
                        }
                    } else {
                        self.fail(location, "unions should have one initialized field");
                    }
                }
                AggregateKind::Adt(def_id, idx, args, _, None) => {
                    let adt_def = self.tcx.adt_def(def_id);
                    assert!(!adt_def.is_union());
                    let variant = &adt_def.variants()[idx];
                    if variant.fields.len() != fields.len() {
                        self.fail(location, "adt has the wrong number of initialized fields");
                    }
                    for (src, dest) in std::iter::zip(fields, &variant.fields) {
                        let dest_ty = self
                            .tcx
                            .normalize_erasing_regions(self.param_env, dest.ty(self.tcx, args));
                        if !self.mir_assign_valid_types(src.ty(self.body, self.tcx), dest_ty) {
                            self.fail(location, "adt field has the wrong type");
                        }
                    }
                }
                AggregateKind::Closure(_, args) => {
                    let upvars = args.as_closure().upvar_tys();
                    if upvars.len() != fields.len() {
                        self.fail(location, "closure has the wrong number of initialized fields");
                    }
                    for (src, dest) in std::iter::zip(fields, upvars) {
                        if !self.mir_assign_valid_types(src.ty(self.body, self.tcx), dest) {
                            self.fail(location, "closure field has the wrong type");
                        }
                    }
                }
                AggregateKind::Coroutine(_, args) => {
                    let upvars = args.as_coroutine().upvar_tys();
                    if upvars.len() != fields.len() {
                        self.fail(location, "coroutine has the wrong number of initialized fields");
                    }
                    for (src, dest) in std::iter::zip(fields, upvars) {
                        if !self.mir_assign_valid_types(src.ty(self.body, self.tcx), dest) {
                            self.fail(location, "coroutine field has the wrong type");
                        }
                    }
                }
                AggregateKind::CoroutineClosure(_, args) => {
                    let upvars = args.as_coroutine_closure().upvar_tys();
                    if upvars.len() != fields.len() {
                        self.fail(
                            location,
                            "coroutine-closure has the wrong number of initialized fields",
                        );
                    }
                    for (src, dest) in std::iter::zip(fields, upvars) {
                        if !self.mir_assign_valid_types(src.ty(self.body, self.tcx), dest) {
                            self.fail(location, "coroutine-closure field has the wrong type");
                        }
                    }
                }
            },
            Rvalue::Ref(_, BorrowKind::Fake, _) => {
                if self.mir_phase >= MirPhase::Runtime(RuntimePhase::Initial) {
                    self.fail(
                        location,
                        "`Assign` statement with a `Fake` borrow should have been removed in runtime MIR",
                    );
                }
            }
            Rvalue::Ref(..) => {}
            Rvalue::Len(p) => {
                let pty = p.ty(&self.body.local_decls, self.tcx).ty;
                check_kinds!(
                    pty,
                    "Cannot compute length of non-array type {:?}",
                    ty::Array(..) | ty::Slice(..)
                );
            }
            Rvalue::BinaryOp(op, vals) => {
                use BinOp::*;
                let a = vals.0.ty(&self.body.local_decls, self.tcx);
                let b = vals.1.ty(&self.body.local_decls, self.tcx);
                if crate::util::binop_right_homogeneous(*op) {
                    if let Eq | Lt | Le | Ne | Ge | Gt = op {
                        // The function pointer types can have lifetimes
                        if !self.mir_assign_valid_types(a, b) {
                            self.fail(
                                location,
                                format!("Cannot {op:?} compare incompatible types {a:?} and {b:?}"),
                            );
                        }
                    } else if a != b {
                        self.fail(
                            location,
                            format!(
                                "Cannot perform binary op {op:?} on unequal types {a:?} and {b:?}"
                            ),
                        );
                    }
                }

                match op {
                    Offset => {
                        check_kinds!(a, "Cannot offset non-pointer type {:?}", ty::RawPtr(..));
                        if b != self.tcx.types.isize && b != self.tcx.types.usize {
                            self.fail(location, format!("Cannot offset by non-isize type {b:?}"));
                        }
                    }
                    Eq | Lt | Le | Ne | Ge | Gt => {
                        for x in [a, b] {
                            check_kinds!(
                                x,
                                "Cannot {op:?} compare type {:?}",
                                ty::Bool
                                    | ty::Char
                                    | ty::Int(..)
                                    | ty::Uint(..)
                                    | ty::Float(..)
                                    | ty::RawPtr(..)
                                    | ty::FnPtr(..)
                            )
                        }
                    }
                    AddUnchecked | SubUnchecked | MulUnchecked | Shl | ShlUnchecked | Shr
                    | ShrUnchecked => {
                        for x in [a, b] {
                            check_kinds!(
                                x,
                                "Cannot {op:?} non-integer type {:?}",
                                ty::Uint(..) | ty::Int(..)
                            )
                        }
                    }
                    BitAnd | BitOr | BitXor => {
                        for x in [a, b] {
                            check_kinds!(
                                x,
                                "Cannot perform bitwise op {op:?} on type {:?}",
                                ty::Uint(..) | ty::Int(..) | ty::Bool
                            )
                        }
                    }
                    Add | Sub | Mul | Div | Rem => {
                        for x in [a, b] {
                            check_kinds!(
                                x,
                                "Cannot perform arithmetic {op:?} on type {:?}",
                                ty::Uint(..) | ty::Int(..) | ty::Float(..)
                            )
                        }
                    }
                }
            }
            Rvalue::CheckedBinaryOp(op, vals) => {
                use BinOp::*;
                let a = vals.0.ty(&self.body.local_decls, self.tcx);
                let b = vals.1.ty(&self.body.local_decls, self.tcx);
                match op {
                    Add | Sub | Mul => {
                        for x in [a, b] {
                            check_kinds!(
                                x,
                                "Cannot perform checked arithmetic on type {:?}",
                                ty::Uint(..) | ty::Int(..)
                            )
                        }
                        if a != b {
                            self.fail(
                                location,
                                format!(
                                    "Cannot perform checked arithmetic on unequal types {a:?} and {b:?}"
                                ),
                            );
                        }
                    }
                    _ => self.fail(location, format!("There is no checked version of {op:?}")),
                }
            }
            Rvalue::UnaryOp(op, operand) => {
                let a = operand.ty(&self.body.local_decls, self.tcx);
                match op {
                    UnOp::Neg => {
                        check_kinds!(a, "Cannot negate type {:?}", ty::Int(..) | ty::Float(..))
                    }
                    UnOp::Not => {
                        check_kinds!(
                            a,
                            "Cannot binary not type {:?}",
                            ty::Int(..) | ty::Uint(..) | ty::Bool
                        );
                    }
                }
            }
            Rvalue::ShallowInitBox(operand, _) => {
                let a = operand.ty(&self.body.local_decls, self.tcx);
                check_kinds!(a, "Cannot shallow init type {:?}", ty::RawPtr(..));
            }
            Rvalue::Cast(kind, operand, target_type) => {
                let op_ty = operand.ty(self.body, self.tcx);
                match kind {
                    CastKind::DynStar => {
                        // FIXME(dyn-star): make sure nothing needs to be done here.
                    }
                    // FIXME: Add checks for these
                    CastKind::PointerFromExposedAddress
                    | CastKind::PointerExposeAddress
                    | CastKind::PointerCoercion(_) => {}
                    CastKind::IntToInt | CastKind::IntToFloat => {
                        let input_valid = op_ty.is_integral() || op_ty.is_char() || op_ty.is_bool();
                        let target_valid = target_type.is_numeric() || target_type.is_char();
                        if !input_valid || !target_valid {
                            self.fail(
                                location,
                                format!("Wrong cast kind {kind:?} for the type {op_ty}"),
                            );
                        }
                    }
                    CastKind::FnPtrToPtr | CastKind::PtrToPtr => {
                        if !(op_ty.is_any_ptr() && target_type.is_unsafe_ptr()) {
                            self.fail(location, format!("Can't cast {op_ty} into 'Ptr'"));
                        }
                    }
                    CastKind::FloatToFloat | CastKind::FloatToInt => {
                        if !op_ty.is_floating_point() || !target_type.is_numeric() {
                            self.fail(
                                location,
                                format!(
                                    "Trying to cast non 'Float' as {kind:?} into {target_type:?}"
                                ),
                            );
                        }
                    }
                    CastKind::Transmute => {
                        if let MirPhase::Runtime(..) = self.mir_phase {
                            // Unlike `mem::transmute`, a MIR `Transmute` is well-formed
                            // for any two `Sized` types, just potentially UB to run.

                            if !self
                                .tcx
                                .normalize_erasing_regions(self.param_env, op_ty)
                                .is_sized(self.tcx, self.param_env)
                            {
                                self.fail(
                                    location,
                                    format!("Cannot transmute from non-`Sized` type {op_ty:?}"),
                                );
                            }
                            if !self
                                .tcx
                                .normalize_erasing_regions(self.param_env, *target_type)
                                .is_sized(self.tcx, self.param_env)
                            {
                                self.fail(
                                    location,
                                    format!("Cannot transmute to non-`Sized` type {target_type:?}"),
                                );
                            }
                        } else {
                            self.fail(
                                location,
                                format!(
                                    "Transmute is not supported in non-runtime phase {:?}.",
                                    self.mir_phase
                                ),
                            );
                        }
                    }
                }
            }
            Rvalue::NullaryOp(NullOp::OffsetOf(indices), container) => {
                let fail_out_of_bounds = |this: &mut Self, location, field, ty| {
                    this.fail(location, format!("Out of bounds field {field:?} for {ty:?}"));
                };

                let mut current_ty = *container;

                for (variant, field) in indices.iter() {
                    match current_ty.kind() {
                        ty::Tuple(fields) => {
                            if variant != FIRST_VARIANT {
                                self.fail(
                                    location,
                                    format!("tried to get variant {variant:?} of tuple"),
                                );
                                return;
                            }
                            let Some(&f_ty) = fields.get(field.as_usize()) else {
                                fail_out_of_bounds(self, location, field, current_ty);
                                return;
                            };

                            current_ty = self.tcx.normalize_erasing_regions(self.param_env, f_ty);
                        }
                        ty::Adt(adt_def, args) => {
                            let Some(field) = adt_def.variant(variant).fields.get(field) else {
                                fail_out_of_bounds(self, location, field, current_ty);
                                return;
                            };

                            let f_ty = field.ty(self.tcx, args);
                            current_ty = self.tcx.normalize_erasing_regions(self.param_env, f_ty);
                        }
                        _ => {
                            self.fail(
                                location,
                                format!("Cannot get offset ({variant:?}, {field:?}) from type {current_ty:?}"),
                            );
                            return;
                        }
                    }
                }
            }
            Rvalue::Repeat(_, _)
            | Rvalue::ThreadLocalRef(_)
            | Rvalue::AddressOf(_, _)
            | Rvalue::NullaryOp(NullOp::SizeOf | NullOp::AlignOf, _)
            | Rvalue::Discriminant(_) => {}
        }
        self.super_rvalue(rvalue, location);
    }

    fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) {
        match &statement.kind {
            StatementKind::Assign(box (dest, rvalue)) => {
                // LHS and RHS of the assignment must have the same type.
                let left_ty = dest.ty(&self.body.local_decls, self.tcx).ty;
                let right_ty = rvalue.ty(&self.body.local_decls, self.tcx);

                if !self.mir_assign_valid_types(right_ty, left_ty) {
                    self.fail(
                        location,
                        format!(
                            "encountered `{:?}` with incompatible types:\n\
                            left-hand side has type: {}\n\
                            right-hand side has type: {}",
                            statement.kind, left_ty, right_ty,
                        ),
                    );
                }
                if let Rvalue::CopyForDeref(place) = rvalue {
                    if place.ty(&self.body.local_decls, self.tcx).ty.builtin_deref(true).is_none() {
                        self.fail(
                            location,
                            "`CopyForDeref` should only be used for dereferenceable types",
                        )
                    }
                }
            }
            StatementKind::AscribeUserType(..) => {
                if self.mir_phase >= MirPhase::Runtime(RuntimePhase::Initial) {
                    self.fail(
                        location,
                        "`AscribeUserType` should have been removed after drop lowering phase",
                    );
                }
            }
            StatementKind::FakeRead(..) => {
                if self.mir_phase >= MirPhase::Runtime(RuntimePhase::Initial) {
                    self.fail(
                        location,
                        "`FakeRead` should have been removed after drop lowering phase",
                    );
                }
            }
            StatementKind::Intrinsic(box NonDivergingIntrinsic::Assume(op)) => {
                let ty = op.ty(&self.body.local_decls, self.tcx);
                if !ty.is_bool() {
                    self.fail(
                        location,
                        format!("`assume` argument must be `bool`, but got: `{ty}`"),
                    );
                }
            }
            StatementKind::Intrinsic(box NonDivergingIntrinsic::CopyNonOverlapping(
                CopyNonOverlapping { src, dst, count },
            )) => {
                let src_ty = src.ty(&self.body.local_decls, self.tcx);
                let op_src_ty = if let Some(src_deref) = src_ty.builtin_deref(true) {
                    src_deref.ty
                } else {
                    self.fail(
                        location,
                        format!("Expected src to be ptr in copy_nonoverlapping, got: {src_ty}"),
                    );
                    return;
                };
                let dst_ty = dst.ty(&self.body.local_decls, self.tcx);
                let op_dst_ty = if let Some(dst_deref) = dst_ty.builtin_deref(true) {
                    dst_deref.ty
                } else {
                    self.fail(
                        location,
                        format!("Expected dst to be ptr in copy_nonoverlapping, got: {dst_ty}"),
                    );
                    return;
                };
                // Since `CopyNonOverlapping` is parametrized by one type, we only need to
                // check that the source and destination types are equal rather than keeping
                // an extra type parameter around.
                if !self.mir_assign_valid_types(op_src_ty, op_dst_ty) {
                    self.fail(location, format!("bad arg ({op_src_ty:?} != {op_dst_ty:?})"));
                }

                let op_cnt_ty = count.ty(&self.body.local_decls, self.tcx);
                if op_cnt_ty != self.tcx.types.usize {
                    self.fail(location, format!("bad arg ({op_cnt_ty:?} != usize)"))
                }
            }
            StatementKind::SetDiscriminant { place, .. } => {
                if self.mir_phase < MirPhase::Runtime(RuntimePhase::Initial) {
                    self.fail(location, "`SetDiscriminant` is not allowed until deaggregation");
                }
                let pty = place.ty(&self.body.local_decls, self.tcx).ty.kind();
                if !matches!(pty, ty::Adt(..) | ty::Coroutine(..) | ty::Alias(ty::Opaque, ..)) {
                    self.fail(
                        location,
                        format!(
                            "`SetDiscriminant` is only allowed on ADTs and coroutines, not {pty:?}"
                        ),
                    );
                }
            }
            StatementKind::Deinit(..) => {
                if self.mir_phase < MirPhase::Runtime(RuntimePhase::Initial) {
                    self.fail(location, "`Deinit` is not allowed until deaggregation");
                }
            }
            StatementKind::Retag(kind, _) => {
                // FIXME(JakobDegen) The validator should check that `self.mir_phase <
                // DropsLowered`. However, this causes ICEs with generation of drop shims, which
                // seem to fail to set their `MirPhase` correctly.
                if matches!(kind, RetagKind::Raw | RetagKind::TwoPhase) {
                    self.fail(location, format!("explicit `{kind:?}` is forbidden"));
                }
            }
            StatementKind::StorageLive(_)
            | StatementKind::StorageDead(_)
            | StatementKind::Coverage(_)
            | StatementKind::ConstEvalCounter
            | StatementKind::PlaceMention(..)
            | StatementKind::Nop => {}
        }

        self.super_statement(statement, location);
    }

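    /// Checks terminator-specific typing rules: `SwitchInt` values must fit in the
    /// discriminant type, `Call` requires a callable type, and `Assert` requires a `bool`
    /// condition.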
    fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) {
        match &terminator.kind {
            TerminatorKind::SwitchInt { targets, discr } => {
                let switch_ty = discr.ty(&self.body.local_decls, self.tcx);

                let target_width = self.tcx.sess.target.pointer_width;

                let size = Size::from_bits(match switch_ty.kind() {
                    ty::Uint(uint) => uint.normalize(target_width).bit_width().unwrap(),
                    ty::Int(int) => int.normalize(target_width).bit_width().unwrap(),
                    ty::Char => 32,
                    ty::Bool => 1,
                    other => bug!("unhandled type: {:?}", other),
                });

                for (value, _) in targets.iter() {
                    if Scalar::<()>::try_from_uint(value, size).is_none() {
                        self.fail(
                            location,
                            format!("the value {value:#x} is not a proper {switch_ty:?}"),
                        )
                    }
                }
            }
            TerminatorKind::Call { func, .. } => {
                let func_ty = func.ty(&self.body.local_decls, self.tcx);
                match func_ty.kind() {
                    ty::FnPtr(..) | ty::FnDef(..) => {}
                    _ => self.fail(
                        location,
                        format!("encountered non-callable type {func_ty} in `Call` terminator"),
                    ),
                }
            }
            TerminatorKind::Assert { cond, .. } => {
                let cond_ty = cond.ty(&self.body.local_decls, self.tcx);
                if cond_ty != self.tcx.types.bool {
                    self.fail(
                        location,
                        format!(
                            "encountered non-boolean condition of type {cond_ty} in `Assert` terminator"
                        ),
                    );
                }
            }
            TerminatorKind::Goto { .. }
            | TerminatorKind::Drop { .. }
            | TerminatorKind::Yield { .. }
            | TerminatorKind::FalseEdge { .. }
            | TerminatorKind::FalseUnwind { .. }
            | TerminatorKind::InlineAsm { .. }
            | TerminatorKind::CoroutineDrop
            | TerminatorKind::UnwindResume
            | TerminatorKind::UnwindTerminate(_)
            | TerminatorKind::Return
            | TerminatorKind::Unreachable => {}
        }

        self.super_terminator(terminator, location);
    }
}