Make some rustc code conform to strict provenance.

There's some really bad stuff around `ty` and the pointer-tagging code that
was too much work to handle here.
Aria Beingessner 2022-03-22 16:21:33 -04:00
parent c7de289e1c
commit 68643603ad
3 changed files with 13 additions and 10 deletions


@@ -18,6 +18,7 @@
 #![feature(decl_macro)]
 #![feature(rustc_attrs)]
 #![cfg_attr(test, feature(test))]
+#![feature(strict_provenance)]
 use smallvec::SmallVec;
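The new feature gate unlocks the nightly strict-provenance pointer APIs that the hunks below switch to. A minimal standalone sketch (my own, not part of the commit, assuming a nightly toolchain with this gate) of the three calls the arena now relies on:

    #![feature(strict_provenance)]
    use std::ptr;

    fn main() {
        let x = 42u8;
        let p: *const u8 = &x;
        let a: usize = p.addr();            // address only, provenance is not exposed
        let q: *const u8 = p.with_addr(a);  // same provenance as `p`, explicit address
        assert_eq!(p, q);
        let s: *mut u8 = ptr::invalid_mut(0x10); // address-only pointer, no provenance
        assert_eq!(s.addr(), 0x10);
    }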
@@ -87,7 +88,7 @@ impl<T> ArenaChunk<T> {
         unsafe {
             if mem::size_of::<T>() == 0 {
                 // A pointer as large as possible for zero-sized elements.
-                !0 as *mut T
+                ptr::invalid_mut(!0)
             } else {
                 self.start().add(self.storage.len())
             }
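A hedged sketch (hypothetical helper, not from the diff) of the pattern above: for a zero-sized T the "end" pointer is never dereferenced, so an address-only pointer built with `ptr::invalid_mut` is enough and no integer-to-pointer cast is needed:

    #![feature(strict_provenance)]
    use std::{mem, ptr};

    // Sentinel end pointer for a zero-sized element type: the largest possible
    // address, carrying no provenance because it is only compared, never read.
    fn zst_end_ptr<T>() -> *mut T {
        debug_assert_eq!(mem::size_of::<T>(), 0);
        ptr::invalid_mut(!0)
    }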
@@ -199,7 +200,7 @@ impl<T> TypedArena<T> {
         unsafe {
             if mem::size_of::<T>() == 0 {
                 self.ptr.set((self.ptr.get() as *mut u8).wrapping_offset(1) as *mut T);
-                let ptr = mem::align_of::<T>() as *mut T;
+                let ptr = ptr::NonNull::<T>::dangling().as_ptr();
                 // Don't drop the object. This `write` is equivalent to `forget`.
                 ptr::write(ptr, object);
                 &mut *ptr
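A sketch of the same zero-sized-allocation trick in isolation (the `alloc_zst` name and `'static` lifetime are my own simplification): `NonNull::dangling()` yields a non-null, well-aligned pointer, which is all a write of a ZST needs, whereas the old code manufactured the same address from `align_of` via an integer cast:

    use std::mem;
    use std::ptr::NonNull;

    // Hypothetical stand-alone version of the ZST branch above.
    fn alloc_zst<T>(object: T) -> &'static mut T {
        assert_eq!(mem::size_of::<T>(), 0);
        let ptr = NonNull::<T>::dangling().as_ptr();
        unsafe {
            // No memory is touched for a zero-sized write; this just "forgets" `object`.
            ptr.write(object);
            &mut *ptr
        }
    }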
@@ -216,7 +217,7 @@ impl<T> TypedArena<T> {
     #[inline]
     fn can_allocate(&self, additional: usize) -> bool {
-        let available_bytes = self.end.get() as usize - self.ptr.get() as usize;
+        let available_bytes = self.end.get().addr() - self.ptr.get().addr();
         let additional_bytes = additional.checked_mul(mem::size_of::<T>()).unwrap();
         available_bytes >= additional_bytes
     }
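The capacity check only needs the distance between two pointers into the same chunk, so it can be done entirely on addresses; a hedged sketch with hypothetical names:

    #![feature(strict_provenance)]

    // Bytes still available between the bump pointer and the end of its chunk.
    // Both pointers must point into (or one past) the same allocation.
    fn available_bytes<T>(end: *mut T, bump: *mut T) -> usize {
        end.addr() - bump.addr()
    }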
@@ -262,7 +263,7 @@ impl<T> TypedArena<T> {
             // If a type is `!needs_drop`, we don't need to keep track of how many elements
             // the chunk stores - the field will be ignored anyway.
             if mem::needs_drop::<T>() {
-                let used_bytes = self.ptr.get() as usize - last_chunk.start() as usize;
+                let used_bytes = self.ptr.get().addr() - last_chunk.start().addr();
                 last_chunk.entries = used_bytes / mem::size_of::<T>();
             }
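The drop bookkeeping uses the same address-only arithmetic; a sketch (hypothetical helper, assumes a non-zero-sized T):

    #![feature(strict_provenance)]
    use std::mem;

    // Number of initialized elements between the chunk start and the bump pointer.
    fn entries_used<T>(bump: *mut T, chunk_start: *mut T) -> usize {
        debug_assert!(mem::size_of::<T>() != 0);
        (bump.addr() - chunk_start.addr()) / mem::size_of::<T>()
    }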
@@ -288,9 +289,9 @@ impl<T> TypedArena<T> {
     // chunks.
     fn clear_last_chunk(&self, last_chunk: &mut ArenaChunk<T>) {
         // Determine how much was filled.
-        let start = last_chunk.start() as usize;
+        let start = last_chunk.start().addr();
         // We obtain the value of the pointer to the first uninitialized element.
-        let end = self.ptr.get() as usize;
+        let end = self.ptr.get().addr();
         // We then calculate the number of elements to be dropped in the last chunk,
         // which is the filled area's length.
         let diff = if mem::size_of::<T>() == 0 {
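The element count branches on zero-sized types because the arena bumps the ZST pointer by one address unit per element; a sketch of that computation with hypothetical names:

    use std::mem;

    // Filled length of the last chunk, given the start and one-past-the-end
    // addresses obtained with `.addr()` above.
    fn filled_elements<T>(start: usize, end: usize) -> usize {
        if mem::size_of::<T>() == 0 {
            // For ZSTs the bump pointer counts elements directly.
            end - start
        } else {
            (end - start) / mem::size_of::<T>()
        }
    }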
@ -395,15 +396,16 @@ impl DroplessArena {
/// request.
#[inline]
fn alloc_raw_without_grow(&self, layout: Layout) -> Option<*mut u8> {
let start = self.start.get() as usize;
let end = self.end.get() as usize;
let start = self.start.get().addr();
let old_end = self.end.get();
let end = old_end.addr();
let align = layout.align();
let bytes = layout.size();
let new_end = end.checked_sub(bytes)? & !(align - 1);
if start <= new_end {
let new_end = new_end as *mut u8;
let new_end = old_end.with_addr(new_end);
self.end.set(new_end);
Some(new_end)
} else {
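This is the one place where a new pointer has to be materialized from computed integers; `with_addr` re-attaches the provenance of the old end pointer to the rounded-down address so the returned pointer is still valid to write through. A hedged stand-alone sketch (hypothetical signature):

    #![feature(strict_provenance)]

    // Downward bump allocation: do the arithmetic on addresses, then rebuild a
    // real pointer from `end`'s provenance rather than from a bare integer.
    fn bump_down(start: *mut u8, end: *mut u8, bytes: usize, align: usize) -> Option<*mut u8> {
        let new_end = end.addr().checked_sub(bytes)? & !(align - 1);
        if start.addr() <= new_end {
            Some(end.with_addr(new_end))
        } else {
            None
        }
    }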


@@ -6,6 +6,7 @@
 #![feature(once_cell)]
 #![feature(nll)]
 #![feature(associated_type_bounds)]
+#![feature(strict_provenance)]
 #![recursion_limit = "256"]
 #![allow(rustc::potential_query_instability)]


@@ -116,7 +116,7 @@ impl<'a, 'tcx: 'a> MonoItemExt<'a, 'tcx> for MonoItem<'tcx> {
     fn to_raw_string(&self) -> String {
         match *self {
             MonoItem::Fn(instance) => {
-                format!("Fn({:?}, {})", instance.def, instance.substs.as_ptr() as usize)
+                format!("Fn({:?}, {})", instance.def, instance.substs.as_ptr().addr())
             }
             MonoItem::Static(id) => format!("Static({:?})", id),
             MonoItem::GlobalAsm(id) => format!("GlobalAsm({:?})", id),
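Here the address is only used as a cheap identity token in a debug string, so `.addr()` is a drop-in replacement for the `as usize` cast and avoids exposing the pointer's provenance; a sketch with a hypothetical helper:

    #![feature(strict_provenance)]

    // Process-local identity of an interned slice, suitable only for debug output.
    fn raw_identity<T>(interned: &[T]) -> usize {
        interned.as_ptr().addr()
    }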