Mirror of https://github.com/rust-lang/rust.git
Rollup merge of #74477 - chansuke:sys-wasm-unsafe-op-in-unsafe-fn, r=Mark-Simulacrum
`#[deny(unsafe_op_in_unsafe_fn)]` in sys/wasm

This is part of #73904. It wraps the unsafe operations inside `unsafe fn`s in `libstd/sys/wasm` in explicit `unsafe` blocks.

@rustbot modify labels: F-unsafe-block-in-unsafe-fn
This commit is contained in: e0c08ae4e1
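For context on what the lint requires: with `unsafe_op_in_unsafe_fn` denied, the body of an `unsafe fn` no longer acts as one big implicit `unsafe` block, so every unsafe operation needs its own `unsafe {}` and SAFETY justification. A minimal standalone sketch (not taken from this commit; the raw-pointer function is invented for illustration):

```rust
// With this lint denied, an `unsafe fn` body is treated like a safe context:
// each unsafe operation needs its own `unsafe` block and SAFETY comment.
#![deny(unsafe_op_in_unsafe_fn)]

/// # Safety
/// `ptr` must be non-null, aligned, and point to a live `u32`.
unsafe fn read_plus_one(ptr: *const u32) -> u32 {
    // Without the explicit block, the dereference below would be rejected by the lint.

    // SAFETY: the caller upholds this function's contract, which is exactly
    // what the dereference requires.
    unsafe { *ptr + 1 }
}

fn main() {
    let x = 41u32;
    // SAFETY: `&x` is a valid, aligned pointer to a live `u32`.
    println!("{}", unsafe { read_plus_one(&x) });
}
```

The diff below applies this same transformation throughout `libstd/sys/wasm`.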
@@ -24,26 +24,34 @@ static mut DLMALLOC: dlmalloc::Dlmalloc = dlmalloc::DLMALLOC_INIT;
 unsafe impl GlobalAlloc for System {
     #[inline]
     unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
+        // SAFETY: DLMALLOC access is guaranteed to be safe because the lock gives us unique and non-reentrant access.
+        // Calling malloc() is safe because preconditions on this function match the trait method preconditions.
         let _lock = lock::lock();
-        DLMALLOC.malloc(layout.size(), layout.align())
+        unsafe { DLMALLOC.malloc(layout.size(), layout.align()) }
     }
 
     #[inline]
     unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut u8 {
+        // SAFETY: DLMALLOC access is guaranteed to be safe because the lock gives us unique and non-reentrant access.
+        // Calling calloc() is safe because preconditions on this function match the trait method preconditions.
         let _lock = lock::lock();
-        DLMALLOC.calloc(layout.size(), layout.align())
+        unsafe { DLMALLOC.calloc(layout.size(), layout.align()) }
     }
 
     #[inline]
     unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
+        // SAFETY: DLMALLOC access is guaranteed to be safe because the lock gives us unique and non-reentrant access.
+        // Calling free() is safe because preconditions on this function match the trait method preconditions.
         let _lock = lock::lock();
-        DLMALLOC.free(ptr, layout.size(), layout.align())
+        unsafe { DLMALLOC.free(ptr, layout.size(), layout.align()) }
     }
 
     #[inline]
     unsafe fn realloc(&self, ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 {
+        // SAFETY: DLMALLOC access is guaranteed to be safe because the lock gives us unique and non-reentrant access.
+        // Calling realloc() is safe because preconditions on this function match the trait method preconditions.
        let _lock = lock::lock();
-        DLMALLOC.realloc(ptr, layout.size(), layout.align(), new_size)
+        unsafe { DLMALLOC.realloc(ptr, layout.size(), layout.align(), new_size) }
     }
 }
 
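The same pattern outside libstd, as a hedged sketch: a hypothetical `CountingAlloc` forwards to `System` instead of the lock-guarded dlmalloc instance above, and with the lint denied each forwarded call gets its own `unsafe` block and SAFETY comment.

```rust
#![deny(unsafe_op_in_unsafe_fn)]

use std::alloc::{GlobalAlloc, Layout, System};
use std::sync::atomic::{AtomicUsize, Ordering};

// Hypothetical allocator for illustration: counts allocations and forwards
// to the system allocator instead of a lock-guarded dlmalloc.
struct CountingAlloc;

static ALLOCS: AtomicUsize = AtomicUsize::new(0);

unsafe impl GlobalAlloc for CountingAlloc {
    #[inline]
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        ALLOCS.fetch_add(1, Ordering::Relaxed);
        // SAFETY: the caller upholds `GlobalAlloc::alloc`'s contract
        // (valid, non-zero-sized layout), which is what `System::alloc` needs.
        unsafe { System.alloc(layout) }
    }

    #[inline]
    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        // SAFETY: `ptr` was returned by `self.alloc` with this same `layout`,
        // as required by the `GlobalAlloc::dealloc` contract.
        unsafe { System.dealloc(ptr, layout) }
    }
}

#[global_allocator]
static GLOBAL: CountingAlloc = CountingAlloc;

fn main() {
    let v = vec![1, 2, 3];
    println!("{:?}, allocations so far: {}", v, ALLOCS.load(Ordering::Relaxed));
}
```

As in the diff, the SAFETY comments lean on the `GlobalAlloc` method contracts rather than re-proving them.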
@@ -44,14 +44,20 @@ impl Condvar {
 
     pub unsafe fn notify_one(&self) {
         self.cnt.fetch_add(1, SeqCst);
-        wasm32::memory_atomic_notify(self.ptr(), 1);
+        // SAFETY: ptr() is always valid
+        unsafe {
+            wasm32::memory_atomic_notify(self.ptr(), 1);
+        }
     }
 
     #[inline]
     pub unsafe fn notify_all(&self) {
         self.cnt.fetch_add(1, SeqCst);
-        wasm32::memory_atomic_notify(self.ptr(), u32::MAX); // -1 == "wake everyone"
+        // SAFETY: ptr() is always valid
+        unsafe {
+            wasm32::memory_atomic_notify(self.ptr(), u32::MAX); // -1 == "wake everyone"
+        }
     }
 
     pub unsafe fn wait(&self, mutex: &Mutex) {
         // "atomically block and unlock" implemented by loading our current
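To isolate the condvar change: the sketch below mirrors the notify pattern with a plain atomic counter. It assumes a `wasm32` target with the `atomics` feature and the `core::arch::wasm32` wait/notify intrinsics used above (which may still require a nightly toolchain); the `Flag` type is invented for illustration.

```rust
// Sketch only: mirrors the notify pattern above with a plain atomic counter.
#[cfg(all(target_arch = "wasm32", target_feature = "atomics"))]
mod flag {
    use core::arch::wasm32;
    use core::sync::atomic::{AtomicU32, Ordering::SeqCst};

    pub struct Flag {
        cnt: AtomicU32,
    }

    impl Flag {
        pub const fn new() -> Flag {
            Flag { cnt: AtomicU32::new(0) }
        }

        fn ptr(&self) -> *mut i32 {
            self.cnt.as_ptr() as *mut i32
        }

        pub fn notify_one(&self) {
            self.cnt.fetch_add(1, SeqCst);
            // SAFETY: `self.ptr()` points at `self.cnt`, which lives at least
            // as long as `&self`, so it is a valid i32 location to notify on.
            unsafe {
                wasm32::memory_atomic_notify(self.ptr(), 1);
            }
        }

        pub fn notify_all(&self) {
            self.cnt.fetch_add(1, SeqCst);
            // SAFETY: as above; u32::MAX asks to wake every waiter.
            unsafe {
                wasm32::memory_atomic_notify(self.ptr(), u32::MAX);
            }
        }
    }
}

fn main() {}
```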
@@ -14,6 +14,8 @@
 //! compiling for wasm. That way it's a compile time error for something that's
 //! guaranteed to be a runtime error!
 
+#![deny(unsafe_op_in_unsafe_fn)]
+
 pub mod alloc;
 pub mod args;
 #[path = "../unsupported/cmath.rs"]
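The inner attribute added to the module root covers the whole `sys/wasm` module tree, including the submodules declared after it, so it does not need to be repeated per file. A rough standalone illustration of that scoping (module and function names are invented):

```rust
// Module-level inner attribute: everything nested underneath is checked.
#![deny(unsafe_op_in_unsafe_fn)]

mod alloc_like {
    /// # Safety
    /// `p` must be valid for reads.
    pub unsafe fn read(p: *const u8) -> u8 {
        // SAFETY: guaranteed by this function's own safety contract.
        unsafe { *p }
    }
}

mod legacy {
    // A single item can still opt out while the rest of the tree stays denied.
    #[allow(unsafe_op_in_unsafe_fn)]
    pub unsafe fn old_style(p: *const u8) -> u8 {
        *p // accepted here because of the `allow` above
    }
}

fn main() {
    let x = 7u8;
    // SAFETY: `&x` is a valid pointer for reads.
    unsafe {
        assert_eq!(alloc_like::read(&x), legacy::old_style(&x));
    }
}
```

With `forbid` instead of `deny`, the inner `allow` would itself be rejected.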
@@ -28,11 +28,14 @@ impl Mutex {
 
     pub unsafe fn lock(&self) {
         while !self.try_lock() {
-            let val = wasm32::memory_atomic_wait32(
+            // SAFETY: the caller must uphold the safety contract for `memory_atomic_wait32`.
+            let val = unsafe {
+                wasm32::memory_atomic_wait32(
                     self.ptr(),
                     1,  // we expect our mutex is locked
                     -1, // wait infinitely
-            );
+                )
+            };
             // we should have either woke up (0) or got a not-equal due to a
             // race (1). We should never time out (2)
             debug_assert!(val == 0 || val == 1);
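The surrounding `Mutex` is a one-word futex-style lock: `lock` spins on `try_lock` and otherwise waits on the lock word, expecting it to hold 1 ("locked"), until a wake-up arrives. A rough portable sketch of the same shape, with the `memory_atomic_wait32` call replaced by spinning since the wasm wait/notify intrinsics aren't available off-target (the `SpinMutex` name and the `compare_exchange`-based `try_lock` are assumptions; `try_lock` itself isn't shown in this hunk):

```rust
use std::sync::atomic::{AtomicI32, Ordering::SeqCst};

// Invented stand-in for the futex-backed lock word: 0 = unlocked, 1 = locked.
pub struct SpinMutex {
    locked: AtomicI32,
}

impl SpinMutex {
    pub const fn new() -> SpinMutex {
        SpinMutex { locked: AtomicI32::new(0) }
    }

    pub fn try_lock(&self) -> bool {
        self.locked.compare_exchange(0, 1, SeqCst, SeqCst).is_ok()
    }

    pub fn lock(&self) {
        while !self.try_lock() {
            // The real code calls `memory_atomic_wait32(ptr, 1, -1)` here and
            // checks the result (0 = woken, 1 = value changed under us,
            // 2 = timeout, which an infinite timeout never produces).
            // Spinning is a portable, less efficient stand-in.
            std::hint::spin_loop();
        }
    }

    pub fn unlock(&self) {
        self.locked.store(0, SeqCst);
        // The futex version would notify one waiter here.
    }
}

fn main() {
    let m = SpinMutex::new();
    m.lock();
    assert!(!m.try_lock());
    m.unlock();
    assert!(m.try_lock());
}
```

The `debug_assert!(val == 0 || val == 1)` in the real code has no analogue here, because spinning has no timeout or wake-up result to check.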
@@ -93,19 +96,20 @@ impl ReentrantMutex {
     pub unsafe fn lock(&self) {
         let me = thread::my_id();
         while let Err(owner) = self._try_lock(me) {
-            let val = wasm32::memory_atomic_wait32(self.ptr(), owner as i32, -1);
+            // SAFETY: the caller must guarantee that `self.ptr()` and `owner` are valid i32.
+            let val = unsafe { wasm32::memory_atomic_wait32(self.ptr(), owner as i32, -1) };
             debug_assert!(val == 0 || val == 1);
         }
     }
 
     #[inline]
     pub unsafe fn try_lock(&self) -> bool {
-        self._try_lock(thread::my_id()).is_ok()
+        unsafe { self._try_lock(thread::my_id()).is_ok() }
     }
 
     #[inline]
     unsafe fn _try_lock(&self, id: u32) -> Result<(), u32> {
-        let id = id.checked_add(1).unwrap(); // make sure `id` isn't 0
+        let id = id.checked_add(1).unwrap();
         match self.owner.compare_exchange(0, id, SeqCst, SeqCst) {
             // we transitioned from unlocked to locked
             Ok(_) => {

@@ -132,7 +136,10 @@ impl ReentrantMutex {
         match *self.recursions.get() {
             0 => {
                 self.owner.swap(0, SeqCst);
-                wasm32::memory_atomic_notify(self.ptr() as *mut i32, 1); // wake up one waiter, if any
+                // SAFETY: the caller must guarantee that `self.ptr()` is valid i32.
+                unsafe {
+                    wasm32::memory_atomic_notify(self.ptr() as *mut i32, 1);
+                } // wake up one waiter, if any
             }
             ref mut n => *n -= 1,
         }
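The `ReentrantMutex` scheme visible across these last two hunks: `owner` stores the holder's thread id plus one (so 0 can mean "unlocked") and `recursions` counts nested acquisitions in an `UnsafeCell`. A hedged sketch of that bookkeeping, with the caller supplying an id and spinning standing in for the futex wait (the `ReentrantLock` name and exact API are invented):

```rust
#![deny(unsafe_op_in_unsafe_fn)]

use std::cell::UnsafeCell;
use std::sync::atomic::{AtomicU32, Ordering::SeqCst};

// `owner` holds (thread id + 1) so that 0 can mean "unlocked";
// `recursions` counts nested acquisitions by the owning thread.
pub struct ReentrantLock {
    owner: AtomicU32,
    recursions: UnsafeCell<u32>,
}

// Claimed because `recursions` is only touched by the thread recorded in
// `owner`, per the safety contracts below.
unsafe impl Sync for ReentrantLock {}

impl ReentrantLock {
    pub const fn new() -> ReentrantLock {
        ReentrantLock { owner: AtomicU32::new(0), recursions: UnsafeCell::new(0) }
    }

    /// # Safety
    /// `id` must uniquely identify the calling thread, and every `lock` must
    /// be paired with exactly one `unlock` on that same thread.
    pub unsafe fn lock(&self, id: u32) {
        let id = id.checked_add(1).unwrap(); // make sure the stored id isn't 0
        loop {
            match self.owner.compare_exchange(0, id, SeqCst, SeqCst) {
                // we transitioned from unlocked to locked
                Ok(_) => return,
                // we already hold the lock: bump the recursion count
                Err(cur) if cur == id => {
                    // SAFETY: only the owning thread reaches this branch, so
                    // there is no concurrent access to `recursions`.
                    unsafe { *self.recursions.get() += 1 };
                    return;
                }
                // someone else holds it; the futex version would wait here
                Err(_) => std::hint::spin_loop(),
            }
        }
    }

    /// # Safety
    /// The calling thread must currently hold the lock.
    pub unsafe fn unlock(&self) {
        // SAFETY: the owner has exclusive access to `recursions` (see `lock`).
        let recursions = unsafe { &mut *self.recursions.get() };
        if *recursions == 0 {
            self.owner.swap(0, SeqCst);
            // the futex version wakes up one waiter here
        } else {
            *recursions -= 1;
        }
    }
}

fn main() {
    let lock = ReentrantLock::new();
    // SAFETY: lock/unlock calls are balanced and made from one thread.
    unsafe {
        lock.lock(7);
        lock.lock(7); // re-entering with the same id is fine
        lock.unlock();
        lock.unlock();
    }
}
```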