#[deny(unsafe_op_in_unsafe_fn)] in sys/wasm

Commit d413bb6f57 (parent 7bade6ef73)
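With `unsafe_op_in_unsafe_fn` denied, the body of an `unsafe fn` is no longer treated as one implicit `unsafe` block, so each unsafe operation inside it has to be wrapped in an explicit `unsafe { }`, ideally with a `// SAFETY:` comment. That is the mechanical change the hunks below apply. A minimal sketch of the lint's effect; `read_byte` and its pointer argument are illustrative, not taken from the diff:

#![deny(unsafe_op_in_unsafe_fn)]

unsafe fn read_byte(p: *const u8) -> u8 {
    // `*p` on its own would be rejected here: the surrounding `unsafe fn`
    // no longer grants an implicit unsafe context under this lint.
    // SAFETY: the caller must pass a pointer that is valid for reads.
    unsafe { *p }
}

fn main() {
    let x = 7u8;
    // SAFETY: `&x` is a valid, live pointer for the duration of the call.
    assert_eq!(unsafe { read_byte(&x) }, 7);
}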
@@ -25,25 +25,25 @@ unsafe impl GlobalAlloc for System {
     #[inline]
     unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
         let _lock = lock::lock();
-        DLMALLOC.malloc(layout.size(), layout.align())
+        unsafe { DLMALLOC.malloc(layout.size(), layout.align()) }
     }
 
     #[inline]
     unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut u8 {
         let _lock = lock::lock();
-        DLMALLOC.calloc(layout.size(), layout.align())
+        unsafe { DLMALLOC.calloc(layout.size(), layout.align()) }
     }
 
     #[inline]
     unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
         let _lock = lock::lock();
-        DLMALLOC.free(ptr, layout.size(), layout.align())
+        unsafe { DLMALLOC.free(ptr, layout.size(), layout.align()) }
     }
 
     #[inline]
     unsafe fn realloc(&self, ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 {
         let _lock = lock::lock();
-        DLMALLOC.realloc(ptr, layout.size(), layout.align(), new_size)
+        unsafe { DLMALLOC.realloc(ptr, layout.size(), layout.align(), new_size) }
     }
 }
@@ -44,13 +44,18 @@ impl Condvar {
     pub unsafe fn notify_one(&self) {
         self.cnt.fetch_add(1, SeqCst);
-        wasm32::memory_atomic_notify(self.ptr(), 1);
+        // SAFETY: ptr() is always valid
+        unsafe {
+            wasm32::memory_atomic_notify(self.ptr(), 1);
+        }
     }
 
     #[inline]
     pub unsafe fn notify_all(&self) {
-        self.cnt.fetch_add(1, SeqCst);
-        wasm32::memory_atomic_notify(self.ptr(), u32::MAX); // -1 == "wake everyone"
+        unsafe {
+            self.cnt.fetch_add(1, SeqCst);
+            wasm32::memory_atomic_notify(self.ptr(), u32::MAX); // -1 == "wake everyone"
+        }
     }
 
     pub unsafe fn wait(&self, mutex: &Mutex) {
@@ -14,6 +14,8 @@
 //! compiling for wasm. That way it's a compile time error for something that's
 //! guaranteed to be a runtime error!
 
+#![deny(unsafe_op_in_unsafe_fn)]
+
 pub mod alloc;
 pub mod args;
 #[path = "../unsupported/cmath.rs"]
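The attribute added above is an inner attribute at the top of the module, so the lint covers everything under sys/wasm, including the submodules declared here; the individual files need no opt-in of their own. A small, hedged sketch of that scoping with an illustrative module and function (not taken from the diff):

mod sys_like {
    #![deny(unsafe_op_in_unsafe_fn)] // inner attribute: applies to the whole module

    pub unsafe fn zero(ptr: *mut u8, len: usize) {
        // SAFETY: the caller must pass a pointer valid for `len` writable bytes.
        unsafe { core::ptr::write_bytes(ptr, 0, len) }
    }
}

fn main() {
    let mut buf = [1u8; 4];
    // SAFETY: `buf` is valid for 4 writable bytes.
    unsafe { sys_like::zero(buf.as_mut_ptr(), buf.len()) };
    assert_eq!(buf, [0u8; 4]);
}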
@@ -28,11 +28,14 @@ impl Mutex {
     pub unsafe fn lock(&self) {
         while !self.try_lock() {
-            let val = wasm32::memory_atomic_wait32(
-                self.ptr(),
-                1,  // we expect our mutex is locked
-                -1, // wait infinitely
-            );
+            // SAFETY: the caller must uphold the safety contract for `memory_atomic_wait32`.
+            let val = unsafe {
+                wasm32::memory_atomic_wait32(
+                    self.ptr(),
+                    1,  // we expect our mutex is locked
+                    -1, // wait infinitely
+                )
+            };
             // we should have either woke up (0) or got a not-equal due to a
             // race (1). We should never time out (2)
             debug_assert!(val == 0 || val == 1);
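The loop above is a futex-style wait: the thread parks on the mutex word until it is notified or sees a different value, and the return codes referenced in the comments are 0 (woken), 1 (the value already differed, i.e. a lost race) and 2 (timed out). A hedged sketch of that wait/notify pairing with the `core::arch::wasm32` intrinsics, assuming a wasm32 target built with the `atomics` target feature; the helper names and the `AtomicI32` state word are illustrative:

// Sketch only: assumes wasm32 with the `atomics` target feature enabled.
use core::arch::wasm32;
use core::sync::atomic::{AtomicI32, Ordering::SeqCst};

// Park while `state` still holds `expected` (the shape of the lock() loop above).
pub unsafe fn wait_while(state: &AtomicI32, expected: i32) {
    while state.load(SeqCst) == expected {
        // SAFETY: `state.as_ptr()` is a valid, aligned i32 for the whole call.
        let val = unsafe {
            wasm32::memory_atomic_wait32(state.as_ptr(), expected, -1 /* no timeout */)
        };
        // 0 = woken by a notify, 1 = the value had already changed; 2 (timeout)
        // cannot happen with an infinite timeout.
        debug_assert!(val == 0 || val == 1);
    }
}

// Wake at most one thread parked on `state` (the shape of unlock()/notify_one()).
pub fn wake_one(state: &AtomicI32) {
    // SAFETY: same validity argument as above.
    unsafe {
        wasm32::memory_atomic_notify(state.as_ptr(), 1);
    }
}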
@@ -47,7 +50,7 @@ impl Mutex {
     #[inline]
     pub unsafe fn try_lock(&self) -> bool {
-        self.locked.compare_exchange(0, 1, SeqCst, SeqCst).is_ok()
+        unsafe { self.locked.compare_exchange(0, 1, SeqCst, SeqCst).is_ok() }
     }
 
     #[inline]
@@ -83,7 +86,7 @@ unsafe impl Sync for ReentrantMutex {}
 impl ReentrantMutex {
     pub const unsafe fn uninitialized() -> ReentrantMutex {
-        ReentrantMutex { owner: AtomicU32::new(0), recursions: UnsafeCell::new(0) }
+        unsafe { ReentrantMutex { owner: AtomicU32::new(0), recursions: UnsafeCell::new(0) } }
     }
 
     pub unsafe fn init(&self) {
@@ -93,19 +96,20 @@ impl ReentrantMutex {
     pub unsafe fn lock(&self) {
         let me = thread::my_id();
         while let Err(owner) = self._try_lock(me) {
-            let val = wasm32::memory_atomic_wait32(self.ptr(), owner as i32, -1);
+            // SAFETY: the caller must guarantee that `self.ptr()` and `owner` are valid i32.
+            let val = unsafe { wasm32::memory_atomic_wait32(self.ptr(), owner as i32, -1) };
             debug_assert!(val == 0 || val == 1);
         }
     }
 
     #[inline]
     pub unsafe fn try_lock(&self) -> bool {
-        self._try_lock(thread::my_id()).is_ok()
+        unsafe { self._try_lock(thread::my_id()).is_ok() }
     }
 
     #[inline]
     unsafe fn _try_lock(&self, id: u32) -> Result<(), u32> {
-        let id = id.checked_add(1).unwrap(); // make sure `id` isn't 0
+        let id = id.checked_add(1).unwrap();
         match self.owner.compare_exchange(0, id, SeqCst, SeqCst) {
             // we transitioned from unlocked to locked
             Ok(_) => {
@@ -132,7 +136,10 @@ impl ReentrantMutex {
         match *self.recursions.get() {
             0 => {
                 self.owner.swap(0, SeqCst);
-                wasm32::memory_atomic_notify(self.ptr() as *mut i32, 1); // wake up one waiter, if any
+                // SAFETY: the caller must guarantee that `self.ptr()` is valid i32.
+                unsafe {
+                    wasm32::atomic_notify(self.ptr() as *mut i32, 1);
+                } // wake up one waiter, if any
             }
             ref mut n => *n -= 1,
         }
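Taken together, the last hunks touch the reentrant mutex, which keeps an owner word (0 meaning unlocked, otherwise the holder's thread id plus one) and a recursion counter, and only notifies a parked waiter once the counter drops back to zero. A simplified, hedged sketch of that bookkeeping; the field names mirror the diff, but the code is illustrative, not the actual std implementation:

use core::cell::UnsafeCell;
use core::sync::atomic::{AtomicU32, Ordering::SeqCst};

pub struct Reentrant {
    owner: AtomicU32,            // 0 = unlocked, otherwise the holder's id + 1
    recursions: UnsafeCell<u32>, // extra lock() calls made by the current owner
}

// Shared between threads; synchronization is done by hand, as in the diff.
unsafe impl Sync for Reentrant {}

impl Reentrant {
    /// Try to acquire (or re-acquire) the lock for thread `id`.
    /// On contention, returns Err with the current owner word to wait on.
    pub unsafe fn try_lock(&self, id: u32) -> Result<(), u32> {
        let id = id.checked_add(1).unwrap(); // keep 0 reserved for "unlocked"
        match self.owner.compare_exchange(0, id, SeqCst, SeqCst) {
            // We transitioned from unlocked to locked.
            Ok(_) => Ok(()),
            // We already hold the lock: just bump the recursion counter.
            // SAFETY: only the owning thread touches `recursions`.
            Err(prev) if prev == id => {
                unsafe { *self.recursions.get() += 1 };
                Ok(())
            }
            Err(owner) => Err(owner),
        }
    }

    /// Release one level of the lock; fully unlock when the counter is zero.
    /// SAFETY: must be called by the thread that currently holds the lock.
    pub unsafe fn unlock(&self) {
        // SAFETY: we are the owner, so nothing else touches `recursions`.
        let n = unsafe { &mut *self.recursions.get() };
        if *n == 0 {
            // Actually release; the real code then notifies one parked waiter.
            self.owner.swap(0, SeqCst);
        } else {
            *n -= 1;
        }
    }
}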