- Fix __atomic_compare_exchange_4().
- Add patch for core::sync::atomic.
- Revert kernel Mutex.

branch: master
parent: 925a08f9ae
commit: 81196729e4
@@ -0,0 +1,57 @@
--- atomic_backup.rs	2018-10-06 19:59:14.000000000 +0800
+++ atomic.rs	2018-10-26 14:34:31.000000000 +0800
@@ -125,6 +125,9 @@
 #[cfg(target_has_atomic = "8")]
 #[stable(feature = "rust1", since = "1.0.0")]
 pub struct AtomicBool {
+    #[cfg(any(target_arch = "riscv32", target_arch = "riscv64"))]
+    v: UnsafeCell<u32>,
+    #[cfg(not(any(target_arch = "riscv32", target_arch = "riscv64")))]
     v: UnsafeCell<u8>,
 }
 
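The struct change above exists because the base RISC-V atomics (LR/SC and the AMO instructions of the A extension) operate on 32-bit words at minimum; byte-wide atomic instructions only arrived much later with the Zabha extension. Widening the storage to u32 sidesteps sub-word emulation, at the cost of changing the type's size on these targets. A minimal sketch of that observable difference, assuming the patch is applied and a compiler recent enough for const assertions (the assertions are illustrative, not part of the patch):

// Under this patch, AtomicBool wraps an UnsafeCell<u32> on RISC-V,
// so code that assumes a one-byte AtomicBool would break there.
#[cfg(any(target_arch = "riscv32", target_arch = "riscv64"))]
const _: () = assert!(core::mem::size_of::<core::sync::atomic::AtomicBool>() == 4);

#[cfg(not(any(target_arch = "riscv32", target_arch = "riscv64")))]
const _: () = assert!(core::mem::size_of::<core::sync::atomic::AtomicBool>() == 1);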
@@ -265,6 +268,44 @@
 pub const ATOMIC_BOOL_INIT: AtomicBool = AtomicBool::new(false);
 
 #[cfg(target_has_atomic = "8")]
+#[cfg(any(target_arch = "riscv32", target_arch = "riscv64"))]
+impl AtomicBool {
+    /// Creates a new `AtomicBool` (word-backed on RISC-V).
+    #[inline]
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub const fn new(v: bool) -> AtomicBool {
+        AtomicBool { v: UnsafeCell::new(v as u32) }
+    }
+
+    /// Loads the value; any non-zero word reads as `true`.
+    #[inline]
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn load(&self, order: Ordering) -> bool {
+        unsafe { atomic_load(self.v.get(), order) != 0 }
+    }
+
+    /// Stores `val` as a full 32-bit word.
+    #[inline]
+    #[stable(feature = "rust1", since = "1.0.0")]
+    pub fn store(&self, val: bool, order: Ordering) {
+        unsafe { atomic_store(self.v.get(), val as u32, order); }
+    }
+
+    /// Stores `new` if the current value equals `current`; returns
+    /// the previous value either way.
+    #[inline]
+    #[stable(feature = "rust1", since = "1.0.0")]
+    #[cfg(target_has_atomic = "cas")]
+    pub fn compare_and_swap(&self, current: bool, new: bool, order: Ordering) -> bool {
+        match unsafe { atomic_compare_exchange(self.v.get(), current as u32, new as u32, order, order) } {
+            Ok(x) => x != 0,
+            Err(x) => x != 0,
+        }
+    }
+}
+
+#[cfg(target_has_atomic = "8")]
+#[cfg(not(any(target_arch = "riscv32", target_arch = "riscv64")))]
 impl AtomicBool {
     /// Creates a new `AtomicBool`.
     ///
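With the patch applied, the word-backed AtomicBool keeps the same public API as the byte-backed one, so callers need no cfg of their own. A small usage sketch; the READY flag and the two function names are made up for illustration:

use core::sync::atomic::{AtomicBool, Ordering};

// Hypothetical flag shared between harts; 4 bytes wide on RISC-V
// under this patch, 1 byte elsewhere.
static READY: AtomicBool = AtomicBool::new(false);

fn publish() {
    READY.store(true, Ordering::Release);
}

fn try_claim() -> bool {
    // compare_and_swap returns the previous value, so `false` here
    // means we performed the false -> true transition ourselves.
    !READY.compare_and_swap(false, true, Ordering::Acquire)
}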
@@ -1,44 +0,0 @@
//! RISCV atomic is not currently supported by Rust.
//! This is an ugly workaround.

use core::cell::UnsafeCell;

extern {
    fn __atomic_load_4(src: *const u32) -> u32;
    fn __atomic_store_4(dst: *mut u32, val: u32);
    fn __atomic_compare_exchange_4(dst: *mut u32, expected: *mut u32, desired: u32) -> bool;
}

pub struct AtomicLock {
    lock: UnsafeCell<u32>,
}

impl AtomicLock {
    pub fn new() -> Self {
        AtomicLock {
            lock: UnsafeCell::new(0),
        }
    }

    /// Returns true if the lock was acquired (0 -> 1 succeeded).
    pub fn try_lock(&self) -> bool {
        let mut expected: u32 = 0;
        unsafe {
            __atomic_compare_exchange_4(self.lock.get(), &mut expected as *mut u32, 1)
        }
    }

    /// Returns true while the lock is held.
    pub fn load(&self) -> bool {
        unsafe {
            __atomic_load_4(self.lock.get()) == 1
        }
    }

    /// Releases the lock by storing 0.
    pub fn store(&self) {
        unsafe {
            __atomic_store_4(self.lock.get(), 0);
        }
    }
}
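The first item in the commit message, fixing __atomic_compare_exchange_4(), refers to the shim behind the extern block above; the fixed shim itself is not part of this diff. As context, here is a minimal sketch of a word-sized compare-exchange on RISC-V using LR/SC, written with today's core::arch::asm! (the 2018 kernel would have used the old unstable asm! syntax). It matches the three-argument signature declared above rather than the full six-argument libatomic ABI, and it assumes the hardware implements the A extension even when the compile target does not advertise atomic support:

use core::arch::asm;

/// Sketch: returns true on success; on failure, writes the value
/// actually observed back through `expected`.
#[cfg(any(target_arch = "riscv32", target_arch = "riscv64"))]
#[no_mangle]
pub unsafe extern "C" fn __atomic_compare_exchange_4(
    dst: *mut u32,
    expected: *mut u32,
    desired: u32,
) -> bool {
    let old = *expected;
    let prev: u32;
    asm!(
        "1:",
        "lr.w.aqrl {prev}, ({dst})",           // load-reserve the current word
        "bne {prev}, {old}, 2f",               // value differs: fail
        "sc.w.aqrl {scratch}, {new}, ({dst})", // try to store `desired`
        "bnez {scratch}, 1b",                  // reservation lost: retry
        "2:",
        prev = out(reg) prev,
        scratch = out(reg) _,
        dst = in(reg) dst,
        old = in(reg) old,
        new = in(reg) desired,
    );
    if prev == old {
        true
    } else {
        *expected = prev;
        false
    }
}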
@@ -1,31 +0,0 @@
use core::sync::atomic::{AtomicBool, Ordering};

pub struct AtomicLock {
    lock: AtomicBool,
}

impl AtomicLock {
    pub fn new() -> AtomicLock {
        AtomicLock {
            lock: AtomicBool::new(false),
        }
    }

    /// Returns true if the lock was acquired.
    pub fn try_lock(&self) -> bool {
        !self.lock.compare_and_swap(false, true, Ordering::Acquire)
    }

    pub fn load(&self) -> bool {
        self.lock.load(Ordering::Relaxed)
    }

    /// Releases the lock.
    pub fn store(&self) {
        self.lock.store(false, Ordering::Release);
    }
}

pub const ATOMIC_LOCK_INIT: AtomicLock = AtomicLock {
    lock: AtomicBool::new(false),
};
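The kernel Mutex that the commit message reverts does not appear in this diff. For context, a spin mutex over this AtomicLock could look like the sketch below; Mutex, MutexGuard, and their methods are illustrative names, not the kernel's actual code:

use core::cell::UnsafeCell;
use core::ops::{Deref, DerefMut};

// Hypothetical spin mutex built on the AtomicLock above.
pub struct Mutex<T> {
    lock: AtomicLock,
    data: UnsafeCell<T>,
}

// Safety: the lock serializes all access to `data`.
unsafe impl<T: Send> Sync for Mutex<T> {}

impl<T> Mutex<T> {
    pub const fn new(data: T) -> Self {
        Mutex { lock: ATOMIC_LOCK_INIT, data: UnsafeCell::new(data) }
    }

    pub fn lock(&self) -> MutexGuard<'_, T> {
        // Spin until the false -> true transition succeeds.
        while !self.lock.try_lock() {
            core::hint::spin_loop();
        }
        MutexGuard { mutex: self }
    }
}

pub struct MutexGuard<'a, T> {
    mutex: &'a Mutex<T>,
}

impl<T> Deref for MutexGuard<'_, T> {
    type Target = T;
    fn deref(&self) -> &T {
        unsafe { &*self.mutex.data.get() }
    }
}

impl<T> DerefMut for MutexGuard<'_, T> {
    fn deref_mut(&mut self) -> &mut T {
        unsafe { &mut *self.mutex.data.get() }
    }
}

impl<T> Drop for MutexGuard<'_, T> {
    fn drop(&mut self) {
        // store() writes false with Release ordering, unlocking.
        self.mutex.lock.store();
    }
}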