
Commit 1d7d149

Use simple spinlock in AtomicU64
1 parent b095588 commit 1d7d149

3 files changed: +62 -19

Diff for: src/sync/atomic.rs

+62 -9

@@ -11,33 +11,86 @@ mod imp {
 
 #[cfg(any(target_arch = "arm", target_arch = "mips", target_arch = "powerpc"))]
 mod imp {
-    use std::sync::atomic::Ordering;
-    use std::sync::Mutex;
+    use std::cell::UnsafeCell;
+    use std::ops::{Deref, DerefMut};
+    use std::sync::atomic::{AtomicBool, Ordering};
 
-    #[derive(Debug)]
-    pub(crate) struct AtomicU64(Mutex<u64>);
+    use crossbeam_utils::Backoff;
+
+    pub(crate) struct AtomicU64(Spinlock<u64>);
 
     impl AtomicU64 {
-        pub(crate) fn new(val: u64) -> Self {
-            Self(Mutex::new(val))
+        pub(crate) const fn new(val: u64) -> Self {
+            Self(Spinlock::new(val))
         }
 
         pub(crate) fn load(&self, _: Ordering) -> u64 {
-            *self.0.lock().unwrap()
+            *self.0.lock()
        }
 
         pub(crate) fn fetch_add(&self, val: u64, _: Ordering) -> u64 {
-            let mut lock = self.0.lock().unwrap();
+            let mut lock = self.0.lock();
             let prev = *lock;
             *lock = prev + val;
             prev
         }
 
         pub(crate) fn fetch_sub(&self, val: u64, _: Ordering) -> u64 {
-            let mut lock = self.0.lock().unwrap();
+            let mut lock = self.0.lock();
             let prev = *lock;
             *lock = prev - val;
             prev
         }
     }
+
+    /// A simple spinlock.
+    struct Spinlock<T> {
+        flag: AtomicBool,
+        value: UnsafeCell<T>,
+    }
+
+    unsafe impl<T: Send> Send for Spinlock<T> {}
+    unsafe impl<T: Send> Sync for Spinlock<T> {}
+
+    impl<T> Spinlock<T> {
+        /// Returns a new spinlock initialized with `value`.
+        const fn new(value: T) -> Self {
+            Self {
+                flag: AtomicBool::new(false),
+                value: UnsafeCell::new(value),
+            }
+        }
+
+        /// Locks the spinlock.
+        fn lock(&self) -> SpinlockGuard<'_, T> {
+            let backoff = Backoff::new();
+            while self.flag.swap(true, Ordering::Acquire) {
+                backoff.snooze();
+            }
+            SpinlockGuard(self)
+        }
+    }
+
+    /// A guard holding a spinlock locked.
+    struct SpinlockGuard<'a, T>(&'a Spinlock<T>);
+
+    impl<T> Drop for SpinlockGuard<'_, T> {
+        fn drop(&mut self) {
+            self.0.flag.store(false, Ordering::Release);
+        }
+    }
+
+    impl<T> Deref for SpinlockGuard<'_, T> {
+        type Target = T;
+
+        fn deref(&self) -> &T {
+            unsafe { &*self.0.value.get() }
+        }
+    }
+
+    impl<T> DerefMut for SpinlockGuard<'_, T> {
+        fn deref_mut(&mut self) -> &mut T {
+            unsafe { &mut *self.0.value.get() }
+        }
+    }
 }
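The switch from `Mutex` to a spinlock is what makes `AtomicU64::new` a `const fn`, which in turn lets the shim live in plain `static`s (hence the two `lazy_static!` removals in the diffs below). For reference, here is a minimal, self-contained sketch of the same spinlock pattern; it spins with `std::hint::spin_loop()` instead of `crossbeam_utils::Backoff` so it runs without extra dependencies, and the `with` helper is illustrative rather than taken from the crate:

```rust
use std::cell::UnsafeCell;
use std::hint::spin_loop;
use std::sync::atomic::{AtomicBool, Ordering};
use std::thread;

/// A const-constructible spinlock, mirroring the one in the diff above but
/// spinning with `std::hint::spin_loop()` rather than `crossbeam_utils::Backoff`.
struct Spinlock<T> {
    flag: AtomicBool,
    value: UnsafeCell<T>,
}

unsafe impl<T: Send> Send for Spinlock<T> {}
unsafe impl<T: Send> Sync for Spinlock<T> {}

impl<T> Spinlock<T> {
    const fn new(value: T) -> Self {
        Self {
            flag: AtomicBool::new(false),
            value: UnsafeCell::new(value),
        }
    }

    /// Spin until the lock is free, run `f` with exclusive access, then unlock.
    fn with<R>(&self, f: impl FnOnce(&mut T) -> R) -> R {
        while self.flag.swap(true, Ordering::Acquire) {
            spin_loop();
        }
        let result = f(unsafe { &mut *self.value.get() });
        self.flag.store(false, Ordering::Release);
        result
    }
}

// Because `Spinlock::new` is a `const fn`, the lock can live in a plain `static`.
static COUNTER: Spinlock<u64> = Spinlock::new(0);

fn main() {
    let handles: Vec<_> = (0..4)
        .map(|_| thread::spawn(|| COUNTER.with(|n| *n += 1)))
        .collect();
    for handle in handles {
        handle.join().unwrap();
    }
    assert_eq!(COUNTER.with(|n| *n), 4);
}
```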

Diff for: src/task/blocking.rs

-5

@@ -13,11 +13,6 @@ use crate::utils::abort_on_panic;
 
 const MAX_THREADS: u64 = 10_000;
 
-#[cfg(any(target_arch = "arm", target_arch = "mips", target_arch = "powerpc"))]
-lazy_static! {
-    static ref DYNAMIC_THREAD_COUNT: AtomicU64 = AtomicU64::new(0);
-}
-#[cfg(not(any(target_arch = "arm", target_arch = "mips", target_arch = "powerpc")))]
 static DYNAMIC_THREAD_COUNT: AtomicU64 = AtomicU64::new(0);
 
 struct Pool {
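With `AtomicU64::new` now `const`, `DYNAMIC_THREAD_COUNT` can be an ordinary `static` on every target and the cfg-gated `lazy_static!` fallback disappears. As a hypothetical illustration (not part of this diff) of how such a counter can cap dynamic thread creation, using std's `AtomicU64` and an invented `try_reserve_thread` helper:

```rust
use std::sync::atomic::{AtomicU64, Ordering};

const MAX_THREADS: u64 = 10_000;
static DYNAMIC_THREAD_COUNT: AtomicU64 = AtomicU64::new(0);

/// Try to reserve a slot for one more dynamic thread; roll the increment back
/// and refuse if the cap has already been reached. (Illustrative helper only.)
fn try_reserve_thread() -> bool {
    if DYNAMIC_THREAD_COUNT.fetch_add(1, Ordering::Relaxed) < MAX_THREADS {
        true
    } else {
        DYNAMIC_THREAD_COUNT.fetch_sub(1, Ordering::Relaxed);
        false
    }
}

fn main() {
    assert!(try_reserve_thread());
    // Release the slot again when the hypothetical thread exits.
    DYNAMIC_THREAD_COUNT.fetch_sub(1, Ordering::Relaxed);
}
```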

Diff for: src/task/task.rs

-5

@@ -113,11 +113,6 @@ pub struct TaskId(NonZeroU64);
 
 impl TaskId {
     pub(crate) fn new() -> TaskId {
-        #[cfg(any(target_arch = "arm", target_arch = "mips", target_arch = "powerpc"))]
-        lazy_static::lazy_static! {
-            static ref COUNTER: AtomicU64 = AtomicU64::new(1);
-        }
-        #[cfg(not(any(target_arch = "arm", target_arch = "mips", target_arch = "powerpc")))]
         static COUNTER: AtomicU64 = AtomicU64::new(1);
 
         let id = COUNTER.fetch_add(1, Ordering::Relaxed);
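On targets with native 64-bit atomics the shim presumably resolves to std's `AtomicU64`, so the same code path serves both cases. A minimal sketch of the monotonic-ID pattern used here, written against std's type directly (the `next_task_id` name is illustrative, not from the crate):

```rust
use std::num::NonZeroU64;
use std::sync::atomic::{AtomicU64, Ordering};

/// Illustrative stand-in for `TaskId::new()`: a const-initialized static
/// counter bumped with a relaxed `fetch_add`. Starting at 1 keeps the
/// returned id non-zero.
fn next_task_id() -> NonZeroU64 {
    static COUNTER: AtomicU64 = AtomicU64::new(1);
    let id = COUNTER.fetch_add(1, Ordering::Relaxed);
    NonZeroU64::new(id).expect("id counter starts at 1")
}

fn main() {
    assert_eq!(next_task_id().get(), 1);
    assert_eq!(next_task_id().get(), 2);
}
```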
