atomiccell

This commit is contained in:
Janis 2025-07-03 16:50:41 +02:00
parent 4f1e4b1672
commit 1ea8bcb3ed
2 changed files with 263 additions and 0 deletions

262
src/atomic.rs Normal file

@@ -0,0 +1,262 @@
use core::{
cell::UnsafeCell,
mem::{self, ManuallyDrop, MaybeUninit},
sync::atomic::{AtomicU8, AtomicU16, AtomicU32, AtomicU64, AtomicUsize, Ordering},
};
use crate::sync::SpinWait;
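// Tries each primitive atomic type in turn: if `$t` can be transmuted into it,
// `$a` is declared as a reference to that atomic for `$op` to initialise, and
// `$op` is evaluated; if none fits, `$fallback` runs instead.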
macro_rules! atomic {
(@check, $t:ty, $atomic:ty, $a:ident, $op:expr) => {
if crate::can_transmute::<$t, $atomic>() {
let $a: &$atomic;
break $op;
}
};
($t:ty, $a:ident, $op:expr, $fallback:expr) => {
loop {
atomic!(@check, $t, AtomicU8, $a, $op);
atomic!(@check, $t, AtomicU16, $a, $op);
atomic!(@check, $t, AtomicU32, $a, $op);
atomic!(@check, $t, AtomicU64, $a, $op);
atomic!(@check, $t, AtomicUsize, $a, $op);
// Fallback to the provided expression if no atomic type is found.
break $fallback;
}
};
}
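/// A cell holding an optional value that can be set, read (`T: Copy`), taken,
/// or swapped in place. When `AtomicCellInner<T>` can be transmuted into a
/// primitive atomic type the operations are lock-free; otherwise they fall
/// back to a spin lock on the `state` byte.
///
/// A minimal usage sketch:
///
/// ```ignore
/// let cell: AtomicCell<u32> = AtomicCell::new();
/// cell.set(42);
/// assert_eq!(cell.get(), Some(42));
/// assert_eq!(cell.take(), Some(42));
/// assert_eq!(cell.get(), None);
/// ```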
pub struct AtomicCell<T> {
inner: AtomicCellInner<T>,
_phantom: core::marker::PhantomData<T>,
}
impl<T> AtomicCell<T> {
pub const fn new() -> Self {
Self {
inner: AtomicCellInner::none(),
_phantom: core::marker::PhantomData,
}
}
pub fn set(&self, value: T) {
self.inner.set(value);
}
pub fn take(&self) -> Option<T> {
self.inner.take()
}
pub fn get(&self) -> Option<T>
where
T: Copy,
{
self.inner.get()
}
pub fn swap(&self, value: Option<T>) -> Option<T> {
self.inner.swap(value)
}
}
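// Internal representation: the value lives in an `UnsafeCell`, and `state`
// records whether the slot is `EMPTY`, `FULL`, or `LOCKED` (the lock state is
// only used by the non-atomic fallback paths).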
struct AtomicCellInner<T> {
value: UnsafeCell<ManuallyDrop<MaybeUninit<T>>>,
state: AtomicU8,
}
impl<T> AtomicCellInner<T> {
const EMPTY: u8 = 0;
const FULL: u8 = 1;
const LOCKED: u8 = 2;
const fn none() -> Self {
Self {
value: UnsafeCell::new(ManuallyDrop::new(MaybeUninit::uninit())),
state: AtomicU8::new(Self::EMPTY),
}
}
fn from_option(value: Option<T>) -> Self {
match value {
Some(v) => Self {
value: UnsafeCell::new(ManuallyDrop::new(MaybeUninit::new(v))),
state: AtomicU8::new(Self::FULL),
},
None => Self {
value: UnsafeCell::new(ManuallyDrop::new(MaybeUninit::uninit())),
state: AtomicU8::new(Self::EMPTY),
},
}
}
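// Non-atomic copy: writes `other`'s value into `self` and then stores
// `other`'s state. Only sound while the caller has exclusive access to the
// destination, e.g. while it holds the `LOCKED` state.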
unsafe fn copy_from(&self, other: &Self, load: Ordering, store: Ordering) {
unsafe {
self.value.get().write(other.value.get().read());
self.state.store(other.state.load(load), store);
}
}
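// Replaces the stored value; the previously stored value (if any) is dropped.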
fn set(&self, value: T) {
self.swap(Some(value));
}
fn take(&self) -> Option<T> {
self.swap(None)
}
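// Returns a copy of the stored value, if any, leaving the cell unchanged.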
fn get(&self) -> Option<T>
where
T: Copy,
{
let this: Self;
atomic! {
Self, a,
{
unsafe {
a = &*(self as *const Self as *const _);
let old = a.load(Ordering::Acquire);
this = mem::transmute_copy(&old);
}
},
{
let mut state = self.state.load(Ordering::Acquire);
if state == Self::EMPTY {
this = Self::none();
} else {
// if the state is `FULL`, we have to lock
let mut spin_wait = SpinWait::new();
let old = loop {
// if the state is `LOCKED`, wait and re-read the state before retrying
if state == Self::LOCKED {
spin_wait.spin();
state = self.state.load(Ordering::Relaxed);
continue;
}
// if the state is `FULL`, try to lock the cell so the value can be copied out
if self.state.compare_exchange_weak(
state,
Self::LOCKED,
Ordering::Acquire,
Ordering::Relaxed,
).is_ok() {
break state;
} else {
// the state changed, we need to check again
state = self.state.load(Ordering::Relaxed);
continue;
}
};
let local = Self::none();
if old == Self::FULL {
// copy the value out of the cell into the local slot and mark it as full
unsafe {
local.value.get().write(self.value.get().read());
}
local.state.store(Self::FULL, Ordering::Relaxed);
}
// unlock the cell, restoring the state it had before it was locked
self.state.store(old, Ordering::Release);
this = local;
}
}
}
match this.state.load(Ordering::Relaxed) {
Self::FULL => {
// SAFETY: We are returning the value only if it was previously full.
unsafe { Some(ManuallyDrop::into_inner(this.value.get().read()).assume_init()) }
}
_ => None,
}
}
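// Stores `value` into the cell and returns the value that was stored before.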
fn swap(&self, value: Option<T>) -> Option<T> {
let mut this = Self::from_option(value);
// both branches of the macro evaluate to the previously stored value
let previous = atomic! {
Self, a,
{
// SAFETY: this block is only executed if `Self` can be transmuted into an atomic type.
// self.state cannot be `LOCKED` here, so we can safely swap the value.
unsafe {
// reinterpret `self` as a reference to the matching atomic type
a = &*(self as *const Self as *const _);
// swap the value atomically; `AcqRel` also synchronises with the store
// that published the previous value
let old = a.swap(mem::transmute_copy(&this), Ordering::AcqRel);
this = mem::transmute_copy(&old);
if this.state.load(Ordering::Relaxed) == Self::FULL {
// SAFETY: We are returning the value only if it was previously full.
Some( ManuallyDrop::into_inner(this.value.into_inner()).assume_init() )
} else {
None
}
}
},
{
// Fallback when `Self` cannot be transmuted into a primitive atomic:
// the cell has to be locked before the value can be swapped.
// Optimistically assume the cell is empty and try to lock it directly.
match self.state.compare_exchange_weak(
Self::EMPTY,
Self::LOCKED,
Ordering::Acquire,
Ordering::Relaxed,
) {
Ok(_) => {
// SAFETY: We are the only thread that can access this cell now.
unsafe {
self.copy_from(&this, Ordering::Relaxed, Ordering::Release);
}
None
}
Err(mut state) => {
let mut spin_wait = SpinWait::new();
let old = loop {
// if the state is `LOCKED`, wait and re-read the state before retrying
if state == Self::LOCKED {
spin_wait.spin();
state = self.state.load(Ordering::Relaxed);
continue;
}
// otherwise, try to lock the cell so the value can be swapped
if self.state.compare_exchange_weak(
state,
Self::LOCKED,
Ordering::Acquire,
Ordering::Relaxed,
).is_ok() {
break state;
} else {
// the state changed, we need to check again
state = self.state.load(Ordering::Relaxed);
continue;
}
};
let old = if old == Self::FULL {
// SAFETY: the cell is locked, and is initialised.
unsafe {
Some(ManuallyDrop::into_inner(self.value.get().read()).assume_init())
}
} else {
None
};
// SAFETY: the cell is locked, so we can safely copy the value
unsafe {
self.copy_from(&this, Ordering::Relaxed, Ordering::Release);
}
old
}
}
}
};
previous
}
}


@@ -7,6 +7,7 @@ extern crate alloc;
#[cfg(any(test, feature = "std"))]
extern crate std;
pub mod atomic;
pub mod cachepadded;
pub mod drop_guard;
pub mod ptr;