// executor/src/job/v2.rs
use core::{
cell::UnsafeCell,
mem::{self, ManuallyDrop, MaybeUninit},
sync::atomic::{AtomicU8, Ordering},
};
use std::thread::Thread;
use parking_lot_core::SpinWait;
use crate::util::SendPtr;
// NOTE(review): currently unused (hence `dead_code`); appears to be a
// scratch type shaped like two machine words with double-word alignment —
// confirm intent (likely a layout stand-in for the job value slot) before
// removing.
#[allow(dead_code)]
#[cfg_attr(target_pointer_width = "64", repr(align(16)))]
#[cfg_attr(target_pointer_width = "32", repr(align(8)))]
#[derive(Debug, Default, Clone, Copy)]
struct Size2([usize; 2]);
struct Value<T>(pub MaybeUninit<Box<MaybeUninit<T>>>);
impl<T> Value<T> {
    /// Extracts the result from the slot.
    ///
    /// # Safety
    /// The slot must have been initialized (by `Job::complete`), and
    /// `inline` must match the `JobState::Inline` flag the owning job was
    /// constructed with: `true` performs a bitwise copy of a `T` stored in
    /// the slot's bytes, `false` takes ownership of the boxed value.
    unsafe fn get(self, inline: bool) -> T {
        if inline {
            // Reinterpret the slot's leading bytes as a `T` by value.
            unsafe { mem::transmute_copy(&self.0) }
        } else {
            // Take the box, then move the `T` out of it.
            unsafe { (*self.0.assume_init()).assume_init() }
        }
    }
}
/// Lifecycle states packed into `Job::state`.
///
/// The low bits hold the lifecycle value; the high bit (`Inline`) is a flag
/// ORed onto it recording whether the result is stored inline in the value
/// slot. NOTE(review): `Finished` (3) overlaps bitwise with `Locked` (1)
/// and `Pending` (2), so lifecycle state must be compared for equality
/// after masking off `Inline`, never bit-tested.
#[repr(u8)]
pub enum JobState {
    /// Not yet claimed for execution (discriminant 0).
    Empty,
    /// Transient lock held while `wait` or `complete` mutates shared slots.
    Locked = 1,
    /// Claimed for execution; result not yet written (discriminant 2).
    Pending,
    /// Result written and published; safe to read (discriminant 3).
    Finished,
    /// Flag bit: result lives inline in the value slot rather than boxed.
    Inline = 1 << (u8::BITS - 1),
}
/// A type-erased unit of work whose result `T` can be awaited via `wait`.
///
/// `S` is the mutable scheduler/context value threaded into the harness.
pub struct Job<T = (), S = ()> {
    /// Packed `JobState` lifecycle value plus the `Inline` flag bit.
    state: AtomicU8,
    /// Type-erased pointer to the closure container (`StackJob`/`HeapJob`).
    this: SendPtr<()>,
    /// Type-erased entry point: runs the closure and completes the job.
    harness: unsafe fn(*const (), *const Job<()>, &mut S),
    /// Result slot: inline `T` or `Box<MaybeUninit<T>>`, discriminated by
    /// the `Inline` state bit.
    maybe_boxed_val: UnsafeCell<MaybeUninit<Value<T>>>,
    /// Thread parked in `wait`, unparked by `complete`.
    waiting_thread: UnsafeCell<Option<Thread>>,
}
impl<T, S> Job<T, S> {
    /// Reinterprets this boxed job as `Box<Job<U, V>>`.
    ///
    /// # Safety
    /// The caller must guarantee the stored harness actually produces a `U`
    /// and accepts a `&mut V`. `T`/`U` being `Sized` keeps the value slot
    /// pointer-sized in both layouts.
    pub unsafe fn cast_box<U, V>(self: Box<Self>) -> Box<Job<U, V>>
    where
        T: Sized,
        U: Sized,
    {
        let ptr = Box::into_raw(self);
        Box::from_raw(ptr.cast())
    }

    /// Reinterprets a borrow of this job as `&Job<U, V>`.
    ///
    /// # Safety
    /// Same contract as [`Job::cast_box`].
    pub unsafe fn cast<U, V>(self: &Self) -> &Job<U, V>
    where
        T: Sized,
        U: Sized,
    {
        // SAFETY: both T and U are sized, so Box<T> and Box<U> should be the
        // same size as well.
        unsafe { mem::transmute(self) }
    }

    /// Identity of the job: closure pointer plus harness pointer.
    pub fn id(&self) -> impl Eq {
        (self.this, self.harness)
    }

    /// Current lifecycle state with the `Inline` flag bit masked off.
    pub fn state(&self) -> u8 {
        self.state.load(Ordering::Relaxed) & !(JobState::Inline as u8)
    }

    /// Blocks the current thread until the job finishes, then takes the
    /// result out of the value slot.
    ///
    /// Briefly locks the state to install the current thread handle, then
    /// parks; `complete` unparks it after publishing the result.
    pub fn wait(&self) -> T {
        let mut state = self.state.load(Ordering::Relaxed);
        // The Inline bit is set once at construction and never changes;
        // carry it through every CAS so only lifecycle bits are compared.
        let mask = JobState::Inline as u8;
        let mut spin = SpinWait::new();
        loop {
            match self.state.compare_exchange(
                JobState::Pending as u8 | (state & mask),
                JobState::Locked as u8 | (state & mask),
                Ordering::Acquire,
                // FIX: failure ordering must be Acquire, not Relaxed — when
                // we observe `Finished` below we immediately read the value
                // written by `complete`, so this load has to synchronize
                // with complete's Release store.
                Ordering::Acquire,
            ) {
                Ok(x) => {
                    state = x;
                    // We hold the lock, so installing the handle cannot race
                    // with `complete` taking it.
                    unsafe {
                        *self.waiting_thread.get() = Some(std::thread::current());
                    }
                    self.state
                        .store(JobState::Pending as u8 | (state & mask), Ordering::Release);
                    std::thread::park();
                    spin.reset();
                    continue;
                }
                Err(x) => {
                    // FIX: compare the masked state for equality. The old
                    // bit-test `x & Finished != 0` also fired for `Locked`
                    // (1) and `Pending` (2) because `Finished` is 3, letting
                    // a waiter read the slot while `complete` still held the
                    // lock and had not written the value yet.
                    if x & !mask == JobState::Finished as u8 {
                        let val = unsafe {
                            let value = (&*self.maybe_boxed_val.get()).assume_init_read();
                            value.get(x & mask != 0)
                        };
                        return val;
                    } else {
                        spin.spin();
                    }
                }
            }
        }
    }

    /// call this when popping value from local queue
    ///
    /// Transitions `Empty` -> `Pending`, spinning until the CAS succeeds.
    pub fn set_pending(&self) {
        let state = self.state.load(Ordering::Relaxed);
        let mask = JobState::Inline as u8;
        let mut spin = SpinWait::new();
        loop {
            match self.state.compare_exchange(
                JobState::Empty as u8 | (state & mask),
                JobState::Pending as u8 | (state & mask),
                Ordering::Acquire,
                Ordering::Relaxed,
            ) {
                Ok(_) => {
                    return;
                }
                Err(_) => {
                    spin.spin();
                }
            }
        }
    }

    /// Runs the job's harness with the scheduler context `s`.
    pub fn execute(&self, s: &mut S) {
        // SAFETY: self is non-null
        unsafe { (self.harness)(self.this.as_ptr().cast(), (self as *const Self).cast(), s) };
    }

    /// Stores the result, wakes any parked waiter, and marks `Finished`.
    ///
    /// Called by the harness while the job is `Pending`.
    fn complete(&self, result: T) {
        let mut state = self.state.load(Ordering::Relaxed);
        let mask = JobState::Inline as u8;
        let mut spin = SpinWait::new();
        // Acquire the lock (Pending -> Locked) so `wait` cannot install its
        // thread handle while we write the value slot.
        loop {
            match self.state.compare_exchange(
                JobState::Pending as u8 | (state & mask),
                JobState::Locked as u8 | (state & mask),
                Ordering::Acquire,
                Ordering::Relaxed,
            ) {
                Ok(x) => {
                    state = x;
                    break;
                }
                Err(_) => {
                    spin.spin();
                }
            }
        }
        unsafe {
            let value = (&mut *self.maybe_boxed_val.get()).assume_init_mut();
            if state & JobState::Inline as u8 == 0 {
                // Result does not fit the slot: box it on the heap.
                value.0 = MaybeUninit::new(Box::new(MaybeUninit::new(result)));
            } else {
                // Result fits inline: write it directly into the slot's
                // bytes. FIX: use a raw-pointer `write` — the previous
                // `*transmute::<_, &mut T>(..) = result` assignment dropped
                // the uninitialized garbage already in the slot, which is UB
                // for any `T` with a destructor.
                (&mut value.0 as *mut MaybeUninit<Box<MaybeUninit<T>>>)
                    .cast::<T>()
                    .write(result);
            }
        }
        if let Some(thread) = unsafe { &mut *self.waiting_thread.get() }.take() {
            thread.unpark();
        }
        // Publish the result: Release pairs with the Acquire in `wait`.
        self.state
            .store(JobState::Finished as u8 | (state & mask), Ordering::Release);
    }
}
impl Job {}
/// Owns a closure on the heap; converted into a type-erased, heap-allocated
/// `Job` via `into_boxed_job`.
pub struct HeapJob<F> {
    // The closure; moved out by the harness when the job runs.
    f: F,
}
impl<F> HeapJob<F> {
    /// Heap-allocates the closure.
    pub fn new(f: F) -> Box<Self> {
        Box::new(Self { f })
    }

    /// Converts this heap job into a type-erased `Job`.
    ///
    /// The job's `this` pointer is the leaked `HeapJob` allocation; the
    /// harness re-boxes it (taking ownership back), runs the closure, and
    /// publishes the result through `Job::complete`. The result type is
    /// erased to `()` in the returned job; callers that know `T` recover it
    /// via `Job::cast`.
    pub fn into_boxed_job<T, S>(self: Box<Self>) -> Box<Job<(), S>>
    where
        F: FnOnce(&mut S) -> T + Send,
        T: Send,
    {
        // Type-erased entry point stored in `Job::harness`.
        unsafe fn harness<F, T, S>(this: *const (), job: *const Job<()>, s: &mut S)
        where
            F: FnOnce(&mut S) -> T + Send,
            T: Sized + Send,
        {
            let job = unsafe { &*job.cast::<Job<T>>() };
            // Reclaim ownership of the allocation leaked below, so the
            // HeapJob is freed when this function returns.
            let this = unsafe { Box::from_raw(this.cast::<HeapJob<F>>().cast_mut()) };
            let f = this.f;
            job.complete(f(s));
        }
        // Results that fit a pointer's size and alignment are stored inline
        // in the value slot (Inline flag); larger results are boxed later by
        // `complete`.
        let size = mem::size_of::<T>();
        let align = mem::align_of::<T>();
        let new_state = if size > mem::size_of::<Box<T>>() || align > mem::align_of::<Box<T>>() {
            JobState::Empty as u8
        } else {
            JobState::Inline as u8
        };
        Box::new(Job {
            state: AtomicU8::new(new_state),
            this: SendPtr::new(Box::into_raw(self)).unwrap().cast(),
            waiting_thread: UnsafeCell::new(None),
            harness: harness::<F, T, S>,
            maybe_boxed_val: UnsafeCell::new(MaybeUninit::uninit()),
        })
    }
}
// A job "probes" true exactly when its result has been published.
impl<T, S> crate::latch::Probe for &Job<T, S> {
    fn probe(&self) -> bool {
        let current = self.state();
        JobState::Finished as u8 == current
    }
}
/// Holds a closure by value (typically on the caller's stack) so a `Job`
/// can point at it without allocating.
pub struct StackJob<F> {
    // ManuallyDrop because `unwrap` moves the closure out through a shared
    // reference; UnsafeCell provides the required interior mutability.
    f: UnsafeCell<ManuallyDrop<F>>,
}
impl<F> StackJob<F> {
    /// Wraps the closure for later one-shot extraction.
    pub fn new(f: F) -> Self {
        Self {
            f: UnsafeCell::new(ManuallyDrop::new(f)),
        }
    }

    /// Moves the closure out of the cell.
    ///
    /// # Safety
    /// Must be called at most once (a second call would duplicate `F`), and
    /// no other reference to the closure may be alive during the call.
    pub unsafe fn unwrap(&self) -> F {
        unsafe { ManuallyDrop::take(&mut *self.f.get()) }
    }

    /// Builds a `Job` whose harness points back into this `StackJob`.
    ///
    /// NOTE(review): the returned job stores a raw pointer to `self`, so it
    /// must be executed before this `StackJob` is dropped or moved —
    /// presumably enforced by the executor's scoping; confirm at call sites.
    pub fn as_job<T, S>(&self) -> Job<T, S>
    where
        F: FnOnce(&mut S) -> T + Send,
        T: Send,
    {
        // Type-erased entry point: recovers the StackJob, takes the closure
        // out exactly once, runs it, and publishes the result.
        unsafe fn harness<F, T, S>(this: *const (), job: *const Job<()>, s: &mut S)
        where
            F: FnOnce(&mut S) -> T + Send,
            T: Sized + Send,
        {
            let job = unsafe { &*job.cast::<Job<T>>() };
            let this = unsafe { &*this.cast::<StackJob<F>>() };
            let f = unsafe { this.unwrap() };
            job.complete(f(s));
        }
        // Results that fit a pointer's size and alignment are stored inline
        // in the value slot (Inline flag); larger results are boxed later by
        // `complete`.
        let size = mem::size_of::<T>();
        let align = mem::align_of::<T>();
        let new_state = if size > mem::size_of::<Box<T>>() || align > mem::align_of::<Box<T>>() {
            JobState::Empty as u8
        } else {
            JobState::Inline as u8
        };
        Job {
            state: AtomicU8::new(new_state),
            this: SendPtr::new(self).unwrap().cast(),
            waiting_thread: UnsafeCell::new(None),
            harness: harness::<F, T, S>,
            maybe_boxed_val: UnsafeCell::new(MaybeUninit::uninit()),
        }
    }
}