diff --git a/src/praetor/mod.rs b/src/praetor/mod.rs
index b7ab61f..064c567 100644
--- a/src/praetor/mod.rs
+++ b/src/praetor/mod.rs
@@ -207,47 +207,52 @@ mod job {
     #[derive(Debug, Default, Clone, Copy)]
     struct Size2([usize; 2]);
 
-    struct Value<T>(pub MaybeUninit<Box<MaybeUninit<T>>>);
+    pub struct Value<T>(pub MaybeUninit<Box<MaybeUninit<T>>>);
 
     impl<T> Value<T> {
         /// must only be called once. takes a reference so this can be called in
         /// drop()
         unsafe fn get_unchecked(&self, inline: bool) -> T {
             if inline {
-                unsafe { mem::transmute_copy(&self.0) }
+                unsafe { mem::transmute_copy::<MaybeUninit<Box<MaybeUninit<T>>>, T>(&self.0) }
             } else {
-                unsafe { (*self.0.assume_init_read()).assume_init() }
+                unsafe {
+                    let inner = *self.0.assume_init_read();
+                    inner.assume_init()
+                }
             }
         }
 
-        fn get(self) -> T {
+        pub fn get(self) -> T {
+            let this = ManuallyDrop::new(self);
             let inline = Self::is_inline();
             // SAFETY: inline is correctly calculated and this function
             // consumes `self`
-            unsafe { self.get_unchecked(inline) }
+            unsafe { this.get_unchecked(inline) }
        }
 
-        fn is_inline() -> bool {
+        pub fn is_inline() -> bool {
             // the value can be stored inline iff the size of T is equal or
             // smaller than the size of the boxed type and the alignment of the
             // boxed type is an integer multiple of the alignment of T
-            mem::size_of::<T>() > mem::size_of::<Box<MaybeUninit<T>>>()
-                || mem::align_of::<Box<MaybeUninit<T>>>() % mem::align_of::<T>() != 0
+            mem::size_of::<T>() < mem::size_of::<Box<MaybeUninit<T>>>()
+                && mem::align_of::<Box<MaybeUninit<T>>>() % mem::align_of::<T>() == 0
         }
 
-        fn new(value: T) -> Self {
+        pub fn new(value: T) -> Self {
             let inline = Self::is_inline();
 
             // SAFETY: we know the box is allocated if state was `Pending`.
             if inline {
-                Self(MaybeUninit::new(Box::new(MaybeUninit::new(value))))
-            } else {
                 let mut this = Self(MaybeUninit::uninit());
                 unsafe {
-                    *mem::transmute::<_, &mut T>(&mut this.0) = value;
+                    *mem::transmute::<&mut MaybeUninit<Box<MaybeUninit<T>>>, &mut T>(&mut this.0) =
+                        value;
                 }
                 this
+            } else {
+                Self(MaybeUninit::new(Box::new(MaybeUninit::new(value))))
             }
         }
     }
@@ -298,15 +303,13 @@ mod job {
 
             // head and tail point at themselves
             unsafe {
-                (&mut *head.err_or_link.get()).link.next =
-                    NonNull::new_unchecked((&raw const *head).cast_mut());
+                (&mut *head.err_or_link.get()).link.next = None;
                 (&mut *head.err_or_link.get()).link.prev =
-                    NonNull::new_unchecked((&raw const *tail).cast_mut());
+                    Some(NonNull::new_unchecked((&raw const *tail).cast_mut()));
                 (&mut *tail.err_or_link.get()).link.next =
-                    NonNull::new_unchecked((&raw const *head).cast_mut());
-                (&mut *tail.err_or_link.get()).link.prev =
-                    NonNull::new_unchecked((&raw const *tail).cast_mut());
+                    Some(NonNull::new_unchecked((&raw const *head).cast_mut()));
+                (&mut *tail.err_or_link.get()).link.prev = None;
             }
 
             Self { head, tail }
@@ -329,52 +332,50 @@ mod job {
         pub unsafe fn push_front(&mut self, elem: Pin<&Job>) {
             let head_link = unsafe { self.head.link_mut() };
 
-            let prev = head_link.prev;
+            // SAFETY: head will always have a previous element.
+            let prev = head_link.prev.unwrap();
             let prev_link = unsafe { prev.as_ref().link_mut() };
 
             let elem_ptr = unsafe { NonNull::new_unchecked(&*elem as *const Job as *mut Job) };
-            head_link.prev = elem_ptr;
-            prev_link.next = elem_ptr;
+            head_link.prev = Some(elem_ptr);
+            prev_link.next = Some(elem_ptr);
 
             let elem_link = unsafe { elem.link_mut() };
-            elem_link.prev = prev;
-            elem_link.next = self.head();
+            elem_link.prev = Some(prev);
+            elem_link.next = Some(self.head());
         }
 
         /// elem must be valid until it is popped.
         pub unsafe fn push_back(&mut self, elem: Pin<&Job>) {
             let tail_link = unsafe { self.tail.link_mut() };
 
-            let next = tail_link.next;
+            // SAFETY: tail will always have a next element.
+            let next = tail_link.next.unwrap();
             let next_link = unsafe { next.as_ref().link_mut() };
 
             let elem_ptr = unsafe { NonNull::new_unchecked(&*elem as *const Job as *mut Job) };
-            tail_link.next = elem_ptr;
-            next_link.prev = elem_ptr;
+            tail_link.next = Some(elem_ptr);
+            next_link.prev = Some(elem_ptr);
 
             let elem_link = unsafe { elem.link_mut() };
-            elem_link.next = next;
-            elem_link.prev = self.tail();
+            elem_link.next = Some(next);
+            elem_link.prev = Some(self.tail());
         }
 
         pub fn pop_front(&mut self) -> Option<NonNull<Job>> {
             let head_link = unsafe { self.head.link_mut() };
 
             // SAFETY: head will always have a previous element.
-            let elem = head_link.prev;
+            let elem = head_link.prev.unwrap();
             let elem_link = unsafe { elem.as_ref().link_mut() };
 
-            let prev = elem_link.prev.as_ptr();
-            head_link.prev = unsafe { NonNull::new_unchecked(prev) };
+            let prev = elem_link.prev?.as_ptr();
+            head_link.prev = unsafe { Some(NonNull::new_unchecked(prev)) };
 
             let prev_link = unsafe { (&*prev).link_mut() };
-            prev_link.next = self.head();
+            prev_link.next = Some(self.head());
 
-            if elem == self.tail() {
-                None
-            } else {
-                Some(elem)
-            }
+            Some(elem)
         }
 
         pub fn pop_back(&mut self) -> Option<NonNull<Job>> {
@@ -382,20 +383,16 @@ mod job {
             let tail_link = unsafe { self.tail.link_mut() };
 
             // SAFETY: head will always have a previous element.
-            let elem = tail_link.next;
+            let elem = tail_link.next.unwrap();
             let elem_link = unsafe { elem.as_ref().link_mut() };
 
-            let next = elem_link.next.as_ptr();
-            tail_link.next = unsafe { NonNull::new_unchecked(next) };
+            let next = elem_link.next?.as_ptr();
+            tail_link.next = unsafe { Some(NonNull::new_unchecked(next)) };
 
             let next_link = unsafe { (&*next).link_mut() };
-            next_link.prev = self.tail();
+            next_link.prev = Some(self.tail());
 
-            if elem == self.head() {
-                None
-            } else {
-                Some(elem)
-            }
+            Some(elem)
         }
     }
 
@@ -407,8 +404,8 @@ mod job {
     #[derive(Debug, PartialEq, Eq)]
     struct Link {
-        prev: NonNull<Job>,
-        next: NonNull<Job>,
+        prev: Option<NonNull<Job>>,
+        next: Option<NonNull<Job>>,
     }
 
     impl Clone for Link {
@@ -500,8 +497,8 @@ mod job {
                 val_or_this: UnsafeCell::new(ValueOrThis { this }),
                 err_or_link: UnsafeCell::new(LinkOrError {
                     link: Link {
-                        prev: NonNull::dangling(),
-                        next: NonNull::dangling(),
+                        prev: None,
+                        next: None,
                     },
                 }),
                 phantom: PhantomPinned,
             }
         }
 
         pub fn empty() -> Job {
             Self {
-                harness_and_state: TaggedAtomicPtr::new(ptr::dangling_mut(), 0),
+                harness_and_state: TaggedAtomicPtr::new(
+                    ptr::dangling_mut(),
+                    JobState::Empty as usize,
+                ),
                 val_or_this: UnsafeCell::new(ValueOrThis {
                     this: NonNull::dangling(),
                 }),
                 err_or_link: UnsafeCell::new(LinkOrError {
                     link: Link {
-                        prev: NonNull::dangling(),
-                        next: NonNull::dangling(),
+                        prev: None,
+                        next: None,
                     },
                 }),
                 phantom: PhantomPinned,
             }
         }
 
@@ -528,17 +528,19 @@ mod job {
         }
 
         /// assumes job is in joblist
-        pub unsafe fn unlink(&self) {
+        pub unsafe fn unlink(&self) -> Option<()> {
             unsafe {
                 let link = self.link_mut();
-                link.prev.as_ref().link_mut().next = link.next;
-                link.next.as_ref().link_mut().prev = link.prev;
+                link.prev?.as_ref().link_mut().next = link.next;
+                link.next?.as_ref().link_mut().prev = link.prev;
             }
+            Some(())
         }
 
         pub fn state(&self) -> u8 {
             self.harness_and_state.tag(Ordering::Relaxed) as u8
         }
+
         pub fn wait(&self) -> std::thread::Result {
             let mut spin = SpinWait::new();
             loop {
                 match self.harness_and_state.compare_exchange_weak(
                     JobState::Pending as usize,
                     JobState::Pending as usize,
                     Ordering::Acquire,
                     Ordering::Relaxed,
                 ) {
                     // if still pending, sleep until completed
-                    Ok(_) => {
+                    Ok(state) => {
+                        assert_eq!(state, JobState::Pending as usize);
                         unsafe {
                             *(&mut *self.err_or_link.get()).waker = Some(std::thread::current());
                         }
@@ -568,6 +571,8 @@ mod job {
                         continue;
                     }
                     Err(state) => {
+                        assert_ne!(state, JobState::Pending as usize);
+
                         if state == JobState::Finished as usize {
                             let err = unsafe { (&mut *self.err_or_link.get()).error.take() };
@@ -606,14 +611,17 @@ mod job {
                     Ordering::Acquire,
                     Ordering::Relaxed,
                 ) {
-                    Ok(_) => {
+                    Ok(state) => {
+                        assert_eq!(state, JobState::Empty as usize);
                         // set waker to None
                         unsafe {
                             (&mut *self.err_or_link.get()).waker = ManuallyDrop::new(None);
                         }
                         return;
                     }
-                    Err(_) => {
+                    Err(state) => {
+                        assert_ne!(state, JobState::Empty as usize);
+
                         eprintln!("######## what the sigma?");
                         spin.spin();
                     }
@@ -624,8 +632,10 @@ mod job {
         pub fn execute(&self) {
             // SAFETY: self is non-null
             unsafe {
-                let harness: unsafe fn(*const (), *const Self) =
-                    mem::transmute(self.harness_and_state.ptr(Ordering::Relaxed).as_ptr());
+                let (ptr, state) = self.harness_and_state.ptr_and_tag(Ordering::Relaxed);
+                assert_eq!(state, JobState::Pending as usize);
+
+                let harness: unsafe fn(*const (), *const Self) = mem::transmute(ptr.as_ptr());
                 let this = (*self.val_or_this.get()).this;
 
                 eprintln!("{harness:?}({this:?}, {:?})", self as *const Self);
@@ -643,14 +653,17 @@ mod job {
                     Ordering::Acquire,
                     Ordering::Relaxed,
                 ) {
-                    Ok(_) => {
+                    Ok(state) => {
+                        assert_eq!(state, JobState::Pending as usize);
                         break;
                     }
-                    Err(tag) => {
-                        eprintln!(
-                            "complete(): spin waiting for lock to complete: ({:?})",
-                            JobState::from_u8(tag as u8).unwrap()
-                        );
+                    Err(state) => {
+                        assert_ne!(state, JobState::Pending as usize);
+                        // eprintln!(
+                        //     "complete(): spin waiting for lock to complete with {:?}: ({:?})",
+                        //     result.as_ref().map(|_| ()),
+                        //     JobState::from_u8(state as u8).unwrap()
+                        // );
                         spin.spin();
                     }
                 }
@@ -953,8 +966,8 @@ impl Scope {
         if let Some(job) = self.pop_back() {
             unsafe {
                 job.as_ref().set_pending();
+                eprintln!("sharing {job:?} {:#?}", job.as_ref());
             }
-            eprintln!("sharing {job:?}");
             guard.jobs.insert(self.index, job);
             self.context.shared_job.notify_one();
         }
@@ -979,14 +992,22 @@ impl Scope {
         // }
 
         while job.state() != JobState::Finished as u8 {
-            let Some(job) = self.pop_front().or_else(|| {
-                self.context
-                    .shared
-                    .lock()
-                    .jobs
-                    .pop_first()
-                    .map(|(_, job)| job)
-            }) else {
+            let Some(job) =
+                // self
+                //     .pop_front()
+                //     .inspect(|job| unsafe {
+                //         job.as_ref().set_pending();
+                //     })
+                None
+                    .or_else(|| {
+                        self.context
+                            .shared
+                            .lock()
+                            .jobs
+                            .pop_first()
+                            .map(|(_, job)| job)
+                    })
+            else {
                 // no more jobs, sleep instead
                 break;
            };
diff --git a/src/praetor/tests.rs b/src/praetor/tests.rs
index 6c6a133..22ade42 100644
--- a/src/praetor/tests.rs
+++ b/src/praetor/tests.rs
@@ -170,3 +170,114 @@ fn tagged_ptr_exchange_failure() {
     assert_eq!(ptr.tag(Ordering::Relaxed), 1);
     assert_eq!(ptr.ptr(Ordering::Relaxed).as_ptr(), boxed);
 }
+
+#[test]
+fn value_inline() {
+    let val = Value::new(3usize);
+
+    let inner = val.get();
+    assert_eq!(inner, 3usize);
+}
+
+#[test]
+fn value_inline_struct() {
+    #[derive(Default, PartialEq, Debug)]
+    struct Small {
+        c: f32,
+    }
+    let val = Value::new(Small { c: 3.2 });
+
+    let inner = val.get();
+    assert_eq!(inner.c, 3.2);
+}
+
+#[test]
+fn value_boxed() {
+    #[derive(Default, PartialEq, Debug)]
+    struct Big {
+        a: usize,
+        b: f32,
+        c: u32,
+    }
+    let val = Value::new(Big {
+        a: 42,
+        b: 2.25,
+        c: 7,
+    });
+
+    let inner = val.get();
+    assert_eq!(
+        inner,
+        Big {
+            a: 42,
+            b: 2.25,
+            c: 7
+        }
+    );
+}
+
+#[test]
+fn value_inline_drop() {
+    #[derive(PartialEq, Debug)]
+    #[repr(transparent)]
+    struct Small<'a> {
+        inner: &'a mut usize,
+    }
+    impl Drop for Small<'_> {
+        fn drop(&mut self) {
+            *self.inner += 1;
+        }
+    }
+    let mut dropped = 0;
+    {
+        let inner = {
+            let val = Value::new(Small {
+                inner: &mut dropped,
+            });
+
+            val.get()
+        };
+        assert_eq!(*inner.inner, 0);
+    }
+    assert_eq!(dropped, 1);
+    {
+        let _val = Value::new(Small {
+            inner: &mut dropped,
+        });
+    }
+    assert_eq!(dropped, 2);
+}
+
+#[test]
+fn value_boxed_drop() {
+    #[derive(PartialEq, Debug)]
+    struct Big<'a> {
+        inner: &'a mut usize,
+        pad: [usize; 3],
+    }
+    impl Drop for Big<'_> {
+        fn drop(&mut self) {
+            *self.inner += 1;
+        }
+    }
+    let mut dropped = 0;
+    {
+        let inner = {
+            let val = Value::new(Big {
+                inner: &mut dropped,
+                pad: [0; 3],
+            });
+
+            val.get()
+        };
+        assert_eq!(*inner.inner, 0);
+    }
+    assert_eq!(dropped, 1);
+    {
+        let _val = Value::new(Big {
+            inner: &mut dropped,
+            pad: [0; 3],
+        });
+    }
+    assert_eq!(dropped, 2);
+}
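
Reviewer note on the `Value::is_inline` change: the old predicate answered "cannot be stored inline", so small values took the boxed path and large ones the inline path; the patch inverts it and swaps the branches in `new` to match. The decision rule can be exercised on its own. The sketch below is illustrative only and not part of the patch; `fits_inline` is a made-up name for what `Value::is_inline` computes.

use std::mem::{align_of, size_of, MaybeUninit};

// Same rule as the patched `Value::is_inline`: `T` may live directly in the
// slot that would otherwise hold a `Box<MaybeUninit<T>>` when it is strictly
// smaller than that slot and the slot's alignment is a multiple of `T`'s.
fn fits_inline<T>() -> bool {
    size_of::<T>() < size_of::<Box<MaybeUninit<T>>>()
        && align_of::<Box<MaybeUninit<T>>>() % align_of::<T>() == 0
}

fn main() {
    // A bare f32 (4 bytes, align 4) fits in the pointer-sized slot.
    assert!(fits_inline::<f32>());
    // Anything wider than a pointer stays boxed, like the `Big` structs in
    // the new tests.
    assert!(!fits_inline::<[usize; 3]>());
    // With the strict `<`, a pointer-sized usize also takes the boxed path;
    // the `value_inline` test still passes because `get()` round-trips the
    // value either way.
    assert!(!fits_inline::<usize>());
    println!("inline-storage checks hold");
}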
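
Reviewer note on the `Link` change: `prev`/`next` become `Option<NonNull<Job>>`, the head/tail sentinels keep `None` on their outward ends, and `pop_front`/`pop_back` now detect an empty list by hitting a `None` neighbour through `?` instead of comparing the popped node against the opposite sentinel. Below is a safe, index-based sketch of the same link dance; the names and the `Vec`-backed storage are invented for illustration and are not part of the patch.

// Head/tail sentinels with Option links, mirroring the patched `JobList`:
// `head.prev` reaches toward the front element, `tail.next` toward the back,
// and `None` marks the outward ends. Popping follows one neighbour link with
// `?`, exactly like `elem_link.prev?` in the new `pop_front`.
struct DemoLink {
    prev: Option<usize>,
    next: Option<usize>,
}

struct DemoList {
    // Slot 0 is the head sentinel, slot 1 the tail sentinel. Popped slots are
    // simply abandoned; this sketch only demonstrates the link updates.
    nodes: Vec<(i32, DemoLink)>,
}

impl DemoList {
    fn new() -> Self {
        let head = DemoLink { prev: Some(1), next: None }; // empty: head sees tail
        let tail = DemoLink { prev: None, next: Some(0) }; // empty: tail sees head
        Self { nodes: vec![(0, head), (0, tail)] }
    }

    fn push_front(&mut self, value: i32) {
        let idx = self.nodes.len();
        let prev = self.nodes[0].1.prev.unwrap(); // head always has a prev
        self.nodes.push((value, DemoLink { prev: Some(prev), next: Some(0) }));
        self.nodes[prev].1.next = Some(idx);
        self.nodes[0].1.prev = Some(idx);
    }

    fn pop_front(&mut self) -> Option<i32> {
        let elem = self.nodes[0].1.prev.unwrap();
        // Reaching the tail sentinel yields None here and ends the pop early.
        let prev = self.nodes[elem].1.prev?;
        self.nodes[0].1.prev = Some(prev);
        self.nodes[prev].1.next = Some(0);
        Some(self.nodes[elem].0)
    }
}

fn main() {
    let mut list = DemoList::new();
    assert_eq!(list.pop_front(), None); // empty list stops at the sentinel
    list.push_front(1);
    list.push_front(2);
    assert_eq!(list.pop_front(), Some(2));
    assert_eq!(list.pop_front(), Some(1));
    assert_eq!(list.pop_front(), None);
    println!("sentinel-list checks hold");
}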