use core::{
    borrow::{Borrow, BorrowMut},
    cell::UnsafeCell,
    fmt::Display,
    marker::PhantomData,
    mem::{self, ManuallyDrop, MaybeUninit},
    ops::{Deref, DerefMut},
    ptr::NonNull,
    sync::atomic::{AtomicPtr, Ordering},
};

use alloc::boxed::Box;

/// A guard that runs a closure when it is dropped.
pub struct DropGuard<F: FnOnce()>(UnsafeCell<ManuallyDrop<F>>);

impl<F> DropGuard<F>
where
    F: FnOnce(),
{
    pub fn new(f: F) -> DropGuard<F> {
        Self(UnsafeCell::new(ManuallyDrop::new(f)))
    }
}

impl<F> Drop for DropGuard<F>
where
    F: FnOnce(),
{
    fn drop(&mut self) {
        // SAFETY: We are the only owner of `self.0`, and we ensure that the
        // closure is only called once.
        unsafe {
            ManuallyDrop::take(&mut *self.0.get())();
        }
    }
}

/// A `NonNull<T>` that is unconditionally `Send`. The creator is responsible
/// for ensuring that sending the pointer across threads is actually sound.
#[repr(transparent)]
#[derive(Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct SendPtr<T>(NonNull<T>);

impl<T> Copy for SendPtr<T> {}

impl<T> Clone for SendPtr<T> {
    fn clone(&self) -> Self {
        Self(self.0)
    }
}

impl<T> core::fmt::Pointer for SendPtr<T> {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        <NonNull<T> as core::fmt::Pointer>::fmt(&self.0, f)
    }
}

unsafe impl<T> core::marker::Send for SendPtr<T> {}

impl<T> Deref for SendPtr<T> {
    type Target = NonNull<T>;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl<T> DerefMut for SendPtr<T> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}

impl<T> SendPtr<T> {
    pub const fn new(ptr: *mut T) -> Option<Self> {
        match NonNull::new(ptr) {
            Some(ptr) => Some(Self(ptr)),
            None => None,
        }
    }

    /// # Safety
    /// `ptr` must be non-null.
    #[allow(dead_code)]
    pub const unsafe fn new_unchecked(ptr: *mut T) -> Self {
        unsafe { Self(NonNull::new_unchecked(ptr)) }
    }

    pub const fn new_const(ptr: *const T) -> Option<Self> {
        Self::new(ptr.cast_mut())
    }

    /// # Safety
    /// `ptr` must be non-null.
    #[allow(dead_code)]
    pub const unsafe fn new_const_unchecked(ptr: *const T) -> Self {
        unsafe { Self::new_unchecked(ptr.cast_mut()) }
    }

    /// # Safety
    /// The pointee must be live and valid for the lifetime of the returned
    /// reference.
    pub(crate) unsafe fn as_ref(&self) -> &T {
        unsafe { self.0.as_ref() }
    }
}
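// A minimal usage sketch for `DropGuard` (this example module is ours, not
// part of the original API): the closure does not run until the guard goes
// out of scope, and it then runs exactly once, even on early return or
// unwind.
#[cfg(test)]
mod drop_guard_example {
    use super::*;
    use core::cell::Cell;

    #[test]
    fn closure_runs_exactly_once_on_drop() {
        let runs = Cell::new(0u32);
        {
            let _guard = DropGuard::new(|| runs.set(runs.get() + 1));
            // The guard is still alive here, so the closure has not run yet.
            assert_eq!(runs.get(), 0);
        }
        // Leaving the scope dropped the guard and invoked the closure once.
        assert_eq!(runs.get(), 1);
    }
}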
/// A tagged atomic pointer that can store a pointer and a tag `BITS` wide in
/// the same space as the pointer.
///
/// The tag lives in the pointer's low alignment bits, so the pointee type
/// must satisfy `align_of::<T>() >= 2^BITS`.
#[repr(transparent)]
#[derive(Debug)]
pub struct TaggedAtomicPtr<T, const BITS: u8> {
    ptr: AtomicPtr<()>,
    _pd: PhantomData<T>,
}

impl<T, const BITS: u8> TaggedAtomicPtr<T, BITS> {
    const fn mask() -> usize {
        !(!0usize << BITS)
    }

    pub fn new(ptr: *mut T, tag: usize) -> TaggedAtomicPtr<T, BITS> {
        debug_assert!(core::mem::align_of::<T>().ilog2() as u8 >= BITS);
        let mask = Self::mask();
        Self {
            ptr: AtomicPtr::new(ptr.with_addr((ptr.addr() & !mask) | (tag & mask)).cast()),
            _pd: PhantomData,
        }
    }

    pub fn ptr(&self, order: Ordering) -> NonNull<T> {
        unsafe {
            NonNull::new_unchecked(
                self.ptr
                    .load(order)
                    .map_addr(|addr| addr & !Self::mask())
                    .cast(),
            )
        }
    }

    pub fn tag(&self, order: Ordering) -> usize {
        self.ptr.load(order).addr() & Self::mask()
    }

    pub fn fetch_or_tag(&self, tag: usize, order: Ordering) -> usize {
        let mask = Self::mask();
        let old_ptr = self.ptr.fetch_or(tag & mask, order);
        old_ptr.addr() & mask
    }

    /// Returns the tag and clears it.
    pub fn take_tag(&self, order: Ordering) -> usize {
        let mask = Self::mask();
        let old_ptr = self.ptr.fetch_and(!mask, order);
        old_ptr.addr() & mask
    }

    /// Returns the previous tag.
    #[inline(always)]
    fn compare_exchange_tag_inner(
        &self,
        old: usize,
        new: usize,
        success: Ordering,
        failure: Ordering,
        cmpxchg: fn(
            &AtomicPtr<()>,
            *mut (),
            *mut (),
            Ordering,
            Ordering,
        ) -> Result<*mut (), *mut ()>,
    ) -> Result<usize, usize> {
        let mask = Self::mask();
        let old_ptr = self.ptr.load(failure);

        let old = old_ptr.map_addr(|addr| (addr & !mask) | (old & mask));
        let new = old_ptr.map_addr(|addr| (addr & !mask) | (new & mask));

        let result = cmpxchg(&self.ptr, old, new, success, failure);

        result
            .map(|ptr| ptr.addr() & mask)
            .map_err(|ptr| ptr.addr() & mask)
    }

    /// Returns the previous tag.
    #[allow(dead_code)]
    pub fn compare_exchange_tag(
        &self,
        old: usize,
        new: usize,
        success: Ordering,
        failure: Ordering,
    ) -> Result<usize, usize> {
        self.compare_exchange_tag_inner(
            old,
            new,
            success,
            failure,
            AtomicPtr::<()>::compare_exchange,
        )
    }

    /// Returns the previous tag.
    pub fn compare_exchange_weak_tag(
        &self,
        old: usize,
        new: usize,
        success: Ordering,
        failure: Ordering,
    ) -> Result<usize, usize> {
        self.compare_exchange_tag_inner(
            old,
            new,
            success,
            failure,
            AtomicPtr::<()>::compare_exchange_weak,
        )
    }

    #[allow(dead_code)]
    pub fn set_ptr(&self, ptr: *mut T, success: Ordering, failure: Ordering) {
        let mask = Self::mask();
        let ptr = ptr.cast::<()>();
        loop {
            let old = self.ptr.load(failure);
            let new = ptr.map_addr(|addr| (addr & !mask) | (old.addr() & mask));
            if self
                .ptr
                .compare_exchange_weak(old, new, success, failure)
                .is_ok()
            {
                break;
            }
        }
    }

    pub fn set_tag(&self, tag: usize, success: Ordering, failure: Ordering) {
        let mask = Self::mask();
        loop {
            let ptr = self.ptr.load(failure);
            let new = ptr.map_addr(|addr| (addr & !mask) | (tag & mask));
            if self
                .ptr
                .compare_exchange_weak(ptr, new, success, failure)
                .is_ok()
            {
                break;
            }
        }
    }

    pub fn ptr_and_tag(&self, order: Ordering) -> (NonNull<T>, usize) {
        let mask = Self::mask();
        let ptr = self.ptr.load(order);
        let tag = ptr.addr() & mask;
        let ptr = ptr.map_addr(|addr| addr & !mask);
        let ptr = unsafe { NonNull::new_unchecked(ptr.cast()) };
        (ptr, tag)
    }
}
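// A hedged sketch of how the tag shares the pointer's low alignment bits
// (this example module and its values are ours, not part of the original
// API). With `BITS = 2` and a `u32` pointee (alignment 4), the two low bits
// hold the tag while the remaining bits hold the address, so tag updates
// never disturb the pointer.
#[cfg(test)]
mod tagged_ptr_example {
    use super::*;

    #[test]
    fn tag_shares_low_bits() {
        let boxed = Box::into_raw(Box::new(7u32));
        let tagged = TaggedAtomicPtr::<u32, 2>::new(boxed, 0b01);

        // fetch_or_tag ORs into the tag bits only and returns the old tag.
        assert_eq!(tagged.fetch_or_tag(0b10, Ordering::Relaxed), 0b01);
        assert_eq!(tagged.tag(Ordering::Relaxed), 0b11);

        // take_tag clears the tag but leaves the pointer intact.
        assert_eq!(tagged.take_tag(Ordering::Relaxed), 0b11);
        assert_eq!(tagged.tag(Ordering::Relaxed), 0);
        assert_eq!(tagged.ptr(Ordering::Relaxed).as_ptr(), boxed);

        unsafe {
            _ = Box::from_raw(boxed);
        }
    }
}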
/// A small box that can store a value inline if the size and alignment of `T`
/// are less than or equal to the size and alignment of the boxed type.
/// Typically the slot is `size_of::<usize>()` bytes, but it may be larger if
/// `size_of::<Box<T>>()` is larger than that, as it is for dynamically sized
/// types like `[T]` or `dyn Trait`.
#[derive(Debug)]
#[repr(transparent)]
// We use a box here because a box can be unboxed, while a pointer cannot.
pub struct SmallBox<T>(pub MaybeUninit<Box<T>>);

impl<T: Display> Display for SmallBox<T> {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        (**self).fmt(f)
    }
}

impl<T: Ord> Ord for SmallBox<T> {
    fn cmp(&self, other: &Self) -> core::cmp::Ordering {
        self.as_ref().cmp(other.as_ref())
    }
}

impl<T: PartialOrd> PartialOrd for SmallBox<T> {
    fn partial_cmp(&self, other: &Self) -> Option<core::cmp::Ordering> {
        self.as_ref().partial_cmp(other.as_ref())
    }
}

impl<T: Eq> Eq for SmallBox<T> {}

impl<T: PartialEq> PartialEq for SmallBox<T> {
    fn eq(&self, other: &Self) -> bool {
        self.as_ref().eq(other.as_ref())
    }
}

impl<T: Default> Default for SmallBox<T> {
    fn default() -> Self {
        Self::new(Default::default())
    }
}

impl<T: Clone> Clone for SmallBox<T> {
    fn clone(&self) -> Self {
        Self::new(self.as_ref().clone())
    }
}

impl<T> Deref for SmallBox<T> {
    type Target = T;

    fn deref(&self) -> &Self::Target {
        self.as_ref()
    }
}

impl<T> DerefMut for SmallBox<T> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        self.as_mut()
    }
}

impl<T> AsRef<T> for SmallBox<T> {
    fn as_ref(&self) -> &T {
        Self::as_ref(self)
    }
}

impl<T> AsMut<T> for SmallBox<T> {
    fn as_mut(&mut self) -> &mut T {
        Self::as_mut(self)
    }
}

impl<T> Borrow<T> for SmallBox<T> {
    fn borrow(&self) -> &T {
        &**self
    }
}

impl<T> BorrowMut<T> for SmallBox<T> {
    fn borrow_mut(&mut self) -> &mut T {
        &mut **self
    }
}

impl<T> SmallBox<T> {
    /// Must only be called once. Takes a reference so this can be called in
    /// `drop()`.
    unsafe fn get_unchecked(&self, inline: bool) -> T {
        if inline {
            unsafe { mem::transmute_copy::<MaybeUninit<Box<T>>, T>(&self.0) }
        } else {
            unsafe { *self.0.assume_init_read() }
        }
    }

    pub fn as_ref(&self) -> &T {
        unsafe {
            if Self::is_inline() {
                mem::transmute::<&MaybeUninit<Box<T>>, &T>(&self.0)
            } else {
                self.0.assume_init_ref()
            }
        }
    }

    pub fn as_mut(&mut self) -> &mut T {
        unsafe {
            if Self::is_inline() {
                mem::transmute::<&mut MaybeUninit<Box<T>>, &mut T>(&mut self.0)
            } else {
                self.0.assume_init_mut()
            }
        }
    }

    pub fn into_inner(self) -> T {
        let this = ManuallyDrop::new(self);
        let inline = Self::is_inline();

        // SAFETY: `inline` is correctly calculated and this function
        // consumes `self`.
        unsafe { this.get_unchecked(inline) }
    }

    #[inline(always)]
    pub const fn is_inline() -> bool {
        // The value can be stored inline iff the size of T is equal to or
        // smaller than the size of the boxed type and the alignment of the
        // boxed type is an integer multiple of the alignment of T.
        mem::size_of::<T>() <= mem::size_of::<MaybeUninit<Box<T>>>()
            && mem::align_of::<MaybeUninit<Box<T>>>() % mem::align_of::<T>() == 0
    }

    pub fn new(value: T) -> Self {
        let inline = Self::is_inline();

        if inline {
            let mut this = MaybeUninit::new(Self(MaybeUninit::uninit()));
            unsafe {
                this.as_mut_ptr().cast::<T>().write(value);
                this.assume_init()
            }
        } else {
            Self(MaybeUninit::new(Box::new(value)))
        }
    }
}

impl<T> Drop for SmallBox<T> {
    fn drop(&mut self) {
        // Drop the contained value.
        drop(unsafe { self.get_unchecked(Self::is_inline()) });
    }
}
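// A hedged round-trip sketch for `SmallBox` (this example module is ours,
// not part of the original API): values round-trip identically whether they
// are stored inline or boxed, and `is_inline` is a compile-time property of
// `T`. The size assertions assume a typical 64-bit target, like the tests
// at the bottom of this file.
#[cfg(test)]
mod small_box_example {
    use super::*;

    #[test]
    fn round_trip_inline_and_boxed() {
        // u32 fits in a pointer-sized slot, so it is stored inline.
        let small = SmallBox::new(123u32);
        assert!(SmallBox::<u32>::is_inline());
        assert_eq!(*small, 123);
        assert_eq!(small.into_inner(), 123);

        // [u64; 4] is larger than a Box, so it falls back to heap storage.
        let large = SmallBox::new([1u64, 2, 3, 4]);
        assert!(!SmallBox::<[u64; 4]>::is_inline());
        assert_eq!(large.as_ref(), &[1, 2, 3, 4]);
        assert_eq!(large.into_inner(), [1, 2, 3, 4]);
    }
}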
/// Returns the number of available hardware threads, or 1 if it cannot be
/// determined.
pub fn available_parallelism() -> usize {
    std::thread::available_parallelism()
        .map(|n| n.get())
        .unwrap_or(1)
}

/// A wrapper that unconditionally implements `Send` for its contents.
#[repr(transparent)]
pub struct Send<T>(pub(self) T);

unsafe impl<T> core::marker::Send for Send<T> {}

impl<T> Deref for Send<T> {
    type Target = T;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl<T> DerefMut for Send<T> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}

impl<T> Send<T> {
    /// # Safety
    /// The caller must ensure that `value` is actually safe to send to
    /// another thread.
    pub unsafe fn new(value: T) -> Self {
        Self(value)
    }
}

/// Returns the value of a finished thread, or resumes unwinding on the
/// current thread if the thread panicked.
pub fn unwrap_or_panic<T>(result: std::thread::Result<T>) -> T {
    match result {
        Ok(value) => value,
        Err(payload) => std::panic::resume_unwind(payload),
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn tagged_ptr_zero_tag() {
        let ptr = Box::into_raw(Box::new(42u32));
        // u32 has alignment 4, so up to 2 tag bits are available.
        let tagged_ptr = TaggedAtomicPtr::<u32, 2>::new(ptr, 0);
        assert_eq!(tagged_ptr.tag(Ordering::Relaxed), 0);
        assert_eq!(tagged_ptr.ptr(Ordering::Relaxed).as_ptr(), ptr);

        unsafe {
            _ = Box::from_raw(ptr);
        }
    }

    #[test]
    fn tagged_ptr_exchange() {
        let ptr = Box::into_raw(Box::new(42u32));
        let tagged_ptr = TaggedAtomicPtr::<u32, 2>::new(ptr, 0b11);
        assert_eq!(tagged_ptr.tag(Ordering::Relaxed), 0b11);
        assert_eq!(tagged_ptr.ptr(Ordering::Relaxed).as_ptr(), ptr);

        assert_eq!(
            tagged_ptr
                .compare_exchange_tag(0b11, 0b10, Ordering::Relaxed, Ordering::Relaxed)
                .unwrap(),
            0b11
        );

        assert_eq!(tagged_ptr.tag(Ordering::Relaxed), 0b10);
        assert_eq!(tagged_ptr.ptr(Ordering::Relaxed).as_ptr(), ptr);

        unsafe {
            _ = Box::from_raw(ptr);
        }
    }

    #[test]
    fn value_inline() {
        assert!(SmallBox::<u32>::is_inline(), "u32 should be inline");
        assert!(SmallBox::<u8>::is_inline(), "u8 should be inline");
        assert!(
            SmallBox::<Box<u32>>::is_inline(),
            "Box<u32> should be inline"
        );
        assert!(
            SmallBox::<[u32; 2]>::is_inline(),
            "[u32; 2] should be inline"
        );
        assert!(
            !SmallBox::<[u32; 3]>::is_inline(),
            "[u32; 3] should not be inline"
        );
        assert!(SmallBox::<usize>::is_inline(), "usize should be inline");

        #[repr(C, align(16))]
        struct LargeType(u8);
        assert!(
            !SmallBox::<LargeType>::is_inline(),
            "LargeType should not be inline"
        );

        #[repr(C, align(4))]
        struct SmallType(u8);
        assert!(
            SmallBox::<SmallType>::is_inline(),
            "SmallType should be inline"
        );
    }
}
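// A hedged sketch of `unwrap_or_panic` in use (this example module is ours,
// not part of the original API): a successful thread result unwraps to its
// value, while a panic payload is re-raised on the calling thread via
// `resume_unwind`.
#[cfg(test)]
mod thread_result_example {
    use super::*;

    #[test]
    fn propagates_value() {
        let handle = std::thread::spawn(|| 40 + 2);
        assert_eq!(unwrap_or_panic(handle.join()), 42);
    }

    #[test]
    #[should_panic]
    fn propagates_panic() {
        let handle = std::thread::spawn(|| panic!("boom"));
        // The child's panic payload is resumed here, on the current thread.
        unwrap_or_panic(handle.join());
    }
}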