diff --git a/src/ptr.rs b/src/ptr.rs
index 411d636..d4a65f6 100644
--- a/src/ptr.rs
+++ b/src/ptr.rs
@@ -2,13 +2,17 @@ use core::{
     cmp::Ordering,
     fmt, hash,
     marker::{PhantomData, Send},
-    mem,
+    mem::{self, ManuallyDrop},
     num::NonZero,
     ops::{Deref, DerefMut},
+    pin::Pin,
     ptr::NonNull,
     sync::atomic::{self, AtomicPtr},
 };
 
+/// This is a wrapper around `NonNull` that is `Send` even if `T` is not
+/// `Send`. This is useful for types that use `NonNull` internally but are
+/// safe to send to other threads.
 #[repr(transparent)]
 pub struct SendNonNull<T>(NonNull<T>);
 
@@ -99,6 +103,7 @@ impl<T> DerefMut for SendNonNull<T> {
 }
 
 impl<T> SendNonNull<T> {
+    /// Creates a new `SendNonNull` if `ptr` is non-null, otherwise returns `None`.
     pub const fn new(ptr: *mut T) -> Option<Self> {
         match NonNull::new(ptr) {
             Some(ptr) => Some(Self(ptr)),
@@ -106,14 +111,17 @@ impl<T> SendNonNull<T> {
         }
     }
 
+    /// Creates a new `SendNonNull` that is dangling.
     pub const fn dangling() -> Self {
         Self(NonNull::dangling())
     }
 
+    /// Casts the pointer to a different type
     pub const fn cast<U>(self) -> SendNonNull<U> {
         SendNonNull(self.0.cast())
     }
 
+    /// Creates a new `SendNonNull` with the given address, keeping the provenance of `self`.
     pub fn with_addr(self, addr: NonZero<usize>) -> Self {
         // SAFETY: addr is non-zero, so the pointer is valid.
         unsafe {
@@ -123,11 +131,17 @@ impl<T> SendNonNull<T> {
         }
     }
 
+    /// Maps the address of the pointer using the given function, keeping the provenance of `self`.
     pub fn map_addr(self, f: impl FnOnce(NonZero<usize>) -> NonZero<usize>) -> Self {
         // SAFETY: addr is non-zero, so the pointer is valid.
        self.with_addr(f(self.addr()))
     }
 
+    /// Returns a new pointer, offset from `self` by `offset` elements.
+    ///
+    /// # Safety
+    ///
+    /// The caller must ensure that the resulting pointer points at the same allocation as `self`.
     pub unsafe fn offset(self, offset: isize) -> Self {
         // SAFETY: self is a valid pointer, offset is guaranteed to point to a valid memory location by the contract of `offset`
         unsafe { Self(NonNull::new_unchecked(self.as_ptr().offset(offset))) }
@@ -453,6 +467,85 @@ impl TaggedAtomicPtr {
     }
 }
 
+#[repr(transparent)]
+pub struct UniquePtr<'a, T> {
+    ptr: NonNull<T>,
+    _marker: PhantomData<&'a mut T>,
+}
+
+impl<'a, T> UniquePtr<'a, T> {
+    #[inline]
+    pub fn map<F, U>(value: T, f: F) -> U
+    where
+        F: FnOnce(UniquePtr<'_, T>) -> U,
+    {
+        let mut inner = ManuallyDrop::new(value);
+        let this = UniquePtr::new(&mut inner);
+        f(this)
+    }
+
+    pub fn new_pinned(inner: Pin<&'a mut ManuallyDrop<T>>) -> Pin<Self> {
+        // SAFETY: `inner` is pinned, so it must remain pinned for the lifetime of `Self`.
+        unsafe {
+            Pin::new_unchecked(Self {
+                ptr: NonNull::new_unchecked(core::mem::transmute::<_, _>(inner)),
+                _marker: PhantomData,
+            })
+        }
+    }
+
+    pub fn new(inner: &'a mut ManuallyDrop<T>) -> Self {
+        Self {
+            ptr: NonNull::from(&mut **inner),
+            _marker: PhantomData,
+        }
+    }
+
+    pub unsafe fn new_unchecked(ptr: *mut T) -> Self {
+        Self {
+            ptr: unsafe { NonNull::new_unchecked(ptr) },
+            _marker: PhantomData,
+        }
+    }
+
+    pub fn as_ptr(&self) -> *mut T {
+        self.ptr.as_ptr()
+    }
+
+    pub fn as_non_null(&self) -> NonNull<T> {
+        self.ptr
+    }
+
+    pub unsafe fn cast<U>(self) -> UniquePtr<'a, U> {
+        UniquePtr {
+            ptr: self.ptr.cast(),
+            _marker: PhantomData,
+        }
+    }
+}
+
+impl<'a, T> Deref for UniquePtr<'a, T> {
+    type Target = T;
+
+    fn deref(&self) -> &Self::Target {
+        unsafe { self.ptr.as_ref() }
+    }
+}
+
+impl<'a, T> DerefMut for UniquePtr<'a, T> {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        unsafe { self.ptr.as_mut() }
+    }
+}
+
+impl<'a, T> Drop for UniquePtr<'a, T> {
+    fn drop(&mut self) {
+        unsafe {
+            core::ptr::drop_in_place(&raw mut **self);
+        }
+    }
+}
+
 #[cfg(test)]
 mod tests {
     use core::sync::atomic::Ordering;