diff --git a/Cargo.toml b/Cargo.toml
index 3c15d50..cbb91c3 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -7,5 +7,6 @@ edition = "2024"
 default = ["alloc"]
 alloc = []
 std = []
+nightly = []
 
 [dependencies]
diff --git a/src/cachepadded.rs b/src/cachepadded.rs
index 9f4222c..52cf323 100644
--- a/src/cachepadded.rs
+++ b/src/cachepadded.rs
@@ -35,7 +35,7 @@ use core::ops::{Deref, DerefMut};
 /// Alignment and padding:
 ///
 /// ```
-/// use crossbeam_utils::CachePadded;
+/// use werkzeug::CachePadded;
 ///
 /// let array = [CachePadded::new(1i8), CachePadded::new(2i8)];
 /// let addr1 = &*array[0] as *const i8 as usize;
@@ -51,7 +51,7 @@ use core::ops::{Deref, DerefMut};
 /// each other's cache lines:
 ///
 /// ```
-/// use crossbeam_utils::CachePadded;
+/// use werkzeug::CachePadded;
 /// use std::sync::atomic::AtomicUsize;
 ///
 /// struct Queue {
@@ -159,7 +159,7 @@ impl CachePadded {
     /// # Examples
     ///
     /// ```
-    /// use crossbeam_utils::CachePadded;
+    /// use werkzeug::CachePadded;
     ///
     /// let padded_value = CachePadded::new(1);
     /// ```
@@ -172,7 +172,7 @@ impl CachePadded {
     /// # Examples
     ///
     /// ```
-    /// use crossbeam_utils::CachePadded;
+    /// use werkzeug::CachePadded;
     ///
     /// let padded_value = CachePadded::new(7);
     /// let value = padded_value.into_inner();
diff --git a/src/lib.rs b/src/lib.rs
index cbc549e..b2a7e43 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1,4 +1,5 @@
 #![cfg_attr(not(feature = "std"), no_std)]
+#![cfg_attr(feature = "nightly", feature(strict_provenance_atomic_ptr))]
 
 #[cfg(any(test, feature = "std", feature = "alloc"))]
 extern crate alloc;
@@ -14,4 +15,5 @@ pub mod rand;
 pub mod smallbox;
 pub mod util;
 
+pub use cachepadded::CachePadded;
 pub use util::can_transmute;
diff --git a/src/ptr.rs b/src/ptr.rs
index f96009c..dca0c4d 100644
--- a/src/ptr.rs
+++ b/src/ptr.rs
@@ -6,7 +6,7 @@ use core::{
     num::NonZero,
     ops::{Deref, DerefMut},
     ptr::NonNull,
-    sync::atomic::{self, AtomicPtr, AtomicUsize},
+    sync::atomic::{self, AtomicPtr},
 };
 
 #[repr(transparent)]
@@ -203,24 +203,39 @@ impl TaggedAtomicPtr {
         // TODO: switch to fetch_or when stable
         // let old_ptr = self.ptr.fetch_or(tag & mask, order);
 
+        #[cfg(feature = "nightly")]
+        {
+            let old_ptr = self.ptr.fetch_or(tag & mask, order);
+            old_ptr.addr() & mask
+        }
+
+        #[cfg(not(feature = "nightly"))]
+        {
+            use core::sync::atomic::AtomicUsize;
+            let ptr = unsafe { AtomicUsize::from_ptr(self.ptr.as_ptr() as *mut usize) };
+            let old_ptr = ptr.fetch_or(tag & mask, order);
-        let ptr = unsafe { AtomicUsize::from_ptr(self.ptr.as_ptr() as *mut usize) };
-        let old_ptr = ptr.fetch_or(tag & mask, order);
-
-        old_ptr & mask
+            old_ptr & mask
+        }
     }
 
     /// returns the tag and clears it
     pub fn take_tag(&self, order: atomic::Ordering) -> usize {
         let mask = Self::mask();
 
-        // TODO: switch to fetch_and when stable
-        // let old_ptr = self.ptr.fetch_and(!mask, order);
+        #[cfg(feature = "nightly")]
+        {
+            let old_ptr = self.ptr.fetch_and(!mask, order);
+            old_ptr.addr() & mask
+        }
 
-        let ptr = unsafe { AtomicUsize::from_ptr(self.ptr.as_ptr() as *mut usize) };
-        let old_ptr = ptr.fetch_and(!mask, order);
+        #[cfg(not(feature = "nightly"))]
+        {
+            use core::sync::atomic::AtomicUsize;
+            let ptr = unsafe { AtomicUsize::from_ptr(self.ptr.as_ptr() as *mut usize) };
+            let old_ptr = ptr.fetch_and(!mask, order);
 
-        old_ptr & mask
+            old_ptr & mask
+        }
     }
 
     /// returns tag