// triomphe/thin_arc.rs

use core::cmp::Ordering;
use core::ffi::c_void;
use core::fmt;
use core::hash::{Hash, Hasher};
use core::iter::{ExactSizeIterator, Iterator};
use core::marker::PhantomData;
use core::mem::ManuallyDrop;
use core::ops::Deref;
use core::ptr;
use core::usize;

use super::{Arc, ArcInner, HeaderSliceWithLength, HeaderWithLength};

/// A "thin" `Arc` containing dynamically sized data.
///
/// This is functionally equivalent to `Arc<(H, [T])>`.
///
/// When you create an `Arc` containing a dynamically sized type
/// like `HeaderSlice<H, [T]>`, the `Arc` is represented on the stack
/// as a "fat pointer", where the length of the slice is stored
/// alongside the `Arc`'s pointer. In some situations you may wish to
/// have a thin pointer instead, perhaps for FFI compatibility
/// or space efficiency.
///
/// `ThinArc` solves this by storing the length in the allocation itself,
/// via `HeaderSliceWithLength`.
///
/// Note that the pointer field uses `[T; 0]` in order to have the right
/// alignment for `T`.
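///
/// # Example
///
/// A minimal sketch of the thin representation (using the
/// `from_header_and_slice` constructor defined below):
///
/// ```
/// use triomphe::ThinArc;
///
/// let a = ThinArc::from_header_and_slice(17u32, &[1i32, 2, 3]);
/// assert_eq!(a.header.header, 17);
/// assert_eq!(&a.slice, [1, 2, 3]);
/// // The handle itself is a single thin pointer wide.
/// assert_eq!(core::mem::size_of_val(&a), core::mem::size_of::<*const ()>());
/// ```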
#[repr(transparent)]
pub struct ThinArc<H, T> {
    ptr: ptr::NonNull<ArcInner<HeaderSliceWithLength<H, [T; 0]>>>,
    phantom: PhantomData<(H, T)>,
}

unsafe impl<H: Sync + Send, T: Sync + Send> Send for ThinArc<H, T> {}
unsafe impl<H: Sync + Send, T: Sync + Send> Sync for ThinArc<H, T> {}

// Synthesize a fat pointer from a thin pointer.
//
// See the comment around the analogous operation in from_header_and_iter.
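//
// Safety: `thin` must point to a live `ArcInner` allocation whose stored
// `length` matches the true length of the trailing slice; callers in this
// module uphold that invariant.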
#[inline]
fn thin_to_thick<H, T>(
    thin: *mut ArcInner<HeaderSliceWithLength<H, [T; 0]>>,
) -> *mut ArcInner<HeaderSliceWithLength<H, [T]>> {
    let len = unsafe { (*thin).data.header.length };
    let fake_slice = ptr::slice_from_raw_parts_mut(thin as *mut T, len);

    fake_slice as *mut ArcInner<HeaderSliceWithLength<H, [T]>>
}

impl<H, T> ThinArc<H, T> {
    /// Temporarily converts `self` into a bona fide `Arc` and exposes it to the
    /// provided callback. The refcount is not modified.
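    ///
    /// A minimal usage sketch:
    ///
    /// ```
    /// use triomphe::ThinArc;
    ///
    /// let a = ThinArc::from_header_and_slice(0u8, &[1u32, 2, 3]);
    /// // Borrow a transient fat `Arc` view; the refcount is untouched.
    /// let len = a.with_arc(|arc| arc.slice.len());
    /// assert_eq!(len, 3);
    /// ```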
    #[inline]
    pub fn with_arc<F, U>(&self, f: F) -> U
    where
        F: FnOnce(&Arc<HeaderSliceWithLength<H, [T]>>) -> U,
    {
        // Synthesize transient Arc, which never touches the refcount of the ArcInner.
        let transient = unsafe {
            ManuallyDrop::new(Arc {
                p: ptr::NonNull::new_unchecked(thin_to_thick(self.ptr.as_ptr())),
                phantom: PhantomData,
            })
        };

        // Expose the transient Arc to the callback, which may clone it if it wants,
        // and forward the result to the user.
        f(&transient)
    }

    /// Temporarily converts `self` into a bona fide `Arc` and exposes it
    /// mutably to the provided callback. The refcount is not modified.
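    ///
    /// A minimal sketch, mirroring the `with_arc_mut` unit test below:
    ///
    /// ```
    /// use triomphe::{Arc, ThinArc};
    ///
    /// let mut a = ThinArc::from_header_and_slice(0u8, &[1u32, 2, 3]);
    /// // Uniquely owned here, so `get_mut` succeeds.
    /// a.with_arc_mut(|arc| Arc::get_mut(arc).unwrap().slice.fill(9));
    /// assert_eq!(&a.slice, [9, 9, 9]);
    /// ```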
    #[inline]
    pub fn with_arc_mut<F, U>(&mut self, f: F) -> U
    where
        F: FnOnce(&mut Arc<HeaderSliceWithLength<H, [T]>>) -> U,
    {
        // Synthesize transient Arc, which never touches the refcount of the ArcInner.
        let mut transient = unsafe {
            ManuallyDrop::new(Arc {
                p: ptr::NonNull::new_unchecked(thin_to_thick(self.ptr.as_ptr())),
                phantom: PhantomData,
            })
        };

        // Expose the transient Arc to the callback, which may clone it if it wants,
        // and forward the result to the user.
        f(&mut transient)
    }

    /// Creates a `ThinArc` for a `HeaderSlice` using the given header struct and
    /// iterator to generate the slice.
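    ///
    /// A minimal sketch:
    ///
    /// ```
    /// use triomphe::ThinArc;
    ///
    /// // Any ExactSizeIterator works; a range is the simplest.
    /// let a = ThinArc::from_header_and_iter((), 0..4u8);
    /// assert_eq!(&a.slice, [0, 1, 2, 3]);
    /// ```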
    pub fn from_header_and_iter<I>(header: H, items: I) -> Self
    where
        I: Iterator<Item = T> + ExactSizeIterator,
    {
        let header = HeaderWithLength::new(header, items.len());
        Arc::into_thin(Arc::from_header_and_iter(header, items))
    }

    /// Creates a `ThinArc` for a `HeaderSlice` using the given header struct and
    /// a slice to copy.
    pub fn from_header_and_slice(header: H, items: &[T]) -> Self
    where
        T: Copy,
    {
        let header = HeaderWithLength::new(header, items.len());
        Arc::into_thin(Arc::from_header_and_slice(header, items))
    }

    /// Returns the address on the heap of the `ThinArc` itself -- not the `T`
    /// within it -- for memory reporting.
    #[inline]
    pub fn ptr(&self) -> *const c_void {
        self.ptr.as_ptr() as *const ArcInner<T> as *const c_void
    }

    /// Returns the address on the heap of the `Arc` itself -- not the `T` within it -- for memory
    /// reporting.
    #[inline]
    pub fn heap_ptr(&self) -> *const c_void {
        self.ptr()
    }

    /// Constructs a `ThinArc` from a raw pointer.
    ///
    /// # Safety
    ///
    /// The raw pointer must have been previously returned by a call to
    /// `ThinArc::into_raw`, and the caller has to make sure a specific
    /// value of `T` is only dropped once.
    ///
    /// This function is unsafe because improper use may lead to memory unsafety,
    /// even if the returned `ThinArc` is never accessed.
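    ///
    /// A minimal round-trip sketch, mirroring the unit test below:
    ///
    /// ```
    /// use triomphe::ThinArc;
    ///
    /// let a: ThinArc<u8, u32> = ThinArc::from_header_and_slice(1u8, &[2, 3]);
    /// let ptr = a.into_raw();
    /// // SAFETY: `ptr` came from `into_raw` and is restored exactly once.
    /// let a = unsafe { ThinArc::<u8, u32>::from_raw(ptr) };
    /// assert_eq!(&a.slice, [2, 3]);
    /// ```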
    #[inline]
    pub unsafe fn from_raw(ptr: *const c_void) -> Self {
        Self {
            ptr: ptr::NonNull::new_unchecked(ptr as *mut c_void).cast(),
            phantom: PhantomData,
        }
    }

    /// Consumes the `ThinArc` and returns the wrapped pointer.
    #[inline]
    pub fn into_raw(self) -> *const c_void {
        let this = ManuallyDrop::new(self);
        this.ptr.cast().as_ptr()
    }

    /// Provides a raw pointer to the data.
    ///
    /// The counts are not affected in any way and the `ThinArc` is not consumed.
    /// The pointer is valid for as long as there are strong counts in the `ThinArc`.
    #[inline]
    pub fn as_ptr(&self) -> *const c_void {
        self.ptr()
    }

    /// The reference count of this `ThinArc`.
    ///
    /// The number does not include borrowed pointers,
    /// or temporary `Arc` pointers created with functions like
    /// [`ArcBorrow::with_arc`](crate::ArcBorrow::with_arc).
    ///
    /// The function is called `strong_count` to mirror `std::sync::Arc::strong_count`;
    /// note, however, that `triomphe::Arc` does not support weak references.
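    ///
    /// A minimal sketch:
    ///
    /// ```
    /// use triomphe::ThinArc;
    ///
    /// let a = ThinArc::from_header_and_slice((), &[1i32, 2]);
    /// let _b = a.clone();
    /// // Two strong references: `a` and `_b`.
    /// assert_eq!(ThinArc::strong_count(&a), 2);
    /// ```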
    #[inline]
    pub fn strong_count(this: &Self) -> usize {
        Self::with_arc(this, |arc| Arc::strong_count(arc))
    }
}

impl<H, T> Deref for ThinArc<H, T> {
    type Target = HeaderSliceWithLength<H, [T]>;

    #[inline]
    fn deref(&self) -> &Self::Target {
        unsafe { &(*thin_to_thick(self.ptr.as_ptr())).data }
    }
}

impl<H, T> Clone for ThinArc<H, T> {
    #[inline]
    fn clone(&self) -> Self {
        ThinArc::with_arc(self, |a| {
            // Safety: `a` isn't mutable, so the header length remains valid.
            unsafe { Arc::into_thin_unchecked(a.clone()) }
        })
    }
}

impl<H, T> Drop for ThinArc<H, T> {
    #[inline]
    fn drop(&mut self) {
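        // Reconstruct the fat `Arc` (refcount unchanged) and let its Drop impl
        // decrement the count, freeing the allocation when it reaches zero.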
        let _ = Arc::from_thin(ThinArc {
            ptr: self.ptr,
            phantom: PhantomData,
        });
    }
}

impl<H, T> Arc<HeaderSliceWithLength<H, [T]>> {
    /// Converts an `Arc` into a `ThinArc`. This consumes the `Arc`, so the refcount
    /// is not modified.
    ///
    /// # Safety
    ///
    /// The caller must ensure that the header length matches the slice length.
    #[inline]
    unsafe fn into_thin_unchecked(a: Self) -> ThinArc<H, T> {
        let a = ManuallyDrop::new(a);
        debug_assert_eq!(
            a.header.length,
            a.slice.len(),
            "Length needs to be correct for ThinArc to work"
        );
        let fat_ptr: *mut ArcInner<HeaderSliceWithLength<H, [T]>> = a.ptr();
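        // Cast away the slice-length metadata; only the thin data address
        // survives the `*mut [usize] -> *mut usize` cast.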
        let thin_ptr = fat_ptr as *mut [usize] as *mut usize;
        ThinArc {
            ptr: unsafe {
                ptr::NonNull::new_unchecked(
                    thin_ptr as *mut ArcInner<HeaderSliceWithLength<H, [T; 0]>>,
                )
            },
            phantom: PhantomData,
        }
    }

    /// Converts an `Arc` into a `ThinArc`. This consumes the `Arc`, so the refcount
    /// is not modified.
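    ///
    /// A minimal round-trip sketch (see also `from_thin` below):
    ///
    /// ```
    /// use triomphe::{Arc, HeaderWithLength, ThinArc};
    ///
    /// let header = HeaderWithLength::new(0u8, 2);
    /// let fat = Arc::from_header_and_slice(header, &[1u8, 2]);
    /// let thin: ThinArc<u8, u8> = Arc::into_thin(fat);
    /// let fat = Arc::from_thin(thin);
    /// assert_eq!(&fat.slice, [1, 2]);
    /// ```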
    #[inline]
    pub fn into_thin(a: Self) -> ThinArc<H, T> {
        assert_eq!(
            a.header.length,
            a.slice.len(),
            "Length needs to be correct for ThinArc to work"
        );
        unsafe { Self::into_thin_unchecked(a) }
    }

    /// Converts a `ThinArc` into an `Arc`. This consumes the `ThinArc`, so the refcount
    /// is not modified.
    #[inline]
    pub fn from_thin(a: ThinArc<H, T>) -> Self {
        let a = ManuallyDrop::new(a);
        let ptr = thin_to_thick(a.ptr.as_ptr());
        unsafe {
            Arc {
                p: ptr::NonNull::new_unchecked(ptr),
                phantom: PhantomData,
            }
        }
    }
}

impl<H: PartialEq, T: PartialEq> PartialEq for ThinArc<H, T> {
    #[inline]
    fn eq(&self, other: &ThinArc<H, T>) -> bool {
        ThinArc::with_arc(self, |a| ThinArc::with_arc(other, |b| *a == *b))
    }
}

impl<H: Eq, T: Eq> Eq for ThinArc<H, T> {}

impl<H: PartialOrd, T: PartialOrd> PartialOrd for ThinArc<H, T> {
    #[inline]
    fn partial_cmp(&self, other: &ThinArc<H, T>) -> Option<Ordering> {
        ThinArc::with_arc(self, |a| ThinArc::with_arc(other, |b| a.partial_cmp(b)))
    }
}

impl<H: Ord, T: Ord> Ord for ThinArc<H, T> {
    #[inline]
    fn cmp(&self, other: &ThinArc<H, T>) -> Ordering {
        ThinArc::with_arc(self, |a| ThinArc::with_arc(other, |b| a.cmp(b)))
    }
}

impl<H: Hash, T: Hash> Hash for ThinArc<H, T> {
    fn hash<HSR: Hasher>(&self, state: &mut HSR) {
        ThinArc::with_arc(self, |a| a.hash(state))
    }
}

impl<H: fmt::Debug, T: fmt::Debug> fmt::Debug for ThinArc<H, T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::Debug::fmt(&**self, f)
    }
}

impl<H, T> fmt::Pointer for ThinArc<H, T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::Pointer::fmt(&self.ptr(), f)
    }
}

#[cfg(test)]
mod tests {
    use crate::{Arc, HeaderWithLength, ThinArc};
    use alloc::vec;
    use core::clone::Clone;
    use core::ops::Drop;
    use core::sync::atomic;
    use core::sync::atomic::Ordering::{Acquire, SeqCst};

    #[derive(PartialEq)]
    struct Canary(*mut atomic::AtomicUsize);

    impl Drop for Canary {
        fn drop(&mut self) {
            unsafe {
                (*self.0).fetch_add(1, SeqCst);
            }
        }
    }

    #[test]
    fn empty_thin() {
        let header = HeaderWithLength::new(100u32, 0);
        let x = Arc::from_header_and_iter(header, core::iter::empty::<i32>());
        let y = Arc::into_thin(x.clone());
        assert_eq!(y.header.header, 100);
        assert!(y.slice.is_empty());
        assert_eq!(x.header.header, 100);
        assert!(x.slice.is_empty());
    }

    #[test]
    fn thin_assert_padding() {
        #[derive(Clone, Default)]
        #[repr(C)]
        struct Padded {
            i: u16,
        }

        // The header will have more alignment than `Padded`
        let header = HeaderWithLength::new(0i32, 2);
        let items = vec![Padded { i: 0xdead }, Padded { i: 0xbeef }];
        let a = ThinArc::from_header_and_iter(header, items.into_iter());
        assert_eq!(a.slice.len(), 2);
        assert_eq!(a.slice[0].i, 0xdead);
        assert_eq!(a.slice[1].i, 0xbeef);
    }

    #[test]
    #[allow(clippy::redundant_clone, clippy::eq_op)]
    fn slices_and_thin() {
        let mut canary = atomic::AtomicUsize::new(0);
        let c = Canary(&mut canary as *mut atomic::AtomicUsize);
        let v = vec![5, 6];
        let header = HeaderWithLength::new(c, v.len());
        {
            let x = Arc::into_thin(Arc::from_header_and_slice(header, &v));
            let y = ThinArc::with_arc(&x, |q| q.clone());
            let _ = y.clone();
            let _ = x == x;
            Arc::from_thin(x.clone());
        }
        assert_eq!(canary.load(Acquire), 1);
    }

    #[test]
    #[allow(clippy::redundant_clone, clippy::eq_op)]
    fn iter_and_thin() {
        let mut canary = atomic::AtomicUsize::new(0);
        let c = Canary(&mut canary as *mut atomic::AtomicUsize);
        let v = vec![5, 6];
        let header = HeaderWithLength::new(c, v.len());
        {
            let x = Arc::into_thin(Arc::from_header_and_iter(header, v.into_iter()));
            let y = ThinArc::with_arc(&x, |q| q.clone());
            let _ = y.clone();
            let _ = x == x;
            Arc::from_thin(x.clone());
        }
        assert_eq!(canary.load(Acquire), 1);
    }

    #[test]
    fn into_raw_and_from_raw() {
        let mut canary = atomic::AtomicUsize::new(0);
        let c = Canary(&mut canary as *mut atomic::AtomicUsize);
        let v = vec![5, 6];
        let header = HeaderWithLength::new(c, v.len());
        {
            type ThinArcCanary = ThinArc<Canary, u32>;
            let x: ThinArcCanary = Arc::into_thin(Arc::from_header_and_iter(header, v.into_iter()));
            let ptr = x.as_ptr();

            assert_eq!(x.into_raw(), ptr);

            let _x = unsafe { ThinArcCanary::from_raw(ptr) };
        }
        assert_eq!(canary.load(Acquire), 1);
    }

    #[test]
    fn thin_eq_and_cmp() {
        [
            [("*", &b"AB"[..]), ("*", &b"ab"[..])],
            [("*", &b"AB"[..]), ("*", &b"a"[..])],
            [("*", &b"A"[..]), ("*", &b"ab"[..])],
            [("A", &b"*"[..]), ("a", &b"*"[..])],
            [("a", &b"*"[..]), ("A", &b"*"[..])],
            [("AB", &b"*"[..]), ("a", &b"*"[..])],
            [("A", &b"*"[..]), ("ab", &b"*"[..])],
        ]
        .iter()
        .for_each(|[lt @ (lh, ls), rt @ (rh, rs)]| {
            let l = ThinArc::from_header_and_slice(lh, ls);
            let r = ThinArc::from_header_and_slice(rh, rs);

            assert_eq!(l, l);
            assert_eq!(r, r);

            assert_ne!(l, r);
            assert_ne!(r, l);

            assert_eq!(l <= l, lt <= lt, "{lt:?} <= {lt:?}");
            assert_eq!(l >= l, lt >= lt, "{lt:?} >= {lt:?}");

            assert_eq!(l < l, lt < lt, "{lt:?} < {lt:?}");
            assert_eq!(l > l, lt > lt, "{lt:?} > {lt:?}");

            assert_eq!(r <= r, rt <= rt, "{rt:?} <= {rt:?}");
            assert_eq!(r >= r, rt >= rt, "{rt:?} >= {rt:?}");

            assert_eq!(r < r, rt < rt, "{rt:?} < {rt:?}");
            assert_eq!(r > r, rt > rt, "{rt:?} > {rt:?}");

            assert_eq!(l < r, lt < rt, "{lt:?} < {rt:?}");
            assert_eq!(r > l, rt > lt, "{rt:?} > {lt:?}");
        })
    }

    #[test]
    fn thin_eq_and_partial_cmp() {
        [
            [(0.0, &[0.0, 0.0][..]), (1.0, &[0.0, 0.0][..])],
            [(1.0, &[0.0, 0.0][..]), (0.0, &[0.0, 0.0][..])],
            [(0.0, &[0.0][..]), (0.0, &[0.0, 0.0][..])],
            [(0.0, &[0.0, 0.0][..]), (0.0, &[0.0][..])],
            [(0.0, &[1.0, 2.0][..]), (0.0, &[10.0, 20.0][..])],
        ]
        .iter()
        .for_each(|[lt @ (lh, ls), rt @ (rh, rs)]| {
            let l = ThinArc::from_header_and_slice(lh, ls);
            let r = ThinArc::from_header_and_slice(rh, rs);

            assert_eq!(l, l);
            assert_eq!(r, r);

            assert_ne!(l, r);
            assert_ne!(r, l);

            assert_eq!(l <= l, lt <= lt, "{lt:?} <= {lt:?}");
            assert_eq!(l >= l, lt >= lt, "{lt:?} >= {lt:?}");

            assert_eq!(l < l, lt < lt, "{lt:?} < {lt:?}");
            assert_eq!(l > l, lt > lt, "{lt:?} > {lt:?}");

            assert_eq!(r <= r, rt <= rt, "{rt:?} <= {rt:?}");
            assert_eq!(r >= r, rt >= rt, "{rt:?} >= {rt:?}");

            assert_eq!(r < r, rt < rt, "{rt:?} < {rt:?}");
            assert_eq!(r > r, rt > rt, "{rt:?} > {rt:?}");

            assert_eq!(l < r, lt < rt, "{lt:?} < {rt:?}");
            assert_eq!(r > l, rt > lt, "{rt:?} > {lt:?}");
        })
    }

    #[test]
    fn with_arc_mut() {
        let mut arc: ThinArc<u8, u16> = ThinArc::from_header_and_slice(1u8, &[1, 2, 3]);
        arc.with_arc_mut(|arc| Arc::get_mut(arc).unwrap().slice.fill(2));
        arc.with_arc_mut(|arc| assert!(Arc::get_unique(arc).is_some()));
        arc.with_arc(|arc| assert!(Arc::is_unique(arc)));
        // Using clone to check that the layout generated in new_uninit_slice is
        // compatible with ArcInner.
        let arcs = [
            arc.clone(),
            arc.clone(),
            arc.clone(),
            arc.clone(),
            arc.clone(),
        ];
        arc.with_arc(|arc| assert_eq!(6, Arc::count(&arc)));

        // If the layout is not compatible, then the data might be corrupted.
        assert_eq!(arc.header.header, 1);
        assert_eq!(&arc.slice, [2, 2, 2]);

        // Drop the arcs and check the count and the content to
        // make sure it isn't corrupted.
        drop(arcs);
        arc.with_arc_mut(|arc| assert!(Arc::get_unique(arc).is_some()));
        arc.with_arc(|arc| assert!(Arc::is_unique(arc)));
        assert_eq!(arc.header.header, 1);
        assert_eq!(&arc.slice, [2, 2, 2]);
    }

    #[allow(dead_code)]
    const fn is_partial_ord<T: ?Sized + PartialOrd>() {}

    #[allow(dead_code)]
    const fn is_ord<T: ?Sized + Ord>() {}

    // compile-time check that PartialOrd/Ord is correctly implemented
    const _: () = is_partial_ord::<ThinArc<f64, f64>>();
    const _: () = is_partial_ord::<ThinArc<f64, u64>>();
    const _: () = is_partial_ord::<ThinArc<u64, f64>>();
    const _: () = is_ord::<ThinArc<u64, u64>>();
}