triomphe/
thin_arc.rs

use core::cmp::Ordering;
use core::ffi::c_void;
use core::fmt;
use core::hash::{Hash, Hasher};
use core::iter::{ExactSizeIterator, Iterator};
use core::marker::PhantomData;
use core::mem::ManuallyDrop;
use core::ops::Deref;
use core::panic::{RefUnwindSafe, UnwindSafe};
use core::ptr;

use super::{Arc, ArcInner, HeaderSlice, HeaderSliceWithLengthProtected, HeaderWithLength};
use crate::header::HeaderSliceWithLengthUnchecked;

/// A "thin" `Arc` containing dynamically sized data
///
/// This is functionally equivalent to `Arc<(H, [T])>`
///
/// When you create an `Arc` containing a dynamically sized type
/// like `HeaderSlice<H, [T]>`, the `Arc` is represented on the stack
/// as a "fat pointer", where the length of the slice is stored
/// alongside the `Arc`'s pointer. In some situations you may wish to
/// have a thin pointer instead, perhaps for FFI compatibility
/// or space efficiency.
///
/// Note that we use `[T; 0]` in order to have the right alignment for `T`.
///
/// `ThinArc` solves this by storing the length in the allocation itself,
/// via `HeaderSliceWithLengthProtected`.
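///
/// # Example
///
/// A minimal usage sketch (illustrative; not from the upstream docs):
///
/// ```
/// use triomphe::ThinArc;
///
/// // Header and slice share one allocation; the handle itself is a
/// // single thin pointer.
/// let thin = ThinArc::from_header_and_slice(17u32, &[1u8, 2, 3]);
/// assert_eq!(core::mem::size_of_val(&thin), core::mem::size_of::<usize>());
/// assert_eq!(thin.header.header, 17);
/// assert_eq!(&thin.slice, [1, 2, 3]);
/// ```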
#[repr(transparent)]
pub struct ThinArc<H, T> {
    // We can pointer-cast between this target type
    // of `ArcInner<HeaderSlice<HeaderWithLength<H>, [T; 0]>>`
    // and the types
    // `ArcInner<HeaderSliceWithLengthProtected<H, T>>` and
    // `ArcInner<HeaderSliceWithLengthUnchecked<H, T>>` (= `ArcInner<HeaderSlice<HeaderWithLength<H>, [T]>>`).
    // [By adding appropriate length metadata to the pointer.]
    // All types involved are #[repr(C)] or #[repr(transparent)], to ensure the safety of such casts
    // (in particular `HeaderSlice`, `HeaderWithLength`, `HeaderSliceWithLengthProtected`).
    //
    // The safe API of `ThinArc` ensures that the length in the `HeaderWithLength`
    // is correctly set - or verified - upon creation of a `ThinArc` and can't be modified
    // to fall out of sync with the true slice length for this value & allocation.
    ptr: ptr::NonNull<ArcInner<HeaderSlice<HeaderWithLength<H>, [T; 0]>>>,
    phantom: PhantomData<(H, T)>,
}

unsafe impl<H: Sync + Send, T: Sync + Send> Send for ThinArc<H, T> {}
unsafe impl<H: Sync + Send, T: Sync + Send> Sync for ThinArc<H, T> {}

impl<H: RefUnwindSafe, T: RefUnwindSafe> UnwindSafe for ThinArc<H, T> {}

// Synthesize a fat pointer from a thin pointer.
//
// See the comment around the analogous operation in from_header_and_iter.
#[inline]
fn thin_to_thick<H, T>(arc: &ThinArc<H, T>) -> *mut ArcInner<HeaderSliceWithLengthProtected<H, T>> {
    let thin = arc.ptr.as_ptr();
    let len = unsafe { (*thin).data.header.length };
    let fake_slice = ptr::slice_from_raw_parts_mut(thin as *mut T, len);

    fake_slice as *mut ArcInner<HeaderSliceWithLengthProtected<H, T>>
}

impl<H, T> ThinArc<H, T> {
    /// Temporarily converts `self` into a bona fide `Arc` and exposes it to the
    /// provided callback. The refcount is not modified.
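    ///
    /// # Example
    ///
    /// An illustrative sketch:
    ///
    /// ```
    /// use triomphe::ThinArc;
    ///
    /// let a = ThinArc::from_header_and_slice(0u8, &[1u16, 2, 3]);
    /// // Borrow the contents as a regular `Arc` without touching the refcount.
    /// let len = a.with_arc(|arc| arc.slice.len());
    /// assert_eq!(len, 3);
    /// ```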
    #[inline]
    pub fn with_arc<F, U>(&self, f: F) -> U
    where
        F: FnOnce(&Arc<HeaderSliceWithLengthUnchecked<H, T>>) -> U,
    {
        // Synthesize a transient Arc, which never touches the refcount of the ArcInner.
        let transient = ManuallyDrop::new(Arc::from_protected(unsafe {
            Arc::from_raw_inner(thin_to_thick(self))
        }));

        // Expose the transient Arc to the callback, which may clone it if it wants,
        // and forward the result to the user
        f(&transient)
    }

    /// Temporarily converts `self` into a bona fide `Arc` and exposes it to the
    /// provided callback. The refcount is not modified.
    #[inline]
    fn with_protected_arc<F, U>(&self, f: F) -> U
    where
        F: FnOnce(&Arc<HeaderSliceWithLengthProtected<H, T>>) -> U,
    {
        // Synthesize a transient Arc, which never touches the refcount of the ArcInner.
        let transient = ManuallyDrop::new(unsafe { Arc::from_raw_inner(thin_to_thick(self)) });

        // Expose the transient Arc to the callback, which may clone it if it wants,
        // and forward the result to the user
        f(&transient)
    }

    /// Temporarily converts `self` into a bona fide `Arc` and exposes it to the
    /// provided callback. The refcount is not modified.
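    ///
    /// # Example
    ///
    /// An illustrative sketch, mirroring the `with_arc_mut` test below
    /// (it assumes `slice_mut` is part of the protected slice's public API):
    ///
    /// ```
    /// use triomphe::{Arc, ThinArc};
    ///
    /// let mut a: ThinArc<u8, u16> = ThinArc::from_header_and_slice(1u8, &[1, 2, 3]);
    /// // The closure sees a mutable protected `Arc`; the thin pointer is
    /// // re-synchronized when the closure (or a panic) exits.
    /// a.with_arc_mut(|arc| Arc::get_mut(arc).unwrap().slice_mut().fill(9));
    /// assert_eq!(&a.slice, [9, 9, 9]);
    /// ```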
    #[inline]
    pub fn with_arc_mut<F, U>(&mut self, f: F) -> U
    where
        F: FnOnce(&mut Arc<HeaderSliceWithLengthProtected<H, T>>) -> U,
    {
        // It is possible for the user to replace the Arc entirely here. If so, we need to update the ThinArc as well
        // whenever this method exits. We do this with a drop guard to handle the panicking case.
        struct DropGuard<'a, H, T> {
            transient: ManuallyDrop<Arc<HeaderSliceWithLengthProtected<H, T>>>,
            this: &'a mut ThinArc<H, T>,
        }

        impl<'a, H, T> Drop for DropGuard<'a, H, T> {
            fn drop(&mut self) {
                // This guard is only dropped when the same debug_assert already succeeded
                // or while panicking. This has the effect that, if the debug_assert fails, we abort!
                // This should never fail, unless a user used `transmute` to violate the invariants of
                // `HeaderSliceWithLengthProtected`.
                // In this case, there is no sound fallback other than aborting.
                debug_assert_eq!(
                    self.transient.length(),
                    self.transient.slice().len(),
                    "Length needs to be correct for ThinArc to work"
                );
                // Safety: We're still in the realm of Protected types so this cast is safe
                self.this.ptr = self.transient.p.cast();
            }
        }

        // Synthesize a transient Arc, which never touches the refcount of the ArcInner.
        let transient = ManuallyDrop::new(unsafe { Arc::from_raw_inner(thin_to_thick(self)) });

        let mut guard = DropGuard {
            transient,
            this: self,
        };

        // Expose the transient Arc to the callback, which may clone it if it wants,
        // and forward the result to the user
        let ret = f(&mut guard.transient);

        // deliberately checked both here AND in the `DropGuard`
        debug_assert_eq!(
            guard.transient.length(),
            guard.transient.slice().len(),
            "Length needs to be correct for ThinArc to work"
        );

        ret
    }

    /// Creates a `ThinArc` for a `HeaderSlice` using the given header struct and
    /// an iterator to generate the slice.
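    ///
    /// # Example
    ///
    /// An illustrative sketch:
    ///
    /// ```
    /// use triomphe::ThinArc;
    ///
    /// // The iterator must report an exact length so the single
    /// // allocation can be sized up front.
    /// let a = ThinArc::from_header_and_iter(10u32, 0u16..4);
    /// assert_eq!(a.header.header, 10);
    /// assert_eq!(a.slice.len(), 4);
    /// ```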
    pub fn from_header_and_iter<I>(header: H, items: I) -> Self
    where
        I: Iterator<Item = T> + ExactSizeIterator,
    {
        let header = HeaderWithLength::new(header, items.len());
        Arc::into_thin(Arc::from_header_and_iter(header, items))
    }

    /// Creates a `ThinArc` for a `HeaderSlice` using the given header struct and
    /// a slice to copy.
    pub fn from_header_and_slice(header: H, items: &[T]) -> Self
    where
        T: Copy,
    {
        let header = HeaderWithLength::new(header, items.len());
        Arc::into_thin(Arc::from_header_and_slice(header, items))
    }

    /// Returns the address on the heap of the ThinArc itself -- not the T
    /// within it -- for memory reporting.
    #[inline]
    pub fn ptr(&self) -> *const c_void {
        self.ptr.cast().as_ptr()
    }

    /// Returns the address on the heap of the Arc itself -- not the T within it -- for memory
    /// reporting.
    #[inline]
    pub fn heap_ptr(&self) -> *const c_void {
        self.ptr()
    }

    /// Constructs a `ThinArc` from a raw pointer.
    ///
    /// # Safety
    ///
    /// The raw pointer must have been previously returned by a call to
    /// `ThinArc::into_raw`.
    ///
    /// The user of `from_raw` has to make sure a specific value of `T` is only dropped once.
    ///
    /// This function is unsafe because improper use may lead to memory unsafety,
    /// even if the returned `ThinArc` is never accessed.
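    ///
    /// # Example
    ///
    /// An illustrative round-trip sketch:
    ///
    /// ```
    /// use triomphe::ThinArc;
    ///
    /// let x: ThinArc<u8, u8> = ThinArc::from_header_and_slice(0u8, &[1, 2]);
    /// let ptr = x.into_raw();
    /// // Safety: `ptr` came from `into_raw` and is reconstructed exactly once.
    /// let x = unsafe { ThinArc::<u8, u8>::from_raw(ptr) };
    /// assert_eq!(&x.slice, [1, 2]);
    /// ```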
    #[inline]
    pub unsafe fn from_raw(ptr: *const c_void) -> Self {
        Self {
            ptr: ptr::NonNull::new_unchecked(ptr as *mut c_void).cast(),
            phantom: PhantomData,
        }
    }

    /// Consumes the `ThinArc` and returns the wrapped pointer.
    #[inline]
    pub fn into_raw(self) -> *const c_void {
        let this = ManuallyDrop::new(self);
        this.ptr()
    }

    /// Provides a raw pointer to the data.
    /// The counts are not affected in any way and the ThinArc is not consumed.
    /// The pointer is valid for as long as there are strong counts in the ThinArc.
    #[inline]
    pub fn as_ptr(&self) -> *const c_void {
        self.ptr()
    }

    /// The reference count of this `ThinArc`.
    ///
    /// The number does not include borrowed pointers,
    /// or temporary `Arc` pointers created with functions like
    /// [`ArcBorrow::with_arc`](crate::ArcBorrow::with_arc).
    ///
    /// The function is called `strong_count` to mirror `std::sync::Arc::strong_count`;
    /// however, `triomphe::Arc` does not support weak references.
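    ///
    /// # Example
    ///
    /// An illustrative sketch:
    ///
    /// ```
    /// use triomphe::ThinArc;
    ///
    /// let a = ThinArc::from_header_and_slice(0u8, &[1u8]);
    /// let b = a.clone();
    /// assert_eq!(ThinArc::strong_count(&a), 2);
    /// drop(b);
    /// assert_eq!(ThinArc::strong_count(&a), 1);
    /// ```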
    #[inline]
    pub fn strong_count(this: &Self) -> usize {
        Self::with_arc(this, Arc::strong_count)
    }
}

impl<H, T> Deref for ThinArc<H, T> {
    type Target = HeaderSliceWithLengthUnchecked<H, T>;

    #[inline]
    fn deref(&self) -> &Self::Target {
        unsafe { (*thin_to_thick(self)).data.inner() }
    }
}

impl<H, T> Clone for ThinArc<H, T> {
    #[inline]
    fn clone(&self) -> Self {
        ThinArc::with_protected_arc(self, |a| Arc::protected_into_thin(a.clone()))
    }
}

impl<H, T> Drop for ThinArc<H, T> {
    #[inline]
    fn drop(&mut self) {
        let _ = Arc::protected_from_thin(ThinArc {
            ptr: self.ptr,
            phantom: PhantomData,
        });
    }
}

impl<H, T> Arc<HeaderSliceWithLengthUnchecked<H, T>> {
    /// Converts an `Arc` into a `ThinArc`. This consumes the `Arc`, so the refcount
    /// is not modified.
    ///
    /// # Safety
    /// Assumes that the header length matches the slice length.
    #[inline]
    unsafe fn into_thin_unchecked(a: Self) -> ThinArc<H, T> {
        // Safety: invariant bubbled up
        let this_protected: Arc<HeaderSliceWithLengthProtected<H, T>> =
            unsafe { Arc::from_unprotected_unchecked(a) };

        Arc::protected_into_thin(this_protected)
    }

    /// Converts an `Arc` into a `ThinArc`. This consumes the `Arc`, so the refcount
    /// is not modified.
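    ///
    /// # Example
    ///
    /// An illustrative round-trip sketch; the header length must match the
    /// slice length, which `from_header_and_slice` establishes here:
    ///
    /// ```
    /// use triomphe::{Arc, HeaderWithLength, ThinArc};
    ///
    /// let header = HeaderWithLength::new(100u32, 2);
    /// let fat = Arc::from_header_and_slice(header, &[5u8, 6]);
    /// let thin: ThinArc<u32, u8> = Arc::into_thin(fat);
    /// // Convert back; neither direction touches the refcount.
    /// let fat = Arc::from_thin(thin);
    /// assert_eq!(fat.header.header, 100);
    /// assert_eq!(fat.slice.len(), 2);
    /// ```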
    #[inline]
    pub fn into_thin(a: Self) -> ThinArc<H, T> {
        assert_eq!(
            a.header.length,
            a.slice.len(),
            "Length needs to be correct for ThinArc to work"
        );
        // Safety: invariant checked in assertion above
        unsafe { Self::into_thin_unchecked(a) }
    }

    /// Converts a `ThinArc` into an `Arc`. This consumes the `ThinArc`, so the refcount
    /// is not modified.
    #[inline]
    pub fn from_thin(a: ThinArc<H, T>) -> Self {
        Self::from_protected(Arc::<HeaderSliceWithLengthProtected<H, T>>::protected_from_thin(a))
    }

    /// Converts a protected `Arc` into an unchecked `Arc`. This consumes the input,
    /// so the refcount is not modified.
    #[inline]
    fn from_protected(a: Arc<HeaderSliceWithLengthProtected<H, T>>) -> Self {
        // Safety: HeaderSliceWithLengthProtected and HeaderSliceWithLengthUnchecked have the same layout.
        // The whole `Arc` should also be layout compatible (as a transparent wrapper around `NonNull` pointers with the same
        // metadata type), but we still conservatively avoid a direct transmute here and use a pointer-cast instead.
        unsafe { Arc::from_raw_inner(Arc::into_raw_inner(a) as _) }
    }
}

impl<H, T> Arc<HeaderSliceWithLengthProtected<H, T>> {
    /// Converts an `Arc` into a `ThinArc`. This consumes the `Arc`, so the refcount
    /// is not modified.
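    ///
    /// # Example
    ///
    /// An illustrative sketch; a protected `Arc` is normally obtained from an
    /// existing `ThinArc` via `protected_from_thin`:
    ///
    /// ```
    /// use triomphe::{Arc, ThinArc};
    ///
    /// let thin = ThinArc::from_header_and_slice(0u8, &[1u8, 2]);
    /// let protected = Arc::protected_from_thin(thin);
    /// let thin = Arc::protected_into_thin(protected);
    /// assert_eq!(&thin.slice, [1, 2]);
    /// ```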
    #[inline]
    pub fn protected_into_thin(a: Self) -> ThinArc<H, T> {
        debug_assert_eq!(
            a.length(),
            a.slice().len(),
            "Length needs to be correct for ThinArc to work"
        );

        let fat_ptr: *mut ArcInner<HeaderSliceWithLengthProtected<H, T>> = Arc::into_raw_inner(a);
        // Safety: The pointer comes from a valid Arc, and HeaderSliceWithLengthProtected has the correct length invariant
        let thin_ptr: *mut ArcInner<HeaderSlice<HeaderWithLength<H>, [T; 0]>> = fat_ptr.cast();
        ThinArc {
            ptr: unsafe { ptr::NonNull::new_unchecked(thin_ptr) },
            phantom: PhantomData,
        }
    }

    /// Converts a `ThinArc` into an `Arc`. This consumes the `ThinArc`, so the refcount
    /// is not modified.
    #[inline]
    pub fn protected_from_thin(a: ThinArc<H, T>) -> Self {
        let a = ManuallyDrop::new(a);
        let ptr = thin_to_thick(&a);
        unsafe { Arc::from_raw_inner(ptr) }
    }

    /// Obtains a `HeaderSliceWithLengthProtected` from a `HeaderSliceWithLengthUnchecked`, each wrapped in an `Arc`.
    ///
    /// # Safety
    /// Assumes that the header length matches the slice length.
    #[inline]
    unsafe fn from_unprotected_unchecked(a: Arc<HeaderSliceWithLengthUnchecked<H, T>>) -> Self {
        // Safety: HeaderSliceWithLengthProtected and HeaderSliceWithLengthUnchecked have the same layout,
        // and the safety invariant on HeaderSliceWithLengthProtected.inner is bubbled up.
        // The whole `Arc` should also be layout compatible (as a transparent wrapper around `NonNull` pointers with the same
        // metadata type), but we still conservatively avoid a direct transmute here and use a pointer-cast instead.
        unsafe { Arc::from_raw_inner(Arc::into_raw_inner(a) as _) }
    }
}

impl<H: PartialEq, T: PartialEq> PartialEq for ThinArc<H, T> {
    #[inline]
    fn eq(&self, other: &ThinArc<H, T>) -> bool {
        ThinArc::with_arc(self, |a| ThinArc::with_arc(other, |b| *a == *b))
    }
}

impl<H: Eq, T: Eq> Eq for ThinArc<H, T> {}

impl<H: PartialOrd, T: PartialOrd> PartialOrd for ThinArc<H, T> {
    #[inline]
    fn partial_cmp(&self, other: &ThinArc<H, T>) -> Option<Ordering> {
        ThinArc::with_arc(self, |a| ThinArc::with_arc(other, |b| a.partial_cmp(b)))
    }
}

impl<H: Ord, T: Ord> Ord for ThinArc<H, T> {
    #[inline]
    fn cmp(&self, other: &ThinArc<H, T>) -> Ordering {
        ThinArc::with_arc(self, |a| ThinArc::with_arc(other, |b| a.cmp(b)))
    }
}

impl<H: Hash, T: Hash> Hash for ThinArc<H, T> {
    fn hash<HSR: Hasher>(&self, state: &mut HSR) {
        ThinArc::with_arc(self, |a| a.hash(state))
    }
}

impl<H: fmt::Debug, T: fmt::Debug> fmt::Debug for ThinArc<H, T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::Debug::fmt(&**self, f)
    }
}

impl<H, T> fmt::Pointer for ThinArc<H, T> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        fmt::Pointer::fmt(&self.ptr(), f)
    }
}

#[cfg(test)]
mod tests {
    use crate::{Arc, HeaderWithLength, ThinArc};
    use alloc::vec;
    use core::clone::Clone;
    use core::ops::Drop;
    use core::sync::atomic;
    use core::sync::atomic::Ordering::{Acquire, SeqCst};

    #[derive(PartialEq)]
    struct Canary(*mut atomic::AtomicUsize);

    impl Drop for Canary {
        fn drop(&mut self) {
            unsafe {
                (*self.0).fetch_add(1, SeqCst);
            }
        }
    }

    #[test]
    fn empty_thin() {
        let header = HeaderWithLength::new(100u32, 0);
        let x = Arc::from_header_and_iter(header, core::iter::empty::<i32>());
        let y = Arc::into_thin(x.clone());
        assert_eq!(y.header.header, 100);
        assert!(y.slice.is_empty());
        assert_eq!(x.header.header, 100);
        assert!(x.slice.is_empty());
    }

    #[test]
    fn thin_assert_padding() {
        #[derive(Clone, Default)]
        #[repr(C)]
        struct Padded {
            i: u16,
        }

        // The header will have more alignment than `Padded`
        let header = HeaderWithLength::new(0i32, 2);
        let items = vec![Padded { i: 0xdead }, Padded { i: 0xbeef }];
        let a = ThinArc::from_header_and_iter(header, items.into_iter());
        assert_eq!(a.slice.len(), 2);
        assert_eq!(a.slice[0].i, 0xdead);
        assert_eq!(a.slice[1].i, 0xbeef);
    }

    #[test]
    #[allow(clippy::redundant_clone, clippy::eq_op)]
    fn slices_and_thin() {
        let mut canary = atomic::AtomicUsize::new(0);
        let c = Canary(&mut canary as *mut atomic::AtomicUsize);
        let v = vec![5, 6];
        let header = HeaderWithLength::new(c, v.len());
        {
            let x = Arc::into_thin(Arc::from_header_and_slice(header, &v));
            let y = ThinArc::with_arc(&x, |q| q.clone());
            let _ = y.clone();
            let _ = x == x;
            Arc::from_thin(x.clone());
        }
        assert_eq!(canary.load(Acquire), 1);
    }

    #[test]
    #[allow(clippy::redundant_clone, clippy::eq_op)]
    fn iter_and_thin() {
        let mut canary = atomic::AtomicUsize::new(0);
        let c = Canary(&mut canary as *mut atomic::AtomicUsize);
        let v = vec![5, 6];
        let header = HeaderWithLength::new(c, v.len());
        {
            let x = Arc::into_thin(Arc::from_header_and_iter(header, v.into_iter()));
            let y = ThinArc::with_arc(&x, |q| q.clone());
            let _ = y.clone();
            let _ = x == x;
            Arc::from_thin(x.clone());
        }
        assert_eq!(canary.load(Acquire), 1);
    }

    #[test]
    fn into_raw_and_from_raw() {
        let mut canary = atomic::AtomicUsize::new(0);
        let c = Canary(&mut canary as *mut atomic::AtomicUsize);
        let v = vec![5, 6];
        let header = HeaderWithLength::new(c, v.len());
        {
            type ThinArcCanary = ThinArc<Canary, u32>;
            let x: ThinArcCanary = Arc::into_thin(Arc::from_header_and_iter(header, v.into_iter()));
            let ptr = x.as_ptr();

            assert_eq!(x.into_raw(), ptr);

            let _x = unsafe { ThinArcCanary::from_raw(ptr) };
        }
        assert_eq!(canary.load(Acquire), 1);
    }

    #[test]
    fn thin_eq_and_cmp() {
        [
            [("*", &b"AB"[..]), ("*", &b"ab"[..])],
            [("*", &b"AB"[..]), ("*", &b"a"[..])],
            [("*", &b"A"[..]), ("*", &b"ab"[..])],
            [("A", &b"*"[..]), ("a", &b"*"[..])],
            [("a", &b"*"[..]), ("A", &b"*"[..])],
            [("AB", &b"*"[..]), ("a", &b"*"[..])],
            [("A", &b"*"[..]), ("ab", &b"*"[..])],
        ]
        .iter()
        .for_each(|[lt @ (lh, ls), rt @ (rh, rs)]| {
            let l = ThinArc::from_header_and_slice(lh, ls);
            let r = ThinArc::from_header_and_slice(rh, rs);

            assert_eq!(l, l);
            assert_eq!(r, r);

            assert_ne!(l, r);
            assert_ne!(r, l);

            assert_eq!(l <= l, lt <= lt, "{lt:?} <= {lt:?}");
            assert_eq!(l >= l, lt >= lt, "{lt:?} >= {lt:?}");

            assert_eq!(l < l, lt < lt, "{lt:?} < {lt:?}");
            assert_eq!(l > l, lt > lt, "{lt:?} > {lt:?}");

            assert_eq!(r <= r, rt <= rt, "{rt:?} <= {rt:?}");
            assert_eq!(r >= r, rt >= rt, "{rt:?} >= {rt:?}");

            assert_eq!(r < r, rt < rt, "{rt:?} < {rt:?}");
            assert_eq!(r > r, rt > rt, "{rt:?} > {rt:?}");

            assert_eq!(l < r, lt < rt, "{lt:?} < {rt:?}");
            assert_eq!(r > l, rt > lt, "{rt:?} > {lt:?}");
        })
    }

    #[test]
    fn thin_eq_and_partial_cmp() {
        [
            [(0.0, &[0.0, 0.0][..]), (1.0, &[0.0, 0.0][..])],
            [(1.0, &[0.0, 0.0][..]), (0.0, &[0.0, 0.0][..])],
            [(0.0, &[0.0][..]), (0.0, &[0.0, 0.0][..])],
            [(0.0, &[0.0, 0.0][..]), (0.0, &[0.0][..])],
            [(0.0, &[1.0, 2.0][..]), (0.0, &[10.0, 20.0][..])],
        ]
        .iter()
        .for_each(|[lt @ (lh, ls), rt @ (rh, rs)]| {
            let l = ThinArc::from_header_and_slice(lh, ls);
            let r = ThinArc::from_header_and_slice(rh, rs);

            assert_eq!(l, l);
            assert_eq!(r, r);

            assert_ne!(l, r);
            assert_ne!(r, l);

            assert_eq!(l <= l, lt <= lt, "{lt:?} <= {lt:?}");
            assert_eq!(l >= l, lt >= lt, "{lt:?} >= {lt:?}");

            assert_eq!(l < l, lt < lt, "{lt:?} < {lt:?}");
            assert_eq!(l > l, lt > lt, "{lt:?} > {lt:?}");

            assert_eq!(r <= r, rt <= rt, "{rt:?} <= {rt:?}");
            assert_eq!(r >= r, rt >= rt, "{rt:?} >= {rt:?}");

            assert_eq!(r < r, rt < rt, "{rt:?} < {rt:?}");
            assert_eq!(r > r, rt > rt, "{rt:?} > {rt:?}");

            assert_eq!(l < r, lt < rt, "{lt:?} < {rt:?}");
            assert_eq!(r > l, rt > lt, "{rt:?} > {lt:?}");
        })
    }

    #[test]
    fn with_arc_mut() {
        let mut arc: ThinArc<u8, u16> = ThinArc::from_header_and_slice(1u8, &[1, 2, 3]);
        arc.with_arc_mut(|arc| Arc::get_mut(arc).unwrap().slice_mut().fill(2));
        arc.with_arc_mut(|arc| assert!(Arc::get_unique(arc).is_some()));
        arc.with_arc(|arc| assert!(Arc::is_unique(arc)));
        // Using clone to check that the layout generated in new_uninit_slice is compatible
        // with ArcInner.
        let arcs = [
            arc.clone(),
            arc.clone(),
            arc.clone(),
            arc.clone(),
            arc.clone(),
        ];
        arc.with_arc(|arc| assert_eq!(6, Arc::count(arc)));

        // If the layout is not compatible, then the data might be corrupted.
        assert_eq!(arc.header.header, 1);
        assert_eq!(&arc.slice, [2, 2, 2]);

        // Drop the arcs and check the count and the content to
        // make sure it isn't corrupted.
        drop(arcs);
        arc.with_arc_mut(|arc| assert!(Arc::get_unique(arc).is_some()));
        arc.with_arc(|arc| assert!(Arc::is_unique(arc)));
        assert_eq!(arc.header.header, 1);
        assert_eq!(&arc.slice, [2, 2, 2]);
    }

    #[allow(dead_code)]
    const fn is_partial_ord<T: ?Sized + PartialOrd>() {}

    #[allow(dead_code)]
    const fn is_ord<T: ?Sized + Ord>() {}

    // compile-time check that PartialOrd/Ord is correctly derived
    const _: () = is_partial_ord::<ThinArc<f64, f64>>();
    const _: () = is_partial_ord::<ThinArc<f64, u64>>();
    const _: () = is_partial_ord::<ThinArc<u64, f64>>();
    const _: () = is_ord::<ThinArc<u64, u64>>();
}