1use core::cmp::Ordering;
2use core::ffi::c_void;
3use core::fmt;
4use core::hash::{Hash, Hasher};
5use core::iter::{ExactSizeIterator, Iterator};
6use core::marker::PhantomData;
7use core::mem::ManuallyDrop;
8use core::ops::Deref;
9use core::panic::{RefUnwindSafe, UnwindSafe};
10use core::ptr;
11
12use super::{Arc, ArcInner, HeaderSlice, HeaderSliceWithLengthProtected, HeaderWithLength};
13use crate::header::HeaderSliceWithLengthUnchecked;
14
/// A "thin" `Arc` containing dynamically sized data.
///
/// Functionally equivalent to an `Arc` of a header plus a slice, but the
/// slice length is stored inside the allocation (see `HeaderWithLength`)
/// rather than in the pointer, so `ThinArc` itself is a single word.
/// `thin_to_thick` reconstructs the fat pointer on demand from that
/// stored length.
#[repr(transparent)]
pub struct ThinArc<H, T> {
    // Pointer to the shared allocation, typed as though the slice were
    // empty; the real element count lives in `data.header.length`.
    ptr: ptr::NonNull<ArcInner<HeaderSlice<HeaderWithLength<H>, [T; 0]>>>,
    // Marks logical ownership of `H` and `T` for variance and drop check.
    phantom: PhantomData<(H, T)>,
}
47
// SAFETY: a `ThinArc` behaves like an `Arc` of its contents — it can hand
// out `&H`/`&T` to other threads and the last clone may drop `H`/`T` on any
// thread — so sending or sharing it requires `H` and `T` to be both
// `Sync` and `Send`, mirroring the bounds `Arc` uses.
unsafe impl<H: Sync + Send, T: Sync + Send> Send for ThinArc<H, T> {}
unsafe impl<H: Sync + Send, T: Sync + Send> Sync for ThinArc<H, T> {}

// A `ThinArc` only exposes shared references to its contents, so it is
// unwind-safe whenever the contents are unwind-safe behind `&`.
impl<H: RefUnwindSafe, T: RefUnwindSafe> UnwindSafe for ThinArc<H, T> {}
52
/// Rebuilds the fat (pointer + length) `ArcInner` pointer from a thin one
/// by reading the slice length stored in the allocation's header.
#[inline]
fn thin_to_thick<H, T>(arc: &ThinArc<H, T>) -> *mut ArcInner<HeaderSliceWithLengthProtected<H, T>> {
    let thin = arc.ptr.as_ptr();
    // SAFETY: `arc` keeps the allocation alive, so the stored length is
    // valid to read.
    let len = unsafe { (*thin).data.header.length };
    // Synthesize a `*mut [T]` whose address is `thin` and whose pointer
    // metadata is `len`. Only the metadata matters for the cast below; the
    // element type is irrelevant.
    let fake_slice = ptr::slice_from_raw_parts_mut(thin as *mut T, len);

    // The cast preserves both the address and the `len` metadata, yielding
    // a fat pointer to the unsized `ArcInner<HeaderSliceWithLengthProtected<H, T>>`.
    fake_slice as *mut ArcInner<HeaderSliceWithLengthProtected<H, T>>
}
64
impl<H, T> ThinArc<H, T> {
    /// Temporarily converts `self` into a bona fide `Arc` and exposes it to
    /// the provided callback. The refcount is not modified: the transient
    /// `Arc` is wrapped in `ManuallyDrop` so its destructor never runs.
    #[inline]
    pub fn with_arc<F, U>(&self, f: F) -> U
    where
        F: FnOnce(&Arc<HeaderSliceWithLengthUnchecked<H, T>>) -> U,
    {
        // Synthesize a transient Arc from the thin pointer. ManuallyDrop
        // prevents it from decrementing the refcount on scope exit.
        let transient = ManuallyDrop::new(Arc::from_protected(unsafe {
            Arc::from_raw_inner(thin_to_thick(self))
        }));

        // Expose the transient Arc to the callback; cloning it there is
        // fine and bumps the count normally.
        f(&transient)
    }

    /// Same as [`Self::with_arc`], but the callback sees the
    /// length-protected form of the header slice.
    #[inline]
    fn with_protected_arc<F, U>(&self, f: F) -> U
    where
        F: FnOnce(&Arc<HeaderSliceWithLengthProtected<H, T>>) -> U,
    {
        // Transient Arc again: no refcount change, destructor suppressed.
        let transient = ManuallyDrop::new(unsafe { Arc::from_raw_inner(thin_to_thick(self)) });

        f(&transient)
    }

    /// Temporarily converts `self` into a mutable bona fide `Arc` and
    /// exposes it to the provided callback. The refcount is not modified.
    #[inline]
    pub fn with_arc_mut<F, U>(&mut self, f: F) -> U
    where
        F: FnOnce(&mut Arc<HeaderSliceWithLengthProtected<H, T>>) -> U,
    {
        // Guard that writes the transient Arc's (possibly updated) inner
        // pointer back into `self` on scope exit — including if `f`
        // unwinds — so `self` never dangles after the callback mutates
        // the Arc.
        struct DropGuard<'a, H, T> {
            transient: ManuallyDrop<Arc<HeaderSliceWithLengthProtected<H, T>>>,
            this: &'a mut ThinArc<H, T>,
        }

        impl<'a, H, T> Drop for DropGuard<'a, H, T> {
            fn drop(&mut self) {
                // The stored length must still match the slice metadata,
                // otherwise a later `thin_to_thick` would rebuild a bogus
                // fat pointer.
                debug_assert_eq!(
                    self.transient.length(),
                    self.transient.slice().len(),
                    "Length needs to be correct for ThinArc to work"
                );
                // Re-thin the (possibly changed) inner pointer back into
                // `self`.
                self.this.ptr = self.transient.p.cast();
            }
        }

        // Transient Arc with destructor suppressed, as in `with_arc`.
        let transient = ManuallyDrop::new(unsafe { Arc::from_raw_inner(thin_to_thick(self)) });

        let mut guard = DropGuard {
            transient,
            this: self,
        };

        // Expose the transient Arc mutably to the callback.
        let ret = f(&mut guard.transient);

        // Same invariant check on the non-panic path, with better
        // diagnostics than the one inside the guard's drop.
        debug_assert_eq!(
            guard.transient.length(),
            guard.transient.slice().len(),
            "Length needs to be correct for ThinArc to work"
        );

        ret
    }

    /// Creates a `ThinArc` for a header and a list of items, sized up front
    /// via `ExactSizeIterator`.
    pub fn from_header_and_iter<I>(header: H, items: I) -> Self
    where
        I: Iterator<Item = T> + ExactSizeIterator,
    {
        let header = HeaderWithLength::new(header, items.len());
        Arc::into_thin(Arc::from_header_and_iter(header, items))
    }

    /// Creates a `ThinArc` for a header and a slice of `Copy` items.
    pub fn from_header_and_slice(header: H, items: &[T]) -> Self
    where
        T: Copy,
    {
        let header = HeaderWithLength::new(header, items.len());
        Arc::into_thin(Arc::from_header_and_slice(header, items))
    }

    /// Returns the address of the backing allocation as an opaque pointer.
    #[inline]
    pub fn ptr(&self) -> *const c_void {
        self.ptr.cast().as_ptr()
    }

    /// Returns the address of the heap allocation; identical to
    /// [`Self::ptr`].
    #[inline]
    pub fn heap_ptr(&self) -> *const c_void {
        self.ptr()
    }

    /// Reconstructs a `ThinArc` from a raw pointer.
    ///
    /// # Safety
    ///
    /// `ptr` must be a non-null pointer previously obtained from
    /// [`Self::into_raw`] (or from [`Self::as_ptr`] on a `ThinArc` whose
    /// ownership is being transferred) for the same `H` and `T`. This call
    /// takes over one reference count; using the same pointer twice
    /// double-frees.
    #[inline]
    pub unsafe fn from_raw(ptr: *const c_void) -> Self {
        Self {
            ptr: ptr::NonNull::new_unchecked(ptr as *mut c_void).cast(),
            phantom: PhantomData,
        }
    }

    /// Consumes `self` without decrementing the refcount (via
    /// `ManuallyDrop`) and returns the raw pointer. Pair with
    /// [`Self::from_raw`] to avoid a leak.
    #[inline]
    pub fn into_raw(self) -> *const c_void {
        let this = ManuallyDrop::new(self);
        this.ptr()
    }

    /// Returns the raw pointer without giving up ownership; identical to
    /// [`Self::ptr`].
    #[inline]
    pub fn as_ptr(&self) -> *const c_void {
        self.ptr()
    }

    /// Returns the number of strong references to this allocation.
    #[inline]
    pub fn strong_count(this: &Self) -> usize {
        Self::with_arc(this, Arc::strong_count)
    }
}
232
impl<H, T> Deref for ThinArc<H, T> {
    type Target = HeaderSliceWithLengthUnchecked<H, T>;

    #[inline]
    fn deref(&self) -> &Self::Target {
        // SAFETY: `self` keeps the allocation alive for at least the
        // lifetime of the returned reference, and `thin_to_thick` rebuilds
        // a valid fat pointer from the stored length.
        unsafe { (*thin_to_thick(self)).data.inner() }
    }
}
241
impl<H, T> Clone for ThinArc<H, T> {
    #[inline]
    fn clone(&self) -> Self {
        // Clone through a transient protected Arc (which bumps the
        // refcount), then re-thin the freshly cloned handle.
        ThinArc::with_protected_arc(self, |a| Arc::protected_into_thin(a.clone()))
    }
}
248
impl<H, T> Drop for ThinArc<H, T> {
    #[inline]
    fn drop(&mut self) {
        // Materialize an owned fat Arc from a copy of our pointer and let
        // its destructor perform the refcount decrement (and deallocation
        // if this was the last reference). The copied ThinArc assumes this
        // value's share of the count, so no double-free occurs.
        let _ = Arc::protected_from_thin(ThinArc {
            ptr: self.ptr,
            phantom: PhantomData,
        });
    }
}
258
impl<H, T> Arc<HeaderSliceWithLengthUnchecked<H, T>> {
    /// Converts an `Arc` into a `ThinArc` without checking that the stored
    /// length matches the slice length.
    ///
    /// Callers must have validated `header.length == slice.len()`
    /// (see `into_thin`); `thin_to_thick` trusts the stored length.
    #[inline]
    unsafe fn into_thin_unchecked(a: Self) -> ThinArc<H, T> {
        // Move into the length-protected representation first, then thin.
        let this_protected: Arc<HeaderSliceWithLengthProtected<H, T>> =
            unsafe { Arc::from_unprotected_unchecked(a) };

        Arc::protected_into_thin(this_protected)
    }

    /// Converts an `Arc` into a `ThinArc`.
    ///
    /// # Panics
    ///
    /// Panics if the stored header length does not match the actual slice
    /// length — the invariant `ThinArc` relies on.
    #[inline]
    pub fn into_thin(a: Self) -> ThinArc<H, T> {
        assert_eq!(
            a.header.length,
            a.slice.len(),
            "Length needs to be correct for ThinArc to work"
        );
        // SAFETY: the assert above established the length invariant.
        unsafe { Self::into_thin_unchecked(a) }
    }

    /// Converts a `ThinArc` into an `Arc`; consumes the `ThinArc`, so the
    /// refcount is unchanged overall.
    #[inline]
    pub fn from_thin(a: ThinArc<H, T>) -> Self {
        Self::from_protected(Arc::<HeaderSliceWithLengthProtected<H, T>>::protected_from_thin(a))
    }

    /// Converts from the length-protected representation to the unchecked
    /// one. This direction is always sound: it only widens what callers may
    /// do with the value.
    #[inline]
    fn from_protected(a: Arc<HeaderSliceWithLengthProtected<H, T>>) -> Self {
        // The two header-slice types share the same layout; `as _`
        // reinterprets the inner pointer between them.
        unsafe { Arc::from_raw_inner(Arc::into_raw_inner(a) as _) }
    }
}
304
impl<H, T> Arc<HeaderSliceWithLengthProtected<H, T>> {
    /// Converts a length-protected `Arc` into a `ThinArc`; consumes the
    /// `Arc`, so the refcount is unchanged overall.
    #[inline]
    pub fn protected_into_thin(a: Self) -> ThinArc<H, T> {
        debug_assert_eq!(
            a.length(),
            a.slice().len(),
            "Length needs to be correct for ThinArc to work"
        );

        // Drop the fat pointer's length metadata; it can be recovered
        // later from the header via `thin_to_thick`.
        let fat_ptr: *mut ArcInner<HeaderSliceWithLengthProtected<H, T>> = Arc::into_raw_inner(a);
        let thin_ptr: *mut ArcInner<HeaderSlice<HeaderWithLength<H>, [T; 0]>> = fat_ptr.cast();
        ThinArc {
            // SAFETY: `fat_ptr` came from a live Arc, so it is non-null.
            ptr: unsafe { ptr::NonNull::new_unchecked(thin_ptr) },
            phantom: PhantomData,
        }
    }

    /// Converts a `ThinArc` into a length-protected `Arc`; consumes the
    /// `ThinArc`, so the refcount is unchanged overall.
    #[inline]
    pub fn protected_from_thin(a: ThinArc<H, T>) -> Self {
        // Suppress the ThinArc destructor: ownership of its refcount share
        // transfers to the returned Arc.
        let a = ManuallyDrop::new(a);
        let ptr = thin_to_thick(&a);
        // SAFETY: `ptr` points to a live ArcInner whose count we now own.
        unsafe { Arc::from_raw_inner(ptr) }
    }

    /// Reinterprets an unchecked header-slice `Arc` as length-protected.
    ///
    /// Callers must guarantee the stored length matches the slice length;
    /// the "protected" form is trusted not to let the two diverge.
    #[inline]
    unsafe fn from_unprotected_unchecked(a: Arc<HeaderSliceWithLengthUnchecked<H, T>>) -> Self {
        // Same-layout reinterpretation of the inner pointer, mirroring
        // `from_protected` in the other direction.
        unsafe { Arc::from_raw_inner(Arc::into_raw_inner(a) as _) }
    }
}
347
348impl<H: PartialEq, T: PartialEq> PartialEq for ThinArc<H, T> {
349 #[inline]
350 fn eq(&self, other: &ThinArc<H, T>) -> bool {
351 ThinArc::with_arc(self, |a| ThinArc::with_arc(other, |b| *a == *b))
352 }
353}
354
// Equality is total whenever both the header and the elements have total
// equality.
impl<H: Eq, T: Eq> Eq for ThinArc<H, T> {}
356
357impl<H: PartialOrd, T: PartialOrd> PartialOrd for ThinArc<H, T> {
358 #[inline]
359 fn partial_cmp(&self, other: &ThinArc<H, T>) -> Option<Ordering> {
360 ThinArc::with_arc(self, |a| ThinArc::with_arc(other, |b| a.partial_cmp(b)))
361 }
362}
363
364impl<H: Ord, T: Ord> Ord for ThinArc<H, T> {
365 #[inline]
366 fn cmp(&self, other: &ThinArc<H, T>) -> Ordering {
367 ThinArc::with_arc(self, |a| ThinArc::with_arc(other, |b| a.cmp(b)))
368 }
369}
370
371impl<H: Hash, T: Hash> Hash for ThinArc<H, T> {
372 fn hash<HSR: Hasher>(&self, state: &mut HSR) {
373 ThinArc::with_arc(self, |a| a.hash(state))
374 }
375}
376
377impl<H: fmt::Debug, T: fmt::Debug> fmt::Debug for ThinArc<H, T> {
378 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
379 fmt::Debug::fmt(&**self, f)
380 }
381}
382
383impl<H, T> fmt::Pointer for ThinArc<H, T> {
384 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
385 fmt::Pointer::fmt(&self.ptr(), f)
386 }
387}
388
#[cfg(test)]
mod tests {
    use crate::{Arc, HeaderWithLength, ThinArc};
    use alloc::vec;
    use core::clone::Clone;
    use core::ops::Drop;
    use core::sync::atomic;
    use core::sync::atomic::Ordering::{Acquire, SeqCst};

    // Increments the shared counter on drop so tests can count how many
    // times the header destructor ran (it should be exactly once per
    // allocation, regardless of how many handles were cloned).
    #[derive(PartialEq)]
    struct Canary(*mut atomic::AtomicUsize);

    impl Drop for Canary {
        fn drop(&mut self) {
            unsafe {
                (*self.0).fetch_add(1, SeqCst);
            }
        }
    }

    // Zero-length slices must round-trip through the thin representation.
    #[test]
    fn empty_thin() {
        let header = HeaderWithLength::new(100u32, 0);
        let x = Arc::from_header_and_iter(header, core::iter::empty::<i32>());
        let y = Arc::into_thin(x.clone());
        assert_eq!(y.header.header, 100);
        assert!(y.slice.is_empty());
        assert_eq!(x.header.header, 100);
        assert!(x.slice.is_empty());
    }

    // Elements whose alignment differs from the header's (u16 after i32)
    // must land at the right offsets despite padding.
    #[test]
    fn thin_assert_padding() {
        #[derive(Clone, Default)]
        #[repr(C)]
        struct Padded {
            i: u16,
        }

        let header = HeaderWithLength::new(0i32, 2);
        let items = vec![Padded { i: 0xdead }, Padded { i: 0xbeef }];
        let a = ThinArc::from_header_and_iter(header, items.into_iter());
        assert_eq!(a.slice.len(), 2);
        assert_eq!(a.slice[0].i, 0xdead);
        assert_eq!(a.slice[1].i, 0xbeef);
    }

    // Cloning through with_arc, direct cloning, comparison, and from_thin
    // must leave exactly one header drop (slice-built variant).
    #[test]
    #[allow(clippy::redundant_clone, clippy::eq_op)]
    fn slices_and_thin() {
        let mut canary = atomic::AtomicUsize::new(0);
        let c = Canary(&mut canary as *mut atomic::AtomicUsize);
        let v = vec![5, 6];
        let header = HeaderWithLength::new(c, v.len());
        {
            let x = Arc::into_thin(Arc::from_header_and_slice(header, &v));
            let y = ThinArc::with_arc(&x, |q| q.clone());
            let _ = y.clone();
            let _ = x == x;
            Arc::from_thin(x.clone());
        }
        // All handles dropped above: the Canary must have run exactly once.
        assert_eq!(canary.load(Acquire), 1);
    }

    // Same as slices_and_thin, but building from an iterator.
    #[test]
    #[allow(clippy::redundant_clone, clippy::eq_op)]
    fn iter_and_thin() {
        let mut canary = atomic::AtomicUsize::new(0);
        let c = Canary(&mut canary as *mut atomic::AtomicUsize);
        let v = vec![5, 6];
        let header = HeaderWithLength::new(c, v.len());
        {
            let x = Arc::into_thin(Arc::from_header_and_iter(header, v.into_iter()));
            let y = ThinArc::with_arc(&x, |q| q.clone());
            let _ = y.clone();
            let _ = x == x;
            Arc::from_thin(x.clone());
        }
        assert_eq!(canary.load(Acquire), 1);
    }

    // into_raw must hand back the same address as as_ptr, and from_raw
    // must reclaim ownership so the header still drops exactly once.
    #[test]
    fn into_raw_and_from_raw() {
        let mut canary = atomic::AtomicUsize::new(0);
        let c = Canary(&mut canary as *mut atomic::AtomicUsize);
        let v = vec![5, 6];
        let header = HeaderWithLength::new(c, v.len());
        {
            type ThinArcCanary = ThinArc<Canary, u32>;
            let x: ThinArcCanary = Arc::into_thin(Arc::from_header_and_iter(header, v.into_iter()));
            let ptr = x.as_ptr();

            assert_eq!(x.into_raw(), ptr);

            let _x = unsafe { ThinArcCanary::from_raw(ptr) };
        }
        assert_eq!(canary.load(Acquire), 1);
    }

    // Eq/Ord on ThinArc must agree with the same operators on the plain
    // (header, slice) tuples, across header- and slice-driven orderings.
    #[test]
    fn thin_eq_and_cmp() {
        [
            [("*", &b"AB"[..]), ("*", &b"ab"[..])],
            [("*", &b"AB"[..]), ("*", &b"a"[..])],
            [("*", &b"A"[..]), ("*", &b"ab"[..])],
            [("A", &b"*"[..]), ("a", &b"*"[..])],
            [("a", &b"*"[..]), ("A", &b"*"[..])],
            [("AB", &b"*"[..]), ("a", &b"*"[..])],
            [("A", &b"*"[..]), ("ab", &b"*"[..])],
        ]
        .iter()
        .for_each(|[lt @ (lh, ls), rt @ (rh, rs)]| {
            let l = ThinArc::from_header_and_slice(lh, ls);
            let r = ThinArc::from_header_and_slice(rh, rs);

            assert_eq!(l, l);
            assert_eq!(r, r);

            assert_ne!(l, r);
            assert_ne!(r, l);

            assert_eq!(l <= l, lt <= lt, "{lt:?} <= {lt:?}");
            assert_eq!(l >= l, lt >= lt, "{lt:?} >= {lt:?}");

            assert_eq!(l < l, lt < lt, "{lt:?} < {lt:?}");
            assert_eq!(l > l, lt > lt, "{lt:?} > {lt:?}");

            assert_eq!(r <= r, rt <= rt, "{rt:?} <= {rt:?}");
            assert_eq!(r >= r, rt >= rt, "{rt:?} >= {rt:?}");

            assert_eq!(r < r, rt < rt, "{rt:?} < {rt:?}");
            assert_eq!(r > r, rt > rt, "{rt:?} > {rt:?}");

            assert_eq!(l < r, lt < rt, "{lt:?} < {rt:?}");
            assert_eq!(r > l, rt > lt, "{rt:?} > {lt:?}");
        })
    }

    // Same agreement check for PartialOrd-only element types (floats).
    #[test]
    fn thin_eq_and_partial_cmp() {
        [
            [(0.0, &[0.0, 0.0][..]), (1.0, &[0.0, 0.0][..])],
            [(1.0, &[0.0, 0.0][..]), (0.0, &[0.0, 0.0][..])],
            [(0.0, &[0.0][..]), (0.0, &[0.0, 0.0][..])],
            [(0.0, &[0.0, 0.0][..]), (0.0, &[0.0][..])],
            [(0.0, &[1.0, 2.0][..]), (0.0, &[10.0, 20.0][..])],
        ]
        .iter()
        .for_each(|[lt @ (lh, ls), rt @ (rh, rs)]| {
            let l = ThinArc::from_header_and_slice(lh, ls);
            let r = ThinArc::from_header_and_slice(rh, rs);

            assert_eq!(l, l);
            assert_eq!(r, r);

            assert_ne!(l, r);
            assert_ne!(r, l);

            assert_eq!(l <= l, lt <= lt, "{lt:?} <= {lt:?}");
            assert_eq!(l >= l, lt >= lt, "{lt:?} >= {lt:?}");

            assert_eq!(l < l, lt < lt, "{lt:?} < {lt:?}");
            assert_eq!(l > l, lt > lt, "{lt:?} > {lt:?}");

            assert_eq!(r <= r, rt <= rt, "{rt:?} <= {rt:?}");
            assert_eq!(r >= r, rt >= rt, "{rt:?} >= {rt:?}");

            assert_eq!(r < r, rt < rt, "{rt:?} < {rt:?}");
            assert_eq!(r > r, rt > rt, "{rt:?} > {rt:?}");

            assert_eq!(l < r, lt < rt, "{lt:?} < {rt:?}");
            assert_eq!(r > l, rt > lt, "{rt:?} > {lt:?}");
        })
    }

    // Mutation through with_arc_mut must be visible afterwards, and
    // uniqueness/refcount bookkeeping must survive clones and drops.
    #[test]
    fn with_arc_mut() {
        let mut arc: ThinArc<u8, u16> = ThinArc::from_header_and_slice(1u8, &[1, 2, 3]);
        arc.with_arc_mut(|arc| Arc::get_mut(arc).unwrap().slice_mut().fill(2));
        arc.with_arc_mut(|arc| assert!(Arc::get_unique(arc).is_some()));
        arc.with_arc(|arc| assert!(Arc::is_unique(arc)));
        // Five clones: the count observed inside with_arc should be 6.
        let arcs = [
            arc.clone(),
            arc.clone(),
            arc.clone(),
            arc.clone(),
            arc.clone(),
        ];
        arc.with_arc(|arc| assert_eq!(6, Arc::count(arc)));

        assert_eq!(arc.header.header, 1);
        assert_eq!(&arc.slice, [2, 2, 2]);

        // After dropping the clones we must be unique again, with the
        // mutation still in place.
        drop(arcs);
        arc.with_arc_mut(|arc| assert!(Arc::get_unique(arc).is_some()));
        arc.with_arc(|arc| assert!(Arc::is_unique(arc)));
        assert_eq!(arc.header.header, 1);
        assert_eq!(&arc.slice, [2, 2, 2]);
    }

    // Compile-time checks that the ordering impls exist with the expected
    // bounds: any float anywhere keeps us at PartialOrd; all-integer gets Ord.
    #[allow(dead_code)]
    const fn is_partial_ord<T: ?Sized + PartialOrd>() {}

    #[allow(dead_code)]
    const fn is_ord<T: ?Sized + Ord>() {}

    const _: () = is_partial_ord::<ThinArc<f64, f64>>();
    const _: () = is_partial_ord::<ThinArc<f64, u64>>();
    const _: () = is_partial_ord::<ThinArc<u64, f64>>();
    const _: () = is_ord::<ThinArc<u64, u64>>();
}