wiwi/rc/inner.rs

use crate::prelude::*;
use crate::macro_util::void;
use self::alloc_mod::Layout;
use self::atomic::Ordering::*;

#[repr(transparent)]
pub struct RcInner<C, V, S> {
	ptr: ptr::NonNull<RcLayout<C, V, S>>
}

// if fields in this struct need to change,
// make sure to change `calc_layout` accordingly
#[repr(C)]
struct RcLayout<C, V, S> {
	/// The reference counter (handles counting both strong and weak references)
	counter: C,

	/// The length of the slice stored in the unsized portion
	slice_len: usize,

	/// The value (the sized portion)
	value: V,

	/// A zero-sized "header" marking the start of the unsized slice portion
	///
	/// This forces this struct to have an alignment of (at least) S's alignment,
	/// while also not requiring that there be at least 1 S element in this struct
	/// itself; the slice elements are stored directly after this field.
	slice: [S; 0]
}
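
// Quick illustration of the zero-length-array trick above (a test sketch
// added for exposition; assume it is not part of the original module): the
// `[S; 0]` field contributes S's alignment but no size, so slice elements
// can begin exactly at its offset.
#[cfg(test)]
mod zero_len_tail_sketch {
	use super::*;

	#[test]
	fn tail_aligns_but_adds_no_size() {
		// the tail itself occupies no space
		assert_eq!(mem::size_of::<[u64; 0]>(), 0);

		// but it still forces the header struct to S's alignment
		assert!(mem::align_of::<RcLayout<u8, u8, u64>>() >= mem::align_of::<u64>());

		// and the slice offset is suitably aligned for S
		assert_eq!(mem::offset_of!(RcLayout<u8, u8, u64>, slice) % mem::align_of::<u64>(), 0);
	}
}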

#[inline]
pub fn new_from_value<C, V>(value: V) -> RcInner<C, V, ()>
where
	C: Counter
{
	new_from_value_and_slice_copy(value, &[])
}

#[inline]
pub fn new_from_array_into_slice<C, S, const N: usize>(array: [S; N]) -> RcInner<C, (), S>
where
	C: Counter
{
	new_from_value_and_array_into_slice((), array)
}

#[inline]
pub fn new_from_slice_clone<C, S>(slice: &[S]) -> RcInner<C, (), S>
where
	C: Counter,
	S: Clone
{
	new_from_value_and_slice_clone((), slice)
}

#[inline]
pub fn new_from_slice_copy<C, S>(slice: &[S]) -> RcInner<C, (), S>
where
	C: Counter,
	S: Copy
{
	new_from_value_and_slice_copy((), slice)
}

#[inline]
pub fn new_from_value_and_array_into_slice<C, V, S, const N: usize>(value: V, array: [S; N]) -> RcInner<C, V, S>
where
	C: Counter
{
	// wrap the array in `ManuallyDrop` so its elements are not dropped here;
	// ownership is transferred by the bitwise copy in the call below
	let array = ManuallyDrop::new(array);

	// SAFETY: the array is in `ManuallyDrop`, so copying its elements out
	// cannot lead to a double drop
	unsafe { new_from_value_and_slice_copy_unchecked(value, &*array) }
}

#[inline]
pub fn new_from_value_and_slice_clone<C, V, S>(value: V, slice: &[S]) -> RcInner<C, V, S>
where
	C: Counter,
	S: Clone
{
	let instance = alloc_instance::<_, _, S>(slice.len());

	// SAFETY:
	// - instance just allocated in statement above
	// - because just allocated, we must have exclusive reference to `instance`
	// - reference is used just for this single `write` statement and
	//   dropped immediately after
	unsafe { void!(value_uninit(instance).write(value)) }

	// SAFETY: instance just allocated in statement above
	let ptr = unsafe { slice_thin_ptr(instance).as_ptr() };

	slice.iter().enumerate().for_each(|(i, s)| {
		// SAFETY: `ptr` is writeable for `slice.len()` elements
		let ptr = unsafe { ptr.add(i) };

		// SAFETY: see above
		unsafe { ptr.write(s.clone()) }
	});

	instance
}

#[inline]
pub fn new_from_value_and_slice_copy<C, V, S>(value: V, slice: &[S]) -> RcInner<C, V, S>
where
	C: Counter,
	S: Copy
{
	// SAFETY: `S: Copy` enforced by trait bound
	unsafe { new_from_value_and_slice_copy_unchecked(value, slice) }
}

/// # Safety
///
/// The provided slice must either contain elements that implement [`Copy`],
/// or the caller must prevent the input slice from dropping its elements,
/// to avoid double dropping them.
#[inline]
unsafe fn new_from_value_and_slice_copy_unchecked<C, V, S>(value: V, slice: &[S]) -> RcInner<C, V, S>
where
	C: Counter
{
	let instance = alloc_instance(slice.len());

	// SAFETY:
	// - instance just allocated in statement above
	// - because just allocated, we must have exclusive reference to `instance`
	// - reference is used just for this single `write` statement and
	//   dropped immediately after
	unsafe { void!(value_uninit(instance).write(value)) }

	// SAFETY: instance just allocated in statement above
	let ptr = unsafe { slice_thin_ptr(instance).as_ptr() };

	// SAFETY: `ptr` is writeable for `slice.len()` elements
	unsafe {
		ptr::copy_nonoverlapping(
			slice.as_ptr(),
			ptr,
			slice.len()
		)
	}

	instance
}
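
// Hedged usage sketch of the constructors and accessors (a test added for
// exposition; assume it is not part of the original module): build an
// instance holding a value plus a slice, read both back, then run the
// two-phase teardown by hand.
#[cfg(test)]
mod construct_sketch {
	use super::*;

	#[test]
	fn value_and_slice_roundtrip() {
		let instance = new_from_value_and_slice_copy::<ThreadCounter, _, _>(7u32, &[1u8, 2, 3]);

		// SAFETY: `instance` was just allocated and fully initialised above,
		// and has not been dropped or deallocated
		unsafe {
			assert_eq!(*value_ref(instance), 7);
			assert_eq!(slice_ref(instance), &[1, 2, 3]);

			// first and only call; no outstanding references into the slice
			drop_instance(instance);

			// first and only call, after `drop_instance`
			dealloc_instance(instance);
		}
	}
}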

/// Initialise a new instance with the provided length
///
/// The instance returned will have its `counter` and `slice_len` fields
/// initialised. The counter is set to 1 strong and 1 weak according to the
/// contract of [`Counter`]. The caller is responsible for initialising the
/// `value` and `slice` fields.
#[inline]
fn alloc_instance<C, V, S>(slice_len: usize) -> RcInner<C, V, S>
where
	C: Counter
{
	let layout = calc_layout::<C, V, S>(slice_len);

	// SAFETY: `calc_layout` never returns a layout with 0 size
	let ptr = unsafe { alloc(layout) };

	let Some(ptr) = ptr::NonNull::new(ptr.cast()) else {
		alloc_mod::handle_alloc_error(layout)
	};

	let instance = RcInner { ptr };

	// we can fill in counter since we know the type of counter already
	// SAFETY:
	// - instance just allocated in statements above
	// - because just allocated, we must have exclusive reference to `instance`
	// - reference is used just for this single `write` statement and
	//   dropped immediately after
	unsafe { void!(counter_uninit(instance).write(C::new())) }

	// we can fill in length since that will never change
	// SAFETY: same as above
	unsafe { void!(slice_len_uninit(instance).write(slice_len)) }

	instance
}

/// Drop the value and slice contents of the provided instance
///
/// # Safety
///
/// This instance must be fully initialised, and this must be the first time
/// this function is called on this particular `instance`.
///
/// There must also not be any existing references to `slice`, so that we can
/// safely create an exclusive reference to the slice in order to drop it.
/// Ideally we would do this through a fat pointer directly, without creating
/// an intermediate wide reference, but those APIs are currently unstable.
#[inline]
pub unsafe fn drop_instance<C, V, S>(instance: RcInner<C, V, S>)
where
	C: Counter
{
	// SAFETY: caller promises `instance` is fully initialised
	let value_ptr = unsafe { value_ptr(instance).as_ptr() };

	// SAFETY: see above
	unsafe { ptr::drop_in_place(value_ptr) }

	// SAFETY: caller promises `instance` is fully initialised, and that we can
	// safely create an exclusive reference. If this is only called in the `drop`
	// handler of the last strong pointer, this should be safe. We drop here
	// using this temporary exclusive reference, which is itself dropped before
	// this function returns, so dealloc can run without triggering UB
	let slice_ref = unsafe { slice_mut(instance) };

	// SAFETY: see above
	unsafe { ptr::drop_in_place(slice_ref) }
}

/// Drop the counter and deallocate the backing allocation of the provided instance
///
/// # Safety
///
/// This instance must be in the partially initialised state following a call to
/// [`drop_instance`], and this must be the first time this function is called on
/// this particular `instance`. This may be called on an instance that is still
/// fully initialised (ie. [`drop_instance`] has not been called on it), but
/// that is equivalent to leaking the value and slice fields, and is almost
/// certainly incorrect.
#[inline]
pub unsafe fn dealloc_instance<C, V, S>(instance: RcInner<C, V, S>)
where
	C: Counter
{
	// SAFETY: caller promises `counter` is initialised
	let counter_ptr = unsafe { counter_ptr(instance).as_ptr() };

	// SAFETY: see above
	unsafe { ptr::drop_in_place(counter_ptr) }

	// SAFETY: caller promises `slice_len` is initialised
	let slice_len = unsafe { slice_len(instance) };

	let layout = calc_layout::<C, V, S>(slice_len);

	// SAFETY: see above
	unsafe { dealloc(instance.ptr.as_ptr().cast(), layout) }
}
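
// Sketch of the intended call protocol for the two functions above (a
// hypothetical helper added for exposition, not part of this module's API;
// assume the public strong pointer's `drop` impl drives it): the last strong
// reference drops the payload, then releases the implicit weak reference
// that the strong references collectively hold.
#[allow(dead_code)]
unsafe fn drop_strong_sketch<C, V, S>(instance: RcInner<C, V, S>)
where
	C: Counter
{
	// SAFETY: caller guarantees `instance` is fully initialised and has not
	// been dropped or deallocated
	unsafe {
		if counter_ref(instance).dec_strong_for_drop() {
			// last strong reference: drop the value and slice contents
			drop_instance(instance);

			// release the implicit weak reference; if it was also the last
			// weak reference, free the backing allocation
			if counter_ref(instance).dec_weak_for_drop() {
				dealloc_instance(instance)
			}
		}
	}
}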

/// Calculate the layout to allocate a new instance with the specified counter,
/// value type, slice type, and slice length
// TODO: make this fn `const` when `feature(const_alloc_layout)` is stable
#[inline]
fn calc_layout<C, V, S>(slice_len: usize) -> Layout {
	fn inner<C, V, S>(slice_len: usize) -> Option<Layout> {
		// If the size of `V` is not an even multiple of the alignment of the rest
		// of the struct (the max of `usize`'s and `C`'s alignments), and the
		// alignment of `S` is less than or equal to that of `V`, the `slice`
		// field will sit right at the end of `V`, followed by trailing padding.
		// This doesn't cause UB, but in those edge cases it would allocate and
		// use more memory than necessary. So we calculate the layout manually
		// (possible because of `repr(C)`), attaching the real layout of the
		// slice where the `slice` field would have been, with debug assertions
		// checking that each field lands where `Layout::new` would have put it.

		let mut layout = Layout::new::<()>();

		macro_rules! extend_layout {
			($field_name:ident, $layout:expr) => {
				let new = layout
					.extend($layout)
					.ok()?;

				debug_assert_eq!(
					mem::offset_of!(RcLayout<C, V, S>, $field_name),
					new.1
				);

				layout = new.0;
			}
		}

		extend_layout!(counter, Layout::new::<C>());
		extend_layout!(slice_len, Layout::new::<usize>());
		extend_layout!(value, Layout::new::<V>());
		extend_layout!(slice, Layout::array::<S>(slice_len).ok()?);

		Some(layout.pad_to_align())
	}

	inner::<C, V, S>(slice_len).expect("rc layout calculation failed")
}
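
// Illustration of what `calc_layout` computes (a test sketch added for
// exposition; assume it is not part of the original module): for one concrete
// instantiation, the manually extended layout should cover the sized header
// plus the slice elements, at the header's alignment.
#[cfg(test)]
mod calc_layout_sketch {
	use super::*;

	#[test]
	fn layout_covers_header_and_tail() {
		let layout = calc_layout::<u64, u8, u16>(3);

		// room for the sized fields...
		assert!(layout.size() >= mem::size_of::<RcLayout<u64, u8, u16>>());

		// ...and for the 3 `u16` elements following the `slice` field
		assert!(layout.size() >= mem::offset_of!(RcLayout<u64, u8, u16>, slice) + 3 * mem::size_of::<u16>());

		// alignment matches what the compiler computes for the header struct
		assert_eq!(layout.align(), mem::align_of::<RcLayout<u64, u8, u16>>());
	}
}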

/// # Safety
///
/// - The provided `instance` must not have been deallocated
#[inline]
unsafe fn counter_ptr<C, V, S>(instance: RcInner<C, V, S>) -> ptr::NonNull<C>
where
	C: Counter
{
	// SAFETY: caller promises to uphold the requirements
	let ptr = unsafe { &raw mut (*instance.ptr.as_ptr()).counter };

	// SAFETY: ptr is guaranteed to be nonnull
	unsafe { ptr::NonNull::new_unchecked(ptr) }
}

/// # Safety
///
/// - The provided `instance` must not have been deallocated
/// - `instance` must outlive `'h` (the lifetime of the returned reference)
/// - The returned reference must be the only mut reference into `counter` (exclusive borrow)
#[inline]
unsafe fn counter_uninit<'h, C, V, S>(instance: RcInner<C, V, S>) -> &'h mut MaybeUninit<C>
where
	C: Counter
{
	// SAFETY: caller promises to uphold the requirements
	let ptr = unsafe { counter_ptr(instance).as_ptr() };

	// SAFETY: ptr is valid, and `MaybeUninit` has same ABI as inner type
	unsafe { &mut *ptr.cast() }
}

/// # Safety
///
/// - The provided `instance` must not have been deallocated
/// - The provided `instance` must have field `counter` already initialised
/// - `instance` must outlive `'h` (the lifetime of the returned reference)
#[inline]
pub unsafe fn counter_ref<'h, C, V, S>(instance: RcInner<C, V, S>) -> &'h C
where
	C: Counter
{
	// SAFETY: caller promises to uphold the requirements
	let ptr = unsafe { counter_ptr(instance).as_ptr() };

	// SAFETY: ptr is valid
	unsafe { &*ptr }
}

/// # Safety
///
/// - The provided `instance` must not have been deallocated
#[inline]
unsafe fn slice_len_ptr<C, V, S>(instance: RcInner<C, V, S>) -> ptr::NonNull<usize>
where
	C: Counter
{
	// SAFETY: caller promises to uphold the requirements
	let ptr = unsafe { &raw mut (*instance.ptr.as_ptr()).slice_len };

	// SAFETY: ptr is guaranteed to be nonnull
	unsafe { ptr::NonNull::new_unchecked(ptr) }
}

/// # Safety
///
/// - The provided `instance` must not have been deallocated
/// - `instance` must outlive `'h` (the lifetime of the returned reference)
/// - The returned reference must be the only mut reference into `slice_len` (exclusive borrow)
#[inline]
unsafe fn slice_len_uninit<'h, C, V, S>(instance: RcInner<C, V, S>) -> &'h mut MaybeUninit<usize>
where
	C: Counter
{
	// SAFETY: caller promises to uphold the requirements
	let ptr = unsafe { slice_len_ptr(instance).as_ptr() };

	// SAFETY: ptr is valid, and `MaybeUninit` has same ABI as inner type
	unsafe { &mut *ptr.cast() }
}

/// # Safety
///
/// - The provided `instance` must not have been deallocated
/// - The provided `instance` must have field `slice_len` already initialised
#[inline]
unsafe fn slice_len<C, V, S>(instance: RcInner<C, V, S>) -> usize
where
	C: Counter
{
	// SAFETY: caller promises to uphold the requirements
	let ptr = unsafe { slice_len_ptr(instance).as_ptr() };

	// SAFETY: ptr is valid
	unsafe { *ptr }
}

/// # Safety
///
/// - The provided `instance` must not have been dropped or deallocated
#[inline]
unsafe fn value_ptr<C, V, S>(instance: RcInner<C, V, S>) -> ptr::NonNull<V>
where
	C: Counter
{
	// SAFETY: caller promises to uphold the requirements
	let ptr = unsafe { &raw mut (*instance.ptr.as_ptr()).value };

	// SAFETY: ptr is guaranteed to be nonnull
	unsafe { ptr::NonNull::new_unchecked(ptr) }
}

/// # Safety
///
/// - The provided `instance` must not have been dropped or deallocated
/// - `instance` must outlive `'h` (the lifetime of the returned reference)
/// - The returned reference must be the only mut reference into `value` (exclusive borrow)
#[inline]
unsafe fn value_uninit<'h, C, V, S>(instance: RcInner<C, V, S>) -> &'h mut MaybeUninit<V>
where
	C: Counter
{
	// SAFETY: caller promises to uphold the requirements
	let ptr = unsafe { value_ptr(instance).as_ptr() };

	// SAFETY: ptr is valid, and `MaybeUninit` has same ABI as inner type
	unsafe { &mut *ptr.cast() }
}

/// # Safety
///
/// - The provided `instance` must not have been dropped or deallocated
/// - The provided `instance` must have field `value` already initialised
/// - `instance` must outlive `'h` (the lifetime of the returned reference)
#[inline]
pub unsafe fn value_ref<'h, C, V, S>(instance: RcInner<C, V, S>) -> &'h V
where
	C: Counter
{
	// SAFETY: caller promises to uphold the requirements
	let ptr = unsafe { value_ptr(instance).as_ptr() };

	// SAFETY: ptr is valid
	unsafe { &*ptr }
}

/// # Safety
///
/// - The provided `instance` must not have been dropped or deallocated
#[inline]
unsafe fn slice_thin_ptr<C, V, S>(instance: RcInner<C, V, S>) -> ptr::NonNull<S>
where
	C: Counter
{
	// SAFETY: caller promises to uphold the requirements
	let ptr = unsafe { &raw mut (*instance.ptr.as_ptr()).slice };
	let ptr = ptr.cast::<S>();

	// SAFETY: ptr is guaranteed to be nonnull
	unsafe { ptr::NonNull::new_unchecked(ptr) }
}

/// # Safety
///
/// - The provided `instance` must not have been dropped or deallocated
/// - The provided `instance` must have field `slice_len` already initialised
/// - The provided `instance` must have `slice_len` elements in `slice` already initialised
/// - `instance` must outlive `'h` (the lifetime of the returned reference)
#[inline]
pub unsafe fn slice_ref<'h, C, V, S>(instance: RcInner<C, V, S>) -> &'h [S]
where
	C: Counter
{
	// SAFETY: caller promises to uphold the requirements
	let ptr = unsafe { slice_thin_ptr(instance).as_ptr() };

	// SAFETY: caller promises to uphold the requirements
	let slice_len = unsafe { slice_len(instance) };

	// SAFETY: caller promises ptr is valid for at least `slice_len` elements
	unsafe { slice::from_raw_parts(ptr, slice_len) }
}

/// # Safety
///
/// - The provided `instance` must not have been dropped or deallocated
/// - The provided `instance` must have field `slice_len` already initialised
/// - The provided `instance` must have `slice_len` elements in `slice` already initialised
/// - `instance` must outlive `'h` (the lifetime of the returned reference)
/// - There must not be any other references, shared or exclusive, into `instance`,
///   so the returned exclusive reference can be valid
#[inline]
pub unsafe fn slice_mut<'h, C, V, S>(instance: RcInner<C, V, S>) -> &'h mut [S]
where
	C: Counter
{
	// SAFETY: caller promises to uphold the requirements
	let ptr = unsafe { slice_thin_ptr(instance).as_ptr() };

	// SAFETY: caller promises to uphold the requirements
	let slice_len = unsafe { slice_len(instance) };

	// SAFETY: caller promises ptr is valid for at least `slice_len` elements
	unsafe { slice::from_raw_parts_mut(ptr, slice_len) }
}

impl<C, V, S> Clone for RcInner<C, V, S>
where
	C: Counter
{
	#[inline]
	fn clone(&self) -> Self {
		*self
	}
}

impl<C, V, S> Copy for RcInner<C, V, S>
where
	C: Counter
{}

/// Trait for structs that can count references
///
/// `wiwi` includes two implementations: one for single threaded access (akin
/// to `std`'s [`Rc`]), and the other for atomic multithreaded access (akin to
/// `std`'s [`Arc`]).
///
/// # Safety
///
/// You must implement this trait correctly (ie. functions must return correct
/// values), as values returned from functions are directly used to control the
/// allocation/deallocation of memory and dropping of values.
pub unsafe trait Counter: Sized {
	/// Create a new counter with strong and weak count both set to 1
	fn new() -> Self;

	/// Get the strong reference count
	fn strong_count(&self) -> usize;

	/// Get the weak reference count
	///
	/// Implementations should not subtract the "fake" weak reference that
	/// is collectively held by all the strong references.
	fn weak_count(&self) -> usize;

	/// Increment the strong count for creation of a new strong reference
	fn inc_strong_for_new_ref(&self);

	/// Decrement the strong count for dropping a reference, returning `true`
	/// if there are no more strong pointers left (and the value and items in
	/// the slice should be dropped)
	fn dec_strong_for_drop(&self) -> bool;

	/// Increment the weak count for creation of a new weak reference
	fn inc_weak_for_new_ref(&self);

	/// Decrement the weak count for dropping a reference, returning `true`
	/// if there are no more weak pointers left (and the allocation should be
	/// deallocated)
	fn dec_weak_for_drop(&self) -> bool;

	/// Increment the strong count if it is possible to upgrade a weak pointer
	/// to strong, and return `true`; otherwise return `false` and do nothing
	fn try_inc_strong_for_upgrade(&self) -> bool;
}
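
// Companion sketches for the weak side of the contract (hypothetical helpers
// added for exposition, not part of this module's API): dropping a weak
// reference and attempting an upgrade, driven purely by the `Counter` return
// values.
#[allow(dead_code)]
unsafe fn drop_weak_sketch<C, V, S>(instance: RcInner<C, V, S>)
where
	C: Counter
{
	// SAFETY: caller guarantees the counter has not been dropped or deallocated
	unsafe {
		if counter_ref(instance).dec_weak_for_drop() {
			// no weak references remain (and so no strong ones either, since
			// they collectively hold one weak): free the allocation
			dealloc_instance(instance)
		}
	}
}

#[allow(dead_code)]
unsafe fn try_upgrade_sketch<C, V, S>(instance: RcInner<C, V, S>) -> bool
where
	C: Counter
{
	// SAFETY: caller guarantees the counter has not been dropped or
	// deallocated; on `true`, the caller owns a new strong reference
	unsafe { counter_ref(instance).try_inc_strong_for_upgrade() }
}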

pub struct ThreadCounter {
	strong: cell::Cell<usize>,
	weak: cell::Cell<usize>,
	__not_thread_safe: PhantomData<*const ()>
}

// SAFETY: we implement everything correctly
unsafe impl Counter for ThreadCounter {
	#[inline]
	fn new() -> Self {
		Self {
			strong: cell::Cell::new(1),
			weak: cell::Cell::new(1),
			__not_thread_safe: PhantomData
		}
	}

	#[inline]
	fn strong_count(&self) -> usize {
		self.strong.get()
	}

	#[inline]
	fn weak_count(&self) -> usize {
		self.weak.get()
	}

	#[inline]
	fn inc_strong_for_new_ref(&self) {
		let old = self.strong.get();
		self.strong.set(old + 1);
	}

	#[inline]
	fn dec_strong_for_drop(&self) -> bool {
		let old = self.strong.get();
		self.strong.set(old - 1);
		old == 1
	}

	#[inline]
	fn inc_weak_for_new_ref(&self) {
		let old = self.weak.get();
		self.weak.set(old + 1);
	}

	#[inline]
	fn dec_weak_for_drop(&self) -> bool {
		let old = self.weak.get();
		self.weak.set(old - 1);
		old == 1
	}

	#[inline]
	fn try_inc_strong_for_upgrade(&self) -> bool {
		let old = self.strong.get();
		let should_upgrade = old > 0;

		if should_upgrade {
			self.strong.set(old + 1)
		}

		should_upgrade
	}
}
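
// Quick check of the counting contract for `ThreadCounter` (a test sketch
// added for exposition; assume it is not part of the original module): a new
// counter starts at 1 strong and 1 weak, and the `dec_*` methods report when
// the last reference of each kind is released.
#[cfg(test)]
mod thread_counter_sketch {
	use super::*;

	#[test]
	fn counts_follow_contract() {
		let c = ThreadCounter::new();
		assert_eq!((c.strong_count(), c.weak_count()), (1, 1));

		c.inc_strong_for_new_ref();
		assert!(!c.dec_strong_for_drop()); // 2 -> 1, strong refs remain
		assert!(c.dec_strong_for_drop()); // 1 -> 0, payload should drop

		// the implicit weak held by the strong refs is released last
		assert!(c.dec_weak_for_drop()); // 1 -> 0, allocation should free
	}
}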

pub struct AtomicCounter {
	strong: AtomicUsize,
	weak: AtomicUsize
}

// SAFETY: we implement everything correctly
unsafe impl Counter for AtomicCounter {
	#[inline]
	fn new() -> Self {
		Self {
			strong: AtomicUsize::new(1),
			weak: AtomicUsize::new(1)
		}
	}

	#[inline]
	fn strong_count(&self) -> usize {
		self.strong.load(Relaxed)
	}

	#[inline]
	fn weak_count(&self) -> usize {
		self.weak.load(Relaxed)
	}

	#[inline]
	fn inc_strong_for_new_ref(&self) {
		self.strong.fetch_add(1, Relaxed);
	}

	#[inline]
	fn dec_strong_for_drop(&self) -> bool {
		// `Release` on the decrement, plus an `Acquire` fence on the path that
		// observed the last reference: all prior uses of the data happen-before
		// the drop that follows (the same pattern `std`'s `Arc` uses)
		let old = self.strong.fetch_sub(1, Release);
		if old != 1 { return false }

		atomic::fence(Acquire);
		true
	}

	#[inline]
	fn inc_weak_for_new_ref(&self) {
		self.weak.fetch_add(1, Relaxed);
	}

	#[inline]
	fn dec_weak_for_drop(&self) -> bool {
		// same `Release` + `Acquire` fence pattern as `dec_strong_for_drop`,
		// here ordering prior accesses before the deallocation
		let old = self.weak.fetch_sub(1, Release);
		if old != 1 { return false }

		atomic::fence(Acquire);
		true
	}

	#[inline]
	fn try_inc_strong_for_upgrade(&self) -> bool {
		// increment only while the strong count is still nonzero; the CAS loop
		// inside `fetch_update` ensures a concurrent final drop cannot be raced
		self.strong
			.fetch_update(
				Acquire,
				Relaxed,
				|old| (old > 0).then(move || old + 1)
			)
			.is_ok()
	}
}
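
// The same contract exercised for `AtomicCounter` (a test sketch added for
// exposition; assume it is not part of the original module), focusing on
// upgrade behaviour around a strong count of zero.
#[cfg(test)]
mod atomic_counter_sketch {
	use super::*;

	#[test]
	fn upgrade_fails_after_last_strong() {
		let c = AtomicCounter::new();

		assert!(c.try_inc_strong_for_upgrade()); // strong: 1 -> 2
		assert!(!c.dec_strong_for_drop()); // 2 -> 1, still alive
		assert!(c.dec_strong_for_drop()); // 1 -> 0, last strong gone

		// once strong hits 0, upgrades must keep failing
		assert!(!c.try_inc_strong_for_upgrade());
	}
}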