engine/collections/
ring_buffer.rs1mod boxed;
6mod slice;
7
8use core::{
9 marker::PhantomData,
10 mem::{transmute, MaybeUninit},
11 sync::atomic::{AtomicUsize, Ordering},
12};
13
14use bytemuck::{fill_zeroes, Zeroable};
15use platform::Box;
16
17use crate::allocators::LinearAllocator;
18
19pub use boxed::*;
20pub use slice::*;
21
/// Bookkeeping for a single allocation handed out by a ring buffer.
///
/// Carried alongside the allocation (the `boxed`/`slice` submodules read
/// these fields via `pub(super)`) so the originating buffer can validate and
/// release it in `free`/`free_box`.
#[derive(Debug)]
pub struct RingAllocationMetadata {
    /// Element offset from the start of the buffer where this allocation begins.
    pub(super) offset: usize,
    /// Elements skipped at the tail of the buffer when this allocation had to
    /// wrap around to offset 0; zero when no wrapping occurred.
    pub(super) padding: usize,
    /// Id of the buffer this allocation came from (see `make_buffer_id`);
    /// checked by `free`/`free_box` to catch cross-buffer frees.
    pub(super) buffer_identifier: usize,
}
29
/// A fixed-capacity allocator that hands out contiguous runs of `T` and
/// reclaims them in allocation order (see `free`/`free_box`), wrapping around
/// to the start of the backing storage when the tail is exhausted.
pub struct RingBuffer<'a, T> {
    /// Ties the struct to the lifetime of the borrowed storage (`from_mut`)
    /// without holding an actual reference alongside the raw pointer.
    buffer_lifetime: PhantomData<&'a mut [T]>,
    /// Start of the backing storage.
    buffer_ptr: *mut MaybeUninit<T>,
    /// Capacity of the backing storage, in elements.
    buffer_len: usize,
    /// Element index where the oldest live allocation starts.
    allocated_offset: usize,
    /// Total elements currently reserved, including any wrap padding.
    allocated_len: usize,
    /// Process-unique id used to match returned allocations to this buffer.
    buffer_identifier: usize,
}
52
// SAFETY: every method that reads or writes through `buffer_ptr` takes
// `&mut self`, so a shared `&RingBuffer` cannot touch the storage; `T: Sync`
// is still required so references handed out from the buffer remain sound to
// share across threads.
unsafe impl<T: Sync> Sync for RingBuffer<'_, T> {}
60
/// Returns a process-wide unique, non-zero identifier for a new ring buffer.
///
/// Panics if the id counter would overflow `usize` (practically unreachable).
fn make_buffer_id() -> usize {
    // Shared monotonic counter; `Relaxed` suffices because only the
    // atomicity of the increment matters for uniqueness, not ordering
    // with respect to other memory operations.
    static BUFFER_ID_COUNTER: AtomicUsize = AtomicUsize::new(0);
    BUFFER_ID_COUNTER
        .fetch_add(1, Ordering::Relaxed)
        .checked_add(1)
        .unwrap()
}
66
impl<T> RingBuffer<'_, T> {
    /// Creates a ring buffer with room for `capacity` elements of `T`, backed
    /// by storage from `allocator`.
    ///
    /// Returns `None` if the allocator cannot provide the space.
    pub fn new(
        allocator: &'static LinearAllocator,
        capacity: usize,
    ) -> Option<RingBuffer<'static, T>> {
        let buffer = allocator.try_alloc_uninit_slice(capacity, None)?;
        Some(RingBuffer {
            buffer_lifetime: PhantomData,
            buffer_ptr: buffer.as_mut_ptr(),
            buffer_len: buffer.len(),
            allocated_offset: 0,
            allocated_len: 0,
            buffer_identifier: make_buffer_id(),
        })
    }

    /// Creates a ring buffer backed by the borrowed `buffer`.
    ///
    /// # Safety
    ///
    /// NOTE(review): no contract was documented here. The returned buffer's
    /// lifetime is tied to the `&mut` borrow via elision, so plain aliasing
    /// is already prevented by the borrow checker; the caller obligation that
    /// makes this `unsafe` (presumably around outstanding `RingSlice`/
    /// `RingBox` handles outliving the borrow) should be confirmed against
    /// the callers and documented.
    pub unsafe fn from_mut(buffer: &mut [MaybeUninit<T>]) -> RingBuffer<T> {
        RingBuffer {
            buffer_lifetime: PhantomData,
            buffer_ptr: buffer.as_mut_ptr(),
            buffer_len: buffer.len(),
            allocated_offset: 0,
            allocated_len: 0,
            buffer_identifier: make_buffer_id(),
        }
    }

    /// Returns the total capacity of the buffer, in elements.
    pub fn capacity(&self) -> usize {
        self.buffer_len
    }

    /// Reserves `len` contiguous elements and returns `(offset, padding)`:
    /// the element offset where the reservation starts and how many elements
    /// were wasted at the tail if the reservation had to wrap to offset 0.
    /// Returns `None` when no contiguous free run of `len` elements exists.
    fn allocate_offset(&mut self, len: usize) -> Option<(usize, usize)> {
        // One-past-the-end of the allocated region, *unwrapped*: this can
        // exceed `buffer_len` while the live region straddles the wrap point.
        let allocated_end = self.allocated_offset + self.allocated_len;
        // Distance from the (wrapped) end of the allocated region to the end
        // of the storage. NOTE(review): this modulo panics for a
        // zero-capacity buffer (`buffer_len == 0`), which `new` permits.
        let padding_to_end = self.buffer_len - (allocated_end % self.buffer_len);
        if allocated_end + len <= self.buffer_len {
            // Fits in the unwrapped tail: hand out `allocated_end` directly.
            self.allocated_len += len;
            Some((allocated_end, 0))
        } else if self.allocated_len + padding_to_end + len <= self.buffer_len {
            // Wrap: mark the tail as padding and start at offset 0. In the
            // non-wrapped state this condition reduces to
            // `len <= allocated_offset`, i.e. the hole before the oldest
            // allocation can hold the request; in the wrapped state it can
            // never hold, so offset 0 is only returned when it is free.
            self.allocated_len += padding_to_end + len;
            Some((0, padding_to_end))
        } else {
            None
        }
    }
}
127
impl<T: Zeroable> RingBuffer<'_, T> {
    /// Allocates a zero-initialized slice of `len` elements, or `None` if the
    /// buffer has no contiguous free run of that size.
    pub fn allocate(&mut self, len: usize) -> Option<RingSlice<T>> {
        let (offset, padding) = self.allocate_offset(len)?;

        // SAFETY-relevant: `allocate_offset` only returns offsets with
        // `offset + len <= buffer_len`, so the pointer and the slice below
        // stay inside the backing storage.
        let ptr = unsafe { self.buffer_ptr.add(offset) };

        let slice: &mut [MaybeUninit<T>] = unsafe { core::slice::from_raw_parts_mut(ptr, len) };

        // Zeroing makes the memory a valid `T` because `T: Zeroable`.
        fill_zeroes(slice);

        // Sound after `fill_zeroes`: every element is initialized, and
        // `MaybeUninit<T>` has the same layout as `T`.
        let slice = unsafe { transmute::<&mut [MaybeUninit<T>], &mut [T]>(slice) };

        // NOTE(review): `platform::Box::from_ptr` semantics are not visible
        // here; presumably a non-owning/arena box over the buffer memory.
        let slice = unsafe { Box::from_ptr(&raw mut *slice) };

        Some(RingSlice {
            slice,
            metadata: RingAllocationMetadata {
                offset,
                padding,
                buffer_identifier: self.buffer_identifier,
            },
        })
    }

    /// Releases `slice` back to the buffer.
    ///
    /// Allocations must be freed oldest-first: if `slice` is not the oldest
    /// live allocation, it is handed back unchanged in `Err`.
    ///
    /// # Panics
    ///
    /// Panics if `slice` was allocated from a different ring buffer.
    pub fn free(&mut self, slice: RingSlice<T>) -> Result<(), RingSlice<T>> {
        assert_eq!(
            self.buffer_identifier, slice.metadata.buffer_identifier,
            "this slice was not allocated from this ring buffer",
        );
        // Where the oldest allocation should start once its wrap padding is
        // skipped; compared against the slice's recorded offset below.
        let allocated_offset_with_padding =
            (self.allocated_offset + slice.metadata.padding) % self.buffer_len;
        if slice.metadata.padding == 0 && slice.len() * size_of::<T>() == 0 {
            // Zero-sized payload (empty slice, or T is a ZST): nothing to
            // release. NOTE(review): for a non-empty slice of a zero-sized
            // `T`, `allocate_offset` still bumped `allocated_len` by `len`,
            // and that is not rolled back here — confirm whether ZST slices
            // are meant to consume (and leak) ring capacity.
            Ok(())
        } else if slice.metadata.offset == allocated_offset_with_padding {
            // Oldest allocation: advance past it and the padding before it.
            let freed_len = slice.len();
            self.allocated_offset = (self.allocated_offset + freed_len) % self.buffer_len;
            self.allocated_len -= freed_len + slice.metadata.padding;
            if self.allocated_len == 0 {
                // Empty again: snap back to the start so future allocations
                // get the full contiguous capacity.
                self.allocated_offset = 0;
            }
            Ok(())
        } else {
            Err(slice)
        }
    }
}
198
impl<T> RingBuffer<'_, T> {
    /// Allocates room for a single `T` and moves `value` into it.
    ///
    /// No `Zeroable` bound is needed here (unlike `allocate`) because the
    /// slot is initialized by writing `value` rather than by zeroing.
    /// Returns `value` back in `Err` if the buffer is full.
    pub fn allocate_box(&mut self, value: T) -> Result<RingBox<T>, T> {
        let Some((offset, padding)) = self.allocate_offset(1) else {
            return Err(value);
        };

        // SAFETY-relevant: `allocate_offset` guarantees `offset < buffer_len`
        // for a successful 1-element reservation.
        let ptr = unsafe { self.buffer_ptr.add(offset) };

        let uninit_mut: &mut MaybeUninit<T> = unsafe { &mut *ptr };

        // Initializes the slot and yields a `&mut T` to the written value.
        let init_mut = uninit_mut.write(value);

        // NOTE(review): `platform::Box::from_ptr` semantics are not visible
        // here; presumably a non-owning/arena box over the buffer memory.
        let boxed = unsafe { Box::from_ptr(&raw mut *init_mut) };

        Ok(RingBox {
            boxed,
            metadata: RingAllocationMetadata {
                offset,
                padding,
                buffer_identifier: self.buffer_identifier,
            },
        })
    }

    /// Releases `boxed` back to the buffer.
    ///
    /// Boxes must be freed oldest-first: if `boxed` is not the oldest live
    /// allocation, it is handed back unchanged in `Err`.
    ///
    /// # Panics
    ///
    /// Panics if `boxed` was allocated from a different ring buffer.
    pub fn free_box(&mut self, boxed: RingBox<T>) -> Result<(), RingBox<T>> {
        assert_eq!(
            self.buffer_identifier, boxed.metadata.buffer_identifier,
            "this box was not allocated from this ring buffer",
        );
        // Expected start of the oldest allocation after skipping its padding.
        let allocated_offset_with_padding =
            (self.allocated_offset + boxed.metadata.padding) % self.buffer_len;
        if boxed.metadata.padding == 0 && size_of::<T>() == 0 {
            // Zero-sized `T`: nothing to release. NOTE(review): the
            // `allocated_len += 1` done by `allocate_offset` is not rolled
            // back here — confirm whether ZST boxes are meant to consume
            // (and leak) ring capacity.
            Ok(())
        } else if boxed.metadata.offset == allocated_offset_with_padding {
            // Oldest allocation: advance past the one element and its padding.
            self.allocated_offset = (self.allocated_offset + 1) % self.buffer_len;
            self.allocated_len -= 1 + boxed.metadata.padding;
            if self.allocated_len == 0 {
                // Empty again: snap back to offset 0 for maximal contiguity.
                self.allocated_offset = 0;
            }
            Ok(())
        } else {
            Err(boxed)
        }
    }
}
264
#[cfg(test)]
mod tests {
    use crate::allocators::{static_allocator, LinearAllocator};

    use super::RingBuffer;

    /// Smoke test: a 1-element buffer can allocate, write, and free.
    #[test]
    fn works_at_all() {
        static ALLOC: &LinearAllocator = static_allocator!(1);
        let mut ring = RingBuffer::<u8>::new(ALLOC, 1).unwrap();
        let mut slice = ring.allocate(1).unwrap();
        slice[0] = 123;
        ring.free(slice).unwrap();
    }

    /// Exercises the wrap path: freeing the oldest allocation must make its
    /// space reusable even though the next allocation wraps past the tail.
    #[test]
    fn wraps_when_full() {
        static ALLOC: &LinearAllocator = static_allocator!(10);
        let mut ring = RingBuffer::<u8>::new(ALLOC, 10).unwrap();

        // 4 + 4 leaves only 2 elements free, so a third 4 cannot fit.
        let first_half = ring.allocate(4).unwrap();
        let _second_half = ring.allocate(4).unwrap();
        assert!(ring.allocate(4).is_none(), "ring should be full");

        // Freeing the oldest 4 lets the next allocation wrap to the front.
        ring.free(first_half).unwrap();
        let _third_half = ring.allocate(4).unwrap();

        assert!(ring.allocate(4).is_none(), "ring should be full");
    }

    /// `free` must assert when handed a slice from a different buffer; the
    /// buffer-identifier check is what this pins down.
    #[test]
    #[should_panic]
    fn panics_on_free_with_wrong_buffer_identity() {
        static ALLOC_0: &LinearAllocator = static_allocator!(1);
        static ALLOC_1: &LinearAllocator = static_allocator!(1);

        let mut ring0 = RingBuffer::<u8>::new(ALLOC_0, 1).unwrap();
        let mut ring1 = RingBuffer::<u8>::new(ALLOC_1, 1).unwrap();

        let foo0 = ring0.allocate(1).unwrap();
        let _ = ring1.free(foo0); }
}