engine/collections/vec.rs

use core::{
    fmt::Debug,
    mem::{needs_drop, transmute, MaybeUninit},
    ops::{Deref, DerefMut},
    slice,
};

use bytemuck::{fill_zeroes, Zeroable};

use crate::allocators::LinearAllocator;

/// A fixed-capacity vector whose backing memory is borrowed from a
/// [`LinearAllocator`], so it cannot grow past the capacity it was created
/// with.
pub struct FixedVec<'a, T> {
    uninit_slice: &'a mut [MaybeUninit<T>],
    initialized_len: usize,
}

impl<T> FixedVec<'_, T> {
    pub fn empty() -> FixedVec<'static, T> {
        FixedVec {
            uninit_slice: &mut [],
            initialized_len: 0,
        }
    }

    /// Creates a vector with room for `capacity` elements, with the backing
    /// memory taken from `allocator`. Returns None if the allocation fails.
    pub fn new<'a>(allocator: &'a LinearAllocator, capacity: usize) -> Option<FixedVec<'a, T>> {
        let uninit_slice: &'a mut [MaybeUninit<T>] =
            allocator.try_alloc_uninit_slice::<T>(capacity, None)?;
        Some(FixedVec {
            uninit_slice,
            initialized_len: 0,
        })
    }

    /// Like [`FixedVec::new`], but the backing memory is allocated with at
    /// least the given alignment.
    pub fn with_alignment<'a>(
        allocator: &'a LinearAllocator,
        capacity: usize,
        alignment: usize,
    ) -> Option<FixedVec<'a, T>> {
        let uninit_slice: &'a mut [MaybeUninit<T>] =
            allocator.try_alloc_uninit_slice::<T>(capacity, Some(alignment))?;
        Some(FixedVec {
            uninit_slice,
            initialized_len: 0,
        })
    }

    /// Appends `value` to the end of the vector, returning it back as an
    /// `Err` if the vector is already full.
    pub fn push(&mut self, value: T) -> Result<(), T> {
        let i = self.initialized_len;
        let Some(uninit_at_i) = self.uninit_slice.get_mut(i) else {
            return Err(value);
        };

        uninit_at_i.write(value);

        self.initialized_len = i + 1;

        Ok(())
    }

    /// Removes and returns the last element, or None if the vector is empty.
    pub fn pop(&mut self) -> Option<T> {
        if self.initialized_len == 0 {
            return None;
        }
        let i = self.initialized_len - 1;

        // Safety: every index below initialized_len holds an initialized
        // value, and decrementing initialized_len right after ensures the
        // value at `i` is never read or dropped again.
        let value = unsafe { self.uninit_slice[i].assume_init_read() };
        self.initialized_len -= 1;

        Some(value)
    }

    /// Drops every element, leaving the capacity untouched.
    pub fn clear(&mut self) {
        self.truncate(0);
    }

    /// Shortens the vector to `new_len` elements, dropping the rest. Does
    /// nothing if `new_len` is not less than the current length.
    pub fn truncate(&mut self, new_len: usize) {
        if new_len >= self.initialized_len {
            return;
        }

        if needs_drop::<T>() {
            for initialized_value in &mut self.uninit_slice[new_len..self.initialized_len] {
                // Safety: every index below initialized_len holds an
                // initialized value, and initialized_len is set to new_len
                // below, so these values are never touched again.
                unsafe { initialized_value.assume_init_drop() };
            }
        }

        self.initialized_len = new_len;
    }

    pub fn is_full(&self) -> bool {
        self.initialized_len == self.uninit_slice.len()
    }

    pub fn spare_capacity(&self) -> usize {
        self.uninit_slice.len() - self.initialized_len
    }
}

impl<T: Copy> FixedVec<'_, T> {
    /// Copies the elements of `slice` to the end of the vector. Returns false
    /// (and copies nothing) if the spare capacity cannot fit the whole slice.
    pub fn extend_from_slice(&mut self, slice: &[T]) -> bool {
        if self.initialized_len + slice.len() > self.uninit_slice.len() {
            return false;
        }

        for (src, dst) in (slice.iter()).zip(&mut self.uninit_slice[self.initialized_len..]) {
            dst.write(*src);
            self.initialized_len += 1;
        }

        true
    }
}

impl<T: Zeroable> FixedVec<'_, T> {
    /// Fills the spare capacity with zeroed values, making the vector full.
    /// Already initialized elements are left untouched.
    pub fn fill_with_zeroes(&mut self) {
        fill_zeroes(&mut self.uninit_slice[self.initialized_len..]);
        self.initialized_len = self.uninit_slice.len();
    }
}

impl<'a, T> FixedVec<'a, T> {
    /// Splits off the first `length` slots of capacity, and any initialized
    /// elements within them, into a new vector, leaving the rest in `self`.
    /// Returns None if `length` exceeds the total capacity.
    pub fn split_off_head(&mut self, length: usize) -> Option<FixedVec<'a, T>> {
        if length > self.uninit_slice.len() {
            return None;
        }
        let mid = length;
        let len = self.uninit_slice.len();
        let head_ptr = self.uninit_slice.as_mut_ptr();
        // Safety: `head` and `tail` are non-overlapping parts of uninit_slice,
        // which this vector exclusively borrows for 'a, so handing out two
        // distinct mutable slices with that lifetime is sound.
        let head = unsafe { slice::from_raw_parts_mut(head_ptr, mid) };
        let tail = unsafe { slice::from_raw_parts_mut(head_ptr.add(mid), len - mid) };
        let head_initialized = self.initialized_len.min(mid);
        let tail_initialized = self.initialized_len.saturating_sub(mid);

        self.initialized_len = tail_initialized;
        self.uninit_slice = tail;
        Some(FixedVec {
            uninit_slice: head,
            initialized_len: head_initialized,
        })
    }
}

impl<T> Drop for FixedVec<'_, T> {
    fn drop(&mut self) {
        self.clear();
    }
}

impl<T> Deref for FixedVec<'_, T> {
    type Target = [T];

    fn deref<'a>(&'a self) -> &'a Self::Target {
        let initialized_slice = &self.uninit_slice[..self.initialized_len];
        // Safety: everything below initialized_len is initialized, and
        // MaybeUninit<T> has the same layout as T.
        unsafe { transmute::<&'a [MaybeUninit<T>], &'a [T]>(initialized_slice) }
    }
}

impl<T> DerefMut for FixedVec<'_, T> {
    fn deref_mut<'a>(&'a mut self) -> &'a mut Self::Target {
        let initialized_slice = &mut self.uninit_slice[..self.initialized_len];
        // Safety: everything below initialized_len is initialized, and
        // MaybeUninit<T> has the same layout as T.
        unsafe { transmute::<&'a mut [MaybeUninit<T>], &'a mut [T]>(initialized_slice) }
    }
}

impl<T: Debug> Debug for FixedVec<'_, T> {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        let slice: &[T] = self;
        f.debug_list().entries(slice).finish()
    }
}

#[cfg(test)]
mod tests {
    use core::{
        str::FromStr,
        sync::atomic::{AtomicI32, Ordering},
    };

    use arrayvec::ArrayString;

    use crate::{
        allocators::{static_allocator, LinearAllocator},
        collections::FixedVec,
    };

    #[test]
    fn does_not_leak() {
        const COUNT: usize = 100;
        static ELEMENT_COUNT: AtomicI32 = AtomicI32::new(0);

        #[derive(Debug)]
        struct Element {
            _foo: bool,
            _bar: ArrayString<100>,
        }
        impl Element {
            pub fn create_and_count() -> Element {
                ELEMENT_COUNT.fetch_add(1, Ordering::Relaxed);
                Element {
                    _foo: true,
                    _bar: ArrayString::from_str("Bar").unwrap(),
                }
            }
        }
        impl Drop for Element {
            fn drop(&mut self) {
                ELEMENT_COUNT.fetch_add(-1, Ordering::Relaxed);
            }
        }

        const ALLOCATOR_SIZE: usize = size_of::<Element>() * COUNT + align_of::<Element>() - 1;
        static ARENA: &LinearAllocator = static_allocator!(ALLOCATOR_SIZE);
        let mut vec: FixedVec<Element> = FixedVec::new(ARENA, COUNT).unwrap();

        assert_eq!(0, ELEMENT_COUNT.load(Ordering::Relaxed));

        for _ in 0..COUNT / 2 {
            vec.push(Element::create_and_count()).unwrap();
        }
        assert_eq!(COUNT as i32 / 2, ELEMENT_COUNT.load(Ordering::Relaxed));

        vec.clear();

        assert_eq!(0, ELEMENT_COUNT.load(Ordering::Relaxed));

        for _ in 0..COUNT {
            vec.push(Element::create_and_count()).unwrap();
        }
        assert_eq!(COUNT as i32, ELEMENT_COUNT.load(Ordering::Relaxed));
        assert!(
            vec.push(Element::create_and_count()).is_err(),
            "vec should be full already"
        );
        assert_eq!(COUNT as i32, ELEMENT_COUNT.load(Ordering::Relaxed));

        drop(vec);

        assert_eq!(0, ELEMENT_COUNT.load(Ordering::Relaxed));
    }

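    // An extra illustrative test, not part of the original suite: a sketch of
    // how extend_from_slice, spare_capacity, and split_off_head fit together,
    // using only the APIs defined above.
    #[test]
    fn split_off_head_splits_capacity_and_elements() {
        static ARENA: &LinearAllocator = static_allocator!(8);
        let mut vec: FixedVec<u8> = FixedVec::new(ARENA, 8).unwrap();

        // Fill six of the eight slots, leaving two slots of spare capacity.
        assert!(vec.extend_from_slice(&[1, 2, 3, 4, 5, 6]));
        assert_eq!(2, vec.spare_capacity());

        // Split off the first four slots: they keep their initialized values,
        // while the remainder keeps the last two values and the spare slots.
        let head = vec.split_off_head(4).unwrap();
        assert_eq!(&head[..], &[1u8, 2, 3, 4][..]);
        assert_eq!(&vec[..], &[5u8, 6][..]);
        assert_eq!(2, vec.spare_capacity());

        // Asking for more capacity than is left fails.
        assert!(vec.split_off_head(5).is_none());
    }
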
    #[test]
    fn zst_elements_work() {
        #[derive(Debug, PartialEq)]
        struct Zst;

        static ARENA: &LinearAllocator = static_allocator!(0);
        let alloc = LinearAllocator::new(ARENA, 0).unwrap();
        let mut vec: FixedVec<Zst> = FixedVec::new(&alloc, 2).unwrap();

        vec.push(Zst).unwrap();
        vec.push(Zst).unwrap();
        assert_eq!(Err(Zst), vec.push(Zst));
        assert_eq!(2, vec.len());

        vec.clear();
        assert!(vec.is_empty());
    }
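
    // Another illustrative sketch, not part of the original suite: it relies
    // on bytemuck's Zeroable impl for u32 and checks that fill_with_zeroes
    // only zero-fills the spare capacity, leaving existing elements intact.
    #[test]
    fn fill_with_zeroes_preserves_existing_elements() {
        const ALLOCATOR_SIZE: usize = size_of::<u32>() * 4 + align_of::<u32>() - 1;
        static ARENA: &LinearAllocator = static_allocator!(ALLOCATOR_SIZE);
        let mut vec: FixedVec<u32> = FixedVec::new(ARENA, 4).unwrap();

        vec.push(7).unwrap();
        vec.fill_with_zeroes();

        assert!(vec.is_full());
        assert_eq!(&vec[..], &[7u32, 0, 0, 0][..]);
    }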
}