#![allow(clippy::unwrap_used)]
#![allow(clippy::expect_used)]
#![allow(clippy::indexing_slicing)]
#![allow(clippy::panic)]

use super::*;
use crate::ule::*;
use alloc::vec::Vec;
use core::any;
use core::convert::TryInto;
use core::marker::PhantomData;
use core::ops::Deref;
use core::ops::Range;
use core::{fmt, ptr, slice};

use super::components::IntegerULE;

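/// A fully-owned, mutable counterpart of `VarZeroVec`/`VarZeroSlice`, allowing
/// elements to be pushed, inserted, replaced, and removed in place.
///
/// Internally this is a single byte buffer in the same layout as
/// `VarZeroSlice`: a length field of `F::Len::SIZE` bytes, followed by
/// `len - 1` indices of `F::Index::SIZE` bytes each (the offset of element 0
/// is always zero and is not stored), followed by the concatenated element
/// data.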
pub struct VarZeroVecOwned<T: ?Sized, F = Index16> {
    marker1: PhantomData<T>,
    marker2: PhantomData<F>,
    entire_slice: Vec<u8>,
}

impl<T: ?Sized, F> Clone for VarZeroVecOwned<T, F> {
    fn clone(&self) -> Self {
        VarZeroVecOwned {
            marker1: PhantomData,
            marker2: PhantomData,
            entire_slice: self.entire_slice.clone(),
        }
    }
}

/// The kind of mutation performed by `VarZeroVecOwned::shift()`.
#[derive(PartialEq)]
enum ShiftType {
    Insert,
    Replace,
    Remove,
}

impl<T: VarULE + ?Sized, F: VarZeroVecFormat> Deref for VarZeroVecOwned<T, F> {
    type Target = VarZeroSlice<T, F>;
    fn deref(&self) -> &VarZeroSlice<T, F> {
        self.as_slice()
    }
}

impl<T: VarULE + ?Sized, F> VarZeroVecOwned<T, F> {
    /// Construct an empty VarZeroVecOwned.
    pub fn new() -> Self {
        Self {
            marker1: PhantomData,
            marker2: PhantomData,
            entire_slice: Vec::new(),
        }
    }
}

impl<T: VarULE + ?Sized, F: VarZeroVecFormat> VarZeroVecOwned<T, F> {
    /// Construct a VarZeroVecOwned from a [`VarZeroSlice`] by cloning its bytes.
    pub fn from_slice(slice: &VarZeroSlice<T, F>) -> Self {
        Self {
            marker1: PhantomData,
            marker2: PhantomData,
            entire_slice: slice.as_bytes().into(),
        }
    }

    /// Try to construct a VarZeroVecOwned from a list of elements, erroring if
    /// the encoded data would not fit in the index type used by format `F`.
    pub fn try_from_elements<A>(elements: &[A]) -> Result<Self, &'static str>
    where
        A: EncodeAsVarULE<T>,
    {
        Ok(if elements.is_empty() {
            Self::from_slice(VarZeroSlice::new_empty())
        } else {
            Self {
                marker1: PhantomData,
                marker2: PhantomData,
                entire_slice: components::get_serializable_bytes_non_empty::<T, A, F>(elements)
                    .ok_or(F::Index::TOO_LARGE_ERROR)?,
            }
        })
    }

    /// Obtain this vector as a [`VarZeroSlice`].
    pub fn as_slice(&self) -> &VarZeroSlice<T, F> {
        let slice: &[u8] = &self.entire_slice;
        unsafe {
            // Safety: `entire_slice` is always maintained as a valid encoding
            // (it is either empty or was produced/mutated by this type).
            VarZeroSlice::from_bytes_unchecked(slice)
        }
    }

    /// Allocate space for roughly `capacity` elements up front, estimating
    /// one index plus a few bytes of data per element.
    pub(crate) fn with_capacity(capacity: usize) -> Self {
        Self {
            marker1: PhantomData,
            marker2: PhantomData,
            entire_slice: Vec::with_capacity(capacity * (F::Index::SIZE + 4)),
        }
    }

    /// Reserve space for roughly `capacity` additional elements.
    pub(crate) fn reserve(&mut self, capacity: usize) {
        self.entire_slice.reserve(capacity * (F::Index::SIZE + 4))
    }

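    // Note on the private helpers below: the encoded buffer stores only
    // `len - 1` indices. Element 0 always begins at offset 0 of the data
    // segment, so its index is implicit; `index_range`/`index_data` return
    // `None` for index 0 and `element_position_unchecked` maps that to 0.
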
    /// Get the position of a given element, as an offset into the data
    /// segment.
    ///
    /// ## Safety
    /// The vector must be non-empty and `idx` must be at most `self.len()`.
    unsafe fn element_position_unchecked(&self, idx: usize) -> usize {
        let len = self.len();
        let out = if idx == len {
            // One-past-the-end: the total length of the data segment.
            self.entire_slice.len() - F::Len::SIZE - (F::Index::SIZE * (len - 1))
        } else if let Some(idx) = self.index_data(idx) {
            idx.iule_to_usize()
        } else {
            // Element 0 has no stored index; it starts at offset 0.
            0
        };
        debug_assert!(out + F::Len::SIZE + (len - 1) * F::Index::SIZE <= self.entire_slice.len());
        out
    }

    /// Get the range of a given element within the data segment.
    ///
    /// ## Safety
    /// The vector must be non-empty and `idx` must be less than `self.len()`.
    unsafe fn element_range_unchecked(&self, idx: usize) -> core::ops::Range<usize> {
        let start = self.element_position_unchecked(idx);
        let end = self.element_position_unchecked(idx + 1);
        debug_assert!(start <= end, "{start} > {end}");
        start..end
    }

    /// Set the stored length field without any other changes to the data.
    ///
    /// ## Safety
    /// The caller must ensure the buffer actually encodes `len` elements
    /// before any safe method is called again.
    unsafe fn set_len(&mut self, len: usize) {
        assert!(len <= F::Len::MAX_VALUE as usize);
        let len_bytes = len.to_le_bytes();
        let len_ule = F::Len::iule_from_usize(len).expect(F::Len::TOO_LARGE_ERROR);
        self.entire_slice[0..F::Len::SIZE].copy_from_slice(ULE::slice_as_bytes(&[len_ule]));
        // Double-check that the length fits in the length field.
        assert_eq!(len_bytes[F::Len::SIZE..].iter().sum::<u8>(), 0);
    }

    /// Return the byte range of the stored index for `index`, or `None` for
    /// index 0, whose offset is implicit.
    fn index_range(index: usize) -> Option<Range<usize>> {
        let index_minus_one = index.checked_sub(1)?;
        let pos = F::Len::SIZE + F::Index::SIZE * index_minus_one;
        Some(pos..pos + F::Index::SIZE)
    }

    /// Return the stored index at `index`, or `None` for index 0.
    ///
    /// ## Safety
    /// The vector must be non-empty and `index` must be in bounds.
    unsafe fn index_data(&self, index: usize) -> Option<&F::Index> {
        let index_range = Self::index_range(index)?;
        Some(&F::Index::slice_from_bytes_unchecked(&self.entire_slice[index_range])[0])
    }

    /// Return a mutable reference to the stored index at `index`, or `None`
    /// for index 0.
    ///
    /// ## Safety
    /// The vector must be non-empty and `index` must be in bounds.
    unsafe fn index_data_mut(&mut self, index: usize) -> Option<&mut F::Index> {
        let ptr = self.entire_slice.as_mut_ptr();
        let range = Self::index_range(index)?;

        // Build a mutable view over just this index's bytes.
        let data = slice::from_raw_parts_mut(ptr.add(range.start), F::Index::SIZE);
        Some(&mut F::Index::iule_from_bytes_unchecked_mut(data)[0])
    }

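    /// Shift the indices of all elements at or after `starting_index` by
    /// `amount` bytes. Since element 0 has no stored index, the stored index
    /// array covers elements `1..len`; hence the `checked_sub(1)` below and
    /// the requirement that `starting_index` be at least 1.
    ///
    /// ## Safety
    /// The vector must be non-empty and the adjusted indices must remain
    /// valid offsets into the data segment.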
    unsafe fn shift_indices(&mut self, starting_index: usize, amount: i32) {
        let normalized_idx = starting_index
            .checked_sub(1)
            .expect("shift_indices called with a 0 starting index");
        let len = self.len();
        let indices = F::Index::iule_from_bytes_unchecked_mut(
            &mut self.entire_slice[F::Len::SIZE..F::Len::SIZE + F::Index::SIZE * (len - 1)],
        );
        for idx in &mut indices[normalized_idx..] {
            let mut new_idx = idx.iule_to_usize();
            if amount > 0 {
                new_idx = new_idx.checked_add(amount.try_into().unwrap()).unwrap();
            } else {
                new_idx = new_idx.checked_sub((-amount).try_into().unwrap()).unwrap();
            }
            *idx = F::Index::iule_from_usize(new_idx).expect(F::Index::TOO_LARGE_ERROR);
        }
    }

    /// Get this [`VarZeroVecOwned`] as a borrowed [`VarZeroVec`].
    pub fn as_varzerovec<'a>(&'a self) -> VarZeroVec<'a, T, F> {
        self.as_slice().into()
    }

    /// Empty the vector.
    pub fn clear(&mut self) {
        self.entire_slice.clear()
    }

    /// Consume this vector, returning the underlying byte buffer.
    #[inline]
    pub fn into_bytes(self) -> Vec<u8> {
        self.entire_slice
    }

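    /// Shift the data and indices around `index` to accommodate an element of
    /// `new_size` bytes according to `shift_type`, and return a mutable slice
    /// of exactly `new_size` bytes for the caller to write the new element
    /// into. This grows or shrinks `entire_slice`, moves the stored indices
    /// and trailing element data, adjusts the affected indices via
    /// `shift_indices`, and updates the stored length for inserts and removes.
    ///
    /// ## Safety
    /// `index` must be in bounds (for inserts it may equal `len`), and for
    /// inserts and replacements the caller must fill the returned slice with
    /// a valid `T` encoding of exactly `new_size` bytes.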
    unsafe fn shift(&mut self, index: usize, new_size: usize, shift_type: ShiftType) -> &mut [u8] {
        let len = self.len();
        let slice_len = self.entire_slice.len();

        // The byte range (relative to the data segment) currently occupied by
        // the affected element; empty for an insertion.
        let prev_element = match shift_type {
            ShiftType::Insert => {
                let pos = self.element_position_unchecked(index);
                pos..pos
            }
            _ => self.element_range_unchecked(index),
        };

        // How much the index array itself grows or shrinks, in bytes.
        let index_shift: i64 = match shift_type {
            ShiftType::Insert => F::Index::SIZE as i64,
            ShiftType::Replace => 0,
            ShiftType::Remove => -(F::Index::SIZE as i64),
        };
        // The total change in the length of the encoded buffer (may be negative).
        let shift: i64 =
            new_size as i64 - (prev_element.end - prev_element.start) as i64 + index_shift;
        let new_slice_len = slice_len.wrapping_add(shift as usize);
        if shift > 0 {
            if new_slice_len > F::Index::MAX_VALUE as usize {
                panic!(
                    "Attempted to grow VarZeroVec to an encoded size that does not fit within the length size used by {}",
                    any::type_name::<F>()
                );
            }
            self.entire_slice.resize(new_slice_len, 0);
        }

        {
            // Raw pointers to the index array, the end of the old buffer, the
            // data segment, and the affected element's old byte range.
            let slice_range = self.entire_slice.as_mut_ptr_range();
            let indices_start = slice_range.start.add(F::Len::SIZE);
            let old_slice_end = slice_range.start.add(slice_len);
            let data_start = indices_start.add((len - 1) * F::Index::SIZE);
            let prev_element_p =
                data_start.add(prev_element.start)..data_start.add(prev_element.end);

            // The stored index for `index`, if any (element 0 has no stored index).
            let index_range = if let Some(index_minus_one) = index.checked_sub(1) {
                let index_start = indices_start.add(F::Index::SIZE * index_minus_one);
                Some(index_start..index_start.add(F::Index::SIZE))
            } else {
                None
            };

            unsafe fn shift_bytes(block: Range<*const u8>, to: *mut u8) {
                debug_assert!(block.end >= block.start);
                ptr::copy(block.start, to, block.end.offset_from(block.start) as usize);
            }

            if shift_type == ShiftType::Remove {
                if let Some(ref index_range) = index_range {
                    shift_bytes(index_range.end..prev_element_p.start, index_range.start);
                } else {
                    // Removing element 0: drop the stored index of element 1 instead.
                    shift_bytes(
                        indices_start.add(F::Index::SIZE)..prev_element_p.start,
                        indices_start,
                    )
                }
            }

            // Move the data after the affected element into its new position.
            shift_bytes(
                prev_element_p.end..old_slice_end,
                prev_element_p
                    .start
                    .offset((new_size as i64 + index_shift) as isize),
            );

            let first_affected_index = match shift_type {
                ShiftType::Insert => {
                    if let Some(index_range) = index_range {
                        // Make room for one more stored index and record the
                        // new element's offset.
                        shift_bytes(index_range.start..prev_element_p.start, index_range.end);
                        let index_data = self
                            .index_data_mut(index)
                            .expect("If index_range is some, index is > 0 and should not panic in index_data_mut");
                        *index_data = F::Index::iule_from_usize(prev_element.start)
                            .expect(F::Index::TOO_LARGE_ERROR);
                    } else {
                        // Inserting at the front: the old element 0 gets a stored
                        // index of 0, which `shift_indices` below then adjusts.
                        shift_bytes(
                            indices_start..prev_element_p.start,
                            indices_start.add(F::Index::SIZE),
                        );
                        let index_data = self
                            .index_data_mut(1)
                            .expect("Should be able to write to index 1");
                        *index_data = F::Index::iule_from_usize(0).expect("0 is always valid!");
                    }

                    self.set_len(len + 1);
                    index + 1
                }
                ShiftType::Remove => {
                    self.set_len(len - 1);
                    if index == 0 {
                        index + 1
                    } else {
                        index
                    }
                }
                ShiftType::Replace => index + 1,
            };
            self.entire_slice.set_len(new_slice_len);
            // Adjust the stored offsets of all elements after the affected one.
            self.shift_indices(first_affected_index, (shift - index_shift) as i32);
        };

        debug_assert!(self.verify_integrity());

        // Return the new element's byte range within `entire_slice` for the
        // caller to fill in.
        let element_pos = F::Len::SIZE
            + (self.len() - 1) * F::Index::SIZE
            + self.element_position_unchecked(index);
        &mut self.entire_slice[element_pos..element_pos + new_size]
    }

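    /// Check the internal invariants of the buffer: an empty vector must have
    /// an empty slice, a nonempty slice must declare a nonzero length, the
    /// slice must have room for the length field and `len - 1` indices, the
    /// data segment must fit within `F::Index::MAX_VALUE`, and the stored
    /// indices must point into the data segment in non-decreasing order.
    /// Panics (rather than returning `false`) on violation; intended for use
    /// in debug assertions.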
    fn verify_integrity(&self) -> bool {
        if self.is_empty() {
            if self.entire_slice.is_empty() {
                return true;
            } else {
                panic!(
                    "VarZeroVecOwned integrity: Found empty VarZeroVecOwned with a nonempty slice"
                );
            }
        }
        let len = unsafe {
            <F::Len as ULE>::slice_from_bytes_unchecked(&self.entire_slice[..F::Len::SIZE])[0]
                .iule_to_usize()
        };
        if len == 0 {
            panic!("VarZeroVecOwned integrity: Found empty VarZeroVecOwned with a nonempty slice");
        }
        if self.entire_slice.len() < F::Len::SIZE + (len - 1) * F::Index::SIZE {
            panic!("VarZeroVecOwned integrity: Not enough room for the indices");
        }
        let data_len = self.entire_slice.len() - F::Len::SIZE - (len - 1) * F::Index::SIZE;
        if data_len > F::Index::MAX_VALUE as usize {
            panic!("VarZeroVecOwned integrity: Data segment is too long");
        }

        let indices = unsafe {
            F::Index::slice_from_bytes_unchecked(
                &self.entire_slice[F::Len::SIZE..F::Len::SIZE + (len - 1) * F::Index::SIZE],
            )
        };
        for idx in indices {
            if idx.iule_to_usize() > data_len {
                panic!("VarZeroVecOwned integrity: Indices must not point past the data segment");
            }
        }
        for window in indices.windows(2) {
            if window[0].iule_to_usize() > window[1].iule_to_usize() {
                panic!("VarZeroVecOwned integrity: Indices must be in non-decreasing order");
            }
        }
        true
    }

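    /// Insert an element at the end of this vector.
    ///
    /// # Panics
    /// Panics if the encoded element, or the resulting encoded vector, does
    /// not fit within the index type used by format `F`.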
    pub fn push<A: EncodeAsVarULE<T> + ?Sized>(&mut self, element: &A) {
        self.insert(self.len(), element)
    }

    /// Insert an element at `index`. Panics if `index > len`.
    pub fn insert<A: EncodeAsVarULE<T> + ?Sized>(&mut self, index: usize, element: &A) {
        let len = self.len();
        if index > len {
            panic!("Called out-of-bounds insert() on VarZeroVec, index {index} len {len}");
        }

        let value_len = element.encode_var_ule_len();

        if len == 0 {
            // Inserting into an empty vector: write the length field (1) and
            // the single element directly; a one-element vector stores no indices.
            let header_len = F::Len::SIZE;
            let cap = header_len + value_len;
            self.entire_slice.resize(cap, 0);
            self.entire_slice[0] = 1;
            element.encode_var_ule_write(&mut self.entire_slice[header_len..]);
            return;
        }

        assert!(value_len < F::Index::MAX_VALUE as usize);
        unsafe {
            let place = self.shift(index, value_len, ShiftType::Insert);
            element.encode_var_ule_write(place);
        }
    }

    /// Remove the element at `index`. Panics if `index >= len`.
    pub fn remove(&mut self, index: usize) {
        let len = self.len();
        if index >= len {
            panic!("Called out-of-bounds remove() on VarZeroVec, index {index} len {len}");
        }
        if len == 1 {
            // Removing the last element leaves a fully empty buffer.
            self.entire_slice.clear();
            return;
        }
        unsafe {
            self.shift(index, 0, ShiftType::Remove);
        }
    }

    /// Replace the element at `index` with another one. Panics if `index >= len`.
    pub fn replace<A: EncodeAsVarULE<T> + ?Sized>(&mut self, index: usize, element: &A) {
        let len = self.len();
        if index >= len {
            panic!("Called out-of-bounds replace() on VarZeroVec, index {index} len {len}");
        }

        let value_len = element.encode_var_ule_len();

        assert!(value_len < F::Index::MAX_VALUE as usize);
        unsafe {
            let place = self.shift(index, value_len, ShiftType::Replace);
            element.encode_var_ule_write(place);
        }
    }
}

impl<T: VarULE + ?Sized, F: VarZeroVecFormat> fmt::Debug for VarZeroVecOwned<T, F>
where
    T: fmt::Debug,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        VarZeroSlice::fmt(self, f)
    }
}

impl<T: VarULE + ?Sized, F> Default for VarZeroVecOwned<T, F> {
    fn default() -> Self {
        Self::new()
    }
}

impl<T, A, F> PartialEq<&'_ [A]> for VarZeroVecOwned<T, F>
where
    T: VarULE + ?Sized,
    T: PartialEq,
    A: AsRef<T>,
    F: VarZeroVecFormat,
{
    #[inline]
    fn eq(&self, other: &&[A]) -> bool {
        self.iter().eq(other.iter().map(|t| t.as_ref()))
    }
}

impl<'a, T: ?Sized + VarULE, F: VarZeroVecFormat> From<&'a VarZeroSlice<T, F>>
    for VarZeroVecOwned<T, F>
{
    fn from(other: &'a VarZeroSlice<T, F>) -> Self {
        Self::from_slice(other)
    }
}

#[cfg(test)]
mod test {
    use super::VarZeroVecOwned;
    #[test]
    fn test_insert_integrity() {
        let mut items: Vec<String> = Vec::new();
        let mut zerovec = VarZeroVecOwned::<str>::new();

        items.insert(0, "1234567890".into());
        zerovec.insert(0, "1234567890");
        assert_eq!(zerovec, &*items);

        zerovec.insert(1, "foo3");
        items.insert(1, "foo3".into());
        assert_eq!(zerovec, &*items);

        items.insert(items.len(), "qwertyuiop".into());
        zerovec.insert(zerovec.len(), "qwertyuiop");
        assert_eq!(zerovec, &*items);

        items.insert(0, "asdfghjkl;".into());
        zerovec.insert(0, "asdfghjkl;");
        assert_eq!(zerovec, &*items);

        items.insert(2, "".into());
        zerovec.insert(2, "");
        assert_eq!(zerovec, &*items);
    }

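    // Added sketch, not part of the original test suite: exercises `push`,
    // which the tests above only hit indirectly through `insert`.
    #[test]
    fn test_push_integrity() {
        let mut items: Vec<String> = Vec::new();
        let mut zerovec = VarZeroVecOwned::<str>::new();

        zerovec.push("abcdefghijk");
        items.push("abcdefghijk".into());
        assert_eq!(zerovec, &*items);

        zerovec.push("");
        items.push("".into());
        assert_eq!(zerovec, &*items);

        zerovec.push("xyz");
        items.push("xyz".into());
        assert_eq!(zerovec, &*items);
    }
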
    #[test]
    fn test_empty_inserts() {
        let mut items: Vec<String> = Vec::new();
        let mut zerovec = VarZeroVecOwned::<str>::new();

        items.insert(0, "".into());
        zerovec.insert(0, "");
        assert_eq!(zerovec, &*items);

        items.insert(0, "".into());
        zerovec.insert(0, "");
        assert_eq!(zerovec, &*items);

        items.insert(0, "1234567890".into());
        zerovec.insert(0, "1234567890");
        assert_eq!(zerovec, &*items);

        items.insert(0, "".into());
        zerovec.insert(0, "");
        assert_eq!(zerovec, &*items);
    }

    #[test]
    fn test_small_insert_integrity() {
        let mut items: Vec<String> = Vec::new();
        let mut zerovec = VarZeroVecOwned::<str>::new();

        items.insert(0, "abc".into());
        zerovec.insert(0, "abc");
        assert_eq!(zerovec, &*items);

        zerovec.insert(1, "def");
        items.insert(1, "def".into());
        assert_eq!(zerovec, &*items);
    }

    #[test]
    #[should_panic]
    fn test_insert_past_end() {
        VarZeroVecOwned::<str>::new().insert(1, "");
    }

    #[test]
    fn test_remove_integrity() {
        let mut items: Vec<&str> = vec!["apples", "bananas", "eeples", "", "baneenees", "five", ""];
        let mut zerovec = VarZeroVecOwned::<str>::try_from_elements(&items).unwrap();

        for index in [0, 2, 4, 0, 1, 1, 0] {
            items.remove(index);
            zerovec.remove(index);
            assert_eq!(zerovec, &*items, "index {}, len {}", index, items.len());
        }
    }

    #[test]
    fn test_removing_last_element_clears() {
        let mut zerovec = VarZeroVecOwned::<str>::try_from_elements(&["buy some apples"]).unwrap();
        assert!(!zerovec.as_bytes().is_empty());
        zerovec.remove(0);
        assert!(zerovec.as_bytes().is_empty());
    }

    #[test]
    #[should_panic]
    fn test_remove_past_end() {
        VarZeroVecOwned::<str>::new().remove(0);
    }

    #[test]
    fn test_replace_integrity() {
        let mut items: Vec<&str> = vec!["apples", "bananas", "eeples", "", "baneenees", "five", ""];
        let mut zerovec = VarZeroVecOwned::<str>::try_from_elements(&items).unwrap();

        items[0] = "blablah";
        zerovec.replace(0, "blablah");
        assert_eq!(zerovec, &*items);

        items[1] = "twily";
        zerovec.replace(1, "twily");
        assert_eq!(zerovec, &*items);

        items[3] = "aoeuidhtns";
        zerovec.replace(3, "aoeuidhtns");
        assert_eq!(zerovec, &*items);

        items[6] = "0123456789";
        zerovec.replace(6, "0123456789");
        assert_eq!(zerovec, &*items);

        items[2] = "";
        zerovec.replace(2, "");
        assert_eq!(zerovec, &*items);
    }

    #[test]
    #[should_panic]
    fn test_replace_past_end() {
        VarZeroVecOwned::<str>::new().replace(0, "");
    }
}