#![allow(clippy::unwrap_used)]
#![allow(clippy::expect_used)]
#![allow(clippy::indexing_slicing)]
#![allow(clippy::panic)]

use super::*;
use crate::ule::*;
use alloc::vec::Vec;
use core::any;
use core::convert::TryInto;
use core::marker::PhantomData;
use core::ops::Deref;
use core::ops::Range;
use core::{fmt, ptr, slice};

use super::components::IntegerULE;

/// A fully-owned, mutable counterpart to `VarZeroSlice`, backed by a `Vec<u8>` that holds a
/// complete `VarZeroVec` encoding. Used to implement insertion, removal, and replacement.
pub struct VarZeroVecOwned<T: ?Sized, F = Index16> {
    marker1: PhantomData<T>,
    marker2: PhantomData<F>,
    // invariant: always a valid VarZeroSlice encoding (the empty buffer encodes the empty vector)
    entire_slice: Vec<u8>,
}

impl<T: ?Sized, F> Clone for VarZeroVecOwned<T, F> {
    fn clone(&self) -> Self {
        VarZeroVecOwned {
            marker1: PhantomData,
            marker2: PhantomData,
            entire_slice: self.entire_slice.clone(),
        }
    }
}

#[derive(PartialEq)]
enum ShiftType {
    Insert,
    Replace,
    Remove,
}

impl<T: VarULE + ?Sized, F: VarZeroVecFormat> Deref for VarZeroVecOwned<T, F> {
    type Target = VarZeroSlice<T, F>;
    fn deref(&self) -> &VarZeroSlice<T, F> {
        self.as_slice()
    }
}

impl<T: VarULE + ?Sized, F> VarZeroVecOwned<T, F> {
    /// Creates a new, empty `VarZeroVecOwned<T>`.
    pub fn new() -> Self {
        Self {
            marker1: PhantomData,
            marker2: PhantomData,
            entire_slice: Vec::new(),
        }
    }
}

impl<T: VarULE + ?Sized, F: VarZeroVecFormat> VarZeroVecOwned<T, F> {
    /// Constructs a `VarZeroVecOwned` by cloning the bytes of an existing `VarZeroSlice`.
    pub fn from_slice(slice: &VarZeroSlice<T, F>) -> Self {
        Self {
            marker1: PhantomData,
            marker2: PhantomData,
            entire_slice: slice.as_bytes().into(),
        }
    }

    /// Tries to construct a `VarZeroVecOwned` from a list of encodeable elements, returning
    /// an error if the encoding cannot be represented by the index type `F::Index`.
    pub fn try_from_elements<A>(elements: &[A]) -> Result<Self, &'static str>
    where
        A: EncodeAsVarULE<T>,
    {
        Ok(if elements.is_empty() {
            Self::from_slice(VarZeroSlice::new_empty())
        } else {
            Self {
                marker1: PhantomData,
                marker2: PhantomData,
                entire_slice: components::get_serializable_bytes_non_empty::<T, A, F>(elements)
                    .ok_or(F::Index::TOO_LARGE_ERROR)?,
            }
        })
    }

    /// Borrows this vec as a `VarZeroSlice`.
    pub fn as_slice(&self) -> &VarZeroSlice<T, F> {
        let slice: &[u8] = &self.entire_slice;
        unsafe {
            // safety: `entire_slice` is always maintained as a valid encoding
            VarZeroSlice::from_bytes_unchecked(slice)
        }
    }

    /// Allocates a new `VarZeroVecOwned` with room for `capacity` elements, estimating
    /// four data bytes per element plus its index.
    pub(crate) fn with_capacity(capacity: usize) -> Self {
        Self {
            marker1: PhantomData,
            marker2: PhantomData,
            entire_slice: Vec::with_capacity(capacity * (F::Index::SIZE + 4)),
        }
    }

    /// Reserves space for `capacity` additional elements, using the same size estimate.
    pub(crate) fn reserve(&mut self, capacity: usize) {
        self.entire_slice.reserve(capacity * (F::Index::SIZE + 4))
    }

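    // Layout note: `entire_slice` is encoded as `[length][indices for elements 1..len][data]`.
    // The first element's data always begins at offset 0 and has no stored index; offsets
    // stored in the index segment are relative to the start of the data segment.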
    /// Gets the data offset at which the element at `idx` begins, or the end of the data
    /// segment when `idx == len`.
    ///
    /// Safety: the vector must be non-empty and `idx <= self.len()`.
    unsafe fn element_position_unchecked(&self, idx: usize) -> usize {
        let len = self.len();
        let out = if idx == len {
            self.entire_slice.len() - F::Len::SIZE - (F::Index::SIZE * (len - 1))
        } else if let Some(idx) = self.index_data(idx) {
            idx.iule_to_usize()
        } else {
            0
        };
        debug_assert!(out + F::Len::SIZE + (len - 1) * F::Index::SIZE <= self.entire_slice.len());
        out
    }

    /// Gets the range of data bytes occupied by the element at `idx`.
    ///
    /// Safety: the vector must be non-empty and `idx < self.len()`.
    unsafe fn element_range_unchecked(&self, idx: usize) -> core::ops::Range<usize> {
        let start = self.element_position_unchecked(idx);
        let end = self.element_position_unchecked(idx + 1);
        debug_assert!(start <= end, "{start} > {end}");
        start..end
    }

    /// Overwrites the stored length field with `len`, panicking if it does not fit in `F::Len`.
    unsafe fn set_len(&mut self, len: usize) {
        assert!(len <= F::Len::MAX_VALUE as usize);
        let len_bytes = len.to_le_bytes();
        let len_ule = F::Len::iule_from_usize(len).expect(F::Len::TOO_LARGE_ERROR);
        self.entire_slice[0..F::Len::SIZE].copy_from_slice(ULE::slice_as_bytes(&[len_ule]));
        // Double-check that the length fits: the bytes beyond the length field must all be zero.
        assert_eq!(len_bytes[F::Len::SIZE..].iter().sum::<u8>(), 0);
    }

    /// Returns the byte range of the stored index for `index`, or `None` for index 0,
    /// which has no stored index.
    fn index_range(index: usize) -> Option<Range<usize>> {
        let index_minus_one = index.checked_sub(1)?;
        let pos = F::Len::SIZE + F::Index::SIZE * index_minus_one;
        Some(pos..pos + F::Index::SIZE)
    }

    /// Returns the stored index for `index`, or `None` for index 0.
    ///
    /// Safety: `index` must be within bounds (`< self.len()`).
    unsafe fn index_data(&self, index: usize) -> Option<&F::Index> {
        let index_range = Self::index_range(index)?;
        Some(&F::Index::slice_from_bytes_unchecked(&self.entire_slice[index_range])[0])
    }

    /// Returns a mutable reference to the stored index for `index`, or `None` for index 0.
    ///
    /// Safety: the buffer must be large enough to contain the stored index for `index`
    /// (which `shift()` guarantees before calling this).
    unsafe fn index_data_mut(&mut self, index: usize) -> Option<&mut F::Index> {
        let ptr = self.entire_slice.as_mut_ptr();
        let range = Self::index_range(index)?;

        let data = slice::from_raw_parts_mut(ptr.add(range.start), F::Index::SIZE);
        Some(&mut F::Index::iule_from_bytes_unchecked_mut(data)[0])
    }

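    /// Adds `amount` to (or, when negative, subtracts it from) every stored offset at or
    /// after `starting_index`, after the data segment has grown or shrunk.
    ///
    /// `starting_index` must be at least 1, since element 0 has no stored index.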
    unsafe fn shift_indices(&mut self, starting_index: usize, amount: i32) {
        let normalized_idx = starting_index
            .checked_sub(1)
            .expect("shift_indices called with a 0 starting index");
        let len = self.len();
        let indices = F::Index::iule_from_bytes_unchecked_mut(
            &mut self.entire_slice[F::Len::SIZE..F::Len::SIZE + F::Index::SIZE * (len - 1)],
        );
        for idx in &mut indices[normalized_idx..] {
            let mut new_idx = idx.iule_to_usize();
            if amount > 0 {
                new_idx = new_idx.checked_add(amount.try_into().unwrap()).unwrap();
            } else {
                new_idx = new_idx.checked_sub((-amount).try_into().unwrap()).unwrap();
            }
            *idx = F::Index::iule_from_usize(new_idx).expect(F::Index::TOO_LARGE_ERROR);
        }
    }

    /// Gets a borrowed `VarZeroVec` pointing at this vec's contents.
    pub fn as_varzerovec<'a>(&'a self) -> VarZeroVec<'a, T, F> {
        self.as_slice().into()
    }

    /// Empties the vector, removing all elements.
    pub fn clear(&mut self) {
        self.entire_slice.clear()
    }

    /// Consumes this vec and returns the underlying byte buffer.
    #[inline]
    pub fn into_bytes(self) -> Vec<u8> {
        self.entire_slice
    }

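    /// Shifts the bytes of `entire_slice` to accommodate an insert, replace, or remove at
    /// `index`: resizes the buffer, moves the index and data segments, and fixes up the
    /// stored offsets. Returns the mutable byte slice (of length `new_size`) into which the
    /// new element must be written; for removals, `new_size` is 0 and the return value is unused.
    ///
    /// Safety: the vector must be non-empty, and `index` must be in bounds for the operation
    /// (`<= len` for inserts, `< len` otherwise).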
    unsafe fn shift(&mut self, index: usize, new_size: usize, shift_type: ShiftType) -> &mut [u8] {
        let len = self.len();
        let slice_len = self.entire_slice.len();

        // The data range (relative to the data segment) currently occupied by the element
        // being replaced or removed; for inserts, the empty range at the insertion point.
        let prev_element = match shift_type {
            ShiftType::Insert => {
                let pos = self.element_position_unchecked(index);
                pos..pos
            }
            _ => self.element_range_unchecked(index),
        };

        // How much the index segment grows or shrinks, in bytes.
        let index_shift: i64 = match shift_type {
            ShiftType::Insert => F::Index::SIZE as i64,
            ShiftType::Replace => 0,
            ShiftType::Remove => -(F::Index::SIZE as i64),
        };
        // The total change in the length of the encoded buffer.
        let shift: i64 =
            new_size as i64 - (prev_element.end - prev_element.start) as i64 + index_shift;
        let new_slice_len = slice_len.wrapping_add(shift as usize);
        if shift > 0 {
            if new_slice_len > F::Index::MAX_VALUE as usize {
                panic!(
                    "Attempted to grow VarZeroVec to an encoded size that does not fit within the length size used by {}",
                    any::type_name::<F>()
                );
            }
            self.entire_slice.resize(new_slice_len, 0);
        }

        {
            let slice_range = self.entire_slice.as_mut_ptr_range();
            let indices_start = slice_range.start.add(F::Len::SIZE);
            let old_slice_end = slice_range.start.add(slice_len);
            let data_start = indices_start.add((len - 1) * F::Index::SIZE);
            let prev_element_p =
                data_start.add(prev_element.start)..data_start.add(prev_element.end);

            // The byte range of the stored index for `index`, if it has one (element 0 does not).
            let index_range = if let Some(index_minus_one) = index.checked_sub(1) {
                let index_start = indices_start.add(F::Index::SIZE * index_minus_one);
                Some(index_start..index_start.add(F::Index::SIZE))
            } else {
                None
            };

            unsafe fn shift_bytes(block: Range<*const u8>, to: *mut u8) {
                debug_assert!(block.end >= block.start);
                ptr::copy(block.start, to, block.end.offset_from(block.start) as usize);
            }

            if shift_type == ShiftType::Remove {
                // Close up the gap left in the index segment by the removed element's index.
                if let Some(ref index_range) = index_range {
                    shift_bytes(index_range.end..prev_element_p.start, index_range.start);
                } else {
                    shift_bytes(
                        indices_start.add(F::Index::SIZE)..prev_element_p.start,
                        indices_start,
                    )
                }
            }

            // Move everything after the affected element into its new position.
            shift_bytes(
                prev_element_p.end..old_slice_end,
                prev_element_p
                    .start
                    .offset((new_size as i64 + index_shift) as isize),
            );

            let first_affected_index = match shift_type {
                ShiftType::Insert => {
                    // Make room in the index segment for the new element's index entry.
                    if let Some(index_range) = index_range {
                        shift_bytes(index_range.start..prev_element_p.start, index_range.end);
                        let index_data = self
                            .index_data_mut(index)
                            .expect("If index_range is some, index is > 0 and should not panic in index_data_mut");
                        *index_data = F::Index::iule_from_usize(prev_element.start)
                            .expect(F::Index::TOO_LARGE_ERROR);
                    } else {
                        shift_bytes(
                            indices_start..prev_element_p.start,
                            indices_start.add(F::Index::SIZE),
                        );
                        let index_data = self
                            .index_data_mut(1)
                            .expect("Should be able to write to index 1");
                        *index_data = F::Index::iule_from_usize(0).expect("0 is always valid!");
                    }

                    self.set_len(len + 1);
                    index + 1
                }
                ShiftType::Remove => {
                    self.set_len(len - 1);
                    if index == 0 {
                        index + 1
                    } else {
                        index
                    }
                }
                ShiftType::Replace => index + 1,
            };
            // Adjust the buffer length: a no-op when the buffer was grown above, and a
            // truncation of the now-unused tail when it shrank.
            self.entire_slice.set_len(new_slice_len);
            // Shift the remaining stored offsets to account for the change in data size.
            self.shift_indices(first_affected_index, (shift - index_shift) as i32);
        };

        debug_assert!(self.verify_integrity());

        // Return a mutable slice over the new element's data.
        let element_pos = F::Len::SIZE
            + (self.len() - 1) * F::Index::SIZE
            + self.element_position_unchecked(index);
        &mut self.entire_slice[element_pos..element_pos + new_size]
    }

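    /// Checks the internal invariants of the encoded buffer, panicking with a description of
    /// the violated invariant if there is one; returns `true` otherwise so it can be used in
    /// `debug_assert!` after each mutation.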
    fn verify_integrity(&self) -> bool {
        if self.is_empty() {
            if self.entire_slice.is_empty() {
                return true;
            } else {
                panic!(
                    "VarZeroVecOwned integrity: Found empty VarZeroVecOwned with a nonempty slice"
                );
            }
        }
        let len = unsafe {
            <F::Len as ULE>::slice_from_bytes_unchecked(&self.entire_slice[..F::Len::SIZE])[0]
                .iule_to_usize()
        };
        if len == 0 {
            panic!("VarZeroVecOwned integrity: Found empty VarZeroVecOwned with a nonempty slice");
        }
        if self.entire_slice.len() < F::Len::SIZE + (len - 1) * F::Index::SIZE {
            panic!("VarZeroVecOwned integrity: Not enough room for the indices");
        }
        let data_len = self.entire_slice.len() - F::Len::SIZE - (len - 1) * F::Index::SIZE;
        if data_len > F::Index::MAX_VALUE as usize {
            panic!("VarZeroVecOwned integrity: Data segment is too long");
        }

        let indices = unsafe {
            F::Index::slice_from_bytes_unchecked(
                &self.entire_slice[F::Len::SIZE..F::Len::SIZE + (len - 1) * F::Index::SIZE],
            )
        };
        for idx in indices {
            if idx.iule_to_usize() > data_len {
                panic!("VarZeroVecOwned integrity: Indices must not point past the data segment");
            }
        }
        for window in indices.windows(2) {
            if window[0].iule_to_usize() > window[1].iule_to_usize() {
                panic!("VarZeroVecOwned integrity: Indices must be in non-decreasing order");
            }
        }
        true
    }

    /// Appends an element to the end of the vector.
    pub fn push<A: EncodeAsVarULE<T> + ?Sized>(&mut self, element: &A) {
        self.insert(self.len(), element)
    }

    /// Inserts an element at `index`, shifting subsequent elements to the right.
    /// Panics if `index > len`.
    pub fn insert<A: EncodeAsVarULE<T> + ?Sized>(&mut self, index: usize, element: &A) {
        let len = self.len();
        if index > len {
            panic!("Called out-of-bounds insert() on VarZeroVec, index {index} len {len}");
        }

        let value_len = element.encode_var_ule_len();

        if len == 0 {
            // First element: write the length header and the element directly.
            let header_len = F::Len::SIZE;
            let cap = header_len + value_len;
            self.entire_slice.resize(cap, 0);
            // A length of 1 only needs the lowest little-endian byte of the length field.
            self.entire_slice[0] = 1;
            element.encode_var_ule_write(&mut self.entire_slice[header_len..]);
            return;
        }

        assert!(value_len < F::Index::MAX_VALUE as usize);
        unsafe {
            let place = self.shift(index, value_len, ShiftType::Insert);
            element.encode_var_ule_write(place);
        }
    }

    /// Removes the element at `index`, shifting subsequent elements to the left.
    /// Panics if `index >= len`.
    pub fn remove(&mut self, index: usize) {
        let len = self.len();
        if index >= len {
            panic!("Called out-of-bounds remove() on VarZeroVec, index {index} len {len}");
        }
        if len == 1 {
            // Removing the last element leaves an empty (zero-byte) encoding.
            self.entire_slice.clear();
            return;
        }
        unsafe {
            self.shift(index, 0, ShiftType::Remove);
        }
    }

    /// Replaces the element at `index` with a new value. Panics if `index >= len`.
    pub fn replace<A: EncodeAsVarULE<T> + ?Sized>(&mut self, index: usize, element: &A) {
        let len = self.len();
        if index >= len {
            panic!("Called out-of-bounds replace() on VarZeroVec, index {index} len {len}");
        }

        let value_len = element.encode_var_ule_len();

        assert!(value_len < F::Index::MAX_VALUE as usize);
        unsafe {
            let place = self.shift(index, value_len, ShiftType::Replace);
            element.encode_var_ule_write(place);
        }
    }
}

impl<T: VarULE + ?Sized, F: VarZeroVecFormat> fmt::Debug for VarZeroVecOwned<T, F>
where
    T: fmt::Debug,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        VarZeroSlice::fmt(self, f)
    }
}

impl<T: VarULE + ?Sized, F> Default for VarZeroVecOwned<T, F> {
    fn default() -> Self {
        Self::new()
    }
}

impl<T, A, F> PartialEq<&'_ [A]> for VarZeroVecOwned<T, F>
where
    T: VarULE + ?Sized,
    T: PartialEq,
    A: AsRef<T>,
    F: VarZeroVecFormat,
{
    #[inline]
    fn eq(&self, other: &&[A]) -> bool {
        self.iter().eq(other.iter().map(|t| t.as_ref()))
    }
}

impl<'a, T: ?Sized + VarULE, F: VarZeroVecFormat> From<&'a VarZeroSlice<T, F>>
    for VarZeroVecOwned<T, F>
{
    fn from(other: &'a VarZeroSlice<T, F>) -> Self {
        Self::from_slice(other)
    }
}

#[cfg(test)]
mod test {
    use super::VarZeroVecOwned;
    #[test]
    fn test_insert_integrity() {
        let mut items: Vec<String> = Vec::new();
        let mut zerovec = VarZeroVecOwned::<str>::new();

        items.insert(0, "1234567890".into());
        zerovec.insert(0, "1234567890");
        assert_eq!(zerovec, &*items);

        zerovec.insert(1, "foo3");
        items.insert(1, "foo3".into());
        assert_eq!(zerovec, &*items);

        items.insert(items.len(), "qwertyuiop".into());
        zerovec.insert(zerovec.len(), "qwertyuiop");
        assert_eq!(zerovec, &*items);

        items.insert(0, "asdfghjkl;".into());
        zerovec.insert(0, "asdfghjkl;");
        assert_eq!(zerovec, &*items);

        items.insert(2, "".into());
        zerovec.insert(2, "");
        assert_eq!(zerovec, &*items);
    }

    #[test]
    fn test_empty_inserts() {
        let mut items: Vec<String> = Vec::new();
        let mut zerovec = VarZeroVecOwned::<str>::new();

        items.insert(0, "".into());
        zerovec.insert(0, "");
        assert_eq!(zerovec, &*items);

        items.insert(0, "".into());
        zerovec.insert(0, "");
        assert_eq!(zerovec, &*items);

        items.insert(0, "1234567890".into());
        zerovec.insert(0, "1234567890");
        assert_eq!(zerovec, &*items);

        items.insert(0, "".into());
        zerovec.insert(0, "");
        assert_eq!(zerovec, &*items);
    }

    #[test]
    fn test_small_insert_integrity() {
        let mut items: Vec<String> = Vec::new();
        let mut zerovec = VarZeroVecOwned::<str>::new();

        items.insert(0, "abc".into());
        zerovec.insert(0, "abc");
        assert_eq!(zerovec, &*items);

        zerovec.insert(1, "def");
        items.insert(1, "def".into());
        assert_eq!(zerovec, &*items);
    }

    #[test]
    #[should_panic]
    fn test_insert_past_end() {
        VarZeroVecOwned::<str>::new().insert(1, "");
    }

    #[test]
    fn test_remove_integrity() {
        let mut items: Vec<&str> = vec!["apples", "bananas", "eeples", "", "baneenees", "five", ""];
        let mut zerovec = VarZeroVecOwned::<str>::try_from_elements(&items).unwrap();

        for index in [0, 2, 4, 0, 1, 1, 0] {
            items.remove(index);
            zerovec.remove(index);
            assert_eq!(zerovec, &*items, "index {}, len {}", index, items.len());
        }
    }

    #[test]
    fn test_removing_last_element_clears() {
        let mut zerovec = VarZeroVecOwned::<str>::try_from_elements(&["buy some apples"]).unwrap();
        assert!(!zerovec.as_bytes().is_empty());
        zerovec.remove(0);
        assert!(zerovec.as_bytes().is_empty());
    }
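
    // A minimal extra check sketching a push()/clear() round trip; it is illustrative and
    // relies only on APIs exercised elsewhere in this module (push, clear, is_empty, as_bytes).
    #[test]
    fn test_push_and_clear() {
        let mut items: Vec<String> = Vec::new();
        let mut zerovec = VarZeroVecOwned::<str>::new();

        items.push("apples".into());
        zerovec.push("apples");
        items.push("bananas".into());
        zerovec.push("bananas");
        assert_eq!(zerovec, &*items);

        zerovec.clear();
        assert!(zerovec.is_empty());
        assert!(zerovec.as_bytes().is_empty());
    }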

    #[test]
    #[should_panic]
    fn test_remove_past_end() {
        VarZeroVecOwned::<str>::new().remove(0);
    }

    #[test]
    fn test_replace_integrity() {
        let mut items: Vec<&str> = vec!["apples", "bananas", "eeples", "", "baneenees", "five", ""];
        let mut zerovec = VarZeroVecOwned::<str>::try_from_elements(&items).unwrap();

        items[0] = "blablah";
        zerovec.replace(0, "blablah");
        assert_eq!(zerovec, &*items);

        items[1] = "twily";
        zerovec.replace(1, "twily");
        assert_eq!(zerovec, &*items);

        items[3] = "aoeuidhtns";
        zerovec.replace(3, "aoeuidhtns");
        assert_eq!(zerovec, &*items);

        items[6] = "0123456789";
        zerovec.replace(6, "0123456789");
        assert_eq!(zerovec, &*items);

        items[2] = "";
        zerovec.replace(2, "");
        assert_eq!(zerovec, &*items);
    }

    #[test]
    #[should_panic]
    fn test_replace_past_end() {
        VarZeroVecOwned::<str>::new().replace(0, "");
    }