use std::alloc::{handle_alloc_error, Layout};
use std::mem;
use std::ptr::NonNull;

use crate::alloc::{Deallocation, ALIGNMENT};
use crate::{
    bytes::Bytes,
    native::{ArrowNativeType, ToByteSlice},
    util::bit_util,
};

#[cfg(feature = "pool")]
use crate::pool::{MemoryPool, MemoryReservation};
#[cfg(feature = "pool")]
use std::sync::Mutex;

use super::Buffer;

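/// A contiguous, mutable, growable region of memory whose allocations are
/// aligned to `ALIGNMENT`, used to build up [`Buffer`]s.
///
/// Example of typical use (a sketch; the doctest assumes this type is exported
/// as `arrow_buffer::MutableBuffer`):
///
/// ```
/// # use arrow_buffer::MutableBuffer;
/// let mut buffer = MutableBuffer::new(0);
/// buffer.push(256u32);
/// buffer.extend_from_slice(&[1u32]);
/// let data: Vec<u32> = buffer.typed_data().to_vec();
/// assert_eq!(data, vec![256, 1]);
/// ```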
#[derive(Debug)]
pub struct MutableBuffer {
    // Pointer to the allocation; dangling iff capacity == 0
    data: NonNull<u8>,
    // Invariant: len <= capacity (i.e. layout.size())
    len: usize,
    layout: Layout,

    // Optional memory pool reservation accounting for this buffer's allocation
    #[cfg(feature = "pool")]
    reservation: Mutex<Option<Box<dyn MemoryReservation>>>,
}

impl MutableBuffer {
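    /// Allocate a new [`MutableBuffer`] with an initial capacity of at least
    /// `capacity` bytes; see [`MutableBuffer::with_capacity`].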
    #[inline]
    pub fn new(capacity: usize) -> Self {
        Self::with_capacity(capacity)
    }

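    /// Allocate a new [`MutableBuffer`] whose capacity is `capacity` rounded up
    /// to the next multiple of 64 bytes.
    ///
    /// Example (a sketch; assumes the `arrow_buffer::MutableBuffer` export):
    ///
    /// ```
    /// # use arrow_buffer::MutableBuffer;
    /// let buf = MutableBuffer::with_capacity(63);
    /// assert_eq!(buf.capacity(), 64); // rounded up to a multiple of 64
    /// assert_eq!(buf.len(), 0);
    /// ```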
    #[inline]
    pub fn with_capacity(capacity: usize) -> Self {
        let capacity = bit_util::round_upto_multiple_of_64(capacity);
        let layout = Layout::from_size_align(capacity, ALIGNMENT)
            .expect("failed to create layout for MutableBuffer");
        let data = match layout.size() {
            0 => dangling_ptr(),
            _ => {
                // SAFETY: the layout is checked above to have non-zero size
                let raw_ptr = unsafe { std::alloc::alloc(layout) };
                NonNull::new(raw_ptr).unwrap_or_else(|| handle_alloc_error(layout))
            }
        };
        Self {
            data,
            len: 0,
            layout,
            #[cfg(feature = "pool")]
            reservation: std::sync::Mutex::new(None),
        }
    }

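    /// Allocates a new buffer of `len` zeroed bytes, with both length and
    /// capacity set to `len` (no rounding up, unlike
    /// [`MutableBuffer::with_capacity`]).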
    pub fn from_len_zeroed(len: usize) -> Self {
        let layout = Layout::from_size_align(len, ALIGNMENT).unwrap();
        let data = match layout.size() {
            0 => dangling_ptr(),
            _ => {
                // SAFETY: the layout is checked above to have non-zero size
                let raw_ptr = unsafe { std::alloc::alloc_zeroed(layout) };
                NonNull::new(raw_ptr).unwrap_or_else(|| handle_alloc_error(layout))
            }
        };
        Self {
            data,
            len,
            layout,
            #[cfg(feature = "pool")]
            reservation: std::sync::Mutex::new(None),
        }
    }

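    /// Attempts to reclaim the allocation of `bytes` as a `MutableBuffer`,
    /// returning `Err(bytes)` unchanged if the allocation was not made by the
    /// standard allocator (i.e. its deallocation is not
    /// `Deallocation::Standard`).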
    pub(crate) fn from_bytes(bytes: Bytes) -> Result<Self, Bytes> {
        let layout = match bytes.deallocation() {
            Deallocation::Standard(layout) => *layout,
            _ => return Err(bytes),
        };

        let len = bytes.len();
        let data = bytes.ptr();
        #[cfg(feature = "pool")]
        let reservation = bytes.reservation.lock().unwrap().take();
        mem::forget(bytes);

        Ok(Self {
            data,
            len,
            layout,
            #[cfg(feature = "pool")]
            reservation: Mutex::new(reservation),
        })
    }

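    /// Creates a [`MutableBuffer`] large enough to hold `len` bits, with all
    /// bits initialized to 0.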
    pub fn new_null(len: usize) -> Self {
        let num_bytes = bit_util::ceil(len, 8);
        MutableBuffer::from_len_zeroed(num_bytes)
    }

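    /// Sets the first `end` bytes to all ones (if `val` is true) or all zeros,
    /// sets the length to `end`, and returns the buffer. Panics if `end`
    /// exceeds the capacity.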
    pub fn with_bitset(mut self, end: usize, val: bool) -> Self {
        assert!(end <= self.layout.size());
        let v = if val { 255 } else { 0 };
        unsafe {
            std::ptr::write_bytes(self.data.as_ptr(), v, end);
            self.len = end;
        }
        self
    }

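    /// Zeroes `count` bytes starting at byte offset `start`, panicking if the
    /// range is out of bounds for the buffer's capacity.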
    pub fn set_null_bits(&mut self, start: usize, count: usize) {
        assert!(
            start.saturating_add(count) <= self.layout.size(),
            "range start index {start} and count {count} out of bounds for \
            buffer of length {}",
            self.layout.size(),
        );

        // SAFETY: the assertion above ensures `start + count` is within the allocation
        unsafe {
            std::ptr::write_bytes(self.data.as_ptr().add(start), 0, count);
        }
    }

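    /// Ensures there is capacity for at least `additional` more bytes,
    /// reallocating if necessary. The new capacity is the larger of double the
    /// current capacity and the required size rounded up to a multiple of 64.
    ///
    /// Example (a sketch; assumes the `arrow_buffer::MutableBuffer` export):
    ///
    /// ```
    /// # use arrow_buffer::MutableBuffer;
    /// let mut buf = MutableBuffer::new(1);
    /// assert_eq!(buf.capacity(), 64);
    /// buf.reserve(80);
    /// assert_eq!(buf.capacity(), 128);
    /// ```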
    #[inline(always)]
    pub fn reserve(&mut self, additional: usize) {
        let required_cap = self.len + additional;
        if required_cap > self.layout.size() {
            let new_capacity = bit_util::round_upto_multiple_of_64(required_cap);
            let new_capacity = std::cmp::max(new_capacity, self.layout.size() * 2);
            self.reallocate(new_capacity)
        }
    }

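    /// Reallocates the backing memory to exactly `capacity` bytes (freeing it
    /// if `capacity` is zero) and resizes any pool reservation to match.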
    #[cold]
    fn reallocate(&mut self, capacity: usize) {
        let new_layout = Layout::from_size_align(capacity, self.layout.align()).unwrap();
        if new_layout.size() == 0 {
            if self.layout.size() != 0 {
                // SAFETY: data was allocated with the current layout
                unsafe { std::alloc::dealloc(self.as_mut_ptr(), self.layout) };
                self.layout = new_layout
            }
            return;
        }

        let data = match self.layout.size() {
            // SAFETY: new_layout is checked above to have non-zero size
            0 => unsafe { std::alloc::alloc(new_layout) },
            // SAFETY: data was allocated with the current (non-zero size) layout
            _ => unsafe { std::alloc::realloc(self.as_mut_ptr(), self.layout, capacity) },
        };
        self.data = NonNull::new(data).unwrap_or_else(|| handle_alloc_error(new_layout));
        self.layout = new_layout;
        #[cfg(feature = "pool")]
        {
            if let Some(reservation) = self.reservation.lock().unwrap().as_mut() {
                reservation.resize(self.layout.size());
            }
        }
    }

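    /// Truncates the buffer to `len` bytes, resizing any pool reservation to
    /// match. Does nothing if `len` is greater than the current length;
    /// capacity is never reduced.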
    #[inline(always)]
    pub fn truncate(&mut self, len: usize) {
        if len > self.len {
            return;
        }
        self.len = len;
        #[cfg(feature = "pool")]
        {
            if let Some(reservation) = self.reservation.lock().unwrap().as_mut() {
                reservation.resize(self.len);
            }
        }
    }

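    /// Resizes the buffer to `new_len` bytes, filling any newly added bytes
    /// with `value`. Shrinking only reduces the length; capacity is kept.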
    #[inline(always)]
    pub fn resize(&mut self, new_len: usize, value: u8) {
        if new_len > self.len {
            let diff = new_len - self.len;
            self.reserve(diff);
            // SAFETY: reserve(diff) above guarantees capacity for `diff` more bytes
            unsafe { self.data.as_ptr().add(self.len).write_bytes(value, diff) };
        }
        self.len = new_len;
        #[cfg(feature = "pool")]
        {
            if let Some(reservation) = self.reservation.lock().unwrap().as_mut() {
                reservation.resize(self.len);
            }
        }
    }

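    /// Shrinks the capacity to the current length rounded up to a multiple of
    /// 64 bytes, reallocating only when this reduces the allocation.
    ///
    /// Example (a sketch; assumes the `arrow_buffer::MutableBuffer` export):
    ///
    /// ```
    /// # use arrow_buffer::MutableBuffer;
    /// let mut buffer = MutableBuffer::new(128);
    /// assert_eq!(buffer.capacity(), 128);
    /// buffer.push(1u8);
    /// buffer.shrink_to_fit();
    /// assert!(buffer.capacity() >= 64 && buffer.capacity() < 128);
    /// ```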
    pub fn shrink_to_fit(&mut self) {
        let new_capacity = bit_util::round_upto_multiple_of_64(self.len);
        if new_capacity < self.layout.size() {
            self.reallocate(new_capacity)
        }
    }

    #[inline]
    pub const fn is_empty(&self) -> bool {
        self.len == 0
    }

    #[inline]
    pub const fn len(&self) -> usize {
        self.len
    }

    #[inline]
    pub const fn capacity(&self) -> usize {
        self.layout.size()
    }

    pub fn clear(&mut self) {
        self.len = 0
    }

    pub fn as_slice(&self) -> &[u8] {
        self
    }

    pub fn as_slice_mut(&mut self) -> &mut [u8] {
        self
    }

    #[inline]
    pub const fn as_ptr(&self) -> *const u8 {
        self.data.as_ptr()
    }

    #[inline]
    pub fn as_mut_ptr(&mut self) -> *mut u8 {
        self.data.as_ptr()
    }

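    /// Freezes this [`MutableBuffer`] into an immutable [`Buffer`],
    /// transferring ownership of the allocation (and any pool reservation)
    /// without copying.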
    #[inline]
    pub(super) fn into_buffer(self) -> Buffer {
        // SAFETY: `data` points to an allocation of `layout` with at least `len` initialized bytes
        let bytes = unsafe { Bytes::new(self.data, self.len, Deallocation::Standard(self.layout)) };
        #[cfg(feature = "pool")]
        {
            let reservation = self.reservation.lock().unwrap().take();
            *bytes.reservation.lock().unwrap() = reservation;
        }
        std::mem::forget(self);
        Buffer::from(bytes)
    }

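    /// Reinterprets the buffer's contents as a mutable slice of `T`, panicking
    /// if the data is not aligned to `T` or the length is not a multiple of
    /// `size_of::<T>()`.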
    pub fn typed_data_mut<T: ArrowNativeType>(&mut self) -> &mut [T] {
        // SAFETY: ArrowNativeType is a plain-old-data type valid for any bit pattern
        let (prefix, offsets, suffix) = unsafe { self.as_slice_mut().align_to_mut::<T>() };
        assert!(prefix.is_empty() && suffix.is_empty());
        offsets
    }

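    /// Reinterprets the buffer's contents as a slice of `T`, panicking if the
    /// data is not aligned to `T` or the length is not a multiple of
    /// `size_of::<T>()`.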
    pub fn typed_data<T: ArrowNativeType>(&self) -> &[T] {
        // SAFETY: ArrowNativeType is a plain-old-data type valid for any bit pattern
        let (prefix, offsets, suffix) = unsafe { self.as_slice().align_to::<T>() };
        assert!(prefix.is_empty() && suffix.is_empty());
        offsets
    }

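    /// Appends the bytes of all elements in `items` to the buffer, reserving
    /// capacity as needed.
    ///
    /// Example (a sketch; assumes the `arrow_buffer::MutableBuffer` export):
    ///
    /// ```
    /// # use arrow_buffer::MutableBuffer;
    /// let mut buffer = MutableBuffer::new(0);
    /// buffer.extend_from_slice(&[2u32, 0]);
    /// assert_eq!(buffer.len(), 8); // two u32s, 4 bytes each
    /// ```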
    #[inline]
    pub fn extend_from_slice<T: ArrowNativeType>(&mut self, items: &[T]) {
        let additional = mem::size_of_val(items);
        self.reserve(additional);
        unsafe {
            // SAFETY: ArrowNativeType values can be copied as raw bytes, so the
            // whole slice is copied at once instead of element by element
            let src = items.as_ptr() as *const u8;
            let dst = self.data.as_ptr().add(self.len);
            std::ptr::copy_nonoverlapping(src, dst, additional)
        }
        self.len += additional;
    }

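    /// Appends the bytes of `item` to the buffer, reserving capacity as needed.
    ///
    /// Example (a sketch; assumes the `arrow_buffer::MutableBuffer` export):
    ///
    /// ```
    /// # use arrow_buffer::MutableBuffer;
    /// let mut buffer = MutableBuffer::new(0);
    /// buffer.push(256u32);
    /// assert_eq!(buffer.len(), 4); // one u32 is 4 bytes
    /// ```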
    #[inline]
    pub fn push<T: ToByteSlice>(&mut self, item: T) {
        let additional = std::mem::size_of::<T>();
        self.reserve(additional);
        unsafe {
            let src = item.to_byte_slice().as_ptr();
            let dst = self.data.as_ptr().add(self.len);
            std::ptr::copy_nonoverlapping(src, dst, additional);
        }
        self.len += additional;
    }

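    /// Appends the bytes of `item` without checking capacity.
    ///
    /// # Safety
    /// The caller must have already reserved at least `size_of::<T>()` bytes of
    /// spare capacity, e.g. via [`MutableBuffer::reserve`].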
    #[inline]
    pub unsafe fn push_unchecked<T: ToByteSlice>(&mut self, item: T) {
        let additional = std::mem::size_of::<T>();
        let src = item.to_byte_slice().as_ptr();
        let dst = self.data.as_ptr().add(self.len);
        std::ptr::copy_nonoverlapping(src, dst, additional);
        self.len += additional;
    }

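    /// Extends the buffer by `additional` zeroed bytes.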
    #[inline]
    pub fn extend_zeros(&mut self, additional: usize) {
        self.resize(self.len + additional, 0);
    }

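    /// Sets the length of this buffer.
    ///
    /// # Safety
    /// The first `len` bytes must have been initialized; `len <= capacity` is
    /// checked by an assertion.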
    #[inline]
    pub unsafe fn set_len(&mut self, len: usize) {
        assert!(len <= self.capacity());
        self.len = len;
    }

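    /// Packs `len` booleans produced by `f(i)` into a little-endian bitmask,
    /// 64 bits at a time, truncating the result to `ceil(len / 8)` bytes.
    ///
    /// Example (a sketch; assumes the `arrow_buffer::MutableBuffer` export):
    ///
    /// ```
    /// # use arrow_buffer::MutableBuffer;
    /// let buffer = MutableBuffer::collect_bool(9, |i| i % 2 == 0);
    /// assert_eq!(buffer.len(), 2); // 9 bits rounded up to 2 bytes
    /// assert_eq!(buffer.as_slice()[0], 0b01010101);
    /// ```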
    #[inline]
    pub fn collect_bool<F: FnMut(usize) -> bool>(len: usize, mut f: F) -> Self {
        let mut buffer = Self::new(bit_util::ceil(len, 64) * 8);

        let chunks = len / 64;
        let remainder = len % 64;
        for chunk in 0..chunks {
            let mut packed = 0;
            for bit_idx in 0..64 {
                let i = bit_idx + chunk * 64;
                packed |= (f(i) as u64) << bit_idx;
            }

            // SAFETY: sufficient capacity was allocated above
            unsafe { buffer.push_unchecked(packed) }
        }

        if remainder != 0 {
            let mut packed = 0;
            for bit_idx in 0..remainder {
                let i = bit_idx + chunks * 64;
                packed |= (f(i) as u64) << bit_idx;
            }

            // SAFETY: sufficient capacity was allocated above
            unsafe { buffer.push_unchecked(packed) }
        }

        buffer.truncate(bit_util::ceil(len, 8));
        buffer
    }

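    /// Registers this buffer with the given memory pool, replacing any existing
    /// reservation with one sized to the current capacity.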
    #[cfg(feature = "pool")]
    pub fn claim(&self, pool: &dyn MemoryPool) {
        *self.reservation.lock().unwrap() = Some(pool.reserve(self.capacity()));
    }
}

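/// Returns a dangling pointer aligned to `ALIGNMENT`, used as the data pointer
/// for zero-sized allocations.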
#[inline]
pub(crate) fn dangling_ptr() -> NonNull<u8> {
    #[cfg(miri)]
    {
        // Use the strict-provenance API under Miri, which rejects plain
        // integer-to-pointer casts
        unsafe { NonNull::new_unchecked(std::ptr::without_provenance_mut(ALIGNMENT)) }
    }
    #[cfg(not(miri))]
    {
        // SAFETY: ALIGNMENT is a non-zero constant, so the pointer is non-null
        unsafe { NonNull::new_unchecked(ALIGNMENT as *mut u8) }
    }
}

impl<A: ArrowNativeType> Extend<A> for MutableBuffer {
    #[inline]
    fn extend<T: IntoIterator<Item = A>>(&mut self, iter: T) {
        let iterator = iter.into_iter();
        self.extend_from_iter(iterator)
    }
}

impl<T: ArrowNativeType> From<Vec<T>> for MutableBuffer {
    fn from(value: Vec<T>) -> Self {
        // SAFETY: a Vec's pointer is always non-null
        let data = unsafe { NonNull::new_unchecked(value.as_ptr() as _) };
        let len = value.len() * mem::size_of::<T>();
        // SAFETY: a Vec guarantees its capacity describes a valid array layout
        let layout = unsafe { Layout::array::<T>(value.capacity()).unwrap_unchecked() };
        mem::forget(value);
        Self {
            data,
            len,
            layout,
            #[cfg(feature = "pool")]
            reservation: std::sync::Mutex::new(None),
        }
    }
}

impl MutableBuffer {
    #[inline]
    pub(super) fn extend_from_iter<T: ArrowNativeType, I: Iterator<Item = T>>(
        &mut self,
        mut iterator: I,
    ) {
        let item_size = std::mem::size_of::<T>();
        let (lower, _) = iterator.size_hint();
        let additional = lower * item_size;
        self.reserve(additional);

        // SetLenOnDrop keeps `self.len` consistent even if the iterator panics
        let mut len = SetLenOnDrop::new(&mut self.len);
        let mut dst = unsafe { self.data.as_ptr().add(len.local_len) };
        let capacity = self.layout.size();

        // write directly into the spare capacity while there is room
        while len.local_len + item_size <= capacity {
            if let Some(item) = iterator.next() {
                unsafe {
                    let src = item.to_byte_slice().as_ptr();
                    std::ptr::copy_nonoverlapping(src, dst, item_size);
                    dst = dst.add(item_size);
                }
                len.local_len += item_size;
            } else {
                break;
            }
        }
        drop(len);

        // the iterator yielded more items than its lower size hint: fall back to push
        iterator.for_each(|item| self.push(item));
    }

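    /// Creates a [`MutableBuffer`] from an iterator of `T`, writing each item
    /// without per-item bounds checks.
    ///
    /// # Safety
    /// The iterator's `size_hint` upper bound must exist and be exact, i.e. it
    /// must equal the number of items actually yielded.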
    #[inline]
    pub unsafe fn from_trusted_len_iter<T: ArrowNativeType, I: Iterator<Item = T>>(
        iterator: I,
    ) -> Self {
        let item_size = std::mem::size_of::<T>();
        let (_, upper) = iterator.size_hint();
        let upper = upper.expect("from_trusted_len_iter requires an upper limit");
        let len = upper * item_size;

        let mut buffer = MutableBuffer::new(len);

        let mut dst = buffer.data.as_ptr();
        for item in iterator {
            // no per-item reserve: the upfront allocation covers the trusted length
            let src = item.to_byte_slice().as_ptr();
            std::ptr::copy_nonoverlapping(src, dst, item_size);
            dst = dst.add(item_size);
        }
        assert_eq!(
            dst.offset_from(buffer.data.as_ptr()) as usize,
            len,
            "Trusted iterator length was not accurately reported"
        );
        buffer.len = len;
        buffer
    }

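    /// Creates a [`MutableBuffer`] by packing an iterator of booleans into bits.
    ///
    /// # Safety
    /// The iterator's `size_hint` upper bound must exist and be exact.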
    #[inline]
    pub unsafe fn from_trusted_len_iter_bool<I: Iterator<Item = bool>>(mut iterator: I) -> Self {
        let (_, upper) = iterator.size_hint();
        let len = upper.expect("from_trusted_len_iter requires an upper limit");

        Self::collect_bool(len, |_| iterator.next().unwrap())
    }

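    /// Fallible version of [`MutableBuffer::from_trusted_len_iter`]: returns
    /// the first error produced by the iterator, if any.
    ///
    /// # Safety
    /// The iterator's `size_hint` upper bound must exist and be exact.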
    #[inline]
    pub unsafe fn try_from_trusted_len_iter<
        E,
        T: ArrowNativeType,
        I: Iterator<Item = Result<T, E>>,
    >(
        iterator: I,
    ) -> Result<Self, E> {
        let item_size = std::mem::size_of::<T>();
        let (_, upper) = iterator.size_hint();
        let upper = upper.expect("try_from_trusted_len_iter requires an upper limit");
        let len = upper * item_size;

        let mut buffer = MutableBuffer::new(len);

        let mut dst = buffer.data.as_ptr();
        for item in iterator {
            let item = item?;
            let src = item.to_byte_slice().as_ptr();
            std::ptr::copy_nonoverlapping(src, dst, item_size);
            dst = dst.add(item_size);
        }
        // the finalization lives in a separate non-generic function, so it is
        // instantiated once rather than per `T`/`E` combination
        unsafe fn finalize_buffer(dst: *mut u8, buffer: &mut MutableBuffer, len: usize) {
            assert_eq!(
                dst.offset_from(buffer.data.as_ptr()) as usize,
                len,
                "Trusted iterator length was not accurately reported"
            );
            buffer.len = len;
        }
        finalize_buffer(dst, &mut buffer, len);
        Ok(buffer)
    }
}

impl Default for MutableBuffer {
    fn default() -> Self {
        Self::with_capacity(0)
    }
}

impl std::ops::Deref for MutableBuffer {
    type Target = [u8];

    fn deref(&self) -> &[u8] {
        unsafe { std::slice::from_raw_parts(self.as_ptr(), self.len) }
    }
}

impl std::ops::DerefMut for MutableBuffer {
    fn deref_mut(&mut self) -> &mut [u8] {
        unsafe { std::slice::from_raw_parts_mut(self.as_mut_ptr(), self.len) }
    }
}

impl Drop for MutableBuffer {
    fn drop(&mut self) {
        if self.layout.size() != 0 {
            // SAFETY: data was allocated with this layout; zero-size buffers
            // use a dangling pointer and are never deallocated
            unsafe { std::alloc::dealloc(self.data.as_ptr() as _, self.layout) };
        }
    }
}

impl PartialEq for MutableBuffer {
    fn eq(&self, other: &MutableBuffer) -> bool {
        if self.len != other.len {
            return false;
        }
        // note: equality also requires the layouts (i.e. capacities) to match
        if self.layout != other.layout {
            return false;
        }
        self.as_slice() == other.as_slice()
    }
}

// SAFETY: MutableBuffer uniquely owns its allocation, so it can be sent and
// shared across threads like a Vec<u8>
unsafe impl Sync for MutableBuffer {}
unsafe impl Send for MutableBuffer {}

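/// Writes a locally accumulated length back to a `MutableBuffer`'s `len` field
/// on drop, so the buffer stays consistent even if an iterator panics while
/// extending (mirrors the helper used by `Vec`).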
struct SetLenOnDrop<'a> {
    len: &'a mut usize,
    local_len: usize,
}

impl<'a> SetLenOnDrop<'a> {
    #[inline]
    fn new(len: &'a mut usize) -> Self {
        SetLenOnDrop {
            local_len: *len,
            len,
        }
    }
}

impl Drop for SetLenOnDrop<'_> {
    #[inline]
    fn drop(&mut self) {
        *self.len = self.local_len;
    }
}

impl std::iter::FromIterator<bool> for MutableBuffer {
    fn from_iter<I>(iter: I) -> Self
    where
        I: IntoIterator<Item = bool>,
    {
        let mut iterator = iter.into_iter();
        let mut result = {
            let byte_capacity: usize = iterator.size_hint().0.saturating_add(7) / 8;
            MutableBuffer::new(byte_capacity)
        };

        loop {
            let mut exhausted = false;
            let mut byte_accum: u8 = 0;
            let mut mask: u8 = 1;

            // collect (up to) 8 bits into a byte
            while mask != 0 {
                if let Some(value) = iterator.next() {
                    byte_accum |= match value {
                        true => mask,
                        false => 0,
                    };
                    mask <<= 1;
                } else {
                    exhausted = true;
                    break;
                }
            }

            // break if the iterator was exhausted before producing any bit for this byte
            if exhausted && mask == 1 {
                break;
            }

            // ensure there is room for the new byte, trusting the size hint for the rest
            if result.len() == result.capacity() {
                let additional_byte_capacity = 1usize.saturating_add(
                    iterator.size_hint().0.saturating_add(7) / 8, // convert bits to bytes, rounding up
                );
                result.reserve(additional_byte_capacity)
            }

            // SAFETY: capacity was ensured above
            unsafe { result.push_unchecked(byte_accum) };
            if exhausted {
                break;
            }
        }
        result
    }
}

impl<T: ArrowNativeType> std::iter::FromIterator<T> for MutableBuffer {
    fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
        let mut buffer = Self::default();
        buffer.extend_from_iter(iter.into_iter());
        buffer
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_mutable_new() {
        let buf = MutableBuffer::new(63);
        assert_eq!(64, buf.capacity());
        assert_eq!(0, buf.len());
        assert!(buf.is_empty());
    }

    #[test]
    fn test_mutable_default() {
        let buf = MutableBuffer::default();
        assert_eq!(0, buf.capacity());
        assert_eq!(0, buf.len());
        assert!(buf.is_empty());

        let mut buf = MutableBuffer::default();
        buf.extend_from_slice(b"hello");
        assert_eq!(5, buf.len());
        assert_eq!(b"hello", buf.as_slice());
    }

    #[test]
    fn test_mutable_extend_from_slice() {
        let mut buf = MutableBuffer::new(100);
        buf.extend_from_slice(b"hello");
        assert_eq!(5, buf.len());
        assert_eq!(b"hello", buf.as_slice());

        buf.extend_from_slice(b" world");
        assert_eq!(11, buf.len());
        assert_eq!(b"hello world", buf.as_slice());

        buf.clear();
        assert_eq!(0, buf.len());
        buf.extend_from_slice(b"hello arrow");
        assert_eq!(11, buf.len());
        assert_eq!(b"hello arrow", buf.as_slice());
    }

    #[test]
    fn mutable_extend_from_iter() {
        let mut buf = MutableBuffer::new(0);
        buf.extend(vec![1u32, 2]);
        assert_eq!(8, buf.len());
        assert_eq!(&[1u8, 0, 0, 0, 2, 0, 0, 0], buf.as_slice());

        buf.extend(vec![3u32, 4]);
        assert_eq!(16, buf.len());
        assert_eq!(
            &[1u8, 0, 0, 0, 2, 0, 0, 0, 3, 0, 0, 0, 4, 0, 0, 0],
            buf.as_slice()
        );
    }

    #[test]
    fn mutable_extend_from_iter_unaligned_u64() {
        let mut buf = MutableBuffer::new(16);
        buf.push(1_u8);
        buf.extend([1_u64]);
        assert_eq!(9, buf.len());
        assert_eq!(&[1u8, 1u8, 0, 0, 0, 0, 0, 0, 0], buf.as_slice());
    }

    #[test]
    fn mutable_extend_from_slice_unaligned_u64() {
        let mut buf = MutableBuffer::new(16);
        buf.extend_from_slice(&[1_u8]);
        buf.extend_from_slice(&[1_u64]);
        assert_eq!(9, buf.len());
        assert_eq!(&[1u8, 1u8, 0, 0, 0, 0, 0, 0, 0], buf.as_slice());
    }

    #[test]
    fn mutable_push_unaligned_u64() {
        let mut buf = MutableBuffer::new(16);
        buf.push(1_u8);
        buf.push(1_u64);
        assert_eq!(9, buf.len());
        assert_eq!(&[1u8, 1u8, 0, 0, 0, 0, 0, 0, 0], buf.as_slice());
    }

    #[test]
    fn mutable_push_unchecked_unaligned_u64() {
        let mut buf = MutableBuffer::new(16);
        unsafe {
            buf.push_unchecked(1_u8);
            buf.push_unchecked(1_u64);
        }
        assert_eq!(9, buf.len());
        assert_eq!(&[1u8, 1u8, 0, 0, 0, 0, 0, 0, 0], buf.as_slice());
    }

    #[test]
    fn test_from_trusted_len_iter() {
        let iter = vec![1u32, 2].into_iter();
        let buf = unsafe { MutableBuffer::from_trusted_len_iter(iter) };
        assert_eq!(8, buf.len());
        assert_eq!(&[1u8, 0, 0, 0, 2, 0, 0, 0], buf.as_slice());
    }

    #[test]
    fn test_mutable_reserve() {
        let mut buf = MutableBuffer::new(1);
        assert_eq!(64, buf.capacity());

        // reserving within the existing capacity is a no-op
        buf.reserve(10);
        assert_eq!(64, buf.capacity());

        buf.reserve(80);
        assert_eq!(128, buf.capacity());

        buf.reserve(129);
        assert_eq!(256, buf.capacity());
    }

    #[test]
    fn test_mutable_resize() {
        let mut buf = MutableBuffer::new(1);
        assert_eq!(64, buf.capacity());
        assert_eq!(0, buf.len());

        buf.resize(20, 0);
        assert_eq!(64, buf.capacity());
        assert_eq!(20, buf.len());

        buf.resize(10, 0);
        assert_eq!(64, buf.capacity());
        assert_eq!(10, buf.len());

        buf.resize(100, 0);
        assert_eq!(128, buf.capacity());
        assert_eq!(100, buf.len());

        buf.resize(30, 0);
        assert_eq!(128, buf.capacity());
        assert_eq!(30, buf.len());

        buf.resize(0, 0);
        assert_eq!(128, buf.capacity());
        assert_eq!(0, buf.len());
    }

    #[test]
    fn test_mutable_into() {
        let mut buf = MutableBuffer::new(1);
        buf.extend_from_slice(b"aaaa bbbb cccc dddd");
        assert_eq!(19, buf.len());
        assert_eq!(64, buf.capacity());
        assert_eq!(b"aaaa bbbb cccc dddd", buf.as_slice());

        let immutable_buf: Buffer = buf.into();
        assert_eq!(19, immutable_buf.len());
        assert_eq!(64, immutable_buf.capacity());
        assert_eq!(b"aaaa bbbb cccc dddd", immutable_buf.as_slice());
    }

    #[test]
    fn test_mutable_equal() {
        let mut buf = MutableBuffer::new(1);
        let mut buf2 = MutableBuffer::new(1);

        buf.extend_from_slice(&[0xaa]);
        buf2.extend_from_slice(&[0xaa, 0xbb]);
        assert!(buf != buf2);

        buf.extend_from_slice(&[0xbb]);
        assert_eq!(buf, buf2);

        buf2.reserve(65);
        assert!(buf != buf2);
    }

    #[test]
    fn test_mutable_shrink_to_fit() {
        let mut buffer = MutableBuffer::new(128);
        assert_eq!(buffer.capacity(), 128);
        buffer.push(1);
        buffer.push(2);

        buffer.shrink_to_fit();
        assert!(buffer.capacity() >= 64 && buffer.capacity() < 128);
    }

    #[test]
    fn test_mutable_set_null_bits() {
        let mut buffer = MutableBuffer::new(8).with_bitset(8, true);

        for i in 0..=buffer.capacity() {
            buffer.set_null_bits(i, 0);
            assert_eq!(buffer[..8], [255; 8][..]);
        }

        buffer.set_null_bits(1, 4);
        assert_eq!(buffer[..8], [255, 0, 0, 0, 0, 255, 255, 255][..]);
    }

    #[test]
    #[should_panic = "out of bounds for buffer of length"]
    fn test_mutable_set_null_bits_oob() {
        let mut buffer = MutableBuffer::new(64);
        buffer.set_null_bits(1, buffer.capacity());
    }

    #[test]
    #[should_panic = "out of bounds for buffer of length"]
    fn test_mutable_set_null_bits_oob_by_overflow() {
        let mut buffer = MutableBuffer::new(0);
        buffer.set_null_bits(1, usize::MAX);
    }

    #[test]
    fn from_iter() {
        let buffer = [1u16, 2, 3, 4].into_iter().collect::<MutableBuffer>();
        assert_eq!(buffer.len(), 4 * mem::size_of::<u16>());
        assert_eq!(buffer.as_slice(), &[1, 0, 2, 0, 3, 0, 4, 0]);
    }

    #[test]
    #[should_panic(expected = "failed to create layout for MutableBuffer: LayoutError")]
    fn test_with_capacity_panics_above_max_capacity() {
        let max_capacity = isize::MAX as usize - (isize::MAX as usize % ALIGNMENT);
        let _ = MutableBuffer::with_capacity(max_capacity + 1);
    }

    #[cfg(feature = "pool")]
    mod pool_tests {
        use super::*;
        use crate::pool::{MemoryPool, TrackingMemoryPool};

        #[test]
        fn test_reallocate_with_pool() {
            let pool = TrackingMemoryPool::default();
            let mut buffer = MutableBuffer::with_capacity(100);
            buffer.claim(&pool);

            // capacity is rounded up to a multiple of 64 and tracked by the pool
            assert_eq!(buffer.capacity(), 128);
            assert_eq!(pool.used(), 128);

            // growing reallocates; the reservation follows the new capacity
            buffer.reallocate(200);
            assert_eq!(buffer.capacity(), 200);
            assert_eq!(pool.used(), 200);

            // shrinking also resizes the reservation
            buffer.reallocate(50);
            assert_eq!(buffer.capacity(), 50);
            assert_eq!(pool.used(), 50);
        }

        #[test]
        fn test_truncate_with_pool() {
            let pool = TrackingMemoryPool::default();
            let mut buffer = MutableBuffer::with_capacity(100);

            buffer.resize(80, 1);
            assert_eq!(buffer.len(), 80);

            // claiming reserves the full capacity
            buffer.claim(&pool);
            assert_eq!(pool.used(), 128);

            // truncation resizes the reservation down to the new length
            buffer.truncate(40);
            assert_eq!(buffer.len(), 40);
            assert_eq!(pool.used(), 40);

            buffer.truncate(0);
            assert_eq!(buffer.len(), 0);
            assert_eq!(pool.used(), 0);
        }

        #[test]
        fn test_resize_with_pool() {
            let pool = TrackingMemoryPool::default();
            let mut buffer = MutableBuffer::with_capacity(100);
            buffer.claim(&pool);

            assert_eq!(buffer.len(), 0);
            assert_eq!(pool.used(), 128);

            // resizing within capacity tracks the new length
            buffer.resize(50, 1);
            assert_eq!(buffer.len(), 50);
            assert_eq!(pool.used(), 50);

            // growing beyond capacity reallocates, then tracks the new length
            buffer.resize(150, 1);
            assert_eq!(buffer.len(), 150);
            assert_eq!(buffer.capacity(), 256);
            assert_eq!(pool.used(), 150);

            buffer.resize(30, 1);
            assert_eq!(buffer.len(), 30);
            assert_eq!(pool.used(), 30);
        }

        #[test]
        fn test_buffer_lifecycle_with_pool() {
            let pool = TrackingMemoryPool::default();

            let mut mutable = MutableBuffer::with_capacity(100);
            mutable.resize(80, 1);
            mutable.claim(&pool);
            assert_eq!(pool.used(), 128);

            // freezing into an immutable Buffer carries the reservation over
            let buffer = mutable.into_buffer();
            assert_eq!(pool.used(), 128);

            // dropping the Buffer releases the reservation
            drop(buffer);
            assert_eq!(pool.used(), 0);
        }
    }
}