use std::alloc::{Layout, handle_alloc_error};
use std::mem;
use std::ptr::NonNull;

use crate::alloc::{ALIGNMENT, Deallocation};
use crate::{
    bytes::Bytes,
    native::{ArrowNativeType, ToByteSlice},
    util::bit_util,
};

#[cfg(feature = "pool")]
use crate::pool::{MemoryPool, MemoryReservation};
#[cfg(feature = "pool")]
use std::sync::Mutex;

use super::Buffer;
/// A [`MutableBuffer`] is a contiguous, growable region of memory aligned to
/// `ALIGNMENT`, used to build up data before freezing it into an immutable
/// [`Buffer`].
#[derive(Debug)]
pub struct MutableBuffer {
    /// Pointer to the start of the allocation (dangling if capacity is 0).
    data: NonNull<u8>,
    /// Number of initialized bytes.
    len: usize,
    /// Layout of the allocation; `layout.size()` is the capacity.
    layout: Layout,

    /// Optional reservation in a [`MemoryPool`] tracking this allocation.
    #[cfg(feature = "pool")]
    reservation: Mutex<Option<Box<dyn MemoryReservation>>>,
}

impl MutableBuffer {
    /// Allocate a new [`MutableBuffer`] with an initial capacity of at least
    /// `capacity` bytes. Alias of [`MutableBuffer::with_capacity`].
    #[inline]
    pub fn new(capacity: usize) -> Self {
        Self::with_capacity(capacity)
    }

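    /// Allocates a new [`MutableBuffer`] with `capacity` rounded up to the
    /// nearest multiple of 64 bytes.
    ///
    /// A usage sketch mirroring `test_mutable_new` below; the
    /// `arrow_buffer::MutableBuffer` re-export path is assumed:
    ///
    /// ```
    /// use arrow_buffer::MutableBuffer;
    ///
    /// let buffer = MutableBuffer::with_capacity(63);
    /// assert_eq!(buffer.capacity(), 64); // rounded up to a multiple of 64
    /// assert!(buffer.is_empty());
    /// ```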
    #[inline]
    pub fn with_capacity(capacity: usize) -> Self {
        let capacity = bit_util::round_upto_multiple_of_64(capacity);
        let layout = Layout::from_size_align(capacity, ALIGNMENT)
            .expect("failed to create layout for MutableBuffer");
        let data = match layout.size() {
            0 => dangling_ptr(),
            _ => {
                let raw_ptr = unsafe { std::alloc::alloc(layout) };
                NonNull::new(raw_ptr).unwrap_or_else(|| handle_alloc_error(layout))
            }
        };
        Self {
            data,
            len: 0,
            layout,
            #[cfg(feature = "pool")]
            reservation: std::sync::Mutex::new(None),
        }
    }

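    /// Allocates a [`MutableBuffer`] of `len` zeroed bytes; unlike
    /// [`MutableBuffer::with_capacity`], `len` is not rounded up, and the
    /// buffer's length is set to `len` rather than 0.
    ///
    /// A hedged sketch of the expected behavior:
    ///
    /// ```
    /// use arrow_buffer::MutableBuffer;
    ///
    /// let buffer = MutableBuffer::from_len_zeroed(5);
    /// assert_eq!(buffer.len(), 5);
    /// assert_eq!(buffer.as_slice(), &[0u8; 5]);
    /// ```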
    pub fn from_len_zeroed(len: usize) -> Self {
        let layout = Layout::from_size_align(len, ALIGNMENT).unwrap();
        let data = match layout.size() {
            0 => dangling_ptr(),
            _ => {
                let raw_ptr = unsafe { std::alloc::alloc_zeroed(layout) };
                NonNull::new(raw_ptr).unwrap_or_else(|| handle_alloc_error(layout))
            }
        };
        Self {
            data,
            len,
            layout,
            #[cfg(feature = "pool")]
            reservation: std::sync::Mutex::new(None),
        }
    }

    /// Attempts to reclaim `bytes` as a [`MutableBuffer`] without copying,
    /// returning `Err(bytes)` if its allocation was not made by the standard
    /// allocator (and so cannot be resized here).
    pub(crate) fn from_bytes(bytes: Bytes) -> Result<Self, Bytes> {
        let layout = match bytes.deallocation() {
            Deallocation::Standard(layout) => *layout,
            _ => return Err(bytes),
        };

        let len = bytes.len();
        let data = bytes.ptr();
        #[cfg(feature = "pool")]
        let reservation = bytes.reservation.lock().unwrap().take();
        // Ownership of the allocation moves to the new `MutableBuffer`.
        mem::forget(bytes);

        Ok(Self {
            data,
            len,
            layout,
            #[cfg(feature = "pool")]
            reservation: Mutex::new(reservation),
        })
    }

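    /// Allocates a zeroed buffer big enough to hold `len` *bits*, i.e.
    /// `ceil(len / 8)` bytes, as used for an all-null validity bitmap.
    ///
    /// A hedged sketch of the expected behavior:
    ///
    /// ```
    /// use arrow_buffer::MutableBuffer;
    ///
    /// let buffer = MutableBuffer::new_null(10); // 10 bits -> 2 bytes
    /// assert_eq!(buffer.len(), 2);
    /// assert_eq!(buffer.as_slice(), &[0u8, 0]);
    /// ```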
    pub fn new_null(len: usize) -> Self {
        let num_bytes = bit_util::ceil(len, 8);
        MutableBuffer::from_len_zeroed(num_bytes)
    }

    /// Sets the first `end` bytes to all-ones (`val == true`) or all-zeros
    /// (`val == false`) and sets the length to `end`.
    ///
    /// # Panics
    ///
    /// Panics if `end` exceeds the buffer's capacity.
    pub fn with_bitset(mut self, end: usize, val: bool) -> Self {
        assert!(end <= self.layout.size());
        let v = if val { 255 } else { 0 };
        unsafe {
            std::ptr::write_bytes(self.data.as_ptr(), v, end);
            self.len = end;
        }
        self
    }

    /// Zeroes `count` bytes starting at `start`, panicking if the range is
    /// out of bounds for the buffer's capacity.
    pub fn set_null_bits(&mut self, start: usize, count: usize) {
        assert!(
            start.saturating_add(count) <= self.layout.size(),
            "range start index {start} and count {count} out of bounds for \
            buffer of length {}",
            self.layout.size(),
        );

        // SAFETY: the assert above guarantees the range is within the allocation.
        unsafe {
            std::ptr::write_bytes(self.data.as_ptr().add(start), 0, count);
        }
    }

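    /// Ensures there is capacity for at least `additional` more bytes,
    /// growing to the larger of the 64-byte-rounded requirement and double
    /// the current capacity.
    ///
    /// A sketch mirroring `test_mutable_reserve` below:
    ///
    /// ```
    /// use arrow_buffer::MutableBuffer;
    ///
    /// let mut buffer = MutableBuffer::new(1);
    /// assert_eq!(buffer.capacity(), 64);
    /// buffer.reserve(80); // rounding and doubling both give 128
    /// assert_eq!(buffer.capacity(), 128);
    /// ```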
    #[inline(always)]
    pub fn reserve(&mut self, additional: usize) {
        let required_cap = self.len + additional;
        if required_cap > self.layout.size() {
            let new_capacity = bit_util::round_upto_multiple_of_64(required_cap);
            let new_capacity = std::cmp::max(new_capacity, self.layout.size() * 2);
            self.reallocate(new_capacity)
        }
    }

    /// Reallocates to exactly `capacity` bytes (growing or shrinking) and
    /// resizes any pool reservation to match.
    #[cold]
    fn reallocate(&mut self, capacity: usize) {
        let new_layout = Layout::from_size_align(capacity, self.layout.align()).unwrap();
        if new_layout.size() == 0 {
            if self.layout.size() != 0 {
                // Drop the old allocation entirely rather than realloc to 0.
                unsafe { std::alloc::dealloc(self.as_mut_ptr(), self.layout) };
                self.layout = new_layout
            }
            return;
        }

        let data = match self.layout.size() {
            0 => unsafe { std::alloc::alloc(new_layout) },
            _ => unsafe { std::alloc::realloc(self.as_mut_ptr(), self.layout, capacity) },
        };
        self.data = NonNull::new(data).unwrap_or_else(|| handle_alloc_error(new_layout));
        self.layout = new_layout;
        #[cfg(feature = "pool")]
        {
            if let Some(reservation) = self.reservation.lock().unwrap().as_mut() {
                reservation.resize(self.layout.size());
            }
        }
    }

    /// Shortens the buffer to `len` bytes; does nothing if `len` is greater
    /// than the current length. The capacity is unaffected, but any pool
    /// reservation is resized down.
    #[inline(always)]
    pub fn truncate(&mut self, len: usize) {
        if len > self.len {
            return;
        }
        self.len = len;
        #[cfg(feature = "pool")]
        {
            if let Some(reservation) = self.reservation.lock().unwrap().as_mut() {
                reservation.resize(self.len);
            }
        }
    }

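    /// Resizes the buffer to `new_len` bytes, filling newly added bytes with
    /// `value`. Shrinking only reduces the length; capacity is kept.
    ///
    /// A sketch mirroring `test_mutable_resize` below:
    ///
    /// ```
    /// use arrow_buffer::MutableBuffer;
    ///
    /// let mut buffer = MutableBuffer::new(1);
    /// buffer.resize(100, 0); // grows past the initial 64-byte capacity
    /// assert_eq!((buffer.len(), buffer.capacity()), (100, 128));
    ///
    /// buffer.resize(30, 0); // shrinks the length, not the capacity
    /// assert_eq!((buffer.len(), buffer.capacity()), (30, 128));
    /// ```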
    #[inline(always)]
    pub fn resize(&mut self, new_len: usize, value: u8) {
        if new_len > self.len {
            let diff = new_len - self.len;
            self.reserve(diff);
            unsafe { self.data.as_ptr().add(self.len).write_bytes(value, diff) };
        }
        self.len = new_len;
        #[cfg(feature = "pool")]
        {
            if let Some(reservation) = self.reservation.lock().unwrap().as_mut() {
                reservation.resize(self.len);
            }
        }
    }

    /// Shrinks the capacity to the smallest multiple of 64 bytes that still
    /// holds `len` bytes, reallocating if that is smaller than the current
    /// capacity.
    pub fn shrink_to_fit(&mut self) {
        let new_capacity = bit_util::round_upto_multiple_of_64(self.len);
        if new_capacity < self.layout.size() {
            self.reallocate(new_capacity)
        }
    }

    /// Returns whether the buffer is empty.
    #[inline]
    pub const fn is_empty(&self) -> bool {
        self.len == 0
    }

    /// Returns the length (number of initialized bytes) of the buffer.
    #[inline]
    pub const fn len(&self) -> usize {
        self.len
    }

    /// Returns the total capacity of the buffer in bytes.
    #[inline]
    pub const fn capacity(&self) -> usize {
        self.layout.size()
    }

    /// Clears the buffer, setting its length to 0 without touching capacity.
    pub fn clear(&mut self) {
        self.len = 0
    }

    /// Returns the initialized bytes as a slice.
    pub fn as_slice(&self) -> &[u8] {
        self
    }

    /// Returns the initialized bytes as a mutable slice.
    pub fn as_slice_mut(&mut self) -> &mut [u8] {
        self
    }

    /// Returns a raw pointer to the start of the buffer.
    #[inline]
    pub const fn as_ptr(&self) -> *const u8 {
        self.data.as_ptr()
    }

    /// Returns a mutable raw pointer to the start of the buffer.
    #[inline]
    pub fn as_mut_ptr(&mut self) -> *mut u8 {
        self.data.as_ptr()
    }

    /// Freezes this buffer into an immutable [`Buffer`] without copying,
    /// transferring ownership of the allocation (and any pool reservation).
    #[inline]
    pub(super) fn into_buffer(self) -> Buffer {
        let bytes = unsafe { Bytes::new(self.data, self.len, Deallocation::Standard(self.layout)) };
        #[cfg(feature = "pool")]
        {
            let reservation = self.reservation.lock().unwrap().take();
            *bytes.reservation.lock().unwrap() = reservation;
        }
        // `Bytes` now owns the allocation; don't run `MutableBuffer::drop`.
        std::mem::forget(self);
        Buffer::from(bytes)
    }

    /// Views the buffer's contents as a mutable slice of `T`, panicking if
    /// the length is not a multiple of `size_of::<T>()`.
    pub fn typed_data_mut<T: ArrowNativeType>(&mut self) -> &mut [T] {
        // SAFETY: `ArrowNativeType` is plain-old-data, valid for any bit pattern.
        let (prefix, offsets, suffix) = unsafe { self.as_slice_mut().align_to_mut::<T>() };
        assert!(prefix.is_empty() && suffix.is_empty());
        offsets
    }

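    /// Views the buffer's contents as a slice of `T`, panicking if the
    /// length is not a multiple of `size_of::<T>()`.
    ///
    /// A hedged sketch: the allocation is aligned to `ALIGNMENT`, so a
    /// buffer built purely from `u32` values reinterprets cleanly:
    ///
    /// ```
    /// use arrow_buffer::MutableBuffer;
    ///
    /// let mut buffer = MutableBuffer::new(0);
    /// buffer.extend_from_slice(&[1u32, 2, 3]);
    /// assert_eq!(buffer.typed_data::<u32>(), &[1, 2, 3]);
    /// ```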
    pub fn typed_data<T: ArrowNativeType>(&self) -> &[T] {
        // SAFETY: `ArrowNativeType` is plain-old-data, valid for any bit pattern.
        let (prefix, offsets, suffix) = unsafe { self.as_slice().align_to::<T>() };
        assert!(prefix.is_empty() && suffix.is_empty());
        offsets
    }

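    /// Appends the raw bytes of `items` to the buffer, reserving capacity
    /// first.
    ///
    /// A sketch mirroring `test_mutable_extend_from_slice` below:
    ///
    /// ```
    /// use arrow_buffer::MutableBuffer;
    ///
    /// let mut buffer = MutableBuffer::new(0);
    /// buffer.extend_from_slice(b"hello");
    /// assert_eq!(buffer.len(), 5);
    /// assert_eq!(buffer.as_slice(), b"hello");
    /// ```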
    #[inline]
    pub fn extend_from_slice<T: ArrowNativeType>(&mut self, items: &[T]) {
        let additional = mem::size_of_val(items);
        self.reserve(additional);
        unsafe {
            let src = items.as_ptr() as *const u8;
            let dst = self.data.as_ptr().add(self.len);
            std::ptr::copy_nonoverlapping(src, dst, additional)
        }
        self.len += additional;
    }

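    /// Appends the byte representation of a single `item`, reserving
    /// capacity first. Values are packed back to back, so later writes need
    /// not be aligned for their type.
    ///
    /// A sketch mirroring `mutable_push_unaligned_u64` below:
    ///
    /// ```
    /// use arrow_buffer::MutableBuffer;
    ///
    /// let mut buffer = MutableBuffer::new(16);
    /// buffer.push(1_u8);
    /// buffer.push(1_u64); // written at offset 1, unaligned for u64
    /// assert_eq!(buffer.len(), 9);
    /// ```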
    #[inline]
    pub fn push<T: ToByteSlice>(&mut self, item: T) {
        let additional = std::mem::size_of::<T>();
        self.reserve(additional);
        unsafe {
            let src = item.to_byte_slice().as_ptr();
            let dst = self.data.as_ptr().add(self.len);
            std::ptr::copy_nonoverlapping(src, dst, additional);
        }
        self.len += additional;
    }

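    /// Like [`MutableBuffer::push`], but without checking or growing the
    /// capacity first.
    ///
    /// # Safety
    ///
    /// The caller must have already reserved at least `size_of::<T>()` spare
    /// bytes (e.g. via [`MutableBuffer::reserve`]), otherwise the write is
    /// out of bounds.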
    #[inline]
    pub unsafe fn push_unchecked<T: ToByteSlice>(&mut self, item: T) {
        let additional = std::mem::size_of::<T>();
        let src = item.to_byte_slice().as_ptr();
        let dst = unsafe { self.data.as_ptr().add(self.len) };
        unsafe { std::ptr::copy_nonoverlapping(src, dst, additional) };
        self.len += additional;
    }

    /// Appends `additional` zeroed bytes to the buffer.
    #[inline]
    pub fn extend_zeros(&mut self, additional: usize) {
        self.resize(self.len + additional, 0);
    }

    /// Sets the buffer's length directly.
    ///
    /// # Safety
    ///
    /// The first `len` bytes must have been initialized; the assert only
    /// checks that `len` does not exceed the capacity.
    #[inline]
    pub unsafe fn set_len(&mut self, len: usize) {
        assert!(len <= self.capacity());
        self.len = len;
    }

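    /// Packs `len` booleans, produced by calling `f` on each index in
    /// `0..len`, into a bitmap 64 bits at a time, then truncates the buffer
    /// to the `ceil(len / 8)` bytes actually needed.
    ///
    /// A hedged sketch; the byte value assumes the LSB-first, little-endian
    /// packing this function uses:
    ///
    /// ```
    /// use arrow_buffer::MutableBuffer;
    ///
    /// let buffer = MutableBuffer::collect_bool(8, |i| i % 2 == 0);
    /// assert_eq!(buffer.len(), 1); // 8 bits -> 1 byte
    /// assert_eq!(buffer.as_slice(), &[0b01010101]); // bits 0, 2, 4, 6 set
    /// ```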
    #[inline]
    pub fn collect_bool<F: FnMut(usize) -> bool>(len: usize, mut f: F) -> Self {
        // Allocate in whole 64-bit words, then truncate to the bytes needed.
        let mut buffer = Self::new(bit_util::ceil(len, 64) * 8);

        let chunks = len / 64;
        let remainder = len % 64;
        for chunk in 0..chunks {
            let mut packed = 0;
            for bit_idx in 0..64 {
                let i = bit_idx + chunk * 64;
                packed |= (f(i) as u64) << bit_idx;
            }

            // SAFETY: the buffer was pre-sized to hold every full chunk.
            unsafe { buffer.push_unchecked(packed) }
        }

        if remainder != 0 {
            let mut packed = 0;
            for bit_idx in 0..remainder {
                let i = bit_idx + chunks * 64;
                packed |= (f(i) as u64) << bit_idx;
            }

            // SAFETY: the buffer was pre-sized to hold the trailing chunk.
            unsafe { buffer.push_unchecked(packed) }
        }

        buffer.truncate(bit_util::ceil(len, 8));
        buffer
    }

    /// Registers this buffer with a [`MemoryPool`], replacing any existing
    /// reservation with one sized to the current capacity.
    #[cfg(feature = "pool")]
    pub fn claim(&self, pool: &dyn MemoryPool) {
        *self.reservation.lock().unwrap() = Some(pool.reserve(self.capacity()));
    }
}

/// Returns a dangling, well-aligned pointer for zero-size allocations.
#[inline]
pub(crate) fn dangling_ptr() -> NonNull<u8> {
    #[cfg(miri)]
    {
        // Miri requires a pointer constructed with explicit (lack of) provenance.
        unsafe { NonNull::new_unchecked(std::ptr::without_provenance_mut(ALIGNMENT)) }
    }
    #[cfg(not(miri))]
    {
        unsafe { NonNull::new_unchecked(ALIGNMENT as *mut u8) }
    }
}

impl<A: ArrowNativeType> Extend<A> for MutableBuffer {
    #[inline]
    fn extend<T: IntoIterator<Item = A>>(&mut self, iter: T) {
        let iterator = iter.into_iter();
        self.extend_from_iter(iterator)
    }
}

impl<T: ArrowNativeType> From<Vec<T>> for MutableBuffer {
    fn from(value: Vec<T>) -> Self {
        // SAFETY: a `Vec`'s pointer is always non-null.
        let data = unsafe { NonNull::new_unchecked(value.as_ptr() as _) };
        let len = value.len() * mem::size_of::<T>();
        // SAFETY: this layout must have succeeded for the `Vec` to exist.
        let layout = unsafe { Layout::array::<T>(value.capacity()).unwrap_unchecked() };
        // Ownership of the allocation moves to the `MutableBuffer`.
        mem::forget(value);
        Self {
            data,
            len,
            layout,
            #[cfg(feature = "pool")]
            reservation: std::sync::Mutex::new(None),
        }
    }
}

impl MutableBuffer {
    /// Appends items from `iterator`, writing directly into capacity
    /// reserved from the iterator's lower size hint, then falling back to
    /// `push` for any items beyond it.
    #[inline]
    pub(super) fn extend_from_iter<T: ArrowNativeType, I: Iterator<Item = T>>(
        &mut self,
        mut iterator: I,
    ) {
        let item_size = std::mem::size_of::<T>();
        let (lower, _) = iterator.size_hint();
        let additional = lower * item_size;
        self.reserve(additional);

        // Write back `len` even if `iterator.next()` panics part-way through.
        let mut len = SetLenOnDrop::new(&mut self.len);
        let mut dst = unsafe { self.data.as_ptr().add(len.local_len) };
        let capacity = self.layout.size();

        while len.local_len + item_size <= capacity {
            if let Some(item) = iterator.next() {
                unsafe {
                    let src = item.to_byte_slice().as_ptr();
                    std::ptr::copy_nonoverlapping(src, dst, item_size);
                    dst = dst.add(item_size);
                }
                len.local_len += item_size;
            } else {
                break;
            }
        }
        drop(len);

        // Slow path for items the size hint did not account for.
        iterator.for_each(|item| self.push(item));
    }

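    /// Creates a [`MutableBuffer`] from an iterator of `T` in a single
    /// pre-sized allocation.
    ///
    /// # Safety
    ///
    /// `iterator.size_hint()` must report an exact upper bound, and the
    /// iterator must yield exactly that many items.
    ///
    /// A sketch mirroring `test_from_trusted_len_iter` below:
    ///
    /// ```
    /// use arrow_buffer::MutableBuffer;
    ///
    /// let iter = vec![1u32, 2].into_iter(); // Vec iterators have exact hints
    /// let buffer = unsafe { MutableBuffer::from_trusted_len_iter(iter) };
    /// assert_eq!(buffer.len(), 8); // two 4-byte values
    /// ```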
    #[inline]
    pub unsafe fn from_trusted_len_iter<T: ArrowNativeType, I: Iterator<Item = T>>(
        iterator: I,
    ) -> Self {
        let item_size = std::mem::size_of::<T>();
        let (_, upper) = iterator.size_hint();
        let upper = upper.expect("from_trusted_len_iter requires an upper limit");
        let len = upper * item_size;

        let mut buffer = MutableBuffer::new(len);

        let mut dst = buffer.data.as_ptr();
        for item in iterator {
            let src = item.to_byte_slice().as_ptr();
            unsafe { std::ptr::copy_nonoverlapping(src, dst, item_size) };
            dst = unsafe { dst.add(item_size) };
        }
        assert_eq!(
            unsafe { dst.offset_from(buffer.data.as_ptr()) } as usize,
            len,
            "Trusted iterator length was not accurately reported"
        );
        buffer.len = len;
        buffer
    }

    /// Packs a boolean iterator into a bitmap, as [`MutableBuffer::collect_bool`].
    ///
    /// # Safety
    ///
    /// `iterator.size_hint()` must report an exact upper bound.
    #[inline]
    pub unsafe fn from_trusted_len_iter_bool<I: Iterator<Item = bool>>(mut iterator: I) -> Self {
        let (_, upper) = iterator.size_hint();
        let len = upper.expect("from_trusted_len_iter requires an upper limit");

        Self::collect_bool(len, |_| iterator.next().unwrap())
    }

    /// Fallible variant of [`MutableBuffer::from_trusted_len_iter`]: returns
    /// the first error yielded by the iterator, discarding the buffer.
    ///
    /// # Safety
    ///
    /// `iterator.size_hint()` must report an exact upper bound.
    #[inline]
    pub unsafe fn try_from_trusted_len_iter<
        E,
        T: ArrowNativeType,
        I: Iterator<Item = Result<T, E>>,
    >(
        iterator: I,
    ) -> Result<Self, E> {
        let item_size = std::mem::size_of::<T>();
        let (_, upper) = iterator.size_hint();
        let upper = upper.expect("try_from_trusted_len_iter requires an upper limit");
        let len = upper * item_size;

        let mut buffer = MutableBuffer::new(len);

        let mut dst = buffer.data.as_ptr();
        for item in iterator {
            let item = item?;
            let src = item.to_byte_slice().as_ptr();
            unsafe { std::ptr::copy_nonoverlapping(src, dst, item_size) };
            dst = unsafe { dst.add(item_size) };
        }
        unsafe fn finalize_buffer(dst: *mut u8, buffer: &mut MutableBuffer, len: usize) {
            unsafe {
                assert_eq!(
                    dst.offset_from(buffer.data.as_ptr()) as usize,
                    len,
                    "Trusted iterator length was not accurately reported"
                );
                buffer.len = len;
            }
        }
        unsafe { finalize_buffer(dst, &mut buffer, len) };
        Ok(buffer)
    }
}

impl Default for MutableBuffer {
    fn default() -> Self {
        Self::with_capacity(0)
    }
}

impl std::ops::Deref for MutableBuffer {
    type Target = [u8];

    fn deref(&self) -> &[u8] {
        // SAFETY: the first `len` bytes are always initialized.
        unsafe { std::slice::from_raw_parts(self.as_ptr(), self.len) }
    }
}

impl std::ops::DerefMut for MutableBuffer {
    fn deref_mut(&mut self) -> &mut [u8] {
        // SAFETY: the first `len` bytes are always initialized.
        unsafe { std::slice::from_raw_parts_mut(self.as_mut_ptr(), self.len) }
    }
}

impl Drop for MutableBuffer {
    fn drop(&mut self) {
        // Zero-sized buffers hold a dangling pointer, not an allocation.
        if self.layout.size() != 0 {
            unsafe { std::alloc::dealloc(self.data.as_ptr() as _, self.layout) };
        }
    }
}

impl PartialEq for MutableBuffer {
    fn eq(&self, other: &MutableBuffer) -> bool {
        if self.len != other.len {
            return false;
        }
        if self.layout != other.layout {
            return false;
        }
        self.as_slice() == other.as_slice()
    }
}

// SAFETY: `MutableBuffer` exclusively owns its allocation, and the pool
// reservation (when enabled) is behind a `Mutex`.
unsafe impl Sync for MutableBuffer {}
unsafe impl Send for MutableBuffer {}

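/// Writes a locally tracked length back to the buffer's `len` field on drop,
/// so that `extend_from_iter` leaves the buffer in a consistent state even if
/// the iterator panics partway through.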
struct SetLenOnDrop<'a> {
    len: &'a mut usize,
    local_len: usize,
}

impl<'a> SetLenOnDrop<'a> {
    #[inline]
    fn new(len: &'a mut usize) -> Self {
        SetLenOnDrop {
            local_len: *len,
            len,
        }
    }
}

impl Drop for SetLenOnDrop<'_> {
    #[inline]
    fn drop(&mut self) {
        *self.len = self.local_len;
    }
}

impl std::iter::FromIterator<bool> for MutableBuffer {
    fn from_iter<I>(iter: I) -> Self
    where
        I: IntoIterator<Item = bool>,
    {
        let mut iterator = iter.into_iter();
        let mut result = {
            let byte_capacity: usize = iterator.size_hint().0.saturating_add(7) / 8;
            MutableBuffer::new(byte_capacity)
        };

        loop {
            let mut exhausted = false;
            let mut byte_accum: u8 = 0;
            let mut mask: u8 = 1;

            // Pack up to 8 booleans into one byte.
            while mask != 0 {
                if let Some(value) = iterator.next() {
                    byte_accum |= match value {
                        true => mask,
                        false => 0,
                    };
                    mask <<= 1;
                } else {
                    exhausted = true;
                    break;
                }
            }

            // Exhausted before a single bit of this byte was set: don't push it.
            if exhausted && mask == 1 {
                break;
            }

            if result.len() == result.capacity() {
                let additional_byte_capacity = 1usize.saturating_add(
                    iterator.size_hint().0.saturating_add(7) / 8, // convert bits to bytes
                );
                result.reserve(additional_byte_capacity)
            }

            // SAFETY: capacity was checked (and grown) just above.
            unsafe { result.push_unchecked(byte_accum) };
            if exhausted {
                break;
            }
        }
        result
    }
}

impl<T: ArrowNativeType> std::iter::FromIterator<T> for MutableBuffer {
    fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
        let mut buffer = Self::default();
        buffer.extend_from_iter(iter.into_iter());
        buffer
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_mutable_new() {
        let buf = MutableBuffer::new(63);
        assert_eq!(64, buf.capacity());
        assert_eq!(0, buf.len());
        assert!(buf.is_empty());
    }

    #[test]
    fn test_mutable_default() {
        let buf = MutableBuffer::default();
        assert_eq!(0, buf.capacity());
        assert_eq!(0, buf.len());
        assert!(buf.is_empty());

        let mut buf = MutableBuffer::default();
        buf.extend_from_slice(b"hello");
        assert_eq!(5, buf.len());
        assert_eq!(b"hello", buf.as_slice());
    }

    #[test]
    fn test_mutable_extend_from_slice() {
        let mut buf = MutableBuffer::new(100);
        buf.extend_from_slice(b"hello");
        assert_eq!(5, buf.len());
        assert_eq!(b"hello", buf.as_slice());

        buf.extend_from_slice(b" world");
        assert_eq!(11, buf.len());
        assert_eq!(b"hello world", buf.as_slice());

        buf.clear();
        assert_eq!(0, buf.len());
        buf.extend_from_slice(b"hello arrow");
        assert_eq!(11, buf.len());
        assert_eq!(b"hello arrow", buf.as_slice());
    }

    #[test]
    fn mutable_extend_from_iter() {
        let mut buf = MutableBuffer::new(0);
        buf.extend(vec![1u32, 2]);
        assert_eq!(8, buf.len());
        assert_eq!(&[1u8, 0, 0, 0, 2, 0, 0, 0], buf.as_slice());

        buf.extend(vec![3u32, 4]);
        assert_eq!(16, buf.len());
        assert_eq!(
            &[1u8, 0, 0, 0, 2, 0, 0, 0, 3, 0, 0, 0, 4, 0, 0, 0],
            buf.as_slice()
        );
    }

    #[test]
    fn mutable_extend_from_iter_unaligned_u64() {
        let mut buf = MutableBuffer::new(16);
        buf.push(1_u8);
        buf.extend([1_u64]);
        assert_eq!(9, buf.len());
        assert_eq!(&[1u8, 1u8, 0, 0, 0, 0, 0, 0, 0], buf.as_slice());
    }

    #[test]
    fn mutable_extend_from_slice_unaligned_u64() {
        let mut buf = MutableBuffer::new(16);
        buf.extend_from_slice(&[1_u8]);
        buf.extend_from_slice(&[1_u64]);
        assert_eq!(9, buf.len());
        assert_eq!(&[1u8, 1u8, 0, 0, 0, 0, 0, 0, 0], buf.as_slice());
    }

    #[test]
    fn mutable_push_unaligned_u64() {
        let mut buf = MutableBuffer::new(16);
        buf.push(1_u8);
        buf.push(1_u64);
        assert_eq!(9, buf.len());
        assert_eq!(&[1u8, 1u8, 0, 0, 0, 0, 0, 0, 0], buf.as_slice());
    }

    #[test]
    fn mutable_push_unchecked_unaligned_u64() {
        let mut buf = MutableBuffer::new(16);
        unsafe {
            buf.push_unchecked(1_u8);
            buf.push_unchecked(1_u64);
        }
        assert_eq!(9, buf.len());
        assert_eq!(&[1u8, 1u8, 0, 0, 0, 0, 0, 0, 0], buf.as_slice());
    }

    #[test]
    fn test_from_trusted_len_iter() {
        let iter = vec![1u32, 2].into_iter();
        let buf = unsafe { MutableBuffer::from_trusted_len_iter(iter) };
        assert_eq!(8, buf.len());
        assert_eq!(&[1u8, 0, 0, 0, 2, 0, 0, 0], buf.as_slice());
    }

    #[test]
    fn test_mutable_reserve() {
        let mut buf = MutableBuffer::new(1);
        assert_eq!(64, buf.capacity());

        buf.reserve(10);
        assert_eq!(64, buf.capacity());

        buf.reserve(80);
        assert_eq!(128, buf.capacity());

        buf.reserve(129);
        assert_eq!(256, buf.capacity());
    }

    #[test]
    fn test_mutable_resize() {
        let mut buf = MutableBuffer::new(1);
        assert_eq!(64, buf.capacity());
        assert_eq!(0, buf.len());

        buf.resize(20, 0);
        assert_eq!(64, buf.capacity());
        assert_eq!(20, buf.len());

        buf.resize(10, 0);
        assert_eq!(64, buf.capacity());
        assert_eq!(10, buf.len());

        buf.resize(100, 0);
        assert_eq!(128, buf.capacity());
        assert_eq!(100, buf.len());

        buf.resize(30, 0);
        assert_eq!(128, buf.capacity());
        assert_eq!(30, buf.len());

        buf.resize(0, 0);
        assert_eq!(128, buf.capacity());
        assert_eq!(0, buf.len());
    }

    #[test]
    fn test_mutable_into() {
        let mut buf = MutableBuffer::new(1);
        buf.extend_from_slice(b"aaaa bbbb cccc dddd");
        assert_eq!(19, buf.len());
        assert_eq!(64, buf.capacity());
        assert_eq!(b"aaaa bbbb cccc dddd", buf.as_slice());

        let immutable_buf: Buffer = buf.into();
        assert_eq!(19, immutable_buf.len());
        assert_eq!(64, immutable_buf.capacity());
        assert_eq!(b"aaaa bbbb cccc dddd", immutable_buf.as_slice());
    }

    #[test]
    fn test_mutable_equal() {
        let mut buf = MutableBuffer::new(1);
        let mut buf2 = MutableBuffer::new(1);

        buf.extend_from_slice(&[0xaa]);
        buf2.extend_from_slice(&[0xaa, 0xbb]);
        assert!(buf != buf2);

        buf.extend_from_slice(&[0xbb]);
        assert_eq!(buf, buf2);

        buf2.reserve(65);
        assert!(buf != buf2);
    }

    #[test]
    fn test_mutable_shrink_to_fit() {
        let mut buffer = MutableBuffer::new(128);
        assert_eq!(buffer.capacity(), 128);
        buffer.push(1);
        buffer.push(2);

        buffer.shrink_to_fit();
        assert!(buffer.capacity() >= 64 && buffer.capacity() < 128);
    }

    #[test]
    fn test_mutable_set_null_bits() {
        let mut buffer = MutableBuffer::new(8).with_bitset(8, true);

        for i in 0..=buffer.capacity() {
            buffer.set_null_bits(i, 0);
            assert_eq!(buffer[..8], [255; 8][..]);
        }

        buffer.set_null_bits(1, 4);
        assert_eq!(buffer[..8], [255, 0, 0, 0, 0, 255, 255, 255][..]);
    }

    #[test]
    #[should_panic = "out of bounds for buffer of length"]
    fn test_mutable_set_null_bits_oob() {
        let mut buffer = MutableBuffer::new(64);
        buffer.set_null_bits(1, buffer.capacity());
    }

    #[test]
    #[should_panic = "out of bounds for buffer of length"]
    fn test_mutable_set_null_bits_oob_by_overflow() {
        let mut buffer = MutableBuffer::new(0);
        buffer.set_null_bits(1, usize::MAX);
    }

    #[test]
    fn from_iter() {
        let buffer = [1u16, 2, 3, 4].into_iter().collect::<MutableBuffer>();
        assert_eq!(buffer.len(), 4 * mem::size_of::<u16>());
        assert_eq!(buffer.as_slice(), &[1, 0, 2, 0, 3, 0, 4, 0]);
    }

    #[test]
    #[should_panic(expected = "failed to create layout for MutableBuffer: LayoutError")]
    fn test_with_capacity_panics_above_max_capacity() {
        let max_capacity = isize::MAX as usize - (isize::MAX as usize % ALIGNMENT);
        let _ = MutableBuffer::with_capacity(max_capacity + 1);
    }

    #[cfg(feature = "pool")]
    mod pool_tests {
        use super::*;
        use crate::pool::{MemoryPool, TrackingMemoryPool};

        #[test]
        fn test_reallocate_with_pool() {
            let pool = TrackingMemoryPool::default();
            let mut buffer = MutableBuffer::with_capacity(100);
            buffer.claim(&pool);

            assert_eq!(buffer.capacity(), 128);
            assert_eq!(pool.used(), 128);

            buffer.reallocate(200);

            assert_eq!(buffer.capacity(), 200);
            assert_eq!(pool.used(), 200);

            buffer.reallocate(50);

            assert_eq!(buffer.capacity(), 50);
            assert_eq!(pool.used(), 50);
        }

        #[test]
        fn test_truncate_with_pool() {
            let pool = TrackingMemoryPool::default();
            let mut buffer = MutableBuffer::with_capacity(100);

            buffer.resize(80, 1);
            assert_eq!(buffer.len(), 80);

            buffer.claim(&pool);
            assert_eq!(pool.used(), 128);

            buffer.truncate(40);
            assert_eq!(buffer.len(), 40);
            assert_eq!(pool.used(), 40);

            buffer.truncate(0);
            assert_eq!(buffer.len(), 0);
            assert_eq!(pool.used(), 0);
        }

        #[test]
        fn test_resize_with_pool() {
            let pool = TrackingMemoryPool::default();
            let mut buffer = MutableBuffer::with_capacity(100);
            buffer.claim(&pool);

            assert_eq!(buffer.len(), 0);
            assert_eq!(pool.used(), 128);

            buffer.resize(50, 1);
            assert_eq!(buffer.len(), 50);
            assert_eq!(pool.used(), 50);

            buffer.resize(150, 1);
            assert_eq!(buffer.len(), 150);
            assert_eq!(buffer.capacity(), 256);
            assert_eq!(pool.used(), 150);

            buffer.resize(30, 1);
            assert_eq!(buffer.len(), 30);
            assert_eq!(pool.used(), 30);
        }

        #[test]
        fn test_buffer_lifecycle_with_pool() {
            let pool = TrackingMemoryPool::default();

            let mut mutable = MutableBuffer::with_capacity(100);
            mutable.resize(80, 1);
            mutable.claim(&pool);

            assert_eq!(pool.used(), 128);

            let buffer = mutable.into_buffer();

            assert_eq!(pool.used(), 128);

            drop(buffer);
            assert_eq!(pool.used(), 0);
        }
    }
}