1use std::alloc::Layout;
19use std::fmt::Debug;
20use std::ptr::NonNull;
21use std::sync::Arc;
22
23use crate::alloc::{Allocation, Deallocation};
24use crate::util::bit_chunk_iterator::{BitChunks, UnalignedBitChunk};
25use crate::BufferBuilder;
26use crate::{bit_util, bytes::Bytes, native::ArrowNativeType};
27
28use super::ops::bitwise_unary_op_helper;
29use super::{MutableBuffer, ScalarBuffer};
30
/// A contiguous, immutable region of bytes.
///
/// `Buffer` is cheap to clone and slice: clones share the same backing
/// allocation through an [`Arc`], and slices only adjust `ptr`/`length`
/// without copying data.
#[derive(Clone, Debug)]
pub struct Buffer {
    /// The shared, reference-counted backing allocation.
    data: Arc<Bytes>,

    /// Pointer to the first visible byte of this buffer's view into `data`.
    /// Invariant: always points into `data`'s allocation (see `ptr_offset`).
    ptr: *const u8,

    /// Number of bytes visible through this buffer, starting at `ptr`.
    length: usize,
}
84
85impl Default for Buffer {
86 #[inline]
87 fn default() -> Self {
88 MutableBuffer::default().into()
89 }
90}
91
92impl PartialEq for Buffer {
93 fn eq(&self, other: &Self) -> bool {
94 self.as_slice().eq(other.as_slice())
95 }
96}
97
impl Eq for Buffer {}

// SAFETY: `Buffer` is a read-only view over an `Arc<Bytes>`; the raw `ptr`
// field always points into that shared allocation, so `Buffer` is `Send`/
// `Sync` exactly when the underlying `Bytes` is.
unsafe impl Send for Buffer where Bytes: Send {}
unsafe impl Sync for Buffer where Bytes: Sync {}
102
impl Buffer {
    /// Create a `Buffer` from the crate-internal [`Bytes`] type.
    #[deprecated(since = "54.1.0", note = "Use Buffer::from instead")]
    pub fn from_bytes(bytes: Bytes) -> Self {
        Self::from(bytes)
    }

    /// Offset, in bytes, of this buffer's view from the start of the
    /// backing allocation (non-zero after `slice`/`advance`).
    pub fn ptr_offset(&self) -> usize {
        // SAFETY: `self.ptr` always points into the `data` allocation, at or
        // after its start, so the difference is non-negative and in-bounds.
        unsafe { self.ptr.offset_from(self.data.ptr().as_ptr()) as usize }
    }

    /// Pointer to the start of the backing allocation (ignores any offset).
    pub fn data_ptr(&self) -> NonNull<u8> {
        self.data.ptr()
    }

    /// Create a `Buffer` that takes ownership of `vec`'s allocation
    /// without copying.
    #[inline]
    pub fn from_vec<T: ArrowNativeType>(vec: Vec<T>) -> Self {
        MutableBuffer::from(vec).into()
    }

    /// Create a `Buffer` by copying the bytes of `items` into a new
    /// allocation.
    pub fn from_slice_ref<U: ArrowNativeType, T: AsRef<[U]>>(items: T) -> Self {
        let slice = items.as_ref();
        let capacity = std::mem::size_of_val(slice);
        let mut buffer = MutableBuffer::with_capacity(capacity);
        buffer.extend_from_slice(slice);
        buffer.into()
    }

    /// Create a `Buffer` over externally owned memory; the memory is
    /// released by dropping `owner` (via `Deallocation::Custom`) rather
    /// than by this crate's allocator.
    ///
    /// # Safety
    /// `ptr` must be valid for reads of `len` bytes for as long as `owner`
    /// keeps the allocation alive.
    pub unsafe fn from_custom_allocation(
        ptr: NonNull<u8>,
        len: usize,
        owner: Arc<dyn Allocation>,
    ) -> Self {
        Buffer::build_with_arguments(ptr, len, Deallocation::Custom(owner, len))
    }

    /// Assemble a `Buffer` from raw parts plus a deallocation strategy.
    ///
    /// # Safety
    /// `ptr` must be valid for reads of `len` bytes and consistent with
    /// `deallocation`.
    unsafe fn build_with_arguments(
        ptr: NonNull<u8>,
        len: usize,
        deallocation: Deallocation,
    ) -> Self {
        let bytes = Bytes::new(ptr, len, deallocation);
        let ptr = bytes.as_ptr();
        Buffer {
            ptr,
            data: Arc::new(bytes),
            length: len,
        }
    }

    /// Number of bytes visible through this buffer.
    #[inline]
    pub fn len(&self) -> usize {
        self.length
    }

    /// Total capacity of the backing allocation in bytes; may exceed
    /// `len()`, e.g. after slicing or over-allocation.
    #[inline]
    pub fn capacity(&self) -> usize {
        self.data.capacity()
    }

    /// Try to shrink the backing allocation to the minimum needed for the
    /// visible bytes (offset + length). A no-op if the allocation is shared
    /// with another buffer, already tight, or reallocation fails.
    pub fn shrink_to_fit(&mut self) {
        let offset = self.ptr_offset();
        let is_empty = self.is_empty();
        let desired_capacity = if is_empty {
            0
        } else {
            // Bytes before `offset` are retained so `ptr` stays valid.
            offset + self.len()
        };
        if desired_capacity < self.capacity() {
            // Shrinking is only possible with exclusive ownership of `data`.
            if let Some(bytes) = Arc::get_mut(&mut self.data) {
                if bytes.try_realloc(desired_capacity).is_ok() {
                    // Reallocation may move the data: recompute `ptr`.
                    self.ptr = if is_empty {
                        bytes.as_ptr()
                    } else {
                        // SAFETY: `offset < desired_capacity` here, so the
                        // recomputed pointer stays in-bounds.
                        unsafe { bytes.as_ptr().add(offset) }
                    }
                } else {
                    // Reallocation failed: keep the existing allocation.
                }
            }
        }
    }

    /// Returns `true` when this buffer contains no bytes.
    #[inline]
    pub fn is_empty(&self) -> bool {
        self.length == 0
    }

    /// View the buffer's visible bytes as a slice.
    pub fn as_slice(&self) -> &[u8] {
        // SAFETY: `ptr` is valid for `length` bytes for the lifetime of
        // `data`, which `&self` keeps alive.
        unsafe { std::slice::from_raw_parts(self.ptr, self.length) }
    }

    /// How the backing memory will be released.
    pub(crate) fn deallocation(&self) -> &Deallocation {
        self.data.deallocation()
    }

    /// Zero-copy slice starting at `offset` bytes, extending to the end.
    ///
    /// # Panics
    /// Panics if `offset` exceeds `self.len()`.
    pub fn slice(&self, offset: usize) -> Self {
        let mut s = self.clone();
        s.advance(offset);
        s
    }

    /// Advance the start of this buffer by `offset` bytes in place.
    ///
    /// # Panics
    /// Panics if `offset` exceeds `self.len()`.
    #[inline]
    pub fn advance(&mut self, offset: usize) {
        assert!(
            offset <= self.length,
            "the offset of the new Buffer cannot exceed the existing length: offset={} length={}",
            offset,
            self.length
        );
        self.length -= offset;
        // SAFETY: the assert above guarantees `offset` is within the view.
        self.ptr = unsafe { self.ptr.add(offset) };
    }

    /// Zero-copy slice of `length` bytes starting at `offset`.
    ///
    /// # Panics
    /// Panics if `offset + length` exceeds `self.len()`; the addition
    /// saturates, so huge arguments cannot wrap past the check.
    pub fn slice_with_length(&self, offset: usize, length: usize) -> Self {
        assert!(
            offset.saturating_add(length) <= self.length,
            "the offset of the new Buffer cannot exceed the existing length: slice offset={offset} length={length} selflen={}",
            self.length
        );
        // SAFETY: the assert above guarantees `offset <= self.length`.
        let ptr = unsafe { self.ptr.add(offset) };
        Self {
            data: self.data.clone(),
            ptr,
            length,
        }
    }

    /// Raw pointer to the first visible byte.
    #[inline]
    pub fn as_ptr(&self) -> *const u8 {
        self.ptr
    }

    /// Reinterpret the buffer's bytes as a slice of `T`.
    ///
    /// # Panics
    /// Panics if the buffer is not aligned for `T` or its length is not a
    /// multiple of `size_of::<T>()` (either makes prefix/suffix non-empty).
    pub fn typed_data<T: ArrowNativeType>(&self) -> &[T] {
        // SAFETY: `ArrowNativeType` values are plain data, valid for any
        // bit pattern; `align_to` only exposes the correctly aligned middle.
        let (prefix, offsets, suffix) = unsafe { self.as_slice().align_to::<T>() };
        assert!(prefix.is_empty() && suffix.is_empty());
        offsets
    }

    /// Slice `len` *bits* starting at bit `offset`.
    ///
    /// Byte-aligned offsets are zero-copy; otherwise the bits are repacked
    /// into a newly allocated buffer via the identity bitwise op.
    pub fn bit_slice(&self, offset: usize, len: usize) -> Self {
        if offset % 8 == 0 {
            return self.slice_with_length(offset / 8, bit_util::ceil(len, 8));
        }

        bitwise_unary_op_helper(self, offset, len, |a| a)
    }

    /// Iterate the buffer's bits in 64-bit chunks, starting at bit `offset`.
    pub fn bit_chunks(&self, offset: usize, len: usize) -> BitChunks {
        BitChunks::new(self.as_slice(), offset, len)
    }

    /// Count the set (1) bits in `len` bits starting at bit `offset`.
    pub fn count_set_bits_offset(&self, offset: usize, len: usize) -> usize {
        UnalignedBitChunk::new(self.as_slice(), offset, len).count_ones()
    }

    /// Convert into a [`MutableBuffer`] without copying, if this buffer
    /// has sole ownership of its allocation; otherwise returns `self`
    /// unchanged as the error.
    pub fn into_mutable(self) -> Result<MutableBuffer, Self> {
        let ptr = self.ptr;
        let length = self.length;
        Arc::try_unwrap(self.data)
            .and_then(|bytes| {
                // Invariant: the view must start at the allocation start —
                // a `MutableBuffer` cannot carry an offset.
                assert_eq!(ptr, bytes.ptr().as_ptr());
                MutableBuffer::from_bytes(bytes).map_err(Arc::new)
            })
            .map_err(|bytes| Buffer {
                data: bytes,
                ptr,
                length,
            })
    }

    /// Convert into a `Vec<T>` without copying, if this buffer has sole
    /// ownership of a standard allocation whose layout matches what a
    /// `Vec<T>` would have produced; otherwise returns `self` unchanged
    /// as the error.
    pub fn into_vec<T: ArrowNativeType>(self) -> Result<Vec<T>, Self> {
        // Only memory from the standard allocator can be handed to `Vec`.
        let layout = match self.data.deallocation() {
            Deallocation::Standard(l) => l,
            _ => return Err(self), };

        // A sliced buffer cannot be returned: `Vec` has no offset.
        if self.ptr != self.data.as_ptr() {
            return Err(self); }

        // The layout must be exactly that of a `Vec<T>` with this capacity.
        let v_capacity = layout.size() / std::mem::size_of::<T>();
        match Layout::array::<T>(v_capacity) {
            Ok(expected) if layout == &expected => {}
            _ => return Err(self), }

        let length = self.length;
        let ptr = self.ptr;
        let v_len = self.length / std::mem::size_of::<T>();

        Arc::try_unwrap(self.data)
            .map(|bytes| unsafe {
                // SAFETY: the checks above ensure `bytes` was allocated with
                // the layout of a `Vec<T>` of capacity `v_capacity`;
                // `forget` hands ownership of the allocation to the `Vec`.
                let ptr = bytes.ptr().as_ptr() as _;
                std::mem::forget(bytes);
                Vec::from_raw_parts(ptr, v_len, v_capacity)
            })
            .map_err(|bytes| Buffer {
                data: bytes,
                ptr,
                length,
            })
    }

    /// Pointer-identity comparison: `true` when both buffers view exactly
    /// the same memory region (same start and length). Cheaper than `==`,
    /// but distinct allocations with equal contents compare `false`.
    #[inline]
    pub fn ptr_eq(&self, other: &Self) -> bool {
        self.ptr == other.ptr && self.length == other.length
    }
}
425
426impl From<&[u8]> for Buffer {
435 fn from(p: &[u8]) -> Self {
436 Self::from_slice_ref(p)
437 }
438}
439
440impl<const N: usize> From<[u8; N]> for Buffer {
441 fn from(p: [u8; N]) -> Self {
442 Self::from_slice_ref(p)
443 }
444}
445
446impl<const N: usize> From<&[u8; N]> for Buffer {
447 fn from(p: &[u8; N]) -> Self {
448 Self::from_slice_ref(p)
449 }
450}
451
452impl<T: ArrowNativeType> From<Vec<T>> for Buffer {
453 fn from(value: Vec<T>) -> Self {
454 Self::from_vec(value)
455 }
456}
457
458impl<T: ArrowNativeType> From<ScalarBuffer<T>> for Buffer {
459 fn from(value: ScalarBuffer<T>) -> Self {
460 value.into_inner()
461 }
462}
463
464impl From<Bytes> for Buffer {
466 #[inline]
467 fn from(bytes: Bytes) -> Self {
468 let length = bytes.len();
469 let ptr = bytes.as_ptr();
470 Self {
471 data: Arc::new(bytes),
472 ptr,
473 length,
474 }
475 }
476}
477
478impl From<bytes::Bytes> for Buffer {
480 fn from(bytes: bytes::Bytes) -> Self {
481 let bytes: Bytes = bytes.into();
482 Self::from(bytes)
483 }
484}
485
486impl FromIterator<bool> for Buffer {
488 fn from_iter<I>(iter: I) -> Self
489 where
490 I: IntoIterator<Item = bool>,
491 {
492 MutableBuffer::from_iter(iter).into()
493 }
494}
495
impl std::ops::Deref for Buffer {
    type Target = [u8];

    /// Dereference to the visible bytes, making `[u8]` slice methods
    /// available directly on `Buffer`.
    fn deref(&self) -> &[u8] {
        // SAFETY: `as_ptr` is valid for `len` bytes for the lifetime of
        // the backing allocation, which `&self` keeps alive.
        unsafe { std::slice::from_raw_parts(self.as_ptr(), self.len()) }
    }
}
503
504impl From<MutableBuffer> for Buffer {
505 #[inline]
506 fn from(buffer: MutableBuffer) -> Self {
507 buffer.into_buffer()
508 }
509}
510
511impl<T: ArrowNativeType> From<BufferBuilder<T>> for Buffer {
512 fn from(mut value: BufferBuilder<T>) -> Self {
513 value.finish()
514 }
515}
516
impl Buffer {
    /// Create a buffer from an iterator of known, trusted length.
    ///
    /// # Safety
    /// The iterator must report its exact length — presumably via its
    /// `size_hint` upper bound; confirm against
    /// `MutableBuffer::from_trusted_len_iter`'s contract.
    #[inline]
    pub unsafe fn from_trusted_len_iter<T: ArrowNativeType, I: Iterator<Item = T>>(
        iterator: I,
    ) -> Self {
        MutableBuffer::from_trusted_len_iter(iterator).into()
    }

    /// Fallible variant of [`Self::from_trusted_len_iter`]: propagates the
    /// first `Err` yielded by the iterator.
    ///
    /// # Safety
    /// Same trusted-length requirement as [`Self::from_trusted_len_iter`].
    #[inline]
    pub unsafe fn try_from_trusted_len_iter<
        E,
        T: ArrowNativeType,
        I: Iterator<Item = Result<T, E>>,
    >(
        iterator: I,
    ) -> Result<Self, E> {
        Ok(MutableBuffer::try_from_trusted_len_iter(iterator)?.into())
    }
}
561
562impl<T: ArrowNativeType> FromIterator<T> for Buffer {
563 fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
564 let vec = Vec::from_iter(iter);
565 Buffer::from_vec(vec)
566 }
567}
568
#[cfg(test)]
mod tests {
    use crate::i256;
    use std::panic::{RefUnwindSafe, UnwindSafe};
    use std::thread;

    use super::*;

    // Equality is content-based, independent of capacity or offset.
    #[test]
    fn test_buffer_data_equality() {
        let buf1 = Buffer::from(&[0, 1, 2, 3, 4]);
        let buf2 = Buffer::from(&[0, 1, 2, 3, 4]);
        assert_eq!(buf1, buf2);

        // Slicing changes visible contents, hence equality.
        let buf3 = buf1.slice(2);
        assert_ne!(buf1, buf3);
        let buf4 = buf2.slice_with_length(2, 3);
        assert_eq!(buf3, buf4);

        // Equal contents with a larger capacity still compare equal.
        let mut buf2 = MutableBuffer::new(65);
        buf2.extend_from_slice(&[0u8, 1, 2, 3, 4]);

        let buf2 = buf2.into();
        assert_eq!(buf1, buf2);

        // Different bytes are unequal...
        let buf2 = Buffer::from(&[0, 0, 2, 3, 4]);
        assert_ne!(buf1, buf2);

        // ...as are different lengths.
        let buf2 = Buffer::from(&[0, 1, 2, 3]);
        assert_ne!(buf1, buf2);
    }

    #[test]
    fn test_from_raw_parts() {
        let buf = Buffer::from(&[0, 1, 2, 3, 4]);
        assert_eq!(5, buf.len());
        assert!(!buf.as_ptr().is_null());
        assert_eq!([0, 1, 2, 3, 4], buf.as_slice());
    }

    #[test]
    fn test_from_vec() {
        let buf = Buffer::from(&[0, 1, 2, 3, 4]);
        assert_eq!(5, buf.len());
        assert!(!buf.as_ptr().is_null());
        assert_eq!([0, 1, 2, 3, 4], buf.as_slice());
    }

    // Moving a Buffer preserves its contents and capacity.
    #[test]
    fn test_copy() {
        let buf = Buffer::from(&[0, 1, 2, 3, 4]);
        let buf2 = buf;
        assert_eq!(5, buf2.len());
        assert_eq!(64, buf2.capacity());
        assert!(!buf2.as_ptr().is_null());
        assert_eq!([0, 1, 2, 3, 4], buf2.as_slice());
    }

    #[test]
    fn test_slice() {
        let buf = Buffer::from(&[2, 4, 6, 8, 10]);
        let buf2 = buf.slice(2);

        assert_eq!([6, 8, 10], buf2.as_slice());
        assert_eq!(3, buf2.len());
        // Slices share the allocation: the pointer is merely advanced.
        assert_eq!(unsafe { buf.as_ptr().offset(2) }, buf2.as_ptr());

        // Slices of slices compose their offsets.
        let buf3 = buf2.slice_with_length(1, 2);
        assert_eq!([8, 10], buf3.as_slice());
        assert_eq!(2, buf3.len());
        assert_eq!(unsafe { buf.as_ptr().offset(3) }, buf3.as_ptr());

        // Slicing at exactly the length yields an empty buffer.
        let buf4 = buf.slice(5);
        let empty_slice: [u8; 0] = [];
        assert_eq!(empty_slice, buf4.as_slice());
        assert_eq!(0, buf4.len());
        assert!(buf4.is_empty());
        assert_eq!(buf2.slice_with_length(2, 1).as_slice(), &[10]);
    }

    #[test]
    fn test_shrink_to_fit() {
        let original = Buffer::from(&[0, 1, 2, 3, 4, 5, 6, 7]);
        assert_eq!(original.as_slice(), &[0, 1, 2, 3, 4, 5, 6, 7]);
        assert_eq!(original.capacity(), 64);

        // Dropping the original leaves the slice as the sole owner.
        let slice = original.slice_with_length(2, 3);
        drop(original); assert_eq!(slice.as_slice(), &[2, 3, 4]);
        assert_eq!(slice.capacity(), 64);

        let mut shrunk = slice;
        shrunk.shrink_to_fit();
        assert_eq!(shrunk.as_slice(), &[2, 3, 4]);
        // offset (2) + length (3) bytes are retained after shrinking.
        assert_eq!(shrunk.capacity(), 5); let empty_slice = shrunk.slice_with_length(1, 0);
        drop(shrunk); assert_eq!(empty_slice.as_slice(), &[]);
        assert_eq!(empty_slice.capacity(), 5);

        // Shrinking an empty buffer releases the allocation entirely.
        let mut shrunk_empty = empty_slice;
        shrunk_empty.shrink_to_fit();
        assert_eq!(shrunk_empty.as_slice(), &[]);
        assert_eq!(shrunk_empty.capacity(), 0);
    }

    #[test]
    #[should_panic(expected = "the offset of the new Buffer cannot exceed the existing length")]
    fn test_slice_offset_out_of_bound() {
        let buf = Buffer::from(&[2, 4, 6, 8, 10]);
        buf.slice(6);
    }

    // Buffer clones can be sent across threads and compare equal.
    #[test]
    fn test_access_concurrently() {
        let buffer = Buffer::from([1, 2, 3, 4, 5]);
        let buffer2 = buffer.clone();
        assert_eq!([1, 2, 3, 4, 5], buffer.as_slice());

        let buffer_copy = thread::spawn(move || {
            buffer
        })
        .join();

        assert!(buffer_copy.is_ok());
        assert_eq!(buffer2, buffer_copy.ok().unwrap());
    }

    // Round-trips a typed slice through Buffer and typed_data.
    macro_rules! check_as_typed_data {
        ($input: expr, $native_t: ty) => {{
            let buffer = Buffer::from_slice_ref($input);
            let slice: &[$native_t] = buffer.typed_data::<$native_t>();
            assert_eq!($input, slice);
        }};
    }

    #[test]
    #[allow(clippy::float_cmp)]
    fn test_as_typed_data() {
        check_as_typed_data!(&[1i8, 3i8, 6i8], i8);
        check_as_typed_data!(&[1u8, 3u8, 6u8], u8);
        check_as_typed_data!(&[1i16, 3i16, 6i16], i16);
        check_as_typed_data!(&[1i32, 3i32, 6i32], i32);
        check_as_typed_data!(&[1i64, 3i64, 6i64], i64);
        check_as_typed_data!(&[1u16, 3u16, 6u16], u16);
        check_as_typed_data!(&[1u32, 3u32, 6u32], u32);
        check_as_typed_data!(&[1u64, 3u64, 6u64], u64);
        check_as_typed_data!(&[1f32, 3f32, 6f32], f32);
        check_as_typed_data!(&[1f64, 3f64, 6f64], f64);
    }

    #[test]
    fn test_count_bits() {
        assert_eq!(0, Buffer::from(&[0b00000000]).count_set_bits_offset(0, 8));
        assert_eq!(8, Buffer::from(&[0b11111111]).count_set_bits_offset(0, 8));
        assert_eq!(3, Buffer::from(&[0b00001101]).count_set_bits_offset(0, 8));
        assert_eq!(
            6,
            Buffer::from(&[0b01001001, 0b01010010]).count_set_bits_offset(0, 16)
        );
        assert_eq!(
            16,
            Buffer::from(&[0b11111111, 0b11111111]).count_set_bits_offset(0, 16)
        );
    }

    // Counting bits respects byte-level slicing of the buffer.
    #[test]
    fn test_count_bits_slice() {
        assert_eq!(
            0,
            Buffer::from(&[0b11111111, 0b00000000])
                .slice(1)
                .count_set_bits_offset(0, 8)
        );
        assert_eq!(
            8,
            Buffer::from(&[0b11111111, 0b11111111])
                .slice_with_length(1, 1)
                .count_set_bits_offset(0, 8)
        );
        assert_eq!(
            3,
            Buffer::from(&[0b11111111, 0b11111111, 0b00001101])
                .slice(2)
                .count_set_bits_offset(0, 8)
        );
        assert_eq!(
            6,
            Buffer::from(&[0b11111111, 0b01001001, 0b01010010])
                .slice_with_length(1, 2)
                .count_set_bits_offset(0, 16)
        );
        assert_eq!(
            16,
            Buffer::from(&[0b11111111, 0b11111111, 0b11111111, 0b11111111])
                .slice(2)
                .count_set_bits_offset(0, 16)
        );
    }

    // Counting bits respects bit-level offsets within the buffer.
    #[test]
    fn test_count_bits_offset_slice() {
        assert_eq!(8, Buffer::from(&[0b11111111]).count_set_bits_offset(0, 8));
        assert_eq!(3, Buffer::from(&[0b11111111]).count_set_bits_offset(0, 3));
        assert_eq!(5, Buffer::from(&[0b11111111]).count_set_bits_offset(3, 5));
        assert_eq!(1, Buffer::from(&[0b11111111]).count_set_bits_offset(3, 1));
        assert_eq!(0, Buffer::from(&[0b11111111]).count_set_bits_offset(8, 0));
        assert_eq!(2, Buffer::from(&[0b01010101]).count_set_bits_offset(0, 3));
        assert_eq!(
            16,
            Buffer::from(&[0b11111111, 0b11111111]).count_set_bits_offset(0, 16)
        );
        assert_eq!(
            10,
            Buffer::from(&[0b11111111, 0b11111111]).count_set_bits_offset(0, 10)
        );
        assert_eq!(
            10,
            Buffer::from(&[0b11111111, 0b11111111]).count_set_bits_offset(3, 10)
        );
        assert_eq!(
            8,
            Buffer::from(&[0b11111111, 0b11111111]).count_set_bits_offset(8, 8)
        );
        assert_eq!(
            5,
            Buffer::from(&[0b11111111, 0b11111111]).count_set_bits_offset(11, 5)
        );
        assert_eq!(
            0,
            Buffer::from(&[0b11111111, 0b11111111]).count_set_bits_offset(16, 0)
        );
        assert_eq!(
            2,
            Buffer::from(&[0b01101101, 0b10101010]).count_set_bits_offset(7, 5)
        );
        assert_eq!(
            4,
            Buffer::from(&[0b01101101, 0b10101010]).count_set_bits_offset(7, 9)
        );
    }

    // Compile-time check: Buffer implements the unwind-safety markers.
    #[test]
    fn test_unwind_safe() {
        fn assert_unwind_safe<T: RefUnwindSafe + UnwindSafe>() {}
        assert_unwind_safe::<Buffer>()
    }

    // A Buffer over externally owned memory (here: a Vec kept alive as the
    // custom-allocation owner) is readable and sliceable like any other.
    #[test]
    fn test_from_foreign_vec() {
        let mut vector = vec![1_i32, 2, 3, 4, 5];
        let buffer = unsafe {
            Buffer::from_custom_allocation(
                NonNull::new_unchecked(vector.as_mut_ptr() as *mut u8),
                vector.len() * std::mem::size_of::<i32>(),
                Arc::new(vector),
            )
        };

        let slice = buffer.typed_data::<i32>();
        assert_eq!(slice, &[1, 2, 3, 4, 5]);

        let buffer = buffer.slice(std::mem::size_of::<i32>());

        let slice = buffer.typed_data::<i32>();
        assert_eq!(slice, &[2, 3, 4, 5]);
    }

    // slice_with_length uses a saturating add, so usize::MAX still panics
    // with the bounds message rather than overflowing.
    #[test]
    #[should_panic(expected = "the offset of the new Buffer cannot exceed the existing length")]
    fn slice_overflow() {
        let buffer = Buffer::from(MutableBuffer::from_len_zeroed(12));
        buffer.slice_with_length(2, usize::MAX);
    }

    #[test]
    fn test_vec_interop() {
        // An empty Vec round-trips.
        let a: Vec<i128> = Vec::new();
        let b = Buffer::from_vec(a);
        b.into_vec::<i128>().unwrap();

        // Spare capacity is preserved through the round-trip.
        let a: Vec<i128> = Vec::with_capacity(20);
        let b = Buffer::from_vec(a);
        let back = b.into_vec::<i128>().unwrap();
        assert_eq!(back.len(), 0);
        assert_eq!(back.capacity(), 20);

        let mut a: Vec<i128> = Vec::with_capacity(3);
        a.extend_from_slice(&[1, 2, 3]);
        let b = Buffer::from_vec(a);
        let back = b.into_vec::<i128>().unwrap();
        assert_eq!(back.len(), 3);
        assert_eq!(back.capacity(), 3);

        let mut a: Vec<i128> = Vec::with_capacity(20);
        a.extend_from_slice(&[1, 4, 7, 8, 9, 3, 6]);
        let b = Buffer::from_vec(a);
        let back = b.into_vec::<i128>().unwrap();
        assert_eq!(back.len(), 7);
        assert_eq!(back.capacity(), 20);

        // A layout mismatch (wrong element type) is rejected.
        let a: Vec<i128> = Vec::new();
        let b = Buffer::from_vec(a);
        let b = b.into_vec::<i32>().unwrap_err();
        b.into_vec::<i8>().unwrap_err();

        // Same-size, same-alignment reinterpretation is allowed.
        let a: Vec<i64> = vec![1, 2, 3, 4];
        let b = Buffer::from_vec(a);
        let back = b.into_vec::<u64>().unwrap();
        assert_eq!(back.len(), 4);
        assert_eq!(back.capacity(), 4);

        // Widening works when both length and capacity divide evenly.
        let mut b: Vec<i128> = Vec::with_capacity(4);
        b.extend_from_slice(&[1, 2, 3, 4]);
        let b = Buffer::from_vec(b);
        let back = b.into_vec::<i256>().unwrap();
        assert_eq!(back.len(), 2);
        assert_eq!(back.capacity(), 2);

        // ...and is rejected when the capacity does not divide evenly.
        let b: Vec<i128> = vec![1, 2, 3];
        let b = Buffer::from_vec(b);
        b.into_vec::<i256>().unwrap_err();

        let mut b: Vec<i128> = Vec::with_capacity(5);
        b.extend_from_slice(&[1, 2, 3, 4]);
        let b = Buffer::from_vec(b);
        b.into_vec::<i256>().unwrap_err();

        // An odd length with an even capacity truncates the length.
        let mut b: Vec<i128> = Vec::with_capacity(4);
        b.extend_from_slice(&[1, 2, 3]);
        let b = Buffer::from_vec(b);
        let back = b.into_vec::<i256>().unwrap();
        assert_eq!(back.len(), 1);
        assert_eq!(back.capacity(), 2);

        // MutableBuffer-backed memory is not a standard Vec allocation.
        let b = Buffer::from(MutableBuffer::new(10));
        let b = b.into_vec::<u8>().unwrap_err();
        b.into_vec::<u64>().unwrap_err();

        // A shared allocation fails until the other owner is dropped.
        let mut a: Vec<i128> = Vec::with_capacity(20);
        a.extend_from_slice(&[1, 4, 7, 8, 9, 3, 6]);
        let b = Buffer::from_vec(a);
        let slice = b.slice_with_length(0, 64);

        let slice = slice.into_vec::<i128>().unwrap_err();
        drop(b);

        let back = slice.into_vec::<i128>().unwrap();
        assert_eq!(&back, &[1, 4, 7, 8]);
        assert_eq!(back.capacity(), 20);

        // A partial-element slice length rounds down to whole elements.
        let mut a: Vec<i128> = Vec::with_capacity(8);
        a.extend_from_slice(&[1, 4, 7, 3]);

        let b = Buffer::from_vec(a);
        let slice = b.slice_with_length(0, 34);
        drop(b);

        let back = slice.into_vec::<i128>().unwrap();
        assert_eq!(&back, &[1, 4]);
        assert_eq!(back.capacity(), 8);

        // An offset buffer can never be returned as a Vec.
        let a: Vec<u32> = vec![1, 3, 4, 6];
        let b = Buffer::from_vec(a).slice(2);
        b.into_vec::<u32>().unwrap_err();

        // MutableBuffer memory fails into_vec but succeeds into_mutable.
        let b = MutableBuffer::new(16).into_buffer();
        let b = b.into_vec::<u8>().unwrap_err(); let b = b.into_vec::<u32>().unwrap_err(); b.into_mutable().unwrap();

        // Vec -> Buffer -> MutableBuffer -> Buffer -> Vec round-trip.
        let b = Buffer::from_vec(vec![1_u32, 3, 5]);
        let b = b.into_mutable().unwrap();
        let b = Buffer::from(b);
        let b = b.into_vec::<u32>().unwrap();
        assert_eq!(b, &[1, 3, 5]);
    }

    #[test]
    #[should_panic(expected = "capacity overflow")]
    fn test_from_iter_overflow() {
        let iter_len = usize::MAX / std::mem::size_of::<u64>() + 1;
        let _ = Buffer::from_iter(std::iter::repeat(0_u64).take(iter_len));
    }

    // bit_slice always yields ceil(len, 8) bytes; aligned offsets reuse the
    // allocation (non-zero ptr_offset), unaligned ones repack from zero.
    #[test]
    fn bit_slice_length_preserved() {
        let buf = Buffer::from_iter(std::iter::repeat(true).take(64));

        let assert_preserved = |offset: usize, len: usize| {
            let new_buf = buf.bit_slice(offset, len);
            assert_eq!(new_buf.len(), bit_util::ceil(len, 8));

            if offset % 8 == 0 {
                assert_eq!(new_buf.ptr_offset(), offset / 8);
            } else {
                assert_eq!(new_buf.ptr_offset(), 0);
            }
        };

        for o in 0..=64 {
            for l in (o..=64).map(|l| l - o) {
                assert_preserved(o, l);
            }
        }
    }
}