use crate::util::bit_util::ceil;
use std::fmt::Debug;
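
/// A view of a range of bits within a byte buffer: a masked leading `u64`
/// (`prefix`), a slice of 8-byte aligned `u64` chunks, and a masked trailing
/// `u64` (`suffix`), together with the number of padding bits at either end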
#[derive(Debug)]
pub struct UnalignedBitChunk<'a> {
    lead_padding: usize,
    trailing_padding: usize,

    prefix: Option<u64>,
    chunks: &'a [u64],
    suffix: Option<u64>,
}

impl<'a> UnalignedBitChunk<'a> {
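    /// Constructs a new [`UnalignedBitChunk`] from a byte `buffer`, an
    /// `offset` into it in bits, and a length `len` in bits
    ///
    /// A minimal sketch of the decomposition, mirroring the assertions in
    /// `test_unaligned_bit_chunk_iterator` below:
    ///
    /// ```ignore
    /// let buffer = [0xFF_u8; 5];
    /// // 40 set bits starting at bit 0: everything fits in the prefix
    /// let chunk = UnalignedBitChunk::new(&buffer, 0, 40);
    /// assert_eq!(chunk.lead_padding(), 0);
    /// assert_eq!(chunk.trailing_padding(), 24);
    /// assert_eq!(chunk.prefix(), Some(0xFF_FF_FF_FF_FF));
    /// assert_eq!(chunk.suffix(), None);
    /// assert!(chunk.chunks().is_empty());
    /// ```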
    pub fn new(buffer: &'a [u8], offset: usize, len: usize) -> Self {
        if len == 0 {
            return Self {
                lead_padding: 0,
                trailing_padding: 0,
                prefix: None,
                chunks: &[],
                suffix: None,
            };
        }

        let byte_offset = offset / 8;
        let offset_padding = offset % 8;

        let bytes_len = (len + offset_padding).div_ceil(8);
        let buffer = &buffer[byte_offset..byte_offset + bytes_len];

        let prefix_mask = compute_prefix_mask(offset_padding);

        // If the range fits within 8 bytes, a single masked prefix suffices
        if buffer.len() <= 8 {
            let (suffix_mask, trailing_padding) = compute_suffix_mask(len, offset_padding);
            let prefix = read_u64(buffer) & suffix_mask & prefix_mask;

            return Self {
                lead_padding: offset_padding,
                trailing_padding,
                prefix: Some(prefix),
                chunks: &[],
                suffix: None,
            };
        }

        // If the range fits within 16 bytes, a prefix and a suffix suffice
        if buffer.len() <= 16 {
            let (suffix_mask, trailing_padding) = compute_suffix_mask(len, offset_padding);
            let prefix = read_u64(&buffer[..8]) & prefix_mask;
            let suffix = read_u64(&buffer[8..]) & suffix_mask;

            return Self {
                lead_padding: offset_padding,
                trailing_padding,
                prefix: Some(prefix),
                chunks: &[],
                suffix: Some(suffix),
            };
        }

        // SAFETY: `u64` is valid for all bit patterns
        let (prefix, mut chunks, suffix) = unsafe { buffer.align_to::<u64>() };
        assert!(
            prefix.len() < 8 && suffix.len() < 8,
            "align_to did not return largest possible aligned slice"
        );

        let (alignment_padding, prefix) = match (offset_padding, prefix.is_empty()) {
            // Buffer is 8-byte aligned and starts on a byte boundary
            (0, true) => (0, None),
            // Buffer is 8-byte aligned but starts mid-byte: mask the first
            // chunk into the prefix
            (_, true) => {
                let prefix = chunks[0] & prefix_mask;
                chunks = &chunks[1..];
                (0, Some(prefix))
            }
            // Buffer is unaligned: read the leading unaligned bytes and shift
            // them to the top of the `u64`, counting the gap below as padding
            (_, false) => {
                let alignment_padding = (8 - prefix.len()) * 8;

                let prefix = (read_u64(prefix) & prefix_mask) << alignment_padding;
                (alignment_padding, Some(prefix))
            }
        };

        let lead_padding = offset_padding + alignment_padding;
        let (suffix_mask, trailing_padding) = compute_suffix_mask(len, lead_padding);

        let suffix = match (trailing_padding, suffix.is_empty()) {
            // Range ends exactly on a chunk boundary: no suffix needed
            (0, _) => None,
            // Trailing bytes are 8-byte aligned: mask the last chunk into the
            // suffix
            (_, true) => {
                let suffix = chunks[chunks.len() - 1] & suffix_mask;
                chunks = &chunks[..chunks.len() - 1];
                Some(suffix)
            }
            // Read the unaligned trailing bytes into the suffix
            (_, false) => Some(read_u64(suffix) & suffix_mask),
        };

        Self {
            lead_padding,
            trailing_padding,
            prefix,
            chunks,
            suffix,
        }
    }

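    /// Returns the number of leading padding bits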
    pub fn lead_padding(&self) -> usize {
        self.lead_padding
    }

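    /// Returns the number of trailing padding bits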
    pub fn trailing_padding(&self) -> usize {
        self.trailing_padding
    }

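    /// Returns the masked prefix `u64`, if any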
    pub fn prefix(&self) -> Option<u64> {
        self.prefix
    }

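    /// Returns the masked suffix `u64`, if any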
    pub fn suffix(&self) -> Option<u64> {
        self.suffix
    }

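    /// Returns the slice of 8-byte aligned `u64` chunks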
    pub fn chunks(&self) -> &'a [u64] {
        self.chunks
    }

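    /// Returns an iterator over the prefix, chunks, and suffix, in that order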
    pub fn iter(&self) -> UnalignedBitChunkIterator<'a> {
        self.prefix
            .into_iter()
            .chain(self.chunks.iter().cloned())
            .chain(self.suffix)
    }

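    /// Returns the number of set bits in the range, ignoring any padding bits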
    pub fn count_ones(&self) -> usize {
        self.iter().map(|x| x.count_ones() as usize).sum()
    }
}

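/// The iterator type returned by [`UnalignedBitChunk::iter`]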
pub type UnalignedBitChunkIterator<'a> = std::iter::Chain<
    std::iter::Chain<std::option::IntoIter<u64>, std::iter::Cloned<std::slice::Iter<'a, u64>>>,
    std::option::IntoIter<u64>,
>;

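/// Reads up to 8 bytes from `input` into the low bytes of a little-endian
/// `u64`, zeroing any remaining bytes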
#[inline]
fn read_u64(input: &[u8]) -> u64 {
    let len = input.len().min(8);
    let mut buf = [0_u8; 8];
    // Copy only the bytes that fit, so slices longer than 8 bytes don't panic
    buf[..len].copy_from_slice(&input[..len]);
    u64::from_le_bytes(buf)
}

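/// Returns a mask with the low `lead_padding` bits zeroed and all other bits
/// set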
#[inline]
fn compute_prefix_mask(lead_padding: usize) -> u64 {
    !((1 << lead_padding) - 1)
}

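/// Returns the mask for the final `u64` of a range of `len` bits with
/// `lead_padding` leading padding bits, along with the number of trailing
/// padding bits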
#[inline]
fn compute_suffix_mask(len: usize, lead_padding: usize) -> (u64, usize) {
    let trailing_bits = (len + lead_padding) % 64;

    if trailing_bits == 0 {
        return (u64::MAX, 0);
    }

    let trailing_padding = 64 - trailing_bits;
    let suffix_mask = (1 << trailing_bits) - 1;
    (suffix_mask, trailing_padding)
}

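/// Iterates over an arbitrarily aligned byte buffer as `u64` chunks
///
/// Yields the complete `u64` chunks of a bit range, with the first byte of
/// the range as the least significant byte of each output `u64`; any leftover
/// bits are exposed separately via [`BitChunks::remainder_bits`].
///
/// A minimal sketch, mirroring `test_iter_aligned` below:
///
/// ```ignore
/// let input: &[u8] = &[0, 1, 2, 3, 4, 5, 6, 7];
/// let buffer: Buffer = Buffer::from(input);
/// let chunks: Vec<u64> = buffer.bit_chunks(0, 64).into_iter().collect();
/// assert_eq!(chunks, vec![0x0706050403020100]);
/// ```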
#[derive(Debug)]
pub struct BitChunks<'a> {
    buffer: &'a [u8],
    /// offset within the first byte, guaranteed to be between 0 and 7 (inclusive)
    bit_offset: usize,
    /// number of complete u64 chunks
    chunk_len: usize,
    /// number of remaining bits, guaranteed to be between 0 and 63 (inclusive)
    remainder_len: usize,
}

impl<'a> BitChunks<'a> {
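    /// Constructs a new [`BitChunks`] from a byte `buffer`, an `offset` in
    /// bits, and a length `len` in bits
    ///
    /// Panics if `offset + len` does not fit within `buffer`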
    pub fn new(buffer: &'a [u8], offset: usize, len: usize) -> Self {
        assert!(
            ceil(offset + len, 8) <= buffer.len(),
            "offset + len out of bounds"
        );

        let byte_offset = offset / 8;
        let bit_offset = offset % 8;

        // number of complete u64 chunks
        let chunk_len = len / 64;
        // number of remaining bits
        let remainder_len = len % 64;

        BitChunks::<'a> {
            buffer: &buffer[byte_offset..],
            bit_offset,
            chunk_len,
            remainder_len,
        }
    }
}

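/// Iterator over the complete `u64` chunks of a [`BitChunks`]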
#[derive(Debug)]
pub struct BitChunkIterator<'a> {
    buffer: &'a [u8],
    bit_offset: usize,
    chunk_len: usize,
    index: usize,
}

impl<'a> BitChunks<'a> {
    /// Returns the number of remaining bits, guaranteed to be between 0 and 63 (inclusive)
    #[inline]
    pub const fn remainder_len(&self) -> usize {
        self.remainder_len
    }

    /// Returns the number of complete u64 chunks
    #[inline]
    pub const fn chunk_len(&self) -> usize {
        self.chunk_len
    }

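    /// Returns the remainder bits (the final partial `u64`), shifted down to
    /// start at bit zero and zero-padded at the top
    ///
    /// A small sketch, mirroring `test_iter_unaligned_remainder_1_byte` below:
    ///
    /// ```ignore
    /// // 9-byte buffer; take 66 bits starting at bit 4:
    /// // one complete u64 chunk plus a 2-bit remainder
    /// let buffer = Buffer::from(&[0x00, 0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0xFF][..]);
    /// let bitchunks = buffer.bit_chunks(4, 66);
    /// assert_eq!(bitchunks.remainder_len(), 2);
    /// assert_eq!(bitchunks.remainder_bits(), 0b11);
    /// ```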
    #[inline]
    pub fn remainder_bits(&self) -> u64 {
        let bit_len = self.remainder_len;
        if bit_len == 0 {
            0
        } else {
            let bit_offset = self.bit_offset;
            // number of bytes the remainder bits span, including the bit offset
            let byte_len = ceil(bit_len + bit_offset, 8);
            // pointer to the first byte after the complete chunks
            let base = unsafe {
                self.buffer
                    .as_ptr()
                    .add(self.chunk_len * std::mem::size_of::<u64>())
            };

            // assemble the remainder byte by byte, least significant byte first
            let mut bits = unsafe { std::ptr::read(base) } as u64 >> bit_offset;
            for i in 1..byte_len {
                let byte = unsafe { std::ptr::read(base.add(i)) };
                bits |= (byte as u64) << (i * 8 - bit_offset);
            }

            // mask away anything beyond the remainder length
            bits & ((1 << bit_len) - 1)
        }
    }

    /// Returns the number of u64 values needed to cover the range, including
    /// a final u64 for the remainder if present
    #[inline]
    pub fn num_u64s(&self) -> usize {
        if self.remainder_len == 0 {
            self.chunk_len
        } else {
            self.chunk_len + 1
        }
    }

    /// Returns the number of bytes covered by the range
    #[inline]
    pub fn num_bytes(&self) -> usize {
        ceil(self.chunk_len * 64 + self.remainder_len, 8)
    }

    /// Returns an iterator over the complete u64 chunks
    #[inline]
    pub const fn iter(&self) -> BitChunkIterator<'a> {
        BitChunkIterator::<'a> {
            buffer: self.buffer,
            bit_offset: self.bit_offset,
            chunk_len: self.chunk_len,
            index: 0,
        }
    }

    /// Returns an iterator over the chunks, with the remainder appended as a
    /// final zero-padded u64
    #[inline]
    pub fn iter_padded(&self) -> impl Iterator<Item = u64> + 'a {
        self.iter().chain(std::iter::once(self.remainder_bits()))
    }
}

impl<'a> IntoIterator for BitChunks<'a> {
    type Item = u64;
    type IntoIter = BitChunkIterator<'a>;

    fn into_iter(self) -> Self::IntoIter {
        self.iter()
    }
}

impl Iterator for BitChunkIterator<'_> {
    type Item = u64;

    #[inline]
    fn next(&mut self) -> Option<u64> {
        let index = self.index;
        if index >= self.chunk_len {
            return None;
        }

        // the pointer is not necessarily 8-byte aligned, which is fine as the
        // reads below are unaligned reads
        #[allow(clippy::cast_ptr_alignment)]
        let raw_data = self.buffer.as_ptr() as *const u64;

        // SAFETY: `index < chunk_len` and the bounds check in `BitChunks::new`
        // guarantee these 8 bytes lie within the buffer
        let current = unsafe { std::ptr::read_unaligned(raw_data.add(index)).to_le() };

        let bit_offset = self.bit_offset;

        let combined = if bit_offset == 0 {
            current
        } else {
            // SAFETY: with a non-zero bit offset, the last bits of this chunk
            // spill into the following byte, so that byte is within the buffer
            let next =
                unsafe { std::ptr::read_unaligned(raw_data.add(index + 1) as *const u8) as u64 };

            (current >> bit_offset) | (next << (64 - bit_offset))
        };

        self.index = index + 1;

        Some(combined)
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        (
            self.chunk_len - self.index,
            Some(self.chunk_len - self.index),
        )
    }
}

impl ExactSizeIterator for BitChunkIterator<'_> {
    #[inline]
    fn len(&self) -> usize {
        self.chunk_len - self.index
    }
}

#[cfg(test)]
mod tests {
    use rand::distr::uniform::UniformSampler;
    use rand::distr::uniform::UniformUsize;
    use rand::prelude::*;
    use rand::rng;

    use crate::buffer::Buffer;
    use crate::util::bit_chunk_iterator::UnalignedBitChunk;

    #[test]
    fn test_iter_aligned() {
        let input: &[u8] = &[0, 1, 2, 3, 4, 5, 6, 7];
        let buffer: Buffer = Buffer::from(input);

        let bitchunks = buffer.bit_chunks(0, 64);
        let result = bitchunks.into_iter().collect::<Vec<_>>();

        assert_eq!(vec![0x0706050403020100], result);
    }

    #[test]
    fn test_iter_unaligned() {
        let input: &[u8] = &[
            0b00000000, 0b00000001, 0b00000010, 0b00000100, 0b00001000, 0b00010000, 0b00100000,
            0b01000000, 0b11111111,
        ];
        let buffer: Buffer = Buffer::from(input);

        let bitchunks = buffer.bit_chunks(4, 64);

        assert_eq!(0, bitchunks.remainder_len());
        assert_eq!(0, bitchunks.remainder_bits());

        let result = bitchunks.into_iter().collect::<Vec<_>>();

        assert_eq!(
            vec![0b1111010000000010000000010000000010000000010000000010000000010000],
            result
        );
    }

    #[test]
    fn test_iter_unaligned_remainder_1_byte() {
        let input: &[u8] = &[
            0b00000000, 0b00000001, 0b00000010, 0b00000100, 0b00001000, 0b00010000, 0b00100000,
            0b01000000, 0b11111111,
        ];
        let buffer: Buffer = Buffer::from(input);

        let bitchunks = buffer.bit_chunks(4, 66);

        assert_eq!(2, bitchunks.remainder_len());
        assert_eq!(0b00000011, bitchunks.remainder_bits());

        let result = bitchunks.into_iter().collect::<Vec<_>>();

        assert_eq!(
            vec![0b1111010000000010000000010000000010000000010000000010000000010000],
            result
        );
    }

    #[test]
    fn test_iter_unaligned_remainder_bits_across_bytes() {
        let input: &[u8] = &[0b00111111, 0b11111100];
        let buffer: Buffer = Buffer::from(input);

        // the remainder spans both bytes: the last two bits of the first byte
        // and the first five bits of the second
        let bitchunks = buffer.bit_chunks(6, 7);

        assert_eq!(7, bitchunks.remainder_len());
        assert_eq!(0b1110000, bitchunks.remainder_bits());
    }

    #[test]
    fn test_iter_unaligned_remainder_bits_large() {
        let input: &[u8] = &[
            0b11111111, 0b00000000, 0b11111111, 0b00000000, 0b11111111, 0b00000000, 0b11111111,
            0b00000000, 0b11111111,
        ];
        let buffer: Buffer = Buffer::from(input);

        let bitchunks = buffer.bit_chunks(2, 63);

        assert_eq!(63, bitchunks.remainder_len());
        assert_eq!(
            0b100_0000_0011_1111_1100_0000_0011_1111_1100_0000_0011_1111_1100_0000_0011_1111,
            bitchunks.remainder_bits()
        );
    }

    #[test]
    fn test_iter_remainder_out_of_bounds() {
        // ensure reading the remainder bits does not read past the end of the
        // allocation
        const ALLOC_SIZE: usize = 4 * 1024;
        let input = vec![0xFF_u8; ALLOC_SIZE];

        let buffer: Buffer = Buffer::from_vec(input);

        let bitchunks = buffer.bit_chunks(57, ALLOC_SIZE * 8 - 57);

        assert_eq!(u64::MAX, bitchunks.iter().last().unwrap());
        assert_eq!(0x7F, bitchunks.remainder_bits());
    }

    #[test]
    #[should_panic(expected = "offset + len out of bounds")]
    fn test_out_of_bound_should_panic_length_is_more_than_buffer_length() {
        const ALLOC_SIZE: usize = 4 * 1024;
        let input = vec![0xFF_u8; ALLOC_SIZE];

        let buffer: Buffer = Buffer::from_vec(input);

        buffer.bit_chunks(0, (ALLOC_SIZE + 1) * 8);
    }

    #[test]
    #[should_panic(expected = "offset + len out of bounds")]
    fn test_out_of_bound_should_panic_length_is_more_than_buffer_length_but_not_when_not_using_ceil()
    {
        const ALLOC_SIZE: usize = 4 * 1024;
        let input = vec![0xFF_u8; ALLOC_SIZE];

        let buffer: Buffer = Buffer::from_vec(input);

        buffer.bit_chunks(0, (ALLOC_SIZE * 8) + 1);
    }

    #[test]
    #[should_panic(expected = "offset + len out of bounds")]
    fn test_out_of_bound_should_panic_when_offset_is_not_zero_and_length_is_the_entire_buffer_length()
    {
        const ALLOC_SIZE: usize = 4 * 1024;
        let input = vec![0xFF_u8; ALLOC_SIZE];

        let buffer: Buffer = Buffer::from_vec(input);

        buffer.bit_chunks(8, ALLOC_SIZE * 8);
    }

    #[test]
    #[should_panic(expected = "offset + len out of bounds")]
    fn test_out_of_bound_should_panic_when_offset_is_not_zero_and_length_is_the_entire_buffer_length_with_ceil()
    {
        const ALLOC_SIZE: usize = 4 * 1024;
        let input = vec![0xFF_u8; ALLOC_SIZE];

        let buffer: Buffer = Buffer::from_vec(input);

        buffer.bit_chunks(1, ALLOC_SIZE * 8);
    }

    #[test]
    #[allow(clippy::assertions_on_constants)]
    fn test_unaligned_bit_chunk_iterator() {
        // 40 bits starting at bit 0 fit entirely within the prefix u64
        let buffer = Buffer::from(&[0xFF; 5]);
        let unaligned = UnalignedBitChunk::new(buffer.as_slice(), 0, 40);

        assert!(unaligned.chunks().is_empty());
        assert_eq!(unaligned.lead_padding(), 0);
        assert_eq!(unaligned.trailing_padding(), 24);
        assert_eq!(
            unaligned.prefix(),
            Some(0b0000000000000000000000001111111111111111111111111111111111111111)
        );
        assert_eq!(unaligned.suffix(), None);

        let buffer = buffer.slice(1);
        let unaligned = UnalignedBitChunk::new(buffer.as_slice(), 0, 32);

        assert!(unaligned.chunks().is_empty());
        assert_eq!(unaligned.lead_padding(), 0);
        assert_eq!(unaligned.trailing_padding(), 32);
        assert_eq!(
            unaligned.prefix(),
            Some(0b0000000000000000000000000000000011111111111111111111111111111111)
        );
        assert_eq!(unaligned.suffix(), None);

        let unaligned = UnalignedBitChunk::new(buffer.as_slice(), 5, 27);

        assert!(unaligned.chunks().is_empty());
        assert_eq!(unaligned.lead_padding(), 5);
        assert_eq!(unaligned.trailing_padding(), 32);
        assert_eq!(
            unaligned.prefix(),
            Some(0b0000000000000000000000000000000011111111111111111111111111100000)
        );
        assert_eq!(unaligned.suffix(), None);

        let unaligned = UnalignedBitChunk::new(buffer.as_slice(), 12, 20);

        assert!(unaligned.chunks().is_empty());
        assert_eq!(unaligned.lead_padding(), 4);
        assert_eq!(unaligned.trailing_padding(), 40);
        assert_eq!(
            unaligned.prefix(),
            Some(0b0000000000000000000000000000000000000000111111111111111111110000)
        );
        assert_eq!(unaligned.suffix(), None);

        // 14 bytes need both a prefix and a suffix u64
        let buffer = Buffer::from(&[0xFF; 14]);

        let (prefix, aligned, suffix) = unsafe { buffer.as_slice().align_to::<u64>() };
        assert_eq!(prefix.len(), 0);
        assert_eq!(aligned.len(), 1);
        assert_eq!(suffix.len(), 6);

        let unaligned = UnalignedBitChunk::new(buffer.as_slice(), 0, 112);

        assert!(unaligned.chunks().is_empty());
        assert_eq!(unaligned.lead_padding(), 0);
        assert_eq!(unaligned.trailing_padding(), 16);
        assert_eq!(unaligned.prefix(), Some(u64::MAX));
        assert_eq!(unaligned.suffix(), Some((1 << 48) - 1));

        let buffer = Buffer::from(&[0xFF; 16]);

        let (prefix, aligned, suffix) = unsafe { buffer.as_slice().align_to::<u64>() };
        assert_eq!(prefix.len(), 0);
        assert_eq!(aligned.len(), 2);
        assert_eq!(suffix.len(), 0);

        let unaligned = UnalignedBitChunk::new(buffer.as_slice(), 0, 128);

        assert_eq!(unaligned.prefix(), Some(u64::MAX));
        assert_eq!(unaligned.suffix(), Some(u64::MAX));
        assert!(unaligned.chunks().is_empty());

        let buffer = Buffer::from(&[0xFF; 64]);

        let (prefix, aligned, suffix) = unsafe { buffer.as_slice().align_to::<u64>() };
        assert_eq!(prefix.len(), 0);
        assert_eq!(aligned.len(), 8);
        assert_eq!(suffix.len(), 0);

        let unaligned = UnalignedBitChunk::new(buffer.as_slice(), 0, 512);

        assert_eq!(unaligned.suffix(), None);
        assert_eq!(unaligned.prefix(), None);
        assert_eq!(unaligned.chunks(), [u64::MAX; 8].as_slice());
        assert_eq!(unaligned.lead_padding(), 0);
        assert_eq!(unaligned.trailing_padding(), 0);

        // slicing off one byte makes the underlying data 8-byte unaligned
        let buffer = buffer.slice(1);
        let (prefix, aligned, suffix) = unsafe { buffer.as_slice().align_to::<u64>() };
        assert_eq!(prefix.len(), 7);
        assert_eq!(aligned.len(), 7);
        assert_eq!(suffix.len(), 0);

        let unaligned = UnalignedBitChunk::new(buffer.as_slice(), 0, 504);

        // the prefix is zero-padded out to the alignment boundary
        assert_eq!(unaligned.prefix(), Some(u64::MAX - 0xFF));
        assert_eq!(unaligned.suffix(), None);
        assert_eq!(unaligned.chunks(), [u64::MAX; 7].as_slice());
        assert_eq!(unaligned.lead_padding(), 8);
        assert_eq!(unaligned.trailing_padding(), 0);

        let unaligned = UnalignedBitChunk::new(buffer.as_slice(), 17, 300);

        assert_eq!(unaligned.lead_padding(), 25);
        assert_eq!(unaligned.trailing_padding(), 59);
        assert_eq!(unaligned.prefix(), Some(u64::MAX - (1 << 25) + 1));
        assert_eq!(unaligned.suffix(), Some(0b11111));
        assert_eq!(unaligned.chunks(), [u64::MAX; 4].as_slice());

        let unaligned = UnalignedBitChunk::new(buffer.as_slice(), 17, 0);

        assert_eq!(unaligned.prefix(), None);
        assert_eq!(unaligned.suffix(), None);
        assert!(unaligned.chunks().is_empty());
        assert_eq!(unaligned.lead_padding(), 0);
        assert_eq!(unaligned.trailing_padding(), 0);

        let unaligned = UnalignedBitChunk::new(buffer.as_slice(), 17, 1);

        assert_eq!(unaligned.prefix(), Some(2));
        assert_eq!(unaligned.suffix(), None);
        assert!(unaligned.chunks().is_empty());
        assert_eq!(unaligned.lead_padding(), 1);
        assert_eq!(unaligned.trailing_padding(), 62);
    }

    #[test]
    #[cfg_attr(miri, ignore)]
    fn fuzz_unaligned_bit_chunk_iterator() {
        let mut rng = rng();

        let uusize = UniformUsize::new(usize::MIN, usize::MAX).unwrap();
        for _ in 0..100 {
            let mask_len = rng.random_range(0..1024);
            let bools: Vec<_> = std::iter::from_fn(|| Some(rng.random()))
                .take(mask_len)
                .collect();

            let buffer = Buffer::from_iter(bools.iter().cloned());

            let max_offset = 64.min(mask_len);
            let offset = uusize.sample(&mut rng).checked_rem(max_offset).unwrap_or(0);

            let max_truncate = 128.min(mask_len - offset);
            let truncate = uusize
                .sample(&mut rng)
                .checked_rem(max_truncate)
                .unwrap_or(0);

            let unaligned =
                UnalignedBitChunk::new(buffer.as_slice(), offset, mask_len - offset - truncate);

            let bool_slice = &bools[offset..mask_len - truncate];

            let count = unaligned.count_ones();
            let expected_count = bool_slice.iter().filter(|x| **x).count();

            assert_eq!(count, expected_count);

            let collected: Vec<u64> = unaligned.iter().collect();

            // look up the bit at `idx`, accounting for the lead padding
            let get_bit = |idx: usize| -> bool {
                let padded_index = idx + unaligned.lead_padding();
                let chunk_idx = padded_index / 64;
                let bit_idx = padded_index % 64;
                (collected[chunk_idx] & (1 << bit_idx)) != 0
            };

            for (idx, b) in bool_slice.iter().enumerate() {
                assert_eq!(*b, get_bit(idx))
            }
        }
    }
}