// arrow_buffer/util/bit_iterator.rs

// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements.  See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership.  The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License.  You may obtain a copy of the License at
//
//   http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied.  See the License for the
// specific language governing permissions and limitations
// under the License.

//! Types for iterating over packed bitmasks

use crate::bit_chunk_iterator::{UnalignedBitChunk, UnalignedBitChunkIterator};
use crate::bit_util::{ceil, get_bit_raw};

/// Iterator over the bits within a packed bitmask
///
/// To efficiently iterate over just the set bits see [`BitIndexIterator`] and [`BitSliceIterator`]
#[derive(Clone)]
pub struct BitIterator<'a> {
    /// The packed bitmask being read
    buffer: &'a [u8],
    /// Bit offset of the next bit yielded by `next`
    current_offset: usize,
    /// Bit offset one past the last bit yielded by `next_back`
    end_offset: usize,
}

33impl<'a> BitIterator<'a> {
34    /// Create a new [`BitIterator`] from the provided `buffer`,
35    /// and `offset` and `len` in bits
36    ///
37    /// # Panic
38    ///
39    /// Panics if `buffer` is too short for the provided offset and length
40    pub fn new(buffer: &'a [u8], offset: usize, len: usize) -> Self {
41        let end_offset = offset.checked_add(len).unwrap();
42        let required_len = ceil(end_offset, 8);
43        assert!(
44            buffer.len() >= required_len,
45            "BitIterator buffer too small, expected {required_len} got {}",
46            buffer.len()
47        );
48
49        Self {
50            buffer,
51            current_offset: offset,
52            end_offset,
53        }
54    }
55}
56
57impl Iterator for BitIterator<'_> {
58    type Item = bool;
59
60    #[inline]
61    fn next(&mut self) -> Option<Self::Item> {
62        if self.current_offset == self.end_offset {
63            return None;
64        }
65        // Safety:
66        // offsets in bounds
67        let v = unsafe { get_bit_raw(self.buffer.as_ptr(), self.current_offset) };
68        self.current_offset += 1;
69        Some(v)
70    }
71
72    fn size_hint(&self) -> (usize, Option<usize>) {
73        let remaining_bits = self.end_offset - self.current_offset;
74        (remaining_bits, Some(remaining_bits))
75    }
76
77    fn count(self) -> usize
78    where
79        Self: Sized,
80    {
81        self.len()
82    }
83
84    #[inline]
85    fn nth(&mut self, n: usize) -> Option<Self::Item> {
86        // Check if we can advance to the desired offset.
87        // When n is 0 it means we want the next() value
88        // and when n is 1 we want the next().next() value
89        // so adding n to the current offset and not n - 1
90        match self.current_offset.checked_add(n) {
91            // Yes, and still within bounds
92            Some(new_offset) if new_offset < self.end_offset => {
93                self.current_offset = new_offset;
94            }
95
96            // Either overflow or would exceed end_offset
97            _ => {
98                self.current_offset = self.end_offset;
99                return None;
100            }
101        }
102
103        self.next()
104    }
105
106    fn last(mut self) -> Option<Self::Item> {
107        // If already at the end, return None
108        if self.current_offset == self.end_offset {
109            return None;
110        }
111
112        // Go to the one before the last bit
113        self.current_offset = self.end_offset - 1;
114
115        // Return the last bit
116        self.next()
117    }
118
119    fn max(self) -> Option<Self::Item>
120    where
121        Self: Sized,
122        Self::Item: Ord,
123    {
124        if self.current_offset == self.end_offset {
125            return None;
126        }
127
128        // true is greater than false so we only need to check if there's any true bit
129        let mut bit_index_iter = BitIndexIterator::new(
130            self.buffer,
131            self.current_offset,
132            self.end_offset - self.current_offset,
133        );
134
135        if bit_index_iter.next().is_some() {
136            return Some(true);
137        }
138
139        // We know the iterator is not empty and there are no set bits so false is the max
140        Some(false)
141    }
142}
143
144impl ExactSizeIterator for BitIterator<'_> {}
145
146impl DoubleEndedIterator for BitIterator<'_> {
147    fn next_back(&mut self) -> Option<Self::Item> {
148        if self.current_offset == self.end_offset {
149            return None;
150        }
151        self.end_offset -= 1;
152        // Safety:
153        // offsets in bounds
154        let v = unsafe { get_bit_raw(self.buffer.as_ptr(), self.end_offset) };
155        Some(v)
156    }
157
158    fn nth_back(&mut self, n: usize) -> Option<Self::Item> {
159        // Check if we can advance to the desired offset.
160        // When n is 0 it means we want the next_back() value
161        // and when n is 1 we want the next_back().next_back() value
162        // so subtracting n to the current offset and not n - 1
163        match self.end_offset.checked_sub(n) {
164            // Yes, and still within bounds
165            Some(new_offset) if self.current_offset < new_offset => {
166                self.end_offset = new_offset;
167            }
168
169            // Either underflow or would exceed current_offset
170            _ => {
171                self.current_offset = self.end_offset;
172                return None;
173            }
174        }
175
176        self.next_back()
177    }
178}
179
180/// Iterator of contiguous ranges of set bits within a provided packed bitmask
181///
182/// Returns `(usize, usize)` each representing an interval where the corresponding
183/// bits in the provides mask are set
184///
185/// the first value is the start of the range (inclusive) and the second value is the end of the range (exclusive)
186///
187#[derive(Debug)]
188pub struct BitSliceIterator<'a> {
189    iter: UnalignedBitChunkIterator<'a>,
190    len: usize,
191    current_offset: i64,
192    current_chunk: u64,
193}
194
195impl<'a> BitSliceIterator<'a> {
196    /// Create a new [`BitSliceIterator`] from the provided `buffer`,
197    /// and `offset` and `len` in bits
198    pub fn new(buffer: &'a [u8], offset: usize, len: usize) -> Self {
199        let chunk = UnalignedBitChunk::new(buffer, offset, len);
200        let mut iter = chunk.iter();
201
202        let current_offset = -(chunk.lead_padding() as i64);
203        let current_chunk = iter.next().unwrap_or(0);
204
205        Self {
206            iter,
207            len,
208            current_offset,
209            current_chunk,
210        }
211    }
212
213    /// Returns `Some((chunk_offset, bit_offset))` for the next chunk that has at
214    /// least one bit set, or None if there is no such chunk.
215    ///
216    /// Where `chunk_offset` is the bit offset to the current `u64` chunk
217    /// and `bit_offset` is the offset of the first `1` bit in that chunk
218    fn advance_to_set_bit(&mut self) -> Option<(i64, u32)> {
219        loop {
220            if self.current_chunk != 0 {
221                // Find the index of the first 1
222                let bit_pos = self.current_chunk.trailing_zeros();
223                return Some((self.current_offset, bit_pos));
224            }
225
226            self.current_chunk = self.iter.next()?;
227            self.current_offset += 64;
228        }
229    }
230}
231
232impl Iterator for BitSliceIterator<'_> {
233    type Item = (usize, usize);
234
235    fn next(&mut self) -> Option<Self::Item> {
236        // Used as termination condition
237        if self.len == 0 {
238            return None;
239        }
240
241        let (start_chunk, start_bit) = self.advance_to_set_bit()?;
242
243        // Set bits up to start
244        self.current_chunk |= (1 << start_bit) - 1;
245
246        loop {
247            if self.current_chunk != u64::MAX {
248                // Find the index of the first 0
249                let end_bit = self.current_chunk.trailing_ones();
250
251                // Zero out up to end_bit
252                self.current_chunk &= !((1 << end_bit) - 1);
253
254                return Some((
255                    (start_chunk + start_bit as i64) as usize,
256                    (self.current_offset + end_bit as i64) as usize,
257                ));
258            }
259
260            match self.iter.next() {
261                Some(next) => {
262                    self.current_chunk = next;
263                    self.current_offset += 64;
264                }
265                None => {
266                    return Some((
267                        (start_chunk + start_bit as i64) as usize,
268                        std::mem::replace(&mut self.len, 0),
269                    ));
270                }
271            }
272        }
273    }
274}
275
276/// An iterator of `usize` whose index in a provided bitmask is true
277///
278/// This provides the best performance on most masks, apart from those which contain
279/// large runs and therefore favour [`BitSliceIterator`]
280#[derive(Debug)]
281pub struct BitIndexIterator<'a> {
282    current_chunk: u64,
283    chunk_offset: i64,
284    iter: UnalignedBitChunkIterator<'a>,
285}
286
287impl<'a> BitIndexIterator<'a> {
288    /// Create a new [`BitIndexIterator`] from the provide `buffer`,
289    /// and `offset` and `len` in bits
290    pub fn new(buffer: &'a [u8], offset: usize, len: usize) -> Self {
291        let chunks = UnalignedBitChunk::new(buffer, offset, len);
292        let mut iter = chunks.iter();
293
294        let current_chunk = iter.next().unwrap_or(0);
295        let chunk_offset = -(chunks.lead_padding() as i64);
296
297        Self {
298            current_chunk,
299            chunk_offset,
300            iter,
301        }
302    }
303}
304
305impl Iterator for BitIndexIterator<'_> {
306    type Item = usize;
307
308    #[inline(always)]
309    fn next(&mut self) -> Option<Self::Item> {
310        loop {
311            if self.current_chunk != 0 {
312                let bit_pos = self.current_chunk.trailing_zeros();
313                self.current_chunk &= self.current_chunk - 1;
314                return Some((self.chunk_offset + bit_pos as i64) as usize);
315            }
316
317            self.current_chunk = self.iter.next()?;
318            self.chunk_offset += 64;
319        }
320    }
321}
322
323/// An iterator of u32 whose index in a provided bitmask is true
324/// Respects arbitrary offsets and slice lead/trail padding exactly like BitIndexIterator
325#[derive(Debug)]
326pub struct BitIndexU32Iterator<'a> {
327    curr: u64,
328    chunk_offset: i64,
329    iter: UnalignedBitChunkIterator<'a>,
330}
331
332impl<'a> BitIndexU32Iterator<'a> {
333    /// Create a new [BitIndexU32Iterator] from the provided buffer,
334    /// offset and len in bits.
335    pub fn new(buffer: &'a [u8], offset: usize, len: usize) -> Self {
336        // Build the aligned chunks (including prefix/suffix masked)
337        let chunks = UnalignedBitChunk::new(buffer, offset, len);
338        let mut iter = chunks.iter();
339
340        // First 64-bit word (masked for lead padding), or 0 if empty
341        let curr = iter.next().unwrap_or(0);
342        // Negative lead padding ensures the first bit in curr maps to index 0
343        let chunk_offset = -(chunks.lead_padding() as i64);
344
345        Self {
346            curr,
347            chunk_offset,
348            iter,
349        }
350    }
351}
352
353impl<'a> Iterator for BitIndexU32Iterator<'a> {
354    type Item = u32;
355
356    #[inline(always)]
357    fn next(&mut self) -> Option<u32> {
358        loop {
359            if self.curr != 0 {
360                // Position of least-significant set bit
361                let tz = self.curr.trailing_zeros();
362                // Clear that bit
363                self.curr &= self.curr - 1;
364                // Return global index = chunk_offset + tz
365                return Some((self.chunk_offset + tz as i64) as u32);
366            }
367            // Advance to next 64-bit chunk
368            match self.iter.next() {
369                Some(next_chunk) => {
370                    // Move offset forward by 64 bits
371                    self.chunk_offset += 64;
372                    self.curr = next_chunk;
373                }
374                None => return None,
375            }
376        }
377    }
378}
379
380/// Calls the provided closure for each index in the provided null mask that is set,
381/// using an adaptive strategy based on the null count
382///
383/// Ideally this would be encapsulated in an [`Iterator`] that would determine the optimal
384/// strategy up front, and then yield indexes based on this.
385///
386/// Unfortunately, external iteration based on the resulting [`Iterator`] would match the strategy
387/// variant on each call to [`Iterator::next`], and LLVM generally cannot eliminate this.
388///
389/// One solution to this might be internal iteration, e.g. [`Iterator::try_fold`], however,
390/// it is currently [not possible] to override this for custom iterators in stable Rust.
391///
392/// As such this is the next best option
393///
394/// [not possible]: https://github.com/rust-lang/rust/issues/69595
395#[inline]
396pub fn try_for_each_valid_idx<E, F: FnMut(usize) -> Result<(), E>>(
397    len: usize,
398    offset: usize,
399    null_count: usize,
400    nulls: Option<&[u8]>,
401    f: F,
402) -> Result<(), E> {
403    let valid_count = len - null_count;
404
405    if valid_count == len {
406        (0..len).try_for_each(f)
407    } else if null_count != len {
408        BitIndexIterator::new(nulls.unwrap(), offset, len).try_for_each(f)
409    } else {
410        Ok(())
411    }
412}
413
// Note: further tests located in arrow_select::filter module

#[cfg(test)]
mod tests {
    use super::*;
    use crate::BooleanBuffer;
    use rand::rngs::StdRng;
    use rand::{Rng, SeedableRng};
    use std::fmt::Debug;
    use std::iter::Copied;
    use std::slice::Iter;

426    #[test]
427    fn test_bit_iterator_size_hint() {
428        let mut b = BitIterator::new(&[0b00000011], 0, 2);
429        assert_eq!(
430            b.size_hint(),
431            (2, Some(2)),
432            "Expected size_hint to be (2, Some(2))"
433        );
434
435        b.next();
436        assert_eq!(
437            b.size_hint(),
438            (1, Some(1)),
439            "Expected size_hint to be (1, Some(1)) after one bit consumed"
440        );
441
442        b.next();
443        assert_eq!(
444            b.size_hint(),
445            (0, Some(0)),
446            "Expected size_hint to be (0, Some(0)) after all bits consumed"
447        );
448    }
449
450    #[test]
451    fn test_bit_iterator() {
452        let mask = &[0b00010010, 0b00100011, 0b00000101, 0b00010001, 0b10010011];
453        let actual: Vec<_> = BitIterator::new(mask, 0, 5).collect();
454        assert_eq!(actual, &[false, true, false, false, true]);
455
456        let actual: Vec<_> = BitIterator::new(mask, 4, 5).collect();
457        assert_eq!(actual, &[true, false, false, false, true]);
458
459        let actual: Vec<_> = BitIterator::new(mask, 12, 14).collect();
460        assert_eq!(
461            actual,
462            &[
463                false, true, false, false, true, false, true, false, false, false, false, false,
464                true, false
465            ]
466        );
467
468        assert_eq!(BitIterator::new(mask, 0, 0).count(), 0);
469        assert_eq!(BitIterator::new(mask, 40, 0).count(), 0);
470    }
471
472    #[test]
473    #[should_panic(expected = "BitIterator buffer too small, expected 3 got 2")]
474    fn test_bit_iterator_bounds() {
475        let mask = &[223, 23];
476        BitIterator::new(mask, 17, 0);
477    }
478
479    #[test]
480    fn test_bit_index_u32_iterator_basic() {
481        let mask = &[0b00010010, 0b00100011];
482
483        let result: Vec<u32> = BitIndexU32Iterator::new(mask, 0, 16).collect();
484        let expected: Vec<u32> = BitIndexIterator::new(mask, 0, 16)
485            .map(|i| i as u32)
486            .collect();
487        assert_eq!(result, expected);
488
489        let result: Vec<u32> = BitIndexU32Iterator::new(mask, 4, 8).collect();
490        let expected: Vec<u32> = BitIndexIterator::new(mask, 4, 8)
491            .map(|i| i as u32)
492            .collect();
493        assert_eq!(result, expected);
494
495        let result: Vec<u32> = BitIndexU32Iterator::new(mask, 10, 4).collect();
496        let expected: Vec<u32> = BitIndexIterator::new(mask, 10, 4)
497            .map(|i| i as u32)
498            .collect();
499        assert_eq!(result, expected);
500
501        let result: Vec<u32> = BitIndexU32Iterator::new(mask, 0, 0).collect();
502        let expected: Vec<u32> = BitIndexIterator::new(mask, 0, 0)
503            .map(|i| i as u32)
504            .collect();
505        assert_eq!(result, expected);
506    }
507
508    #[test]
509    fn test_bit_index_u32_iterator_all_set() {
510        let mask = &[0xFF, 0xFF];
511        let result: Vec<u32> = BitIndexU32Iterator::new(mask, 0, 16).collect();
512        let expected: Vec<u32> = BitIndexIterator::new(mask, 0, 16)
513            .map(|i| i as u32)
514            .collect();
515        assert_eq!(result, expected);
516    }
517
518    #[test]
519    fn test_bit_index_u32_iterator_none_set() {
520        let mask = &[0x00, 0x00];
521        let result: Vec<u32> = BitIndexU32Iterator::new(mask, 0, 16).collect();
522        let expected: Vec<u32> = BitIndexIterator::new(mask, 0, 16)
523            .map(|i| i as u32)
524            .collect();
525        assert_eq!(result, expected);
526    }
527
528    #[test]
529    fn test_bit_index_u32_cross_chunk() {
530        let mut buf = vec![0u8; 16];
531        for bit in 60..68 {
532            let byte = (bit / 8) as usize;
533            let bit_in_byte = bit % 8;
534            buf[byte] |= 1 << bit_in_byte;
535        }
536        let offset = 58;
537        let len = 10;
538
539        let result: Vec<u32> = BitIndexU32Iterator::new(&buf, offset, len).collect();
540        let expected: Vec<u32> = BitIndexIterator::new(&buf, offset, len)
541            .map(|i| i as u32)
542            .collect();
543        assert_eq!(result, expected);
544    }
545
546    #[test]
547    fn test_bit_index_u32_unaligned_offset() {
548        let mask = &[0b0110_1100, 0b1010_0000];
549        let offset = 2;
550        let len = 12;
551
552        let result: Vec<u32> = BitIndexU32Iterator::new(mask, offset, len).collect();
553        let expected: Vec<u32> = BitIndexIterator::new(mask, offset, len)
554            .map(|i| i as u32)
555            .collect();
556        assert_eq!(result, expected);
557    }
558
559    #[test]
560    fn test_bit_index_u32_long_all_set() {
561        let len = 200;
562        let num_bytes = len / 8 + if len % 8 != 0 { 1 } else { 0 };
563        let bytes = vec![0xFFu8; num_bytes];
564
565        let result: Vec<u32> = BitIndexU32Iterator::new(&bytes, 0, len).collect();
566        let expected: Vec<u32> = BitIndexIterator::new(&bytes, 0, len)
567            .map(|i| i as u32)
568            .collect();
569        assert_eq!(result, expected);
570    }
571
572    #[test]
573    fn test_bit_index_u32_none_set() {
574        let len = 50;
575        let num_bytes = len / 8 + if len % 8 != 0 { 1 } else { 0 };
576        let bytes = vec![0u8; num_bytes];
577
578        let result: Vec<u32> = BitIndexU32Iterator::new(&bytes, 0, len).collect();
579        let expected: Vec<u32> = BitIndexIterator::new(&bytes, 0, len)
580            .map(|i| i as u32)
581            .collect();
582        assert_eq!(result, expected);
583    }
584
585    trait SharedBetweenBitIteratorAndSliceIter:
586        ExactSizeIterator<Item = bool> + DoubleEndedIterator<Item = bool>
587    {
588    }
589    impl<T: ?Sized + ExactSizeIterator<Item = bool> + DoubleEndedIterator<Item = bool>>
590        SharedBetweenBitIteratorAndSliceIter for T
591    {
592    }
593
594    fn get_bit_iterator_cases() -> impl Iterator<Item = (BooleanBuffer, Vec<bool>)> {
595        let mut rng = StdRng::seed_from_u64(42);
596
597        [0, 1, 6, 8, 100, 164]
598            .map(|len| {
599                let source = (0..len).map(|_| rng.random_bool(0.5)).collect::<Vec<_>>();
600
601                (BooleanBuffer::from(source.as_slice()), source)
602            })
603            .into_iter()
604    }
605
606    fn setup_and_assert(
607        setup_iters: impl Fn(&mut dyn SharedBetweenBitIteratorAndSliceIter),
608        assert_fn: impl Fn(BitIterator, Copied<Iter<bool>>),
609    ) {
610        for (boolean_buffer, source) in get_bit_iterator_cases() {
611            // Not using `boolean_buffer.iter()` in case the implementation change to not call BitIterator internally
612            // in which case the test would not test what it intends to test
613            let mut actual = BitIterator::new(boolean_buffer.values(), 0, boolean_buffer.len());
614            let mut expected = source.iter().copied();
615
616            setup_iters(&mut actual);
617            setup_iters(&mut expected);
618
619            assert_fn(actual, expected);
620        }
621    }
622
623    /// Trait representing an operation on a BitIterator
624    /// that can be compared against a slice iterator
625    trait BitIteratorOp {
626        /// What the operation returns (e.g. Option<bool> for last/max, usize for count, etc)
627        type Output: PartialEq + Debug;
628
629        /// The name of the operation, used for error messages
630        const NAME: &'static str;
631
632        /// Get the value of the operation for the provided iterator
633        /// This will be either a BitIterator or a slice iterator to make sure they produce the same result
634        fn get_value<T: SharedBetweenBitIteratorAndSliceIter>(iter: T) -> Self::Output;
635    }
636
637    /// Helper function that will assert that the provided operation
638    /// produces the same result for both BitIterator and slice iterator
639    /// under various consumption patterns (e.g. some calls to next/next_back/consume_all/etc)
640    fn assert_bit_iterator_cases<O: BitIteratorOp>() {
641        setup_and_assert(
642            |_iter: &mut dyn SharedBetweenBitIteratorAndSliceIter| {},
643            |actual, expected| {
644                let current_iterator_values: Vec<bool> = expected.clone().collect();
645                assert_eq!(
646                    O::get_value(actual),
647                    O::get_value(expected),
648                    "Failed on op {} for new iter (left actual, right expected) ({current_iterator_values:?})",
649                    O::NAME
650                );
651            },
652        );
653
654        setup_and_assert(
655            |iter: &mut dyn SharedBetweenBitIteratorAndSliceIter| {
656                iter.next();
657            },
658            |actual, expected| {
659                let current_iterator_values: Vec<bool> = expected.clone().collect();
660
661                assert_eq!(
662                    O::get_value(actual),
663                    O::get_value(expected),
664                    "Failed on op {} for new iter after consuming 1 element from the start (left actual, right expected) ({current_iterator_values:?})",
665                    O::NAME
666                );
667            },
668        );
669
670        setup_and_assert(
671            |iter: &mut dyn SharedBetweenBitIteratorAndSliceIter| {
672                iter.next_back();
673            },
674            |actual, expected| {
675                let current_iterator_values: Vec<bool> = expected.clone().collect();
676
677                assert_eq!(
678                    O::get_value(actual),
679                    O::get_value(expected),
680                    "Failed on op {} for new iter after consuming 1 element from the end (left actual, right expected) ({current_iterator_values:?})",
681                    O::NAME
682                );
683            },
684        );
685
686        setup_and_assert(
687            |iter: &mut dyn SharedBetweenBitIteratorAndSliceIter| {
688                iter.next();
689                iter.next_back();
690            },
691            |actual, expected| {
692                let current_iterator_values: Vec<bool> = expected.clone().collect();
693
694                assert_eq!(
695                    O::get_value(actual),
696                    O::get_value(expected),
697                    "Failed on op {} for new iter after consuming 1 element from start and end (left actual, right expected) ({current_iterator_values:?})",
698                    O::NAME
699                );
700            },
701        );
702
703        setup_and_assert(
704            |iter: &mut dyn SharedBetweenBitIteratorAndSliceIter| {
705                while iter.len() > 1 {
706                    iter.next();
707                }
708            },
709            |actual, expected| {
710                let current_iterator_values: Vec<bool> = expected.clone().collect();
711
712                assert_eq!(
713                    O::get_value(actual),
714                    O::get_value(expected),
715                    "Failed on op {} for new iter after consuming all from the start but 1 (left actual, right expected) ({current_iterator_values:?})",
716                    O::NAME
717                );
718            },
719        );
720
721        setup_and_assert(
722            |iter: &mut dyn SharedBetweenBitIteratorAndSliceIter| {
723                while iter.len() > 1 {
724                    iter.next_back();
725                }
726            },
727            |actual, expected| {
728                let current_iterator_values: Vec<bool> = expected.clone().collect();
729
730                assert_eq!(
731                    O::get_value(actual),
732                    O::get_value(expected),
733                    "Failed on op {} for new iter after consuming all from the end but 1 (left actual, right expected) ({current_iterator_values:?})",
734                    O::NAME
735                );
736            },
737        );
738
739        setup_and_assert(
740            |iter: &mut dyn SharedBetweenBitIteratorAndSliceIter| {
741                while iter.next().is_some() {}
742            },
743            |actual, expected| {
744                let current_iterator_values: Vec<bool> = expected.clone().collect();
745
746                assert_eq!(
747                    O::get_value(actual),
748                    O::get_value(expected),
749                    "Failed on op {} for new iter after consuming all from the start (left actual, right expected) ({current_iterator_values:?})",
750                    O::NAME
751                );
752            },
753        );
754
755        setup_and_assert(
756            |iter: &mut dyn SharedBetweenBitIteratorAndSliceIter| {
757                while iter.next_back().is_some() {}
758            },
759            |actual, expected| {
760                let current_iterator_values: Vec<bool> = expected.clone().collect();
761
762                assert_eq!(
763                    O::get_value(actual),
764                    O::get_value(expected),
765                    "Failed on op {} for new iter after consuming all from the end (left actual, right expected) ({current_iterator_values:?})",
766                    O::NAME
767                );
768            },
769        );
770    }
771
772    #[test]
773    fn assert_bit_iterator_count() {
774        struct CountOp;
775
776        impl BitIteratorOp for CountOp {
777            type Output = usize;
778            const NAME: &'static str = "count";
779
780            fn get_value<T: SharedBetweenBitIteratorAndSliceIter>(iter: T) -> Self::Output {
781                iter.count()
782            }
783        }
784
785        assert_bit_iterator_cases::<CountOp>()
786    }
787
788    #[test]
789    fn assert_bit_iterator_last() {
790        struct LastOp;
791
792        impl BitIteratorOp for LastOp {
793            type Output = Option<bool>;
794            const NAME: &'static str = "last";
795
796            fn get_value<T: SharedBetweenBitIteratorAndSliceIter>(iter: T) -> Self::Output {
797                iter.last()
798            }
799        }
800
801        assert_bit_iterator_cases::<LastOp>()
802    }
803
804    #[test]
805    fn assert_bit_iterator_max() {
806        struct MaxOp;
807
808        impl BitIteratorOp for MaxOp {
809            type Output = Option<bool>;
810            const NAME: &'static str = "max";
811
812            fn get_value<T: SharedBetweenBitIteratorAndSliceIter>(iter: T) -> Self::Output {
813                iter.max()
814            }
815        }
816
817        assert_bit_iterator_cases::<MaxOp>()
818    }
819
820    #[test]
821    fn assert_bit_iterator_nth_0() {
822        struct NthOp<const BACK: bool>;
823
824        impl<const BACK: bool> BitIteratorOp for NthOp<BACK> {
825            type Output = Option<bool>;
826            const NAME: &'static str = if BACK { "nth_back(0)" } else { "nth(0)" };
827
828            fn get_value<T: SharedBetweenBitIteratorAndSliceIter>(mut iter: T) -> Self::Output {
829                if BACK { iter.nth_back(0) } else { iter.nth(0) }
830            }
831        }
832
833        assert_bit_iterator_cases::<NthOp<false>>();
834        assert_bit_iterator_cases::<NthOp<true>>();
835    }
836
837    #[test]
838    fn assert_bit_iterator_nth_1() {
839        struct NthOp<const BACK: bool>;
840
841        impl<const BACK: bool> BitIteratorOp for NthOp<BACK> {
842            type Output = Option<bool>;
843            const NAME: &'static str = if BACK { "nth_back(1)" } else { "nth(1)" };
844
845            fn get_value<T: SharedBetweenBitIteratorAndSliceIter>(mut iter: T) -> Self::Output {
846                if BACK { iter.nth_back(1) } else { iter.nth(1) }
847            }
848        }
849
850        assert_bit_iterator_cases::<NthOp<false>>();
851        assert_bit_iterator_cases::<NthOp<true>>();
852    }
853
854    #[test]
855    fn assert_bit_iterator_nth_after_end() {
856        struct NthOp<const BACK: bool>;
857
858        impl<const BACK: bool> BitIteratorOp for NthOp<BACK> {
859            type Output = Option<bool>;
860            const NAME: &'static str = if BACK {
861                "nth_back(iter.len() + 1)"
862            } else {
863                "nth(iter.len() + 1)"
864            };
865
866            fn get_value<T: SharedBetweenBitIteratorAndSliceIter>(mut iter: T) -> Self::Output {
867                if BACK {
868                    iter.nth_back(iter.len() + 1)
869                } else {
870                    iter.nth(iter.len() + 1)
871                }
872            }
873        }
874
875        assert_bit_iterator_cases::<NthOp<false>>();
876        assert_bit_iterator_cases::<NthOp<true>>();
877    }
878
879    #[test]
880    fn assert_bit_iterator_nth_len() {
881        struct NthOp<const BACK: bool>;
882
883        impl<const BACK: bool> BitIteratorOp for NthOp<BACK> {
884            type Output = Option<bool>;
885            const NAME: &'static str = if BACK {
886                "nth_back(iter.len())"
887            } else {
888                "nth(iter.len())"
889            };
890
891            fn get_value<T: SharedBetweenBitIteratorAndSliceIter>(mut iter: T) -> Self::Output {
892                if BACK {
893                    iter.nth_back(iter.len())
894                } else {
895                    iter.nth(iter.len())
896                }
897            }
898        }
899
900        assert_bit_iterator_cases::<NthOp<false>>();
901        assert_bit_iterator_cases::<NthOp<true>>();
902    }
903
904    #[test]
905    fn assert_bit_iterator_nth_last() {
906        struct NthOp<const BACK: bool>;
907
908        impl<const BACK: bool> BitIteratorOp for NthOp<BACK> {
909            type Output = Option<bool>;
910            const NAME: &'static str = if BACK {
911                "nth_back(iter.len().saturating_sub(1))"
912            } else {
913                "nth(iter.len().saturating_sub(1))"
914            };
915
916            fn get_value<T: SharedBetweenBitIteratorAndSliceIter>(mut iter: T) -> Self::Output {
917                if BACK {
918                    iter.nth_back(iter.len().saturating_sub(1))
919                } else {
920                    iter.nth(iter.len().saturating_sub(1))
921                }
922            }
923        }
924
925        assert_bit_iterator_cases::<NthOp<false>>();
926        assert_bit_iterator_cases::<NthOp<true>>();
927    }
928
929    #[test]
930    fn assert_bit_iterator_nth_and_reuse() {
931        setup_and_assert(
932            |_| {},
933            |actual, expected| {
934                {
935                    let mut actual = actual.clone();
936                    let mut expected = expected.clone();
937                    for _ in 0..expected.len() {
938                        #[allow(clippy::iter_nth_zero)]
939                        let actual_val = actual.nth(0);
940                        #[allow(clippy::iter_nth_zero)]
941                        let expected_val = expected.nth(0);
942                        assert_eq!(actual_val, expected_val, "Failed on nth(0)");
943                    }
944                }
945
946                {
947                    let mut actual = actual.clone();
948                    let mut expected = expected.clone();
949                    for _ in 0..expected.len() {
950                        let actual_val = actual.nth(1);
951                        let expected_val = expected.nth(1);
952                        assert_eq!(actual_val, expected_val, "Failed on nth(1)");
953                    }
954                }
955
956                {
957                    let mut actual = actual.clone();
958                    let mut expected = expected.clone();
959                    for _ in 0..expected.len() {
960                        let actual_val = actual.nth(2);
961                        let expected_val = expected.nth(2);
962                        assert_eq!(actual_val, expected_val, "Failed on nth(2)");
963                    }
964                }
965            },
966        );
967    }
968
969    #[test]
970    fn assert_bit_iterator_nth_back_and_reuse() {
971        setup_and_assert(
972            |_| {},
973            |actual, expected| {
974                {
975                    let mut actual = actual.clone();
976                    let mut expected = expected.clone();
977                    for _ in 0..expected.len() {
978                        #[allow(clippy::iter_nth_zero)]
979                        let actual_val = actual.nth_back(0);
980                        let expected_val = expected.nth_back(0);
981                        assert_eq!(actual_val, expected_val, "Failed on nth_back(0)");
982                    }
983                }
984
985                {
986                    let mut actual = actual.clone();
987                    let mut expected = expected.clone();
988                    for _ in 0..expected.len() {
989                        let actual_val = actual.nth_back(1);
990                        let expected_val = expected.nth_back(1);
991                        assert_eq!(actual_val, expected_val, "Failed on nth_back(1)");
992                    }
993                }
994
995                {
996                    let mut actual = actual.clone();
997                    let mut expected = expected.clone();
998                    for _ in 0..expected.len() {
999                        let actual_val = actual.nth_back(2);
1000                        let expected_val = expected.nth_back(2);
1001                        assert_eq!(actual_val, expected_val, "Failed on nth_back(2)");
1002                    }
1003                }
1004            },
1005        );
1006    }
1007}