#![no_std]
#![deny(clippy::undocumented_unsafe_blocks)]

extern crate alloc;
use alloc::{vec, vec::Vec};

mod block;
mod range;

#[cfg(feature = "serde")]
extern crate serde;
#[cfg(feature = "serde")]
mod serde_impl;

use core::fmt::Write;
use core::fmt::{Binary, Display, Error, Formatter};

use core::cmp::Ordering;
use core::hash::Hash;
use core::iter::{Chain, FusedIterator};
use core::mem::ManuallyDrop;
use core::mem::MaybeUninit;
use core::ops::{BitAnd, BitAndAssign, BitOr, BitOrAssign, BitXor, BitXorAssign, Index};
use core::ptr::NonNull;
pub use range::IndexRange;

pub(crate) const BITS: usize = core::mem::size_of::<Block>() * 8;
#[cfg(feature = "serde")]
pub(crate) const BYTES: usize = core::mem::size_of::<Block>();

use block::Block as SimdBlock;
pub type Block = usize;

#[inline]
fn div_rem(x: usize, denominator: usize) -> (usize, usize) {
    (x / denominator, x % denominator)
}

fn vec_into_parts<T>(vec: Vec<T>) -> (NonNull<T>, usize, usize) {
    let mut vec = ManuallyDrop::new(vec);
    (
        // SAFETY: A `Vec` never has a null pointer.
        unsafe { NonNull::new_unchecked(vec.as_mut_ptr()) },
        vec.capacity(),
        vec.len(),
    )
}

#[derive(Debug, Eq)]
pub struct FixedBitSet {
    pub(crate) data: NonNull<MaybeUninit<SimdBlock>>,
    capacity: usize,
    /// Length in bits.
    pub(crate) length: usize,
}

// SAFETY: `FixedBitSet` owns its data, so it can be sent across threads.
unsafe impl Send for FixedBitSet {}
// SAFETY: `FixedBitSet` only exposes its data through `&self` methods.
unsafe impl Sync for FixedBitSet {}

impl FixedBitSet {
    pub const fn new() -> Self {
        FixedBitSet {
            data: NonNull::dangling(),
            capacity: 0,
            length: 0,
        }
    }

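    /// Creates a bitset with room for `bits` bits, all initially unset.
    ///
    /// A minimal usage sketch (illustrative, not taken from the crate's
    /// original docs):
    ///
    /// ```
    /// use fixedbitset::FixedBitSet;
    ///
    /// let bitset = FixedBitSet::with_capacity(10);
    /// assert_eq!(bitset.len(), 10);
    /// assert!(bitset.is_clear());
    /// ```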
    pub fn with_capacity(bits: usize) -> Self {
        let (mut blocks, rem) = div_rem(bits, SimdBlock::BITS);
        blocks += (rem > 0) as usize;
        Self::from_blocks_and_len(vec![SimdBlock::NONE; blocks], bits)
    }

    #[inline]
    fn from_blocks_and_len(data: Vec<SimdBlock>, length: usize) -> Self {
        let (data, capacity, _) = vec_into_parts(data);
        FixedBitSet {
            data: data.cast(),
            capacity,
            length,
        }
    }

    pub fn with_capacity_and_blocks<I: IntoIterator<Item = Block>>(bits: usize, blocks: I) -> Self {
        let mut bitset = Self::with_capacity(bits);
        for (subblock, value) in bitset.as_mut_slice().iter_mut().zip(blocks.into_iter()) {
            *subblock = value;
        }
        bitset
    }

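    /// Grows the bitset to `bits` bits; new bits are unset. Never shrinks.
    ///
    /// A small illustrative sketch:
    ///
    /// ```
    /// use fixedbitset::FixedBitSet;
    ///
    /// let mut bitset = FixedBitSet::with_capacity(4);
    /// bitset.grow(8);
    /// assert_eq!(bitset.len(), 8);
    /// bitset.grow(2); // no-op: `grow` never shrinks
    /// assert_eq!(bitset.len(), 8);
    /// ```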
    #[inline]
    pub fn grow(&mut self, bits: usize) {
        #[cold]
        #[track_caller]
        #[inline(never)]
        fn do_grow(slf: &mut FixedBitSet, bits: usize) {
            // SAFETY: The fill value is an initialized, empty block.
            unsafe { slf.grow_inner(bits, MaybeUninit::new(SimdBlock::NONE)) };
        }

        if bits > self.length {
            do_grow(self, bits);
        }
    }

    /// # Safety
    /// If `fill` is uninitialized, the new blocks must be initialized
    /// before the set is used.
    #[inline(always)]
    unsafe fn grow_inner(&mut self, bits: usize, fill: MaybeUninit<SimdBlock>) {
        // SAFETY: The data pointer, simd block length and capacity come from
        // a `Vec` that was disassembled with `vec_into_parts`.
        let mut data = unsafe {
            Vec::from_raw_parts(self.data.as_ptr(), self.simd_block_len(), self.capacity)
        };
        let (mut blocks, rem) = div_rem(bits, SimdBlock::BITS);
        blocks += (rem > 0) as usize;
        data.resize(blocks, fill);
        let (data, capacity, _) = vec_into_parts(data);
        self.data = data;
        self.capacity = capacity;
        self.length = bits;
    }

    #[inline]
    unsafe fn get_unchecked(&self, subblock: usize) -> &Block {
        &*self.data.as_ptr().cast::<Block>().add(subblock)
    }

    #[inline]
    unsafe fn get_unchecked_mut(&mut self, subblock: usize) -> &mut Block {
        &mut *self.data.as_ptr().cast::<Block>().add(subblock)
    }

    #[inline]
    fn usize_len(&self) -> usize {
        let (mut blocks, rem) = div_rem(self.length, BITS);
        blocks += (rem > 0) as usize;
        blocks
    }

    #[inline]
    fn simd_block_len(&self) -> usize {
        let (mut blocks, rem) = div_rem(self.length, SimdBlock::BITS);
        blocks += (rem > 0) as usize;
        blocks
    }

    #[inline]
    fn batch_count_ones(blocks: impl IntoIterator<Item = Block>) -> usize {
        blocks.into_iter().map(|x| x.count_ones() as usize).sum()
    }

    #[inline]
    fn as_simd_slice(&self) -> &[SimdBlock] {
        // SAFETY: The pointer is valid for `simd_block_len` initialized blocks.
        unsafe { core::slice::from_raw_parts(self.data.as_ptr().cast(), self.simd_block_len()) }
    }

    #[inline]
    fn as_mut_simd_slice(&mut self) -> &mut [SimdBlock] {
        // SAFETY: The pointer is valid for `simd_block_len` initialized blocks,
        // and `&mut self` guarantees exclusive access.
        unsafe { core::slice::from_raw_parts_mut(self.data.as_ptr().cast(), self.simd_block_len()) }
    }

    #[inline]
    fn as_simd_slice_uninit(&self) -> &[MaybeUninit<SimdBlock>] {
        // SAFETY: The pointer is valid for `simd_block_len` blocks.
        unsafe { core::slice::from_raw_parts(self.data.as_ptr(), self.simd_block_len()) }
    }

    #[inline]
    fn as_mut_simd_slice_uninit(&mut self) -> &mut [MaybeUninit<SimdBlock>] {
        // SAFETY: The pointer is valid for `simd_block_len` blocks,
        // and `&mut self` guarantees exclusive access.
        unsafe { core::slice::from_raw_parts_mut(self.data.as_ptr(), self.simd_block_len()) }
    }

    #[inline]
    pub fn grow_and_insert(&mut self, bits: usize) {
        self.grow(bits + 1);

        let (blocks, rem) = div_rem(bits, BITS);
        // SAFETY: The bit is in bounds after the grow above.
        unsafe {
            *self.get_unchecked_mut(blocks) |= 1 << rem;
        }
    }

    #[inline]
    pub fn len(&self) -> usize {
        self.length
    }

    #[inline]
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    #[inline]
    pub fn is_clear(&self) -> bool {
        self.as_simd_slice().iter().all(|block| block.is_empty())
    }

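    /// Returns the smallest set bit, or `None` if no bits are set.
    ///
    /// An illustrative sketch:
    ///
    /// ```
    /// use fixedbitset::FixedBitSet;
    ///
    /// let bitset: FixedBitSet = vec![7, 3, 5].into_iter().collect();
    /// assert_eq!(bitset.minimum(), Some(3));
    /// assert_eq!(FixedBitSet::new().minimum(), None);
    /// ```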
    #[inline]
    pub fn minimum(&self) -> Option<usize> {
        let (block_idx, block) = self
            .as_simd_slice()
            .iter()
            .enumerate()
            .find(|&(_, block)| !block.is_empty())?;
        let mut inner = 0;
        let mut trailing = 0;
        for subblock in block.into_usize_array() {
            if subblock != 0 {
                trailing = subblock.trailing_zeros() as usize;
                break;
            } else {
                inner += BITS;
            }
        }
        Some(block_idx * SimdBlock::BITS + inner + trailing)
    }

    #[inline]
    pub fn maximum(&self) -> Option<usize> {
        let (block_idx, block) = self
            .as_simd_slice()
            .iter()
            .rev()
            .enumerate()
            .find(|&(_, block)| !block.is_empty())?;
        let mut inner = 0;
        let mut leading = 0;
        for subblock in block.into_usize_array().iter().rev() {
            if *subblock != 0 {
                leading = subblock.leading_zeros() as usize;
                break;
            } else {
                inner += BITS;
            }
        }
        let max = self.simd_block_len() * SimdBlock::BITS;
        Some(max - block_idx * SimdBlock::BITS - inner - leading - 1)
    }

    #[inline]
    pub fn is_full(&self) -> bool {
        self.contains_all_in_range(..)
    }

    #[inline]
    pub fn contains(&self, bit: usize) -> bool {
        (bit < self.length)
            // SAFETY: The above check ensures the bit is in bounds.
            .then(|| unsafe { self.contains_unchecked(bit) })
            .unwrap_or(false)
    }

    /// # Safety
    /// `bit` must be in bounds.
    #[inline]
    pub unsafe fn contains_unchecked(&self, bit: usize) -> bool {
        let (block, i) = div_rem(bit, BITS);
        (self.get_unchecked(block) & (1 << i)) != 0
    }

    #[inline]
    pub fn clear(&mut self) {
        for elt in self.as_mut_simd_slice().iter_mut() {
            *elt = SimdBlock::NONE
        }
    }

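    /// Sets bit `bit` to one. Panics if `bit` is out of bounds.
    ///
    /// An illustrative sketch:
    ///
    /// ```
    /// use fixedbitset::FixedBitSet;
    ///
    /// let mut bitset = FixedBitSet::with_capacity(8);
    /// bitset.insert(3);
    /// assert!(bitset.contains(3));
    /// assert!(!bitset.contains(4));
    /// ```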
    #[inline]
    pub fn insert(&mut self, bit: usize) {
        assert!(
            bit < self.length,
            "insert at index {} exceeds fixedbitset size {}",
            bit,
            self.length
        );
        // SAFETY: The above assertion ensures the bit is in bounds.
        unsafe {
            self.insert_unchecked(bit);
        }
    }

    /// # Safety
    /// `bit` must be in bounds.
    #[inline]
    pub unsafe fn insert_unchecked(&mut self, bit: usize) {
        let (block, i) = div_rem(bit, BITS);
        // SAFETY: The caller must ensure the bit is in bounds.
        unsafe {
            *self.get_unchecked_mut(block) |= 1 << i;
        }
    }

    #[inline]
    pub fn remove(&mut self, bit: usize) {
        assert!(
            bit < self.length,
            "remove at index {} exceeds fixedbitset size {}",
            bit,
            self.length
        );
        // SAFETY: The above assertion ensures the bit is in bounds.
        unsafe {
            self.remove_unchecked(bit);
        }
    }

    /// # Safety
    /// `bit` must be in bounds.
    #[inline]
    pub unsafe fn remove_unchecked(&mut self, bit: usize) {
        let (block, i) = div_rem(bit, BITS);
        // SAFETY: The caller must ensure the bit is in bounds.
        unsafe {
            *self.get_unchecked_mut(block) &= !(1 << i);
        }
    }

    #[inline]
    pub fn put(&mut self, bit: usize) -> bool {
        assert!(
            bit < self.length,
            "put at index {} exceeds fixedbitset size {}",
            bit,
            self.length
        );
        // SAFETY: The above assertion ensures the bit is in bounds.
        unsafe { self.put_unchecked(bit) }
    }

    /// # Safety
    /// `bit` must be in bounds.
    #[inline]
    pub unsafe fn put_unchecked(&mut self, bit: usize) -> bool {
        let (block, i) = div_rem(bit, BITS);
        // SAFETY: The caller must ensure the bit is in bounds.
        unsafe {
            let word = self.get_unchecked_mut(block);
            let prev = *word & (1 << i) != 0;
            *word |= 1 << i;
            prev
        }
    }

    #[inline]
    pub fn toggle(&mut self, bit: usize) {
        assert!(
            bit < self.length,
            "toggle at index {} exceeds fixedbitset size {}",
            bit,
            self.length
        );
        // SAFETY: The above assertion ensures the bit is in bounds.
        unsafe {
            self.toggle_unchecked(bit);
        }
    }

    /// # Safety
    /// `bit` must be in bounds.
    #[inline]
    pub unsafe fn toggle_unchecked(&mut self, bit: usize) {
        let (block, i) = div_rem(bit, BITS);
        // SAFETY: The caller must ensure the bit is in bounds.
        unsafe {
            *self.get_unchecked_mut(block) ^= 1 << i;
        }
    }

    #[inline]
    pub fn set(&mut self, bit: usize, enabled: bool) {
        assert!(
            bit < self.length,
            "set at index {} exceeds fixedbitset size {}",
            bit,
            self.length
        );
        // SAFETY: The above assertion ensures the bit is in bounds.
        unsafe {
            self.set_unchecked(bit, enabled);
        }
    }

    /// # Safety
    /// `bit` must be in bounds.
    #[inline]
    pub unsafe fn set_unchecked(&mut self, bit: usize, enabled: bool) {
        let (block, i) = div_rem(bit, BITS);
        // SAFETY: The caller must ensure the bit is in bounds.
        let elt = unsafe { self.get_unchecked_mut(block) };
        if enabled {
            *elt |= 1 << i;
        } else {
            *elt &= !(1 << i);
        }
    }

    #[inline]
    pub fn copy_bit(&mut self, from: usize, to: usize) {
        assert!(
            to < self.length,
            "copy to index {} exceeds fixedbitset size {}",
            to,
            self.length
        );
        let enabled = self.contains(from);
        // SAFETY: The above assertion ensures `to` is in bounds.
        unsafe { self.set_unchecked(to, enabled) };
    }

    /// # Safety
    /// Both `from` and `to` must be in bounds.
    #[inline]
    pub unsafe fn copy_bit_unchecked(&mut self, from: usize, to: usize) {
        let enabled = self.contains_unchecked(from);
        self.set_unchecked(to, enabled);
    }

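    /// Counts the set bits inside `range`.
    ///
    /// An illustrative sketch; `..` covers the whole set:
    ///
    /// ```
    /// use fixedbitset::FixedBitSet;
    ///
    /// let bitset: FixedBitSet = vec![1, 2, 5].into_iter().collect();
    /// assert_eq!(bitset.count_ones(..), 3);
    /// assert_eq!(bitset.count_ones(0..3), 2);
    /// ```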
    #[inline]
    pub fn count_ones<T: IndexRange>(&self, range: T) -> usize {
        Self::batch_count_ones(Masks::new(range, self.length).map(|(block, mask)| {
            // SAFETY: `Masks` only yields in-bounds block indices.
            unsafe { *self.get_unchecked(block) & mask }
        }))
    }

    #[inline]
    pub fn count_zeroes<T: IndexRange>(&self, range: T) -> usize {
        Self::batch_count_ones(Masks::new(range, self.length).map(|(block, mask)| {
            // SAFETY: `Masks` only yields in-bounds block indices.
            unsafe { !*self.get_unchecked(block) & mask }
        }))
    }

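    /// Sets every bit in `range` to `enabled`.
    ///
    /// An illustrative sketch:
    ///
    /// ```
    /// use fixedbitset::FixedBitSet;
    ///
    /// let mut bitset = FixedBitSet::with_capacity(8);
    /// bitset.set_range(2..6, true);
    /// assert_eq!(bitset.count_ones(..), 4);
    /// bitset.set_range(4..6, false);
    /// assert_eq!(bitset.count_ones(..), 2);
    /// ```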
    #[inline]
    pub fn set_range<T: IndexRange>(&mut self, range: T, enabled: bool) {
        if enabled {
            self.insert_range(range);
        } else {
            self.remove_range(range);
        }
    }

    #[inline]
    pub fn insert_range<T: IndexRange>(&mut self, range: T) {
        for (block, mask) in Masks::new(range, self.length) {
            // SAFETY: `Masks` only yields in-bounds block indices.
            let block = unsafe { self.get_unchecked_mut(block) };
            *block |= mask;
        }
    }

    #[inline]
    pub fn remove_range<T: IndexRange>(&mut self, range: T) {
        for (block, mask) in Masks::new(range, self.length) {
            // SAFETY: `Masks` only yields in-bounds block indices.
            let block = unsafe { self.get_unchecked_mut(block) };
            *block &= !mask;
        }
    }

    #[inline]
    pub fn toggle_range<T: IndexRange>(&mut self, range: T) {
        for (block, mask) in Masks::new(range, self.length) {
            // SAFETY: `Masks` only yields in-bounds block indices.
            let block = unsafe { self.get_unchecked_mut(block) };
            *block ^= mask;
        }
    }

    #[inline]
    pub fn contains_all_in_range<T: IndexRange>(&self, range: T) -> bool {
        for (block, mask) in Masks::new(range, self.length) {
            // SAFETY: `Masks` only yields in-bounds block indices.
            let block = unsafe { self.get_unchecked(block) };
            if block & mask != mask {
                return false;
            }
        }
        true
    }

    #[inline]
    pub fn contains_any_in_range<T: IndexRange>(&self, range: T) -> bool {
        for (block, mask) in Masks::new(range, self.length) {
            // SAFETY: `Masks` only yields in-bounds block indices.
            let block = unsafe { self.get_unchecked(block) };
            if block & mask != 0 {
                return true;
            }
        }
        false
    }

    #[inline]
    pub fn as_slice(&self) -> &[Block] {
        // SAFETY: The pointer is valid for `usize_len` initialized blocks.
        unsafe {
            let ptr = self.data.as_ptr().cast::<Block>();
            core::slice::from_raw_parts(ptr, self.usize_len())
        }
    }

    #[inline]
    pub fn as_mut_slice(&mut self) -> &mut [Block] {
        // SAFETY: The pointer is valid for `usize_len` initialized blocks,
        // and `&mut self` guarantees exclusive access.
        unsafe {
            let ptr = self.data.as_ptr().cast::<Block>();
            core::slice::from_raw_parts_mut(ptr, self.usize_len())
        }
    }

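    /// Returns an iterator over the indices of the set bits, in ascending
    /// order.
    ///
    /// An illustrative sketch:
    ///
    /// ```
    /// use fixedbitset::FixedBitSet;
    ///
    /// let bitset: FixedBitSet = vec![0, 3, 7].into_iter().collect();
    /// let ones: Vec<usize> = bitset.ones().collect();
    /// assert_eq!(ones, vec![0, 3, 7]);
    /// ```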
    #[inline]
    pub fn ones(&self) -> Ones {
        match self.as_slice().split_first() {
            Some((&first_block, rem)) => {
                let (&last_block, rem) = rem.split_last().unwrap_or((&0, rem));
                Ones {
                    bitset_front: first_block,
                    bitset_back: last_block,
                    block_idx_front: 0,
                    block_idx_back: (1 + rem.len()) * BITS,
                    remaining_blocks: rem.iter(),
                }
            }
            None => Ones {
                bitset_front: 0,
                bitset_back: 0,
                block_idx_front: 0,
                block_idx_back: 0,
                remaining_blocks: [].iter(),
            },
        }
    }

    pub fn into_ones(self) -> IntoOnes {
        let ptr = self.data.as_ptr().cast();
        let len = self.simd_block_len() * SimdBlock::USIZE_COUNT;
        // SAFETY: The pointer is valid for `len` initialized `usize` blocks;
        // the buffer below keeps the allocation alive for the iterator.
        let slice = unsafe { core::slice::from_raw_parts(ptr, len) };
        // SAFETY: The data pointer, simd block length and capacity come from
        // a `Vec` that was disassembled with `vec_into_parts`.
        let data: Vec<SimdBlock> = unsafe {
            Vec::from_raw_parts(
                self.data.as_ptr().cast(),
                self.simd_block_len(),
                self.capacity,
            )
        };
        let mut iter = slice.iter().copied();

        core::mem::forget(self);

        IntoOnes {
            bitset_front: iter.next().unwrap_or(0),
            bitset_back: iter.next_back().unwrap_or(0),
            block_idx_front: 0,
            block_idx_back: len.saturating_sub(1) * BITS,
            remaining_blocks: iter,
            _buf: data,
        }
    }

    #[inline]
    pub fn zeroes(&self) -> Zeroes {
        match self.as_slice().split_first() {
            Some((&block, rem)) => Zeroes {
                bitset: !block,
                block_idx: 0,
                len: self.len(),
                remaining_blocks: rem.iter(),
            },
            None => Zeroes {
                bitset: !0,
                block_idx: 0,
                len: self.len(),
                remaining_blocks: [].iter(),
            },
        }
    }

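    /// Returns an iterator over the bits set in both `self` and `other`.
    ///
    /// An illustrative sketch:
    ///
    /// ```
    /// use fixedbitset::FixedBitSet;
    ///
    /// let a: FixedBitSet = vec![1, 2, 3].into_iter().collect();
    /// let b: FixedBitSet = vec![2, 3, 4].into_iter().collect();
    /// let common: Vec<usize> = a.intersection(&b).collect();
    /// assert_eq!(common, vec![2, 3]);
    /// ```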
    pub fn intersection<'a>(&'a self, other: &'a FixedBitSet) -> Intersection<'a> {
        Intersection {
            iter: self.ones(),
            other,
        }
    }

    pub fn union<'a>(&'a self, other: &'a FixedBitSet) -> Union<'a> {
        Union {
            iter: self.ones().chain(other.difference(self)),
        }
    }

    pub fn difference<'a>(&'a self, other: &'a FixedBitSet) -> Difference<'a> {
        Difference {
            iter: self.ones(),
            other,
        }
    }

    pub fn symmetric_difference<'a>(&'a self, other: &'a FixedBitSet) -> SymmetricDifference<'a> {
        SymmetricDifference {
            iter: self.difference(other).chain(other.difference(self)),
        }
    }

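    /// In-place union: sets `self` to `self | other`, growing `self` if
    /// `other` is longer.
    ///
    /// An illustrative sketch:
    ///
    /// ```
    /// use fixedbitset::FixedBitSet;
    ///
    /// let mut a: FixedBitSet = vec![0, 1].into_iter().collect();
    /// let b: FixedBitSet = vec![5].into_iter().collect();
    /// a.union_with(&b);
    /// assert_eq!(a.len(), 6);
    /// assert!(a.contains(5));
    /// ```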
    pub fn union_with(&mut self, other: &FixedBitSet) {
        if other.len() >= self.len() {
            self.grow(other.len());
        }
        self.as_mut_simd_slice()
            .iter_mut()
            .zip(other.as_simd_slice().iter())
            .for_each(|(x, y)| *x |= *y);
    }

    pub fn intersect_with(&mut self, other: &FixedBitSet) {
        let me = self.as_mut_simd_slice();
        let other = other.as_simd_slice();
        me.iter_mut().zip(other.iter()).for_each(|(x, y)| {
            *x &= *y;
        });
        let mn = core::cmp::min(me.len(), other.len());
        for wd in &mut me[mn..] {
            *wd = SimdBlock::NONE;
        }
    }

    pub fn difference_with(&mut self, other: &FixedBitSet) {
        self.as_mut_simd_slice()
            .iter_mut()
            .zip(other.as_simd_slice().iter())
            .for_each(|(x, y)| {
                *x &= !*y;
            });
    }

    pub fn symmetric_difference_with(&mut self, other: &FixedBitSet) {
        if other.len() >= self.len() {
            self.grow(other.len());
        }
        self.as_mut_simd_slice()
            .iter_mut()
            .zip(other.as_simd_slice().iter())
            .for_each(|(x, y)| {
                *x ^= *y;
            });
    }

    #[inline]
    pub fn union_count(&self, other: &FixedBitSet) -> usize {
        let me = self.as_slice();
        let other = other.as_slice();
        let count = Self::batch_count_ones(me.iter().zip(other.iter()).map(|(x, y)| (*x | *y)));
        match other.len().cmp(&me.len()) {
            Ordering::Greater => count + Self::batch_count_ones(other[me.len()..].iter().copied()),
            Ordering::Less => count + Self::batch_count_ones(me[other.len()..].iter().copied()),
            Ordering::Equal => count,
        }
    }

    #[inline]
    pub fn intersection_count(&self, other: &FixedBitSet) -> usize {
        Self::batch_count_ones(
            self.as_slice()
                .iter()
                .zip(other.as_slice())
                .map(|(x, y)| (*x & *y)),
        )
    }

    #[inline]
    pub fn difference_count(&self, other: &FixedBitSet) -> usize {
        Self::batch_count_ones(
            self.as_slice()
                .iter()
                .zip(other.as_slice().iter())
                .map(|(x, y)| (*x & !*y)),
        )
    }

    #[inline]
    pub fn symmetric_difference_count(&self, other: &FixedBitSet) -> usize {
        let me = self.as_slice();
        let other = other.as_slice();
        let count = Self::batch_count_ones(me.iter().zip(other.iter()).map(|(x, y)| (*x ^ *y)));
        match other.len().cmp(&me.len()) {
            Ordering::Greater => count + Self::batch_count_ones(other[me.len()..].iter().copied()),
            Ordering::Less => count + Self::batch_count_ones(me[other.len()..].iter().copied()),
            Ordering::Equal => count,
        }
    }

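    /// Returns `true` if `self` and `other` share no set bits.
    ///
    /// An illustrative sketch:
    ///
    /// ```
    /// use fixedbitset::FixedBitSet;
    ///
    /// let a: FixedBitSet = vec![0, 2].into_iter().collect();
    /// let b: FixedBitSet = vec![1, 3].into_iter().collect();
    /// assert!(a.is_disjoint(&b));
    /// assert!(a.is_subset(&(&a | &b)));
    /// ```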
    pub fn is_disjoint(&self, other: &FixedBitSet) -> bool {
        self.as_simd_slice()
            .iter()
            .zip(other.as_simd_slice())
            .all(|(x, y)| (*x & *y).is_empty())
    }

    pub fn is_subset(&self, other: &FixedBitSet) -> bool {
        let me = self.as_simd_slice();
        let other = other.as_simd_slice();
        me.iter()
            .zip(other.iter())
            .all(|(x, y)| x.andnot(*y).is_empty())
            && me.iter().skip(other.len()).all(|x| x.is_empty())
    }

    pub fn is_superset(&self, other: &FixedBitSet) -> bool {
        other.is_subset(self)
    }
}

impl Hash for FixedBitSet {
    fn hash<H: core::hash::Hasher>(&self, state: &mut H) {
        self.length.hash(state);
        self.as_simd_slice().hash(state);
    }
}

impl PartialEq for FixedBitSet {
    fn eq(&self, other: &Self) -> bool {
        self.length == other.length && self.as_simd_slice().eq(other.as_simd_slice())
    }
}

impl PartialOrd for FixedBitSet {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

impl Ord for FixedBitSet {
    fn cmp(&self, other: &Self) -> Ordering {
        self.length
            .cmp(&other.length)
            .then_with(|| self.as_simd_slice().cmp(other.as_simd_slice()))
    }
}

impl Default for FixedBitSet {
    fn default() -> Self {
        Self::new()
    }
}

impl Drop for FixedBitSet {
    fn drop(&mut self) {
        // SAFETY: The data pointer, simd block length and capacity come from
        // a `Vec` that was disassembled with `vec_into_parts`.
        drop(unsafe {
            Vec::from_raw_parts(self.data.as_ptr(), self.simd_block_len(), self.capacity)
        });
    }
}

impl Binary for FixedBitSet {
    fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> {
        if f.alternate() {
            f.write_str("0b")?;
        }

        for i in 0..self.length {
            if self[i] {
                f.write_char('1')?;
            } else {
                f.write_char('0')?;
            }
        }

        Ok(())
    }
}

impl Display for FixedBitSet {
    fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> {
        Binary::fmt(&self, f)
    }
}

pub struct Difference<'a> {
    iter: Ones<'a>,
    other: &'a FixedBitSet,
}

impl<'a> Iterator for Difference<'a> {
    type Item = usize;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        self.iter.by_ref().find(|&nxt| !self.other.contains(nxt))
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.iter.size_hint()
    }
}

impl<'a> DoubleEndedIterator for Difference<'a> {
    fn next_back(&mut self) -> Option<Self::Item> {
        self.iter
            .by_ref()
            .rev()
            .find(|&nxt| !self.other.contains(nxt))
    }
}

impl<'a> FusedIterator for Difference<'a> {}

pub struct SymmetricDifference<'a> {
    iter: Chain<Difference<'a>, Difference<'a>>,
}

impl<'a> Iterator for SymmetricDifference<'a> {
    type Item = usize;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        self.iter.next()
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.iter.size_hint()
    }
}

impl<'a> DoubleEndedIterator for SymmetricDifference<'a> {
    fn next_back(&mut self) -> Option<Self::Item> {
        self.iter.next_back()
    }
}

impl<'a> FusedIterator for SymmetricDifference<'a> {}

pub struct Intersection<'a> {
    iter: Ones<'a>,
    other: &'a FixedBitSet,
}

impl<'a> Iterator for Intersection<'a> {
    type Item = usize;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        self.iter.by_ref().find(|&nxt| self.other.contains(nxt))
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.iter.size_hint()
    }
}

impl<'a> DoubleEndedIterator for Intersection<'a> {
    fn next_back(&mut self) -> Option<Self::Item> {
        self.iter
            .by_ref()
            .rev()
            .find(|&nxt| self.other.contains(nxt))
    }
}

impl<'a> FusedIterator for Intersection<'a> {}

pub struct Union<'a> {
    iter: Chain<Ones<'a>, Difference<'a>>,
}

impl<'a> Iterator for Union<'a> {
    type Item = usize;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        self.iter.next()
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.iter.size_hint()
    }
}

impl<'a> DoubleEndedIterator for Union<'a> {
    fn next_back(&mut self) -> Option<Self::Item> {
        self.iter.next_back()
    }
}

impl<'a> FusedIterator for Union<'a> {}

struct Masks {
    first_block: usize,
    first_mask: usize,
    last_block: usize,
    last_mask: usize,
}

impl Masks {
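    /// Computes the per-block masks covering `range`.
    ///
    /// Worked example (illustrative, assuming a 64-bit `usize`, so
    /// `BITS == 64`): for the range `3..10`, both endpoints fall in block 0,
    /// `first_mask` keeps bits `3..`, and `last_mask` keeps bits `..10`;
    /// their intersection selects exactly bits `3..10`.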
    #[inline]
    fn new<T: IndexRange>(range: T, length: usize) -> Masks {
        let start = range.start().unwrap_or(0);
        let end = range.end().unwrap_or(length);
        assert!(
            start <= end && end <= length,
            "invalid range {}..{} for a fixedbitset of size {}",
            start,
            end,
            length
        );

        let (first_block, first_rem) = div_rem(start, BITS);
        let (last_block, last_rem) = div_rem(end, BITS);

        Masks {
            first_block,
            first_mask: usize::MAX << first_rem,
            last_block,
            // The shift is split in two to avoid an overflowing shift by
            // `BITS` when `last_rem == 0`.
            last_mask: (usize::MAX >> 1) >> (BITS - last_rem - 1),
        }
    }
}

impl Iterator for Masks {
    type Item = (usize, usize);

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        match self.first_block.cmp(&self.last_block) {
            Ordering::Less => {
                let res = (self.first_block, self.first_mask);
                self.first_block += 1;
                self.first_mask = !0;
                Some(res)
            }
            Ordering::Equal => {
                let mask = self.first_mask & self.last_mask;
                let res = if mask == 0 {
                    None
                } else {
                    Some((self.first_block, mask))
                };
                self.first_block += 1;
                res
            }
            Ordering::Greater => None,
        }
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        (self.first_block..=self.last_block).size_hint()
    }
}

impl FusedIterator for Masks {}

impl ExactSizeIterator for Masks {}

pub struct Ones<'a> {
    bitset_front: usize,
    bitset_back: usize,
    block_idx_front: usize,
    block_idx_back: usize,
    remaining_blocks: core::slice::Iter<'a, usize>,
}

impl<'a> Ones<'a> {
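    /// Returns the position of the lowest set bit of `*n` and clears that bit.
    ///
    /// Worked example (illustrative): for `n = 0b10110`,
    /// `n & n.wrapping_neg()` isolates the lowest set bit `0b10`, so the
    /// returned position is 1, and `*n &= *n - 1` leaves `0b10100`.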
    #[inline]
    pub fn last_positive_bit_and_unset(n: &mut usize) -> usize {
        // Isolate the lowest set bit.
        let last_bit = *n & n.wrapping_neg();

        // Its index is the number of trailing zeros.
        let position = last_bit.trailing_zeros();

        // Clear the lowest set bit.
        *n &= *n - 1;

        position as usize
    }

    #[inline]
    fn first_positive_bit_and_unset(n: &mut usize) -> usize {
        // Index of the highest set bit, counted from the most significant end.
        let bit_idx = n.leading_zeros();

        // Clear that bit.
        let mask = !((1_usize) << (BITS as u32 - bit_idx - 1));
        n.bitand_assign(mask);

        bit_idx as usize
    }
}

impl<'a> DoubleEndedIterator for Ones<'a> {
    fn next_back(&mut self) -> Option<Self::Item> {
        while self.bitset_back == 0 {
            match self.remaining_blocks.next_back() {
                None => {
                    if self.bitset_front != 0 {
                        self.bitset_back = 0;
                        self.block_idx_back = self.block_idx_front;
                        return Some(
                            self.block_idx_front + BITS
                                - Self::first_positive_bit_and_unset(&mut self.bitset_front)
                                - 1,
                        );
                    } else {
                        return None;
                    }
                }
                Some(next_block) => {
                    self.bitset_back = *next_block;
                    self.block_idx_back -= BITS;
                }
            };
        }

        Some(
            self.block_idx_back - Self::first_positive_bit_and_unset(&mut self.bitset_back) + BITS
                - 1,
        )
    }
}

impl<'a> Iterator for Ones<'a> {
    type Item = usize;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        while self.bitset_front == 0 {
            match self.remaining_blocks.next() {
                Some(next_block) => {
                    self.bitset_front = *next_block;
                    self.block_idx_front += BITS;
                }
                None => {
                    if self.bitset_back != 0 {
                        self.block_idx_front = self.block_idx_back;
                        self.bitset_front = 0;

                        return Some(
                            self.block_idx_back
                                + Self::last_positive_bit_and_unset(&mut self.bitset_back),
                        );
                    } else {
                        return None;
                    }
                }
            };
        }

        Some(self.block_idx_front + Self::last_positive_bit_and_unset(&mut self.bitset_front))
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        (
            0,
            (Some(self.block_idx_back - self.block_idx_front + 2 * BITS)),
        )
    }
}

impl<'a> FusedIterator for Ones<'a> {}

pub struct Zeroes<'a> {
    bitset: usize,
    block_idx: usize,
    len: usize,
    remaining_blocks: core::slice::Iter<'a, usize>,
}

impl<'a> Iterator for Zeroes<'a> {
    type Item = usize;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        while self.bitset == 0 {
            self.bitset = !*self.remaining_blocks.next()?;
            self.block_idx += BITS;
        }
        // Isolate and clear the lowest set bit of the inverted block.
        let t = self.bitset & (0_usize).wrapping_sub(self.bitset);
        let r = self.bitset.trailing_zeros() as usize;
        self.bitset ^= t;
        let bit = self.block_idx + r;
        // Zeroes in the final block may fall past the end of the set.
        if bit < self.len {
            Some(bit)
        } else {
            None
        }
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        (0, Some(self.len))
    }
}

impl<'a> FusedIterator for Zeroes<'a> {}

impl Clone for FixedBitSet {
    #[inline]
    fn clone(&self) -> Self {
        Self::from_blocks_and_len(Vec::from(self.as_simd_slice()), self.length)
    }

    #[inline]
    fn clone_from(&mut self, source: &Self) {
        if self.length < source.length {
            // SAFETY: The uninitialized blocks are fully overwritten below.
            unsafe { self.grow_inner(source.length, MaybeUninit::uninit()) };
        }
        let me = self.as_mut_simd_slice_uninit();
        let them = source.as_simd_slice_uninit();
        match me.len().cmp(&them.len()) {
            Ordering::Greater => {
                let (head, tail) = me.split_at_mut(them.len());
                head.copy_from_slice(them);
                tail.fill(MaybeUninit::new(SimdBlock::NONE));
            }
            Ordering::Equal => me.copy_from_slice(them),
            // Cannot happen: after the grow above, `me` is at least as long
            // as `them`.
            Ordering::Less => {}
        }
        self.length = source.length;
    }
}

impl Index<usize> for FixedBitSet {
    type Output = bool;

    #[inline]
    fn index(&self, bit: usize) -> &bool {
        if self.contains(bit) {
            &true
        } else {
            &false
        }
    }
}

impl Extend<usize> for FixedBitSet {
    fn extend<I: IntoIterator<Item = usize>>(&mut self, src: I) {
        let iter = src.into_iter();
        for i in iter {
            if i >= self.len() {
                self.grow(i + 1);
            }
            self.put(i);
        }
    }
}

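/// Builds a `FixedBitSet` sized to hold the largest index yielded.
///
/// An illustrative sketch:
///
/// ```
/// use fixedbitset::FixedBitSet;
///
/// let bitset: FixedBitSet = vec![1, 3, 5].into_iter().collect();
/// assert_eq!(bitset.len(), 6); // grown to `max + 1`
/// assert!(bitset.contains(5));
/// ```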
impl FromIterator<usize> for FixedBitSet {
    fn from_iter<I: IntoIterator<Item = usize>>(src: I) -> Self {
        let mut fbs = FixedBitSet::with_capacity(0);
        fbs.extend(src);
        fbs
    }
}

pub struct IntoOnes {
    bitset_front: Block,
    bitset_back: Block,
    block_idx_front: usize,
    block_idx_back: usize,
    remaining_blocks: core::iter::Copied<core::slice::Iter<'static, usize>>,
    // Keeps the underlying allocation alive while the iterator reads from it.
    _buf: Vec<SimdBlock>,
}

impl IntoOnes {
    #[inline]
    pub fn last_positive_bit_and_unset(n: &mut Block) -> usize {
        // Isolate the lowest set bit.
        let last_bit = *n & n.wrapping_neg();

        // Its index is the number of trailing zeros.
        let position = last_bit.trailing_zeros();

        // Clear the lowest set bit.
        *n &= *n - 1;

        position as usize
    }

    #[inline]
    fn first_positive_bit_and_unset(n: &mut Block) -> usize {
        // Index of the highest set bit, counted from the most significant end.
        let bit_idx = n.leading_zeros();

        // Clear that bit.
        let mask = !((1_usize) << (BITS as u32 - bit_idx - 1));
        n.bitand_assign(mask);

        bit_idx as usize
    }
}

impl DoubleEndedIterator for IntoOnes {
    fn next_back(&mut self) -> Option<Self::Item> {
        while self.bitset_back == 0 {
            match self.remaining_blocks.next_back() {
                None => {
                    if self.bitset_front != 0 {
                        self.bitset_back = 0;
                        self.block_idx_back = self.block_idx_front;
                        return Some(
                            self.block_idx_front + BITS
                                - Self::first_positive_bit_and_unset(&mut self.bitset_front)
                                - 1,
                        );
                    } else {
                        return None;
                    }
                }
                Some(next_block) => {
                    self.bitset_back = next_block;
                    self.block_idx_back -= BITS;
                }
            };
        }

        Some(
            self.block_idx_back - Self::first_positive_bit_and_unset(&mut self.bitset_back) + BITS
                - 1,
        )
    }
}

impl Iterator for IntoOnes {
    type Item = usize;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        while self.bitset_front == 0 {
            match self.remaining_blocks.next() {
                Some(next_block) => {
                    self.bitset_front = next_block;
                    self.block_idx_front += BITS;
                }
                None => {
                    if self.bitset_back != 0 {
                        self.block_idx_front = self.block_idx_back;
                        self.bitset_front = 0;

                        return Some(
                            self.block_idx_back
                                + Self::last_positive_bit_and_unset(&mut self.bitset_back),
                        );
                    } else {
                        return None;
                    }
                }
            };
        }

        Some(self.block_idx_front + Self::last_positive_bit_and_unset(&mut self.bitset_front))
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        (
            0,
            (Some(self.block_idx_back - self.block_idx_front + 2 * BITS)),
        )
    }
}

impl FusedIterator for IntoOnes {}

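/// `&a & &b` allocates a new set truncated to the shorter operand's length.
///
/// An illustrative sketch:
///
/// ```
/// use fixedbitset::FixedBitSet;
///
/// let a: FixedBitSet = vec![1, 2, 7].into_iter().collect();
/// let b: FixedBitSet = vec![2, 3].into_iter().collect();
/// let c = &a & &b;
/// assert_eq!(c.len(), 4); // min of the two lengths
/// assert!(c.contains(2));
/// ```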
impl<'a> BitAnd for &'a FixedBitSet {
    type Output = FixedBitSet;
    fn bitand(self, other: &FixedBitSet) -> FixedBitSet {
        let (short, long) = {
            if self.len() <= other.len() {
                (self.as_simd_slice(), other.as_simd_slice())
            } else {
                (other.as_simd_slice(), self.as_simd_slice())
            }
        };
        let mut data = Vec::from(short);
        for (data, block) in data.iter_mut().zip(long.iter()) {
            *data &= *block;
        }
        let len = core::cmp::min(self.len(), other.len());
        FixedBitSet::from_blocks_and_len(data, len)
    }
}

impl BitAndAssign for FixedBitSet {
    fn bitand_assign(&mut self, other: Self) {
        self.intersect_with(&other);
    }
}

impl BitAndAssign<&Self> for FixedBitSet {
    fn bitand_assign(&mut self, other: &Self) {
        self.intersect_with(other);
    }
}

impl<'a> BitOr for &'a FixedBitSet {
    type Output = FixedBitSet;
    fn bitor(self, other: &FixedBitSet) -> FixedBitSet {
        let (short, long) = {
            if self.len() <= other.len() {
                (self.as_simd_slice(), other.as_simd_slice())
            } else {
                (other.as_simd_slice(), self.as_simd_slice())
            }
        };
        let mut data = Vec::from(long);
        for (data, block) in data.iter_mut().zip(short.iter()) {
            *data |= *block;
        }
        let len = core::cmp::max(self.len(), other.len());
        FixedBitSet::from_blocks_and_len(data, len)
    }
}

impl BitOrAssign for FixedBitSet {
    fn bitor_assign(&mut self, other: Self) {
        self.union_with(&other);
    }
}

impl BitOrAssign<&Self> for FixedBitSet {
    fn bitor_assign(&mut self, other: &Self) {
        self.union_with(other);
    }
}

impl<'a> BitXor for &'a FixedBitSet {
    type Output = FixedBitSet;
    fn bitxor(self, other: &FixedBitSet) -> FixedBitSet {
        let (short, long) = {
            if self.len() <= other.len() {
                (self.as_simd_slice(), other.as_simd_slice())
            } else {
                (other.as_simd_slice(), self.as_simd_slice())
            }
        };
        let mut data = Vec::from(long);
        for (data, block) in data.iter_mut().zip(short.iter()) {
            *data ^= *block;
        }
        let len = core::cmp::max(self.len(), other.len());
        FixedBitSet::from_blocks_and_len(data, len)
    }
}

impl BitXorAssign for FixedBitSet {
    fn bitxor_assign(&mut self, other: Self) {
        self.symmetric_difference_with(&other);
    }
}

impl BitXorAssign<&Self> for FixedBitSet {
    fn bitxor_assign(&mut self, other: &Self) {
        self.symmetric_difference_with(other);
    }
}