1#![no_std]
93#![cfg_attr(docsrs, feature(doc_cfg))]
94#![cfg_attr(feature = "specialization", allow(incomplete_features))]
95#![cfg_attr(feature = "specialization", feature(specialization))]
96#![cfg_attr(feature = "may_dangle", feature(dropck_eyepatch))]
97#![cfg_attr(
98 feature = "debugger_visualizer",
99 feature(debugger_visualizer),
100 debugger_visualizer(natvis_file = "../debug_metadata/smallvec.natvis")
101)]
102#![deny(missing_docs)]
103
104#[doc(hidden)]
105pub extern crate alloc;
106
107#[cfg(any(test, feature = "write"))]
108extern crate std;
109
110#[cfg(test)]
111mod tests;
112
113#[allow(deprecated)]
114use alloc::alloc::{Layout, LayoutErr};
115use alloc::boxed::Box;
116use alloc::{vec, vec::Vec};
117use core::borrow::{Borrow, BorrowMut};
118use core::cmp;
119use core::fmt;
120use core::hash::{Hash, Hasher};
121use core::hint::unreachable_unchecked;
122use core::iter::{repeat, FromIterator, FusedIterator, IntoIterator};
123use core::mem;
124use core::mem::MaybeUninit;
125use core::ops::{self, Range, RangeBounds};
126use core::ptr::{self, NonNull};
127use core::slice::{self, SliceIndex};
128
129#[cfg(feature = "malloc_size_of")]
130use malloc_size_of::{MallocShallowSizeOf, MallocSizeOf, MallocSizeOfOps};
131
132#[cfg(feature = "serde")]
133use serde::{
134 de::{Deserialize, Deserializer, SeqAccess, Visitor},
135 ser::{Serialize, SerializeSeq, Serializer},
136};
137
138#[cfg(feature = "serde")]
139use core::marker::PhantomData;
140
141#[cfg(feature = "write")]
142use std::io;
143
144#[cfg(feature = "drain_keep_rest")]
145use core::mem::ManuallyDrop;
146
/// Creates a [`SmallVec`] containing the arguments, with the same syntax as
/// the standard `vec!` macro.
///
/// `smallvec![a, b, c]` builds from a list of elements; `smallvec![elem; n]`
/// builds `n` clones of `elem`.
#[macro_export]
macro_rules! smallvec {
    // Internal helper: each matched expression contributes `1usize` so the
    // element count can be computed at compile time.
    (@one $x:expr) => (1usize);
    ($elem:expr; $n:expr) => ({
        $crate::SmallVec::from_elem($elem, $n)
    });
    ($($x:expr),*$(,)*) => ({
        let count = 0usize $(+ $crate::smallvec!(@one $x))*;
        #[allow(unused_mut)]
        let mut vec = $crate::SmallVec::new();
        // If everything fits inline, push directly; otherwise build a `Vec`
        // first so only a single heap allocation is made.
        if count <= vec.inline_size() {
            $(vec.push($x);)*
            vec
        } else {
            $crate::SmallVec::from_vec($crate::alloc::vec![$($x,)*])
        }
    });
}
203
/// Creates an inline [`SmallVec`] in a `const` context, with `vec!`-like
/// syntax. The backing array size `N` is inferred from the number of
/// elements, so the result never spills to the heap at construction.
#[cfg(feature = "const_new")]
#[cfg_attr(docsrs, doc(cfg(feature = "const_new")))]
#[macro_export]
macro_rules! smallvec_inline {
    // Internal helper: counts one per element, like `smallvec!(@one ...)`.
    (@one $x:expr) => (1usize);
    ($elem:expr; $n:expr) => ({
        $crate::SmallVec::<[_; $n]>::from_const([$elem; $n])
    });
    ($($x:expr),+ $(,)?) => ({
        const N: usize = 0usize $(+ $crate::smallvec_inline!(@one $x))*;
        $crate::SmallVec::<[_; N]>::from_const([$($x,)*])
    });
}
247
// Used in match arms of the non-`union` `SmallVecData` accessors that are
// impossible by invariant: panics in debug builds, and is
// `unreachable_unchecked()` (UB if actually reached) in release builds.
// Callers must therefore only reach these arms from `unsafe` code that has
// already established the inline/heap variant.
#[cfg(not(feature = "union"))]
macro_rules! debug_unreachable {
    () => {
        debug_unreachable!("entered unreachable code")
    };
    ($e:expr) => {
        if cfg!(debug_assertions) {
            panic!($e);
        } else {
            unreachable_unchecked();
        }
    };
}
262
/// Deprecated abstraction over `extend_from_slice`; kept (hidden) only for
/// backward compatibility with older callers.
#[doc(hidden)]
#[deprecated]
pub trait ExtendFromSlice<T> {
    /// Extends a collection with the contents of a slice.
    fn extend_from_slice(&mut self, other: &[T]);
}

#[allow(deprecated)]
impl<T: Clone> ExtendFromSlice<T> for Vec<T> {
    fn extend_from_slice(&mut self, other: &[T]) {
        // Delegate straight to the inherent `Vec` method.
        Vec::extend_from_slice(self, other)
    }
}
295
/// Error type returned by the fallible (`try_*`) allocation methods.
#[derive(Debug)]
pub enum CollectionAllocErr {
    /// The requested capacity overflowed `usize` (or exceeded `isize::MAX`
    /// bytes when computing the layout).
    CapacityOverflow,
    /// The allocator returned a null pointer for the given layout.
    AllocErr {
        /// The layout that failed to allocate.
        layout: Layout,
    },
}
307
308impl fmt::Display for CollectionAllocErr {
309 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
310 write!(f, "Allocation error: {:?}", self)
311 }
312}
313
// A layout computation error always means the requested size was invalid, so
// it maps onto `CapacityOverflow`. (`LayoutErr` is the deprecated name of
// `LayoutError`; kept for MSRV/back-compat, hence the `allow`.)
#[allow(deprecated)]
impl From<LayoutErr> for CollectionAllocErr {
    fn from(_: LayoutErr) -> Self {
        CollectionAllocErr::CapacityOverflow
    }
}
320
321fn infallible<T>(result: Result<T, CollectionAllocErr>) -> T {
322 match result {
323 Ok(x) => x,
324 Err(CollectionAllocErr::CapacityOverflow) => panic!("capacity overflow"),
325 Err(CollectionAllocErr::AllocErr { layout }) => alloc::alloc::handle_alloc_error(layout),
326 }
327}
328
329fn layout_array<T>(n: usize) -> Result<Layout, CollectionAllocErr> {
332 let size = mem::size_of::<T>()
333 .checked_mul(n)
334 .ok_or(CollectionAllocErr::CapacityOverflow)?;
335 let align = mem::align_of::<T>();
336 Layout::from_size_align(size, align).map_err(|_| CollectionAllocErr::CapacityOverflow)
337}
338
// Frees a heap buffer previously allocated for `capacity` items of `T`.
//
// SAFETY: caller must pass the exact pointer and capacity that were used for
// the original allocation, and must not use the pointer afterwards. The
// `unwrap` cannot fail here because the same layout computation succeeded
// when the buffer was allocated.
unsafe fn deallocate<T>(ptr: NonNull<T>, capacity: usize) {
    let layout = layout_array::<T>(capacity).unwrap();
    alloc::alloc::dealloc(ptr.as_ptr() as *mut u8, layout)
}
344
/// A draining iterator for `SmallVec`, returned by [`SmallVec::drain`].
///
/// While the `Drain` exists, the source vector's length is set to the start
/// of the drained range; on drop, the preserved tail is shifted back down.
pub struct Drain<'a, T: 'a + Array> {
    // Index where the preserved tail begins in the source vector.
    tail_start: usize,
    // Number of elements in the preserved tail.
    tail_len: usize,
    // Iterator over the drained range; elements are moved out via `ptr::read`.
    iter: slice::Iter<'a, T::Item>,
    // Back-pointer to the source vector, used in `Drop` to restore the tail.
    vec: NonNull<SmallVec<T>>,
}
356
impl<'a, T: 'a + Array> fmt::Debug for Drain<'a, T>
where
    T::Item: fmt::Debug,
{
    // Shows only the elements not yet yielded by the iterator.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_tuple("Drain").field(&self.iter.as_slice()).finish()
    }
}
365
// SAFETY: `Drain` only holds references/pointers into the source vector, so
// it is `Sync`/`Send` exactly when the array type (and hence its items) is.
// NOTE(review): the bounds are on `T` (the array) rather than `T::Item`;
// `T: Send/Sync` implies the same for `[T::Item; N]` contents — confirm this
// matches upstream intent.
unsafe impl<'a, T: Sync + Array> Sync for Drain<'a, T> {}
unsafe impl<'a, T: Send + Array> Send for Drain<'a, T> {}
368
impl<'a, T: 'a + Array> Iterator for Drain<'a, T> {
    type Item = T::Item;

    #[inline]
    fn next(&mut self) -> Option<T::Item> {
        // SAFETY: each slot in the drained range is read (moved out) exactly
        // once; the source vector's length was already truncated below the
        // range, so no double-drop can occur.
        self.iter
            .next()
            .map(|reference| unsafe { ptr::read(reference) })
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.iter.size_hint()
    }
}
384
impl<'a, T: 'a + Array> DoubleEndedIterator for Drain<'a, T> {
    #[inline]
    fn next_back(&mut self) -> Option<T::Item> {
        // SAFETY: same single-read-per-slot argument as `next`.
        self.iter
            .next_back()
            .map(|reference| unsafe { ptr::read(reference) })
    }
}
393
impl<'a, T: Array> ExactSizeIterator for Drain<'a, T> {
    #[inline]
    fn len(&self) -> usize {
        // The underlying slice iterator knows the exact remaining count.
        self.iter.len()
    }
}

// Once the slice iterator is exhausted it stays exhausted, so `Drain` is fused.
impl<'a, T: Array> FusedIterator for Drain<'a, T> {}
402
impl<'a, T: 'a + Array> Drop for Drain<'a, T> {
    fn drop(&mut self) {
        // Drop any elements of the drained range the caller did not consume.
        self.for_each(drop);

        if self.tail_len > 0 {
            unsafe {
                let source_vec = self.vec.as_mut();

                // The vector's length was set to the start of the drained
                // range when the `Drain` was created; shift the preserved
                // tail down to close the gap, then restore the full length.
                let start = source_vec.len();
                let tail = self.tail_start;
                if tail != start {
                    // `ptr::copy` (memmove) because the ranges may overlap.
                    let ptr = source_vec.as_mut_ptr();
                    let src = ptr.add(tail);
                    let dst = ptr.add(start);
                    ptr::copy(src, dst, self.tail_len);
                }
                source_vec.set_len(start + self.tail_len);
            }
        }
    }
}
427
/// An iterator which uses a closure to determine if an element should be
/// removed, returned by [`SmallVec::drain_filter`].
#[cfg(feature = "drain_filter")]
pub struct DrainFilter<'a, T, F>
where
    F: FnMut(&mut T::Item) -> bool,
    T: Array,
{
    // The source vector; its length is set to 0 while the filter is active.
    vec: &'a mut SmallVec<T>,
    // Index of the next element to test.
    idx: usize,
    // Number of elements drained (removed) so far.
    del: usize,
    // Original length of `vec` before draining began.
    old_len: usize,
    // The predicate deciding whether an element is drained.
    pred: F,
    // True only while `pred` is executing; lets `Drop` detect a predicate
    // panic and skip running the predicate on the remaining elements.
    panic_flag: bool,
}
455
#[cfg(feature = "drain_filter")]
impl <T, F> fmt::Debug for DrainFilter<'_, T, F>
where
    F: FnMut(&mut T::Item) -> bool,
    T: Array,
    T::Item: fmt::Debug,
{
    // NOTE: `vec.as_slice()` is empty here in practice, since the vector's
    // length is zeroed while the filter is active — TODO confirm upstream
    // intent; this mirrors the std library's debug output shape.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_tuple("DrainFilter").field(&self.vec.as_slice()).finish()
    }
}
467
#[cfg(feature = "drain_filter")]
impl <T, F> Iterator for DrainFilter<'_, T, F>
where
    F: FnMut(&mut T::Item) -> bool,
    T: Array,
{
    type Item = T::Item;

    fn next(&mut self) -> Option<T::Item>
    {
        unsafe {
            while self.idx < self.old_len {
                let i = self.idx;
                // View the whole original buffer (length was zeroed on the
                // vector itself, so we rebuild the slice from the raw parts).
                let v = slice::from_raw_parts_mut(self.vec.as_mut_ptr(), self.old_len);
                // Bracket the predicate call so Drop can tell if it panicked.
                self.panic_flag = true;
                let drained = (self.pred)(&mut v[i]);
                self.panic_flag = false;
                self.idx += 1;
                if drained {
                    // Move the element out; slot `i` is now logically empty.
                    self.del += 1;
                    return Some(ptr::read(&v[i]));
                } else if self.del > 0 {
                    // Keep the retained prefix compact: shift this kept
                    // element left over the `del` vacated slots.
                    let del = self.del;
                    let src: *const Self::Item = &v[i];
                    let dst: *mut Self::Item = &mut v[i - del];
                    ptr::copy_nonoverlapping(src, dst, 1);
                }
            }
            None
        }
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        // Anywhere from none to all of the untested elements may be drained.
        (0, Some(self.old_len - self.idx))
    }
}
507
#[cfg(feature = "drain_filter")]
impl <T, F> Drop for DrainFilter<'_, T, F>
where
    F: FnMut(&mut T::Item) -> bool,
    T: Array,
{
    fn drop(&mut self) {
        // Guard that repairs the vector even if draining the remaining
        // elements (below) panics: it backshifts the unvisited tail over the
        // vacated slots and restores a consistent length.
        struct BackshiftOnDrop<'a, 'b, T, F>
        where
            F: FnMut(&mut T::Item) -> bool,
            T: Array
        {
            drain: &'b mut DrainFilter<'a, T, F>,
        }

        impl<'a, 'b, T, F> Drop for BackshiftOnDrop<'a, 'b, T, F>
        where
            F: FnMut(&mut T::Item) -> bool,
            T: Array
        {
            fn drop(&mut self) {
                unsafe {
                    if self.drain.idx < self.drain.old_len && self.drain.del > 0 {
                        // Shift the untested tail left over the `del` gaps so
                        // the retained elements are contiguous.
                        let ptr = self.drain.vec.as_mut_ptr();
                        let src = ptr.add(self.drain.idx);
                        let dst = src.sub(self.drain.del);
                        let tail_len = self.drain.old_len - self.drain.idx;
                        src.copy_to(dst, tail_len);
                    }
                    self.drain.vec.set_len(self.drain.old_len - self.drain.del);
                }
            }
        }

        let backshift = BackshiftOnDrop { drain: self };

        // If the predicate panicked (panic_flag still set), do not run it on
        // the remaining elements — just let the guard repair the vector.
        if !backshift.drain.panic_flag {
            backshift.drain.for_each(drop);
        }
    }
}
558
#[cfg(feature = "drain_keep_rest")]
impl<T, F> DrainFilter<'_, T, F>
where
    F: FnMut(&mut T::Item) -> bool,
    T: Array,
{
    /// Keep unyielded elements in the source `SmallVec`.
    ///
    /// Consumes the filter without running the predicate on the remaining
    /// elements: the unvisited tail is shifted down over the `del` slots
    /// already vacated by drained elements, and the vector's length is
    /// restored to `old_len - del`.
    pub fn keep_rest(self)
    {
        // Wrap in ManuallyDrop so our own `Drop` impl (which would drain the
        // remaining elements) does not run.
        let mut this = ManuallyDrop::new(self);

        unsafe {
            // Zero-sized items never need to be moved. Note this must check
            // the *item* size: `mem::size_of::<T>()` (the backing array) is
            // also zero for a zero-length array type like `[Item; 0]`, whose
            // spilled heap contents would still require the backshift.
            let needs_move = mem::size_of::<T::Item>() != 0;

            if needs_move && this.idx < this.old_len && this.del > 0 {
                // Shift the untested tail left over the vacated gap
                // (memmove semantics via `copy_to`, ranges may overlap).
                let ptr = this.vec.as_mut_ptr();
                let src = ptr.add(this.idx);
                let dst = src.sub(this.del);
                let tail_len = this.old_len - this.idx;
                src.copy_to(dst, tail_len);
            }

            let new_len = this.old_len - this.del;
            this.vec.set_len(new_len);
        }
    }
}
619
// With the `union` feature, inline and heap storage share the same bytes;
// `SmallVec::capacity` (relative to the inline capacity) discriminates which
// field is active. `ManuallyDrop` prevents the union from requiring
// `A: Copy` and suppresses automatic drop of the inline payload.
#[cfg(feature = "union")]
union SmallVecData<A: Array> {
    inline: core::mem::ManuallyDrop<MaybeUninit<A>>,
    heap: (NonNull<A::Item>, usize),
}
625
#[cfg(all(feature = "union", feature = "const_new"))]
impl<T, const N: usize> SmallVecData<[T; N]> {
    /// Const constructor wrapping a (possibly uninitialized) inline buffer.
    #[cfg_attr(docsrs, doc(cfg(feature = "const_new")))]
    #[inline]
    const fn from_const(inline: MaybeUninit<[T; N]>) -> Self {
        SmallVecData {
            inline: core::mem::ManuallyDrop::new(inline),
        }
    }
}
636
// Accessors for the union layout. All `unsafe fn`s here require the caller
// to know which field is currently active (inline vs. heap); reading the
// wrong union field is UB.
#[cfg(feature = "union")]
impl<A: Array> SmallVecData<A> {
    /// Pointer to the inline buffer. Caller must ensure inline is active.
    #[inline]
    unsafe fn inline(&self) -> ConstNonNull<A::Item> {
        ConstNonNull::new(self.inline.as_ptr() as *const A::Item).unwrap()
    }
    /// Mutable pointer to the inline buffer. Caller must ensure inline is active.
    #[inline]
    unsafe fn inline_mut(&mut self) -> NonNull<A::Item> {
        NonNull::new(self.inline.as_mut_ptr() as *mut A::Item).unwrap()
    }
    /// Wraps an inline buffer.
    #[inline]
    fn from_inline(inline: MaybeUninit<A>) -> SmallVecData<A> {
        SmallVecData {
            inline: core::mem::ManuallyDrop::new(inline),
        }
    }
    /// Extracts the inline buffer. Caller must ensure inline is active.
    #[inline]
    unsafe fn into_inline(self) -> MaybeUninit<A> {
        core::mem::ManuallyDrop::into_inner(self.inline)
    }
    /// Heap pointer and length. Caller must ensure heap is active.
    #[inline]
    unsafe fn heap(&self) -> (ConstNonNull<A::Item>, usize) {
        (ConstNonNull(self.heap.0), self.heap.1)
    }
    /// Heap pointer plus a mutable reference to the stored length.
    /// Caller must ensure heap is active.
    #[inline]
    unsafe fn heap_mut(&mut self) -> (NonNull<A::Item>, &mut usize) {
        let h = &mut self.heap;
        (h.0, &mut h.1)
    }
    /// Wraps a heap pointer and length.
    #[inline]
    fn from_heap(ptr: NonNull<A::Item>, len: usize) -> SmallVecData<A> {
        SmallVecData { heap: (ptr, len) }
    }
}
671
// Without the `union` feature (stable fallback), the storage is a plain enum
// carrying its own discriminant. The heap variant stores the pointer and the
// current length; the capacity lives on `SmallVec` itself.
#[cfg(not(feature = "union"))]
enum SmallVecData<A: Array> {
    Inline(MaybeUninit<A>),
    Heap {
        ptr: NonNull<A::Item>,
        len: usize,
    },
}
685
#[cfg(all(not(feature = "union"), feature = "const_new"))]
impl<T, const N: usize> SmallVecData<[T; N]> {
    /// Const constructor wrapping a (possibly uninitialized) inline buffer.
    #[cfg_attr(docsrs, doc(cfg(feature = "const_new")))]
    #[inline]
    const fn from_const(inline: MaybeUninit<[T; N]>) -> Self {
        SmallVecData::Inline(inline)
    }
}
694
// Accessors for the enum layout. The `unsafe fn`s require the caller to have
// established the active variant; the non-matching arm is statically
// impossible then (`debug_unreachable!` panics in debug, UB-free optimization
// hint in release).
#[cfg(not(feature = "union"))]
impl<A: Array> SmallVecData<A> {
    /// Pointer to the inline buffer. Caller must ensure `Inline` is active.
    #[inline]
    unsafe fn inline(&self) -> ConstNonNull<A::Item> {
        match self {
            SmallVecData::Inline(a) => ConstNonNull::new(a.as_ptr() as *const A::Item).unwrap(),
            _ => debug_unreachable!(),
        }
    }
    /// Mutable pointer to the inline buffer. Caller must ensure `Inline` is active.
    #[inline]
    unsafe fn inline_mut(&mut self) -> NonNull<A::Item> {
        match self {
            SmallVecData::Inline(a) => NonNull::new(a.as_mut_ptr() as *mut A::Item).unwrap(),
            _ => debug_unreachable!(),
        }
    }
    /// Wraps an inline buffer.
    #[inline]
    fn from_inline(inline: MaybeUninit<A>) -> SmallVecData<A> {
        SmallVecData::Inline(inline)
    }
    /// Extracts the inline buffer. Caller must ensure `Inline` is active.
    #[inline]
    unsafe fn into_inline(self) -> MaybeUninit<A> {
        match self {
            SmallVecData::Inline(a) => a,
            _ => debug_unreachable!(),
        }
    }
    /// Heap pointer and length. Caller must ensure `Heap` is active.
    #[inline]
    unsafe fn heap(&self) -> (ConstNonNull<A::Item>, usize) {
        match self {
            SmallVecData::Heap { ptr, len } => (ConstNonNull(*ptr), *len),
            _ => debug_unreachable!(),
        }
    }
    /// Heap pointer plus a mutable reference to the stored length.
    /// Caller must ensure `Heap` is active.
    #[inline]
    unsafe fn heap_mut(&mut self) -> (NonNull<A::Item>, &mut usize) {
        match self {
            SmallVecData::Heap { ptr, len } => (*ptr, len),
            _ => debug_unreachable!(),
        }
    }
    /// Wraps a heap pointer and length.
    #[inline]
    fn from_heap(ptr: NonNull<A::Item>, len: usize) -> SmallVecData<A> {
        SmallVecData::Heap { ptr, len }
    }
}
741
// SAFETY: the raw heap pointer is an owning pointer to `A::Item`s, so
// `SmallVecData` can be sent/shared exactly when the array type (and hence
// its items) can. These impls are needed because `NonNull` is neither `Send`
// nor `Sync` by itself.
unsafe impl<A: Array + Send> Send for SmallVecData<A> {}
unsafe impl<A: Array + Sync> Sync for SmallVecData<A> {}
744
/// A `Vec`-like container that stores up to `A::size()` elements inline
/// (on the stack) and spills to the heap beyond that.
pub struct SmallVec<A: Array> {
    // While inline: the current length. After spilling: the heap capacity.
    // `capacity > inline_capacity()` is the "spilled" discriminant; the heap
    // length lives inside `data`.
    capacity: usize,
    // The inline buffer or the (ptr, len) pair of the heap allocation.
    data: SmallVecData<A>,
}
778
impl<A: Array> SmallVec<A> {
    /// Construct an empty vector.
    #[inline]
    pub fn new() -> SmallVec<A> {
        // Reject invalid custom `Array` implementations: the array's layout
        // must be exactly `size()` items of `A::Item`. For valid impls this
        // assert is a compile-time constant and optimizes away.
        assert!(
            mem::size_of::<A>() == A::size() * mem::size_of::<A::Item>()
                && mem::align_of::<A>() >= mem::align_of::<A::Item>()
        );
        SmallVec {
            capacity: 0,
            data: SmallVecData::from_inline(MaybeUninit::uninit()),
        }
    }

    /// Construct an empty vector with capacity for at least `n` elements
    /// (spilling to the heap immediately if `n` exceeds the inline capacity).
    #[inline]
    pub fn with_capacity(n: usize) -> Self {
        let mut v = SmallVec::new();
        v.reserve_exact(n);
        v
    }

    /// Construct a new `SmallVec` from a `Vec<A::Item>`, stealing the `Vec`'s
    /// heap buffer when it does not fit inline.
    #[inline]
    pub fn from_vec(mut vec: Vec<A::Item>) -> SmallVec<A> {
        if vec.capacity() <= Self::inline_capacity() {
            unsafe {
                // Move elements into inline storage; the emptied `Vec` then
                // frees its (small) buffer normally on drop.
                let mut data = SmallVecData::<A>::from_inline(MaybeUninit::uninit());
                let len = vec.len();
                vec.set_len(0);
                ptr::copy_nonoverlapping(vec.as_ptr(), data.inline_mut().as_ptr(), len);

                SmallVec {
                    capacity: len,
                    data,
                }
            }
        } else {
            // Take ownership of the heap buffer wholesale.
            let (ptr, cap, len) = (vec.as_mut_ptr(), vec.capacity(), vec.len());
            mem::forget(vec);
            let ptr = NonNull::new(ptr)
                .expect("Cannot be null by `Vec` invariant");

            SmallVec {
                capacity: cap,
                data: SmallVecData::from_heap(ptr, len),
            }
        }
    }

    /// Construct a vector from a fully-initialized backing array; the
    /// resulting length equals the array's size.
    #[inline]
    pub fn from_buf(buf: A) -> SmallVec<A> {
        SmallVec {
            capacity: A::size(),
            data: SmallVecData::from_inline(MaybeUninit::new(buf)),
        }
    }

    /// Construct a vector from a backing array, using only the first `len`
    /// elements.
    ///
    /// # Panics
    /// Panics if `len > A::size()`.
    #[inline]
    pub fn from_buf_and_len(buf: A, len: usize) -> SmallVec<A> {
        assert!(len <= A::size());
        unsafe { SmallVec::from_buf_and_len_unchecked(MaybeUninit::new(buf), len) }
    }

    /// Construct a vector from a possibly-uninitialized backing array.
    ///
    /// # Safety
    /// The caller must ensure `len <= A::size()` and that the first `len`
    /// elements of `buf` are initialized.
    #[inline]
    pub unsafe fn from_buf_and_len_unchecked(buf: MaybeUninit<A>, len: usize) -> SmallVec<A> {
        SmallVec {
            capacity: len,
            data: SmallVecData::from_inline(buf),
        }
    }

    /// Sets the length of the vector directly.
    ///
    /// # Safety
    /// The caller must ensure `new_len <= capacity()` and that the first
    /// `new_len` elements are initialized.
    pub unsafe fn set_len(&mut self, new_len: usize) {
        let (_, len_ptr, _) = self.triple_mut();
        *len_ptr = new_len;
    }

    /// Number of elements the inline storage can hold. For zero-sized items
    /// this is effectively unbounded, so the vector never spills.
    #[inline]
    fn inline_capacity() -> usize {
        if mem::size_of::<A::Item>() > 0 {
            A::size()
        } else {
            core::usize::MAX
        }
    }

    /// The maximum number of elements this vector can hold inline.
    #[inline]
    pub fn inline_size(&self) -> usize {
        Self::inline_capacity()
    }

    /// The number of elements stored in the vector.
    #[inline]
    pub fn len(&self) -> usize {
        self.triple().1
    }

    /// Returns `true` if the vector is empty.
    #[inline]
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// The number of items the vector can hold without reallocating.
    #[inline]
    pub fn capacity(&self) -> usize {
        self.triple().2
    }

    /// Returns (pointer, length, capacity) for the active storage.
    #[inline]
    fn triple(&self) -> (ConstNonNull<A::Item>, usize, usize) {
        unsafe {
            if self.spilled() {
                let (ptr, len) = self.data.heap();
                (ptr, len, self.capacity)
            } else {
                // While inline, `self.capacity` doubles as the length.
                (self.data.inline(), self.capacity, Self::inline_capacity())
            }
        }
    }

    /// Returns (pointer, &mut length, capacity) for the active storage.
    #[inline]
    fn triple_mut(&mut self) -> (NonNull<A::Item>, &mut usize, usize) {
        unsafe {
            if self.spilled() {
                let (ptr, len_ptr) = self.data.heap_mut();
                (ptr, len_ptr, self.capacity)
            } else {
                (
                    self.data.inline_mut(),
                    &mut self.capacity,
                    Self::inline_capacity(),
                )
            }
        }
    }

    /// Returns `true` if the data has spilled into a separate heap allocation.
    #[inline]
    pub fn spilled(&self) -> bool {
        self.capacity > Self::inline_capacity()
    }

    /// Creates a draining iterator that removes the specified range and
    /// yields the removed items.
    ///
    /// # Panics
    /// Panics if the starting point is greater than the end point or the end
    /// point is greater than the length of the vector.
    pub fn drain<R>(&mut self, range: R) -> Drain<'_, A>
    where
        R: RangeBounds<usize>,
    {
        use core::ops::Bound::*;

        let len = self.len();
        let start = match range.start_bound() {
            Included(&n) => n,
            Excluded(&n) => n.checked_add(1).expect("Range start out of bounds"),
            Unbounded => 0,
        };
        let end = match range.end_bound() {
            Included(&n) => n.checked_add(1).expect("Range end out of bounds"),
            Excluded(&n) => n,
            Unbounded => len,
        };

        assert!(start <= end);
        assert!(end <= len);

        unsafe {
            // Truncate to the range start so a leaked `Drain` at worst leaks
            // elements instead of exposing moved-out slots.
            self.set_len(start);

            let range_slice = slice::from_raw_parts(self.as_ptr().add(start), end - start);

            Drain {
                tail_start: end,
                tail_len: len - end,
                iter: range_slice.iter(),
                vec: NonNull::new_unchecked(self as *mut _),
            }
        }
    }

    /// Creates an iterator which uses a closure to determine if an element
    /// should be removed; removed elements are yielded.
    #[cfg(feature = "drain_filter")]
    pub fn drain_filter<F>(&mut self, filter: F) -> DrainFilter<'_, A, F,>
    where
        F: FnMut(&mut A::Item) -> bool,
    {
        let old_len = self.len();

        unsafe {
            // Zero the length so a leaked filter cannot expose stale slots.
            self.set_len(0);
        }

        DrainFilter { vec: self, idx: 0, del: 0, old_len, pred: filter, panic_flag: false }
    }

    /// Append an item to the vector, growing the storage if it is full.
    #[inline]
    pub fn push(&mut self, value: A::Item) {
        unsafe {
            let (mut ptr, mut len, cap) = self.triple_mut();
            if *len == cap {
                self.reserve_one_unchecked();
                // Growing past `cap == len` always spills, so the data is now
                // on the heap; refresh the (ptr, len) pair.
                let (heap_ptr, heap_len) = self.data.heap_mut();
                ptr = heap_ptr;
                len = heap_len;
            }
            ptr::write(ptr.as_ptr().add(*len), value);
            *len += 1;
        }
    }

    /// Remove an item from the end of the vector and return it, or `None`
    /// if empty.
    #[inline]
    pub fn pop(&mut self) -> Option<A::Item> {
        unsafe {
            let (ptr, len_ptr, _) = self.triple_mut();
            let ptr: *const _ = ptr.as_ptr();
            if *len_ptr == 0 {
                return None;
            }
            // Decrement the length before reading so a later panic cannot
            // double-drop the moved-out element.
            let last_index = *len_ptr - 1;
            *len_ptr = last_index;
            Some(ptr::read(ptr.add(last_index)))
        }
    }

    /// Moves all the elements of `other` into `self`, leaving `other` empty.
    pub fn append<B>(&mut self, other: &mut SmallVec<B>)
    where
        B: Array<Item = A::Item>,
    {
        self.extend(other.drain(..))
    }

    /// Re-allocate to set the capacity to
    /// `max(new_cap, inline_size())`.
    ///
    /// # Panics
    /// Panics if `new_cap` is less than the vector's length or if the
    /// allocation fails.
    pub fn grow(&mut self, new_cap: usize) {
        infallible(self.try_grow(new_cap))
    }

    /// Fallible version of [`grow`](Self::grow): returns an error instead of
    /// panicking on allocation failure.
    pub fn try_grow(&mut self, new_cap: usize) -> Result<(), CollectionAllocErr> {
        unsafe {
            let unspilled = !self.spilled();
            let (ptr, &mut len, cap) = self.triple_mut();
            assert!(new_cap >= len);
            if new_cap <= Self::inline_capacity() {
                if unspilled {
                    // Already inline; nothing to do.
                    return Ok(());
                }
                // Shrink back into inline storage and free the heap buffer.
                self.data = SmallVecData::from_inline(MaybeUninit::uninit());
                ptr::copy_nonoverlapping(ptr.as_ptr(), self.data.inline_mut().as_ptr(), len);
                self.capacity = len;
                deallocate(ptr, cap);
            } else if new_cap != cap {
                let layout = layout_array::<A::Item>(new_cap)?;
                debug_assert!(layout.size() > 0);
                let new_alloc;
                if unspilled {
                    // First spill: allocate fresh and copy the inline data.
                    new_alloc = NonNull::new(alloc::alloc::alloc(layout))
                        .ok_or(CollectionAllocErr::AllocErr { layout })?
                        .cast();
                    ptr::copy_nonoverlapping(ptr.as_ptr(), new_alloc.as_ptr(), len);
                } else {
                    // Already on the heap: let the allocator resize in place
                    // when it can.
                    let old_layout = layout_array::<A::Item>(cap)?;

                    let new_ptr =
                        alloc::alloc::realloc(ptr.as_ptr() as *mut u8, old_layout, layout.size());
                    new_alloc = NonNull::new(new_ptr)
                        .ok_or(CollectionAllocErr::AllocErr { layout })?
                        .cast();
                }
                self.data = SmallVecData::from_heap(new_alloc, len);
                self.capacity = new_cap;
            }
            Ok(())
        }
    }

    /// Reserve capacity for `additional` more elements to be inserted.
    /// May reserve more space than requested to avoid frequent reallocations.
    ///
    /// # Panics
    /// Panics on capacity overflow or allocation failure.
    #[inline]
    pub fn reserve(&mut self, additional: usize) {
        infallible(self.try_reserve(additional))
    }

    // Cold path of `push`/`insert`: grow to the next power of two above
    // `len + 1`. Precondition (debug-checked): the vector is exactly full.
    #[cold]
    fn reserve_one_unchecked(&mut self) {
        debug_assert_eq!(self.len(), self.capacity());
        let new_cap = self.len()
            .checked_add(1)
            .and_then(usize::checked_next_power_of_two)
            .expect("capacity overflow");
        infallible(self.try_grow(new_cap))
    }

    /// Fallible version of [`reserve`](Self::reserve): returns an error
    /// instead of panicking.
    pub fn try_reserve(&mut self, additional: usize) -> Result<(), CollectionAllocErr> {
        // Using `triple_mut` (not `capacity()`/`len()`) keeps this to a
        // single discriminant check.
        let (_, &mut len, cap) = self.triple_mut();
        if cap - len >= additional {
            return Ok(());
        }
        // Round up to a power of two for amortized O(1) growth.
        let new_cap = len
            .checked_add(additional)
            .and_then(usize::checked_next_power_of_two)
            .ok_or(CollectionAllocErr::CapacityOverflow)?;
        self.try_grow(new_cap)
    }

    /// Reserve the minimum capacity for `additional` more elements.
    ///
    /// # Panics
    /// Panics on capacity overflow or allocation failure.
    pub fn reserve_exact(&mut self, additional: usize) {
        infallible(self.try_reserve_exact(additional))
    }

    /// Fallible version of [`reserve_exact`](Self::reserve_exact).
    pub fn try_reserve_exact(&mut self, additional: usize) -> Result<(), CollectionAllocErr> {
        let (_, &mut len, cap) = self.triple_mut();
        if cap - len >= additional {
            return Ok(());
        }
        // Exact: no power-of-two rounding here.
        let new_cap = len
            .checked_add(additional)
            .ok_or(CollectionAllocErr::CapacityOverflow)?;
        self.try_grow(new_cap)
    }

    /// Shrink the capacity as much as possible: back into inline storage if
    /// the elements fit, otherwise down to the current length.
    pub fn shrink_to_fit(&mut self) {
        if !self.spilled() {
            return;
        }
        let len = self.len();
        if self.inline_size() >= len {
            unsafe {
                // Move elements back inline, then free the heap buffer.
                let (ptr, len) = self.data.heap();
                self.data = SmallVecData::from_inline(MaybeUninit::uninit());
                ptr::copy_nonoverlapping(ptr.as_ptr(), self.data.inline_mut().as_ptr(), len);
                deallocate(ptr.0, self.capacity);
                self.capacity = len;
            }
        } else if self.capacity() > len {
            self.grow(len);
        }
    }

    /// Shorten the vector, keeping the first `len` elements and dropping the
    /// rest. No effect if `len` is not less than the current length.
    pub fn truncate(&mut self, len: usize) {
        unsafe {
            let (ptr, len_ptr, _) = self.triple_mut();
            let ptr = ptr.as_ptr();
            // Drop from the back, decrementing the stored length before each
            // drop so a panicking destructor leaves the vector consistent.
            while len < *len_ptr {
                let last_index = *len_ptr - 1;
                *len_ptr = last_index;
                ptr::drop_in_place(ptr.add(last_index));
            }
        }
    }

    /// Extracts a slice containing the entire vector.
    pub fn as_slice(&self) -> &[A::Item] {
        self
    }

    /// Extracts a mutable slice of the entire vector.
    pub fn as_mut_slice(&mut self) -> &mut [A::Item] {
        self
    }

    /// Remove and return the element at `index`, replacing it with the last
    /// element. Does not preserve ordering; O(1).
    ///
    /// # Panics
    /// Panics if `index` is out of bounds.
    #[inline]
    pub fn swap_remove(&mut self, index: usize) -> A::Item {
        let len = self.len();
        self.swap(len - 1, index);
        // `swap` already bounds-checked, so the vector is non-empty here.
        self.pop()
            .unwrap_or_else(|| unsafe { unreachable_unchecked() })
    }

    /// Remove all elements from the vector (keeping the capacity).
    #[inline]
    pub fn clear(&mut self) {
        self.truncate(0);
    }

    /// Remove and return the element at `index`, shifting all elements after
    /// it to the left.
    ///
    /// # Panics
    /// Panics if `index` is out of bounds.
    pub fn remove(&mut self, index: usize) -> A::Item {
        unsafe {
            let (ptr, len_ptr, _) = self.triple_mut();
            let len = *len_ptr;
            assert!(index < len);
            // Shorten first so a panic mid-shift cannot expose the moved-out
            // slot.
            *len_ptr = len - 1;
            let ptr = ptr.as_ptr().add(index);
            let item = ptr::read(ptr);
            ptr::copy(ptr.add(1), ptr, len - index - 1);
            item
        }
    }

    /// Insert an element at `index`, shifting all elements after it to the
    /// right.
    ///
    /// # Panics
    /// Panics if `index` exceeds the vector's length.
    pub fn insert(&mut self, index: usize, element: A::Item) {
        unsafe {
            let (mut ptr, mut len_ptr, cap) = self.triple_mut();
            if *len_ptr == cap {
                self.reserve_one_unchecked();
                // The grow spilled to the heap; refresh the pointers.
                let (heap_ptr, heap_len_ptr) = self.data.heap_mut();
                ptr = heap_ptr;
                len_ptr = heap_len_ptr;
            }
            let mut ptr = ptr.as_ptr();
            let len = *len_ptr;
            if index > len {
                panic!("index exceeds length");
            }
            ptr = ptr.add(index);
            if index < len {
                // Shift the tail right by one (overlapping copy).
                ptr::copy(ptr, ptr.add(1), len - index);
            }
            *len_ptr = len + 1;
            ptr::write(ptr, element);
        }
    }

    /// Insert multiple elements at position `index`, shifting all following
    /// elements toward the back.
    pub fn insert_many<I: IntoIterator<Item = A::Item>>(&mut self, index: usize, iterable: I) {
        let mut iter = iterable.into_iter();
        if index == self.len() {
            // Appending: no shifting needed.
            return self.extend(iter);
        }

        let (lower_size_bound, _) = iter.size_hint();
        // Guard the pointer arithmetic below against overflow.
        assert!(lower_size_bound <= core::isize::MAX as usize);
        assert!(index + lower_size_bound >= index);
        let mut num_added = 0;
        let old_len = self.len();
        assert!(index <= old_len);

        unsafe {
            // Open a gap of `lower_size_bound` slots at `index` by moving the
            // tail up, then fill the gap from the iterator.
            self.reserve(lower_size_bound);
            let start = self.as_mut_ptr();
            let ptr = start.add(index);

            ptr::copy(ptr, ptr.add(lower_size_bound), old_len - index);

            // While filling, the vector length is 0 and this guard knows
            // which slots are live, so a panicking iterator cannot cause
            // double-drops or leaks.
            self.set_len(0);
            let mut guard = DropOnPanic {
                start,
                skip: index..(index + lower_size_bound),
                len: old_len + lower_size_bound,
            };

            // `reserve` may have reallocated; re-derive the pointers.
            let start = self.as_mut_ptr();
            let ptr = start.add(index);

            while num_added < lower_size_bound {
                let element = match iter.next() {
                    Some(x) => x,
                    None => break,
                };
                let cur = ptr.add(num_added);
                ptr::write(cur, element);
                guard.skip.start += 1;
                num_added += 1;
            }

            if num_added < lower_size_bound {
                // Iterator under-delivered: close the remaining gap.
                ptr::copy(
                    ptr.add(lower_size_bound),
                    ptr.add(num_added),
                    old_len - index,
                );
            }
            self.set_len(old_len + num_added);
            mem::forget(guard);
        }

        // Iterator over-delivered beyond its lower bound: fall back to
        // one-at-a-time insertion for the excess.
        for element in iter {
            self.insert(index + num_added, element);
            num_added += 1;
        }

        // Drops every initialized slot except the not-yet-filled gap if the
        // element iterator panics mid-fill.
        struct DropOnPanic<T> {
            start: *mut T,
            skip: Range<usize>,
            len: usize,
        }

        impl<T> Drop for DropOnPanic<T> {
            fn drop(&mut self) {
                for i in 0..self.len {
                    if !self.skip.contains(&i) {
                        unsafe {
                            ptr::drop_in_place(self.start.add(i));
                        }
                    }
                }
            }
        }
    }

    /// Convert the `SmallVec` into a `Vec` without reallocating if it has
    /// already spilled onto the heap.
    pub fn into_vec(mut self) -> Vec<A::Item> {
        if self.spilled() {
            unsafe {
                // Hand the heap buffer to `Vec` and skip our own destructor.
                let (ptr, &mut len) = self.data.heap_mut();
                let v = Vec::from_raw_parts(ptr.as_ptr(), len, self.capacity);
                mem::forget(self);
                v
            }
        } else {
            self.into_iter().collect()
        }
    }

    /// Convert the `SmallVec` into a boxed slice, dropping excess capacity.
    pub fn into_boxed_slice(self) -> Box<[A::Item]> {
        self.into_vec().into_boxed_slice()
    }

    /// Convert the `SmallVec` into its backing array `A` if it is inline and
    /// exactly full; otherwise return it unchanged as the error.
    pub fn into_inner(self) -> Result<A, Self> {
        if self.spilled() || self.len() != A::size() {
            Err(self)
        } else {
            unsafe {
                // Move the data out bitwise and skip our destructor.
                let data = ptr::read(&self.data);
                mem::forget(self);
                Ok(data.into_inline().assume_init())
            }
        }
    }

    /// Retain only the elements for which `f` returns `true`, preserving
    /// order.
    pub fn retain<F: FnMut(&mut A::Item) -> bool>(&mut self, mut f: F) {
        let mut del = 0;
        let len = self.len();
        for i in 0..len {
            if !f(&mut self[i]) {
                del += 1;
            } else if del > 0 {
                // Compact kept elements over the removed ones.
                self.swap(i - del, i);
            }
        }
        self.truncate(len - del);
    }

    /// Identical to [`retain`](Self::retain); provided for `Vec` API parity
    /// (the predicate already receives `&mut` here).
    pub fn retain_mut<F: FnMut(&mut A::Item) -> bool>(&mut self, f: F) {
        self.retain(f)
    }

    /// Remove consecutive duplicate elements.
    pub fn dedup(&mut self)
    where
        A::Item: PartialEq<A::Item>,
    {
        self.dedup_by(|a, b| a == b);
    }

    /// Remove consecutive elements for which `same_bucket` returns `true`.
    /// The first of each run is kept.
    pub fn dedup_by<F>(&mut self, mut same_bucket: F)
    where
        F: FnMut(&mut A::Item, &mut A::Item) -> bool,
    {
        let len = self.len();
        if len <= 1 {
            return;
        }

        let ptr = self.as_mut_ptr();
        // Write cursor: elements [0, w) are the deduplicated prefix.
        let mut w: usize = 1;

        unsafe {
            for r in 1..len {
                let p_r = ptr.add(r);
                let p_wm1 = ptr.add(w - 1);
                if !same_bucket(&mut *p_r, &mut *p_wm1) {
                    if r != w {
                        // Swap (not overwrite) so `truncate` below drops the
                        // duplicates instead of leaking/double-dropping.
                        let p_w = p_wm1.add(1);
                        mem::swap(&mut *p_r, &mut *p_w);
                    }
                    w += 1;
                }
            }
        }

        self.truncate(w);
    }

    /// Remove consecutive elements that map to the same key.
    pub fn dedup_by_key<F, K>(&mut self, mut key: F)
    where
        F: FnMut(&mut A::Item) -> K,
        K: PartialEq<K>,
    {
        self.dedup_by(|a, b| key(a) == key(b));
    }

    /// Resize the vector to `new_len`, filling new slots with values from
    /// `f()` or truncating as needed.
    pub fn resize_with<F>(&mut self, new_len: usize, f: F)
    where
        F: FnMut() -> A::Item,
    {
        let old_len = self.len();
        if old_len < new_len {
            let mut f = f;
            let additional = new_len - old_len;
            self.reserve(additional);
            for _ in 0..additional {
                self.push(f());
            }
        } else if old_len > new_len {
            self.truncate(new_len);
        }
    }

    /// Creates a `SmallVec` directly from the raw components of another,
    /// heap-backed `SmallVec`.
    ///
    /// # Safety
    /// `ptr` must have been allocated for `capacity` items of `A::Item` via
    /// this crate's allocation path, the first `length` items must be
    /// initialized, and ownership of the buffer is transferred to the result.
    /// `capacity` must exceed the inline capacity (asserted).
    #[inline]
    pub unsafe fn from_raw_parts(ptr: *mut A::Item, length: usize, capacity: usize) -> SmallVec<A> {
        let ptr = unsafe {
            debug_assert!(!ptr.is_null(), "Called `from_raw_parts` with null pointer.");
            NonNull::new_unchecked(ptr)
        };
        // A capacity at or below the inline threshold would make `spilled()`
        // report inline storage and misinterpret `data`.
        assert!(capacity > Self::inline_capacity());
        SmallVec {
            capacity,
            data: SmallVecData::from_heap(ptr, length),
        }
    }

    /// Returns a raw pointer to the vector's buffer.
    pub fn as_ptr(&self) -> *const A::Item {
        self.triple().0.as_ptr()
    }

    /// Returns a raw mutable pointer to the vector's buffer.
    pub fn as_mut_ptr(&mut self) -> *mut A::Item {
        self.triple_mut().0.as_ptr()
    }
}
1734
impl<A: Array> SmallVec<A>
where
    A::Item: Copy,
{
    /// Copy the elements from a slice into a new `SmallVec` (bitwise copy,
    /// available because `A::Item: Copy`).
    pub fn from_slice(slice: &[A::Item]) -> Self {
        let len = slice.len();
        if len <= Self::inline_capacity() {
            SmallVec {
                capacity: len,
                data: SmallVecData::from_inline(unsafe {
                    let mut data: MaybeUninit<A> = MaybeUninit::uninit();
                    ptr::copy_nonoverlapping(
                        slice.as_ptr(),
                        data.as_mut_ptr() as *mut A::Item,
                        len,
                    );
                    data
                }),
            }
        } else {
            // Too big for inline: build a Vec and steal its buffer.
            let mut b = slice.to_vec();
            let cap = b.capacity();
            let ptr = NonNull::new(b.as_mut_ptr()).expect("Vec always contain non null pointers.");
            mem::forget(b);
            SmallVec {
                capacity: cap,
                data: SmallVecData::from_heap(ptr, len),
            }
        }
    }

    /// Copy elements from a slice into the vector at position `index`,
    /// shifting any following elements toward the back.
    ///
    /// # Panics
    /// Panics if `index > len`.
    #[inline]
    pub fn insert_from_slice(&mut self, index: usize, slice: &[A::Item]) {
        self.reserve(slice.len());

        let len = self.len();
        assert!(index <= len);

        unsafe {
            let slice_ptr = slice.as_ptr();
            let ptr = self.as_mut_ptr().add(index);
            // Open a gap (overlapping move), then bitwise-copy the slice in.
            ptr::copy(ptr, ptr.add(slice.len()), len - index);
            ptr::copy_nonoverlapping(slice_ptr, ptr, slice.len());
            self.set_len(len + slice.len());
        }
    }

    /// Copy elements from a slice and append them to the vector.
    #[inline]
    pub fn extend_from_slice(&mut self, slice: &[A::Item]) {
        let len = self.len();
        self.insert_from_slice(len, slice);
    }
}
1798
impl<A: Array> SmallVec<A>
where
    A::Item: Clone,
{
    /// Resize the vector to `len`, cloning `value` into each new slot or
    /// truncating as needed.
    pub fn resize(&mut self, len: usize, value: A::Item) {
        let old_len = self.len();

        if len > old_len {
            self.extend(repeat(value).take(len - old_len));
        } else {
            self.truncate(len);
        }
    }

    /// Creates a `SmallVec` with `n` clones of `elem`, matching
    /// `vec![elem; n]`.
    pub fn from_elem(elem: A::Item, n: usize) -> Self {
        if n > Self::inline_capacity() {
            vec![elem; n].into()
        } else {
            let mut v = SmallVec::<A>::new();
            unsafe {
                let (ptr, len_ptr, _) = v.triple_mut();
                let ptr = ptr.as_ptr();
                // `SetLenOnDrop` commits the length even if `clone()` panics
                // mid-loop, so already-written elements are dropped correctly.
                let mut local_len = SetLenOnDrop::new(len_ptr);

                for i in 0..n {
                    ::core::ptr::write(ptr.add(i), elem.clone());
                    local_len.increment_len(1);
                }
            }
            v
        }
    }
}
1845
impl<A: Array> ops::Deref for SmallVec<A> {
    type Target = [A::Item];
    #[inline]
    fn deref(&self) -> &[A::Item] {
        // SAFETY: `triple` returns the active buffer pointer and the count of
        // initialized elements, which is exactly a valid slice.
        unsafe {
            let (ptr, len, _) = self.triple();
            slice::from_raw_parts(ptr.as_ptr(), len)
        }
    }
}
1856
impl<A: Array> ops::DerefMut for SmallVec<A> {
    #[inline]
    fn deref_mut(&mut self) -> &mut [A::Item] {
        // SAFETY: same invariant as `deref`, with unique access via `&mut self`.
        unsafe {
            let (ptr, &mut len, _) = self.triple_mut();
            slice::from_raw_parts_mut(ptr.as_ptr(), len)
        }
    }
}
1866
// Slice-view conversions; both go through the `Deref`/`DerefMut` impls.
impl<A: Array> AsRef<[A::Item]> for SmallVec<A> {
    #[inline]
    fn as_ref(&self) -> &[A::Item] {
        self
    }
}

impl<A: Array> AsMut<[A::Item]> for SmallVec<A> {
    #[inline]
    fn as_mut(&mut self) -> &mut [A::Item] {
        self
    }
}
1880
// `Borrow`/`BorrowMut` as slices, enabling e.g. map lookups keyed by slices.
impl<A: Array> Borrow<[A::Item]> for SmallVec<A> {
    #[inline]
    fn borrow(&self) -> &[A::Item] {
        self
    }
}

impl<A: Array> BorrowMut<[A::Item]> for SmallVec<A> {
    #[inline]
    fn borrow_mut(&mut self) -> &mut [A::Item] {
        self
    }
}
1894
// An infallible in-memory `io::Write` sink for byte-backed SmallVecs,
// mirroring the impl for `Vec<u8>`.
#[cfg(feature = "write")]
#[cfg_attr(docsrs, doc(cfg(feature = "write")))]
impl<A: Array<Item = u8>> io::Write for SmallVec<A> {
    #[inline]
    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
        // Always accepts the whole buffer.
        self.extend_from_slice(buf);
        Ok(buf.len())
    }

    #[inline]
    fn write_all(&mut self, buf: &[u8]) -> io::Result<()> {
        self.extend_from_slice(buf);
        Ok(())
    }

    #[inline]
    fn flush(&mut self) -> io::Result<()> {
        // Nothing buffered beyond the vector itself.
        Ok(())
    }
}
1915
// Serializes as a plain sequence, identical on the wire to `Vec<A::Item>`.
#[cfg(feature = "serde")]
#[cfg_attr(docsrs, doc(cfg(feature = "serde")))]
impl<A: Array> Serialize for SmallVec<A>
where
    A::Item: Serialize,
{
    fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
        let mut state = serializer.serialize_seq(Some(self.len()))?;
        for item in self {
            state.serialize_element(&item)?;
        }
        state.end()
    }
}
1930
// Deserializes from any sequence, delegating to `SmallVecVisitor`.
#[cfg(feature = "serde")]
#[cfg_attr(docsrs, doc(cfg(feature = "serde")))]
impl<'de, A: Array> Deserialize<'de> for SmallVec<A>
where
    A::Item: Deserialize<'de>,
{
    fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
        deserializer.deserialize_seq(SmallVecVisitor {
            phantom: PhantomData,
        })
    }
}
1943
#[cfg(feature = "serde")]
// Serde visitor that collects a sequence into a `SmallVec<A>`.
struct SmallVecVisitor<A> {
    // Names the target array type without storing a value of it.
    phantom: PhantomData<A>,
}
1948
#[cfg(feature = "serde")]
impl<'de, A: Array> Visitor<'de> for SmallVecVisitor<A>
where
    A::Item: Deserialize<'de>,
{
    type Value = SmallVec<A>;

    fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
        formatter.write_str("a sequence")
    }

    fn visit_seq<B>(self, mut seq: B) -> Result<Self::Value, B::Error>
    where
        B: SeqAccess<'de>,
    {
        use serde::de::Error;
        // Pre-reserve from the (possibly absent) size hint; a failed or
        // oversized reservation surfaces as a deserialization error instead
        // of aborting.
        let hint = seq.size_hint().unwrap_or(0);
        let mut out = SmallVec::new();
        out.try_reserve(hint).map_err(B::Error::custom)?;

        while let Some(element) = seq.next_element()? {
            out.push(element);
        }

        Ok(out)
    }
}
1976
#[cfg(feature = "malloc_size_of")]
impl<A: Array> MallocShallowSizeOf for SmallVec<A> {
    fn shallow_size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        if self.spilled() {
            // Heap mode: ask the allocator-aware callback for the real size
            // of the block behind the data pointer.
            unsafe { ops.malloc_size_of(self.as_ptr()) }
        } else {
            // Inline mode: the elements live inside `self`, so there is no
            // separate heap block to measure.
            0
        }
    }
}
1987
#[cfg(feature = "malloc_size_of")]
impl<A> MallocSizeOf for SmallVec<A>
where
    A: Array,
    A::Item: MallocSizeOf,
{
    /// Shallow size of the heap block (if spilled) plus the deep size of
    /// every element.
    fn size_of(&self, ops: &mut MallocSizeOfOps) -> usize {
        let mut total = self.shallow_size_of(ops);
        for element in self.iter() {
            total += element.size_of(ops);
        }
        total
    }
}
2002
#[cfg(feature = "specialization")]
// Specialization hook for `From<&[T]>`: lets `Copy` element types take a
// bulk-copy path while other types fall back to per-element cloning.
trait SpecFrom<A: Array, S> {
    fn spec_from(slice: S) -> SmallVec<A>;
}
2007
// Generic (non-`Copy`) fallback impl of `SpecFrom` lives in this module.
#[cfg(feature = "specialization")]
mod specialization;

// `arbitrary::Arbitrary` support for fuzzing.
#[cfg(feature = "arbitrary")]
mod arbitrary;
2013
#[cfg(feature = "specialization")]
impl<'a, A: Array> SpecFrom<A, &'a [A::Item]> for SmallVec<A>
where
    A::Item: Copy,
{
    // Specialized fast path: `Copy` items can be bulk-copied from the slice
    // instead of cloned one by one.
    #[inline]
    fn spec_from(slice: &'a [A::Item]) -> SmallVec<A> {
        SmallVec::from_slice(slice)
    }
}
2024
2025impl<'a, A: Array> From<&'a [A::Item]> for SmallVec<A>
2026where
2027 A::Item: Clone,
2028{
2029 #[cfg(not(feature = "specialization"))]
2030 #[inline]
2031 fn from(slice: &'a [A::Item]) -> SmallVec<A> {
2032 slice.iter().cloned().collect()
2033 }
2034
2035 #[cfg(feature = "specialization")]
2036 #[inline]
2037 fn from(slice: &'a [A::Item]) -> SmallVec<A> {
2038 SmallVec::spec_from(slice)
2039 }
2040}
2041
2042impl<A: Array> From<Vec<A::Item>> for SmallVec<A> {
2043 #[inline]
2044 fn from(vec: Vec<A::Item>) -> SmallVec<A> {
2045 SmallVec::from_vec(vec)
2046 }
2047}
2048
2049impl<A: Array> From<A> for SmallVec<A> {
2050 #[inline]
2051 fn from(array: A) -> SmallVec<A> {
2052 SmallVec::from_buf(array)
2053 }
2054}
2055
2056impl<A: Array, I: SliceIndex<[A::Item]>> ops::Index<I> for SmallVec<A> {
2057 type Output = I::Output;
2058
2059 fn index(&self, index: I) -> &I::Output {
2060 &(**self)[index]
2061 }
2062}
2063
2064impl<A: Array, I: SliceIndex<[A::Item]>> ops::IndexMut<I> for SmallVec<A> {
2065 fn index_mut(&mut self, index: I) -> &mut I::Output {
2066 &mut (&mut **self)[index]
2067 }
2068}
2069
#[allow(deprecated)]
impl<A: Array> ExtendFromSlice<A::Item> for SmallVec<A>
where
    A::Item: Copy,
{
    // Forwards the deprecated trait method to the inherent method of the
    // same name.
    fn extend_from_slice(&mut self, other: &[A::Item]) {
        SmallVec::extend_from_slice(self, other)
    }
}
2079
2080impl<A: Array> FromIterator<A::Item> for SmallVec<A> {
2081 #[inline]
2082 fn from_iter<I: IntoIterator<Item = A::Item>>(iterable: I) -> SmallVec<A> {
2083 let mut v = SmallVec::new();
2084 v.extend(iterable);
2085 v
2086 }
2087}
2088
impl<A: Array> Extend<A::Item> for SmallVec<A> {
    fn extend<I: IntoIterator<Item = A::Item>>(&mut self, iterable: I) {
        let mut iter = iterable.into_iter();
        // Reserve the iterator's lower bound up front so the fast loop below
        // can write within capacity without reallocating.
        let (lower_size_bound, _) = iter.size_hint();
        self.reserve(lower_size_bound);

        // SAFETY: writes stay below `cap`, and `SetLenOnDrop` publishes the
        // new length even if `iter.next()` panics, so every element written
        // so far is owned (and will be dropped) by the vector.
        unsafe {
            let (ptr, len_ptr, cap) = self.triple_mut();
            let ptr = ptr.as_ptr();
            let mut len = SetLenOnDrop::new(len_ptr);
            while len.get() < cap {
                if let Some(out) = iter.next() {
                    ptr::write(ptr.add(len.get()), out);
                    len.increment_len(1);
                } else {
                    return;
                }
            }
        }

        // The size hint was an underestimate and capacity ran out: fall back
        // to `push`, which grows the vector as needed.
        for elem in iter {
            self.push(elem);
        }
    }
}
2114
2115impl<A: Array> fmt::Debug for SmallVec<A>
2116where
2117 A::Item: fmt::Debug,
2118{
2119 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
2120 f.debug_list().entries(self.iter()).finish()
2121 }
2122}
2123
2124impl<A: Array> Default for SmallVec<A> {
2125 #[inline]
2126 fn default() -> SmallVec<A> {
2127 SmallVec::new()
2128 }
2129}
2130
#[cfg(feature = "may_dangle")]
// `#[may_dangle]` tells dropck this drop impl never inspects values of type
// `A`, permitting more flexible borrow lifetimes for callers.
unsafe impl<#[may_dangle] A: Array> Drop for SmallVec<A> {
    fn drop(&mut self) {
        unsafe {
            if self.spilled() {
                // Heap mode: reconstitute the original `Vec`; the temporary
                // is dropped at the end of the statement, freeing both the
                // elements and the allocation.
                let (ptr, &mut len) = self.data.heap_mut();
                Vec::from_raw_parts(ptr.as_ptr(), len, self.capacity);
            } else {
                // Inline mode: only the elements need dropping; the storage
                // itself lives inside `self`.
                ptr::drop_in_place(&mut self[..]);
            }
        }
    }
}
2144
#[cfg(not(feature = "may_dangle"))]
impl<A: Array> Drop for SmallVec<A> {
    fn drop(&mut self) {
        unsafe {
            if self.spilled() {
                // Heap mode: reconstitute the original `Vec` and drop it,
                // freeing both the elements and the allocation.
                let (ptr, &mut len) = self.data.heap_mut();
                drop(Vec::from_raw_parts(ptr.as_ptr(), len, self.capacity));
            } else {
                // Inline mode: only the elements need dropping; the storage
                // itself lives inside `self`.
                ptr::drop_in_place(&mut self[..]);
            }
        }
    }
}
2158
impl<A: Array> Clone for SmallVec<A>
where
    A::Item: Clone,
{
    #[inline]
    fn clone(&self) -> SmallVec<A> {
        SmallVec::from(self.as_slice())
    }

    // Reuses `self`'s existing elements and allocation where possible,
    // instead of dropping everything and cloning from scratch.
    fn clone_from(&mut self, source: &Self) {
        // Shrink so we hold no more elements than `source` has.
        self.truncate(source.len());

        // `init` pairs up with the slots we kept; `tail` is whatever
        // `source` has beyond our current length.
        let (init, tail) = source.split_at(self.len());

        // Clone over the retained slots in place, then append the rest.
        self.clone_from_slice(init);
        self.extend(tail.iter().cloned());
    }
}
2183
2184impl<A: Array, B: Array> PartialEq<SmallVec<B>> for SmallVec<A>
2185where
2186 A::Item: PartialEq<B::Item>,
2187{
2188 #[inline]
2189 fn eq(&self, other: &SmallVec<B>) -> bool {
2190 self[..] == other[..]
2191 }
2192}
2193
// Equality is a full equivalence relation whenever the element type's is.
impl<A: Array> Eq for SmallVec<A> where A::Item: Eq {}
2195
2196impl<A: Array> PartialOrd for SmallVec<A>
2197where
2198 A::Item: PartialOrd,
2199{
2200 #[inline]
2201 fn partial_cmp(&self, other: &SmallVec<A>) -> Option<cmp::Ordering> {
2202 PartialOrd::partial_cmp(&**self, &**other)
2203 }
2204}
2205
2206impl<A: Array> Ord for SmallVec<A>
2207where
2208 A::Item: Ord,
2209{
2210 #[inline]
2211 fn cmp(&self, other: &SmallVec<A>) -> cmp::Ordering {
2212 Ord::cmp(&**self, &**other)
2213 }
2214}
2215
2216impl<A: Array> Hash for SmallVec<A>
2217where
2218 A::Item: Hash,
2219{
2220 fn hash<H: Hasher>(&self, state: &mut H) {
2221 (**self).hash(state)
2222 }
2223}
2224
// SAFETY: a `SmallVec` uniquely owns its elements (inline or spilled to the
// heap), so it may move between threads whenever the element type can.
unsafe impl<A: Array> Send for SmallVec<A> where A::Item: Send {}
2226
/// An iterator that consumes a `SmallVec` and yields its items by value.
///
/// Returned by the `IntoIterator` impl for [`SmallVec`].
pub struct IntoIter<A: Array> {
    // The source vector. Its length is set to 0 when the iterator is
    // created, so the elements in `current..end` are owned by the iterator.
    data: SmallVec<A>,
    // Index of the next element to yield from the front.
    current: usize,
    // One past the last element to yield (the vector's original length).
    end: usize,
}
2237
2238impl<A: Array> fmt::Debug for IntoIter<A>
2239where
2240 A::Item: fmt::Debug,
2241{
2242 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
2243 f.debug_tuple("IntoIter").field(&self.as_slice()).finish()
2244 }
2245}
2246
2247impl<A: Array + Clone> Clone for IntoIter<A>
2248where
2249 A::Item: Clone,
2250{
2251 fn clone(&self) -> IntoIter<A> {
2252 SmallVec::from(self.as_slice()).into_iter()
2253 }
2254}
2255
2256impl<A: Array> Drop for IntoIter<A> {
2257 fn drop(&mut self) {
2258 for _ in self {}
2259 }
2260}
2261
impl<A: Array> Iterator for IntoIter<A> {
    type Item = A::Item;

    #[inline]
    fn next(&mut self) -> Option<A::Item> {
        if self.current == self.end {
            None
        } else {
            // SAFETY: `current < end`, where `current..end` are initialized
            // elements owned by this iterator; advancing the cursor first
            // ensures each index is moved out of at most once.
            unsafe {
                let current = self.current;
                self.current += 1;
                Some(ptr::read(self.data.as_ptr().add(current)))
            }
        }
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        // Exact bound: precisely `end - current` elements remain.
        let size = self.end - self.current;
        (size, Some(size))
    }
}
2284
impl<A: Array> DoubleEndedIterator for IntoIter<A> {
    #[inline]
    fn next_back(&mut self) -> Option<A::Item> {
        if self.current == self.end {
            None
        } else {
            // SAFETY: mirror of `next`: decrementing `end` first shrinks the
            // owned window `current..end`, so the element at the new `end`
            // is initialized and is moved out exactly once.
            unsafe {
                self.end -= 1;
                Some(ptr::read(self.data.as_ptr().add(self.end)))
            }
        }
    }
}
2298
// `size_hint` is exact (`end - current`), so the length is precise.
impl<A: Array> ExactSizeIterator for IntoIter<A> {}
// Once `current == end`, `next` returns `None` forever.
impl<A: Array> FusedIterator for IntoIter<A> {}
2301
impl<A: Array> IntoIter<A> {
    /// Returns the remaining (not yet yielded) elements as a slice.
    pub fn as_slice(&self) -> &[A::Item] {
        let len = self.end - self.current;
        // SAFETY: `current..end` are initialized elements owned by this
        // iterator, and the borrow of `self` keeps them alive.
        unsafe { core::slice::from_raw_parts(self.data.as_ptr().add(self.current), len) }
    }

    /// Returns the remaining (not yet yielded) elements as a mutable slice.
    pub fn as_mut_slice(&mut self) -> &mut [A::Item] {
        let len = self.end - self.current;
        // SAFETY: as in `as_slice`; `&mut self` makes the access unique.
        unsafe { core::slice::from_raw_parts_mut(self.data.as_mut_ptr().add(self.current), len) }
    }
}
2315
impl<A: Array> IntoIterator for SmallVec<A> {
    type IntoIter = IntoIter<A>;
    type Item = A::Item;
    fn into_iter(mut self) -> Self::IntoIter {
        unsafe {
            // Hand element ownership to the iterator: zeroing the vector's
            // length keeps its own `Drop` from touching the elements, while
            // the `0..len` window tells the iterator what it must yield (or
            // drop, in its own `Drop`).
            let len = self.len();
            self.set_len(0);
            IntoIter {
                data: self,
                current: 0,
                end: len,
            }
        }
    }
}
2332
2333impl<'a, A: Array> IntoIterator for &'a SmallVec<A> {
2334 type IntoIter = slice::Iter<'a, A::Item>;
2335 type Item = &'a A::Item;
2336 fn into_iter(self) -> Self::IntoIter {
2337 self.iter()
2338 }
2339}
2340
2341impl<'a, A: Array> IntoIterator for &'a mut SmallVec<A> {
2342 type IntoIter = slice::IterMut<'a, A::Item>;
2343 type Item = &'a mut A::Item;
2344 fn into_iter(self) -> Self::IntoIter {
2345 self.iter_mut()
2346 }
2347}
2348
/// Types that can be the inline backing store of a [`SmallVec`].
///
/// # Safety
///
/// The vector reads and writes the implementor through raw pointers, so an
/// implementor must consist of exactly `size()` contiguous elements of type
/// `Item` — the property the `[T; N]` impls below guarantee.
pub unsafe trait Array {
    /// The element type stored in the array.
    type Item;
    /// The number of elements the array can hold.
    fn size() -> usize;
}
2356
// Tracks a length locally and writes it back through `len` when dropped, so
// the final length is published even if intervening code panics.
struct SetLenOnDrop<'a> {
    // Destination the final length is written to on drop.
    len: &'a mut usize,
    // Working copy, advanced via `increment_len`.
    local_len: usize,
}
2364
2365impl<'a> SetLenOnDrop<'a> {
2366 #[inline]
2367 fn new(len: &'a mut usize) -> Self {
2368 SetLenOnDrop {
2369 local_len: *len,
2370 len,
2371 }
2372 }
2373
2374 #[inline]
2375 fn get(&self) -> usize {
2376 self.local_len
2377 }
2378
2379 #[inline]
2380 fn increment_len(&mut self, increment: usize) {
2381 self.local_len += increment;
2382 }
2383}
2384
impl<'a> Drop for SetLenOnDrop<'a> {
    #[inline]
    fn drop(&mut self) {
        // Publish the tracked length; this runs on both normal exit and
        // panic unwinding.
        *self.len = self.local_len;
    }
}
2391
#[cfg(feature = "const_new")]
impl<T, const N: usize> SmallVec<[T; N]> {
    /// Creates a new, empty `SmallVec`, usable in `const` contexts.
    #[cfg_attr(docsrs, doc(cfg(feature = "const_new")))]
    #[inline]
    pub const fn new_const() -> Self {
        SmallVec {
            capacity: 0,
            data: SmallVecData::from_const(MaybeUninit::uninit()),
        }
    }

    /// Creates a full `SmallVec` from an array, usable in `const` contexts.
    #[cfg_attr(docsrs, doc(cfg(feature = "const_new")))]
    #[inline]
    pub const fn from_const(items: [T; N]) -> Self {
        SmallVec {
            capacity: N,
            data: SmallVecData::from_const(MaybeUninit::new(items)),
        }
    }

    /// Creates a `SmallVec` exposing only the first `len` elements of
    /// `items`, usable in `const` contexts.
    ///
    /// # Safety
    ///
    /// The caller must ensure `len <= N`.
    // Note how `capacity` is set to `len` here but `N` in `from_const`:
    // while the vector is inline, this field apparently doubles as the
    // length — confirm against `SmallVec`'s field docs before relying on it.
    #[cfg_attr(docsrs, doc(cfg(feature = "const_new")))]
    #[inline]
    pub const unsafe fn from_const_with_len_unchecked(items: [T; N], len: usize) -> Self {
        SmallVec {
            capacity: len,
            data: SmallVecData::from_const(MaybeUninit::new(items)),
        }
    }
}
2432
#[cfg(feature = "const_generics")]
#[cfg_attr(docsrs, doc(cfg(feature = "const_generics")))]
// With const generics, one blanket impl covers every array size.
unsafe impl<T, const N: usize> Array for [T; N] {
    type Item = T;
    #[inline]
    fn size() -> usize {
        N
    }
}
2442
#[cfg(not(feature = "const_generics"))]
// Without const generics, `Array` is implemented for an explicit list of
// array sizes via this macro.
macro_rules! impl_array(
    ($($size:expr),+) => {
        $(
            unsafe impl<T> Array for [T; $size] {
                type Item = T;
                #[inline]
                fn size() -> usize { $size }
            }
        )+
    }
);
2455
#[cfg(not(feature = "const_generics"))]
// Every size 0..=32, plus selected larger powers-of-two-ish sizes.
impl_array!(
    0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25,
    26, 27, 28, 29, 30, 31, 32, 36, 0x40, 0x60, 0x80, 0x100, 0x200, 0x400, 0x600, 0x800, 0x1000,
    0x2000, 0x4000, 0x6000, 0x8000, 0x10000, 0x20000, 0x40000, 0x60000, 0x80000, 0x10_0000
);
2462
/// Conversion of a slice into an owned [`SmallVec`].
pub trait ToSmallVec<A: Array> {
    /// Copies `self` into a new `SmallVec`.
    fn to_smallvec(&self) -> SmallVec<A>;
}
2468
impl<A: Array> ToSmallVec<A> for [A::Item]
where
    A::Item: Copy,
{
    // The `Copy` bound allows the bulk `from_slice` path.
    #[inline]
    fn to_smallvec(&self) -> SmallVec<A> {
        SmallVec::from_slice(self)
    }
}
2478
#[repr(transparent)]
// A non-null `*const T`. Wraps `NonNull` (which stores `*mut T`) to express
// const-ness in the type while keeping `NonNull`'s niche, so
// `Option<ConstNonNull<T>>` is pointer-sized.
struct ConstNonNull<T>(NonNull<T>);
2482
2483impl<T> ConstNonNull<T> {
2484 #[inline]
2485 fn new(ptr: *const T) -> Option<Self> {
2486 NonNull::new(ptr as *mut T).map(Self)
2487 }
2488 #[inline]
2489 fn as_ptr(self) -> *const T {
2490 self.0.as_ptr()
2491 }
2492}
2493
2494impl<T> Clone for ConstNonNull<T> {
2495 #[inline]
2496 fn clone(&self) -> Self {
2497 *self
2498 }
2499}
2500
// Pointers are freely copyable regardless of `T`.
impl<T> Copy for ConstNonNull<T> {}