#[cfg(feature = "validation")]
pub mod validation;

use crate::{Archive, ArchivePointee, Archived, RelPtr};
use core::{
    borrow::Borrow,
    cmp::Ordering,
    fmt,
    hash::{Hash, Hasher},
    iter::FusedIterator,
    marker::PhantomData,
    ops::Index,
    ptr::NonNull,
};
use ptr_meta::Pointee;

// An entry of an inner node: `ptr` points to the child subtree whose keys are greater than or
// equal to `key`.
#[cfg_attr(feature = "strict", repr(C))]
struct InnerNodeEntry<K> {
    ptr: RelPtr<NodeHeader>,
    key: K,
}

// An entry of a leaf node: a key-value pair.
#[cfg_attr(feature = "strict", repr(C))]
struct LeafNodeEntry<K, V> {
    key: K,
    value: V,
}

impl<'a, UK: Archive, UV: Archive> Archive for LeafNodeEntry<&'a UK, &'a UV> {
    type Archived = LeafNodeEntry<UK::Archived, UV::Archived>;
    type Resolver = (UK::Resolver, UV::Resolver);

    #[inline]
    unsafe fn resolve(&self, pos: usize, resolver: Self::Resolver, out: *mut Self::Archived) {
        let (fp, fo) = out_field!(out.key);
        self.key.resolve(pos + fp, resolver.0, fo);
        let (fp, fo) = out_field!(out.value);
        self.value.resolve(pos + fp, resolver.1, fo);
    }
}

#[cfg_attr(feature = "strict", repr(C))]
struct NodeHeader {
    meta: Archived<u16>,
    size: Archived<usize>,
    // For inner nodes, this points to the leftmost child; for leaf nodes, it points to the
    // next leaf in key order (or to itself when there is no next node).
    ptr: RelPtr<NodeHeader>,
}

impl NodeHeader {
    #[inline]
    fn is_inner(&self) -> bool {
        split_meta(from_archived!(self.meta)).0
    }

    #[inline]
    fn is_leaf(&self) -> bool {
        !split_meta(from_archived!(self.meta)).0
    }

    #[inline]
    fn len(&self) -> usize {
        split_meta(from_archived!(self.meta)).1
    }
}

// `meta` packs the node type and length together: the high bit flags an inner node and the
// low 15 bits hold the number of entries in the node's tail.
#[inline]
#[cfg(feature = "alloc")]
fn combine_meta(is_inner: bool, len: usize) -> u16 {
    if is_inner {
        0x80_00 | len as u16
    } else {
        len as u16
    }
}

#[inline]
fn split_meta(meta: u16) -> (bool, usize) {
    (meta & 0x80_00 == 0x80_00, (meta & 0x7F_FF) as usize)
}
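
// A round-trip sanity check of the meta encoding (an illustrative test added here; it
// exercises only the two functions above):
#[cfg(all(test, feature = "alloc"))]
mod meta_tests {
    use super::{combine_meta, split_meta};

    #[test]
    fn meta_round_trips() {
        // An inner node with 3 entries sets the high bit; a leaf stores only the length.
        assert_eq!(combine_meta(true, 3), 0x80_03);
        assert_eq!(split_meta(combine_meta(true, 3)), (true, 3));
        assert_eq!(split_meta(combine_meta(false, 3)), (false, 3));
    }
}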

#[cfg_attr(feature = "strict", repr(C))]
struct Node<T: ?Sized> {
    header: NodeHeader,
    tail: T,
}

impl<T> Pointee for Node<[T]> {
    type Metadata = usize;
}

impl<T> ArchivePointee for Node<[T]> {
    type ArchivedMetadata = Archived<usize>;

    #[inline]
    fn pointer_metadata(archived: &Self::ArchivedMetadata) -> <Self as Pointee>::Metadata {
        from_archived!(*archived) as usize
    }
}
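
// `Node<[T]>` is a dynamically sized type: the length of its entry slice lives in the pointer
// metadata rather than behind the pointer. The impls above carry that length as archived
// metadata, and `classify_inner`/`classify_leaf` below rebuild fat references from a bare
// `NodeHeader` address plus the count packed into `meta`.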

type InnerNode<K> = Node<[InnerNodeEntry<K>]>;
type LeafNode<K, V> = Node<[LeafNodeEntry<K, V>]>;

struct NodeHeaderData {
    meta: u16,
    size: usize,
    // Where the header's relative pointer should target (the next leaf or the leftmost
    // child), if any.
    pos: Option<usize>,
}

impl Archive for NodeHeaderData {
    type Archived = NodeHeader;
    type Resolver = ();

    #[inline]
    unsafe fn resolve(&self, pos: usize, _: Self::Resolver, out: *mut Self::Archived) {
        let (fp, fo) = out_field!(out.meta);
        self.meta.resolve(pos + fp, (), fo);

        let (fp, fo) = out_field!(out.size);
        self.size.resolve(pos + fp, (), fo);

        // When there is no node to link to, the relative pointer targets its own position
        // (an offset of zero).
        let (fp, fo) = out_field!(out.ptr);
        RelPtr::emplace(pos + fp, self.pos.unwrap_or(pos + fp), fo);
    }
}

struct InnerNodeEntryData<'a, UK> {
    key: &'a UK,
}

impl<'a, UK: Archive> Archive for InnerNodeEntryData<'a, UK> {
    type Archived = InnerNodeEntry<UK::Archived>;
    type Resolver = (usize, UK::Resolver);

    #[inline]
    unsafe fn resolve(&self, pos: usize, resolver: Self::Resolver, out: *mut Self::Archived) {
        let (fp, fo) = out_field!(out.ptr);
        RelPtr::emplace(pos + fp, resolver.0, fo);
        let (fp, fo) = out_field!(out.key);
        self.key.resolve(pos + fp, resolver.1, fo);
    }
}

enum ClassifiedNode<'a, K, V> {
    Inner(&'a InnerNode<K>),
    Leaf(&'a LeafNode<K, V>),
}

impl NodeHeader {
    #[inline]
    fn classify<K, V>(&self) -> ClassifiedNode<'_, K, V> {
        if self.is_inner() {
            ClassifiedNode::Inner(self.classify_inner())
        } else {
            ClassifiedNode::Leaf(self.classify_leaf())
        }
    }

    // The classify functions rebuild fat pointers to the node DSTs from the header address
    // plus the entry count stored in `meta`.
    #[inline]
    fn classify_inner_ptr<K>(&self) -> *const InnerNode<K> {
        ptr_meta::from_raw_parts(self as *const Self as *const (), self.len())
    }

    #[inline]
    fn classify_inner<K>(&self) -> &'_ InnerNode<K> {
        debug_assert!(self.is_inner());
        unsafe { &*self.classify_inner_ptr() }
    }

    #[inline]
    fn classify_leaf_ptr<K, V>(&self) -> *const LeafNode<K, V> {
        ptr_meta::from_raw_parts(self as *const Self as *const (), self.len())
    }

    #[inline]
    fn classify_leaf<K, V>(&self) -> &'_ LeafNode<K, V> {
        debug_assert!(self.is_leaf());
        unsafe { &*self.classify_leaf_ptr() }
    }
}
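
/// An archived [`BTreeMap`](std::collections::BTreeMap).
///
/// A usage sketch (illustrative, and not compiled as a doctest; it assumes rkyv's `to_bytes`
/// and `archived_root` helpers under default features):
///
/// ```ignore
/// use std::collections::BTreeMap;
///
/// let mut map = BTreeMap::new();
/// map.insert("answer".to_string(), 42u32);
///
/// let bytes = rkyv::to_bytes::<_, 256>(&map).unwrap();
/// // SAFETY: `bytes` was produced by serializing `map` at the root.
/// let archived = unsafe { rkyv::archived_root::<BTreeMap<String, u32>>(&bytes[..]) };
/// assert_eq!(archived.get("answer"), Some(&42));
/// ```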
#[cfg_attr(feature = "strict", repr(C))]
pub struct ArchivedBTreeMap<K, V> {
    len: Archived<usize>,
    root: RelPtr<NodeHeader>,
    _phantom: PhantomData<(K, V)>,
}

/// The resolver for an [`ArchivedBTreeMap`].
pub struct BTreeMapResolver {
    root_pos: usize,
}

/// The minimum number of entries to place in a leaf node.
pub const MIN_ENTRIES_PER_LEAF_NODE: usize = 1;

/// The minimum number of entries to place in an inner node.
pub const MIN_ENTRIES_PER_INNER_NODE: usize = 2;

impl<K, V> ArchivedBTreeMap<K, V> {
    #[inline]
    fn root(&self) -> Option<ClassifiedNode<K, V>> {
        if self.is_empty() {
            None
        } else {
            let root = unsafe { &*self.root.as_ptr() };
            Some(root.classify())
        }
    }

    // Returns a pointer to the leftmost leaf, found by following each inner node's header
    // pointer downward. The result is dangling for an empty map; the iterators never
    // dereference it in that case because `remaining` starts at zero.
    #[inline]
    fn first(&self) -> NonNull<NodeHeader> {
        if let Some(mut node) = self.root() {
            while let ClassifiedNode::Inner(inner) = node {
                let next = unsafe { &*inner.header.ptr.as_ptr() };
                node = next.classify();
            }
            match node {
                ClassifiedNode::Leaf(leaf) => unsafe {
                    let node = (leaf as *const LeafNode<K, V> as *mut LeafNode<K, V>).cast();
                    NonNull::new_unchecked(node)
                },
                ClassifiedNode::Inner(_) => unsafe { core::hint::unreachable_unchecked() },
            }
        } else {
            NonNull::dangling()
        }
    }

    /// Returns `true` if the map contains a value for the specified key.
    #[inline]
    pub fn contains_key<Q: Ord + ?Sized>(&self, key: &Q) -> bool
    where
        K: Borrow<Q> + Ord,
    {
        self.get_key_value(key).is_some()
    }

    /// Returns a reference to the value corresponding to the key.
    #[inline]
    pub fn get<Q: Ord + ?Sized>(&self, key: &Q) -> Option<&V>
    where
        K: Borrow<Q> + Ord,
    {
        self.get_key_value(key).map(|(_, v)| v)
    }

    /// Returns the key-value pair corresponding to the supplied key.
    pub fn get_key_value<Q: Ord + ?Sized>(&self, k: &Q) -> Option<(&K, &V)>
    where
        K: Borrow<Q> + Ord,
    {
        if let Some(mut current) = self.root() {
            loop {
                match current {
                    ClassifiedNode::Inner(node) => {
                        // Binary search the node's entries. On a hit, descend into that
                        // entry's subtree; on a miss, descend into the child covering the
                        // gap the key falls in (the header pointer holds the leftmost
                        // child).
                        let next = match node
                            .tail
                            .binary_search_by(|probe| probe.key.borrow().cmp(k))
                        {
                            Ok(i) => unsafe { &*node.tail[i].ptr.as_ptr() },
                            Err(i) => {
                                if i == 0 {
                                    unsafe { &*node.header.ptr.as_ptr() }
                                } else {
                                    unsafe { &*node.tail[i - 1].ptr.as_ptr() }
                                }
                            }
                        };
                        current = next.classify();
                    }
                    ClassifiedNode::Leaf(node) => {
                        // At a leaf, the search either finds the entry or proves the key
                        // absent.
                        if let Ok(i) = node
                            .tail
                            .binary_search_by(|probe| probe.key.borrow().cmp(k))
                        {
                            let entry = &node.tail[i];
                            break Some((&entry.key, &entry.value));
                        } else {
                            break None;
                        }
                    }
                }
            }
        } else {
            None
        }
    }

    /// Returns `true` if the map contains no entries.
    #[inline]
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// Gets an iterator over the entries of the map, sorted by key.
    #[inline]
    pub fn iter(&self) -> Iter<'_, K, V> {
        Iter {
            inner: RawIter::new(self.first(), 0, self.len()),
        }
    }

    /// Gets an iterator over the keys of the map, in sorted order.
    #[inline]
    pub fn keys(&self) -> Keys<'_, K, V> {
        Keys {
            inner: RawIter::new(self.first(), 0, self.len()),
        }
    }

    /// Returns the number of entries in the map.
    #[inline]
    pub fn len(&self) -> usize {
        from_archived!(self.len) as usize
    }

    /// Gets an iterator over the values of the map, in order by key.
    #[inline]
    pub fn values(&self) -> Values<'_, K, V> {
        Values {
            inner: RawIter::new(self.first(), 0, self.len()),
        }
    }

    /// Resolves a B-tree map from its length.
    ///
    /// # Safety
    ///
    /// - `len` must be the number of entries that were serialized
    /// - `pos` must be the position of `out` within the archive
    /// - `resolver` must be the result of serializing the map's entries
    #[inline]
    pub unsafe fn resolve_from_len(
        len: usize,
        pos: usize,
        resolver: BTreeMapResolver,
        out: *mut Self,
    ) {
        let (fp, fo) = out_field!(out.len);
        len.resolve(pos + fp, (), fo);

        let (fp, fo) = out_field!(out.root);
        RelPtr::emplace(pos + fp, resolver.root_pos, fo);
    }
}
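
// Archiving follows rkyv's usual two-phase split: `serialize_from_reverse_iter` (below, under
// the "alloc" feature) writes every node back-to-front, leaves first in reverse key order and
// the root last, and returns a `BTreeMapResolver` carrying the root's position; then
// `resolve_from_len` emplaces the length and the relative pointer to that root.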

#[cfg(feature = "alloc")]
const _: () = {
    use crate::{ser::Serializer, Serialize};
    #[cfg(not(feature = "std"))]
    use alloc::vec::Vec;
    use core::mem;

    impl<K, V> ArchivedBTreeMap<K, V> {
        /// Serializes an `ArchivedBTreeMap` from the given iterator and serializer.
        ///
        /// # Safety
        ///
        /// The iterator must yield unique keys in reverse (descending) order.
        pub unsafe fn serialize_from_reverse_iter<'a, UK, UV, S, I>(
            mut iter: I,
            serializer: &mut S,
        ) -> Result<BTreeMapResolver, S::Error>
        where
            UK: 'a + Serialize<S, Archived = K>,
            UV: 'a + Serialize<S, Archived = V>,
            S: Serializer + ?Sized,
            I: ExactSizeIterator<Item = (&'a UK, &'a UV)>,
        {
            if iter.len() == 0 {
                Ok(BTreeMapResolver { root_pos: 0 })
            } else {
                // Cap the memory span of a single node so that an entire node tends to stay
                // within one small, IO-friendly block.
                const MAX_NODE_SIZE: usize = 4096;

                // The nodes to link from the level above, as (first key, position) pairs in
                // reverse order.
                let mut next_level = Vec::new();
                let mut resolvers = Vec::new();

                while let Some((key, value)) = iter.next() {
                    // Start a new leaf node.
                    let block_start_pos = serializer.pos();

                    // Serialize the first entry.
                    resolvers.push((
                        key,
                        value,
                        key.serialize(serializer)?,
                        value.serialize(serializer)?,
                    ));

                    loop {
                        // Estimate the finished node's size; alignment padding may add a
                        // little more.
                        let estimated_block_size = serializer.pos() - block_start_pos
                            + mem::size_of::<NodeHeader>()
                            + resolvers.len() * mem::size_of::<LeafNodeEntry<K, V>>();

                        // Stop when the node is full, but never below the minimum entry
                        // count.
                        if estimated_block_size >= MAX_NODE_SIZE
                            && resolvers.len() >= MIN_ENTRIES_PER_LEAF_NODE
                        {
                            break;
                        }

                        // Otherwise, try to pull in another entry.
                        if let Some((key, value)) = iter.next() {
                            resolvers.push((
                                key,
                                value,
                                key.serialize(serializer)?,
                                value.serialize(serializer)?,
                            ));
                        } else {
                            break;
                        }
                    }

                    // Finish the node: write its header, then its entries in ascending key
                    // order.
                    serializer.align(usize::max(
                        mem::align_of::<NodeHeader>(),
                        mem::align_of::<LeafNodeEntry<K, V>>(),
                    ))?;
                    let raw_node = NodeHeaderData {
                        meta: combine_meta(false, resolvers.len()),
                        size: serializer.pos() - block_start_pos,
                        // The most recently finished node holds the next-larger keys.
                        pos: next_level.last().map(|&(_, pos)| pos),
                    };

                    // The node's smallest key indexes it in the level above.
                    next_level.push((
                        resolvers.last().unwrap().0,
                        serializer.resolve_aligned(&raw_node, ())?,
                    ));

                    serializer.align_for::<LeafNodeEntry<K, V>>()?;
                    for (key, value, key_resolver, value_resolver) in resolvers.drain(..).rev() {
                        serializer.resolve_aligned(
                            &LeafNodeEntry { key, value },
                            (key_resolver, value_resolver),
                        )?;
                    }
                }

                // Build each level of inner nodes from the level below until a single node,
                // the root, remains.
                let mut current_level = Vec::new();
                let mut resolvers = Vec::new();
                while next_level.len() > 1 {
                    // The level just finished becomes the input for this one.
                    mem::swap(&mut current_level, &mut next_level);

                    let mut iter = current_level.drain(..);
                    while iter.len() > 1 {
                        // Start a new inner node.
                        let block_start_pos = serializer.pos();

                        while iter.len() > 1 {
                            let (key, pos) = iter.next().unwrap();

                            resolvers.push((key, pos, key.serialize(serializer)?));

                            // Estimate the finished node's size; alignment padding may add a
                            // little more.
                            let estimated_block_size = serializer.pos() - block_start_pos
                                + mem::size_of::<NodeHeader>()
                                + resolvers.len() * mem::size_of::<InnerNodeEntry<K>>();

                            // Stop when the node is full, but never below the minimum entry
                            // count.
                            if estimated_block_size >= MAX_NODE_SIZE
                                && resolvers.len() >= MIN_ENTRIES_PER_INNER_NODE
                            {
                                break;
                            }
                        }

                        // Each inner node consumes one extra node below as its leftmost
                        // child. If exactly two nodes remain, take one more entry now so the
                        // last node doesn't end up orphaned.
                        if iter.len() == 2 {
                            let (key, pos) = iter.next().unwrap();

                            resolvers.push((key, pos, key.serialize(serializer)?));
                        }

                        // The next node becomes this node's leftmost child, reached through
                        // the header pointer rather than a keyed entry.
                        let (first_key, first_pos) = iter.next().unwrap();

                        serializer.align(usize::max(
                            mem::align_of::<NodeHeader>(),
                            mem::align_of::<InnerNodeEntry<K>>(),
                        ))?;
                        let node_header = NodeHeaderData {
                            meta: combine_meta(true, resolvers.len()),
                            size: serializer.pos() - block_start_pos,
                            pos: Some(first_pos),
                        };

                        next_level.push((first_key, serializer.resolve_aligned(&node_header, ())?));

                        serializer.align_for::<InnerNodeEntry<K>>()?;
                        for (key, pos, resolver) in resolvers.drain(..).rev() {
                            let inner_node_data = InnerNodeEntryData::<UK> { key };
                            serializer.resolve_aligned(&inner_node_data, (pos, resolver))?;
                        }
                    }

                    // Every node from the lower level must have been attached.
                    debug_assert!(iter.len() == 0);
                }

                Ok(BTreeMapResolver {
                    // The last node serialized is the root.
                    root_pos: next_level[0].1,
                })
            }
        }
    }
};
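
// A sketch of how a caller might drive the serializer (illustrative; rkyv's `Serialize` impls
// for standard maps live elsewhere in the crate):
//
//     // Iterating a `std::collections::BTreeMap` backwards yields unique keys in reverse
//     // order, satisfying the safety contract above.
//     let resolver = unsafe {
//         ArchivedBTreeMap::serialize_from_reverse_iter(map.iter().rev(), serializer)?
//     };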

impl<K: fmt::Debug, V: fmt::Debug> fmt::Debug for ArchivedBTreeMap<K, V> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_map().entries(self.iter()).finish()
    }
}

impl<K, Q, V> Index<&'_ Q> for ArchivedBTreeMap<K, V>
where
    K: Borrow<Q> + Ord,
    Q: Ord + ?Sized,
{
    type Output = V;

    fn index(&self, key: &Q) -> &V {
        self.get(key).unwrap()
    }
}

impl<'a, K, V> IntoIterator for &'a ArchivedBTreeMap<K, V> {
    type Item = (&'a K, &'a V);
    type IntoIter = Iter<'a, K, V>;

    fn into_iter(self) -> Self::IntoIter {
        self.iter()
    }
}

impl<K: Eq, V: Eq> Eq for ArchivedBTreeMap<K, V> {}

impl<K: Hash, V: Hash> Hash for ArchivedBTreeMap<K, V> {
    #[inline]
    fn hash<H: Hasher>(&self, state: &mut H) {
        for pair in self.iter() {
            pair.hash(state);
        }
    }
}

impl<K: Ord, V: Ord> Ord for ArchivedBTreeMap<K, V> {
    #[inline]
    fn cmp(&self, other: &Self) -> Ordering {
        self.iter().cmp(other.iter())
    }
}

impl<K: PartialEq, V: PartialEq> PartialEq for ArchivedBTreeMap<K, V> {
    #[inline]
    fn eq(&self, other: &Self) -> bool {
        if self.len() != other.len() {
            false
        } else {
            self.iter().zip(other.iter()).all(|(a, b)| a.eq(&b))
        }
    }
}

impl<K: PartialOrd, V: PartialOrd> PartialOrd for ArchivedBTreeMap<K, V> {
    #[inline]
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        self.iter().partial_cmp(other.iter())
    }
}

struct RawIter<'a, K, V> {
    // The current leaf node; dangling when `remaining` is zero.
    leaf: NonNull<NodeHeader>,
    // The index of the next entry within the current leaf.
    index: usize,
    // How many entries are left to yield across all remaining leaves.
    remaining: usize,
    _phantom: PhantomData<(&'a K, &'a V)>,
}

impl<'a, K, V> RawIter<'a, K, V> {
    fn new(leaf: NonNull<NodeHeader>, index: usize, remaining: usize) -> Self {
        Self {
            leaf,
            index,
            remaining,
            _phantom: PhantomData,
        }
    }
}
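
// Iteration never revisits inner nodes: starting from the leftmost leaf, the iterator walks
// each leaf's entries in order and then hops to the next leaf through the header's relative
// pointer, until `remaining` entries have been yielded.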

impl<'a, K, V> Iterator for RawIter<'a, K, V> {
    type Item = (&'a K, &'a V);

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        if self.remaining == 0 {
            None
        } else {
            unsafe {
                // SAFETY: `remaining` is nonzero, so `leaf` points to a valid leaf node.
                let leaf = self.leaf.as_ref().classify_leaf::<K, V>();
                if self.index == leaf.tail.len() {
                    // This leaf is exhausted; move to the next leaf in key order.
                    self.index = 0;
                    self.leaf = NonNull::new_unchecked(leaf.header.ptr.as_ptr() as *mut _);
                }
                let result = &self.leaf.as_ref().classify_leaf().tail[self.index];
                self.index += 1;
                self.remaining -= 1;
                Some((&result.key, &result.value))
            }
        }
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        (self.remaining, Some(self.remaining))
    }
}

impl<'a, K, V> ExactSizeIterator for RawIter<'a, K, V> {}
impl<'a, K, V> FusedIterator for RawIter<'a, K, V> {}

/// An iterator over the entries of an [`ArchivedBTreeMap`].
pub struct Iter<'a, K, V> {
    inner: RawIter<'a, K, V>,
}

impl<'a, K, V> Iterator for Iter<'a, K, V> {
    type Item = (&'a K, &'a V);

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        self.inner.next()
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.inner.size_hint()
    }
}

impl<'a, K, V> ExactSizeIterator for Iter<'a, K, V> {}
impl<'a, K, V> FusedIterator for Iter<'a, K, V> {}

/// An iterator over the keys of an [`ArchivedBTreeMap`].
pub struct Keys<'a, K, V> {
    inner: RawIter<'a, K, V>,
}

impl<'a, K, V> Iterator for Keys<'a, K, V> {
    type Item = &'a K;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        self.inner.next().map(|(k, _)| k)
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.inner.size_hint()
    }
}

impl<'a, K, V> ExactSizeIterator for Keys<'a, K, V> {}
impl<'a, K, V> FusedIterator for Keys<'a, K, V> {}

/// An iterator over the values of an [`ArchivedBTreeMap`].
pub struct Values<'a, K, V> {
    inner: RawIter<'a, K, V>,
}

impl<'a, K, V> Iterator for Values<'a, K, V> {
    type Item = &'a V;

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        self.inner.next().map(|(_, v)| v)
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        self.inner.size_hint()
    }
}

impl<'a, K, V> ExactSizeIterator for Values<'a, K, V> {}
impl<'a, K, V> FusedIterator for Values<'a, K, V> {}