mod raw;

use crate::{
    ser::{ScratchSpace, Serializer},
    Archive, Archived, RelPtr, Serialize, SerializeUnsized,
};
use core::{
    borrow::Borrow,
    cmp, fmt, hash,
    ops::{Deref, Index, IndexMut},
    pin::Pin,
    slice::SliceIndex,
};

pub use self::raw::*;

#[cfg_attr(feature = "strict", repr(C))]
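/// An archived [`Vec`].
///
/// This stores a [`RelPtr`] to its elements together with an archived length, so the element
/// slice lives elsewhere in the archive rather than inline in the struct.
///
/// A minimal usage sketch (assuming the `alloc` feature, default integer representation, and the
/// crate-level `to_bytes`/`archived_root` helpers; illustrative only, not a doctest):
///
/// ```ignore
/// let value = vec![1i32, 2, 3];
/// // Serializing a `Vec<i32>` produces an `ArchivedVec<i32>` in the buffer.
/// let bytes = rkyv::to_bytes::<_, 256>(&value).unwrap();
/// // Access the archived vec directly from the serialized bytes.
/// let archived = unsafe { rkyv::archived_root::<Vec<i32>>(&bytes[..]) };
/// assert_eq!(archived.as_slice(), &[1, 2, 3]);
/// ```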
pub struct ArchivedVec<T> {
    ptr: RelPtr<T>,
    len: Archived<usize>,
}

impl<T> ArchivedVec<T> {
    #[inline]
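    /// Returns a pointer to the first element of the archived vec.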
    pub fn as_ptr(&self) -> *const T {
        self.ptr.as_ptr()
    }

    #[inline]
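    /// Returns the number of elements in the archived vec.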
    pub fn len(&self) -> usize {
        from_archived!(self.len) as usize
    }

    #[inline]
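    /// Returns `true` if the archived vec contains no elements.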
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    #[inline]
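    /// Gets the elements of the archived vec as a slice.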
    pub fn as_slice(&self) -> &[T] {
        unsafe { core::slice::from_raw_parts(self.as_ptr(), self.len()) }
    }

    #[inline]
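    /// Gets the elements of the archived vec as a pinned mutable slice.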
    pub fn pin_mut_slice(self: Pin<&mut Self>) -> Pin<&mut [T]> {
        unsafe {
            self.map_unchecked_mut(|s| {
                core::slice::from_raw_parts_mut(s.ptr.as_mut_ptr(), s.len())
            })
        }
    }

    #[inline]
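    /// Gets the element at the given index of the archived vec as a pinned mutable reference.
    ///
    /// Panics if the index is out of bounds.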
    pub fn index_pin<I>(self: Pin<&mut Self>, index: I) -> Pin<&mut <[T] as Index<I>>::Output>
    where
        [T]: IndexMut<I>,
    {
        unsafe { self.pin_mut_slice().map_unchecked_mut(|s| &mut s[index]) }
    }

    #[inline]
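    /// Resolves an archived `Vec` from the slice it was serialized from.
    ///
    /// # Safety
    ///
    /// - `pos` must be the position of `out` within the archive
    /// - `resolver` must be the result of serializing `slice`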
    pub unsafe fn resolve_from_slice<U: Archive<Archived = T>>(
        slice: &[U],
        pos: usize,
        resolver: VecResolver,
        out: *mut Self,
    ) {
        Self::resolve_from_len(slice.len(), pos, resolver, out);
    }

    #[inline]
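    /// Resolves an archived `Vec` from a given length.
    ///
    /// # Safety
    ///
    /// - `pos` must be the position of `out` within the archive
    /// - `resolver` must be the result of serializing the elements, and `len` must be their count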
    pub unsafe fn resolve_from_len(len: usize, pos: usize, resolver: VecResolver, out: *mut Self) {
        let (fp, fo) = out_field!(out.ptr);
        RelPtr::emplace(pos + fp, resolver.pos, fo);
        let (fp, fo) = out_field!(out.len);
        usize::resolve(&len, pos + fp, (), fo);
    }

    #[inline]
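    /// Serializes an archived `Vec` from a given slice.
    ///
    /// A sketch of how a hypothetical wrapper type (`MyVec<T>`, not part of this crate) might pair
    /// this with [`ArchivedVec::resolve_from_slice`] in its `Archive`/`Serialize` impls;
    /// illustrative only, not a doctest:
    ///
    /// ```ignore
    /// impl<T: Archive> Archive for MyVec<T> {
    ///     type Archived = ArchivedVec<T::Archived>;
    ///     type Resolver = VecResolver;
    ///
    ///     unsafe fn resolve(&self, pos: usize, resolver: Self::Resolver, out: *mut Self::Archived) {
    ///         // Emplace the relative pointer and length for the already-written elements.
    ///         ArchivedVec::resolve_from_slice(self.as_slice(), pos, resolver, out);
    ///     }
    /// }
    ///
    /// impl<T, S> Serialize<S> for MyVec<T>
    /// where
    ///     T: Serialize<S>,
    ///     S: Serializer + ?Sized,
    ///     [T]: SerializeUnsized<S>,
    /// {
    ///     fn serialize(&self, serializer: &mut S) -> Result<Self::Resolver, S::Error> {
    ///         // Write the elements and remember where they start.
    ///         ArchivedVec::serialize_from_slice(self.as_slice(), serializer)
    ///     }
    /// }
    /// ```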
    pub fn serialize_from_slice<U: Serialize<S, Archived = T>, S: Serializer + ?Sized>(
        slice: &[U],
        serializer: &mut S,
    ) -> Result<VecResolver, S::Error>
    where
        [U]: SerializeUnsized<S>,
    {
        Ok(VecResolver {
            pos: slice.serialize_unsized(serializer)?,
        })
    }

    #[inline]
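    /// Serializes an archived `Vec` from a given slice by directly copying the element bytes.
    ///
    /// # Safety
    ///
    /// `U` must be identical in size, alignment, and bit representation to its archived form `T`,
    /// since the elements are written into the archive as a verbatim byte copy.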
    pub unsafe fn serialize_copy_from_slice<U, S>(
        slice: &[U],
        serializer: &mut S,
    ) -> Result<VecResolver, S::Error>
    where
        U: Serialize<S, Archived = T>,
        S: Serializer + ?Sized,
    {
        use ::core::{mem::size_of, slice::from_raw_parts};

        let pos = serializer.align_for::<T>()?;

        let bytes = from_raw_parts(slice.as_ptr().cast::<u8>(), size_of::<T>() * slice.len());
        serializer.write(bytes)?;

        Ok(VecResolver { pos })
    }

    #[inline]
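    /// Serializes an archived `Vec` from a given iterator.
    ///
    /// This buffers the element resolvers in scratch space and cannot perform the byte-copy
    /// optimization; prefer [`ArchivedVec::serialize_from_slice`] when a slice is available.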
    pub fn serialize_from_iter<U, B, I, S>(
        iter: I,
        serializer: &mut S,
    ) -> Result<VecResolver, S::Error>
    where
        U: Serialize<S, Archived = T>,
        B: Borrow<U>,
        I: ExactSizeIterator<Item = B>,
        S: ScratchSpace + Serializer + ?Sized,
    {
        use crate::ScratchVec;

        unsafe {
            let mut resolvers = ScratchVec::new(serializer, iter.len())?;

            for value in iter {
                let resolver = value.borrow().serialize(serializer)?;
                resolvers.push((value, resolver));
            }
            let pos = serializer.align_for::<T>()?;
            for (value, resolver) in resolvers.drain(..) {
                serializer.resolve_aligned(value.borrow(), resolver)?;
            }

            resolvers.free(serializer)?;

            Ok(VecResolver { pos })
        }
    }
}

impl<T> AsRef<[T]> for ArchivedVec<T> {
    #[inline]
    fn as_ref(&self) -> &[T] {
        self.as_slice()
    }
}

impl<T> Borrow<[T]> for ArchivedVec<T> {
    #[inline]
    fn borrow(&self) -> &[T] {
        self.as_slice()
    }
}

impl<T: fmt::Debug> fmt::Debug for ArchivedVec<T> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_list().entries(self.as_slice()).finish()
    }
}

impl<T> Deref for ArchivedVec<T> {
    type Target = [T];

    #[inline]
    fn deref(&self) -> &Self::Target {
        self.as_slice()
    }
}

impl<T: Eq> Eq for ArchivedVec<T> {}

impl<T: hash::Hash> hash::Hash for ArchivedVec<T> {
    #[inline]
    fn hash<H: hash::Hasher>(&self, state: &mut H) {
        self.as_slice().hash(state)
    }
}

impl<T, I: SliceIndex<[T]>> Index<I> for ArchivedVec<T> {
    type Output = <[T] as Index<I>>::Output;

    #[inline]
    fn index(&self, index: I) -> &Self::Output {
        self.as_slice().index(index)
    }
}

impl<T: Ord> Ord for ArchivedVec<T> {
    #[inline]
    fn cmp(&self, other: &Self) -> cmp::Ordering {
        self.as_slice().cmp(other.as_slice())
    }
}

impl<T: PartialEq<U>, U> PartialEq<ArchivedVec<U>> for ArchivedVec<T> {
    #[inline]
    fn eq(&self, other: &ArchivedVec<U>) -> bool {
        self.as_slice().eq(other.as_slice())
    }
}

impl<T: PartialEq<U>, U, const N: usize> PartialEq<[U; N]> for ArchivedVec<T> {
    #[inline]
    fn eq(&self, other: &[U; N]) -> bool {
        self.as_slice().eq(&other[..])
    }
}

impl<T: PartialEq<U>, U, const N: usize> PartialEq<ArchivedVec<T>> for [U; N] {
    #[inline]
    fn eq(&self, other: &ArchivedVec<T>) -> bool {
        other.eq(self)
    }
}

impl<T: PartialEq<U>, U> PartialEq<[U]> for ArchivedVec<T> {
    #[inline]
    fn eq(&self, other: &[U]) -> bool {
        self.as_slice().eq(other)
    }
}

impl<T: PartialEq<U>, U> PartialEq<ArchivedVec<U>> for [T] {
    #[inline]
    fn eq(&self, other: &ArchivedVec<U>) -> bool {
        self.eq(other.as_slice())
    }
}

impl<T: PartialOrd> PartialOrd<ArchivedVec<T>> for ArchivedVec<T> {
    #[inline]
    fn partial_cmp(&self, other: &ArchivedVec<T>) -> Option<cmp::Ordering> {
        self.as_slice().partial_cmp(other.as_slice())
    }
}

impl<T: PartialOrd> PartialOrd<[T]> for ArchivedVec<T> {
    #[inline]
    fn partial_cmp(&self, other: &[T]) -> Option<cmp::Ordering> {
        self.as_slice().partial_cmp(other)
    }
}

impl<T: PartialOrd> PartialOrd<ArchivedVec<T>> for [T] {
    #[inline]
    fn partial_cmp(&self, other: &ArchivedVec<T>) -> Option<cmp::Ordering> {
        self.partial_cmp(other.as_slice())
    }
}

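/// The resolver for [`ArchivedVec`].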
pub struct VecResolver {
    pos: usize,
}

#[cfg(feature = "validation")]
const _: () = {
    use crate::validation::{
        owned::{CheckOwnedPointerError, OwnedPointerError},
        ArchiveContext,
    };
    use bytecheck::{CheckBytes, Error};

    impl<T> ArchivedVec<T> {
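        /// Checks the bytes of the `ArchivedVec` with the given element-checking function.
        ///
        /// # Safety
        ///
        /// `value` must satisfy the usual `CheckBytes` preconditions (properly aligned and within
        /// the bytes being validated), and `check_elements` must only return `Ok` if the
        /// pointed-to element slice is valid.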
        pub unsafe fn check_bytes_with<'a, C, F>(
            value: *const Self,
            context: &mut C,
            check_elements: F,
        ) -> Result<&'a Self, CheckOwnedPointerError<[T], C>>
        where
            T: CheckBytes<C>,
            C: ArchiveContext + ?Sized,
            F: FnOnce(*const [T], &mut C) -> Result<(), <[T] as CheckBytes<C>>::Error>,
        {
            let rel_ptr = RelPtr::<[T]>::manual_check_bytes(value.cast(), context)
                .map_err(OwnedPointerError::PointerCheckBytesError)?;
            let ptr = context
                .check_subtree_rel_ptr(rel_ptr)
                .map_err(OwnedPointerError::ContextError)?;

            let range = context
                .push_prefix_subtree(ptr)
                .map_err(OwnedPointerError::ContextError)?;
            check_elements(ptr, context).map_err(OwnedPointerError::ValueCheckBytesError)?;
            context
                .pop_prefix_range(range)
                .map_err(OwnedPointerError::ContextError)?;

            Ok(&*value)
        }
    }

    impl<T, C> CheckBytes<C> for ArchivedVec<T>
    where
        T: CheckBytes<C>,
        C: ArchiveContext + ?Sized,
        C::Error: Error,
    {
        type Error = CheckOwnedPointerError<[T], C>;

        #[inline]
        unsafe fn check_bytes<'a>(
            value: *const Self,
            context: &mut C,
        ) -> Result<&'a Self, Self::Error> {
            Self::check_bytes_with::<C, _>(value, context, |v, c| {
                <[T]>::check_bytes(v, c).map(|_| ())
            })
        }
    }
};