1#![cfg_attr(feature = "rkyv-serialize", allow(unsafe_op_in_unsafe_fn))]
3
4use approx::{AbsDiffEq, RelativeEq, UlpsEq};
5use num::{One, Zero};
6use std::cmp::Ordering;
7use std::fmt;
8use std::hash;
9
10#[cfg(feature = "rkyv-serialize")]
11use rkyv::bytecheck;
12#[cfg(feature = "serde-serialize-no-std")]
13use serde::{Deserialize, Deserializer, Serialize, Serializer};
14
15use simba::simd::SimdPartialOrd;
16
17use crate::base::allocator::Allocator;
18use crate::base::dimension::{DimName, DimNameAdd, DimNameSum, U1};
19use crate::base::iter::{MatrixIter, MatrixIterMut};
20use crate::base::{Const, DefaultAllocator, OVector, Scalar};
21use simba::scalar::{ClosedAddAssign, ClosedMulAssign, ClosedSubAssign};
22use std::mem::MaybeUninit;
23
#[repr(C)]
#[derive(Clone)]
#[cfg_attr(feature = "rkyv-serialize", derive(bytecheck::CheckBytes))]
#[cfg_attr(
    feature = "rkyv-serialize-no-std",
    derive(rkyv::Archive, rkyv::Serialize, rkyv::Deserialize),
    archive(
        as = "OPoint<T::Archived, D>",
        bound(archive = "
        T: rkyv::Archive,
        T::Archived: Scalar,
        OVector<T, D>: rkyv::Archive<Archived = OVector<T::Archived, D>>,
        DefaultAllocator: Allocator<D>,
    ")
    )
)]
#[cfg_attr(feature = "defmt", derive(defmt::Format))]
/// A point in an euclidean space, backed by an owned coordinate vector.
///
/// `OPoint` is a thin `#[repr(C)]` wrapper around an [`OVector<T, D>`]; essentially every
/// operation in this module delegates to that vector. The statically-known dimension `D`
/// determines the number of coordinates.
pub struct OPoint<T: Scalar, D: DimName>
where
    DefaultAllocator: Allocator<D>,
{
    /// The coordinates of this point, as a column vector.
    pub coords: OVector<T, D>,
}
67
68impl<T: Scalar + fmt::Debug, D: DimName> fmt::Debug for OPoint<T, D>
69where
70 DefaultAllocator: Allocator<D>,
71{
72 fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
73 self.coords.as_slice().fmt(formatter)
74 }
75}
76
77impl<T: Scalar + hash::Hash, D: DimName> hash::Hash for OPoint<T, D>
78where
79 DefaultAllocator: Allocator<D>,
80{
81 fn hash<H: hash::Hasher>(&self, state: &mut H) {
82 self.coords.hash(state)
83 }
84}
85
// `OPoint` is `Copy` exactly when its only field (the coordinate vector) is `Copy`,
// which the explicit `OVector<T, D>: Copy` bound enforces.
impl<T: Scalar + Copy, D: DimName> Copy for OPoint<T, D>
where
    DefaultAllocator: Allocator<D>,
    OVector<T, D>: Copy,
{
}
92
#[cfg(feature = "bytemuck")]
// SAFETY: `OPoint` is `#[repr(C)]` with a single `OVector<T, D>` field, so an all-zero bit
// pattern is valid for it whenever it is valid for that vector — which the
// `OVector<T, D>: bytemuck::Zeroable` bound guarantees.
unsafe impl<T: Scalar, D: DimName> bytemuck::Zeroable for OPoint<T, D>
where
    OVector<T, D>: bytemuck::Zeroable,
    DefaultAllocator: Allocator<D>,
{
}
100
#[cfg(feature = "bytemuck")]
// SAFETY: `OPoint` is `#[repr(C)]`, `Copy` (via `T: Copy` and the vector being `Pod`), and
// contains a single field that is itself `Pod`, so it has no padding or invalid bit patterns
// beyond those of `OVector<T, D>`.
unsafe impl<T: Scalar, D: DimName> bytemuck::Pod for OPoint<T, D>
where
    T: Copy,
    OVector<T, D>: bytemuck::Pod,
    DefaultAllocator: Allocator<D>,
{
}
109
#[cfg(feature = "serde-serialize-no-std")]
impl<T: Scalar, D: DimName> Serialize for OPoint<T, D>
where
    DefaultAllocator: Allocator<D>,
    <DefaultAllocator as Allocator<D>>::Buffer<T>: Serialize,
{
    /// Serializes the point as its coordinate vector.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        Serialize::serialize(&self.coords, serializer)
    }
}
123
#[cfg(feature = "serde-serialize-no-std")]
impl<'a, T: Scalar, D: DimName> Deserialize<'a> for OPoint<T, D>
where
    DefaultAllocator: Allocator<D>,
    <DefaultAllocator as Allocator<D>>::Buffer<T>: Deserialize<'a>,
{
    /// Deserializes a coordinate vector and wraps it as a point.
    fn deserialize<Des>(deserializer: Des) -> Result<Self, Des::Error>
    where
        Des: Deserializer<'a>,
    {
        OVector::<T, D>::deserialize(deserializer).map(Self::from)
    }
}
139
impl<T: Scalar, D: DimName> OPoint<T, D>
where
    DefaultAllocator: Allocator<D>,
{
    /// Returns a point containing the result of `f` applied to each of this point's coordinates.
    #[inline]
    #[must_use]
    pub fn map<T2: Scalar, F: FnMut(T) -> T2>(&self, f: F) -> OPoint<T2, D>
    where
        DefaultAllocator: Allocator<D>,
    {
        self.coords.map(f).into()
    }

    /// Applies `f` to each coordinate of this point, in place.
    #[inline]
    pub fn apply<F: FnMut(&mut T)>(&mut self, f: F) {
        self.coords.apply(f)
    }

    /// Converts this point into a vector in homogeneous coordinates, i.e. its coordinates
    /// followed by a trailing `1`.
    #[inline]
    #[must_use]
    pub fn to_homogeneous(&self) -> OVector<T, DimNameSum<D, U1>>
    where
        T: One,
        D: DimNameAdd<U1>,
        DefaultAllocator: Allocator<DimNameSum<D, U1>>,
    {
        // Build the (D + 1)-dimensional result without zero-initializing it: copy the existing
        // coordinates into the uninitialized storage, then write the trailing `1` explicitly.
        let len = self.len();
        let mut res = crate::Matrix::uninit(DimNameSum::<D, U1>::name(), Const::<1>);
        res.generic_view_mut((0, 0), self.coords.shape_generic())
            .zip_apply(&self.coords, |out, e| *out = MaybeUninit::new(e));
        res[(len, 0)] = MaybeUninit::new(T::one());

        // SAFETY: every entry has been written above — the first `len` by `zip_apply`, the
        // last one explicitly — so the vector is fully initialized.
        unsafe { res.assume_init() }
    }

    /// Interpolates between this point and `rhs` with parameter `t`, delegating to the
    /// coordinate vectors' `lerp`.
    #[must_use]
    pub fn lerp(&self, rhs: &OPoint<T, D>, t: T) -> OPoint<T, D>
    where
        T: Scalar + Zero + One + ClosedAddAssign + ClosedSubAssign + ClosedMulAssign,
    {
        OPoint {
            coords: self.coords.lerp(&rhs.coords, t),
        }
    }

    /// Creates a new point from its coordinate vector.
    #[deprecated(note = "Use Point::from(vector) instead.")]
    #[inline]
    pub const fn from_coordinates(coords: OVector<T, D>) -> Self {
        Self { coords }
    }

    /// The number of coordinates of this point, i.e. its dimension.
    #[inline]
    #[must_use]
    pub fn len(&self) -> usize {
        self.coords.len()
    }

    /// Returns `true` if this point has zero coordinates (a zero-dimensional point).
    #[inline]
    #[must_use]
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// Returns the row stride of the underlying coordinate buffer.
    #[inline]
    #[deprecated(note = "This methods is no longer significant and will always return 1.")]
    pub fn stride(&self) -> usize {
        self.coords.strides().0
    }

    /// Iterates over references to the coordinates of this point.
    #[inline]
    pub fn iter(
        &self,
    ) -> MatrixIter<'_, T, D, Const<1>, <DefaultAllocator as Allocator<D>>::Buffer<T>> {
        self.coords.iter()
    }

    /// Gets a reference to the `i`-th coordinate without bound-checking.
    ///
    /// # Safety
    ///
    /// `i` must be in bounds, i.e. `i < self.len()`.
    #[inline]
    #[must_use]
    pub unsafe fn get_unchecked(&self, i: usize) -> &T {
        unsafe { self.coords.vget_unchecked(i) }
    }

    /// Mutably iterates over the coordinates of this point.
    #[inline]
    pub fn iter_mut(
        &mut self,
    ) -> MatrixIterMut<'_, T, D, Const<1>, <DefaultAllocator as Allocator<D>>::Buffer<T>> {
        self.coords.iter_mut()
    }

    /// Gets a mutable reference to the `i`-th coordinate without bound-checking.
    ///
    /// # Safety
    ///
    /// `i` must be in bounds, i.e. `i < self.len()`.
    #[inline]
    #[must_use]
    pub unsafe fn get_unchecked_mut(&mut self, i: usize) -> &mut T {
        unsafe { self.coords.vget_unchecked_mut(i) }
    }

    /// Swaps the coordinates at indices `i1` and `i2` without bound-checking.
    ///
    /// # Safety
    ///
    /// Both `i1` and `i2` must be in bounds, i.e. less than `self.len()`.
    #[inline]
    pub unsafe fn swap_unchecked(&mut self, i1: usize, i2: usize) {
        unsafe { self.coords.swap_unchecked((i1, 0), (i2, 0)) }
    }
}
367
368impl<T: Scalar + AbsDiffEq, D: DimName> AbsDiffEq for OPoint<T, D>
369where
370 T::Epsilon: Clone,
371 DefaultAllocator: Allocator<D>,
372{
373 type Epsilon = T::Epsilon;
374
375 #[inline]
376 fn default_epsilon() -> Self::Epsilon {
377 T::default_epsilon()
378 }
379
380 #[inline]
381 fn abs_diff_eq(&self, other: &Self, epsilon: Self::Epsilon) -> bool {
382 self.coords.abs_diff_eq(&other.coords, epsilon)
383 }
384}
385
386impl<T: Scalar + RelativeEq, D: DimName> RelativeEq for OPoint<T, D>
387where
388 T::Epsilon: Clone,
389 DefaultAllocator: Allocator<D>,
390{
391 #[inline]
392 fn default_max_relative() -> Self::Epsilon {
393 T::default_max_relative()
394 }
395
396 #[inline]
397 fn relative_eq(
398 &self,
399 other: &Self,
400 epsilon: Self::Epsilon,
401 max_relative: Self::Epsilon,
402 ) -> bool {
403 self.coords
404 .relative_eq(&other.coords, epsilon, max_relative)
405 }
406}
407
408impl<T: Scalar + UlpsEq, D: DimName> UlpsEq for OPoint<T, D>
409where
410 T::Epsilon: Clone,
411 DefaultAllocator: Allocator<D>,
412{
413 #[inline]
414 fn default_max_ulps() -> u32 {
415 T::default_max_ulps()
416 }
417
418 #[inline]
419 fn ulps_eq(&self, other: &Self, epsilon: Self::Epsilon, max_ulps: u32) -> bool {
420 self.coords.ulps_eq(&other.coords, epsilon, max_ulps)
421 }
422}
423
// Total equality holds whenever the scalar type is `Eq`; comparison is delegated to `coords`.
impl<T: Scalar + Eq, D: DimName> Eq for OPoint<T, D> where DefaultAllocator: Allocator<D> {}
425
426impl<T: Scalar, D: DimName> PartialEq for OPoint<T, D>
427where
428 DefaultAllocator: Allocator<D>,
429{
430 #[inline]
431 fn eq(&self, right: &Self) -> bool {
432 self.coords == right.coords
433 }
434}
435
impl<T: Scalar + PartialOrd, D: DimName> PartialOrd for OPoint<T, D>
where
    DefaultAllocator: Allocator<D>,
{
    /// Ordering delegated to the coordinate vector's `partial_cmp`.
    #[inline]
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        self.coords.partial_cmp(&other.coords)
    }

    // NOTE(review): `lt`/`le`/`gt`/`ge` are deliberately overridden to forward to the
    // coordinate vector's own comparison methods instead of being derived from
    // `partial_cmp` — presumably the vector impl gives these componentwise semantics;
    // confirm against the matrix `PartialOrd` implementation before refactoring.
    #[inline]
    fn lt(&self, right: &Self) -> bool {
        self.coords.lt(&right.coords)
    }

    #[inline]
    fn le(&self, right: &Self) -> bool {
        self.coords.le(&right.coords)
    }

    #[inline]
    fn gt(&self, right: &Self) -> bool {
        self.coords.gt(&right.coords)
    }

    #[inline]
    fn ge(&self, right: &Self) -> bool {
        self.coords.ge(&right.coords)
    }
}
465
466impl<T: Scalar + SimdPartialOrd, D: DimName> OPoint<T, D>
470where
471 DefaultAllocator: Allocator<D>,
472{
473 #[inline]
475 #[must_use]
476 pub fn inf(&self, other: &Self) -> OPoint<T, D> {
477 self.coords.inf(&other.coords).into()
478 }
479
480 #[inline]
482 #[must_use]
483 pub fn sup(&self, other: &Self) -> OPoint<T, D> {
484 self.coords.sup(&other.coords).into()
485 }
486
487 #[inline]
489 #[must_use]
490 pub fn inf_sup(&self, other: &Self) -> (OPoint<T, D>, OPoint<T, D>) {
491 let (inf, sup) = self.coords.inf_sup(&other.coords);
492 (inf.into(), sup.into())
493 }
494}
495
496impl<T: Scalar + fmt::Display, D: DimName> fmt::Display for OPoint<T, D>
502where
503 DefaultAllocator: Allocator<D>,
504{
505 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
506 write!(f, "{{")?;
507
508 let mut it = self.coords.iter();
509
510 <T as fmt::Display>::fmt(it.next().unwrap(), f)?;
511
512 for comp in it {
513 write!(f, ", ")?;
514 <T as fmt::Display>::fmt(comp, f)?;
515 }
516
517 write!(f, "}}")
518 }
519}