1use approx::{AbsDiffEq, RelativeEq, UlpsEq};
2use num::{One, Zero};
3use std::cmp::Ordering;
4use std::fmt;
5use std::hash;
6
7#[cfg(feature = "rkyv-serialize")]
8use rkyv::bytecheck;
9#[cfg(feature = "serde-serialize-no-std")]
10use serde::{Deserialize, Deserializer, Serialize, Serializer};
11
12use simba::simd::SimdPartialOrd;
13
14use crate::base::allocator::Allocator;
15use crate::base::dimension::{DimName, DimNameAdd, DimNameSum, U1};
16use crate::base::iter::{MatrixIter, MatrixIterMut};
17use crate::base::{Const, DefaultAllocator, OVector, Scalar};
18use simba::scalar::{ClosedAddAssign, ClosedMulAssign, ClosedSubAssign};
19use std::mem::MaybeUninit;
20
/// A point in an euclidean space.
///
/// A point is a location in space, backed by the coordinate vector `coords`.
/// Unlike a vector, it conceptually represents a position rather than a
/// displacement, which is why it gets its own type on top of `OVector`.
#[repr(C)]
#[derive(Clone)]
#[cfg_attr(feature = "rkyv-serialize", derive(bytecheck::CheckBytes))]
#[cfg_attr(
    feature = "rkyv-serialize-no-std",
    derive(rkyv::Archive, rkyv::Serialize, rkyv::Deserialize),
    archive(
        as = "OPoint<T::Archived, D>",
        bound(archive = "
        T: rkyv::Archive,
        T::Archived: Scalar,
        OVector<T, D>: rkyv::Archive<Archived = OVector<T::Archived, D>>,
        DefaultAllocator: Allocator<D>,
    ")
    )
)]
pub struct OPoint<T: Scalar, D: DimName>
where
    DefaultAllocator: Allocator<D>,
{
    /// The coordinates of this point, i.e., the shift from the origin.
    pub coords: OVector<T, D>,
}
63
64impl<T: Scalar + fmt::Debug, D: DimName> fmt::Debug for OPoint<T, D>
65where
66 DefaultAllocator: Allocator<D>,
67{
68 fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
69 self.coords.as_slice().fmt(formatter)
70 }
71}
72
73impl<T: Scalar + hash::Hash, D: DimName> hash::Hash for OPoint<T, D>
74where
75 DefaultAllocator: Allocator<D>,
76{
77 fn hash<H: hash::Hasher>(&self, state: &mut H) {
78 self.coords.hash(state)
79 }
80}
81
// `OPoint` is `Copy` whenever its backing coordinate vector is `Copy`
// (i.e., statically-sized storage of a `Copy` scalar).
impl<T: Scalar + Copy, D: DimName> Copy for OPoint<T, D>
where
    DefaultAllocator: Allocator<D>,
    OVector<T, D>: Copy,
{
}
88
// SAFETY: `OPoint` is `#[repr(C)]` with a single `OVector<T, D>` field, so it
// is zeroable exactly when that coordinate vector is zeroable.
#[cfg(feature = "bytemuck")]
unsafe impl<T: Scalar, D: DimName> bytemuck::Zeroable for OPoint<T, D>
where
    OVector<T, D>: bytemuck::Zeroable,
    DefaultAllocator: Allocator<D>,
{
}
96
// SAFETY: `OPoint` is `#[repr(C)]` wrapping a single field, so it is plain old
// data whenever the wrapped coordinate vector is `Pod` (and `T: Copy`).
#[cfg(feature = "bytemuck")]
unsafe impl<T: Scalar, D: DimName> bytemuck::Pod for OPoint<T, D>
where
    T: Copy,
    OVector<T, D>: bytemuck::Pod,
    DefaultAllocator: Allocator<D>,
{
}
105
// A point serializes transparently as its coordinate vector: no wrapper
// struct appears in the serialized form.
#[cfg(feature = "serde-serialize-no-std")]
impl<T: Scalar, D: DimName> Serialize for OPoint<T, D>
where
    DefaultAllocator: Allocator<D>,
    <DefaultAllocator as Allocator<D>>::Buffer<T>: Serialize,
{
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        Serialize::serialize(&self.coords, serializer)
    }
}
119
// Mirror of the `Serialize` impl: a point is deserialized from a bare
// coordinate vector and then wrapped.
#[cfg(feature = "serde-serialize-no-std")]
impl<'a, T: Scalar, D: DimName> Deserialize<'a> for OPoint<T, D>
where
    DefaultAllocator: Allocator<D>,
    <DefaultAllocator as Allocator<D>>::Buffer<T>: Deserialize<'a>,
{
    fn deserialize<Des>(deserializer: Des) -> Result<Self, Des::Error>
    where
        Des: Deserializer<'a>,
    {
        OVector::<T, D>::deserialize(deserializer).map(Self::from)
    }
}
135
impl<T: Scalar, D: DimName> OPoint<T, D>
where
    DefaultAllocator: Allocator<D>,
{
    /// Returns a point containing the result of `f` applied to each of its entries.
    // NOTE(review): the `where` clause below duplicates the impl-level bound.
    #[inline]
    #[must_use]
    pub fn map<T2: Scalar, F: FnMut(T) -> T2>(&self, f: F) -> OPoint<T2, D>
    where
        DefaultAllocator: Allocator<D>,
    {
        self.coords.map(f).into()
    }

    /// Replaces each component of `self` by the result of the closure `f` applied to it.
    #[inline]
    pub fn apply<F: FnMut(&mut T)>(&mut self, f: F) {
        self.coords.apply(f)
    }

    /// Converts this point into a vector in homogeneous coordinates, i.e., appends
    /// a `1` at the end of the coordinate vector.
    #[inline]
    #[must_use]
    pub fn to_homogeneous(&self) -> OVector<T, DimNameSum<D, U1>>
    where
        T: One,
        D: DimNameAdd<U1>,
        DefaultAllocator: Allocator<DimNameSum<D, U1>>,
    {
        let len = self.len();
        // Allocate the (D + 1)-dimensional result uninitialized to avoid a
        // useless zero-fill before every entry is overwritten below.
        let mut res = crate::Matrix::uninit(DimNameSum::<D, U1>::name(), Const::<1>);
        // Initialize the first D entries with this point's coordinates.
        res.generic_view_mut((0, 0), self.coords.shape_generic())
            .zip_apply(&self.coords, |out, e| *out = MaybeUninit::new(e));
        // The homogeneous component of a point is always 1.
        res[(len, 0)] = MaybeUninit::new(T::one());

        // SAFETY: all D + 1 entries were written just above.
        unsafe { res.assume_init() }
    }

    /// Linearly interpolates between `self` and `rhs`: `self * (1 - t) + rhs * t`.
    #[must_use]
    pub fn lerp(&self, rhs: &OPoint<T, D>, t: T) -> OPoint<T, D>
    where
        T: Scalar + Zero + One + ClosedAddAssign + ClosedSubAssign + ClosedMulAssign,
    {
        OPoint {
            coords: self.coords.lerp(&rhs.coords, t),
        }
    }

    /// Creates a new point with the given coordinate vector.
    #[deprecated(note = "Use Point::from(vector) instead.")]
    #[inline]
    pub fn from_coordinates(coords: OVector<T, D>) -> Self {
        Self { coords }
    }

    /// The dimension of this point, i.e., the number of its coordinates.
    #[inline]
    #[must_use]
    pub fn len(&self) -> usize {
        self.coords.len()
    }

    /// Returns `true` if this point has no coordinates (zero-dimensional).
    #[inline]
    #[must_use]
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// The stride of the underlying coordinate storage.
    #[inline]
    #[deprecated(note = "This methods is no longer significant and will always return 1.")]
    pub fn stride(&self) -> usize {
        self.coords.strides().0
    }

    /// Iterates through this point's coordinates.
    #[inline]
    pub fn iter(
        &self,
    ) -> MatrixIter<'_, T, D, Const<1>, <DefaultAllocator as Allocator<D>>::Buffer<T>> {
        self.coords.iter()
    }

    /// Gets a reference to the i-th coordinate without bound-checking.
    ///
    /// # Safety
    ///
    /// `i` must be in-bounds, i.e., `i < self.len()`.
    #[inline]
    #[must_use]
    pub unsafe fn get_unchecked(&self, i: usize) -> &T {
        self.coords.vget_unchecked(i)
    }

    /// Mutably iterates through this point's coordinates.
    #[inline]
    pub fn iter_mut(
        &mut self,
    ) -> MatrixIterMut<'_, T, D, Const<1>, <DefaultAllocator as Allocator<D>>::Buffer<T>> {
        self.coords.iter_mut()
    }

    /// Gets a mutable reference to the i-th coordinate without bound-checking.
    ///
    /// # Safety
    ///
    /// `i` must be in-bounds, i.e., `i < self.len()`.
    #[inline]
    #[must_use]
    pub unsafe fn get_unchecked_mut(&mut self, i: usize) -> &mut T {
        self.coords.vget_unchecked_mut(i)
    }

    /// Swaps the coordinates at positions `i1` and `i2` without bound-checking.
    ///
    /// # Safety
    ///
    /// `i1` and `i2` must both be in-bounds, i.e., `< self.len()`.
    #[inline]
    pub unsafe fn swap_unchecked(&mut self, i1: usize, i2: usize) {
        self.coords.swap_unchecked((i1, 0), (i2, 0))
    }
}
363
364impl<T: Scalar + AbsDiffEq, D: DimName> AbsDiffEq for OPoint<T, D>
365where
366 T::Epsilon: Clone,
367 DefaultAllocator: Allocator<D>,
368{
369 type Epsilon = T::Epsilon;
370
371 #[inline]
372 fn default_epsilon() -> Self::Epsilon {
373 T::default_epsilon()
374 }
375
376 #[inline]
377 fn abs_diff_eq(&self, other: &Self, epsilon: Self::Epsilon) -> bool {
378 self.coords.abs_diff_eq(&other.coords, epsilon)
379 }
380}
381
382impl<T: Scalar + RelativeEq, D: DimName> RelativeEq for OPoint<T, D>
383where
384 T::Epsilon: Clone,
385 DefaultAllocator: Allocator<D>,
386{
387 #[inline]
388 fn default_max_relative() -> Self::Epsilon {
389 T::default_max_relative()
390 }
391
392 #[inline]
393 fn relative_eq(
394 &self,
395 other: &Self,
396 epsilon: Self::Epsilon,
397 max_relative: Self::Epsilon,
398 ) -> bool {
399 self.coords
400 .relative_eq(&other.coords, epsilon, max_relative)
401 }
402}
403
404impl<T: Scalar + UlpsEq, D: DimName> UlpsEq for OPoint<T, D>
405where
406 T::Epsilon: Clone,
407 DefaultAllocator: Allocator<D>,
408{
409 #[inline]
410 fn default_max_ulps() -> u32 {
411 T::default_max_ulps()
412 }
413
414 #[inline]
415 fn ulps_eq(&self, other: &Self, epsilon: Self::Epsilon, max_ulps: u32) -> bool {
416 self.coords.ulps_eq(&other.coords, epsilon, max_ulps)
417 }
418}
419
// `Eq` holds whenever the scalar's equality is total (follows from `PartialEq` below).
impl<T: Scalar + Eq, D: DimName> Eq for OPoint<T, D> where DefaultAllocator: Allocator<D> {}
421
422impl<T: Scalar, D: DimName> PartialEq for OPoint<T, D>
423where
424 DefaultAllocator: Allocator<D>,
425{
426 #[inline]
427 fn eq(&self, right: &Self) -> bool {
428 self.coords == right.coords
429 }
430}
431
432impl<T: Scalar + PartialOrd, D: DimName> PartialOrd for OPoint<T, D>
433where
434 DefaultAllocator: Allocator<D>,
435{
436 #[inline]
437 fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
438 self.coords.partial_cmp(&other.coords)
439 }
440
441 #[inline]
442 fn lt(&self, right: &Self) -> bool {
443 self.coords.lt(&right.coords)
444 }
445
446 #[inline]
447 fn le(&self, right: &Self) -> bool {
448 self.coords.le(&right.coords)
449 }
450
451 #[inline]
452 fn gt(&self, right: &Self) -> bool {
453 self.coords.gt(&right.coords)
454 }
455
456 #[inline]
457 fn ge(&self, right: &Self) -> bool {
458 self.coords.ge(&right.coords)
459 }
460}
461
462impl<T: Scalar + SimdPartialOrd, D: DimName> OPoint<T, D>
466where
467 DefaultAllocator: Allocator<D>,
468{
469 #[inline]
471 #[must_use]
472 pub fn inf(&self, other: &Self) -> OPoint<T, D> {
473 self.coords.inf(&other.coords).into()
474 }
475
476 #[inline]
478 #[must_use]
479 pub fn sup(&self, other: &Self) -> OPoint<T, D> {
480 self.coords.sup(&other.coords).into()
481 }
482
483 #[inline]
485 #[must_use]
486 pub fn inf_sup(&self, other: &Self) -> (OPoint<T, D>, OPoint<T, D>) {
487 let (inf, sup) = self.coords.inf_sup(&other.coords);
488 (inf.into(), sup.into())
489 }
490}
491
492impl<T: Scalar + fmt::Display, D: DimName> fmt::Display for OPoint<T, D>
498where
499 DefaultAllocator: Allocator<D>,
500{
501 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
502 write!(f, "{{")?;
503
504 let mut it = self.coords.iter();
505
506 <T as fmt::Display>::fmt(it.next().unwrap(), f)?;
507
508 for comp in it {
509 write!(f, ", ")?;
510 <T as fmt::Display>::fmt(comp, f)?;
511 }
512
513 write!(f, "}}")
514 }
515}