trackball/image.rs

use crate::{Frame, Scope};
use nalgebra::{Isometry3, Matrix4, Point2, Point3, RealField, Vector2, Vector3, convert, zero};
use simba::scalar::SubsetOf;

/// Image as projection of [`Scope`] wrt [`Frame`].
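///
/// A rough usage sketch (hypothetical caller code; it assumes a [`Frame`] and [`Scope`] have
/// already been constructed via their own APIs and that the screen size is known):
///
/// ```ignore
/// // Cache the transformations for the current frame, scope, and screen size.
/// let mut image = Image::new(&frame, &scope, Point2::new(width, height));
/// // Track the pointing device and project its position onto the focus plane.
/// image.set_pos(cursor);
/// let on_focus_plane = image.project_pos(image.pos());
/// ```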
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct Image<N: Copy + RealField> {
	/// Current position in screen space of hovering input or pointing device.
	pos: Point2<N>,
	/// Maximum position in screen space as screen's width and height.
	max: Point2<N>,
	/// Cached unit per pixel on focus plane to scale/project positions/vectors onto focus plane.
	upp: N,
	/// Cached previous frame.
	frame: Frame<N>,
	/// Cached previous scope.
	scope: Scope<N>,
	/// Cached view isometry from world to camera space coinciding with right-handed look-at space.
	view_iso: Isometry3<N>,
	/// Cached homogeneous view matrix computed from view isometry.
	view_mat: Matrix4<N>,
	/// Cached scale-identical orthographic or perspective projection matrix.
	proj_mat: Matrix4<N>,
	/// Cached projection view transformation.
	proj_view_mat: Matrix4<N>,
	/// Cached inverse of projection view transformation.
	proj_view_inv: Matrix4<N>,
	/// Whether to compute transformation. Default is `true`.
	compute_mat: bool,
	/// Whether to compute inverse transformation. Default is `true`.
	compute_inv: bool,
}

impl<N: Copy + RealField> Image<N> {
	/// Computes initial transformations from frame, scope, and screen's width and height.
	#[must_use]
	pub fn new(frame: &Frame<N>, scope: &Scope<N>, max: Point2<N>) -> Self {
		let mut image = Self {
			pos: Point2::origin(),
			max,
			upp: zero(),
			frame: *frame,
			scope: *scope,
			view_iso: Isometry3::identity(),
			view_mat: zero(),
			proj_mat: zero(),
			proj_view_mat: zero(),
			proj_view_inv: zero(),
			compute_mat: true,
			compute_inv: true,
		};
		image.compute_view(frame);
		image.compute_projection_and_upp(frame.distance(), scope);
		image.compute_transformation();
		image.compute_inverse_transformation();
		image
	}
	/// Recomputes only cached matrices whose parameters have changed, see [`Self::set_compute()`].
	///
	/// Returns `Some(true)` on success, `Some(false)` on failure, or `None` if nothing changed.
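	///
	/// A sketch of a per-update call site (hypothetical caller code; `frame` and `scope` are
	/// assumed to come from input handling):
	///
	/// ```ignore
	/// match image.compute(frame, scope) {
	/// 	// Nothing changed, cached matrices are still valid.
	/// 	None => {}
	/// 	// Matrices recomputed, re-upload `image.transformation()`.
	/// 	Some(true) => {}
	/// 	// Projection view matrix could not be inverted.
	/// 	Some(false) => {}
	/// }
	/// ```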
	#[allow(clippy::useless_let_if_seq)]
	pub fn compute(&mut self, frame: Frame<N>, scope: Scope<N>) -> Option<bool> {
		let mut compute = false;
		if self.frame != frame {
			self.compute_view(&frame);
			compute = true;
		}
		if self.frame.distance() != frame.distance() || self.scope != scope {
			self.compute_projection_and_upp(frame.distance(), &scope);
			compute = true;
		}
		self.frame = frame;
		self.scope = scope;
		compute.then(|| {
			if self.compute_mat || self.compute_inv {
				self.compute_transformation();
			}
			if self.compute_inv {
				self.compute_inverse_transformation()
			} else {
				true
			}
		})
	}
	/// Sets whether to compute transformation and inverse transformation with [`Self::compute()`].
	///
	/// Default is `(true, true)`.
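	///
	/// A sketch (hypothetical caller code): skip the inverse whenever it is not needed.
	///
	/// ```ignore
	/// image.set_compute(true, false);
	/// ```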
	pub const fn set_compute(&mut self, compute_mat: bool, compute_inv: bool) {
		self.compute_mat = compute_mat;
		self.compute_inv = compute_inv;
	}
	/// Current position in screen space of hovering input or pointing device.
	#[must_use]
	pub const fn pos(&self) -> &Point2<N> {
		&self.pos
	}
	/// Sets current position in screen space of hovering input or pointing device.
	pub const fn set_pos(&mut self, pos: Point2<N>) {
		self.pos = pos;
	}
	/// Maximum position in screen space as screen's width and height.
	#[must_use]
	pub const fn max(&self) -> &Point2<N> {
		&self.max
	}
	/// Sets maximum position in screen space as screen's width and height.
	pub fn set_max(&mut self, max: Point2<N>) {
		// Let `Self::compute()` recompute projection matrix by invalidating cached previous scope.
		if self.max != max {
			self.scope.set_fov(N::zero());
		}
		self.max = max;
	}
	/// Cached unit per pixel on focus plane to scale/project positions/vectors onto focus plane.
	#[must_use]
	pub const fn upp(&self) -> N {
		self.upp
	}
	/// Cached view isometry.
	#[must_use]
	pub const fn view_isometry(&self) -> &Isometry3<N> {
		&self.view_iso
	}
	/// Cached view matrix.
	#[must_use]
	pub const fn view(&self) -> &Matrix4<N> {
		&self.view_mat
	}
	/// Computes view isometry and matrix from frame wrt camera eye and target.
	pub fn compute_view(&mut self, frame: &Frame<N>) {
		self.view_iso = frame.view();
		self.view_mat = self.view_iso.to_homogeneous();
	}
	/// Cached projection matrix.
	#[must_use]
	pub const fn projection(&self) -> &Matrix4<N> {
		&self.proj_mat
	}
	/// Computes projection matrix and unit per pixel on focus plane.
	pub fn compute_projection_and_upp(&mut self, zat: N, scope: &Scope<N>) {
		let (mat, upp) = scope.projection_and_upp(zat, &self.max);
		self.upp = upp;
		self.proj_mat = mat;
	}
	/// Cached projection view matrix.
	#[must_use]
	pub const fn transformation(&self) -> &Matrix4<N> {
		&self.proj_view_mat
	}
	/// Computes projection view matrix.
	pub fn compute_transformation(&mut self) {
		self.proj_view_mat = self.proj_mat * self.view_mat;
	}
	/// Cached inverse projection view matrix.
	#[must_use]
	pub const fn inverse_transformation(&self) -> &Matrix4<N> {
		&self.proj_view_inv
	}
	/// Computes inverse of projection view matrix.
	///
	/// Returns `true` on success.
	pub fn compute_inverse_transformation(&mut self) -> bool {
		let inv = self.proj_view_mat.try_inverse();
		if let Some(mat) = inv {
			self.proj_view_inv = mat;
		}
		inv.is_some()
	}
	/// Clamps position in screen space wrt its maximum in screen space.
	#[must_use]
	pub fn clamp_pos_wrt_max(pos: &Point2<N>, max: &Point2<N>) -> Point2<N> {
		Point2::new(pos.x.clamp(N::zero(), max.x), pos.y.clamp(N::zero(), max.y))
	}
	/// Clamps position in screen space.
	#[must_use]
	pub fn clamp_pos(&self, pos: &Point2<N>) -> Point2<N> {
		Self::clamp_pos_wrt_max(pos, &self.max)
	}
	/// Transforms position and its maximum from screen to camera space.
	///
	/// Returns the transformed position along with the halved maximum.
	#[must_use]
	pub fn transform_pos_and_max_wrt_max(
		pos: &Point2<N>,
		max: &Point2<N>,
	) -> (Point2<N>, Point2<N>) {
		let max = max * convert(0.5);
		(Point2::new(pos.x - max.x, max.y - pos.y), max)
	}
	/// Transforms position from screen to camera space.
	#[must_use]
	pub fn transform_pos(&self, pos: &Point2<N>) -> Point2<N> {
		Self::transform_pos_and_max_wrt_max(pos, &self.max).0
	}
	/// Transforms vector from screen to camera space.
	#[must_use]
	pub fn transform_vec(vec: &Vector2<N>) -> Vector2<N> {
		Vector2::new(vec.x, -vec.y)
	}
	/// Transforms position from screen to camera space and projects it onto focus plane.
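	///
	/// A sketch (hypothetical caller code): the returned point has a zero `z` component while
	/// its `x`/`y` components are the offset from the screen center scaled by [`Self::upp()`].
	///
	/// ```ignore
	/// let on_focus_plane = image.project_pos(image.pos());
	/// assert_eq!(on_focus_plane.z, 0.0);
	/// ```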
	#[must_use]
	pub fn project_pos(&self, pos: &Point2<N>) -> Point3<N> {
		self.transform_pos(pos)
			.coords
			.scale(self.upp)
			.push(N::zero())
			.into()
	}
	/// Transforms vector from screen to camera space and projects it onto focus plane.
	#[must_use]
	pub fn project_vec(&self, vec: &Vector2<N>) -> Vector3<N> {
		Self::transform_vec(vec).scale(self.upp).push(N::zero())
	}
	/// Casts components to another type, e.g., between [`f32`] and [`f64`].
	#[must_use]
	pub fn cast<M: Copy + RealField>(self) -> Image<M>
	where
		N: SubsetOf<M>,
	{
		Image {
			pos: self.pos.cast(),
			max: self.max.cast(),
			upp: self.upp.to_superset(),
			frame: self.frame.cast(),
			scope: self.scope.cast(),
			view_iso: self.view_iso.cast(),
			view_mat: self.view_mat.cast(),
			proj_mat: self.proj_mat.cast(),
			proj_view_mat: self.proj_view_mat.cast(),
			proj_view_inv: self.proj_view_inv.cast(),
			compute_mat: self.compute_mat,
			compute_inv: self.compute_inv,
		}
	}
}

#[cfg(feature = "rkyv")]
impl<N: Copy + RealField> rkyv::Archive for Image<N> {
	type Archived = Self;
	type Resolver = ();

	#[inline]
	#[allow(unsafe_code)]
	unsafe fn resolve(&self, _: usize, (): Self::Resolver, out: *mut Self::Archived) {
		unsafe {
			out.write(rkyv::to_archived!(*self as Self));
		}
	}
}

#[cfg(feature = "rkyv")]
impl<Ser: rkyv::Fallible + ?Sized, N: Copy + RealField> rkyv::Serialize<Ser> for Image<N> {
	#[inline]
	fn serialize(&self, _: &mut Ser) -> Result<Self::Resolver, Ser::Error> {
		Ok(())
	}
}

#[cfg(feature = "rkyv")]
impl<De: rkyv::Fallible + ?Sized, N: Copy + RealField> rkyv::Deserialize<Self, De> for Image<N> {
	#[inline]
	fn deserialize(&self, _: &mut De) -> Result<Self, De::Error> {
		Ok(rkyv::from_archived!(*self))
	}
}
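
// A minimal test sketch, not part of the original file: it only exercises the pure
// screen-to-camera helpers above and makes no assumptions about the `Frame`/`Scope` APIs.
#[cfg(test)]
mod tests {
	use super::Image;
	use nalgebra::{Point2, Vector2};

	#[test]
	fn transforms_screen_to_camera_space() {
		let max = Point2::new(1024.0_f64, 768.0);
		// Screen space has its origin at the top-left corner with `y` pointing down whereas
		// camera space is centered on the screen with `y` pointing up.
		let (pos, half) =
			Image::<f64>::transform_pos_and_max_wrt_max(&Point2::new(800.0, 600.0), &max);
		assert_eq!(half, Point2::new(512.0, 384.0));
		assert_eq!(pos, Point2::new(288.0, -216.0));
		// Vectors are only mirrored along `y`, not translated.
		assert_eq!(
			Image::<f64>::transform_vec(&Vector2::new(3.0, 4.0)),
			Vector2::new(3.0, -4.0)
		);
		// Positions are clamped to the screen rectangle.
		assert_eq!(
			Image::<f64>::clamp_pos_wrt_max(&Point2::new(-10.0, 900.0), &max),
			Point2::new(0.0, 768.0)
		);
	}
}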