//! bevy_render/view/mod.rs
1pub mod visibility;
2pub mod window;
3
4use bevy_asset::{load_internal_asset, weak_handle, Handle};
5use bevy_diagnostic::FrameCount;
6pub use visibility::*;
7pub use window::*;
8
9use crate::{
10 camera::{
11 CameraMainTextureUsages, ClearColor, ClearColorConfig, Exposure, ExtractedCamera,
12 ManualTextureViews, MipBias, NormalizedRenderTarget, TemporalJitter,
13 },
14 experimental::occlusion_culling::OcclusionCulling,
15 extract_component::ExtractComponentPlugin,
16 prelude::Shader,
17 primitives::Frustum,
18 render_asset::RenderAssets,
19 render_phase::ViewRangefinder3d,
20 render_resource::{DynamicUniformBuffer, ShaderType, Texture, TextureView},
21 renderer::{RenderDevice, RenderQueue},
22 sync_world::MainEntity,
23 texture::{
24 CachedTexture, ColorAttachment, DepthAttachment, GpuImage, OutputColorAttachment,
25 TextureCache,
26 },
27 Render, RenderApp, RenderSet,
28};
29use alloc::sync::Arc;
30use bevy_app::{App, Plugin};
31use bevy_color::LinearRgba;
32use bevy_derive::{Deref, DerefMut};
33use bevy_ecs::prelude::*;
34use bevy_image::BevyDefault as _;
35use bevy_math::{mat3, vec2, vec3, Mat3, Mat4, UVec4, Vec2, Vec3, Vec4, Vec4Swizzles};
36use bevy_platform::collections::{hash_map::Entry, HashMap};
37use bevy_reflect::{std_traits::ReflectDefault, Reflect};
38use bevy_render_macros::ExtractComponent;
39use bevy_transform::components::GlobalTransform;
40use core::{
41 ops::Range,
42 sync::atomic::{AtomicUsize, Ordering},
43};
44use wgpu::{
45 BufferUsages, Extent3d, RenderPassColorAttachment, RenderPassDepthStencilAttachment, StoreOp,
46 TextureDescriptor, TextureDimension, TextureFormat, TextureUsages,
47};
48
/// Handle to the internal `view.wgsl` shader asset, loaded by [`ViewPlugin`]
/// so that other shaders can import the shared view binding definitions.
pub const VIEW_TYPE_HANDLE: Handle<Shader> = weak_handle!("7234423c-38bb-411c-acec-f67730f6db5b");

/// The matrix that converts from the RGB to the LMS color space.
///
/// To derive this, first we convert from RGB to [CIE 1931 XYZ]:
///
/// ```text
/// ⎡ X ⎤   ⎡ 0.490  0.310  0.200 ⎤ ⎡ R ⎤
/// ⎢ Y ⎥ = ⎢ 0.177  0.812  0.011 ⎥ ⎢ G ⎥
/// ⎣ Z ⎦   ⎣ 0.000  0.010  0.990 ⎦ ⎣ B ⎦
/// ```
///
/// Then we convert to LMS according to the [CAM16 standard matrix]:
///
/// ```text
/// ⎡ L ⎤   ⎡  0.401  0.650  -0.051 ⎤ ⎡ X ⎤
/// ⎢ M ⎥ = ⎢ -0.250  1.204   0.046 ⎥ ⎢ Y ⎥
/// ⎣ S ⎦   ⎣ -0.002  0.049   0.953 ⎦ ⎣ Z ⎦
/// ```
///
/// The resulting matrix is just the concatenation of these two matrices, to do
/// the conversion in one step.
///
/// [CIE 1931 XYZ]: https://en.wikipedia.org/wiki/CIE_1931_color_space
/// [CAM16 standard matrix]: https://en.wikipedia.org/wiki/LMS_color_space
static RGB_TO_LMS: Mat3 = mat3(
    vec3(0.311692, 0.0905138, 0.00764433),
    vec3(0.652085, 0.901341, 0.0486554),
    vec3(0.0362225, 0.00814478, 0.943700),
);

/// The inverse of the [`RGB_TO_LMS`] matrix, converting from the LMS color
/// space back to RGB.
static LMS_TO_RGB: Mat3 = mat3(
    vec3(4.06305, -0.40791, -0.0118812),
    vec3(-2.93241, 1.40437, -0.0486532),
    vec3(-0.130646, 0.00353630, 1.0605344),
);

/// The [CIE 1931] *xy* chromaticity coordinates of the [D65 white point].
///
/// [CIE 1931]: https://en.wikipedia.org/wiki/CIE_1931_color_space
/// [D65 white point]: https://en.wikipedia.org/wiki/Standard_illuminant#D65_values
static D65_XY: Vec2 = vec2(0.31272, 0.32903);

/// The [D65 white point] in [LMS color space].
///
/// [LMS color space]: https://en.wikipedia.org/wiki/LMS_color_space
/// [D65 white point]: https://en.wikipedia.org/wiki/Standard_illuminant#D65_values
static D65_LMS: Vec3 = vec3(0.975538, 1.01648, 1.08475);
99
/// Registers the core view types and the render-world systems that prepare
/// per-view resources (uniform buffers, render targets, and output
/// attachments).
pub struct ViewPlugin;

impl Plugin for ViewPlugin {
    fn build(&self, app: &mut App) {
        // Embed `view.wgsl` as an internal asset so shaders can import the
        // shared view bindings.
        load_internal_asset!(app, VIEW_TYPE_HANDLE, "view.wgsl", Shader::from_wgsl);

        app.register_type::<InheritedVisibility>()
            .register_type::<ViewVisibility>()
            .register_type::<Msaa>()
            .register_type::<NoFrustumCulling>()
            .register_type::<RenderLayers>()
            .register_type::<Visibility>()
            .register_type::<VisibleEntities>()
            .register_type::<ColorGrading>()
            .register_type::<OcclusionCulling>()
            // NOTE: windows.is_changed() handles cases where a window was resized
            .add_plugins((
                ExtractComponentPlugin::<Msaa>::default(),
                ExtractComponentPlugin::<OcclusionCulling>::default(),
                VisibilityPlugin,
                VisibilityRangePlugin,
            ));

        if let Some(render_app) = app.get_sub_app_mut(RenderApp) {
            render_app.add_systems(
                Render,
                (
                    // `TextureView`s need to be dropped before reconfiguring window surfaces.
                    clear_view_attachments
                        .in_set(RenderSet::ManageViews)
                        .before(create_surfaces),
                    // Attachments must be populated before view targets are
                    // built from them, and windows must already be prepared.
                    prepare_view_attachments
                        .in_set(RenderSet::ManageViews)
                        .before(prepare_view_targets)
                        .after(prepare_windows),
                    prepare_view_targets
                        .in_set(RenderSet::ManageViews)
                        .after(prepare_windows)
                        .after(crate::render_asset::prepare_assets::<GpuImage>)
                        .ambiguous_with(crate::camera::sort_cameras), // doesn't use `sorted_camera_index_for_target`
                    prepare_view_uniforms.in_set(RenderSet::PrepareResources),
                ),
            );
        }
    }

    fn finish(&self, app: &mut App) {
        // Initialized here rather than in `build`: `ViewUniforms::from_world`
        // reads the `RenderDevice` resource, which is only available once
        // renderer initialization has completed.
        if let Some(render_app) = app.get_sub_app_mut(RenderApp) {
            render_app
                .init_resource::<ViewUniforms>()
                .init_resource::<ViewTargetAttachments>();
        }
    }
}
154
/// Component for configuring the number of samples for [Multi-Sample Anti-Aliasing](https://en.wikipedia.org/wiki/Multisample_anti-aliasing)
/// for a [`Camera`](crate::camera::Camera).
///
/// Defaults to 4 samples. A higher number of samples results in smoother edges.
///
/// Some advanced rendering features may require that MSAA is disabled.
///
/// Note that the web currently only supports 1 or 4 samples.
#[derive(
    Component,
    Default,
    Clone,
    Copy,
    ExtractComponent,
    Reflect,
    PartialEq,
    PartialOrd,
    Eq,
    Hash,
    Debug,
)]
#[reflect(Component, Default, PartialEq, Hash, Debug)]
pub enum Msaa {
    // The discriminants are the raw sample counts, so `Msaa::samples` can
    // just cast `self`.
    /// No multisampling (one sample per pixel).
    Off = 1,
    /// 2x multisampling.
    Sample2 = 2,
    /// 4x multisampling (the default).
    #[default]
    Sample4 = 4,
    /// 8x multisampling.
    Sample8 = 8,
}
184
185impl Msaa {
186 #[inline]
187 pub fn samples(&self) -> u32 {
188 *self as u32
189 }
190
191 pub fn from_samples(samples: u32) -> Self {
192 match samples {
193 1 => Msaa::Off,
194 2 => Msaa::Sample2,
195 4 => Msaa::Sample4,
196 8 => Msaa::Sample8,
197 _ => panic!("Unsupported MSAA sample count: {}", samples),
198 }
199 }
200}
201
/// An identifier for a view that is stable across frames.
///
/// We can't use [`Entity`] for this because render world entities aren't
/// stable, and we can't use just [`MainEntity`] because some main world views
/// extract to multiple render world views. For example, a directional light
/// extracts to one render world view per cascade, and a point light extracts to
/// one render world view per cubemap face. So we pair the main entity with an
/// *auxiliary entity* and a *subview index*, which *together* uniquely identify
/// a view in the render world in a way that's stable from frame to frame.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct RetainedViewEntity {
    /// The main entity that this view corresponds to.
    pub main_entity: MainEntity,

    /// Another entity associated with the view entity.
    ///
    /// This is currently used for shadow cascades. If there are multiple
    /// cameras, each camera needs to have its own set of shadow cascades. Thus
    /// the light and subview index aren't themselves enough to uniquely
    /// identify a shadow cascade: we need the camera that the cascade is
    /// associated with as well. This entity stores that camera.
    ///
    /// If not present, this will be `MainEntity(Entity::PLACEHOLDER)`.
    pub auxiliary_entity: MainEntity,

    /// The index of the view corresponding to the entity.
    ///
    /// For example, for point lights that cast shadows, this is the index of
    /// the cubemap face (0 through 5 inclusive). For directional lights, this
    /// is the index of the cascade.
    pub subview_index: u32,
}
234
235impl RetainedViewEntity {
236 /// Creates a new [`RetainedViewEntity`] from the given main world entity,
237 /// auxiliary main world entity, and subview index.
238 ///
239 /// See [`RetainedViewEntity::subview_index`] for an explanation of what
240 /// `auxiliary_entity` and `subview_index` are.
241 pub fn new(
242 main_entity: MainEntity,
243 auxiliary_entity: Option<MainEntity>,
244 subview_index: u32,
245 ) -> Self {
246 Self {
247 main_entity,
248 auxiliary_entity: auxiliary_entity.unwrap_or(Entity::PLACEHOLDER.into()),
249 subview_index,
250 }
251 }
252}
253
/// Describes a camera in the render world.
///
/// Each entity in the main world can potentially extract to multiple subviews,
/// each of which has a [`RetainedViewEntity::subview_index`]. For instance, 3D
/// cameras extract to both a 3D camera subview with index 0 and a special UI
/// subview with index 1. Likewise, point lights with shadows extract to 6
/// subviews, one for each side of the shadow cubemap.
#[derive(Component)]
pub struct ExtractedView {
    /// The entity in the main world corresponding to this render world view.
    pub retained_view_entity: RetainedViewEntity,
    /// Typically a right-handed projection matrix, one of either:
    ///
    /// Perspective (infinite reverse z)
    /// ```text
    /// f = 1 / tan(fov_y_radians / 2)
    ///
    /// ⎡ f / aspect  0   0    0 ⎤
    /// ⎢          0  f   0    0 ⎥
    /// ⎢          0  0   0   -1 ⎥
    /// ⎣          0  0  near  0 ⎦
    /// ```
    ///
    /// Orthographic
    /// ```text
    /// w = right - left
    /// h = top - bottom
    /// d = near - far
    /// cw = -right - left
    /// ch = -top - bottom
    ///
    /// ⎡  2 / w       0         0     0 ⎤
    /// ⎢      0   2 / h         0     0 ⎥
    /// ⎢      0       0     1 / d     0 ⎥
    /// ⎣ cw / w  ch / h  near / d     1 ⎦
    /// ```
    ///
    /// `clip_from_view[3][3] == 1.0` is the standard way to check if a projection is orthographic
    ///
    /// Custom projections are also possible however.
    pub clip_from_view: Mat4,
    /// The transform of the view in the world.
    pub world_from_view: GlobalTransform,
    /// The view-projection matrix. When provided it is used instead of deriving it from
    /// `projection` and `transform` fields, which can be helpful in cases where numerical
    /// stability matters and there is a more direct way to derive the view-projection matrix.
    pub clip_from_world: Option<Mat4>,
    /// Whether this view renders in high dynamic range.
    pub hdr: bool,
    /// The viewport in physical pixels: `uvec4(origin.x, origin.y, width, height)`.
    pub viewport: UVec4,
    /// The color grading settings extracted for this view.
    pub color_grading: ColorGrading,
}
305
306impl ExtractedView {
307 /// Creates a 3D rangefinder for a view
308 pub fn rangefinder3d(&self) -> ViewRangefinder3d {
309 ViewRangefinder3d::from_world_from_view(&self.world_from_view.compute_matrix())
310 }
311}
312
/// Configures filmic color grading parameters to adjust the image appearance.
///
/// Color grading is applied just before tonemapping for a given
/// [`Camera`](crate::camera::Camera) entity, with the sole exception of the
/// `post_saturation` value in [`ColorGradingGlobal`], which is applied after
/// tonemapping.
///
/// To give every tonal section the same values, see
/// [`ColorGrading::with_identical_sections`].
#[derive(Component, Reflect, Debug, Default, Clone)]
#[reflect(Component, Default, Debug, Clone)]
pub struct ColorGrading {
    /// Filmic color grading values applied to the image as a whole (as opposed
    /// to individual sections, like shadows and highlights).
    pub global: ColorGradingGlobal,

    /// Color grading values that are applied to the darker parts of the image.
    ///
    /// The cutoff points can be customized with the
    /// [`ColorGradingGlobal::midtones_range`] field.
    pub shadows: ColorGradingSection,

    /// Color grading values that are applied to the parts of the image with
    /// intermediate brightness.
    ///
    /// The cutoff points can be customized with the
    /// [`ColorGradingGlobal::midtones_range`] field.
    pub midtones: ColorGradingSection,

    /// Color grading values that are applied to the lighter parts of the image.
    ///
    /// The cutoff points can be customized with the
    /// [`ColorGradingGlobal::midtones_range`] field.
    pub highlights: ColorGradingSection,
}
345
/// Filmic color grading values applied to the image as a whole (as opposed to
/// individual sections, like shadows and highlights).
///
/// The [`Default`] impl yields neutral values that leave the image unchanged.
#[derive(Clone, Debug, Reflect)]
#[reflect(Default, Clone)]
pub struct ColorGradingGlobal {
    /// Exposure value (EV) offset, measured in stops.
    pub exposure: f32,

    /// An adjustment made to the [CIE 1931] chromaticity *x* value.
    ///
    /// Positive values make the colors redder. Negative values make the colors
    /// bluer. This has no effect on luminance (brightness).
    ///
    /// [CIE 1931]: https://en.wikipedia.org/wiki/CIE_1931_color_space#CIE_xy_chromaticity_diagram_and_the_CIE_xyY_color_space
    pub temperature: f32,

    /// An adjustment made to the [CIE 1931] chromaticity *y* value.
    ///
    /// Positive values make the colors more magenta. Negative values make the
    /// colors greener. This has no effect on luminance (brightness).
    ///
    /// [CIE 1931]: https://en.wikipedia.org/wiki/CIE_1931_color_space#CIE_xy_chromaticity_diagram_and_the_CIE_xyY_color_space
    pub tint: f32,

    /// An adjustment to the [hue], in radians.
    ///
    /// Adjusting this value changes the perceived colors in the image: red to
    /// yellow to green to blue, etc. It has no effect on the saturation or
    /// brightness of the colors.
    ///
    /// [hue]: https://en.wikipedia.org/wiki/HSL_and_HSV#Formal_derivation
    pub hue: f32,

    /// Saturation adjustment applied after tonemapping.
    /// Values below 1.0 desaturate, with a value of 0.0 resulting in a grayscale image
    /// with luminance defined by ITU-R BT.709
    /// Values above 1.0 increase saturation.
    pub post_saturation: f32,

    /// The luminance (brightness) ranges that are considered part of the
    /// "midtones" of the image.
    ///
    /// This affects which [`ColorGradingSection`]s apply to which colors. Note
    /// that the sections smoothly blend into one another, to avoid abrupt
    /// transitions.
    ///
    /// The default value is 0.2 to 0.7.
    pub midtones_range: Range<f32>,
}
395
/// The [`ColorGrading`] structure, packed into the most efficient form for the
/// GPU.
///
/// The per-section `Vec3` fields pack values in (shadows, midtones,
/// highlights) order; see the `From<ColorGrading>` impl in this file. Field
/// order determines the layout generated by `ShaderType`, so it is presumably
/// mirrored by the matching struct in `view.wgsl` — confirm before reordering.
#[derive(Clone, Copy, Debug, ShaderType)]
pub struct ColorGradingUniform {
    /// White-balance matrix combining RGB → LMS → corrected LMS → RGB.
    pub balance: Mat3,
    pub saturation: Vec3,
    pub contrast: Vec3,
    pub gamma: Vec3,
    pub gain: Vec3,
    pub lift: Vec3,
    /// (start, end) of [`ColorGradingGlobal::midtones_range`].
    pub midtone_range: Vec2,
    pub exposure: f32,
    pub hue: f32,
    pub post_saturation: f32,
}
411
/// A section of color grading values that can be selectively applied to
/// shadows, midtones, and highlights.
///
/// The [`Default`] impl yields the neutral (identity) settings for every field.
#[derive(Reflect, Debug, Copy, Clone, PartialEq)]
#[reflect(Clone, PartialEq)]
pub struct ColorGradingSection {
    /// Values below 1.0 desaturate, with a value of 0.0 resulting in a grayscale image
    /// with luminance defined by ITU-R BT.709.
    /// Values above 1.0 increase saturation.
    pub saturation: f32,

    /// Adjusts the range of colors.
    ///
    /// A value of 1.0 applies no changes. Values below 1.0 move the colors more
    /// toward a neutral gray. Values above 1.0 spread the colors out away from
    /// the neutral gray.
    pub contrast: f32,

    /// A nonlinear luminance adjustment, mainly affecting the high end of the
    /// range.
    ///
    /// This is the *n* exponent in the standard [ASC CDL] formula for color
    /// correction:
    ///
    /// ```text
    /// out = (i × s + o)ⁿ
    /// ```
    ///
    /// [ASC CDL]: https://en.wikipedia.org/wiki/ASC_CDL#Combined_Function
    pub gamma: f32,

    /// A linear luminance adjustment, mainly affecting the middle part of the
    /// range.
    ///
    /// This is the *s* factor in the standard [ASC CDL] formula for color
    /// correction:
    ///
    /// ```text
    /// out = (i × s + o)ⁿ
    /// ```
    ///
    /// [ASC CDL]: https://en.wikipedia.org/wiki/ASC_CDL#Combined_Function
    pub gain: f32,

    /// A fixed luminance adjustment, mainly affecting the lower part of the
    /// range.
    ///
    /// This is the *o* term in the standard [ASC CDL] formula for color
    /// correction:
    ///
    /// ```text
    /// out = (i × s + o)ⁿ
    /// ```
    ///
    /// [ASC CDL]: https://en.wikipedia.org/wiki/ASC_CDL#Combined_Function
    pub lift: f32,
}
468
469impl Default for ColorGradingGlobal {
470 fn default() -> Self {
471 Self {
472 exposure: 0.0,
473 temperature: 0.0,
474 tint: 0.0,
475 hue: 0.0,
476 post_saturation: 1.0,
477 midtones_range: 0.2..0.7,
478 }
479 }
480}
481
482impl Default for ColorGradingSection {
483 fn default() -> Self {
484 Self {
485 saturation: 1.0,
486 contrast: 1.0,
487 gamma: 1.0,
488 gain: 1.0,
489 lift: 0.0,
490 }
491 }
492}
493
494impl ColorGrading {
495 /// Creates a new [`ColorGrading`] instance in which shadows, midtones, and
496 /// highlights all have the same set of color grading values.
497 pub fn with_identical_sections(
498 global: ColorGradingGlobal,
499 section: ColorGradingSection,
500 ) -> ColorGrading {
501 ColorGrading {
502 global,
503 highlights: section,
504 midtones: section,
505 shadows: section,
506 }
507 }
508
509 /// Returns an iterator that visits the shadows, midtones, and highlights
510 /// sections, in that order.
511 pub fn all_sections(&self) -> impl Iterator<Item = &ColorGradingSection> {
512 [&self.shadows, &self.midtones, &self.highlights].into_iter()
513 }
514
515 /// Applies the given mutating function to the shadows, midtones, and
516 /// highlights sections, in that order.
517 ///
518 /// Returns an array composed of the results of such evaluation, in that
519 /// order.
520 pub fn all_sections_mut(&mut self) -> impl Iterator<Item = &mut ColorGradingSection> {
521 [&mut self.shadows, &mut self.midtones, &mut self.highlights].into_iter()
522 }
523}
524
/// GPU-side uniform data for a single view, written each frame by
/// [`prepare_view_uniforms`].
#[derive(Clone, ShaderType)]
pub struct ViewUniform {
    pub clip_from_world: Mat4,
    /// `clip_from_world` computed without any temporal jitter applied.
    pub unjittered_clip_from_world: Mat4,
    pub world_from_clip: Mat4,
    pub world_from_view: Mat4,
    pub view_from_world: Mat4,
    /// Typically a right-handed projection matrix, one of either:
    ///
    /// Perspective (infinite reverse z)
    /// ```text
    /// f = 1 / tan(fov_y_radians / 2)
    ///
    /// ⎡ f / aspect  0   0    0 ⎤
    /// ⎢          0  f   0    0 ⎥
    /// ⎢          0  0   0   -1 ⎥
    /// ⎣          0  0  near  0 ⎦
    /// ```
    ///
    /// Orthographic
    /// ```text
    /// w = right - left
    /// h = top - bottom
    /// d = near - far
    /// cw = -right - left
    /// ch = -top - bottom
    ///
    /// ⎡  2 / w       0         0     0 ⎤
    /// ⎢      0   2 / h         0     0 ⎥
    /// ⎢      0       0     1 / d     0 ⎥
    /// ⎣ cw / w  ch / h  near / d     1 ⎦
    /// ```
    ///
    /// `clip_from_view[3][3] == 1.0` is the standard way to check if a projection is orthographic
    ///
    /// Custom projections are also possible however.
    pub clip_from_view: Mat4,
    pub view_from_clip: Mat4,
    /// The view's world-space translation.
    pub world_position: Vec3,
    pub exposure: f32,
    /// viewport(x_origin, y_origin, width, height)
    pub viewport: Vec4,
    /// 6 world-space half spaces (normal: vec3, distance: f32) ordered left, right, top, bottom, near, far.
    /// The normal vectors point towards the interior of the frustum.
    /// A half space contains `p` if `normal.dot(p) + distance > 0.`
    pub frustum: [Vec4; 6],
    pub color_grading: ColorGradingUniform,
    pub mip_bias: f32,
    pub frame_count: u32,
}
575
/// The dynamic uniform buffer holding one [`ViewUniform`] per view for the
/// current frame.
#[derive(Resource)]
pub struct ViewUniforms {
    pub uniforms: DynamicUniformBuffer<ViewUniform>,
}
580
581impl FromWorld for ViewUniforms {
582 fn from_world(world: &mut World) -> Self {
583 let mut uniforms = DynamicUniformBuffer::default();
584 uniforms.set_label(Some("view_uniforms_buffer"));
585
586 let render_device = world.resource::<RenderDevice>();
587 if render_device.limits().max_storage_buffers_per_shader_stage > 0 {
588 uniforms.add_usages(BufferUsages::STORAGE);
589 }
590
591 Self { uniforms }
592 }
593}
594
/// Per-view component holding the dynamic offset of that view's
/// [`ViewUniform`] within the shared [`ViewUniforms`] buffer.
#[derive(Component)]
pub struct ViewUniformOffset {
    /// Byte offset into the dynamic uniform buffer.
    pub offset: u32,
}
599
/// The textures a view renders into: a double-buffered pair of "main"
/// textures used to ping-pong between post-processing passes, plus the final
/// output attachment.
#[derive(Component)]
pub struct ViewTarget {
    main_textures: MainTargetTextures,
    /// The format of both main textures.
    main_texture_format: TextureFormat,
    /// 0 represents `main_textures.a`, 1 represents `main_textures.b`
    /// This is shared across view targets with the same render target
    main_texture: Arc<AtomicUsize>,
    /// The attachment the view's final output is written to.
    out_texture: OutputColorAttachment,
}
609
/// Contains [`OutputColorAttachment`] used for each target present on any view in the current
/// frame, after being prepared by [`prepare_view_attachments`]. Users that want to override
/// the default output color attachment for a specific target can do so by adding a
/// [`OutputColorAttachment`] to this resource before [`prepare_view_targets`] is called.
///
/// NOTE(review): cleared by the `clear_view_attachments` system each frame —
/// see the system registration in [`ViewPlugin`].
#[derive(Resource, Default, Deref, DerefMut)]
pub struct ViewTargetAttachments(HashMap<NormalizedRenderTarget, OutputColorAttachment>);
616
/// The source/destination texture pair handed out by
/// [`ViewTarget::post_process_write`].
///
/// The caller must copy (with or without modifications) `source` into
/// `destination`; see [`ViewTarget::post_process_write`] for details.
pub struct PostProcessWrite<'a> {
    pub source: &'a TextureView,
    pub source_texture: &'a Texture,
    pub destination: &'a TextureView,
    pub destination_texture: &'a Texture,
}
623
624impl From<ColorGrading> for ColorGradingUniform {
625 fn from(component: ColorGrading) -> Self {
626 // Compute the balance matrix that will be used to apply the white
627 // balance adjustment to an RGB color. Our general approach will be to
628 // convert both the color and the developer-supplied white point to the
629 // LMS color space, apply the conversion, and then convert back.
630 //
631 // First, we start with the CIE 1931 *xy* values of the standard D65
632 // illuminant:
633 // <https://en.wikipedia.org/wiki/Standard_illuminant#D65_values>
634 //
635 // We then adjust them based on the developer's requested white balance.
636 let white_point_xy = D65_XY + vec2(-component.global.temperature, component.global.tint);
637
638 // Convert the white point from CIE 1931 *xy* to LMS. First, we convert to XYZ:
639 //
640 // Y Y
641 // Y = 1 X = ─ x Z = ─ (1 - x - y)
642 // y y
643 //
644 // Then we convert from XYZ to LMS color space, using the CAM16 matrix
645 // from <https://en.wikipedia.org/wiki/LMS_color_space#Later_CIECAMs>:
646 //
647 // ⎡ L ⎤ ⎡ 0.401 0.650 -0.051 ⎤ ⎡ X ⎤
648 // ⎢ M ⎥ = ⎢ -0.250 1.204 0.046 ⎥ ⎢ Y ⎥
649 // ⎣ S ⎦ ⎣ -0.002 0.049 0.953 ⎦ ⎣ Z ⎦
650 //
651 // The following formula is just a simplification of the above.
652
653 let white_point_lms = vec3(0.701634, 1.15856, -0.904175)
654 + (vec3(-0.051461, 0.045854, 0.953127)
655 + vec3(0.452749, -0.296122, -0.955206) * white_point_xy.x)
656 / white_point_xy.y;
657
658 // Now that we're in LMS space, perform the white point scaling.
659 let white_point_adjustment = Mat3::from_diagonal(D65_LMS / white_point_lms);
660
661 // Finally, combine the RGB → LMS → corrected LMS → corrected RGB
662 // pipeline into a single 3×3 matrix.
663 let balance = LMS_TO_RGB * white_point_adjustment * RGB_TO_LMS;
664
665 Self {
666 balance,
667 saturation: vec3(
668 component.shadows.saturation,
669 component.midtones.saturation,
670 component.highlights.saturation,
671 ),
672 contrast: vec3(
673 component.shadows.contrast,
674 component.midtones.contrast,
675 component.highlights.contrast,
676 ),
677 gamma: vec3(
678 component.shadows.gamma,
679 component.midtones.gamma,
680 component.highlights.gamma,
681 ),
682 gain: vec3(
683 component.shadows.gain,
684 component.midtones.gain,
685 component.highlights.gain,
686 ),
687 lift: vec3(
688 component.shadows.lift,
689 component.midtones.lift,
690 component.highlights.lift,
691 ),
692 midtone_range: vec2(
693 component.global.midtones_range.start,
694 component.global.midtones_range.end,
695 ),
696 exposure: component.global.exposure,
697 hue: component.global.hue,
698 post_saturation: component.global.post_saturation,
699 }
700 }
701}
702
/// Add this component to a camera to disable *indirect mode*.
///
/// Indirect mode, automatically enabled on supported hardware, allows Bevy to
/// offload transform and cull operations to the GPU, reducing CPU overhead.
/// Doing this, however, reduces the amount of control that your app has over
/// instancing decisions. In certain circumstances, you may want to disable
/// indirect drawing so that your app can manually instance meshes as it sees
/// fit. See the `custom_shader_instancing` example.
///
/// The vast majority of applications will not need to use this component, as it
/// generally reduces rendering performance.
///
/// Note: This component should only be added when initially spawning a camera. Adding
/// or removing after spawn can result in unspecified behavior.
#[derive(Component, Default)]
pub struct NoIndirectDrawing;

/// Marker component that opts a camera out of CPU-side culling.
///
/// NOTE(review): inferred from the name; confirm the exact semantics against
/// the visibility systems that read this component.
#[derive(Component, Default)]
pub struct NoCpuCulling;
722
723impl ViewTarget {
724 pub const TEXTURE_FORMAT_HDR: TextureFormat = TextureFormat::Rgba16Float;
725
726 /// Retrieve this target's main texture's color attachment.
727 pub fn get_color_attachment(&self) -> RenderPassColorAttachment {
728 if self.main_texture.load(Ordering::SeqCst) == 0 {
729 self.main_textures.a.get_attachment()
730 } else {
731 self.main_textures.b.get_attachment()
732 }
733 }
734
735 /// Retrieve this target's "unsampled" main texture's color attachment.
736 pub fn get_unsampled_color_attachment(&self) -> RenderPassColorAttachment {
737 if self.main_texture.load(Ordering::SeqCst) == 0 {
738 self.main_textures.a.get_unsampled_attachment()
739 } else {
740 self.main_textures.b.get_unsampled_attachment()
741 }
742 }
743
744 /// The "main" unsampled texture.
745 pub fn main_texture(&self) -> &Texture {
746 if self.main_texture.load(Ordering::SeqCst) == 0 {
747 &self.main_textures.a.texture.texture
748 } else {
749 &self.main_textures.b.texture.texture
750 }
751 }
752
753 /// The _other_ "main" unsampled texture.
754 /// In most cases you should use [`Self::main_texture`] instead and never this.
755 /// The textures will naturally be swapped when [`Self::post_process_write`] is called.
756 ///
757 /// A use case for this is to be able to prepare a bind group for all main textures
758 /// ahead of time.
759 pub fn main_texture_other(&self) -> &Texture {
760 if self.main_texture.load(Ordering::SeqCst) == 0 {
761 &self.main_textures.b.texture.texture
762 } else {
763 &self.main_textures.a.texture.texture
764 }
765 }
766
767 /// The "main" unsampled texture.
768 pub fn main_texture_view(&self) -> &TextureView {
769 if self.main_texture.load(Ordering::SeqCst) == 0 {
770 &self.main_textures.a.texture.default_view
771 } else {
772 &self.main_textures.b.texture.default_view
773 }
774 }
775
776 /// The _other_ "main" unsampled texture view.
777 /// In most cases you should use [`Self::main_texture_view`] instead and never this.
778 /// The textures will naturally be swapped when [`Self::post_process_write`] is called.
779 ///
780 /// A use case for this is to be able to prepare a bind group for all main textures
781 /// ahead of time.
782 pub fn main_texture_other_view(&self) -> &TextureView {
783 if self.main_texture.load(Ordering::SeqCst) == 0 {
784 &self.main_textures.b.texture.default_view
785 } else {
786 &self.main_textures.a.texture.default_view
787 }
788 }
789
790 /// The "main" sampled texture.
791 pub fn sampled_main_texture(&self) -> Option<&Texture> {
792 self.main_textures
793 .a
794 .resolve_target
795 .as_ref()
796 .map(|sampled| &sampled.texture)
797 }
798
799 /// The "main" sampled texture view.
800 pub fn sampled_main_texture_view(&self) -> Option<&TextureView> {
801 self.main_textures
802 .a
803 .resolve_target
804 .as_ref()
805 .map(|sampled| &sampled.default_view)
806 }
807
808 #[inline]
809 pub fn main_texture_format(&self) -> TextureFormat {
810 self.main_texture_format
811 }
812
813 /// Returns `true` if and only if the main texture is [`Self::TEXTURE_FORMAT_HDR`]
814 #[inline]
815 pub fn is_hdr(&self) -> bool {
816 self.main_texture_format == ViewTarget::TEXTURE_FORMAT_HDR
817 }
818
819 /// The final texture this view will render to.
820 #[inline]
821 pub fn out_texture(&self) -> &TextureView {
822 &self.out_texture.view
823 }
824
825 pub fn out_texture_color_attachment(
826 &self,
827 clear_color: Option<LinearRgba>,
828 ) -> RenderPassColorAttachment {
829 self.out_texture.get_attachment(clear_color)
830 }
831
832 /// The format of the final texture this view will render to
833 #[inline]
834 pub fn out_texture_format(&self) -> TextureFormat {
835 self.out_texture.format
836 }
837
838 /// This will start a new "post process write", which assumes that the caller
839 /// will write the [`PostProcessWrite`]'s `source` to the `destination`.
840 ///
841 /// `source` is the "current" main texture. This will internally flip this
842 /// [`ViewTarget`]'s main texture to the `destination` texture, so the caller
843 /// _must_ ensure `source` is copied to `destination`, with or without modifications.
844 /// Failing to do so will cause the current main texture information to be lost.
845 pub fn post_process_write(&self) -> PostProcessWrite {
846 let old_is_a_main_texture = self.main_texture.fetch_xor(1, Ordering::SeqCst);
847 // if the old main texture is a, then the post processing must write from a to b
848 if old_is_a_main_texture == 0 {
849 self.main_textures.b.mark_as_cleared();
850 PostProcessWrite {
851 source: &self.main_textures.a.texture.default_view,
852 source_texture: &self.main_textures.a.texture.texture,
853 destination: &self.main_textures.b.texture.default_view,
854 destination_texture: &self.main_textures.b.texture.texture,
855 }
856 } else {
857 self.main_textures.a.mark_as_cleared();
858 PostProcessWrite {
859 source: &self.main_textures.b.texture.default_view,
860 source_texture: &self.main_textures.b.texture.texture,
861 destination: &self.main_textures.a.texture.default_view,
862 destination_texture: &self.main_textures.a.texture.texture,
863 }
864 }
865 }
866}
867
/// The depth texture used by a view, together with its cached depth
/// attachment.
#[derive(Component)]
pub struct ViewDepthTexture {
    /// The underlying GPU depth texture.
    pub texture: Texture,
    /// Wraps the texture's default view and the clear value used on load.
    attachment: DepthAttachment,
}
873
874impl ViewDepthTexture {
875 pub fn new(texture: CachedTexture, clear_value: Option<f32>) -> Self {
876 Self {
877 texture: texture.texture,
878 attachment: DepthAttachment::new(texture.default_view, clear_value),
879 }
880 }
881
882 pub fn get_attachment(&self, store: StoreOp) -> RenderPassDepthStencilAttachment {
883 self.attachment.get_attachment(store)
884 }
885
886 pub fn view(&self) -> &TextureView {
887 &self.attachment.view
888 }
889}
890
891pub fn prepare_view_uniforms(
892 mut commands: Commands,
893 render_device: Res<RenderDevice>,
894 render_queue: Res<RenderQueue>,
895 mut view_uniforms: ResMut<ViewUniforms>,
896 views: Query<(
897 Entity,
898 Option<&ExtractedCamera>,
899 &ExtractedView,
900 Option<&Frustum>,
901 Option<&TemporalJitter>,
902 Option<&MipBias>,
903 )>,
904 frame_count: Res<FrameCount>,
905) {
906 let view_iter = views.iter();
907 let view_count = view_iter.len();
908 let Some(mut writer) =
909 view_uniforms
910 .uniforms
911 .get_writer(view_count, &render_device, &render_queue)
912 else {
913 return;
914 };
915 for (entity, extracted_camera, extracted_view, frustum, temporal_jitter, mip_bias) in &views {
916 let viewport = extracted_view.viewport.as_vec4();
917 let unjittered_projection = extracted_view.clip_from_view;
918 let mut clip_from_view = unjittered_projection;
919
920 if let Some(temporal_jitter) = temporal_jitter {
921 temporal_jitter.jitter_projection(&mut clip_from_view, viewport.zw());
922 }
923
924 let view_from_clip = clip_from_view.inverse();
925 let world_from_view = extracted_view.world_from_view.compute_matrix();
926 let view_from_world = world_from_view.inverse();
927
928 let clip_from_world = if temporal_jitter.is_some() {
929 clip_from_view * view_from_world
930 } else {
931 extracted_view
932 .clip_from_world
933 .unwrap_or_else(|| clip_from_view * view_from_world)
934 };
935
936 // Map Frustum type to shader array<vec4<f32>, 6>
937 let frustum = frustum
938 .map(|frustum| frustum.half_spaces.map(|h| h.normal_d()))
939 .unwrap_or([Vec4::ZERO; 6]);
940
941 let view_uniforms = ViewUniformOffset {
942 offset: writer.write(&ViewUniform {
943 clip_from_world,
944 unjittered_clip_from_world: unjittered_projection * view_from_world,
945 world_from_clip: world_from_view * view_from_clip,
946 world_from_view,
947 view_from_world,
948 clip_from_view,
949 view_from_clip,
950 world_position: extracted_view.world_from_view.translation(),
951 exposure: extracted_camera
952 .map(|c| c.exposure)
953 .unwrap_or_else(|| Exposure::default().exposure()),
954 viewport,
955 frustum,
956 color_grading: extracted_view.color_grading.clone().into(),
957 mip_bias: mip_bias.unwrap_or(&MipBias(0.0)).0,
958 frame_count: frame_count.0,
959 }),
960 };
961
962 commands.entity(entity).insert(view_uniforms);
963 }
964}
965
/// The pair of ping-pong main color attachments for a single render target,
/// plus the shared index recording which one is currently "main".
#[derive(Clone)]
struct MainTargetTextures {
    a: ColorAttachment,
    b: ColorAttachment,
    /// 0 represents `main_textures.a`, 1 represents `main_textures.b`
    /// This is shared across view targets with the same render target
    main_texture: Arc<AtomicUsize>,
}
974
975/// Prepares the view target [`OutputColorAttachment`] for each view in the current frame.
976pub fn prepare_view_attachments(
977 windows: Res<ExtractedWindows>,
978 images: Res<RenderAssets<GpuImage>>,
979 manual_texture_views: Res<ManualTextureViews>,
980 cameras: Query<&ExtractedCamera>,
981 mut view_target_attachments: ResMut<ViewTargetAttachments>,
982) {
983 for camera in cameras.iter() {
984 let Some(target) = &camera.target else {
985 continue;
986 };
987
988 match view_target_attachments.entry(target.clone()) {
989 Entry::Occupied(_) => {}
990 Entry::Vacant(entry) => {
991 let Some(attachment) = target
992 .get_texture_view(&windows, &images, &manual_texture_views)
993 .cloned()
994 .zip(target.get_texture_format(&windows, &images, &manual_texture_views))
995 .map(|(view, format)| {
996 OutputColorAttachment::new(view.clone(), format.add_srgb_suffix())
997 })
998 else {
999 continue;
1000 };
1001 entry.insert(attachment);
1002 }
1003 };
1004 }
1005}
1006
1007/// Clears the view target [`OutputColorAttachment`]s.
1008pub fn clear_view_attachments(mut view_target_attachments: ResMut<ViewTargetAttachments>) {
1009 view_target_attachments.clear();
1010}
1011
/// Builds and inserts a [`ViewTarget`] for every camera whose render target has
/// a prepared [`OutputColorAttachment`], allocating the double-buffered main
/// color textures (and an optional multisampled texture) via the [`TextureCache`].
pub fn prepare_view_targets(
    mut commands: Commands,
    clear_color_global: Res<ClearColor>,
    render_device: Res<RenderDevice>,
    mut texture_cache: ResMut<TextureCache>,
    cameras: Query<(
        Entity,
        &ExtractedCamera,
        &ExtractedView,
        &CameraMainTextureUsages,
        &Msaa,
    )>,
    view_target_attachments: Res<ViewTargetAttachments>,
) {
    // Cameras sharing the same (target, usage, hdr, msaa) tuple share one set
    // of textures and one `main_texture` index for this frame.
    let mut textures = <HashMap<_, _>>::default();
    for (entity, camera, view, texture_usage, msaa) in cameras.iter() {
        // Skip cameras with no target or no known physical size.
        let (Some(target_size), Some(target)) = (camera.physical_target_size, &camera.target)
        else {
            continue;
        };

        // `prepare_view_attachments` must have produced an output attachment
        // for this target; otherwise the view cannot be rendered this frame.
        let Some(out_attachment) = view_target_attachments.get(target) else {
            continue;
        };

        let size = Extent3d {
            width: target_size.x,
            height: target_size.y,
            depth_or_array_layers: 1,
        };

        let main_texture_format = if view.hdr {
            ViewTarget::TEXTURE_FORMAT_HDR
        } else {
            TextureFormat::bevy_default()
        };

        // `ClearColorConfig::Default` (the `_` arm) falls back to the global
        // clear color resource.
        let clear_color = match camera.clear_color {
            ClearColorConfig::Custom(color) => Some(color),
            ClearColorConfig::None => None,
            _ => Some(clear_color_global.0),
        };

        let (a, b, sampled, main_texture) = textures
            .entry((camera.target.clone(), texture_usage.0, view.hdr, msaa))
            .or_insert_with(|| {
                // Base descriptor reused (via struct update) for both ping-pong
                // textures; only the label differs.
                let descriptor = TextureDescriptor {
                    label: None,
                    size,
                    mip_level_count: 1,
                    sample_count: 1,
                    dimension: TextureDimension::D2,
                    format: main_texture_format,
                    usage: texture_usage.0,
                    // Allow creating sRGB-suffixed views of non-sRGB formats.
                    view_formats: match main_texture_format {
                        TextureFormat::Bgra8Unorm => &[TextureFormat::Bgra8UnormSrgb],
                        TextureFormat::Rgba8Unorm => &[TextureFormat::Rgba8UnormSrgb],
                        _ => &[],
                    },
                };
                let a = texture_cache.get(
                    &render_device,
                    TextureDescriptor {
                        label: Some("main_texture_a"),
                        ..descriptor
                    },
                );
                let b = texture_cache.get(
                    &render_device,
                    TextureDescriptor {
                        label: Some("main_texture_b"),
                        ..descriptor
                    },
                );
                // Only allocate a multisampled texture when MSAA is enabled;
                // note it is restricted to RENDER_ATTACHMENT usage.
                let sampled = if msaa.samples() > 1 {
                    let sampled = texture_cache.get(
                        &render_device,
                        TextureDescriptor {
                            label: Some("main_texture_sampled"),
                            size,
                            mip_level_count: 1,
                            sample_count: msaa.samples(),
                            dimension: TextureDimension::D2,
                            format: main_texture_format,
                            usage: TextureUsages::RENDER_ATTACHMENT,
                            view_formats: descriptor.view_formats,
                        },
                    );
                    Some(sampled)
                } else {
                    None
                };
                // Start on texture 0 (`a`); shared across all views of this
                // target so ping-ponging stays in sync.
                let main_texture = Arc::new(AtomicUsize::new(0));
                (a, b, sampled, main_texture)
            });

        let converted_clear_color = clear_color.map(Into::into);

        // NOTE(review): the `sampled` texture is passed alongside both `a` and
        // `b` — presumably as the MSAA resolve pair; confirm against
        // `ColorAttachment::new`'s contract.
        let main_textures = MainTargetTextures {
            a: ColorAttachment::new(a.clone(), sampled.clone(), converted_clear_color),
            b: ColorAttachment::new(b.clone(), sampled.clone(), converted_clear_color),
            main_texture: main_texture.clone(),
        };

        commands.entity(entity).insert(ViewTarget {
            main_texture: main_textures.main_texture.clone(),
            main_textures,
            main_texture_format,
            out_texture: out_attachment.clone(),
        });
    }
}