bevy_render/view/
mod.rs

1pub mod visibility;
2pub mod window;
3
4use bevy_asset::{load_internal_asset, weak_handle, Handle};
5use bevy_diagnostic::FrameCount;
6pub use visibility::*;
7pub use window::*;
8
9use crate::{
10    camera::{
11        CameraMainTextureUsages, ClearColor, ClearColorConfig, Exposure, ExtractedCamera,
12        ManualTextureViews, MipBias, NormalizedRenderTarget, TemporalJitter,
13    },
14    experimental::occlusion_culling::OcclusionCulling,
15    extract_component::ExtractComponentPlugin,
16    prelude::Shader,
17    primitives::Frustum,
18    render_asset::RenderAssets,
19    render_phase::ViewRangefinder3d,
20    render_resource::{DynamicUniformBuffer, ShaderType, Texture, TextureView},
21    renderer::{RenderDevice, RenderQueue},
22    sync_world::MainEntity,
23    texture::{
24        CachedTexture, ColorAttachment, DepthAttachment, GpuImage, OutputColorAttachment,
25        TextureCache,
26    },
27    Render, RenderApp, RenderSet,
28};
29use alloc::sync::Arc;
30use bevy_app::{App, Plugin};
31use bevy_color::LinearRgba;
32use bevy_derive::{Deref, DerefMut};
33use bevy_ecs::prelude::*;
34use bevy_image::BevyDefault as _;
35use bevy_math::{mat3, vec2, vec3, Mat3, Mat4, UVec4, Vec2, Vec3, Vec4, Vec4Swizzles};
36use bevy_platform::collections::{hash_map::Entry, HashMap};
37use bevy_reflect::{std_traits::ReflectDefault, Reflect};
38use bevy_render_macros::ExtractComponent;
39use bevy_transform::components::GlobalTransform;
40use core::{
41    ops::Range,
42    sync::atomic::{AtomicUsize, Ordering},
43};
44use wgpu::{
45    BufferUsages, Extent3d, RenderPassColorAttachment, RenderPassDepthStencilAttachment, StoreOp,
46    TextureDescriptor, TextureDimension, TextureFormat, TextureUsages,
47};
48
/// Weak handle to the shared `view.wgsl` shader type definitions, loaded as an
/// internal asset by [`ViewPlugin`].
pub const VIEW_TYPE_HANDLE: Handle<Shader> = weak_handle!("7234423c-38bb-411c-acec-f67730f6db5b");
50
/// The matrix that converts from the RGB to the LMS color space.
///
/// To derive this, first we convert from RGB to [CIE 1931 XYZ]:
///
/// ```text
/// ⎡ X ⎤   ⎡ 0.490  0.310  0.200 ⎤ ⎡ R ⎤
/// ⎢ Y ⎥ = ⎢ 0.177  0.812  0.011 ⎥ ⎢ G ⎥
/// ⎣ Z ⎦   ⎣ 0.000  0.010  0.990 ⎦ ⎣ B ⎦
/// ```
///
/// Then we convert to LMS according to the [CAM16 standard matrix]:
///
/// ```text
/// ⎡ L ⎤   ⎡  0.401   0.650  -0.051 ⎤ ⎡ X ⎤
/// ⎢ M ⎥ = ⎢ -0.250   1.204   0.046 ⎥ ⎢ Y ⎥
/// ⎣ S ⎦   ⎣ -0.002   0.049   0.953 ⎦ ⎣ Z ⎦
/// ```
///
/// The resulting matrix is just the concatenation of these two matrices, to do
/// the conversion in one step.
///
/// Note: glam's `mat3` constructor takes *column* vectors, so each `vec3`
/// below is one column of the combined matrix.
///
/// [CIE 1931 XYZ]: https://en.wikipedia.org/wiki/CIE_1931_color_space
/// [CAM16 standard matrix]: https://en.wikipedia.org/wiki/LMS_color_space
static RGB_TO_LMS: Mat3 = mat3(
    vec3(0.311692, 0.0905138, 0.00764433),
    vec3(0.652085, 0.901341, 0.0486554),
    vec3(0.0362225, 0.00814478, 0.943700),
);
79
/// The inverse of the [`RGB_TO_LMS`] matrix, converting from the LMS color
/// space back to RGB.
///
/// Used as the final step of the white-balance pipeline in the
/// `From<ColorGrading>` impl for [`ColorGradingUniform`] below.
static LMS_TO_RGB: Mat3 = mat3(
    vec3(4.06305, -0.40791, -0.0118812),
    vec3(-2.93241, 1.40437, -0.0486532),
    vec3(-0.130646, 0.00353630, 1.0605344),
);
87
/// The [CIE 1931] *xy* chromaticity coordinates of the [D65 white point].
///
/// This is the neutral white point that the user's `temperature`/`tint`
/// offsets are applied to when computing the white-balance matrix.
///
/// [CIE 1931]: https://en.wikipedia.org/wiki/CIE_1931_color_space
/// [D65 white point]: https://en.wikipedia.org/wiki/Standard_illuminant#D65_values
static D65_XY: Vec2 = vec2(0.31272, 0.32903);
93
/// The [D65 white point] in [LMS color space].
///
/// Used as the numerator of the white-point scaling performed in LMS space
/// (see the `From<ColorGrading>` impl below).
///
/// [LMS color space]: https://en.wikipedia.org/wiki/LMS_color_space
/// [D65 white point]: https://en.wikipedia.org/wiki/Standard_illuminant#D65_values
static D65_LMS: Vec3 = vec3(0.975538, 1.01648, 1.08475);
99
/// Plugin that registers view-related types and schedules the render-world
/// systems that prepare view attachments, targets, and uniforms.
pub struct ViewPlugin;
101
impl Plugin for ViewPlugin {
    fn build(&self, app: &mut App) {
        // Make the shared `view.wgsl` type definitions available to shaders.
        load_internal_asset!(app, VIEW_TYPE_HANDLE, "view.wgsl", Shader::from_wgsl);

        app.register_type::<InheritedVisibility>()
            .register_type::<ViewVisibility>()
            .register_type::<Msaa>()
            .register_type::<NoFrustumCulling>()
            .register_type::<RenderLayers>()
            .register_type::<Visibility>()
            .register_type::<VisibleEntities>()
            .register_type::<ColorGrading>()
            .register_type::<OcclusionCulling>()
            // NOTE: windows.is_changed() handles cases where a window was resized
            .add_plugins((
                ExtractComponentPlugin::<Msaa>::default(),
                ExtractComponentPlugin::<OcclusionCulling>::default(),
                VisibilityPlugin,
                VisibilityRangePlugin,
            ));

        if let Some(render_app) = app.get_sub_app_mut(RenderApp) {
            render_app.add_systems(
                Render,
                (
                    // `TextureView`s need to be dropped before reconfiguring window surfaces.
                    clear_view_attachments
                        .in_set(RenderSet::ManageViews)
                        .before(create_surfaces),
                    // Attachments must exist before targets are built from them.
                    prepare_view_attachments
                        .in_set(RenderSet::ManageViews)
                        .before(prepare_view_targets)
                        .after(prepare_windows),
                    prepare_view_targets
                        .in_set(RenderSet::ManageViews)
                        .after(prepare_windows)
                        .after(crate::render_asset::prepare_assets::<GpuImage>)
                        .ambiguous_with(crate::camera::sort_cameras), // doesn't use `sorted_camera_index_for_target`
                    prepare_view_uniforms.in_set(RenderSet::PrepareResources),
                ),
            );
        }
    }

    // NOTE(review): these resources are initialized in `finish` rather than
    // `build`, presumably because `ViewUniforms::from_world` needs the
    // `RenderDevice` to exist — confirm against renderer initialization order.
    fn finish(&self, app: &mut App) {
        if let Some(render_app) = app.get_sub_app_mut(RenderApp) {
            render_app
                .init_resource::<ViewUniforms>()
                .init_resource::<ViewTargetAttachments>();
        }
    }
}
154
/// Component for configuring the number of samples for [Multi-Sample Anti-Aliasing](https://en.wikipedia.org/wiki/Multisample_anti-aliasing)
/// for a [`Camera`](crate::camera::Camera).
///
/// Defaults to 4 samples. A higher number of samples results in smoother edges.
///
/// Some advanced rendering features may require that MSAA is disabled.
///
/// Note that the web currently only supports 1 or 4 samples.
#[derive(
    Component,
    Default,
    Clone,
    Copy,
    ExtractComponent,
    Reflect,
    PartialEq,
    PartialOrd,
    Eq,
    Hash,
    Debug,
)]
#[reflect(Component, Default, PartialEq, Hash, Debug)]
// The discriminants are the raw sample counts; `Msaa::samples` relies on this
// by casting `*self as u32` directly.
pub enum Msaa {
    /// No multisampling: one sample per pixel.
    Off = 1,
    /// 2× MSAA.
    Sample2 = 2,
    /// 4× MSAA (the default).
    #[default]
    Sample4 = 4,
    /// 8× MSAA.
    Sample8 = 8,
}
184
185impl Msaa {
186    #[inline]
187    pub fn samples(&self) -> u32 {
188        *self as u32
189    }
190
191    pub fn from_samples(samples: u32) -> Self {
192        match samples {
193            1 => Msaa::Off,
194            2 => Msaa::Sample2,
195            4 => Msaa::Sample4,
196            8 => Msaa::Sample8,
197            _ => panic!("Unsupported MSAA sample count: {}", samples),
198        }
199    }
200}
201
/// An identifier for a view that is stable across frames.
///
/// We can't use [`Entity`] for this because render world entities aren't
/// stable, and we can't use just [`MainEntity`] because some main world views
/// extract to multiple render world views. For example, a directional light
/// extracts to one render world view per cascade, and a point light extracts to
/// one render world view per cubemap face. So we pair the main entity with an
/// *auxiliary entity* and a *subview index*, which *together* uniquely identify
/// a view in the render world in a way that's stable from frame to frame.
///
/// Derives `Eq`/`Hash`, so it can be used directly as a map key for
/// frame-to-frame retained data.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct RetainedViewEntity {
    /// The main entity that this view corresponds to.
    pub main_entity: MainEntity,

    /// Another entity associated with the view entity.
    ///
    /// This is currently used for shadow cascades. If there are multiple
    /// cameras, each camera needs to have its own set of shadow cascades. Thus
    /// the light and subview index aren't themselves enough to uniquely
    /// identify a shadow cascade: we need the camera that the cascade is
    /// associated with as well. This entity stores that camera.
    ///
    /// If not present, this will be `MainEntity(Entity::PLACEHOLDER)`.
    pub auxiliary_entity: MainEntity,

    /// The index of the view corresponding to the entity.
    ///
    /// For example, for point lights that cast shadows, this is the index of
    /// the cubemap face (0 through 5 inclusive). For directional lights, this
    /// is the index of the cascade.
    pub subview_index: u32,
}
234
235impl RetainedViewEntity {
236    /// Creates a new [`RetainedViewEntity`] from the given main world entity,
237    /// auxiliary main world entity, and subview index.
238    ///
239    /// See [`RetainedViewEntity::subview_index`] for an explanation of what
240    /// `auxiliary_entity` and `subview_index` are.
241    pub fn new(
242        main_entity: MainEntity,
243        auxiliary_entity: Option<MainEntity>,
244        subview_index: u32,
245    ) -> Self {
246        Self {
247            main_entity,
248            auxiliary_entity: auxiliary_entity.unwrap_or(Entity::PLACEHOLDER.into()),
249            subview_index,
250        }
251    }
252}
253
/// Describes a camera in the render world.
///
/// Each entity in the main world can potentially extract to multiple subviews,
/// each of which has a [`RetainedViewEntity::subview_index`]. For instance, 3D
/// cameras extract to both a 3D camera subview with index 0 and a special UI
/// subview with index 1. Likewise, point lights with shadows extract to 6
/// subviews, one for each side of the shadow cubemap.
#[derive(Component)]
pub struct ExtractedView {
    /// The entity in the main world corresponding to this render world view.
    pub retained_view_entity: RetainedViewEntity,
    /// Typically a column-major right-handed projection matrix, one of either:
    ///
    /// Perspective (infinite reverse z)
    /// ```text
    /// f = 1 / tan(fov_y_radians / 2)
    ///
    /// ⎡ f / aspect  0   0     0 ⎤
    /// ⎢          0  f   0     0 ⎥
    /// ⎢          0  0   0  near ⎥
    /// ⎣          0  0  -1     0 ⎦
    /// ```
    ///
    /// Orthographic
    /// ```text
    /// w = right - left
    /// h = top - bottom
    /// d = far - near
    /// cw = -right - left
    /// ch = -top - bottom
    ///
    /// ⎡ 2 / w      0      0   cw / w ⎤
    /// ⎢     0  2 / h      0   ch / h ⎥
    /// ⎢     0      0  1 / d  far / d ⎥
    /// ⎣     0      0      0        1 ⎦
    /// ```
    ///
    /// `clip_from_view[3][3] == 1.0` is the standard way to check if a projection is orthographic
    ///
    /// Glam matrices are column major, so for example getting the near plane of a perspective projection is `clip_from_view[3][2]`
    ///
    /// Custom projections are also possible however.
    pub clip_from_view: Mat4,
    /// The transform mapping view space to world space (the view's pose).
    pub world_from_view: GlobalTransform,
    /// The view-projection matrix. When provided it is used instead of deriving it from
    /// `projection` and `transform` fields, which can be helpful in cases where numerical
    /// stability matters and there is a more direct way to derive the view-projection matrix.
    pub clip_from_world: Option<Mat4>,
    /// Whether this view renders in HDR.
    pub hdr: bool,
    /// Viewport rectangle: `uvec4(origin.x, origin.y, width, height)`.
    pub viewport: UVec4,
    /// The color grading settings for this view.
    pub color_grading: ColorGrading,
}
307
308impl ExtractedView {
309    /// Creates a 3D rangefinder for a view
310    pub fn rangefinder3d(&self) -> ViewRangefinder3d {
311        ViewRangefinder3d::from_world_from_view(&self.world_from_view.compute_matrix())
312    }
313}
314
/// Configures filmic color grading parameters to adjust the image appearance.
///
/// Color grading is applied just before tonemapping for a given
/// [`Camera`](crate::camera::Camera) entity, with the sole exception of the
/// `post_saturation` value in [`ColorGradingGlobal`], which is applied after
/// tonemapping.
///
/// Converted to a GPU-friendly form via `From<ColorGrading> for
/// ColorGradingUniform`.
#[derive(Component, Reflect, Debug, Default, Clone)]
#[reflect(Component, Default, Debug, Clone)]
pub struct ColorGrading {
    /// Filmic color grading values applied to the image as a whole (as opposed
    /// to individual sections, like shadows and highlights).
    pub global: ColorGradingGlobal,

    /// Color grading values that are applied to the darker parts of the image.
    ///
    /// The cutoff points can be customized with the
    /// [`ColorGradingGlobal::midtones_range`] field.
    pub shadows: ColorGradingSection,

    /// Color grading values that are applied to the parts of the image with
    /// intermediate brightness.
    ///
    /// The cutoff points can be customized with the
    /// [`ColorGradingGlobal::midtones_range`] field.
    pub midtones: ColorGradingSection,

    /// Color grading values that are applied to the lighter parts of the image.
    ///
    /// The cutoff points can be customized with the
    /// [`ColorGradingGlobal::midtones_range`] field.
    pub highlights: ColorGradingSection,
}
347
/// Filmic color grading values applied to the image as a whole (as opposed to
/// individual sections, like shadows and highlights).
///
/// Defaults (see the `Default` impl below) are neutral: no adjustment.
#[derive(Clone, Debug, Reflect)]
#[reflect(Default, Clone)]
pub struct ColorGradingGlobal {
    /// Exposure value (EV) offset, measured in stops.
    pub exposure: f32,

    /// An adjustment made to the [CIE 1931] chromaticity *x* value.
    ///
    /// Positive values make the colors redder. Negative values make the colors
    /// bluer. This has no effect on luminance (brightness).
    ///
    /// [CIE 1931]: https://en.wikipedia.org/wiki/CIE_1931_color_space#CIE_xy_chromaticity_diagram_and_the_CIE_xyY_color_space
    pub temperature: f32,

    /// An adjustment made to the [CIE 1931] chromaticity *y* value.
    ///
    /// Positive values make the colors more magenta. Negative values make the
    /// colors greener. This has no effect on luminance (brightness).
    ///
    /// [CIE 1931]: https://en.wikipedia.org/wiki/CIE_1931_color_space#CIE_xy_chromaticity_diagram_and_the_CIE_xyY_color_space
    pub tint: f32,

    /// An adjustment to the [hue], in radians.
    ///
    /// Adjusting this value changes the perceived colors in the image: red to
    /// yellow to green to blue, etc. It has no effect on the saturation or
    /// brightness of the colors.
    ///
    /// [hue]: https://en.wikipedia.org/wiki/HSL_and_HSV#Formal_derivation
    pub hue: f32,

    /// Saturation adjustment applied after tonemapping.
    /// Values below 1.0 desaturate, with a value of 0.0 resulting in a grayscale image
    /// with luminance defined by ITU-R BT.709
    /// Values above 1.0 increase saturation.
    pub post_saturation: f32,

    /// The luminance (brightness) ranges that are considered part of the
    /// "midtones" of the image.
    ///
    /// This affects which [`ColorGradingSection`]s apply to which colors. Note
    /// that the sections smoothly blend into one another, to avoid abrupt
    /// transitions.
    ///
    /// The default value is 0.2 to 0.7.
    pub midtones_range: Range<f32>,
}
397
/// The [`ColorGrading`] structure, packed into the most efficient form for the
/// GPU.
///
/// Per-section values are packed as `vec3(shadows, midtones, highlights)`; see
/// the `From<ColorGrading>` impl below for how each field is filled in.
#[derive(Clone, Copy, Debug, ShaderType)]
pub struct ColorGradingUniform {
    /// White-balance matrix applied in RGB space, derived from the
    /// `temperature`/`tint` settings (see [`RGB_TO_LMS`]).
    pub balance: Mat3,
    pub saturation: Vec3,
    pub contrast: Vec3,
    pub gamma: Vec3,
    pub gain: Vec3,
    pub lift: Vec3,
    /// `(start, end)` of [`ColorGradingGlobal::midtones_range`].
    pub midtone_range: Vec2,
    pub exposure: f32,
    pub hue: f32,
    pub post_saturation: f32,
}
413
/// A section of color grading values that can be selectively applied to
/// shadows, midtones, and highlights.
///
/// Which pixels count as shadows, midtones, or highlights is controlled by
/// [`ColorGradingGlobal::midtones_range`].
#[derive(Reflect, Debug, Copy, Clone, PartialEq)]
#[reflect(Clone, PartialEq)]
pub struct ColorGradingSection {
    /// Values below 1.0 desaturate, with a value of 0.0 resulting in a grayscale image
    /// with luminance defined by ITU-R BT.709.
    /// Values above 1.0 increase saturation.
    pub saturation: f32,

    /// Adjusts the range of colors.
    ///
    /// A value of 1.0 applies no changes. Values below 1.0 move the colors more
    /// toward a neutral gray. Values above 1.0 spread the colors out away from
    /// the neutral gray.
    pub contrast: f32,

    /// A nonlinear luminance adjustment, mainly affecting the high end of the
    /// range.
    ///
    /// This is the *n* exponent in the standard [ASC CDL] formula for color
    /// correction:
    ///
    /// ```text
    /// out = (i × s + o)ⁿ
    /// ```
    ///
    /// [ASC CDL]: https://en.wikipedia.org/wiki/ASC_CDL#Combined_Function
    pub gamma: f32,

    /// A linear luminance adjustment, mainly affecting the middle part of the
    /// range.
    ///
    /// This is the *s* factor in the standard [ASC CDL] formula for color
    /// correction:
    ///
    /// ```text
    /// out = (i × s + o)ⁿ
    /// ```
    ///
    /// [ASC CDL]: https://en.wikipedia.org/wiki/ASC_CDL#Combined_Function
    pub gain: f32,

    /// A fixed luminance adjustment, mainly affecting the lower part of the
    /// range.
    ///
    /// This is the *o* term in the standard [ASC CDL] formula for color
    /// correction:
    ///
    /// ```text
    /// out = (i × s + o)ⁿ
    /// ```
    ///
    /// [ASC CDL]: https://en.wikipedia.org/wiki/ASC_CDL#Combined_Function
    pub lift: f32,
}
470
471impl Default for ColorGradingGlobal {
472    fn default() -> Self {
473        Self {
474            exposure: 0.0,
475            temperature: 0.0,
476            tint: 0.0,
477            hue: 0.0,
478            post_saturation: 1.0,
479            midtones_range: 0.2..0.7,
480        }
481    }
482}
483
484impl Default for ColorGradingSection {
485    fn default() -> Self {
486        Self {
487            saturation: 1.0,
488            contrast: 1.0,
489            gamma: 1.0,
490            gain: 1.0,
491            lift: 0.0,
492        }
493    }
494}
495
496impl ColorGrading {
497    /// Creates a new [`ColorGrading`] instance in which shadows, midtones, and
498    /// highlights all have the same set of color grading values.
499    pub fn with_identical_sections(
500        global: ColorGradingGlobal,
501        section: ColorGradingSection,
502    ) -> ColorGrading {
503        ColorGrading {
504            global,
505            highlights: section,
506            midtones: section,
507            shadows: section,
508        }
509    }
510
511    /// Returns an iterator that visits the shadows, midtones, and highlights
512    /// sections, in that order.
513    pub fn all_sections(&self) -> impl Iterator<Item = &ColorGradingSection> {
514        [&self.shadows, &self.midtones, &self.highlights].into_iter()
515    }
516
517    /// Applies the given mutating function to the shadows, midtones, and
518    /// highlights sections, in that order.
519    ///
520    /// Returns an array composed of the results of such evaluation, in that
521    /// order.
522    pub fn all_sections_mut(&mut self) -> impl Iterator<Item = &mut ColorGradingSection> {
523        [&mut self.shadows, &mut self.midtones, &mut self.highlights].into_iter()
524    }
525}
526
/// Per-view data uploaded to the GPU; the Rust-side counterpart of the `View`
/// struct in `view.wgsl`.
#[derive(Clone, ShaderType)]
pub struct ViewUniform {
    pub clip_from_world: Mat4,
    /// `clip_from_world` without any temporal jitter applied.
    pub unjittered_clip_from_world: Mat4,
    pub world_from_clip: Mat4,
    pub world_from_view: Mat4,
    pub view_from_world: Mat4,
    /// Typically a column-major right-handed projection matrix, one of either:
    ///
    /// Perspective (infinite reverse z)
    /// ```text
    /// f = 1 / tan(fov_y_radians / 2)
    ///
    /// ⎡ f / aspect  0   0     0 ⎤
    /// ⎢          0  f   0     0 ⎥
    /// ⎢          0  0   0  near ⎥
    /// ⎣          0  0  -1     0 ⎦
    /// ```
    ///
    /// Orthographic
    /// ```text
    /// w = right - left
    /// h = top - bottom
    /// d = far - near
    /// cw = -right - left
    /// ch = -top - bottom
    ///
    /// ⎡ 2 / w      0      0   cw / w ⎤
    /// ⎢     0  2 / h      0   ch / h ⎥
    /// ⎢     0      0  1 / d  far / d ⎥
    /// ⎣     0      0      0        1 ⎦
    /// ```
    ///
    /// `clip_from_view[3][3] == 1.0` is the standard way to check if a projection is orthographic
    ///
    /// Glam matrices are column major, so for example getting the near plane of a perspective projection is `clip_from_view[3][2]`
    ///
    /// Custom projections are also possible however.
    pub clip_from_view: Mat4,
    pub view_from_clip: Mat4,
    pub world_position: Vec3,
    pub exposure: f32,
    /// Viewport rectangle: `(x_origin, y_origin, width, height)`.
    pub viewport: Vec4,
    /// 6 world-space half spaces (normal: vec3, distance: f32) ordered left, right, top, bottom, near, far.
    /// The normal vectors point towards the interior of the frustum.
    /// A half space contains `p` if `normal.dot(p) + distance > 0.`
    pub frustum: [Vec4; 6],
    pub color_grading: ColorGradingUniform,
    pub mip_bias: f32,
    pub frame_count: u32,
}
579
/// Resource holding the dynamic uniform buffer of every view's
/// [`ViewUniform`]; see `prepare_view_uniforms`, which [`ViewPlugin`]
/// schedules in `RenderSet::PrepareResources`.
#[derive(Resource)]
pub struct ViewUniforms {
    pub uniforms: DynamicUniformBuffer<ViewUniform>,
}
584
585impl FromWorld for ViewUniforms {
586    fn from_world(world: &mut World) -> Self {
587        let mut uniforms = DynamicUniformBuffer::default();
588        uniforms.set_label(Some("view_uniforms_buffer"));
589
590        let render_device = world.resource::<RenderDevice>();
591        if render_device.limits().max_storage_buffers_per_shader_stage > 0 {
592            uniforms.add_usages(BufferUsages::STORAGE);
593        }
594
595        Self { uniforms }
596    }
597}
598
/// The dynamic offset of a single view's [`ViewUniform`] within the shared
/// [`ViewUniforms`] buffer.
///
/// NOTE(review): semantics inferred from the type names — confirm against
/// `prepare_view_uniforms`, which is not visible in this chunk.
#[derive(Component)]
pub struct ViewUniformOffset {
    pub offset: u32,
}
603
/// The render-world textures a view renders into: a double-buffered pair of
/// "main" textures (flipped by [`ViewTarget::post_process_write`]) plus the
/// final output attachment.
#[derive(Component)]
pub struct ViewTarget {
    main_textures: MainTargetTextures,
    /// Format of the main textures; equals [`ViewTarget::TEXTURE_FORMAT_HDR`]
    /// when the view is HDR.
    main_texture_format: TextureFormat,
    /// 0 represents `main_textures.a`, 1 represents `main_textures.b`
    /// This is shared across view targets with the same render target
    main_texture: Arc<AtomicUsize>,
    out_texture: OutputColorAttachment,
}
613
/// Contains [`OutputColorAttachment`] used for each target present on any view in the current
/// frame, after being prepared by [`prepare_view_attachments`]. Users that want to override
/// the default output color attachment for a specific target can do so by adding a
/// [`OutputColorAttachment`] to this resource before [`prepare_view_targets`] is called.
///
/// Keyed by [`NormalizedRenderTarget`]. NOTE(review): presumably emptied each
/// frame by `clear_view_attachments` (scheduled before `create_surfaces`) —
/// confirm, as that system's body is outside this chunk.
#[derive(Resource, Default, Deref, DerefMut)]
pub struct ViewTargetAttachments(HashMap<NormalizedRenderTarget, OutputColorAttachment>);
620
/// The source/destination texture pair handed out by
/// [`ViewTarget::post_process_write`].
///
/// The caller must copy `source` into `destination` (with or without
/// modification); see [`ViewTarget::post_process_write`] for details.
pub struct PostProcessWrite<'a> {
    /// View of the texture to read from.
    pub source: &'a TextureView,
    /// The underlying texture to read from.
    pub source_texture: &'a Texture,
    /// View of the texture to write to.
    pub destination: &'a TextureView,
    /// The underlying texture to write to.
    pub destination_texture: &'a Texture,
}
627
impl From<ColorGrading> for ColorGradingUniform {
    /// Flattens the user-facing [`ColorGrading`] settings into the packed
    /// GPU representation, computing the white-balance matrix on the CPU.
    fn from(component: ColorGrading) -> Self {
        // Compute the balance matrix that will be used to apply the white
        // balance adjustment to an RGB color. Our general approach will be to
        // convert both the color and the developer-supplied white point to the
        // LMS color space, apply the conversion, and then convert back.
        //
        // First, we start with the CIE 1931 *xy* values of the standard D65
        // illuminant:
        // <https://en.wikipedia.org/wiki/Standard_illuminant#D65_values>
        //
        // We then adjust them based on the developer's requested white balance.
        // Note that `temperature` is negated: it shifts *x* in the opposite
        // direction from `tint`'s shift of *y*.
        let white_point_xy = D65_XY + vec2(-component.global.temperature, component.global.tint);

        // Convert the white point from CIE 1931 *xy* to LMS. First, we convert to XYZ:
        //
        //                  Y          Y
        //     Y = 1    X = ─ x    Z = ─ (1 - x - y)
        //                  y          y
        //
        // Then we convert from XYZ to LMS color space, using the CAM16 matrix
        // from <https://en.wikipedia.org/wiki/LMS_color_space#Later_CIECAMs>:
        //
        //     ⎡ L ⎤   ⎡  0.401   0.650  -0.051 ⎤ ⎡ X ⎤
        //     ⎢ M ⎥ = ⎢ -0.250   1.204   0.046 ⎥ ⎢ Y ⎥
        //     ⎣ S ⎦   ⎣ -0.002   0.049   0.953 ⎦ ⎣ Z ⎦
        //
        // The following formula is just a simplification of the above.

        let white_point_lms = vec3(0.701634, 1.15856, -0.904175)
            + (vec3(-0.051461, 0.045854, 0.953127)
                + vec3(0.452749, -0.296122, -0.955206) * white_point_xy.x)
                / white_point_xy.y;

        // Now that we're in LMS space, perform the white point scaling.
        let white_point_adjustment = Mat3::from_diagonal(D65_LMS / white_point_lms);

        // Finally, combine the RGB → LMS → corrected LMS → corrected RGB
        // pipeline into a single 3×3 matrix.
        let balance = LMS_TO_RGB * white_point_adjustment * RGB_TO_LMS;

        // Per-section scalars are packed as vec3(shadows, midtones, highlights),
        // matching the layout the shader expects.
        Self {
            balance,
            saturation: vec3(
                component.shadows.saturation,
                component.midtones.saturation,
                component.highlights.saturation,
            ),
            contrast: vec3(
                component.shadows.contrast,
                component.midtones.contrast,
                component.highlights.contrast,
            ),
            gamma: vec3(
                component.shadows.gamma,
                component.midtones.gamma,
                component.highlights.gamma,
            ),
            gain: vec3(
                component.shadows.gain,
                component.midtones.gain,
                component.highlights.gain,
            ),
            lift: vec3(
                component.shadows.lift,
                component.midtones.lift,
                component.highlights.lift,
            ),
            midtone_range: vec2(
                component.global.midtones_range.start,
                component.global.midtones_range.end,
            ),
            exposure: component.global.exposure,
            hue: component.global.hue,
            post_saturation: component.global.post_saturation,
        }
    }
}
706
/// Marker component: add this to a camera to disable *indirect mode*.
///
/// Indirect mode, automatically enabled on supported hardware, allows Bevy to
/// offload transform and cull operations to the GPU, reducing CPU overhead.
/// Doing this, however, reduces the amount of control that your app has over
/// instancing decisions. In certain circumstances, you may want to disable
/// indirect drawing so that your app can manually instance meshes as it sees
/// fit. See the `custom_shader_instancing` example.
///
/// The vast majority of applications will not need to use this component, as it
/// generally reduces rendering performance.
///
/// Note: This component should only be added when initially spawning a camera. Adding
/// or removing after spawn can result in unspecified behavior.
#[derive(Component, Default)]
pub struct NoIndirectDrawing;
723
/// Marker component for opting a view out of CPU-side culling.
///
/// NOTE(review): purpose inferred from the name only — confirm against the
/// visibility systems that consume this component (not visible in this chunk).
#[derive(Component, Default)]
pub struct NoCpuCulling;
726
727impl ViewTarget {
    /// The [`TextureFormat`] used for the main textures when the view is HDR;
    /// see [`Self::is_hdr`].
    pub const TEXTURE_FORMAT_HDR: TextureFormat = TextureFormat::Rgba16Float;
729
730    /// Retrieve this target's main texture's color attachment.
731    pub fn get_color_attachment(&self) -> RenderPassColorAttachment {
732        if self.main_texture.load(Ordering::SeqCst) == 0 {
733            self.main_textures.a.get_attachment()
734        } else {
735            self.main_textures.b.get_attachment()
736        }
737    }
738
739    /// Retrieve this target's "unsampled" main texture's color attachment.
740    pub fn get_unsampled_color_attachment(&self) -> RenderPassColorAttachment {
741        if self.main_texture.load(Ordering::SeqCst) == 0 {
742            self.main_textures.a.get_unsampled_attachment()
743        } else {
744            self.main_textures.b.get_unsampled_attachment()
745        }
746    }
747
748    /// The "main" unsampled texture.
749    pub fn main_texture(&self) -> &Texture {
750        if self.main_texture.load(Ordering::SeqCst) == 0 {
751            &self.main_textures.a.texture.texture
752        } else {
753            &self.main_textures.b.texture.texture
754        }
755    }
756
757    /// The _other_ "main" unsampled texture.
758    /// In most cases you should use [`Self::main_texture`] instead and never this.
759    /// The textures will naturally be swapped when [`Self::post_process_write`] is called.
760    ///
761    /// A use case for this is to be able to prepare a bind group for all main textures
762    /// ahead of time.
763    pub fn main_texture_other(&self) -> &Texture {
764        if self.main_texture.load(Ordering::SeqCst) == 0 {
765            &self.main_textures.b.texture.texture
766        } else {
767            &self.main_textures.a.texture.texture
768        }
769    }
770
771    /// The "main" unsampled texture.
772    pub fn main_texture_view(&self) -> &TextureView {
773        if self.main_texture.load(Ordering::SeqCst) == 0 {
774            &self.main_textures.a.texture.default_view
775        } else {
776            &self.main_textures.b.texture.default_view
777        }
778    }
779
780    /// The _other_ "main" unsampled texture view.
781    /// In most cases you should use [`Self::main_texture_view`] instead and never this.
782    /// The textures will naturally be swapped when [`Self::post_process_write`] is called.
783    ///
784    /// A use case for this is to be able to prepare a bind group for all main textures
785    /// ahead of time.
786    pub fn main_texture_other_view(&self) -> &TextureView {
787        if self.main_texture.load(Ordering::SeqCst) == 0 {
788            &self.main_textures.b.texture.default_view
789        } else {
790            &self.main_textures.a.texture.default_view
791        }
792    }
793
794    /// The "main" sampled texture.
795    pub fn sampled_main_texture(&self) -> Option<&Texture> {
796        self.main_textures
797            .a
798            .resolve_target
799            .as_ref()
800            .map(|sampled| &sampled.texture)
801    }
802
803    /// The "main" sampled texture view.
804    pub fn sampled_main_texture_view(&self) -> Option<&TextureView> {
805        self.main_textures
806            .a
807            .resolve_target
808            .as_ref()
809            .map(|sampled| &sampled.default_view)
810    }
811
    /// The [`TextureFormat`] of the main textures.
    #[inline]
    pub fn main_texture_format(&self) -> TextureFormat {
        self.main_texture_format
    }
816
    /// Returns `true` if and only if the main texture is [`Self::TEXTURE_FORMAT_HDR`]
    #[inline]
    pub fn is_hdr(&self) -> bool {
        self.main_texture_format == ViewTarget::TEXTURE_FORMAT_HDR
    }
822
    /// The final texture this view will render to.
    #[inline]
    pub fn out_texture(&self) -> &TextureView {
        &self.out_texture.view
    }
828
    /// Builds a [`RenderPassColorAttachment`] for the final output texture.
    ///
    /// `clear_color` is forwarded to [`OutputColorAttachment::get_attachment`],
    /// which determines the attachment's load behavior from it.
    pub fn out_texture_color_attachment(
        &self,
        clear_color: Option<LinearRgba>,
    ) -> RenderPassColorAttachment {
        self.out_texture.get_attachment(clear_color)
    }
835
    /// The [`TextureFormat`] of the final texture this view will render to.
    #[inline]
    pub fn out_texture_format(&self) -> TextureFormat {
        self.out_texture.format
    }
841
842    /// This will start a new "post process write", which assumes that the caller
843    /// will write the [`PostProcessWrite`]'s `source` to the `destination`.
844    ///
845    /// `source` is the "current" main texture. This will internally flip this
846    /// [`ViewTarget`]'s main texture to the `destination` texture, so the caller
847    /// _must_ ensure `source` is copied to `destination`, with or without modifications.
848    /// Failing to do so will cause the current main texture information to be lost.
849    pub fn post_process_write(&self) -> PostProcessWrite {
850        let old_is_a_main_texture = self.main_texture.fetch_xor(1, Ordering::SeqCst);
851        // if the old main texture is a, then the post processing must write from a to b
852        if old_is_a_main_texture == 0 {
853            self.main_textures.b.mark_as_cleared();
854            PostProcessWrite {
855                source: &self.main_textures.a.texture.default_view,
856                source_texture: &self.main_textures.a.texture.texture,
857                destination: &self.main_textures.b.texture.default_view,
858                destination_texture: &self.main_textures.b.texture.texture,
859            }
860        } else {
861            self.main_textures.a.mark_as_cleared();
862            PostProcessWrite {
863                source: &self.main_textures.b.texture.default_view,
864                source_texture: &self.main_textures.b.texture.texture,
865                destination: &self.main_textures.a.texture.default_view,
866                destination_texture: &self.main_textures.a.texture.texture,
867            }
868        }
869    }
870}
871
/// The depth texture used by a view, together with its cached render-pass
/// attachment state.
#[derive(Component)]
pub struct ViewDepthTexture {
    /// The underlying GPU depth texture.
    pub texture: Texture,
    /// Cached attachment state wrapping the texture's view and clear value.
    attachment: DepthAttachment,
}
877
878impl ViewDepthTexture {
879    pub fn new(texture: CachedTexture, clear_value: Option<f32>) -> Self {
880        Self {
881            texture: texture.texture,
882            attachment: DepthAttachment::new(texture.default_view, clear_value),
883        }
884    }
885
886    pub fn get_attachment(&self, store: StoreOp) -> RenderPassDepthStencilAttachment {
887        self.attachment.get_attachment(store)
888    }
889
890    pub fn view(&self) -> &TextureView {
891        &self.attachment.view
892    }
893}
894
895pub fn prepare_view_uniforms(
896    mut commands: Commands,
897    render_device: Res<RenderDevice>,
898    render_queue: Res<RenderQueue>,
899    mut view_uniforms: ResMut<ViewUniforms>,
900    views: Query<(
901        Entity,
902        Option<&ExtractedCamera>,
903        &ExtractedView,
904        Option<&Frustum>,
905        Option<&TemporalJitter>,
906        Option<&MipBias>,
907    )>,
908    frame_count: Res<FrameCount>,
909) {
910    let view_iter = views.iter();
911    let view_count = view_iter.len();
912    let Some(mut writer) =
913        view_uniforms
914            .uniforms
915            .get_writer(view_count, &render_device, &render_queue)
916    else {
917        return;
918    };
919    for (entity, extracted_camera, extracted_view, frustum, temporal_jitter, mip_bias) in &views {
920        let viewport = extracted_view.viewport.as_vec4();
921        let unjittered_projection = extracted_view.clip_from_view;
922        let mut clip_from_view = unjittered_projection;
923
924        if let Some(temporal_jitter) = temporal_jitter {
925            temporal_jitter.jitter_projection(&mut clip_from_view, viewport.zw());
926        }
927
928        let view_from_clip = clip_from_view.inverse();
929        let world_from_view = extracted_view.world_from_view.compute_matrix();
930        let view_from_world = world_from_view.inverse();
931
932        let clip_from_world = if temporal_jitter.is_some() {
933            clip_from_view * view_from_world
934        } else {
935            extracted_view
936                .clip_from_world
937                .unwrap_or_else(|| clip_from_view * view_from_world)
938        };
939
940        // Map Frustum type to shader array<vec4<f32>, 6>
941        let frustum = frustum
942            .map(|frustum| frustum.half_spaces.map(|h| h.normal_d()))
943            .unwrap_or([Vec4::ZERO; 6]);
944
945        let view_uniforms = ViewUniformOffset {
946            offset: writer.write(&ViewUniform {
947                clip_from_world,
948                unjittered_clip_from_world: unjittered_projection * view_from_world,
949                world_from_clip: world_from_view * view_from_clip,
950                world_from_view,
951                view_from_world,
952                clip_from_view,
953                view_from_clip,
954                world_position: extracted_view.world_from_view.translation(),
955                exposure: extracted_camera
956                    .map(|c| c.exposure)
957                    .unwrap_or_else(|| Exposure::default().exposure()),
958                viewport,
959                frustum,
960                color_grading: extracted_view.color_grading.clone().into(),
961                mip_bias: mip_bias.unwrap_or(&MipBias(0.0)).0,
962                frame_count: frame_count.0,
963            }),
964        };
965
966        commands.entity(entity).insert(view_uniforms);
967    }
968}
969
/// The pair of "main" color attachments that a `ViewTarget` alternates
/// between during post processing.
#[derive(Clone)]
struct MainTargetTextures {
    /// The first of the two interchangeable main textures.
    a: ColorAttachment,
    /// The second of the two interchangeable main textures.
    b: ColorAttachment,
    /// 0 represents `main_textures.a`, 1 represents `main_textures.b`
    /// This is shared across view targets with the same render target
    main_texture: Arc<AtomicUsize>,
}
978
979/// Prepares the view target [`OutputColorAttachment`] for each view in the current frame.
980pub fn prepare_view_attachments(
981    windows: Res<ExtractedWindows>,
982    images: Res<RenderAssets<GpuImage>>,
983    manual_texture_views: Res<ManualTextureViews>,
984    cameras: Query<&ExtractedCamera>,
985    mut view_target_attachments: ResMut<ViewTargetAttachments>,
986) {
987    for camera in cameras.iter() {
988        let Some(target) = &camera.target else {
989            continue;
990        };
991
992        match view_target_attachments.entry(target.clone()) {
993            Entry::Occupied(_) => {}
994            Entry::Vacant(entry) => {
995                let Some(attachment) = target
996                    .get_texture_view(&windows, &images, &manual_texture_views)
997                    .cloned()
998                    .zip(target.get_texture_format(&windows, &images, &manual_texture_views))
999                    .map(|(view, format)| {
1000                        OutputColorAttachment::new(view.clone(), format.add_srgb_suffix())
1001                    })
1002                else {
1003                    continue;
1004                };
1005                entry.insert(attachment);
1006            }
1007        };
1008    }
1009}
1010
/// Clears the view target [`OutputColorAttachment`]s.
///
/// Empties the [`ViewTargetAttachments`] map so that
/// [`prepare_view_attachments`] can repopulate it with fresh attachments.
pub fn clear_view_attachments(mut view_target_attachments: ResMut<ViewTargetAttachments>) {
    view_target_attachments.clear();
}
1015
/// Creates and inserts a [`ViewTarget`] for every camera that has a physical
/// target size, a render target, and a prepared output attachment.
///
/// The double-buffered "main" textures (plus the multisampled texture when
/// MSAA is enabled) come from the [`TextureCache`] and are shared between
/// cameras whose `(target, texture usage, HDR flag, MSAA)` key matches —
/// including the shared a/b flip index, so post-process texture flipping
/// stays consistent across views of the same target.
pub fn prepare_view_targets(
    mut commands: Commands,
    clear_color_global: Res<ClearColor>,
    render_device: Res<RenderDevice>,
    mut texture_cache: ResMut<TextureCache>,
    cameras: Query<(
        Entity,
        &ExtractedCamera,
        &ExtractedView,
        &CameraMainTextureUsages,
        &Msaa,
    )>,
    view_target_attachments: Res<ViewTargetAttachments>,
) {
    // Textures created during this run, keyed so compatible cameras share them.
    let mut textures = <HashMap<_, _>>::default();
    for (entity, camera, view, texture_usage, msaa) in cameras.iter() {
        // Skip cameras with no resolved size or no render target.
        let (Some(target_size), Some(target)) = (camera.physical_target_size, &camera.target)
        else {
            continue;
        };

        // Skip cameras whose output attachment wasn't prepared this frame.
        let Some(out_attachment) = view_target_attachments.get(target) else {
            continue;
        };

        let size = Extent3d {
            width: target_size.x,
            height: target_size.y,
            depth_or_array_layers: 1,
        };

        // HDR views use the dedicated HDR format; others the platform default.
        let main_texture_format = if view.hdr {
            ViewTarget::TEXTURE_FORMAT_HDR
        } else {
            TextureFormat::bevy_default()
        };

        // `None` disables clearing; `Custom` overrides the global `ClearColor`.
        let clear_color = match camera.clear_color {
            ClearColorConfig::Custom(color) => Some(color),
            ClearColorConfig::None => None,
            _ => Some(clear_color_global.0),
        };

        let (a, b, sampled, main_texture) = textures
            .entry((camera.target.clone(), texture_usage.0, view.hdr, msaa))
            .or_insert_with(|| {
                let descriptor = TextureDescriptor {
                    label: None,
                    size,
                    mip_level_count: 1,
                    sample_count: 1,
                    dimension: TextureDimension::D2,
                    format: main_texture_format,
                    // Allow sRGB views to be created for non-sRGB 8-bit formats.
                    usage: texture_usage.0,
                    view_formats: match main_texture_format {
                        TextureFormat::Bgra8Unorm => &[TextureFormat::Bgra8UnormSrgb],
                        TextureFormat::Rgba8Unorm => &[TextureFormat::Rgba8UnormSrgb],
                        _ => &[],
                    },
                };
                let a = texture_cache.get(
                    &render_device,
                    TextureDescriptor {
                        label: Some("main_texture_a"),
                        ..descriptor
                    },
                );
                let b = texture_cache.get(
                    &render_device,
                    TextureDescriptor {
                        label: Some("main_texture_b"),
                        ..descriptor
                    },
                );
                // Extra multisampled texture, only allocated when MSAA is on.
                let sampled = if msaa.samples() > 1 {
                    let sampled = texture_cache.get(
                        &render_device,
                        TextureDescriptor {
                            label: Some("main_texture_sampled"),
                            size,
                            mip_level_count: 1,
                            sample_count: msaa.samples(),
                            dimension: TextureDimension::D2,
                            format: main_texture_format,
                            usage: TextureUsages::RENDER_ATTACHMENT,
                            view_formats: descriptor.view_formats,
                        },
                    );
                    Some(sampled)
                } else {
                    None
                };
                // Start with texture `a` (index 0) as the active main texture.
                let main_texture = Arc::new(AtomicUsize::new(0));
                (a, b, sampled, main_texture)
            });

        let converted_clear_color = clear_color.map(Into::into);

        let main_textures = MainTargetTextures {
            a: ColorAttachment::new(a.clone(), sampled.clone(), converted_clear_color),
            b: ColorAttachment::new(b.clone(), sampled.clone(), converted_clear_color),
            main_texture: main_texture.clone(),
        };

        commands.entity(entity).insert(ViewTarget {
            main_texture: main_textures.main_texture.clone(),
            main_textures,
            main_texture_format,
            out_texture: out_attachment.clone(),
        });
    }
}