//! `bevy_render/view/mod.rs` — view management for the render app.

1pub mod visibility;
2pub mod window;
3
4use bevy_camera::{
5    primitives::Frustum, CameraMainTextureUsages, ClearColor, ClearColorConfig, Exposure,
6    MainPassResolutionOverride, NormalizedRenderTarget,
7};
8use bevy_diagnostic::FrameCount;
9pub use visibility::*;
10pub use window::*;
11
12use crate::{
13    camera::{ExtractedCamera, MipBias, NormalizedRenderTargetExt as _, TemporalJitter},
14    experimental::occlusion_culling::OcclusionCulling,
15    extract_component::ExtractComponentPlugin,
16    render_asset::RenderAssets,
17    render_phase::ViewRangefinder3d,
18    render_resource::{DynamicUniformBuffer, ShaderType, Texture, TextureView},
19    renderer::{RenderDevice, RenderQueue},
20    sync_world::MainEntity,
21    texture::{
22        CachedTexture, ColorAttachment, DepthAttachment, GpuImage, ManualTextureViews,
23        OutputColorAttachment, TextureCache,
24    },
25    Render, RenderApp, RenderSystems,
26};
27use alloc::sync::Arc;
28use bevy_app::{App, Plugin};
29use bevy_color::LinearRgba;
30use bevy_derive::{Deref, DerefMut};
31use bevy_ecs::prelude::*;
32use bevy_image::{BevyDefault as _, ToExtents};
33use bevy_math::{mat3, vec2, vec3, Mat3, Mat4, UVec4, Vec2, Vec3, Vec4, Vec4Swizzles};
34use bevy_platform::collections::{hash_map::Entry, HashMap};
35use bevy_reflect::{std_traits::ReflectDefault, Reflect};
36use bevy_render_macros::ExtractComponent;
37use bevy_shader::load_shader_library;
38use bevy_transform::components::GlobalTransform;
39use core::{
40    ops::Range,
41    sync::atomic::{AtomicUsize, Ordering},
42};
43use wgpu::{
44    BufferUsages, RenderPassColorAttachment, RenderPassDepthStencilAttachment, StoreOp,
45    TextureDescriptor, TextureDimension, TextureFormat, TextureUsages,
46};
47
/// The matrix that converts from the RGB to the LMS color space.
///
/// To derive this, first we convert from RGB to [CIE 1931 XYZ]:
///
/// ```text
/// ⎡ X ⎤   ⎡ 0.490  0.310  0.200 ⎤ ⎡ R ⎤
/// ⎢ Y ⎥ = ⎢ 0.177  0.812  0.011 ⎥ ⎢ G ⎥
/// ⎣ Z ⎦   ⎣ 0.000  0.010  0.990 ⎦ ⎣ B ⎦
/// ```
///
/// Then we convert to LMS according to the [CAM16 standard matrix]:
///
/// ```text
/// ⎡ L ⎤   ⎡  0.401   0.650  -0.051 ⎤ ⎡ X ⎤
/// ⎢ M ⎥ = ⎢ -0.250   1.204   0.046 ⎥ ⎢ Y ⎥
/// ⎣ S ⎦   ⎣ -0.002   0.049   0.953 ⎦ ⎣ Z ⎦
/// ```
///
/// The resulting matrix is just the concatenation of these two matrices, to do
/// the conversion in one step.
///
/// Note that each `vec3` argument below is one *column* of the combined matrix
/// (glam matrices are column-major): the first column holds the L, M, and S
/// coefficients of the R channel, and so on.
///
/// [CIE 1931 XYZ]: https://en.wikipedia.org/wiki/CIE_1931_color_space
/// [CAM16 standard matrix]: https://en.wikipedia.org/wiki/LMS_color_space
static RGB_TO_LMS: Mat3 = mat3(
    vec3(0.311692, 0.0905138, 0.00764433),
    vec3(0.652085, 0.901341, 0.0486554),
    vec3(0.0362225, 0.00814478, 0.943700),
);
76
77/// The inverse of the [`RGB_TO_LMS`] matrix, converting from the LMS color
78/// space back to RGB.
79static LMS_TO_RGB: Mat3 = mat3(
80    vec3(4.06305, -0.40791, -0.0118812),
81    vec3(-2.93241, 1.40437, -0.0486532),
82    vec3(-0.130646, 0.00353630, 1.0605344),
83);
84
/// The [CIE 1931] *xy* chromaticity coordinates of the [D65 white point].
///
/// [CIE 1931]: https://en.wikipedia.org/wiki/CIE_1931_color_space
/// [D65 white point]: https://en.wikipedia.org/wiki/Standard_illuminant#D65_values
static D65_XY: Vec2 = vec2(0.31272, 0.32903);

/// The [D65 white point] in [LMS color space].
///
/// Used as the reference white when building the white-balance matrix in
/// `From<ColorGrading> for ColorGradingUniform`.
///
/// [LMS color space]: https://en.wikipedia.org/wiki/LMS_color_space
/// [D65 white point]: https://en.wikipedia.org/wiki/Standard_illuminant#D65_values
static D65_LMS: Vec3 = vec3(0.975538, 1.01648, 1.08475);
96
/// Plugin that sets up view processing for the render app.
///
/// It loads the `view.wgsl` shader library, extracts the [`Hdr`], [`Msaa`],
/// and [`OcclusionCulling`] components to the render world, and registers the
/// systems that prepare view attachments, targets, and uniforms each frame.
pub struct ViewPlugin;
98
impl Plugin for ViewPlugin {
    fn build(&self, app: &mut App) {
        load_shader_library!(app, "view.wgsl");

        app
            // NOTE: windows.is_changed() handles cases where a window was resized
            .add_plugins((
                ExtractComponentPlugin::<Hdr>::default(),
                ExtractComponentPlugin::<Msaa>::default(),
                ExtractComponentPlugin::<OcclusionCulling>::default(),
                RenderVisibilityRangePlugin,
            ));

        if let Some(render_app) = app.get_sub_app_mut(RenderApp) {
            render_app.add_systems(
                Render,
                (
                    // `TextureView`s need to be dropped before reconfiguring window surfaces.
                    clear_view_attachments
                        .in_set(RenderSystems::ManageViews)
                        .before(create_surfaces),
                    cleanup_view_targets_for_resize
                        .in_set(RenderSystems::ManageViews)
                        .before(create_surfaces),
                    // Attachments must exist before targets are assembled from them.
                    prepare_view_attachments
                        .in_set(RenderSystems::ManageViews)
                        .before(prepare_view_targets)
                        .after(prepare_windows),
                    prepare_view_targets
                        .in_set(RenderSystems::ManageViews)
                        .after(prepare_windows)
                        .after(crate::render_asset::prepare_assets::<GpuImage>)
                        .ambiguous_with(crate::camera::sort_cameras), // doesn't use `sorted_camera_index_for_target`
                    prepare_view_uniforms.in_set(RenderSystems::PrepareResources),
                ),
            );
        }
    }

    fn finish(&self, app: &mut App) {
        // `ViewUniforms`' `FromWorld` impl reads the `RenderDevice` resource,
        // so it is initialized in `finish`, after renderer setup.
        if let Some(render_app) = app.get_sub_app_mut(RenderApp) {
            render_app
                .init_resource::<ViewUniforms>()
                .init_resource::<ViewTargetAttachments>();
        }
    }
}
146
/// Component for configuring the number of samples for [Multi-Sample Anti-Aliasing](https://en.wikipedia.org/wiki/Multisample_anti-aliasing)
/// for a [`Camera`](bevy_camera::Camera).
///
/// Defaults to 4 samples. A higher number of samples results in smoother edges.
///
/// Some advanced rendering features may require that MSAA is disabled.
///
/// Note that the web currently only supports 1 or 4 samples.
///
/// Each variant's discriminant is its sample count; see [`Msaa::samples`].
#[derive(
    Component,
    Default,
    Clone,
    Copy,
    ExtractComponent,
    Reflect,
    PartialEq,
    PartialOrd,
    Eq,
    Hash,
    Debug,
)]
#[reflect(Component, Default, PartialEq, Hash, Debug)]
pub enum Msaa {
    /// MSAA disabled: a single sample per pixel.
    Off = 1,
    /// Two samples per pixel.
    Sample2 = 2,
    /// Four samples per pixel (the default).
    #[default]
    Sample4 = 4,
    /// Eight samples per pixel.
    Sample8 = 8,
}
176
177impl Msaa {
178    #[inline]
179    pub fn samples(&self) -> u32 {
180        *self as u32
181    }
182
183    pub fn from_samples(samples: u32) -> Self {
184        match samples {
185            1 => Msaa::Off,
186            2 => Msaa::Sample2,
187            4 => Msaa::Sample4,
188            8 => Msaa::Sample8,
189            _ => panic!("Unsupported MSAA sample count: {samples}"),
190        }
191    }
192}
193
/// If this component is added to a camera, the camera will use an intermediate "high dynamic range" render texture.
/// This allows rendering with a wider range of lighting values. However, this does *not* affect
/// whether the camera will render with hdr display output (which bevy does not support currently)
/// and only affects the intermediate render texture.
///
/// This marker component is extracted to the render world by [`ViewPlugin`].
#[derive(
    Component, Default, Copy, Clone, ExtractComponent, Reflect, PartialEq, Eq, Hash, Debug,
)]
#[reflect(Component, Default, PartialEq, Hash, Debug)]
pub struct Hdr;
203
/// An identifier for a view that is stable across frames.
///
/// We can't use [`Entity`] for this because render world entities aren't
/// stable, and we can't use just [`MainEntity`] because some main world views
/// extract to multiple render world views. For example, a directional light
/// extracts to one render world view per cascade, and a point light extracts to
/// one render world view per cubemap face. So we pair the main entity with an
/// *auxiliary entity* and a *subview index*, which *together* uniquely identify
/// a view in the render world in a way that's stable from frame to frame.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct RetainedViewEntity {
    /// The main entity that this view corresponds to.
    pub main_entity: MainEntity,

    /// Another entity associated with the view entity.
    ///
    /// This is currently used for shadow cascades. If there are multiple
    /// cameras, each camera needs to have its own set of shadow cascades. Thus
    /// the light and subview index aren't themselves enough to uniquely
    /// identify a shadow cascade: we need the camera that the cascade is
    /// associated with as well. This entity stores that camera.
    ///
    /// If not present, this will be `MainEntity(Entity::PLACEHOLDER)`.
    pub auxiliary_entity: MainEntity,

    /// The index of the view corresponding to the entity.
    ///
    /// For example, for point lights that cast shadows, this is the index of
    /// the cubemap face (0 through 5 inclusive). For directional lights, this
    /// is the index of the cascade. See also [`ExtractedView`] for how cameras
    /// use subview indices.
    pub subview_index: u32,
}
236
237impl RetainedViewEntity {
238    /// Creates a new [`RetainedViewEntity`] from the given main world entity,
239    /// auxiliary main world entity, and subview index.
240    ///
241    /// See [`RetainedViewEntity::subview_index`] for an explanation of what
242    /// `auxiliary_entity` and `subview_index` are.
243    pub fn new(
244        main_entity: MainEntity,
245        auxiliary_entity: Option<MainEntity>,
246        subview_index: u32,
247    ) -> Self {
248        Self {
249            main_entity,
250            auxiliary_entity: auxiliary_entity.unwrap_or(Entity::PLACEHOLDER.into()),
251            subview_index,
252        }
253    }
254}
255
/// Describes a camera in the render world.
///
/// Each entity in the main world can potentially extract to multiple subviews,
/// each of which has a [`RetainedViewEntity::subview_index`]. For instance, 3D
/// cameras extract to both a 3D camera subview with index 0 and a special UI
/// subview with index 1. Likewise, point lights with shadows extract to 6
/// subviews, one for each side of the shadow cubemap.
#[derive(Component)]
pub struct ExtractedView {
    /// The entity in the main world corresponding to this render world view.
    pub retained_view_entity: RetainedViewEntity,
    /// Typically a column-major right-handed projection matrix, one of either:
    ///
    /// Perspective (infinite reverse z)
    /// ```text
    /// f = 1 / tan(fov_y_radians / 2)
    ///
    /// ⎡ f / aspect  0   0     0 ⎤
    /// ⎢          0  f   0     0 ⎥
    /// ⎢          0  0   0  near ⎥
    /// ⎣          0  0  -1     0 ⎦
    /// ```
    ///
    /// Orthographic
    /// ```text
    /// w = right - left
    /// h = top - bottom
    /// d = far - near
    /// cw = -right - left
    /// ch = -top - bottom
    ///
    /// ⎡ 2 / w      0      0   cw / w ⎤
    /// ⎢     0  2 / h      0   ch / h ⎥
    /// ⎢     0      0  1 / d  far / d ⎥
    /// ⎣     0      0      0        1 ⎦
    /// ```
    ///
    /// `clip_from_view[3][3] == 1.0` is the standard way to check if a projection is orthographic
    ///
    /// Glam matrices are column major, so for example getting the near plane of a perspective projection is `clip_from_view[3][2]`
    ///
    /// Custom projections are also possible however.
    pub clip_from_view: Mat4,
    /// The world-space transform of the view (the camera's pose).
    pub world_from_view: GlobalTransform,
    /// The view-projection matrix. When provided it is used instead of deriving it from
    /// `projection` and `transform` fields, which can be helpful in cases where numerical
    /// stability matters and there is a more direct way to derive the view-projection matrix.
    pub clip_from_world: Option<Mat4>,
    /// Whether this view renders to an intermediate HDR texture; see [`Hdr`].
    pub hdr: bool,
    /// Viewport rectangle as `uvec4(origin.x, origin.y, width, height)`.
    pub viewport: UVec4,
    /// Filmic color grading parameters for this view; see [`ColorGrading`].
    pub color_grading: ColorGrading,

    /// Whether to switch culling mode so that materials that request backface
    /// culling cull front faces, and vice versa.
    ///
    /// This is typically used for cameras that mirror the world that they
    /// render across a plane, because doing that flips the winding of each
    /// polygon.
    ///
    /// This setting doesn't affect materials that disable backface culling.
    pub invert_culling: bool,
}
319
320impl ExtractedView {
321    /// Creates a 3D rangefinder for a view
322    pub fn rangefinder3d(&self) -> ViewRangefinder3d {
323        ViewRangefinder3d::from_world_from_view(&self.world_from_view.affine())
324    }
325}
326
/// Configures filmic color grading parameters to adjust the image appearance.
///
/// Color grading is applied just before tonemapping for a given
/// [`Camera`](bevy_camera::Camera) entity, with the sole exception of the
/// `post_saturation` value in [`ColorGradingGlobal`], which is applied after
/// tonemapping.
///
/// This is packed into a [`ColorGradingUniform`] for the GPU via its `From`
/// impl.
#[derive(Component, Reflect, Debug, Default, Clone)]
#[reflect(Component, Default, Debug, Clone)]
pub struct ColorGrading {
    /// Filmic color grading values applied to the image as a whole (as opposed
    /// to individual sections, like shadows and highlights).
    pub global: ColorGradingGlobal,

    /// Color grading values that are applied to the darker parts of the image.
    ///
    /// The cutoff points can be customized with the
    /// [`ColorGradingGlobal::midtones_range`] field.
    pub shadows: ColorGradingSection,

    /// Color grading values that are applied to the parts of the image with
    /// intermediate brightness.
    ///
    /// The cutoff points can be customized with the
    /// [`ColorGradingGlobal::midtones_range`] field.
    pub midtones: ColorGradingSection,

    /// Color grading values that are applied to the lighter parts of the image.
    ///
    /// The cutoff points can be customized with the
    /// [`ColorGradingGlobal::midtones_range`] field.
    pub highlights: ColorGradingSection,
}
359
/// Filmic color grading values applied to the image as a whole (as opposed to
/// individual sections, like shadows and highlights).
#[derive(Clone, Debug, Reflect)]
#[reflect(Default, Clone)]
pub struct ColorGradingGlobal {
    /// Exposure value (EV) offset, measured in stops.
    ///
    /// Defaults to 0.0 (no adjustment).
    pub exposure: f32,

    /// An adjustment made to the [CIE 1931] chromaticity *x* value.
    ///
    /// Positive values make the colors redder. Negative values make the colors
    /// bluer. This has no effect on luminance (brightness).
    ///
    /// Defaults to 0.0 (no adjustment).
    ///
    /// [CIE 1931]: https://en.wikipedia.org/wiki/CIE_1931_color_space#CIE_xy_chromaticity_diagram_and_the_CIE_xyY_color_space
    pub temperature: f32,

    /// An adjustment made to the [CIE 1931] chromaticity *y* value.
    ///
    /// Positive values make the colors more magenta. Negative values make the
    /// colors greener. This has no effect on luminance (brightness).
    ///
    /// Defaults to 0.0 (no adjustment).
    ///
    /// [CIE 1931]: https://en.wikipedia.org/wiki/CIE_1931_color_space#CIE_xy_chromaticity_diagram_and_the_CIE_xyY_color_space
    pub tint: f32,

    /// An adjustment to the [hue], in radians.
    ///
    /// Adjusting this value changes the perceived colors in the image: red to
    /// yellow to green to blue, etc. It has no effect on the saturation or
    /// brightness of the colors.
    ///
    /// Defaults to 0.0 (no adjustment).
    ///
    /// [hue]: https://en.wikipedia.org/wiki/HSL_and_HSV#Formal_derivation
    pub hue: f32,

    /// Saturation adjustment applied after tonemapping.
    /// Values below 1.0 desaturate, with a value of 0.0 resulting in a grayscale image
    /// with luminance defined by ITU-R BT.709
    /// Values above 1.0 increase saturation.
    ///
    /// Defaults to 1.0 (no adjustment).
    pub post_saturation: f32,

    /// The luminance (brightness) ranges that are considered part of the
    /// "midtones" of the image.
    ///
    /// This affects which [`ColorGradingSection`]s apply to which colors. Note
    /// that the sections smoothly blend into one another, to avoid abrupt
    /// transitions.
    ///
    /// The default value is 0.2 to 0.7.
    pub midtones_range: Range<f32>,
}
409
/// The [`ColorGrading`] structure, packed into the most efficient form for the
/// GPU.
///
/// Built from a [`ColorGrading`] via its `From` impl. Each per-section `Vec3`
/// packs its values as (x = shadows, y = midtones, z = highlights).
#[derive(Clone, Copy, Debug, ShaderType)]
pub struct ColorGradingUniform {
    /// White-balance matrix derived from the global `temperature` and `tint`.
    pub balance: Mat3,
    /// Per-section saturation (shadows, midtones, highlights).
    pub saturation: Vec3,
    /// Per-section contrast (shadows, midtones, highlights).
    pub contrast: Vec3,
    /// Per-section gamma (shadows, midtones, highlights).
    pub gamma: Vec3,
    /// Per-section gain (shadows, midtones, highlights).
    pub gain: Vec3,
    /// Per-section lift (shadows, midtones, highlights).
    pub lift: Vec3,
    /// The start and end of [`ColorGradingGlobal::midtones_range`].
    pub midtone_range: Vec2,
    /// Exposure value (EV) offset, in stops.
    pub exposure: f32,
    /// Hue rotation, in radians.
    pub hue: f32,
    /// Saturation adjustment applied after tonemapping.
    pub post_saturation: f32,
}
425
/// A section of color grading values that can be selectively applied to
/// shadows, midtones, and highlights.
#[derive(Reflect, Debug, Copy, Clone, PartialEq)]
#[reflect(Clone, PartialEq)]
pub struct ColorGradingSection {
    /// Values below 1.0 desaturate, with a value of 0.0 resulting in a grayscale image
    /// with luminance defined by ITU-R BT.709.
    /// Values above 1.0 increase saturation.
    ///
    /// Defaults to 1.0 (no adjustment).
    pub saturation: f32,

    /// Adjusts the range of colors.
    ///
    /// A value of 1.0 applies no changes. Values below 1.0 move the colors more
    /// toward a neutral gray. Values above 1.0 spread the colors out away from
    /// the neutral gray.
    ///
    /// Defaults to 1.0.
    pub contrast: f32,

    /// A nonlinear luminance adjustment, mainly affecting the high end of the
    /// range.
    ///
    /// This is the *n* exponent in the standard [ASC CDL] formula for color
    /// correction:
    ///
    /// ```text
    /// out = (i × s + o)ⁿ
    /// ```
    ///
    /// Defaults to 1.0.
    ///
    /// [ASC CDL]: https://en.wikipedia.org/wiki/ASC_CDL#Combined_Function
    pub gamma: f32,

    /// A linear luminance adjustment, mainly affecting the middle part of the
    /// range.
    ///
    /// This is the *s* factor in the standard [ASC CDL] formula for color
    /// correction:
    ///
    /// ```text
    /// out = (i × s + o)ⁿ
    /// ```
    ///
    /// Defaults to 1.0.
    ///
    /// [ASC CDL]: https://en.wikipedia.org/wiki/ASC_CDL#Combined_Function
    pub gain: f32,

    /// A fixed luminance adjustment, mainly affecting the lower part of the
    /// range.
    ///
    /// This is the *o* term in the standard [ASC CDL] formula for color
    /// correction:
    ///
    /// ```text
    /// out = (i × s + o)ⁿ
    /// ```
    ///
    /// Defaults to 0.0.
    ///
    /// [ASC CDL]: https://en.wikipedia.org/wiki/ASC_CDL#Combined_Function
    pub lift: f32,
}
482
483impl Default for ColorGradingGlobal {
484    fn default() -> Self {
485        Self {
486            exposure: 0.0,
487            temperature: 0.0,
488            tint: 0.0,
489            hue: 0.0,
490            post_saturation: 1.0,
491            midtones_range: 0.2..0.7,
492        }
493    }
494}
495
496impl Default for ColorGradingSection {
497    fn default() -> Self {
498        Self {
499            saturation: 1.0,
500            contrast: 1.0,
501            gamma: 1.0,
502            gain: 1.0,
503            lift: 0.0,
504        }
505    }
506}
507
impl ColorGrading {
    /// Creates a new [`ColorGrading`] instance in which shadows, midtones, and
    /// highlights all have the same set of color grading values.
    pub fn with_identical_sections(
        global: ColorGradingGlobal,
        section: ColorGradingSection,
    ) -> ColorGrading {
        ColorGrading {
            global,
            highlights: section,
            midtones: section,
            shadows: section,
        }
    }

    /// Returns an iterator that visits the shadows, midtones, and highlights
    /// sections, in that order.
    pub fn all_sections(&self) -> impl Iterator<Item = &ColorGradingSection> {
        [&self.shadows, &self.midtones, &self.highlights].into_iter()
    }

    /// Returns an iterator that visits mutable references to the shadows,
    /// midtones, and highlights sections, in that order.
    pub fn all_sections_mut(&mut self) -> impl Iterator<Item = &mut ColorGradingSection> {
        [&mut self.shadows, &mut self.midtones, &mut self.highlights].into_iter()
    }
}
538
/// Per-view uniform data uploaded to the GPU each frame by
/// `prepare_view_uniforms` and stored in [`ViewUniforms`].
#[derive(Clone, ShaderType)]
pub struct ViewUniform {
    /// World → clip space transform for this view.
    pub clip_from_world: Mat4,
    /// World → clip space transform without temporal jitter.
    /// NOTE(review): the naming implies `clip_from_world` may include jitter
    /// from [`TemporalJitter`] — confirm in `prepare_view_uniforms`.
    pub unjittered_clip_from_world: Mat4,
    pub world_from_clip: Mat4,
    pub world_from_view: Mat4,
    pub view_from_world: Mat4,
    /// Typically a column-major right-handed projection matrix, one of either:
    ///
    /// Perspective (infinite reverse z)
    /// ```text
    /// f = 1 / tan(fov_y_radians / 2)
    ///
    /// ⎡ f / aspect  0   0     0 ⎤
    /// ⎢          0  f   0     0 ⎥
    /// ⎢          0  0   0  near ⎥
    /// ⎣          0  0  -1     0 ⎦
    /// ```
    ///
    /// Orthographic
    /// ```text
    /// w = right - left
    /// h = top - bottom
    /// d = far - near
    /// cw = -right - left
    /// ch = -top - bottom
    ///
    /// ⎡ 2 / w      0      0   cw / w ⎤
    /// ⎢     0  2 / h      0   ch / h ⎥
    /// ⎢     0      0  1 / d  far / d ⎥
    /// ⎣     0      0      0        1 ⎦
    /// ```
    ///
    /// `clip_from_view[3][3] == 1.0` is the standard way to check if a projection is orthographic
    ///
    /// Glam matrices are column major, so for example getting the near plane of a perspective projection is `clip_from_view[3][2]`
    ///
    /// Custom projections are also possible however.
    pub clip_from_view: Mat4,
    pub view_from_clip: Mat4,
    pub world_position: Vec3,
    pub exposure: f32,
    /// Viewport as `(x_origin, y_origin, width, height)`.
    pub viewport: Vec4,
    pub main_pass_viewport: Vec4,
    /// 6 world-space half spaces (normal: vec3, distance: f32) ordered left, right, top, bottom, near, far.
    /// The normal vectors point towards the interior of the frustum.
    /// A half space contains `p` if `normal.dot(p) + distance > 0.`
    pub frustum: [Vec4; 6],
    /// Color grading parameters, packed for the GPU.
    pub color_grading: ColorGradingUniform,
    pub mip_bias: f32,
    pub frame_count: u32,
}
592
/// Render-world resource holding the dynamic uniform buffer of
/// [`ViewUniform`]s for all views, written by `prepare_view_uniforms`.
#[derive(Resource)]
pub struct ViewUniforms {
    /// The GPU buffer; each view's data is addressed via its
    /// [`ViewUniformOffset`].
    pub uniforms: DynamicUniformBuffer<ViewUniform>,
}
597
598impl FromWorld for ViewUniforms {
599    fn from_world(world: &mut World) -> Self {
600        let mut uniforms = DynamicUniformBuffer::default();
601        uniforms.set_label(Some("view_uniforms_buffer"));
602
603        let render_device = world.resource::<RenderDevice>();
604        if render_device.limits().max_storage_buffers_per_shader_stage > 0 {
605            uniforms.add_usages(BufferUsages::STORAGE);
606        }
607
608        Self { uniforms }
609    }
610}
611
/// Component storing the dynamic offset of a view's [`ViewUniform`] within the
/// [`ViewUniforms`] buffer.
#[derive(Component)]
pub struct ViewUniformOffset {
    /// Dynamic offset into the [`ViewUniforms`] buffer for this view.
    pub offset: u32,
}
616
/// The textures a view renders into: a pair of swappable "main" textures that
/// post processes ping-pong between (see [`ViewTarget::post_process_write`]),
/// plus the final output color attachment.
#[derive(Component, Clone)]
pub struct ViewTarget {
    /// The two "main" textures (`a` and `b`).
    main_textures: MainTargetTextures,
    /// The format of the main textures; equals [`ViewTarget::TEXTURE_FORMAT_HDR`]
    /// when HDR rendering is enabled (see [`ViewTarget::is_hdr`]).
    main_texture_format: TextureFormat,
    /// 0 represents `main_textures.a`, 1 represents `main_textures.b`
    /// This is shared across view targets with the same render target
    main_texture: Arc<AtomicUsize>,
    /// The final output attachment; see [`ViewTarget::out_texture`].
    out_texture: OutputColorAttachment,
}
626
/// Contains [`OutputColorAttachment`] used for each target present on any view in the current
/// frame, after being prepared by [`prepare_view_attachments`]. Users that want to override
/// the default output color attachment for a specific target can do so by adding a
/// [`OutputColorAttachment`] to this resource before [`prepare_view_targets`] is called.
///
/// Keyed by [`NormalizedRenderTarget`].
#[derive(Resource, Default, Deref, DerefMut)]
pub struct ViewTargetAttachments(HashMap<NormalizedRenderTarget, OutputColorAttachment>);
633
/// The source/destination texture pair handed out by
/// [`ViewTarget::post_process_write`]: the caller reads from `source` and must
/// write the result to `destination`.
pub struct PostProcessWrite<'a> {
    /// View of the texture to read from (the current main texture).
    pub source: &'a TextureView,
    /// The texture backing `source`.
    pub source_texture: &'a Texture,
    /// View of the texture to write to (the other main texture).
    pub destination: &'a TextureView,
    /// The texture backing `destination`.
    pub destination_texture: &'a Texture,
}
640
impl From<ColorGrading> for ColorGradingUniform {
    /// Packs a [`ColorGrading`] component into its GPU form, precomputing the
    /// white-balance matrix on the CPU.
    fn from(component: ColorGrading) -> Self {
        // Compute the balance matrix that will be used to apply the white
        // balance adjustment to an RGB color. Our general approach will be to
        // convert both the color and the developer-supplied white point to the
        // LMS color space, apply the conversion, and then convert back.
        //
        // First, we start with the CIE 1931 *xy* values of the standard D65
        // illuminant:
        // <https://en.wikipedia.org/wiki/Standard_illuminant#D65_values>
        //
        // We then adjust them based on the developer's requested white balance.
        let white_point_xy = D65_XY + vec2(-component.global.temperature, component.global.tint);

        // Convert the white point from CIE 1931 *xy* to LMS. First, we convert to XYZ:
        //
        //                  Y          Y
        //     Y = 1    X = ─ x    Z = ─ (1 - x - y)
        //                  y          y
        //
        // Then we convert from XYZ to LMS color space, using the CAM16 matrix
        // from <https://en.wikipedia.org/wiki/LMS_color_space#Later_CIECAMs>:
        //
        //     ⎡ L ⎤   ⎡  0.401   0.650  -0.051 ⎤ ⎡ X ⎤
        //     ⎢ M ⎥ = ⎢ -0.250   1.204   0.046 ⎥ ⎢ Y ⎥
        //     ⎣ S ⎦   ⎣ -0.002   0.049   0.953 ⎦ ⎣ Z ⎦
        //
        // The following formula is just a simplification of the above.

        let white_point_lms = vec3(0.701634, 1.15856, -0.904175)
            + (vec3(-0.051461, 0.045854, 0.953127)
                + vec3(0.452749, -0.296122, -0.955206) * white_point_xy.x)
                / white_point_xy.y;

        // Now that we're in LMS space, perform the white point scaling.
        let white_point_adjustment = Mat3::from_diagonal(D65_LMS / white_point_lms);

        // Finally, combine the RGB → LMS → corrected LMS → corrected RGB
        // pipeline into a single 3×3 matrix.
        let balance = LMS_TO_RGB * white_point_adjustment * RGB_TO_LMS;

        Self {
            balance,
            // Each vec3 packs (x = shadows, y = midtones, z = highlights).
            saturation: vec3(
                component.shadows.saturation,
                component.midtones.saturation,
                component.highlights.saturation,
            ),
            contrast: vec3(
                component.shadows.contrast,
                component.midtones.contrast,
                component.highlights.contrast,
            ),
            gamma: vec3(
                component.shadows.gamma,
                component.midtones.gamma,
                component.highlights.gamma,
            ),
            gain: vec3(
                component.shadows.gain,
                component.midtones.gain,
                component.highlights.gain,
            ),
            lift: vec3(
                component.shadows.lift,
                component.midtones.lift,
                component.highlights.lift,
            ),
            midtone_range: vec2(
                component.global.midtones_range.start,
                component.global.midtones_range.end,
            ),
            exposure: component.global.exposure,
            hue: component.global.hue,
            post_saturation: component.global.post_saturation,
        }
    }
}
719
/// Add this component to a camera to disable *indirect mode*.
///
/// Indirect mode, automatically enabled on supported hardware, allows Bevy to
/// offload transform and cull operations to the GPU, reducing CPU overhead.
/// Doing this, however, reduces the amount of control that your app has over
/// instancing decisions. In certain circumstances, you may want to disable
/// indirect drawing so that your app can manually instance meshes as it sees
/// fit. See the `custom_shader_instancing` example.
///
/// The vast majority of applications will not need to use this component, as it
/// generally reduces rendering performance.
///
/// Note: This component should only be added when initially spawning a camera. Adding
/// or removing after spawn can result in unspecified behavior.
#[derive(Component, Default)]
pub struct NoIndirectDrawing;
736
737impl ViewTarget {
    /// The texture format (`Rgba16Float`) used for the intermediate main
    /// texture when HDR rendering is enabled; see [`Self::is_hdr`] and [`Hdr`].
    pub const TEXTURE_FORMAT_HDR: TextureFormat = TextureFormat::Rgba16Float;
739
740    /// Retrieve this target's main texture's color attachment.
741    pub fn get_color_attachment(&self) -> RenderPassColorAttachment<'_> {
742        if self.main_texture.load(Ordering::SeqCst) == 0 {
743            self.main_textures.a.get_attachment()
744        } else {
745            self.main_textures.b.get_attachment()
746        }
747    }
748
749    /// Retrieve this target's "unsampled" main texture's color attachment.
750    pub fn get_unsampled_color_attachment(&self) -> RenderPassColorAttachment<'_> {
751        if self.main_texture.load(Ordering::SeqCst) == 0 {
752            self.main_textures.a.get_unsampled_attachment()
753        } else {
754            self.main_textures.b.get_unsampled_attachment()
755        }
756    }
757
758    /// The "main" unsampled texture.
759    pub fn main_texture(&self) -> &Texture {
760        if self.main_texture.load(Ordering::SeqCst) == 0 {
761            &self.main_textures.a.texture.texture
762        } else {
763            &self.main_textures.b.texture.texture
764        }
765    }
766
767    /// The _other_ "main" unsampled texture.
768    /// In most cases you should use [`Self::main_texture`] instead and never this.
769    /// The textures will naturally be swapped when [`Self::post_process_write`] is called.
770    ///
771    /// A use case for this is to be able to prepare a bind group for all main textures
772    /// ahead of time.
773    pub fn main_texture_other(&self) -> &Texture {
774        if self.main_texture.load(Ordering::SeqCst) == 0 {
775            &self.main_textures.b.texture.texture
776        } else {
777            &self.main_textures.a.texture.texture
778        }
779    }
780
781    /// The "main" unsampled texture.
782    pub fn main_texture_view(&self) -> &TextureView {
783        if self.main_texture.load(Ordering::SeqCst) == 0 {
784            &self.main_textures.a.texture.default_view
785        } else {
786            &self.main_textures.b.texture.default_view
787        }
788    }
789
790    /// The _other_ "main" unsampled texture view.
791    /// In most cases you should use [`Self::main_texture_view`] instead and never this.
792    /// The textures will naturally be swapped when [`Self::post_process_write`] is called.
793    ///
794    /// A use case for this is to be able to prepare a bind group for all main textures
795    /// ahead of time.
796    pub fn main_texture_other_view(&self) -> &TextureView {
797        if self.main_texture.load(Ordering::SeqCst) == 0 {
798            &self.main_textures.b.texture.default_view
799        } else {
800            &self.main_textures.a.texture.default_view
801        }
802    }
803
804    /// The "main" sampled texture.
805    pub fn sampled_main_texture(&self) -> Option<&Texture> {
806        self.main_textures
807            .a
808            .resolve_target
809            .as_ref()
810            .map(|sampled| &sampled.texture)
811    }
812
813    /// The "main" sampled texture view.
814    pub fn sampled_main_texture_view(&self) -> Option<&TextureView> {
815        self.main_textures
816            .a
817            .resolve_target
818            .as_ref()
819            .map(|sampled| &sampled.default_view)
820    }
821
    /// The [`TextureFormat`] shared by both "main" textures (`a` and `b`).
    #[inline]
    pub fn main_texture_format(&self) -> TextureFormat {
        self.main_texture_format
    }
826
827    /// Returns `true` if and only if the main texture is [`Self::TEXTURE_FORMAT_HDR`]
828    #[inline]
829    pub fn is_hdr(&self) -> bool {
830        self.main_texture_format == ViewTarget::TEXTURE_FORMAT_HDR
831    }
832
    /// The final texture this view will render to.
    ///
    /// This is the view of the [`OutputColorAttachment`] resolved for this
    /// view's render target.
    #[inline]
    pub fn out_texture(&self) -> &TextureView {
        &self.out_texture.view
    }
838
    /// Returns a [`RenderPassColorAttachment`] for the final output texture.
    ///
    /// If `clear_color` is `Some`, it is used as the clear color for the
    /// attachment.
    pub fn out_texture_color_attachment(
        &self,
        clear_color: Option<LinearRgba>,
    ) -> RenderPassColorAttachment<'_> {
        self.out_texture.get_attachment(clear_color)
    }
845
    /// Whether the final texture this view will render to needs to be presented.
    ///
    /// Delegates to the [`OutputColorAttachment`]'s `needs_present`.
    pub fn needs_present(&self) -> bool {
        self.out_texture.needs_present()
    }
850
    /// The format of the final texture this view will render to.
    ///
    /// NOTE(review): this is the *view* format, which may differ from the
    /// underlying texture's format (e.g. an sRGB view of a non-sRGB texture).
    #[inline]
    pub fn out_texture_view_format(&self) -> TextureFormat {
        self.out_texture.view_format
    }
856
857    /// This will start a new "post process write", which assumes that the caller
858    /// will write the [`PostProcessWrite`]'s `source` to the `destination`.
859    ///
860    /// `source` is the "current" main texture. This will internally flip this
861    /// [`ViewTarget`]'s main texture to the `destination` texture, so the caller
862    /// _must_ ensure `source` is copied to `destination`, with or without modifications.
863    /// Failing to do so will cause the current main texture information to be lost.
864    pub fn post_process_write(&self) -> PostProcessWrite<'_> {
865        let old_is_a_main_texture = self.main_texture.fetch_xor(1, Ordering::SeqCst);
866        // if the old main texture is a, then the post processing must write from a to b
867        if old_is_a_main_texture == 0 {
868            self.main_textures.b.mark_as_cleared();
869            PostProcessWrite {
870                source: &self.main_textures.a.texture.default_view,
871                source_texture: &self.main_textures.a.texture.texture,
872                destination: &self.main_textures.b.texture.default_view,
873                destination_texture: &self.main_textures.b.texture.texture,
874            }
875        } else {
876            self.main_textures.a.mark_as_cleared();
877            PostProcessWrite {
878                source: &self.main_textures.b.texture.default_view,
879                source_texture: &self.main_textures.b.texture.texture,
880                destination: &self.main_textures.a.texture.default_view,
881                destination_texture: &self.main_textures.a.texture.texture,
882            }
883        }
884    }
885}
886
/// The depth texture used by a view, together with its cached
/// [`DepthAttachment`] for building render passes.
#[derive(Component)]
pub struct ViewDepthTexture {
    /// The underlying depth texture.
    pub texture: Texture,
    // The depth attachment (view + optional clear value); accessed via
    // `get_attachment` and `view`.
    attachment: DepthAttachment,
}
892
893impl ViewDepthTexture {
894    pub fn new(texture: CachedTexture, clear_value: Option<f32>) -> Self {
895        Self {
896            texture: texture.texture,
897            attachment: DepthAttachment::new(texture.default_view, clear_value),
898        }
899    }
900
901    pub fn get_attachment(&self, store: StoreOp) -> RenderPassDepthStencilAttachment<'_> {
902        self.attachment.get_attachment(store)
903    }
904
905    pub fn view(&self) -> &TextureView {
906        &self.attachment.view
907    }
908}
909
/// Writes a [`ViewUniform`] for every extracted view into the shared
/// [`ViewUniforms`] buffer and inserts the resulting [`ViewUniformOffset`]
/// on each view entity.
pub fn prepare_view_uniforms(
    mut commands: Commands,
    render_device: Res<RenderDevice>,
    render_queue: Res<RenderQueue>,
    mut view_uniforms: ResMut<ViewUniforms>,
    views: Query<(
        Entity,
        Option<&ExtractedCamera>,
        &ExtractedView,
        Option<&Frustum>,
        Option<&TemporalJitter>,
        Option<&MipBias>,
        Option<&MainPassResolutionOverride>,
    )>,
    frame_count: Res<FrameCount>,
) {
    let view_iter = views.iter();
    let view_count = view_iter.len();
    // Reserve buffer space for all views up front; bail out if the uniform
    // buffer writer can't be obtained.
    let Some(mut writer) =
        view_uniforms
            .uniforms
            .get_writer(view_count, &render_device, &render_queue)
    else {
        return;
    };
    for (
        entity,
        extracted_camera,
        extracted_view,
        frustum,
        temporal_jitter,
        mip_bias,
        resolution_override,
    ) in &views
    {
        // Viewport as (x, y, width, height). The main pass may run at a
        // different resolution than the final viewport when overridden.
        let viewport = extracted_view.viewport.as_vec4();
        let mut main_pass_viewport = viewport;
        if let Some(resolution_override) = resolution_override {
            main_pass_viewport.z = resolution_override.0.x as f32;
            main_pass_viewport.w = resolution_override.0.y as f32;
        }

        let unjittered_projection = extracted_view.clip_from_view;
        let mut clip_from_view = unjittered_projection;

        // Apply sub-pixel temporal jitter to the projection, scaled by the
        // main pass viewport size.
        if let Some(temporal_jitter) = temporal_jitter {
            temporal_jitter.jitter_projection(&mut clip_from_view, main_pass_viewport.zw());
        }

        let view_from_clip = clip_from_view.inverse();
        let world_from_view = extracted_view.world_from_view.to_matrix();
        let view_from_world = world_from_view.inverse();

        // When jittered, recompute clip_from_world from the jittered
        // projection; otherwise prefer the precomputed matrix if present.
        let clip_from_world = if temporal_jitter.is_some() {
            clip_from_view * view_from_world
        } else {
            extracted_view
                .clip_from_world
                .unwrap_or_else(|| clip_from_view * view_from_world)
        };

        // Map Frustum type to shader array<vec4<f32>, 6>
        let frustum = frustum
            .map(|frustum| frustum.half_spaces.map(|h| h.normal_d()))
            .unwrap_or([Vec4::ZERO; 6]);

        let view_uniforms = ViewUniformOffset {
            offset: writer.write(&ViewUniform {
                clip_from_world,
                unjittered_clip_from_world: unjittered_projection * view_from_world,
                world_from_clip: world_from_view * view_from_clip,
                world_from_view,
                view_from_world,
                clip_from_view,
                view_from_clip,
                world_position: extracted_view.world_from_view.translation(),
                exposure: extracted_camera
                    .map(|c| c.exposure)
                    .unwrap_or_else(|| Exposure::default().exposure()),
                viewport,
                main_pass_viewport,
                frustum,
                color_grading: extracted_view.color_grading.clone().into(),
                mip_bias: mip_bias.unwrap_or(&MipBias(0.0)).0,
                frame_count: frame_count.0,
            }),
        };

        commands.entity(entity).insert(view_uniforms);
    }
}
1001
/// The double-buffered pair of "main" color textures a [`ViewTarget`]
/// ping-pongs between during post processing.
#[derive(Clone)]
struct MainTargetTextures {
    /// The first of the two main color attachments.
    a: ColorAttachment,
    /// The second of the two main color attachments.
    b: ColorAttachment,
    /// 0 represents `main_textures.a`, 1 represents `main_textures.b`
    /// This is shared across view targets with the same render target
    main_texture: Arc<AtomicUsize>,
}
1010
1011/// Prepares the view target [`OutputColorAttachment`] for each view in the current frame.
1012pub fn prepare_view_attachments(
1013    windows: Res<ExtractedWindows>,
1014    images: Res<RenderAssets<GpuImage>>,
1015    manual_texture_views: Res<ManualTextureViews>,
1016    cameras: Query<&ExtractedCamera>,
1017    mut view_target_attachments: ResMut<ViewTargetAttachments>,
1018) {
1019    for camera in cameras.iter() {
1020        let Some(target) = &camera.target else {
1021            continue;
1022        };
1023
1024        match view_target_attachments.entry(target.clone()) {
1025            Entry::Occupied(_) => {}
1026            Entry::Vacant(entry) => {
1027                let Some(attachment) = target
1028                    .get_texture_view(&windows, &images, &manual_texture_views)
1029                    .cloned()
1030                    .zip(target.get_texture_view_format(&windows, &images, &manual_texture_views))
1031                    .map(|(view, format)| OutputColorAttachment::new(view.clone(), format))
1032                else {
1033                    continue;
1034                };
1035                entry.insert(attachment);
1036            }
1037        };
1038    }
1039}
1040
/// Clears the view target [`OutputColorAttachment`]s.
///
/// Emptying the cache ensures stale surface views are not reused and forces
/// attachments to be re-resolved by [`prepare_view_attachments`].
pub fn clear_view_attachments(mut view_target_attachments: ResMut<ViewTargetAttachments>) {
    view_target_attachments.clear();
}
1045
1046pub fn cleanup_view_targets_for_resize(
1047    mut commands: Commands,
1048    windows: Res<ExtractedWindows>,
1049    cameras: Query<(Entity, &ExtractedCamera), With<ViewTarget>>,
1050) {
1051    for (entity, camera) in &cameras {
1052        if let Some(NormalizedRenderTarget::Window(window_ref)) = &camera.target
1053            && let Some(window) = windows.get(&window_ref.entity())
1054            && (window.size_changed || window.present_mode_changed)
1055        {
1056            commands.entity(entity).remove::<ViewTarget>();
1057        }
1058    }
1059}
1060
/// Creates or refreshes the [`ViewTarget`] for every camera that has a valid
/// output attachment, allocating the double-buffered "main" textures and
/// sharing them between cameras that render to the same target.
pub fn prepare_view_targets(
    mut commands: Commands,
    clear_color_global: Res<ClearColor>,
    render_device: Res<RenderDevice>,
    mut texture_cache: ResMut<TextureCache>,
    cameras: Query<(
        Entity,
        &ExtractedCamera,
        &ExtractedView,
        &CameraMainTextureUsages,
        &Msaa,
    )>,
    view_target_attachments: Res<ViewTargetAttachments>,
) {
    // Cameras with the same (target, usages, hdr, msaa) key share one set of
    // main textures (and the atomic selecting the current one).
    let mut textures = <HashMap<_, _>>::default();
    for (entity, camera, view, texture_usage, msaa) in cameras.iter() {
        let (Some(target_size), Some(out_attachment)) = (
            camera.physical_target_size,
            camera
                .target
                .as_ref()
                .and_then(|target| view_target_attachments.get(target)),
        ) else {
            // If we can't find an output attachment we need to remove the ViewTarget
            // component to make sure the camera doesn't try rendering to an invalid
            // output attachment.
            commands.entity(entity).try_remove::<ViewTarget>();

            continue;
        };

        let main_texture_format = if view.hdr {
            ViewTarget::TEXTURE_FORMAT_HDR
        } else {
            TextureFormat::bevy_default()
        };

        // Per-camera clear color, falling back to the global `ClearColor`.
        let clear_color = match camera.clear_color {
            ClearColorConfig::Custom(color) => Some(color),
            ClearColorConfig::None => None,
            _ => Some(clear_color_global.0),
        };

        let (a, b, sampled, main_texture) = textures
            .entry((camera.target.clone(), texture_usage.0, view.hdr, msaa))
            .or_insert_with(|| {
                // Shared single-sampled descriptor for both main textures;
                // only the label differs between `a` and `b`.
                let descriptor = TextureDescriptor {
                    label: None,
                    size: target_size.to_extents(),
                    mip_level_count: 1,
                    sample_count: 1,
                    dimension: TextureDimension::D2,
                    format: main_texture_format,
                    usage: texture_usage.0,
                    // Allow sRGB views to be created over non-sRGB textures.
                    view_formats: match main_texture_format {
                        TextureFormat::Bgra8Unorm => &[TextureFormat::Bgra8UnormSrgb],
                        TextureFormat::Rgba8Unorm => &[TextureFormat::Rgba8UnormSrgb],
                        _ => &[],
                    },
                };
                let a = texture_cache.get(
                    &render_device,
                    TextureDescriptor {
                        label: Some("main_texture_a"),
                        ..descriptor
                    },
                );
                let b = texture_cache.get(
                    &render_device,
                    TextureDescriptor {
                        label: Some("main_texture_b"),
                        ..descriptor
                    },
                );
                // With MSAA, allocate a multisampled render-attachment texture
                // that resolves into the single-sampled main textures.
                let sampled = if msaa.samples() > 1 {
                    let sampled = texture_cache.get(
                        &render_device,
                        TextureDescriptor {
                            label: Some("main_texture_sampled"),
                            size: target_size.to_extents(),
                            mip_level_count: 1,
                            sample_count: msaa.samples(),
                            dimension: TextureDimension::D2,
                            format: main_texture_format,
                            usage: TextureUsages::RENDER_ATTACHMENT,
                            view_formats: descriptor.view_formats,
                        },
                    );
                    Some(sampled)
                } else {
                    None
                };
                // 0 selects `a` initially; shared so all views of this target
                // stay in sync when `post_process_write` flips it.
                let main_texture = Arc::new(AtomicUsize::new(0));
                (a, b, sampled, main_texture)
            });

        let converted_clear_color = clear_color.map(Into::into);

        let main_textures = MainTargetTextures {
            a: ColorAttachment::new(a.clone(), sampled.clone(), None, converted_clear_color),
            b: ColorAttachment::new(b.clone(), sampled.clone(), None, converted_clear_color),
            main_texture: main_texture.clone(),
        };

        commands.entity(entity).insert(ViewTarget {
            main_texture: main_textures.main_texture.clone(),
            main_textures,
            main_texture_format,
            out_texture: out_attachment.clone(),
        });
    }
}