bevy_render/view/
mod.rs

1pub mod visibility;
2pub mod window;
3
4use bevy_camera::{
5    primitives::Frustum, CameraMainTextureUsages, ClearColor, ClearColorConfig, Exposure,
6    MainPassResolutionOverride, NormalizedRenderTarget,
7};
8use bevy_diagnostic::FrameCount;
9pub use visibility::*;
10pub use window::*;
11
12use crate::{
13    camera::{ExtractedCamera, MipBias, NormalizedRenderTargetExt as _, TemporalJitter},
14    experimental::occlusion_culling::OcclusionCulling,
15    extract_component::ExtractComponentPlugin,
16    render_asset::RenderAssets,
17    render_phase::ViewRangefinder3d,
18    render_resource::{DynamicUniformBuffer, ShaderType, Texture, TextureView},
19    renderer::{RenderDevice, RenderQueue},
20    sync_world::MainEntity,
21    texture::{
22        CachedTexture, ColorAttachment, DepthAttachment, GpuImage, ManualTextureViews,
23        OutputColorAttachment, TextureCache,
24    },
25    Render, RenderApp, RenderSystems,
26};
27use alloc::sync::Arc;
28use bevy_app::{App, Plugin};
29use bevy_color::LinearRgba;
30use bevy_derive::{Deref, DerefMut};
31use bevy_ecs::prelude::*;
32use bevy_image::{BevyDefault as _, ToExtents};
33use bevy_math::{mat3, vec2, vec3, Mat3, Mat4, UVec4, Vec2, Vec3, Vec4, Vec4Swizzles};
34use bevy_platform::collections::{hash_map::Entry, HashMap};
35use bevy_reflect::{std_traits::ReflectDefault, Reflect};
36use bevy_render_macros::ExtractComponent;
37use bevy_shader::load_shader_library;
38use bevy_transform::components::GlobalTransform;
39use core::{
40    ops::Range,
41    sync::atomic::{AtomicUsize, Ordering},
42};
43use wgpu::{
44    BufferUsages, RenderPassColorAttachment, RenderPassDepthStencilAttachment, StoreOp,
45    TextureDescriptor, TextureDimension, TextureFormat, TextureUsages,
46};
47
/// The matrix that converts from the RGB to the LMS color space.
///
/// To derive this, first we convert from RGB to [CIE 1931 XYZ]:
///
/// ```text
/// ⎡ X ⎤   ⎡ 0.490  0.310  0.200 ⎤ ⎡ R ⎤
/// ⎢ Y ⎥ = ⎢ 0.177  0.812  0.011 ⎥ ⎢ G ⎥
/// ⎣ Z ⎦   ⎣ 0.000  0.010  0.990 ⎦ ⎣ B ⎦
/// ```
///
/// Then we convert to LMS according to the [CAM16 standard matrix]:
///
/// ```text
/// ⎡ L ⎤   ⎡  0.401   0.650  -0.051 ⎤ ⎡ X ⎤
/// ⎢ M ⎥ = ⎢ -0.250   1.204   0.046 ⎥ ⎢ Y ⎥
/// ⎣ S ⎦   ⎣ -0.002   0.049   0.953 ⎦ ⎣ Z ⎦
/// ```
///
/// The resulting matrix is just the concatenation of these two matrices, to do
/// the conversion in one step.
///
/// Note that glam's `mat3` takes *columns*, so each `vec3` below is a column
/// of the combined matrix, not a row of the matrices above.
///
/// [CIE 1931 XYZ]: https://en.wikipedia.org/wiki/CIE_1931_color_space
/// [CAM16 standard matrix]: https://en.wikipedia.org/wiki/LMS_color_space
static RGB_TO_LMS: Mat3 = mat3(
    vec3(0.311692, 0.0905138, 0.00764433),
    vec3(0.652085, 0.901341, 0.0486554),
    vec3(0.0362225, 0.00814478, 0.943700),
);
76
/// The inverse of the [`RGB_TO_LMS`] matrix, converting from the LMS color
/// space back to RGB.
///
/// As with [`RGB_TO_LMS`], each `vec3` argument is a *column* of the matrix.
static LMS_TO_RGB: Mat3 = mat3(
    vec3(4.06305, -0.40791, -0.0118812),
    vec3(-2.93241, 1.40437, -0.0486532),
    vec3(-0.130646, 0.00353630, 1.0605344),
);
84
/// The [CIE 1931] *xy* chromaticity coordinates of the [D65 white point].
///
/// This is the baseline white point that the user's `temperature` and `tint`
/// color grading adjustments are applied relative to.
///
/// [CIE 1931]: https://en.wikipedia.org/wiki/CIE_1931_color_space
/// [D65 white point]: https://en.wikipedia.org/wiki/Standard_illuminant#D65_values
static D65_XY: Vec2 = vec2(0.31272, 0.32903);
90
/// The [D65 white point] in [LMS color space].
///
/// Used as the reference white when computing the white-balance scaling in
/// the [`ColorGradingUniform`] conversion.
///
/// [LMS color space]: https://en.wikipedia.org/wiki/LMS_color_space
/// [D65 white point]: https://en.wikipedia.org/wiki/Standard_illuminant#D65_values
static D65_LMS: Vec3 = vec3(0.975538, 1.01648, 1.08475);
96
/// The plugin that registers view extraction, view target preparation, and
/// view uniform systems with the [`RenderApp`].
pub struct ViewPlugin;
98
impl Plugin for ViewPlugin {
    fn build(&self, app: &mut App) {
        // Make the `view.wgsl` shader library available to other shaders.
        load_shader_library!(app, "view.wgsl");

        app
            // NOTE: windows.is_changed() handles cases where a window was resized
            .add_plugins((
                ExtractComponentPlugin::<Hdr>::default(),
                ExtractComponentPlugin::<Msaa>::default(),
                ExtractComponentPlugin::<OcclusionCulling>::default(),
                RenderVisibilityRangePlugin,
            ));

        if let Some(render_app) = app.get_sub_app_mut(RenderApp) {
            render_app.add_systems(
                Render,
                (
                    // `TextureView`s need to be dropped before reconfiguring window surfaces.
                    clear_view_attachments
                        .in_set(RenderSystems::ManageViews)
                        .before(create_surfaces),
                    // Attachments must exist before view targets are built from
                    // them, and depend on window surfaces being prepared.
                    prepare_view_attachments
                        .in_set(RenderSystems::ManageViews)
                        .before(prepare_view_targets)
                        .after(prepare_windows),
                    prepare_view_targets
                        .in_set(RenderSystems::ManageViews)
                        .after(prepare_windows)
                        .after(crate::render_asset::prepare_assets::<GpuImage>)
                        .ambiguous_with(crate::camera::sort_cameras), // doesn't use `sorted_camera_index_for_target`
                    prepare_view_uniforms.in_set(RenderSystems::PrepareResources),
                ),
            );
        }
    }

    fn finish(&self, app: &mut App) {
        // `ViewUniforms` reads the `RenderDevice` in its `FromWorld` impl, so
        // these resources are initialized in `finish`, after renderer setup,
        // rather than in `build`.
        if let Some(render_app) = app.get_sub_app_mut(RenderApp) {
            render_app
                .init_resource::<ViewUniforms>()
                .init_resource::<ViewTargetAttachments>();
        }
    }
}
143
/// Component for configuring the number of samples for [Multi-Sample Anti-Aliasing](https://en.wikipedia.org/wiki/Multisample_anti-aliasing)
/// for a [`Camera`](bevy_camera::Camera).
///
/// Defaults to 4 samples. A higher number of samples results in smoother edges.
///
/// Some advanced rendering features may require that MSAA is disabled.
///
/// Note that the web currently only supports 1 or 4 samples.
///
/// Each variant's discriminant is its sample count, which is what allows
/// [`Msaa::samples`] to be a simple cast.
#[derive(
    Component,
    Default,
    Clone,
    Copy,
    ExtractComponent,
    Reflect,
    PartialEq,
    PartialOrd,
    Eq,
    Hash,
    Debug,
)]
#[reflect(Component, Default, PartialEq, Hash, Debug)]
pub enum Msaa {
    Off = 1,
    Sample2 = 2,
    #[default]
    Sample4 = 4,
    Sample8 = 8,
}
173
174impl Msaa {
175    #[inline]
176    pub fn samples(&self) -> u32 {
177        *self as u32
178    }
179
180    pub fn from_samples(samples: u32) -> Self {
181        match samples {
182            1 => Msaa::Off,
183            2 => Msaa::Sample2,
184            4 => Msaa::Sample4,
185            8 => Msaa::Sample8,
186            _ => panic!("Unsupported MSAA sample count: {samples}"),
187        }
188    }
189}
190
/// If this component is added to a camera, the camera will use an intermediate "high dynamic range" render texture.
/// This allows rendering with a wider range of lighting values. However, this does *not* affect
/// whether the camera will render with hdr display output (which bevy does not support currently)
/// and only affects the intermediate render texture.
///
/// See also [`ViewTarget::TEXTURE_FORMAT_HDR`], the texture format used for
/// HDR main textures.
#[derive(
    Component, Default, Copy, Clone, ExtractComponent, Reflect, PartialEq, Eq, Hash, Debug,
)]
#[reflect(Component, Default, PartialEq, Hash, Debug)]
pub struct Hdr;
200
/// An identifier for a view that is stable across frames.
///
/// We can't use [`Entity`] for this because render world entities aren't
/// stable, and we can't use just [`MainEntity`] because some main world views
/// extract to multiple render world views. For example, a directional light
/// extracts to one render world view per cascade, and a point light extracts to
/// one render world view per cubemap face. So we pair the main entity with an
/// *auxiliary entity* and a *subview index*, which *together* uniquely identify
/// a view in the render world in a way that's stable from frame to frame.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct RetainedViewEntity {
    /// The main entity that this view corresponds to.
    pub main_entity: MainEntity,

    /// Another entity associated with the view entity.
    ///
    /// This is currently used for shadow cascades. If there are multiple
    /// cameras, each camera needs to have its own set of shadow cascades. Thus
    /// the light and subview index aren't themselves enough to uniquely
    /// identify a shadow cascade: we need the camera that the cascade is
    /// associated with as well. This entity stores that camera.
    ///
    /// If not present, this will be `MainEntity(Entity::PLACEHOLDER)`.
    pub auxiliary_entity: MainEntity,

    /// The index of the view corresponding to the entity.
    ///
    /// For example, for point lights that cast shadows, this is the index of
    /// the cubemap face (0 through 5 inclusive). For directional lights, this
    /// is the index of the cascade. For 3D cameras, the special UI subview
    /// uses index 1 (see [`ExtractedView`]).
    pub subview_index: u32,
}
233
234impl RetainedViewEntity {
235    /// Creates a new [`RetainedViewEntity`] from the given main world entity,
236    /// auxiliary main world entity, and subview index.
237    ///
238    /// See [`RetainedViewEntity::subview_index`] for an explanation of what
239    /// `auxiliary_entity` and `subview_index` are.
240    pub fn new(
241        main_entity: MainEntity,
242        auxiliary_entity: Option<MainEntity>,
243        subview_index: u32,
244    ) -> Self {
245        Self {
246            main_entity,
247            auxiliary_entity: auxiliary_entity.unwrap_or(Entity::PLACEHOLDER.into()),
248            subview_index,
249        }
250    }
251}
252
/// Describes a camera in the render world.
///
/// Each entity in the main world can potentially extract to multiple subviews,
/// each of which has a [`RetainedViewEntity::subview_index`]. For instance, 3D
/// cameras extract to both a 3D camera subview with index 0 and a special UI
/// subview with index 1. Likewise, point lights with shadows extract to 6
/// subviews, one for each side of the shadow cubemap.
#[derive(Component)]
pub struct ExtractedView {
    /// The entity in the main world corresponding to this render world view.
    pub retained_view_entity: RetainedViewEntity,
    /// Typically a column-major right-handed projection matrix, one of either:
    ///
    /// Perspective (infinite reverse z)
    /// ```text
    /// f = 1 / tan(fov_y_radians / 2)
    ///
    /// ⎡ f / aspect  0   0     0 ⎤
    /// ⎢          0  f   0     0 ⎥
    /// ⎢          0  0   0  near ⎥
    /// ⎣          0  0  -1     0 ⎦
    /// ```
    ///
    /// Orthographic
    /// ```text
    /// w = right - left
    /// h = top - bottom
    /// d = far - near
    /// cw = -right - left
    /// ch = -top - bottom
    ///
    /// ⎡ 2 / w      0      0   cw / w ⎤
    /// ⎢     0  2 / h      0   ch / h ⎥
    /// ⎢     0      0  1 / d  far / d ⎥
    /// ⎣     0      0      0        1 ⎦
    /// ```
    ///
    /// `clip_from_view[3][3] == 1.0` is the standard way to check if a projection is orthographic
    ///
    /// Glam matrices are column major, so for example getting the near plane of a perspective projection is `clip_from_view[3][2]`
    ///
    /// Custom projections are also possible however.
    pub clip_from_view: Mat4,
    /// The view's transform in world space.
    pub world_from_view: GlobalTransform,
    /// The view-projection matrix. When provided it is used instead of deriving it from
    /// `projection` and `transform` fields, which can be helpful in cases where numerical
    /// stability matters and there is a more direct way to derive the view-projection matrix.
    pub clip_from_world: Option<Mat4>,
    /// Whether this view renders to an HDR intermediate texture.
    pub hdr: bool,
    /// uvec4(origin.x, origin.y, width, height)
    pub viewport: UVec4,
    /// The color grading settings to apply to this view.
    pub color_grading: ColorGrading,
}
306
307impl ExtractedView {
308    /// Creates a 3D rangefinder for a view
309    pub fn rangefinder3d(&self) -> ViewRangefinder3d {
310        ViewRangefinder3d::from_world_from_view(&self.world_from_view.affine())
311    }
312}
313
/// Configures filmic color grading parameters to adjust the image appearance.
///
/// Color grading is applied just before tonemapping for a given
/// [`Camera`](bevy_camera::Camera) entity, with the sole exception of the
/// `post_saturation` value in [`ColorGradingGlobal`], which is applied after
/// tonemapping.
///
/// Use [`ColorGrading::with_identical_sections`] to construct a value whose
/// three sections share the same settings.
#[derive(Component, Reflect, Debug, Default, Clone)]
#[reflect(Component, Default, Debug, Clone)]
pub struct ColorGrading {
    /// Filmic color grading values applied to the image as a whole (as opposed
    /// to individual sections, like shadows and highlights).
    pub global: ColorGradingGlobal,

    /// Color grading values that are applied to the darker parts of the image.
    ///
    /// The cutoff points can be customized with the
    /// [`ColorGradingGlobal::midtones_range`] field.
    pub shadows: ColorGradingSection,

    /// Color grading values that are applied to the parts of the image with
    /// intermediate brightness.
    ///
    /// The cutoff points can be customized with the
    /// [`ColorGradingGlobal::midtones_range`] field.
    pub midtones: ColorGradingSection,

    /// Color grading values that are applied to the lighter parts of the image.
    ///
    /// The cutoff points can be customized with the
    /// [`ColorGradingGlobal::midtones_range`] field.
    pub highlights: ColorGradingSection,
}
346
/// Filmic color grading values applied to the image as a whole (as opposed to
/// individual sections, like shadows and highlights).
#[derive(Clone, Debug, Reflect)]
#[reflect(Default, Clone)]
pub struct ColorGradingGlobal {
    /// Exposure value (EV) offset, measured in stops.
    pub exposure: f32,

    /// An adjustment made to the [CIE 1931] chromaticity *x* value.
    ///
    /// Positive values make the colors redder. Negative values make the colors
    /// bluer. This has no effect on luminance (brightness).
    ///
    /// Together with [`Self::tint`], this shifts the white point used to build
    /// the white-balance matrix in [`ColorGradingUniform`].
    ///
    /// [CIE 1931]: https://en.wikipedia.org/wiki/CIE_1931_color_space#CIE_xy_chromaticity_diagram_and_the_CIE_xyY_color_space
    pub temperature: f32,

    /// An adjustment made to the [CIE 1931] chromaticity *y* value.
    ///
    /// Positive values make the colors more magenta. Negative values make the
    /// colors greener. This has no effect on luminance (brightness).
    ///
    /// [CIE 1931]: https://en.wikipedia.org/wiki/CIE_1931_color_space#CIE_xy_chromaticity_diagram_and_the_CIE_xyY_color_space
    pub tint: f32,

    /// An adjustment to the [hue], in radians.
    ///
    /// Adjusting this value changes the perceived colors in the image: red to
    /// yellow to green to blue, etc. It has no effect on the saturation or
    /// brightness of the colors.
    ///
    /// [hue]: https://en.wikipedia.org/wiki/HSL_and_HSV#Formal_derivation
    pub hue: f32,

    /// Saturation adjustment applied after tonemapping.
    /// Values below 1.0 desaturate, with a value of 0.0 resulting in a grayscale image
    /// with luminance defined by ITU-R BT.709
    /// Values above 1.0 increase saturation.
    pub post_saturation: f32,

    /// The luminance (brightness) ranges that are considered part of the
    /// "midtones" of the image.
    ///
    /// This affects which [`ColorGradingSection`]s apply to which colors. Note
    /// that the sections smoothly blend into one another, to avoid abrupt
    /// transitions.
    ///
    /// The default value is 0.2 to 0.7.
    pub midtones_range: Range<f32>,
}
396
/// The [`ColorGrading`] structure, packed into the most efficient form for the
/// GPU.
///
/// Built from [`ColorGrading`] via its `From` implementation; the per-section
/// values are packed into `Vec3`s ordered (shadows, midtones, highlights).
#[derive(Clone, Copy, Debug, ShaderType)]
pub struct ColorGradingUniform {
    /// The 3×3 white-balance matrix, precomputed on the CPU from the
    /// `temperature` and `tint` settings (RGB → LMS → scale → RGB).
    pub balance: Mat3,
    /// Per-section saturation: (shadows, midtones, highlights).
    pub saturation: Vec3,
    /// Per-section contrast: (shadows, midtones, highlights).
    pub contrast: Vec3,
    /// Per-section gamma: (shadows, midtones, highlights).
    pub gamma: Vec3,
    /// Per-section gain: (shadows, midtones, highlights).
    pub gain: Vec3,
    /// Per-section lift: (shadows, midtones, highlights).
    pub lift: Vec3,
    /// The (start, end) of [`ColorGradingGlobal::midtones_range`].
    pub midtone_range: Vec2,
    /// See [`ColorGradingGlobal::exposure`].
    pub exposure: f32,
    /// See [`ColorGradingGlobal::hue`].
    pub hue: f32,
    /// See [`ColorGradingGlobal::post_saturation`].
    pub post_saturation: f32,
}
412
/// A section of color grading values that can be selectively applied to
/// shadows, midtones, and highlights.
///
/// The [`Default`] value leaves colors unchanged: all multiplicative values
/// are 1.0 and the additive [`Self::lift`] term is 0.0.
#[derive(Reflect, Debug, Copy, Clone, PartialEq)]
#[reflect(Clone, PartialEq)]
pub struct ColorGradingSection {
    /// Values below 1.0 desaturate, with a value of 0.0 resulting in a grayscale image
    /// with luminance defined by ITU-R BT.709.
    /// Values above 1.0 increase saturation.
    pub saturation: f32,

    /// Adjusts the range of colors.
    ///
    /// A value of 1.0 applies no changes. Values below 1.0 move the colors more
    /// toward a neutral gray. Values above 1.0 spread the colors out away from
    /// the neutral gray.
    pub contrast: f32,

    /// A nonlinear luminance adjustment, mainly affecting the high end of the
    /// range.
    ///
    /// This is the *n* exponent in the standard [ASC CDL] formula for color
    /// correction:
    ///
    /// ```text
    /// out = (i × s + o)ⁿ
    /// ```
    ///
    /// [ASC CDL]: https://en.wikipedia.org/wiki/ASC_CDL#Combined_Function
    pub gamma: f32,

    /// A linear luminance adjustment, mainly affecting the middle part of the
    /// range.
    ///
    /// This is the *s* factor in the standard [ASC CDL] formula for color
    /// correction:
    ///
    /// ```text
    /// out = (i × s + o)ⁿ
    /// ```
    ///
    /// [ASC CDL]: https://en.wikipedia.org/wiki/ASC_CDL#Combined_Function
    pub gain: f32,

    /// A fixed luminance adjustment, mainly affecting the lower part of the
    /// range.
    ///
    /// This is the *o* term in the standard [ASC CDL] formula for color
    /// correction:
    ///
    /// ```text
    /// out = (i × s + o)ⁿ
    /// ```
    ///
    /// [ASC CDL]: https://en.wikipedia.org/wiki/ASC_CDL#Combined_Function
    pub lift: f32,
}
469
470impl Default for ColorGradingGlobal {
471    fn default() -> Self {
472        Self {
473            exposure: 0.0,
474            temperature: 0.0,
475            tint: 0.0,
476            hue: 0.0,
477            post_saturation: 1.0,
478            midtones_range: 0.2..0.7,
479        }
480    }
481}
482
483impl Default for ColorGradingSection {
484    fn default() -> Self {
485        Self {
486            saturation: 1.0,
487            contrast: 1.0,
488            gamma: 1.0,
489            gain: 1.0,
490            lift: 0.0,
491        }
492    }
493}
494
495impl ColorGrading {
496    /// Creates a new [`ColorGrading`] instance in which shadows, midtones, and
497    /// highlights all have the same set of color grading values.
498    pub fn with_identical_sections(
499        global: ColorGradingGlobal,
500        section: ColorGradingSection,
501    ) -> ColorGrading {
502        ColorGrading {
503            global,
504            highlights: section,
505            midtones: section,
506            shadows: section,
507        }
508    }
509
510    /// Returns an iterator that visits the shadows, midtones, and highlights
511    /// sections, in that order.
512    pub fn all_sections(&self) -> impl Iterator<Item = &ColorGradingSection> {
513        [&self.shadows, &self.midtones, &self.highlights].into_iter()
514    }
515
516    /// Applies the given mutating function to the shadows, midtones, and
517    /// highlights sections, in that order.
518    ///
519    /// Returns an array composed of the results of such evaluation, in that
520    /// order.
521    pub fn all_sections_mut(&mut self) -> impl Iterator<Item = &mut ColorGradingSection> {
522        [&mut self.shadows, &mut self.midtones, &mut self.highlights].into_iter()
523    }
524}
525
/// GPU-compatible uniform data describing a single view.
///
/// Matrix fields follow Bevy's `x_from_y` naming convention: a matrix named
/// `x_from_y` transforms points from `y` space into `x` space.
#[derive(Clone, ShaderType)]
pub struct ViewUniform {
    /// The combined view-projection matrix.
    pub clip_from_world: Mat4,
    /// The view-projection matrix without temporal jitter applied.
    pub unjittered_clip_from_world: Mat4,
    /// The inverse of [`Self::clip_from_world`].
    pub world_from_clip: Mat4,
    pub world_from_view: Mat4,
    pub view_from_world: Mat4,
    /// Typically a column-major right-handed projection matrix, one of either:
    ///
    /// Perspective (infinite reverse z)
    /// ```text
    /// f = 1 / tan(fov_y_radians / 2)
    ///
    /// ⎡ f / aspect  0   0     0 ⎤
    /// ⎢          0  f   0     0 ⎥
    /// ⎢          0  0   0  near ⎥
    /// ⎣          0  0  -1     0 ⎦
    /// ```
    ///
    /// Orthographic
    /// ```text
    /// w = right - left
    /// h = top - bottom
    /// d = far - near
    /// cw = -right - left
    /// ch = -top - bottom
    ///
    /// ⎡ 2 / w      0      0   cw / w ⎤
    /// ⎢     0  2 / h      0   ch / h ⎥
    /// ⎢     0      0  1 / d  far / d ⎥
    /// ⎣     0      0      0        1 ⎦
    /// ```
    ///
    /// `clip_from_view[3][3] == 1.0` is the standard way to check if a projection is orthographic
    ///
    /// Glam matrices are column major, so for example getting the near plane of a perspective projection is `clip_from_view[3][2]`
    ///
    /// Custom projections are also possible however.
    pub clip_from_view: Mat4,
    /// The inverse of [`Self::clip_from_view`].
    pub view_from_clip: Mat4,
    /// The world-space position of the view.
    pub world_position: Vec3,
    pub exposure: f32,
    /// viewport(x_origin, y_origin, width, height)
    pub viewport: Vec4,
    pub main_pass_viewport: Vec4,
    /// 6 world-space half spaces (normal: vec3, distance: f32) ordered left, right, top, bottom, near, far.
    /// The normal vectors point towards the interior of the frustum.
    /// A half space contains `p` if `normal.dot(p) + distance > 0.`
    pub frustum: [Vec4; 6],
    pub color_grading: ColorGradingUniform,
    pub mip_bias: f32,
    pub frame_count: u32,
}
579
/// The render-world resource holding the uniform buffer of [`ViewUniform`]s
/// for all views.
#[derive(Resource)]
pub struct ViewUniforms {
    /// The dynamic uniform buffer; each view's data is addressed via a
    /// [`ViewUniformOffset`].
    pub uniforms: DynamicUniformBuffer<ViewUniform>,
}
584
585impl FromWorld for ViewUniforms {
586    fn from_world(world: &mut World) -> Self {
587        let mut uniforms = DynamicUniformBuffer::default();
588        uniforms.set_label(Some("view_uniforms_buffer"));
589
590        let render_device = world.resource::<RenderDevice>();
591        if render_device.limits().max_storage_buffers_per_shader_stage > 0 {
592            uniforms.add_usages(BufferUsages::STORAGE);
593        }
594
595        Self { uniforms }
596    }
597}
598
/// The dynamic offset of a view's [`ViewUniform`] within the [`ViewUniforms`]
/// buffer.
#[derive(Component)]
pub struct ViewUniformOffset {
    pub offset: u32,
}
603
/// The textures a view renders into: a pair of swappable "main" textures used
/// during rendering and post-processing, and the final output attachment.
#[derive(Component)]
pub struct ViewTarget {
    main_textures: MainTargetTextures,
    /// The format of the main textures; equals
    /// [`ViewTarget::TEXTURE_FORMAT_HDR`] when the view is HDR.
    main_texture_format: TextureFormat,
    /// 0 represents `main_textures.a`, 1 represents `main_textures.b`
    /// This is shared across view targets with the same render target
    main_texture: Arc<AtomicUsize>,
    /// The final texture this view will render to.
    out_texture: OutputColorAttachment,
}
613
/// Contains [`OutputColorAttachment`] used for each target present on any view in the current
/// frame, after being prepared by [`prepare_view_attachments`]. Users that want to override
/// the default output color attachment for a specific target can do so by adding a
/// [`OutputColorAttachment`] to this resource before [`prepare_view_targets`] is called.
///
/// Keyed by [`NormalizedRenderTarget`].
#[derive(Resource, Default, Deref, DerefMut)]
pub struct ViewTargetAttachments(HashMap<NormalizedRenderTarget, OutputColorAttachment>);
620
/// The source and destination textures for a post-processing pass, as returned
/// by [`ViewTarget::post_process_write`].
pub struct PostProcessWrite<'a> {
    /// The texture view containing the current main texture contents to read.
    pub source: &'a TextureView,
    /// The texture backing [`Self::source`].
    pub source_texture: &'a Texture,
    /// The texture view that the pass must write its output to.
    pub destination: &'a TextureView,
    /// The texture backing [`Self::destination`].
    pub destination_texture: &'a Texture,
}
627
628impl From<ColorGrading> for ColorGradingUniform {
629    fn from(component: ColorGrading) -> Self {
630        // Compute the balance matrix that will be used to apply the white
631        // balance adjustment to an RGB color. Our general approach will be to
632        // convert both the color and the developer-supplied white point to the
633        // LMS color space, apply the conversion, and then convert back.
634        //
635        // First, we start with the CIE 1931 *xy* values of the standard D65
636        // illuminant:
637        // <https://en.wikipedia.org/wiki/Standard_illuminant#D65_values>
638        //
639        // We then adjust them based on the developer's requested white balance.
640        let white_point_xy = D65_XY + vec2(-component.global.temperature, component.global.tint);
641
642        // Convert the white point from CIE 1931 *xy* to LMS. First, we convert to XYZ:
643        //
644        //                  Y          Y
645        //     Y = 1    X = ─ x    Z = ─ (1 - x - y)
646        //                  y          y
647        //
648        // Then we convert from XYZ to LMS color space, using the CAM16 matrix
649        // from <https://en.wikipedia.org/wiki/LMS_color_space#Later_CIECAMs>:
650        //
651        //     ⎡ L ⎤   ⎡  0.401   0.650  -0.051 ⎤ ⎡ X ⎤
652        //     ⎢ M ⎥ = ⎢ -0.250   1.204   0.046 ⎥ ⎢ Y ⎥
653        //     ⎣ S ⎦   ⎣ -0.002   0.049   0.953 ⎦ ⎣ Z ⎦
654        //
655        // The following formula is just a simplification of the above.
656
657        let white_point_lms = vec3(0.701634, 1.15856, -0.904175)
658            + (vec3(-0.051461, 0.045854, 0.953127)
659                + vec3(0.452749, -0.296122, -0.955206) * white_point_xy.x)
660                / white_point_xy.y;
661
662        // Now that we're in LMS space, perform the white point scaling.
663        let white_point_adjustment = Mat3::from_diagonal(D65_LMS / white_point_lms);
664
665        // Finally, combine the RGB → LMS → corrected LMS → corrected RGB
666        // pipeline into a single 3×3 matrix.
667        let balance = LMS_TO_RGB * white_point_adjustment * RGB_TO_LMS;
668
669        Self {
670            balance,
671            saturation: vec3(
672                component.shadows.saturation,
673                component.midtones.saturation,
674                component.highlights.saturation,
675            ),
676            contrast: vec3(
677                component.shadows.contrast,
678                component.midtones.contrast,
679                component.highlights.contrast,
680            ),
681            gamma: vec3(
682                component.shadows.gamma,
683                component.midtones.gamma,
684                component.highlights.gamma,
685            ),
686            gain: vec3(
687                component.shadows.gain,
688                component.midtones.gain,
689                component.highlights.gain,
690            ),
691            lift: vec3(
692                component.shadows.lift,
693                component.midtones.lift,
694                component.highlights.lift,
695            ),
696            midtone_range: vec2(
697                component.global.midtones_range.start,
698                component.global.midtones_range.end,
699            ),
700            exposure: component.global.exposure,
701            hue: component.global.hue,
702            post_saturation: component.global.post_saturation,
703        }
704    }
705}
706
/// Add this component to a camera to disable *indirect mode*.
///
/// This is a marker component: it carries no data.
///
/// Indirect mode, automatically enabled on supported hardware, allows Bevy to
/// offload transform and cull operations to the GPU, reducing CPU overhead.
/// Doing this, however, reduces the amount of control that your app has over
/// instancing decisions. In certain circumstances, you may want to disable
/// indirect drawing so that your app can manually instance meshes as it sees
/// fit. See the `custom_shader_instancing` example.
///
/// The vast majority of applications will not need to use this component, as it
/// generally reduces rendering performance.
///
/// Note: This component should only be added when initially spawning a camera. Adding
/// or removing after spawn can result in unspecified behavior.
#[derive(Component, Default)]
pub struct NoIndirectDrawing;
723
724impl ViewTarget {
    /// The texture format used for the main textures when HDR rendering is
    /// enabled; see [`ViewTarget::is_hdr`].
    pub const TEXTURE_FORMAT_HDR: TextureFormat = TextureFormat::Rgba16Float;
726
727    /// Retrieve this target's main texture's color attachment.
728    pub fn get_color_attachment(&self) -> RenderPassColorAttachment<'_> {
729        if self.main_texture.load(Ordering::SeqCst) == 0 {
730            self.main_textures.a.get_attachment()
731        } else {
732            self.main_textures.b.get_attachment()
733        }
734    }
735
736    /// Retrieve this target's "unsampled" main texture's color attachment.
737    pub fn get_unsampled_color_attachment(&self) -> RenderPassColorAttachment<'_> {
738        if self.main_texture.load(Ordering::SeqCst) == 0 {
739            self.main_textures.a.get_unsampled_attachment()
740        } else {
741            self.main_textures.b.get_unsampled_attachment()
742        }
743    }
744
745    /// The "main" unsampled texture.
746    pub fn main_texture(&self) -> &Texture {
747        if self.main_texture.load(Ordering::SeqCst) == 0 {
748            &self.main_textures.a.texture.texture
749        } else {
750            &self.main_textures.b.texture.texture
751        }
752    }
753
754    /// The _other_ "main" unsampled texture.
755    /// In most cases you should use [`Self::main_texture`] instead and never this.
756    /// The textures will naturally be swapped when [`Self::post_process_write`] is called.
757    ///
758    /// A use case for this is to be able to prepare a bind group for all main textures
759    /// ahead of time.
760    pub fn main_texture_other(&self) -> &Texture {
761        if self.main_texture.load(Ordering::SeqCst) == 0 {
762            &self.main_textures.b.texture.texture
763        } else {
764            &self.main_textures.a.texture.texture
765        }
766    }
767
768    /// The "main" unsampled texture.
769    pub fn main_texture_view(&self) -> &TextureView {
770        if self.main_texture.load(Ordering::SeqCst) == 0 {
771            &self.main_textures.a.texture.default_view
772        } else {
773            &self.main_textures.b.texture.default_view
774        }
775    }
776
777    /// The _other_ "main" unsampled texture view.
778    /// In most cases you should use [`Self::main_texture_view`] instead and never this.
779    /// The textures will naturally be swapped when [`Self::post_process_write`] is called.
780    ///
781    /// A use case for this is to be able to prepare a bind group for all main textures
782    /// ahead of time.
783    pub fn main_texture_other_view(&self) -> &TextureView {
784        if self.main_texture.load(Ordering::SeqCst) == 0 {
785            &self.main_textures.b.texture.default_view
786        } else {
787            &self.main_textures.a.texture.default_view
788        }
789    }
790
791    /// The "main" sampled texture.
792    pub fn sampled_main_texture(&self) -> Option<&Texture> {
793        self.main_textures
794            .a
795            .resolve_target
796            .as_ref()
797            .map(|sampled| &sampled.texture)
798    }
799
800    /// The "main" sampled texture view.
801    pub fn sampled_main_texture_view(&self) -> Option<&TextureView> {
802        self.main_textures
803            .a
804            .resolve_target
805            .as_ref()
806            .map(|sampled| &sampled.default_view)
807    }
808
809    #[inline]
810    pub fn main_texture_format(&self) -> TextureFormat {
811        self.main_texture_format
812    }
813
814    /// Returns `true` if and only if the main texture is [`Self::TEXTURE_FORMAT_HDR`]
815    #[inline]
816    pub fn is_hdr(&self) -> bool {
817        self.main_texture_format == ViewTarget::TEXTURE_FORMAT_HDR
818    }
819
    /// The final texture this view will render to.
    ///
    /// This is the view of the [`OutputColorAttachment`] prepared for the
    /// camera's render target (a window, image, or manual texture view —
    /// see `prepare_view_attachments`).
    #[inline]
    pub fn out_texture(&self) -> &TextureView {
        &self.out_texture.view
    }
825
    /// Builds a [`RenderPassColorAttachment`] targeting [`Self::out_texture`].
    ///
    /// `clear_color` is forwarded to the underlying
    /// [`OutputColorAttachment::get_attachment`].
    pub fn out_texture_color_attachment(
        &self,
        clear_color: Option<LinearRgba>,
    ) -> RenderPassColorAttachment<'_> {
        self.out_texture.get_attachment(clear_color)
    }
832
    /// The format of the final texture this view will render to.
    ///
    /// NOTE(review): this is the format stored on the [`OutputColorAttachment`];
    /// `prepare_view_attachments` constructs it with `add_srgb_suffix()`, so this
    /// is presumably the sRGB-suffixed format — confirm in `OutputColorAttachment`.
    #[inline]
    pub fn out_texture_format(&self) -> TextureFormat {
        self.out_texture.format
    }
838
839    /// This will start a new "post process write", which assumes that the caller
840    /// will write the [`PostProcessWrite`]'s `source` to the `destination`.
841    ///
842    /// `source` is the "current" main texture. This will internally flip this
843    /// [`ViewTarget`]'s main texture to the `destination` texture, so the caller
844    /// _must_ ensure `source` is copied to `destination`, with or without modifications.
845    /// Failing to do so will cause the current main texture information to be lost.
846    pub fn post_process_write(&self) -> PostProcessWrite<'_> {
847        let old_is_a_main_texture = self.main_texture.fetch_xor(1, Ordering::SeqCst);
848        // if the old main texture is a, then the post processing must write from a to b
849        if old_is_a_main_texture == 0 {
850            self.main_textures.b.mark_as_cleared();
851            PostProcessWrite {
852                source: &self.main_textures.a.texture.default_view,
853                source_texture: &self.main_textures.a.texture.texture,
854                destination: &self.main_textures.b.texture.default_view,
855                destination_texture: &self.main_textures.b.texture.texture,
856            }
857        } else {
858            self.main_textures.a.mark_as_cleared();
859            PostProcessWrite {
860                source: &self.main_textures.b.texture.default_view,
861                source_texture: &self.main_textures.b.texture.texture,
862                destination: &self.main_textures.a.texture.default_view,
863                destination_texture: &self.main_textures.a.texture.texture,
864            }
865        }
866    }
867}
868
/// The depth texture used by a view, paired with its [`DepthAttachment`].
#[derive(Component)]
pub struct ViewDepthTexture {
    /// The underlying GPU depth texture.
    pub texture: Texture,
    // Private so the attachment's view and clear/load state are only reachable
    // through this type's accessors.
    attachment: DepthAttachment,
}
874
875impl ViewDepthTexture {
876    pub fn new(texture: CachedTexture, clear_value: Option<f32>) -> Self {
877        Self {
878            texture: texture.texture,
879            attachment: DepthAttachment::new(texture.default_view, clear_value),
880        }
881    }
882
883    pub fn get_attachment(&self, store: StoreOp) -> RenderPassDepthStencilAttachment<'_> {
884        self.attachment.get_attachment(store)
885    }
886
887    pub fn view(&self) -> &TextureView {
888        &self.attachment.view
889    }
890}
891
892pub fn prepare_view_uniforms(
893    mut commands: Commands,
894    render_device: Res<RenderDevice>,
895    render_queue: Res<RenderQueue>,
896    mut view_uniforms: ResMut<ViewUniforms>,
897    views: Query<(
898        Entity,
899        Option<&ExtractedCamera>,
900        &ExtractedView,
901        Option<&Frustum>,
902        Option<&TemporalJitter>,
903        Option<&MipBias>,
904        Option<&MainPassResolutionOverride>,
905    )>,
906    frame_count: Res<FrameCount>,
907) {
908    let view_iter = views.iter();
909    let view_count = view_iter.len();
910    let Some(mut writer) =
911        view_uniforms
912            .uniforms
913            .get_writer(view_count, &render_device, &render_queue)
914    else {
915        return;
916    };
917    for (
918        entity,
919        extracted_camera,
920        extracted_view,
921        frustum,
922        temporal_jitter,
923        mip_bias,
924        resolution_override,
925    ) in &views
926    {
927        let viewport = extracted_view.viewport.as_vec4();
928        let mut main_pass_viewport = viewport;
929        if let Some(resolution_override) = resolution_override {
930            main_pass_viewport.z = resolution_override.0.x as f32;
931            main_pass_viewport.w = resolution_override.0.y as f32;
932        }
933
934        let unjittered_projection = extracted_view.clip_from_view;
935        let mut clip_from_view = unjittered_projection;
936
937        if let Some(temporal_jitter) = temporal_jitter {
938            temporal_jitter.jitter_projection(&mut clip_from_view, main_pass_viewport.zw());
939        }
940
941        let view_from_clip = clip_from_view.inverse();
942        let world_from_view = extracted_view.world_from_view.to_matrix();
943        let view_from_world = world_from_view.inverse();
944
945        let clip_from_world = if temporal_jitter.is_some() {
946            clip_from_view * view_from_world
947        } else {
948            extracted_view
949                .clip_from_world
950                .unwrap_or_else(|| clip_from_view * view_from_world)
951        };
952
953        // Map Frustum type to shader array<vec4<f32>, 6>
954        let frustum = frustum
955            .map(|frustum| frustum.half_spaces.map(|h| h.normal_d()))
956            .unwrap_or([Vec4::ZERO; 6]);
957
958        let view_uniforms = ViewUniformOffset {
959            offset: writer.write(&ViewUniform {
960                clip_from_world,
961                unjittered_clip_from_world: unjittered_projection * view_from_world,
962                world_from_clip: world_from_view * view_from_clip,
963                world_from_view,
964                view_from_world,
965                clip_from_view,
966                view_from_clip,
967                world_position: extracted_view.world_from_view.translation(),
968                exposure: extracted_camera
969                    .map(|c| c.exposure)
970                    .unwrap_or_else(|| Exposure::default().exposure()),
971                viewport,
972                main_pass_viewport,
973                frustum,
974                color_grading: extracted_view.color_grading.clone().into(),
975                mip_bias: mip_bias.unwrap_or(&MipBias(0.0)).0,
976                frame_count: frame_count.0,
977            }),
978        };
979
980        commands.entity(entity).insert(view_uniforms);
981    }
982}
983
/// The pair of "main" color textures for a render target, double-buffered so
/// post-processing can read one while writing the other.
#[derive(Clone)]
struct MainTargetTextures {
    /// Main texture "a" of the ping-pong pair.
    a: ColorAttachment,
    /// Main texture "b" of the ping-pong pair.
    b: ColorAttachment,
    /// 0 represents `main_textures.a`, 1 represents `main_textures.b`
    /// This is shared across view targets with the same render target
    main_texture: Arc<AtomicUsize>,
}
992
993/// Prepares the view target [`OutputColorAttachment`] for each view in the current frame.
994pub fn prepare_view_attachments(
995    windows: Res<ExtractedWindows>,
996    images: Res<RenderAssets<GpuImage>>,
997    manual_texture_views: Res<ManualTextureViews>,
998    cameras: Query<&ExtractedCamera>,
999    mut view_target_attachments: ResMut<ViewTargetAttachments>,
1000) {
1001    for camera in cameras.iter() {
1002        let Some(target) = &camera.target else {
1003            continue;
1004        };
1005
1006        match view_target_attachments.entry(target.clone()) {
1007            Entry::Occupied(_) => {}
1008            Entry::Vacant(entry) => {
1009                let Some(attachment) = target
1010                    .get_texture_view(&windows, &images, &manual_texture_views)
1011                    .cloned()
1012                    .zip(target.get_texture_format(&windows, &images, &manual_texture_views))
1013                    .map(|(view, format)| {
1014                        OutputColorAttachment::new(view.clone(), format.add_srgb_suffix())
1015                    })
1016                else {
1017                    continue;
1018                };
1019                entry.insert(attachment);
1020            }
1021        };
1022    }
1023}
1024
/// Clears the view target [`OutputColorAttachment`]s.
///
/// Empties the [`ViewTargetAttachments`] map so attachments from a previous
/// frame are dropped; presumably scheduled before [`prepare_view_attachments`]
/// repopulates it — confirm ordering in the plugin setup.
pub fn clear_view_attachments(mut view_target_attachments: ResMut<ViewTargetAttachments>) {
    view_target_attachments.clear();
}
1029
/// Creates a [`ViewTarget`] for each camera, allocating (or fetching from the
/// [`TextureCache`]) the double-buffered "main" textures it renders into, plus
/// an MSAA-sampled texture when multisampling is enabled.
pub fn prepare_view_targets(
    mut commands: Commands,
    clear_color_global: Res<ClearColor>,
    render_device: Res<RenderDevice>,
    mut texture_cache: ResMut<TextureCache>,
    cameras: Query<(
        Entity,
        &ExtractedCamera,
        &ExtractedView,
        &CameraMainTextureUsages,
        &Msaa,
    )>,
    view_target_attachments: Res<ViewTargetAttachments>,
) {
    // Cameras with the same (render target, usages, hdr, msaa) key share one
    // set of main textures — and the atomic tracking which of a/b is "main".
    let mut textures = <HashMap<_, _>>::default();
    for (entity, camera, view, texture_usage, msaa) in cameras.iter() {
        // Cameras without a sized render target cannot get a view target.
        let (Some(target_size), Some(target)) = (camera.physical_target_size, &camera.target)
        else {
            continue;
        };

        // The output attachment for this target must already exist
        // (see `prepare_view_attachments`).
        let Some(out_attachment) = view_target_attachments.get(target) else {
            continue;
        };

        let main_texture_format = if view.hdr {
            ViewTarget::TEXTURE_FORMAT_HDR
        } else {
            TextureFormat::bevy_default()
        };

        // Resolve the clear color: per-camera custom color, explicitly no
        // clear, or fall back to the global `ClearColor` resource.
        let clear_color = match camera.clear_color {
            ClearColorConfig::Custom(color) => Some(color),
            ClearColorConfig::None => None,
            _ => Some(clear_color_global.0),
        };

        let (a, b, sampled, main_texture) = textures
            .entry((camera.target.clone(), texture_usage.0, view.hdr, msaa))
            .or_insert_with(|| {
                let descriptor = TextureDescriptor {
                    label: None,
                    size: target_size.to_extents(),
                    mip_level_count: 1,
                    sample_count: 1,
                    dimension: TextureDimension::D2,
                    format: main_texture_format,
                    usage: texture_usage.0,
                    // Allow creating sRGB views of the non-sRGB 8-bit formats.
                    view_formats: match main_texture_format {
                        TextureFormat::Bgra8Unorm => &[TextureFormat::Bgra8UnormSrgb],
                        TextureFormat::Rgba8Unorm => &[TextureFormat::Rgba8UnormSrgb],
                        _ => &[],
                    },
                };
                // "a" and "b" form the ping-pong pair swapped by
                // `ViewTarget::post_process_write`.
                let a = texture_cache.get(
                    &render_device,
                    TextureDescriptor {
                        label: Some("main_texture_a"),
                        ..descriptor
                    },
                );
                let b = texture_cache.get(
                    &render_device,
                    TextureDescriptor {
                        label: Some("main_texture_b"),
                        ..descriptor
                    },
                );
                // With MSAA, rendering goes to a multisampled texture that is
                // used as the resolve source for the single-sampled a/b pair.
                let sampled = if msaa.samples() > 1 {
                    let sampled = texture_cache.get(
                        &render_device,
                        TextureDescriptor {
                            label: Some("main_texture_sampled"),
                            size: target_size.to_extents(),
                            mip_level_count: 1,
                            sample_count: msaa.samples(),
                            dimension: TextureDimension::D2,
                            format: main_texture_format,
                            usage: TextureUsages::RENDER_ATTACHMENT,
                            view_formats: descriptor.view_formats,
                        },
                    );
                    Some(sampled)
                } else {
                    None
                };
                // 0 = texture "a" is currently "main" (see `MainTargetTextures`).
                let main_texture = Arc::new(AtomicUsize::new(0));
                (a, b, sampled, main_texture)
            });

        let converted_clear_color = clear_color.map(Into::into);

        let main_textures = MainTargetTextures {
            a: ColorAttachment::new(a.clone(), sampled.clone(), converted_clear_color),
            b: ColorAttachment::new(b.clone(), sampled.clone(), converted_clear_color),
            main_texture: main_texture.clone(),
        };

        commands.entity(entity).insert(ViewTarget {
            main_texture: main_textures.main_texture.clone(),
            main_textures,
            main_texture_format,
            out_texture: out_attachment.clone(),
        });
    }
}