bevy_render/view/
mod.rs

pub mod visibility;
pub mod window;

use bevy_asset::{load_internal_asset, Handle};
pub use visibility::*;
pub use window::*;

use crate::{
    camera::{
        CameraMainTextureUsages, ClearColor, ClearColorConfig, Exposure, ExtractedCamera,
        ManualTextureViews, MipBias, NormalizedRenderTarget, TemporalJitter,
    },
    extract_component::ExtractComponentPlugin,
    prelude::Shader,
    primitives::Frustum,
    render_asset::RenderAssets,
    render_phase::ViewRangefinder3d,
    render_resource::{DynamicUniformBuffer, ShaderType, Texture, TextureView},
    renderer::{RenderDevice, RenderQueue},
    texture::{
        CachedTexture, ColorAttachment, DepthAttachment, GpuImage, OutputColorAttachment,
        TextureCache,
    },
    Render, RenderApp, RenderSet,
};
use alloc::sync::Arc;
use bevy_app::{App, Plugin};
use bevy_color::LinearRgba;
use bevy_derive::{Deref, DerefMut};
use bevy_ecs::prelude::*;
use bevy_image::BevyDefault as _;
use bevy_math::{mat3, vec2, vec3, Mat3, Mat4, UVec4, Vec2, Vec3, Vec4, Vec4Swizzles};
use bevy_reflect::{std_traits::ReflectDefault, Reflect};
use bevy_render_macros::ExtractComponent;
use bevy_transform::components::GlobalTransform;
use bevy_utils::{hashbrown::hash_map::Entry, HashMap};
use core::{
    ops::Range,
    sync::atomic::{AtomicUsize, Ordering},
};
use wgpu::{
    BufferUsages, Extent3d, RenderPassColorAttachment, RenderPassDepthStencilAttachment, StoreOp,
    TextureDescriptor, TextureDimension, TextureFormat, TextureUsages,
};

pub const VIEW_TYPE_HANDLE: Handle<Shader> = Handle::weak_from_u128(15421373904451797197);

/// The matrix that converts from the RGB to the LMS color space.
///
/// To derive this, first we convert from RGB to [CIE 1931 XYZ]:
///
/// ```text
/// ⎡ X ⎤   ⎡ 0.490  0.310  0.200 ⎤ ⎡ R ⎤
/// ⎢ Y ⎥ = ⎢ 0.177  0.812  0.011 ⎥ ⎢ G ⎥
/// ⎣ Z ⎦   ⎣ 0.000  0.010  0.990 ⎦ ⎣ B ⎦
/// ```
///
/// Then we convert to LMS according to the [CAM16 standard matrix]:
///
/// ```text
/// ⎡ L ⎤   ⎡  0.401   0.650  -0.051 ⎤ ⎡ X ⎤
/// ⎢ M ⎥ = ⎢ -0.250   1.204   0.046 ⎥ ⎢ Y ⎥
/// ⎣ S ⎦   ⎣ -0.002   0.049   0.953 ⎦ ⎣ Z ⎦
/// ```
///
/// The resulting matrix is just the concatenation of these two matrices, to do
/// the conversion in one step.
///
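/// A quick sanity-check sketch of that concatenation (the values above are
/// rounded, so a loose tolerance is used; `mat3`/`vec3` are the column-major
/// `glam` constructors imported at the top of this module):
///
/// ```ignore
/// let rgb_to_xyz = mat3(
///     vec3(0.490, 0.177, 0.000),
///     vec3(0.310, 0.812, 0.010),
///     vec3(0.200, 0.011, 0.990),
/// );
/// let xyz_to_lms = mat3(
///     vec3(0.401, -0.250, -0.002),
///     vec3(0.650, 1.204, 0.049),
///     vec3(-0.051, 0.046, 0.953),
/// );
/// assert!((xyz_to_lms * rgb_to_xyz).abs_diff_eq(RGB_TO_LMS, 1e-2));
/// ```
///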
/// [CIE 1931 XYZ]: https://en.wikipedia.org/wiki/CIE_1931_color_space
/// [CAM16 standard matrix]: https://en.wikipedia.org/wiki/LMS_color_space
static RGB_TO_LMS: Mat3 = mat3(
    vec3(0.311692, 0.0905138, 0.00764433),
    vec3(0.652085, 0.901341, 0.0486554),
    vec3(0.0362225, 0.00814478, 0.943700),
);

/// The inverse of the [`RGB_TO_LMS`] matrix, converting from the LMS color
/// space back to RGB.
static LMS_TO_RGB: Mat3 = mat3(
    vec3(4.06305, -0.40791, -0.0118812),
    vec3(-2.93241, 1.40437, -0.0486532),
    vec3(-0.130646, 0.00353630, 1.0605344),
);

/// The [CIE 1931] *xy* chromaticity coordinates of the [D65 white point].
///
/// [CIE 1931]: https://en.wikipedia.org/wiki/CIE_1931_color_space
/// [D65 white point]: https://en.wikipedia.org/wiki/Standard_illuminant#D65_values
static D65_XY: Vec2 = vec2(0.31272, 0.32903);

/// The [D65 white point] in [LMS color space].
///
/// [LMS color space]: https://en.wikipedia.org/wiki/LMS_color_space
/// [D65 white point]: https://en.wikipedia.org/wiki/Standard_illuminant#D65_values
static D65_LMS: Vec3 = vec3(0.975538, 1.01648, 1.08475);

pub struct ViewPlugin;

impl Plugin for ViewPlugin {
    fn build(&self, app: &mut App) {
        load_internal_asset!(app, VIEW_TYPE_HANDLE, "view.wgsl", Shader::from_wgsl);

        app.register_type::<InheritedVisibility>()
            .register_type::<ViewVisibility>()
            .register_type::<Msaa>()
            .register_type::<NoFrustumCulling>()
            .register_type::<RenderLayers>()
            .register_type::<Visibility>()
            .register_type::<VisibleEntities>()
            .register_type::<ColorGrading>()
            // NOTE: windows.is_changed() handles cases where a window was resized
            .add_plugins((
                ExtractComponentPlugin::<Msaa>::default(),
                VisibilityPlugin,
                VisibilityRangePlugin,
            ));

        if let Some(render_app) = app.get_sub_app_mut(RenderApp) {
            render_app.add_systems(
                Render,
                (
                    // `TextureView`s need to be dropped before reconfiguring window surfaces.
                    clear_view_attachments
                        .in_set(RenderSet::ManageViews)
                        .before(create_surfaces),
                    prepare_view_attachments
                        .in_set(RenderSet::ManageViews)
                        .before(prepare_view_targets)
                        .after(prepare_windows),
                    prepare_view_targets
                        .in_set(RenderSet::ManageViews)
                        .after(prepare_windows)
                        .after(crate::render_asset::prepare_assets::<GpuImage>)
                        .ambiguous_with(crate::camera::sort_cameras), // doesn't use `sorted_camera_index_for_target`
                    prepare_view_uniforms.in_set(RenderSet::PrepareResources),
                ),
            );
        }
    }

    fn finish(&self, app: &mut App) {
        if let Some(render_app) = app.get_sub_app_mut(RenderApp) {
            render_app
                .init_resource::<ViewUniforms>()
                .init_resource::<ViewTargetAttachments>();
        }
    }
}

/// Component for configuring the number of samples for [Multi-Sample Anti-Aliasing](https://en.wikipedia.org/wiki/Multisample_anti-aliasing)
/// for a [`Camera`](crate::camera::Camera).
///
/// Defaults to 4 samples. A higher number of samples results in smoother edges.
///
/// Some advanced rendering features may require that MSAA is disabled.
///
/// Note that the web currently only supports 1 or 4 samples.
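///
/// A minimal usage sketch (hedged: assumes `Camera3d` from `bevy_core_pipeline`
/// and a `Commands` parameter, neither of which is defined in this module):
///
/// ```ignore
/// // Opt a specific camera out of MSAA by attaching the component directly.
/// commands.spawn((Camera3d::default(), Msaa::Off));
/// ```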
#[derive(
    Component,
    Default,
    Clone,
    Copy,
    ExtractComponent,
    Reflect,
    PartialEq,
    PartialOrd,
    Eq,
    Hash,
    Debug,
)]
#[reflect(Component, Default, PartialEq, Hash, Debug)]
pub enum Msaa {
    Off = 1,
    Sample2 = 2,
    #[default]
    Sample4 = 4,
    Sample8 = 8,
}

impl Msaa {
    #[inline]
    pub fn samples(&self) -> u32 {
        *self as u32
    }
}

#[derive(Component)]
pub struct ExtractedView {
    pub clip_from_view: Mat4,
    pub world_from_view: GlobalTransform,
    // The view-projection matrix. When provided, it is used instead of deriving it from the
    // `clip_from_view` and `world_from_view` fields, which can be helpful in cases where
    // numerical stability matters and there is a more direct way to derive the view-projection
    // matrix.
    pub clip_from_world: Option<Mat4>,
    pub hdr: bool,
    // uvec4(origin.x, origin.y, width, height)
    pub viewport: UVec4,
    pub color_grading: ColorGrading,
}

impl ExtractedView {
    /// Creates a 3D rangefinder for a view
    pub fn rangefinder3d(&self) -> ViewRangefinder3d {
        ViewRangefinder3d::from_world_from_view(&self.world_from_view.compute_matrix())
    }
}

/// Configures filmic color grading parameters to adjust the image appearance.
///
/// Color grading is applied just before tonemapping for a given
/// [`Camera`](crate::camera::Camera) entity, with the sole exception of the
/// `post_saturation` value in [`ColorGradingGlobal`], which is applied after
/// tonemapping.
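///
/// A minimal construction sketch (hedged: the specific values and the
/// `Camera3d`/`commands` spawn call are illustrative only and come from outside
/// this module):
///
/// ```ignore
/// let grading = ColorGrading::with_identical_sections(
///     ColorGradingGlobal { exposure: 0.5, ..Default::default() },
///     ColorGradingSection { saturation: 1.1, ..Default::default() },
/// );
/// commands.spawn((Camera3d::default(), grading));
/// ```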
#[derive(Component, Reflect, Debug, Default, Clone)]
#[reflect(Component, Default, Debug)]
pub struct ColorGrading {
    /// Filmic color grading values applied to the image as a whole (as opposed
    /// to individual sections, like shadows and highlights).
    pub global: ColorGradingGlobal,

    /// Color grading values that are applied to the darker parts of the image.
    ///
    /// The cutoff points can be customized with the
    /// [`ColorGradingGlobal::midtones_range`] field.
    pub shadows: ColorGradingSection,

    /// Color grading values that are applied to the parts of the image with
    /// intermediate brightness.
    ///
    /// The cutoff points can be customized with the
    /// [`ColorGradingGlobal::midtones_range`] field.
    pub midtones: ColorGradingSection,

    /// Color grading values that are applied to the lighter parts of the image.
    ///
    /// The cutoff points can be customized with the
    /// [`ColorGradingGlobal::midtones_range`] field.
    pub highlights: ColorGradingSection,
}

/// Filmic color grading values applied to the image as a whole (as opposed to
/// individual sections, like shadows and highlights).
#[derive(Clone, Debug, Reflect)]
#[reflect(Default)]
pub struct ColorGradingGlobal {
    /// Exposure value (EV) offset, measured in stops.
    pub exposure: f32,

    /// An adjustment made to the [CIE 1931] chromaticity *x* value.
    ///
    /// Positive values make the colors redder. Negative values make the colors
    /// bluer. This has no effect on luminance (brightness).
    ///
    /// [CIE 1931]: https://en.wikipedia.org/wiki/CIE_1931_color_space#CIE_xy_chromaticity_diagram_and_the_CIE_xyY_color_space
    pub temperature: f32,

    /// An adjustment made to the [CIE 1931] chromaticity *y* value.
    ///
    /// Positive values make the colors more magenta. Negative values make the
    /// colors greener. This has no effect on luminance (brightness).
    ///
    /// [CIE 1931]: https://en.wikipedia.org/wiki/CIE_1931_color_space#CIE_xy_chromaticity_diagram_and_the_CIE_xyY_color_space
    pub tint: f32,

    /// An adjustment to the [hue], in radians.
    ///
    /// Adjusting this value changes the perceived colors in the image: red to
    /// yellow to green to blue, etc. It has no effect on the saturation or
    /// brightness of the colors.
    ///
    /// [hue]: https://en.wikipedia.org/wiki/HSL_and_HSV#Formal_derivation
    pub hue: f32,

    /// Saturation adjustment applied after tonemapping.
    /// Values below 1.0 desaturate, with a value of 0.0 resulting in a grayscale image
    /// with luminance defined by ITU-R BT.709.
    /// Values above 1.0 increase saturation.
    pub post_saturation: f32,

    /// The luminance (brightness) ranges that are considered part of the
    /// "midtones" of the image.
    ///
    /// This affects which [`ColorGradingSection`]s apply to which colors. Note
    /// that the sections smoothly blend into one another, to avoid abrupt
    /// transitions.
    ///
    /// The default value is 0.2 to 0.7.
    pub midtones_range: Range<f32>,
}

/// The [`ColorGrading`] structure, packed into the most efficient form for the
/// GPU.
#[derive(Clone, Copy, Debug, ShaderType)]
pub struct ColorGradingUniform {
    pub balance: Mat3,
    pub saturation: Vec3,
    pub contrast: Vec3,
    pub gamma: Vec3,
    pub gain: Vec3,
    pub lift: Vec3,
    pub midtone_range: Vec2,
    pub exposure: f32,
    pub hue: f32,
    pub post_saturation: f32,
}

/// A section of color grading values that can be selectively applied to
/// shadows, midtones, and highlights.
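///
/// For a single channel value `i`, the `lift`, `gain`, and `gamma` fields below
/// combine according to the [ASC CDL] formula `out = (i × gain + lift)^gamma`.
/// A hedged, illustrative sketch of that formula only (the actual grading runs
/// on the GPU and also blends sections by luminance):
///
/// ```ignore
/// fn asc_cdl(i: f32, section: &ColorGradingSection) -> f32 {
///     (i * section.gain + section.lift).powf(section.gamma)
/// }
/// ```
///
/// [ASC CDL]: https://en.wikipedia.org/wiki/ASC_CDL#Combined_Function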
#[derive(Reflect, Debug, Copy, Clone, PartialEq)]
pub struct ColorGradingSection {
    /// Values below 1.0 desaturate, with a value of 0.0 resulting in a grayscale image
    /// with luminance defined by ITU-R BT.709.
    /// Values above 1.0 increase saturation.
    pub saturation: f32,

    /// Adjusts the range of colors.
    ///
    /// A value of 1.0 applies no changes. Values below 1.0 move the colors more
    /// toward a neutral gray. Values above 1.0 spread the colors out away from
    /// the neutral gray.
    pub contrast: f32,

    /// A nonlinear luminance adjustment, mainly affecting the high end of the
    /// range.
    ///
    /// This is the *n* exponent in the standard [ASC CDL] formula for color
    /// correction:
    ///
    /// ```text
    /// out = (i × s + o)ⁿ
    /// ```
    ///
    /// [ASC CDL]: https://en.wikipedia.org/wiki/ASC_CDL#Combined_Function
    pub gamma: f32,

    /// A linear luminance adjustment, mainly affecting the middle part of the
    /// range.
    ///
    /// This is the *s* factor in the standard [ASC CDL] formula for color
    /// correction:
    ///
    /// ```text
    /// out = (i × s + o)ⁿ
    /// ```
    ///
    /// [ASC CDL]: https://en.wikipedia.org/wiki/ASC_CDL#Combined_Function
    pub gain: f32,

    /// A fixed luminance adjustment, mainly affecting the lower part of the
    /// range.
    ///
    /// This is the *o* term in the standard [ASC CDL] formula for color
    /// correction:
    ///
    /// ```text
    /// out = (i × s + o)ⁿ
    /// ```
    ///
    /// [ASC CDL]: https://en.wikipedia.org/wiki/ASC_CDL#Combined_Function
    pub lift: f32,
}

impl Default for ColorGradingGlobal {
    fn default() -> Self {
        Self {
            exposure: 0.0,
            temperature: 0.0,
            tint: 0.0,
            hue: 0.0,
            post_saturation: 1.0,
            midtones_range: 0.2..0.7,
        }
    }
}

impl Default for ColorGradingSection {
    fn default() -> Self {
        Self {
            saturation: 1.0,
            contrast: 1.0,
            gamma: 1.0,
            gain: 1.0,
            lift: 0.0,
        }
    }
}

impl ColorGrading {
    /// Creates a new [`ColorGrading`] instance in which shadows, midtones, and
    /// highlights all have the same set of color grading values.
    pub fn with_identical_sections(
        global: ColorGradingGlobal,
        section: ColorGradingSection,
    ) -> ColorGrading {
        ColorGrading {
            global,
            highlights: section,
            midtones: section,
            shadows: section,
        }
    }

    /// Returns an iterator that visits the shadows, midtones, and highlights
    /// sections, in that order.
    pub fn all_sections(&self) -> impl Iterator<Item = &ColorGradingSection> {
        [&self.shadows, &self.midtones, &self.highlights].into_iter()
    }

    /// Returns an iterator that visits mutable references to the shadows,
    /// midtones, and highlights sections, in that order, so that all three can
    /// be adjusted in a single pass.
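    ///
    /// A brief usage sketch (the `grading` variable and the 1.2 factor are
    /// illustrative only):
    ///
    /// ```ignore
    /// // Boost saturation uniformly across shadows, midtones, and highlights.
    /// for section in grading.all_sections_mut() {
    ///     section.saturation *= 1.2;
    /// }
    /// ```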
    pub fn all_sections_mut(&mut self) -> impl Iterator<Item = &mut ColorGradingSection> {
        [&mut self.shadows, &mut self.midtones, &mut self.highlights].into_iter()
    }
}

#[derive(Clone, ShaderType)]
pub struct ViewUniform {
    pub clip_from_world: Mat4,
    pub unjittered_clip_from_world: Mat4,
    pub world_from_clip: Mat4,
    pub world_from_view: Mat4,
    pub view_from_world: Mat4,
    pub clip_from_view: Mat4,
    pub view_from_clip: Mat4,
    pub world_position: Vec3,
    pub exposure: f32,
    // viewport(x_origin, y_origin, width, height)
    pub viewport: Vec4,
    pub frustum: [Vec4; 6],
    pub color_grading: ColorGradingUniform,
    pub mip_bias: f32,
}

#[derive(Resource)]
pub struct ViewUniforms {
    pub uniforms: DynamicUniformBuffer<ViewUniform>,
}

impl FromWorld for ViewUniforms {
    fn from_world(world: &mut World) -> Self {
        let mut uniforms = DynamicUniformBuffer::default();
        uniforms.set_label(Some("view_uniforms_buffer"));

        let render_device = world.resource::<RenderDevice>();
        if render_device.limits().max_storage_buffers_per_shader_stage > 0 {
            uniforms.add_usages(BufferUsages::STORAGE);
        }

        Self { uniforms }
    }
}

#[derive(Component)]
pub struct ViewUniformOffset {
    pub offset: u32,
}

#[derive(Component)]
pub struct ViewTarget {
    main_textures: MainTargetTextures,
    main_texture_format: TextureFormat,
    /// 0 represents `main_textures.a`, 1 represents `main_textures.b`
    /// This is shared across view targets with the same render target
    main_texture: Arc<AtomicUsize>,
    out_texture: OutputColorAttachment,
}

/// Contains the [`OutputColorAttachment`] used for each render target present on any view in the
/// current frame, after being prepared by [`prepare_view_attachments`]. Users who want to override
/// the default output color attachment for a specific target can do so by adding an
/// [`OutputColorAttachment`] to this resource before [`prepare_view_targets`] is called.
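///
/// A hedged sketch of such an override (the `my_target`, `my_view`, and
/// `my_format` names are placeholders; the system must run before
/// [`prepare_view_targets`], as described above):
///
/// ```ignore
/// fn override_output_attachment(mut attachments: ResMut<ViewTargetAttachments>) {
///     attachments.insert(
///         my_target.clone(),
///         OutputColorAttachment::new(my_view.clone(), my_format),
///     );
/// }
/// ```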
#[derive(Resource, Default, Deref, DerefMut)]
pub struct ViewTargetAttachments(HashMap<NormalizedRenderTarget, OutputColorAttachment>);

pub struct PostProcessWrite<'a> {
    pub source: &'a TextureView,
    pub destination: &'a TextureView,
}

impl From<ColorGrading> for ColorGradingUniform {
    fn from(component: ColorGrading) -> Self {
        // Compute the balance matrix that will be used to apply the white
        // balance adjustment to an RGB color. Our general approach will be to
        // convert both the color and the developer-supplied white point to the
        // LMS color space, apply the conversion, and then convert back.
        //
        // First, we start with the CIE 1931 *xy* values of the standard D65
        // illuminant:
        // <https://en.wikipedia.org/wiki/Standard_illuminant#D65_values>
        //
        // We then adjust them based on the developer's requested white balance.
        let white_point_xy = D65_XY + vec2(-component.global.temperature, component.global.tint);

        // Convert the white point from CIE 1931 *xy* to LMS. First, we convert to XYZ:
        //
        //                  Y          Y
        //     Y = 1    X = ─ x    Z = ─ (1 - x - y)
        //                  y          y
        //
        // Then we convert from XYZ to LMS color space, using the CAM16 matrix
        // from <https://en.wikipedia.org/wiki/LMS_color_space#Later_CIECAMs>:
        //
        //     ⎡ L ⎤   ⎡  0.401   0.650  -0.051 ⎤ ⎡ X ⎤
        //     ⎢ M ⎥ = ⎢ -0.250   1.204   0.046 ⎥ ⎢ Y ⎥
        //     ⎣ S ⎦   ⎣ -0.002   0.049   0.953 ⎦ ⎣ Z ⎦
        //
        // The following formula is just a simplification of the above.

        let white_point_lms = vec3(0.701634, 1.15856, -0.904175)
            + (vec3(-0.051461, 0.045854, 0.953127)
                + vec3(0.452749, -0.296122, -0.955206) * white_point_xy.x)
                / white_point_xy.y;
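        // Hedged sanity check (not executed here): with zero `temperature` and
        // `tint`, `white_point_xy` equals `D65_XY` and the expression above
        // evaluates to approximately `D65_LMS`, so the balance matrix computed
        // below reduces to (approximately) the identity.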

        // Now that we're in LMS space, perform the white point scaling.
        let white_point_adjustment = Mat3::from_diagonal(D65_LMS / white_point_lms);

        // Finally, combine the RGB → LMS → corrected LMS → corrected RGB
        // pipeline into a single 3×3 matrix.
        let balance = LMS_TO_RGB * white_point_adjustment * RGB_TO_LMS;

        Self {
            balance,
            saturation: vec3(
                component.shadows.saturation,
                component.midtones.saturation,
                component.highlights.saturation,
            ),
            contrast: vec3(
                component.shadows.contrast,
                component.midtones.contrast,
                component.highlights.contrast,
            ),
            gamma: vec3(
                component.shadows.gamma,
                component.midtones.gamma,
                component.highlights.gamma,
            ),
            gain: vec3(
                component.shadows.gain,
                component.midtones.gain,
                component.highlights.gain,
            ),
            lift: vec3(
                component.shadows.lift,
                component.midtones.lift,
                component.highlights.lift,
            ),
            midtone_range: vec2(
                component.global.midtones_range.start,
                component.global.midtones_range.end,
            ),
            exposure: component.global.exposure,
            hue: component.global.hue,
            post_saturation: component.global.post_saturation,
        }
    }
}

/// Marker component for a view that should use GPU frustum culling.
#[derive(Component)]
pub struct GpuCulling;

/// Marker component for a view that should skip CPU frustum culling.
#[derive(Component)]
pub struct NoCpuCulling;

impl ViewTarget {
    pub const TEXTURE_FORMAT_HDR: TextureFormat = TextureFormat::Rgba16Float;

    /// Retrieve this target's main texture's color attachment.
    pub fn get_color_attachment(&self) -> RenderPassColorAttachment {
        if self.main_texture.load(Ordering::SeqCst) == 0 {
            self.main_textures.a.get_attachment()
        } else {
            self.main_textures.b.get_attachment()
        }
    }

    /// Retrieve this target's "unsampled" main texture's color attachment.
    pub fn get_unsampled_color_attachment(&self) -> RenderPassColorAttachment {
        if self.main_texture.load(Ordering::SeqCst) == 0 {
            self.main_textures.a.get_unsampled_attachment()
        } else {
            self.main_textures.b.get_unsampled_attachment()
        }
    }

    /// The "main" unsampled texture.
    pub fn main_texture(&self) -> &Texture {
        if self.main_texture.load(Ordering::SeqCst) == 0 {
            &self.main_textures.a.texture.texture
        } else {
            &self.main_textures.b.texture.texture
        }
    }

    /// The _other_ "main" unsampled texture.
    /// In most cases you should use [`Self::main_texture`] instead and never this.
    /// The textures will naturally be swapped when [`Self::post_process_write`] is called.
    ///
    /// A use case for this is to be able to prepare a bind group for all main textures
    /// ahead of time.
    pub fn main_texture_other(&self) -> &Texture {
        if self.main_texture.load(Ordering::SeqCst) == 0 {
            &self.main_textures.b.texture.texture
        } else {
            &self.main_textures.a.texture.texture
        }
    }

    /// The "main" unsampled texture view.
    pub fn main_texture_view(&self) -> &TextureView {
        if self.main_texture.load(Ordering::SeqCst) == 0 {
            &self.main_textures.a.texture.default_view
        } else {
            &self.main_textures.b.texture.default_view
        }
    }

    /// The _other_ "main" unsampled texture view.
    /// In most cases you should use [`Self::main_texture_view`] instead and never this.
    /// The textures will naturally be swapped when [`Self::post_process_write`] is called.
    ///
    /// A use case for this is to be able to prepare a bind group for all main textures
    /// ahead of time.
    pub fn main_texture_other_view(&self) -> &TextureView {
        if self.main_texture.load(Ordering::SeqCst) == 0 {
            &self.main_textures.b.texture.default_view
        } else {
            &self.main_textures.a.texture.default_view
        }
    }

    /// The "main" sampled texture.
    pub fn sampled_main_texture(&self) -> Option<&Texture> {
        self.main_textures
            .a
            .resolve_target
            .as_ref()
            .map(|sampled| &sampled.texture)
    }

    /// The "main" sampled texture view.
    pub fn sampled_main_texture_view(&self) -> Option<&TextureView> {
        self.main_textures
            .a
            .resolve_target
            .as_ref()
            .map(|sampled| &sampled.default_view)
    }

    #[inline]
    pub fn main_texture_format(&self) -> TextureFormat {
        self.main_texture_format
    }

    /// Returns `true` if and only if the main texture is [`Self::TEXTURE_FORMAT_HDR`]
    #[inline]
    pub fn is_hdr(&self) -> bool {
        self.main_texture_format == ViewTarget::TEXTURE_FORMAT_HDR
    }

    /// The final texture this view will render to.
    #[inline]
    pub fn out_texture(&self) -> &TextureView {
        &self.out_texture.view
    }

    pub fn out_texture_color_attachment(
        &self,
        clear_color: Option<LinearRgba>,
    ) -> RenderPassColorAttachment {
        self.out_texture.get_attachment(clear_color)
    }

    /// The format of the final texture this view will render to
    #[inline]
    pub fn out_texture_format(&self) -> TextureFormat {
        self.out_texture.format
    }

    /// This will start a new "post process write", which assumes that the caller
    /// will write the [`PostProcessWrite`]'s `source` to the `destination`.
    ///
    /// `source` is the "current" main texture. This will internally flip this
    /// [`ViewTarget`]'s main texture to the `destination` texture, so the caller
    /// _must_ ensure `source` is copied to `destination`, with or without modifications.
    /// Failing to do so will cause the current main texture information to be lost.
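    ///
    /// A hedged sketch of the expected call pattern inside a post-processing
    /// render node (pipeline and bind group setup elided; names are
    /// illustrative):
    ///
    /// ```ignore
    /// let post_process = view_target.post_process_write();
    /// // Bind `post_process.source` as the input texture, then run a
    /// // fullscreen pass whose color attachment is `post_process.destination`.
    /// ```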
    pub fn post_process_write(&self) -> PostProcessWrite {
        let old_is_a_main_texture = self.main_texture.fetch_xor(1, Ordering::SeqCst);
        // if the old main texture is a, then the post processing must write from a to b
        if old_is_a_main_texture == 0 {
            self.main_textures.b.mark_as_cleared();
            PostProcessWrite {
                source: &self.main_textures.a.texture.default_view,
                destination: &self.main_textures.b.texture.default_view,
            }
        } else {
            self.main_textures.a.mark_as_cleared();
            PostProcessWrite {
                source: &self.main_textures.b.texture.default_view,
                destination: &self.main_textures.a.texture.default_view,
            }
        }
    }
}

#[derive(Component)]
pub struct ViewDepthTexture {
    pub texture: Texture,
    attachment: DepthAttachment,
}

impl ViewDepthTexture {
    pub fn new(texture: CachedTexture, clear_value: Option<f32>) -> Self {
        Self {
            texture: texture.texture,
            attachment: DepthAttachment::new(texture.default_view, clear_value),
        }
    }

    pub fn get_attachment(&self, store: StoreOp) -> RenderPassDepthStencilAttachment {
        self.attachment.get_attachment(store)
    }

    pub fn view(&self) -> &TextureView {
        &self.attachment.view
    }
}

pub fn prepare_view_uniforms(
    mut commands: Commands,
    render_device: Res<RenderDevice>,
    render_queue: Res<RenderQueue>,
    mut view_uniforms: ResMut<ViewUniforms>,
    views: Query<(
        Entity,
        Option<&ExtractedCamera>,
        &ExtractedView,
        Option<&Frustum>,
        Option<&TemporalJitter>,
        Option<&MipBias>,
    )>,
) {
    let view_iter = views.iter();
    let view_count = view_iter.len();
    let Some(mut writer) =
        view_uniforms
            .uniforms
            .get_writer(view_count, &render_device, &render_queue)
    else {
        return;
    };
    for (entity, extracted_camera, extracted_view, frustum, temporal_jitter, mip_bias) in &views {
        let viewport = extracted_view.viewport.as_vec4();
        let unjittered_projection = extracted_view.clip_from_view;
        let mut clip_from_view = unjittered_projection;

        if let Some(temporal_jitter) = temporal_jitter {
            temporal_jitter.jitter_projection(&mut clip_from_view, viewport.zw());
        }

        let view_from_clip = clip_from_view.inverse();
        let world_from_view = extracted_view.world_from_view.compute_matrix();
        let view_from_world = world_from_view.inverse();

        let clip_from_world = if temporal_jitter.is_some() {
            clip_from_view * view_from_world
        } else {
            extracted_view
                .clip_from_world
                .unwrap_or_else(|| clip_from_view * view_from_world)
        };

        // Map Frustum type to shader array<vec4<f32>, 6>
        let frustum = frustum
            .map(|frustum| frustum.half_spaces.map(|h| h.normal_d()))
            .unwrap_or([Vec4::ZERO; 6]);

        let view_uniforms = ViewUniformOffset {
            offset: writer.write(&ViewUniform {
                clip_from_world,
                unjittered_clip_from_world: unjittered_projection * view_from_world,
                world_from_clip: world_from_view * view_from_clip,
                world_from_view,
                view_from_world,
                clip_from_view,
                view_from_clip,
                world_position: extracted_view.world_from_view.translation(),
                exposure: extracted_camera
                    .map(|c| c.exposure)
                    .unwrap_or_else(|| Exposure::default().exposure()),
                viewport,
                frustum,
                color_grading: extracted_view.color_grading.clone().into(),
                mip_bias: mip_bias.unwrap_or(&MipBias(0.0)).0,
            }),
        };

        commands.entity(entity).insert(view_uniforms);
    }
}

#[derive(Clone)]
struct MainTargetTextures {
    a: ColorAttachment,
    b: ColorAttachment,
    /// 0 represents `main_textures.a`, 1 represents `main_textures.b`
    /// This is shared across view targets with the same render target
    main_texture: Arc<AtomicUsize>,
}

/// Prepares the view target [`OutputColorAttachment`] for each view in the current frame.
pub fn prepare_view_attachments(
    windows: Res<ExtractedWindows>,
    images: Res<RenderAssets<GpuImage>>,
    manual_texture_views: Res<ManualTextureViews>,
    cameras: Query<&ExtractedCamera>,
    mut view_target_attachments: ResMut<ViewTargetAttachments>,
) {
    for camera in cameras.iter() {
        let Some(target) = &camera.target else {
            continue;
        };

        match view_target_attachments.entry(target.clone()) {
            Entry::Occupied(_) => {}
            Entry::Vacant(entry) => {
                let Some(attachment) = target
                    .get_texture_view(&windows, &images, &manual_texture_views)
                    .cloned()
                    .zip(target.get_texture_format(&windows, &images, &manual_texture_views))
                    .map(|(view, format)| {
                        OutputColorAttachment::new(view.clone(), format.add_srgb_suffix())
                    })
                else {
                    continue;
                };
                entry.insert(attachment);
            }
        };
    }
}

/// Clears the view target [`OutputColorAttachment`]s.
pub fn clear_view_attachments(mut view_target_attachments: ResMut<ViewTargetAttachments>) {
    view_target_attachments.clear();
}

pub fn prepare_view_targets(
    mut commands: Commands,
    clear_color_global: Res<ClearColor>,
    render_device: Res<RenderDevice>,
    mut texture_cache: ResMut<TextureCache>,
    cameras: Query<(
        Entity,
        &ExtractedCamera,
        &ExtractedView,
        &CameraMainTextureUsages,
        &Msaa,
    )>,
    view_target_attachments: Res<ViewTargetAttachments>,
) {
    let mut textures = HashMap::default();
    for (entity, camera, view, texture_usage, msaa) in cameras.iter() {
        let (Some(target_size), Some(target)) = (camera.physical_target_size, &camera.target)
        else {
            continue;
        };

        let Some(out_attachment) = view_target_attachments.get(target) else {
            continue;
        };

        let size = Extent3d {
            width: target_size.x,
            height: target_size.y,
            depth_or_array_layers: 1,
        };

        let main_texture_format = if view.hdr {
            ViewTarget::TEXTURE_FORMAT_HDR
        } else {
            TextureFormat::bevy_default()
        };

        let clear_color = match camera.clear_color {
            ClearColorConfig::Custom(color) => Some(color),
            ClearColorConfig::None => None,
            _ => Some(clear_color_global.0),
        };

        let (a, b, sampled, main_texture) = textures
            .entry((camera.target.clone(), view.hdr, msaa))
            .or_insert_with(|| {
                let descriptor = TextureDescriptor {
                    label: None,
                    size,
                    mip_level_count: 1,
                    sample_count: 1,
                    dimension: TextureDimension::D2,
                    format: main_texture_format,
                    usage: texture_usage.0,
                    view_formats: match main_texture_format {
                        TextureFormat::Bgra8Unorm => &[TextureFormat::Bgra8UnormSrgb],
                        TextureFormat::Rgba8Unorm => &[TextureFormat::Rgba8UnormSrgb],
                        _ => &[],
                    },
                };
                let a = texture_cache.get(
                    &render_device,
                    TextureDescriptor {
                        label: Some("main_texture_a"),
                        ..descriptor
                    },
                );
                let b = texture_cache.get(
                    &render_device,
                    TextureDescriptor {
                        label: Some("main_texture_b"),
                        ..descriptor
                    },
                );
                let sampled = if msaa.samples() > 1 {
                    let sampled = texture_cache.get(
                        &render_device,
                        TextureDescriptor {
                            label: Some("main_texture_sampled"),
                            size,
                            mip_level_count: 1,
                            sample_count: msaa.samples(),
                            dimension: TextureDimension::D2,
                            format: main_texture_format,
                            usage: TextureUsages::RENDER_ATTACHMENT,
                            view_formats: descriptor.view_formats,
                        },
                    );
                    Some(sampled)
                } else {
                    None
                };
                let main_texture = Arc::new(AtomicUsize::new(0));
                (a, b, sampled, main_texture)
            });

        let converted_clear_color = clear_color.map(Into::into);

        let main_textures = MainTargetTextures {
            a: ColorAttachment::new(a.clone(), sampled.clone(), converted_clear_color),
            b: ColorAttachment::new(b.clone(), sampled.clone(), converted_clear_color),
            main_texture: main_texture.clone(),
        };

        commands.entity(entity).insert(ViewTarget {
            main_texture: main_textures.main_texture.clone(),
            main_textures,
            main_texture_format,
            out_texture: out_attachment.clone(),
        });
    }
}