bevy_render/
camera.rs

1use crate::{
2    batching::gpu_preprocessing::{GpuPreprocessingMode, GpuPreprocessingSupport},
3    extract_component::{ExtractComponent, ExtractComponentPlugin},
4    extract_resource::{ExtractResource, ExtractResourcePlugin},
5    render_asset::RenderAssets,
6    render_graph::{CameraDriverNode, InternedRenderSubGraph, RenderGraph, RenderSubGraph},
7    render_resource::TextureView,
8    sync_world::{RenderEntity, SyncToRenderWorld},
9    texture::{GpuImage, ManualTextureViews},
10    view::{
11        ColorGrading, ExtractedView, ExtractedWindows, Hdr, Msaa, NoIndirectDrawing,
12        RenderVisibleEntities, RetainedViewEntity, ViewUniformOffset,
13    },
14    Extract, ExtractSchedule, Render, RenderApp, RenderSystems,
15};
16
17use bevy_app::{App, Plugin, PostStartup, PostUpdate};
18use bevy_asset::{AssetEvent, AssetEventSystems, AssetId, Assets};
19use bevy_camera::{
20    primitives::Frustum,
21    visibility::{self, RenderLayers, VisibleEntities},
22    Camera, Camera2d, Camera3d, CameraMainTextureUsages, CameraOutputMode, CameraUpdateSystems,
23    ClearColor, ClearColorConfig, Exposure, ManualTextureViewHandle, MsaaWriteback,
24    NormalizedRenderTarget, Projection, RenderTarget, RenderTargetInfo, Viewport,
25};
26use bevy_derive::{Deref, DerefMut};
27use bevy_ecs::{
28    change_detection::DetectChanges,
29    component::Component,
30    entity::{ContainsEntity, Entity},
31    error::BevyError,
32    lifecycle::HookContext,
33    message::MessageReader,
34    prelude::With,
35    query::{Has, QueryItem},
36    reflect::ReflectComponent,
37    resource::Resource,
38    schedule::IntoScheduleConfigs,
39    system::{Commands, Query, Res, ResMut},
40    world::DeferredWorld,
41};
42use bevy_image::Image;
43use bevy_math::{uvec2, vec2, Mat4, URect, UVec2, UVec4, Vec2};
44use bevy_platform::collections::{HashMap, HashSet};
45use bevy_reflect::prelude::*;
46use bevy_transform::components::GlobalTransform;
47use bevy_window::{PrimaryWindow, Window, WindowCreated, WindowResized, WindowScaleFactorChanged};
48use tracing::warn;
49use wgpu::TextureFormat;
50
/// Adds camera support to the render pipeline: registers required camera
/// components, extracts cameras into the render world each frame, sorts them,
/// and installs the [`CameraDriverNode`] that drives each camera's render graph.
#[derive(Default)]
pub struct CameraPlugin;

impl Plugin for CameraPlugin {
    fn build(&self, app: &mut App) {
        // Every `Camera` needs `Msaa` and must be mirrored into the render world;
        // 3D cameras additionally require color grading and exposure settings.
        app.register_required_components::<Camera, Msaa>()
            .register_required_components::<Camera, SyncToRenderWorld>()
            .register_required_components::<Camera3d, ColorGrading>()
            .register_required_components::<Camera3d, Exposure>()
            .add_plugins((
                ExtractResourcePlugin::<ClearColor>::default(),
                ExtractComponentPlugin::<CameraMainTextureUsages>::default(),
            ))
            // Run once at startup and then every frame; must run before frusta are
            // updated (they depend on the projection this system recomputes) and
            // before asset events are flushed.
            .add_systems(PostStartup, camera_system.in_set(CameraUpdateSystems))
            .add_systems(
                PostUpdate,
                camera_system
                    .in_set(CameraUpdateSystems)
                    .before(AssetEventSystems)
                    .before(visibility::update_frusta),
            );
        // Warn at spawn time when a `Camera` has no render graph configured.
        app.world_mut()
            .register_component_hooks::<Camera>()
            .on_add(warn_on_no_render_graph);

        if let Some(render_app) = app.get_sub_app_mut(RenderApp) {
            render_app
                .init_resource::<SortedCameras>()
                .add_systems(ExtractSchedule, extract_cameras)
                .add_systems(Render, sort_cameras.in_set(RenderSystems::ManageViews));
            // The driver node dispatches each camera's configured sub-graph.
            let camera_driver_node = CameraDriverNode::new(render_app.world_mut());
            let mut render_graph = render_app.world_mut().resource_mut::<RenderGraph>();
            render_graph.add_node(crate::graph::CameraDriverLabel, camera_driver_node);
        }
    }
}
87
88fn warn_on_no_render_graph(world: DeferredWorld, HookContext { entity, caller, .. }: HookContext) {
89    if !world.entity(entity).contains::<CameraRenderGraph>() {
90        warn!("{}Entity {entity} has a `Camera` component, but it doesn't have a render graph configured. Usually, adding a `Camera2d` or `Camera3d` component will work.
91        However, you may instead need to enable `bevy_core_pipeline`, or may want to manually add a `CameraRenderGraph` component to create a custom render graph.", caller.map(|location|format!("{location}: ")).unwrap_or_default());
92    }
93}
94
95impl ExtractResource for ClearColor {
96    type Source = Self;
97
98    fn extract_resource(source: &Self::Source) -> Self {
99        source.clone()
100    }
101}
102impl ExtractComponent for CameraMainTextureUsages {
103    type QueryData = &'static Self;
104    type QueryFilter = ();
105    type Out = Self;
106
107    fn extract_component(item: QueryItem<Self::QueryData>) -> Option<Self::Out> {
108        Some(*item)
109    }
110}
111impl ExtractComponent for Camera2d {
112    type QueryData = &'static Self;
113    type QueryFilter = With<Camera>;
114    type Out = Self;
115
116    fn extract_component(item: QueryItem<Self::QueryData>) -> Option<Self::Out> {
117        Some(item.clone())
118    }
119}
120impl ExtractComponent for Camera3d {
121    type QueryData = &'static Self;
122    type QueryFilter = With<Camera>;
123    type Out = Self;
124
125    fn extract_component(item: QueryItem<Self::QueryData>) -> Option<Self::Out> {
126        Some(item.clone())
127    }
128}
129
/// Configures the [`RenderGraph`] name assigned to be run for a given [`Camera`] entity.
#[derive(Component, Debug, Deref, DerefMut, Reflect, Clone)]
#[reflect(opaque)]
#[reflect(Component, Debug, Clone)]
pub struct CameraRenderGraph(InternedRenderSubGraph);

impl CameraRenderGraph {
    /// Creates a new [`CameraRenderGraph`] from any type implementing [`RenderSubGraph`].
    #[inline]
    pub fn new<T: RenderSubGraph>(name: T) -> Self {
        Self(name.intern())
    }

    /// Sets the graph name, replacing the previously configured sub-graph.
    #[inline]
    pub fn set<T: RenderSubGraph>(&mut self, name: T) {
        self.0 = name.intern();
    }
}
149
/// Render-world helpers for resolving a [`NormalizedRenderTarget`] into concrete
/// GPU resources and target metadata.
pub trait NormalizedRenderTargetExt {
    /// Retrieves the [`TextureView`] backing this render target, if it exists.
    fn get_texture_view<'a>(
        &self,
        windows: &'a ExtractedWindows,
        images: &'a RenderAssets<GpuImage>,
        manual_texture_views: &'a ManualTextureViews,
    ) -> Option<&'a TextureView>;

    /// Retrieves the [`TextureFormat`] of this render target, if it exists.
    fn get_texture_view_format<'a>(
        &self,
        windows: &'a ExtractedWindows,
        images: &'a RenderAssets<GpuImage>,
        manual_texture_views: &'a ManualTextureViews,
    ) -> Option<TextureFormat>;

    /// Computes the physical size and scale factor of this render target.
    ///
    /// # Errors
    /// Returns a [`MissingRenderTargetInfoError`] when the backing window, image,
    /// or manual texture view cannot be found.
    fn get_render_target_info<'a>(
        &self,
        resolutions: impl IntoIterator<Item = (Entity, &'a Window)>,
        images: &Assets<Image>,
        manual_texture_views: &ManualTextureViews,
    ) -> Result<RenderTargetInfo, MissingRenderTargetInfoError>;

    /// Check if this render target is contained in the given changed windows or images.
    fn is_changed(
        &self,
        changed_window_ids: &HashSet<Entity>,
        changed_image_handles: &HashSet<&AssetId<Image>>,
    ) -> bool;
}
180
181impl NormalizedRenderTargetExt for NormalizedRenderTarget {
182    fn get_texture_view<'a>(
183        &self,
184        windows: &'a ExtractedWindows,
185        images: &'a RenderAssets<GpuImage>,
186        manual_texture_views: &'a ManualTextureViews,
187    ) -> Option<&'a TextureView> {
188        match self {
189            NormalizedRenderTarget::Window(window_ref) => windows
190                .get(&window_ref.entity())
191                .and_then(|window| window.swap_chain_texture_view.as_ref()),
192            NormalizedRenderTarget::Image(image_target) => images
193                .get(&image_target.handle)
194                .map(|image| &image.texture_view),
195            NormalizedRenderTarget::TextureView(id) => {
196                manual_texture_views.get(id).map(|tex| &tex.texture_view)
197            }
198            NormalizedRenderTarget::None { .. } => None,
199        }
200    }
201
202    /// Retrieves the texture view's [`TextureFormat`] of this render target, if it exists.
203    fn get_texture_view_format<'a>(
204        &self,
205        windows: &'a ExtractedWindows,
206        images: &'a RenderAssets<GpuImage>,
207        manual_texture_views: &'a ManualTextureViews,
208    ) -> Option<TextureFormat> {
209        match self {
210            NormalizedRenderTarget::Window(window_ref) => windows
211                .get(&window_ref.entity())
212                .and_then(|window| window.swap_chain_texture_view_format),
213            NormalizedRenderTarget::Image(image_target) => images
214                .get(&image_target.handle)
215                .map(|image| image.texture_view_format.unwrap_or(image.texture_format)),
216            NormalizedRenderTarget::TextureView(id) => {
217                manual_texture_views.get(id).map(|tex| tex.view_format)
218            }
219            NormalizedRenderTarget::None { .. } => None,
220        }
221    }
222
223    fn get_render_target_info<'a>(
224        &self,
225        resolutions: impl IntoIterator<Item = (Entity, &'a Window)>,
226        images: &Assets<Image>,
227        manual_texture_views: &ManualTextureViews,
228    ) -> Result<RenderTargetInfo, MissingRenderTargetInfoError> {
229        match self {
230            NormalizedRenderTarget::Window(window_ref) => resolutions
231                .into_iter()
232                .find(|(entity, _)| *entity == window_ref.entity())
233                .map(|(_, window)| RenderTargetInfo {
234                    physical_size: window.physical_size(),
235                    scale_factor: window.resolution.scale_factor(),
236                })
237                .ok_or(MissingRenderTargetInfoError::Window {
238                    window: window_ref.entity(),
239                }),
240            NormalizedRenderTarget::Image(image_target) => images
241                .get(&image_target.handle)
242                .map(|image| RenderTargetInfo {
243                    physical_size: image.size(),
244                    scale_factor: image_target.scale_factor,
245                })
246                .ok_or(MissingRenderTargetInfoError::Image {
247                    image: image_target.handle.id(),
248                }),
249            NormalizedRenderTarget::TextureView(id) => manual_texture_views
250                .get(id)
251                .map(|tex| RenderTargetInfo {
252                    physical_size: tex.size,
253                    scale_factor: 1.0,
254                })
255                .ok_or(MissingRenderTargetInfoError::TextureView { texture_view: *id }),
256            NormalizedRenderTarget::None { width, height } => Ok(RenderTargetInfo {
257                physical_size: uvec2(*width, *height),
258                scale_factor: 1.0,
259            }),
260        }
261    }
262
263    // Check if this render target is contained in the given changed windows or images.
264    fn is_changed(
265        &self,
266        changed_window_ids: &HashSet<Entity>,
267        changed_image_handles: &HashSet<&AssetId<Image>>,
268    ) -> bool {
269        match self {
270            NormalizedRenderTarget::Window(window_ref) => {
271                changed_window_ids.contains(&window_ref.entity())
272            }
273            NormalizedRenderTarget::Image(image_target) => {
274                changed_image_handles.contains(&image_target.handle.id())
275            }
276            NormalizedRenderTarget::TextureView(_) => true,
277            NormalizedRenderTarget::None { .. } => false,
278        }
279    }
280}
281
/// Error returned by [`NormalizedRenderTargetExt::get_render_target_info`] when the
/// window, image, or manual texture view backing a render target cannot be found.
#[derive(Debug, thiserror::Error)]
pub enum MissingRenderTargetInfoError {
    #[error("RenderTarget::Window missing ({window:?}): Make sure the provided entity has a Window component.")]
    Window { window: Entity },
    #[error("RenderTarget::Image missing ({image:?}): Make sure the Image's usages include RenderAssetUsages::MAIN_WORLD.")]
    Image { image: AssetId<Image> },
    #[error("RenderTarget::TextureView missing ({texture_view:?}): make sure the texture view handle was not removed.")]
    TextureView {
        texture_view: ManualTextureViewHandle,
    },
}
293
/// System in charge of updating a [`Camera`] when its window or projection changes.
///
/// The system detects window creation, resize, and scale factor change events to update the camera
/// [`Projection`] if needed.
///
/// ## World Resources
///
/// [`Res<Assets<Image>>`](Assets<Image>) -- For cameras that render to an image, this resource is used to
/// inspect information about the render target. This system will not access any other image assets.
///
/// [`OrthographicProjection`]: bevy_camera::OrthographicProjection
/// [`PerspectiveProjection`]: bevy_camera::PerspectiveProjection
pub fn camera_system(
    mut window_resized_reader: MessageReader<WindowResized>,
    mut window_created_reader: MessageReader<WindowCreated>,
    mut window_scale_factor_changed_reader: MessageReader<WindowScaleFactorChanged>,
    mut image_asset_event_reader: MessageReader<AssetEvent<Image>>,
    primary_window: Query<Entity, With<PrimaryWindow>>,
    windows: Query<(Entity, &Window)>,
    images: Res<Assets<Image>>,
    manual_texture_views: Res<ManualTextureViews>,
    mut cameras: Query<(&mut Camera, &RenderTarget, &mut Projection)>,
) -> Result<(), BevyError> {
    let primary_window = primary_window.iter().next();

    // Collect windows whose size may have changed this frame (created, resized,
    // or DPI-changed).
    let mut changed_window_ids = <HashSet<_>>::default();
    changed_window_ids.extend(window_created_reader.read().map(|event| event.window));
    changed_window_ids.extend(window_resized_reader.read().map(|event| event.window));
    // Scale-factor changes are tracked separately so viewports can be rescaled below.
    let scale_factor_changed_window_ids: HashSet<_> = window_scale_factor_changed_reader
        .read()
        .map(|event| event.window)
        .collect();
    changed_window_ids.extend(scale_factor_changed_window_ids.clone());

    // Images that were added or modified this frame; cameras rendering to them
    // must have their target info refreshed.
    let changed_image_handles: HashSet<&AssetId<Image>> = image_asset_event_reader
        .read()
        .filter_map(|event| match event {
            AssetEvent::Modified { id } | AssetEvent::Added { id } => Some(id),
            _ => None,
        })
        .collect();

    for (mut camera, render_target, mut camera_projection) in &mut cameras {
        let mut viewport_size = camera
            .viewport
            .as_ref()
            .map(|viewport| viewport.physical_size);

        // Only recompute when something relevant changed: the target itself, the
        // camera or projection components, the viewport size, or the sub-view.
        if let Some(normalized_target) = render_target.normalize(primary_window)
            && (normalized_target.is_changed(&changed_window_ids, &changed_image_handles)
                || camera.is_added()
                || camera_projection.is_changed()
                || camera.computed.old_viewport_size != viewport_size
                || camera.computed.old_sub_camera_view != camera.sub_camera_view)
        {
            let new_computed_target_info = normalized_target.get_render_target_info(
                windows,
                &images,
                &manual_texture_views,
            )?;
            // Check for the scale factor changing, and resize the viewport if needed.
            // This can happen when the window is moved between monitors with different DPIs.
            // Without this, the viewport will take a smaller portion of the window moved to
            // a higher DPI monitor.
            if normalized_target.is_changed(&scale_factor_changed_window_ids, &HashSet::default())
                && let Some(old_scale_factor) = camera
                    .computed
                    .target_info
                    .as_ref()
                    .map(|info| info.scale_factor)
            {
                let resize_factor = new_computed_target_info.scale_factor / old_scale_factor;
                if let Some(ref mut viewport) = camera.viewport {
                    let resize = |vec: UVec2| (vec.as_vec2() * resize_factor).as_uvec2();
                    viewport.physical_position = resize(viewport.physical_position);
                    viewport.physical_size = resize(viewport.physical_size);
                    viewport_size = Some(viewport.physical_size);
                }
            }
            // This check is needed because when changing WindowMode to Fullscreen, the viewport may have invalid
            // arguments due to a sudden change on the window size to a lower value.
            // If the size of the window is lower, the viewport will match that lower value.
            if let Some(viewport) = &mut camera.viewport {
                viewport.clamp_to_size(new_computed_target_info.physical_size);
            }
            camera.computed.target_info = Some(new_computed_target_info);
            // Recompute the projection matrix from the (possibly resized) logical
            // viewport; skip degenerate zero-sized viewports.
            if let Some(size) = camera.logical_viewport_size()
                && size.x != 0.0
                && size.y != 0.0
            {
                camera_projection.update(size.x, size.y);
                camera.computed.clip_from_view = match &camera.sub_camera_view {
                    Some(sub_view) => camera_projection.get_clip_from_view_for_sub(sub_view),
                    None => camera_projection.get_clip_from_view(),
                }
            }
        }

        // Cache the values compared by the change checks above for the next run.
        if camera.computed.old_viewport_size != viewport_size {
            camera.computed.old_viewport_size = viewport_size;
        }

        if camera.computed.old_sub_camera_view != camera.sub_camera_view {
            camera.computed.old_sub_camera_view = camera.sub_camera_view;
        }
    }
    Ok(())
}
402
/// The render-world mirror of a main-world [`Camera`], inserted by [`extract_cameras`].
#[derive(Component, Debug)]
pub struct ExtractedCamera {
    /// The normalized render target this camera draws to, if it resolved.
    pub target: Option<NormalizedRenderTarget>,
    /// Physical (pixel) size of the camera's viewport.
    pub physical_viewport_size: Option<UVec2>,
    /// Physical (pixel) size of the entire render target.
    pub physical_target_size: Option<UVec2>,
    /// The camera's configured viewport, if any.
    pub viewport: Option<Viewport>,
    /// The render sub-graph to run for this camera.
    pub render_graph: InternedRenderSubGraph,
    /// The camera's order; cameras are sorted ascending by this in [`sort_cameras`].
    pub order: isize,
    pub output_mode: CameraOutputMode,
    pub msaa_writeback: MsaaWriteback,
    pub clear_color: ClearColorConfig,
    /// Index of this camera among cameras sharing the same (target, hdr) pair;
    /// assigned in [`sort_cameras`].
    pub sorted_camera_index_for_target: usize,
    /// Exposure value taken from the camera's `Exposure` component (or its default).
    pub exposure: f32,
    /// Whether the camera entity has the `Hdr` marker component.
    pub hdr: bool,
}
418
/// Extracts active cameras from the main world into the render world, inserting an
/// [`ExtractedCamera`], [`ExtractedView`], visible-entity list, and frustum on each
/// camera's render-world entity. Inactive cameras — and cameras whose target has a
/// zero-sized dimension — have those components removed instead.
pub fn extract_cameras(
    mut commands: Commands,
    query: Extract<
        Query<(
            Entity,
            RenderEntity,
            &Camera,
            &RenderTarget,
            &CameraRenderGraph,
            &GlobalTransform,
            &VisibleEntities,
            &Frustum,
            (
                Has<Hdr>,
                Option<&ColorGrading>,
                Option<&Exposure>,
                Option<&TemporalJitter>,
                Option<&MipBias>,
                Option<&RenderLayers>,
                Option<&Projection>,
                Has<NoIndirectDrawing>,
            ),
        )>,
    >,
    primary_window: Extract<Query<Entity, With<PrimaryWindow>>>,
    gpu_preprocessing_support: Res<GpuPreprocessingSupport>,
    mapper: Extract<Query<&RenderEntity>>,
) {
    let primary_window = primary_window.iter().next();
    // Everything this system may insert; removed as a unit when a camera is
    // inactive or its target is invalid.
    type ExtractedCameraComponents = (
        ExtractedCamera,
        ExtractedView,
        RenderVisibleEntities,
        TemporalJitter,
        MipBias,
        RenderLayers,
        Projection,
        NoIndirectDrawing,
        ViewUniformOffset,
    );
    for (
        main_entity,
        render_entity,
        camera,
        render_target,
        camera_render_graph,
        transform,
        visible_entities,
        frustum,
        (
            hdr,
            color_grading,
            exposure,
            temporal_jitter,
            mip_bias,
            render_layers,
            projection,
            no_indirect_drawing,
        ),
    ) in query.iter()
    {
        if !camera.is_active {
            commands
                .entity(render_entity)
                .remove::<ExtractedCameraComponents>();
            continue;
        }

        let color_grading = color_grading.unwrap_or(&ColorGrading::default()).clone();

        if let (
            Some(URect {
                min: viewport_origin,
                ..
            }),
            Some(viewport_size),
            Some(target_size),
        ) = (
            camera.physical_viewport_rect(),
            camera.physical_viewport_size(),
            camera.physical_target_size(),
        ) {
            // A zero-sized target cannot be rendered to; treat it like an inactive camera.
            if target_size.x == 0 || target_size.y == 0 {
                commands
                    .entity(render_entity)
                    .remove::<ExtractedCameraComponents>();
                continue;
            }

            // Map each visible main-world entity to its render-world counterpart,
            // falling back to `Entity::PLACEHOLDER` when no mapping exists.
            let render_visible_entities = RenderVisibleEntities {
                entities: visible_entities
                    .entities
                    .iter()
                    .map(|(type_id, entities)| {
                        let entities = entities
                            .iter()
                            .map(|entity| {
                                let render_entity = mapper
                                    .get(*entity)
                                    .cloned()
                                    .map(|entity| entity.id())
                                    .unwrap_or(Entity::PLACEHOLDER);
                                (render_entity, (*entity).into())
                            })
                            .collect();
                        (*type_id, entities)
                    })
                    .collect(),
            };

            let mut commands = commands.entity(render_entity);
            commands.insert((
                ExtractedCamera {
                    target: render_target.normalize(primary_window),
                    viewport: camera.viewport.clone(),
                    physical_viewport_size: Some(viewport_size),
                    physical_target_size: Some(target_size),
                    render_graph: camera_render_graph.0,
                    order: camera.order,
                    output_mode: camera.output_mode,
                    msaa_writeback: camera.msaa_writeback,
                    clear_color: camera.clear_color,
                    // this will be set in sort_cameras
                    sorted_camera_index_for_target: 0,
                    exposure: exposure
                        .map(Exposure::exposure)
                        .unwrap_or_else(|| Exposure::default().exposure()),
                    hdr,
                },
                ExtractedView {
                    retained_view_entity: RetainedViewEntity::new(main_entity.into(), None, 0),
                    clip_from_view: camera.clip_from_view(),
                    world_from_view: *transform,
                    clip_from_world: None,
                    hdr,
                    viewport: UVec4::new(
                        viewport_origin.x,
                        viewport_origin.y,
                        viewport_size.x,
                        viewport_size.y,
                    ),
                    color_grading,
                    invert_culling: camera.invert_culling,
                },
                render_visible_entities,
                *frustum,
            ));

            // Optional components are mirrored exactly: present on the main-world
            // camera -> inserted; absent -> removed from the render entity, so stale
            // values from previous frames don't linger.
            if let Some(temporal_jitter) = temporal_jitter {
                commands.insert(temporal_jitter.clone());
            } else {
                commands.remove::<TemporalJitter>();
            }

            if let Some(mip_bias) = mip_bias {
                commands.insert(mip_bias.clone());
            } else {
                commands.remove::<MipBias>();
            }

            if let Some(render_layers) = render_layers {
                commands.insert(render_layers.clone());
            } else {
                commands.remove::<RenderLayers>();
            }

            if let Some(projection) = projection {
                commands.insert(projection.clone());
            } else {
                commands.remove::<Projection>();
            }

            // Fall back to direct drawing when GPU culling isn't supported.
            if no_indirect_drawing
                || !matches!(
                    gpu_preprocessing_support.max_supported_mode,
                    GpuPreprocessingMode::Culling
                )
            {
                commands.insert(NoIndirectDrawing);
            } else {
                commands.remove::<NoIndirectDrawing>();
            }
        };
    }
}
604
/// Cameras sorted by their order field. This is updated in the [`sort_cameras`] system.
#[derive(Resource, Default)]
pub struct SortedCameras(pub Vec<SortedCamera>);

/// A single entry in [`SortedCameras`].
pub struct SortedCamera {
    /// The render-world entity carrying the [`ExtractedCamera`].
    pub entity: Entity,
    /// The camera's order; entries are sorted ascending by (order, target).
    pub order: isize,
    /// The camera's normalized render target, if any.
    pub target: Option<NormalizedRenderTarget>,
    /// Whether the camera entity has the `Hdr` marker component.
    pub hdr: bool,
}
615
/// Rebuilds [`SortedCameras`] from all extracted cameras, sorted by (order, target),
/// assigns each camera its index among cameras sharing the same (target, hdr) pair,
/// and warns when two cameras have ambiguous (identical) order/target combinations.
pub fn sort_cameras(
    mut sorted_cameras: ResMut<SortedCameras>,
    mut cameras: Query<(Entity, &mut ExtractedCamera)>,
) {
    sorted_cameras.0.clear();
    for (entity, camera) in cameras.iter() {
        sorted_cameras.0.push(SortedCamera {
            entity,
            order: camera.order,
            target: camera.target.clone(),
            hdr: camera.hdr,
        });
    }
    // sort by order and ensure within an order, RenderTargets of the same type are packed together
    sorted_cameras
        .0
        .sort_by(|c1, c2| (c1.order, &c1.target).cmp(&(c2.order, &c2.target)));
    let mut previous_order_target = None;
    let mut ambiguities = <HashSet<_>>::default();
    let mut target_counts = <HashMap<_, _>>::default();
    for sorted_camera in &mut sorted_cameras.0 {
        let new_order_target = (sorted_camera.order, sorted_camera.target.clone());
        // After sorting, duplicates are adjacent, so comparing each entry with its
        // predecessor is enough to find every ambiguous (order, target) pair.
        if let Some(previous_order_target) = previous_order_target
            && previous_order_target == new_order_target
        {
            ambiguities.insert(new_order_target.clone());
        }
        if let Some(target) = &sorted_camera.target {
            // Running count per (target, hdr) pair gives each camera a stable
            // per-target index, written back onto its ExtractedCamera.
            let count = target_counts
                .entry((target.clone(), sorted_camera.hdr))
                .or_insert(0usize);
            let (_, mut camera) = cameras.get_mut(sorted_camera.entity).unwrap();
            camera.sorted_camera_index_for_target = *count;
            *count += 1;
        }
        previous_order_target = Some(new_order_target);
    }

    if !ambiguities.is_empty() {
        warn!(
            "Camera order ambiguities detected for active cameras with the following priorities: {:?}. \
            To fix this, ensure there is exactly one Camera entity spawned with a given order for a given RenderTarget. \
            Ambiguities should be resolved because either (1) multiple active cameras were spawned accidentally, which will \
            result in rendering multiple instances of the scene or (2) for cases where multiple active cameras is intentional, \
            ambiguities could result in unpredictable render results.",
            ambiguities
        );
    }
}
665
666/// A subpixel offset to jitter a perspective camera's frustum by.
667///
668/// Useful for temporal rendering techniques.
669#[derive(Component, Clone, Default, Reflect)]
670#[reflect(Default, Component, Clone)]
671pub struct TemporalJitter {
672    /// Offset is in range [-0.5, 0.5].
673    pub offset: Vec2,
674}
675
676impl TemporalJitter {
677    pub fn jitter_projection(&self, clip_from_view: &mut Mat4, view_size: Vec2) {
678        // https://github.com/GPUOpen-LibrariesAndSDKs/FidelityFX-SDK/blob/d7531ae47d8b36a5d4025663e731a47a38be882f/docs/techniques/media/super-resolution-temporal/jitter-space.svg
679        let mut jitter = (self.offset * vec2(2.0, -2.0)) / view_size;
680
681        // orthographic
682        if clip_from_view.w_axis.w == 1.0 {
683            jitter *= vec2(clip_from_view.x_axis.x, clip_from_view.y_axis.y) * 0.5;
684        }
685
686        clip_from_view.z_axis.x += jitter.x;
687        clip_from_view.z_axis.y += jitter.y;
688    }
689}
690
691/// Camera component specifying a mip bias to apply when sampling from material textures.
692///
693/// Often used in conjunction with antialiasing post-process effects to reduce textures blurriness.
694#[derive(Component, Reflect, Clone)]
695#[reflect(Default, Component)]
696pub struct MipBias(pub f32);
697
698impl Default for MipBias {
699    fn default() -> Self {
700        Self(-1.0)
701    }
702}