bevy_core_pipeline/oit/resolve/node.rs

use bevy_camera::{MainPassResolutionOverride, Viewport};
use bevy_ecs::{prelude::*, query::QueryItem};
use bevy_render::{
    camera::ExtractedCamera,
    diagnostic::RecordDiagnostics,
    render_graph::{NodeRunError, RenderGraphContext, RenderLabel, ViewNode},
    render_resource::{BindGroupEntries, PipelineCache, RenderPassDescriptor},
    renderer::RenderContext,
    view::{ViewDepthTexture, ViewTarget, ViewUniformOffset},
};

use super::{OitResolveBindGroup, OitResolvePipeline, OitResolvePipelineId};

/// Render label for the OIT resolve pass.
#[derive(RenderLabel, Debug, Clone, Hash, PartialEq, Eq)]
pub struct OitResolvePass;

/// The node that executes the OIT resolve pass.
#[derive(Default)]
pub struct OitResolveNode;
impl ViewNode for OitResolveNode {
    type ViewQuery = (
        &'static ExtractedCamera,
        &'static ViewTarget,
        &'static ViewUniformOffset,
        &'static OitResolvePipelineId,
        &'static ViewDepthTexture,
        Option<&'static MainPassResolutionOverride>,
    );

    fn run(
        &self,
        _graph: &mut RenderGraphContext,
        render_context: &mut RenderContext,
        (camera, view_target, view_uniform, oit_resolve_pipeline_id, depth, resolution_override): QueryItem<
            Self::ViewQuery,
        >,
        world: &World,
    ) -> Result<(), NodeRunError> {
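        // Skip the pass if the resolve pipeline resource hasn't been created
        // (e.g. OIT isn't in use for this app).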
        let Some(resolve_pipeline) = world.get_resource::<OitResolvePipeline>() else {
            return Ok(());
        };

        // Resolve OIT:
        // sort the layers and render the final blended color to the screen.
        {
            let pipeline_cache = world.resource::<PipelineCache>();
            let bind_group = world.resource::<OitResolveBindGroup>();
            let Some(pipeline) = pipeline_cache.get_render_pipeline(oit_resolve_pipeline_id.0)
            else {
                return Ok(());
            };

            let diagnostics = render_context.diagnostic_recorder();

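            // Bind the view's depth texture so the resolve shader can read scene depth.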
            let depth_bind_group = render_context.render_device().create_bind_group(
                "oit_resolve_depth_bind_group",
                &pipeline_cache
                    .get_bind_group_layout(&resolve_pipeline.oit_depth_bind_group_layout),
                &BindGroupEntries::single(depth.view()),
            );

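            // The resolve output is written straight to the view target; no depth-stencil
            // attachment is needed since depth is read through the bind group above.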
            let mut render_pass = render_context.begin_tracked_render_pass(RenderPassDescriptor {
                label: Some("oit_resolve"),
                color_attachments: &[Some(view_target.get_color_attachment())],
                depth_stencil_attachment: None,
                timestamp_writes: None,
                occlusion_query_set: None,
            });
            let pass_span = diagnostics.pass_span(&mut render_pass, "oit_resolve");

            if let Some(viewport) =
                Viewport::from_viewport_and_override(camera.viewport.as_ref(), resolution_override)
            {
                render_pass.set_camera_viewport(&viewport);
            }

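            // Group 0: the OIT resolve bind group, offset to this view's uniform.
            // Group 1: the depth texture bound above.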
            render_pass.set_render_pipeline(pipeline);
            render_pass.set_bind_group(0, bind_group, &[view_uniform.offset]);
            render_pass.set_bind_group(1, &depth_bind_group, &[]);

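            // Draw a single fullscreen triangle (3 vertices, 1 instance) to resolve every pixel.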
            render_pass.draw(0..3, 0..1);

            pass_span.end(&mut render_pass);
        }

        Ok(())
    }
}