wgpu_hal/gles/
command.rs

1use super::{conv, Command as C};
2use arrayvec::ArrayVec;
3use std::{
4    mem::{self, size_of, size_of_val},
5    ops::Range,
6};
7
/// Per-texture-slot bookkeeping: the GL bind target of the texture bound to
/// the slot, and which sampler slot (if any) is associated with it.
#[derive(Clone, Copy, Debug, Default)]
struct TextureSlotDesc {
    // Bind target used when (re)binding the texture on this slot.
    tex_target: super::BindTarget,
    // Index into `State::samplers`, or `None` when no sampler is associated.
    sampler_index: Option<u8>,
}
13
/// CPU-side state tracked while encoding, used to lazily (re)generate GL
/// state commands (see `prepare_draw` and the `rebind_*` helpers below).
pub(super) struct State {
    topology: u32,
    primitive: super::PrimitiveState,
    index_format: wgt::IndexFormat,
    index_offset: wgt::BufferAddress,
    // One (layout, binding) pair per vertex buffer slot; the binding stays
    // `None` until a buffer is actually set on that slot.
    vertex_buffers:
        [(super::VertexBufferDesc, Option<super::BufferBinding>); crate::MAX_VERTEX_BUFFERS],
    vertex_attributes: ArrayVec<super::AttributeDesc, { super::MAX_VERTEX_ATTRIBUTES }>,
    color_targets: ArrayVec<super::ColorTargetDesc, { crate::MAX_COLOR_ATTACHMENTS }>,
    stencil: super::StencilState,
    depth_bias: wgt::DepthBiasState,
    alpha_to_coverage_enabled: bool,
    // Sampler objects indexed by sampler slot (see `TextureSlotDesc::sampler_index`).
    samplers: [Option<glow::Sampler>; super::MAX_SAMPLERS],
    texture_slots: [TextureSlotDesc; super::MAX_TEXTURE_SLOTS],
    // Extent of the current render pass; used when resolving attachments.
    render_size: wgt::Extent3d,
    // (color attachment, resolve target) pairs to resolve at end of pass.
    resolve_attachments: ArrayVec<(u32, super::TextureView), { crate::MAX_COLOR_ATTACHMENTS }>,
    // Attachments to invalidate at end of pass; `+ 2` leaves room for the
    // depth and stencil attachments on top of the color ones.
    invalidate_attachments: ArrayVec<u32, { crate::MAX_COLOR_ATTACHMENTS + 2 }>,
    // True when the pass pushed a debug group that must be popped at pass end.
    has_pass_label: bool,
    // Bitmask of vertex-buffer slots that step per instance.
    instance_vbuf_mask: usize,
    // Bitmask of vertex-buffer slots whose bindings must be re-emitted
    // before the next draw.
    dirty_vbuf_mask: usize,
    // The first-instance value the current vertex bindings were emitted for
    // (relevant when first-instance support is emulated).
    active_first_instance: u32,
    first_instance_location: Option<glow::UniformLocation>,
    push_constant_descs: ArrayVec<super::PushConstantDesc, { super::MAX_PUSH_CONSTANT_COMMANDS }>,
    // The current state of the push constant data block.
    current_push_constant_data: [u32; super::MAX_PUSH_CONSTANTS],
    // If set, a timestamp query to record when the current pass ends.
    end_of_pass_timestamp: Option<glow::Query>,
}
41
impl Default for State {
    // Written by hand rather than derived: `current_push_constant_data` is an
    // array sized by `MAX_PUSH_CONSTANTS`, which (presumably) exceeds the
    // lengths for which `[T; N]: Default` is provided — hence the explicit
    // zero-fill below while every other field just defaults.
    fn default() -> Self {
        Self {
            topology: Default::default(),
            primitive: Default::default(),
            index_format: Default::default(),
            index_offset: Default::default(),
            vertex_buffers: Default::default(),
            vertex_attributes: Default::default(),
            color_targets: Default::default(),
            stencil: Default::default(),
            depth_bias: Default::default(),
            alpha_to_coverage_enabled: Default::default(),
            samplers: Default::default(),
            texture_slots: Default::default(),
            render_size: Default::default(),
            resolve_attachments: Default::default(),
            invalidate_attachments: Default::default(),
            has_pass_label: Default::default(),
            instance_vbuf_mask: Default::default(),
            dirty_vbuf_mask: Default::default(),
            active_first_instance: Default::default(),
            first_instance_location: Default::default(),
            push_constant_descs: Default::default(),
            current_push_constant_data: [0; super::MAX_PUSH_CONSTANTS],
            end_of_pass_timestamp: Default::default(),
        }
    }
}
71
72impl super::CommandBuffer {
73    fn clear(&mut self) {
74        self.label = None;
75        self.commands.clear();
76        self.data_bytes.clear();
77        self.queries.clear();
78    }
79
80    fn add_marker(&mut self, marker: &str) -> Range<u32> {
81        let start = self.data_bytes.len() as u32;
82        self.data_bytes.extend(marker.as_bytes());
83        start..self.data_bytes.len() as u32
84    }
85
86    fn add_push_constant_data(&mut self, data: &[u32]) -> Range<u32> {
87        let data_raw =
88            unsafe { std::slice::from_raw_parts(data.as_ptr().cast(), size_of_val(data)) };
89        let start = self.data_bytes.len();
90        assert!(start < u32::MAX as usize);
91        self.data_bytes.extend_from_slice(data_raw);
92        let end = self.data_bytes.len();
93        assert!(end < u32::MAX as usize);
94        (start as u32)..(end as u32)
95    }
96}
97
impl Drop for super::CommandEncoder {
    fn drop(&mut self) {
        use crate::CommandEncoder;
        // Dropping an encoder mid-recording discards whatever was recorded
        // so far, so stale commands never leak into the next recording.
        unsafe { self.discard_encoding() }
    }
}
104
impl super::CommandEncoder {
    /// Emit `SetStencilFunc` commands reflecting the tracked stencil state.
    /// When front and back faces agree on function/read-mask/reference, one
    /// `FRONT_AND_BACK` command is emitted instead of two per-face ones.
    fn rebind_stencil_func(&mut self) {
        fn make(s: &super::StencilSide, face: u32) -> C {
            C::SetStencilFunc {
                face,
                function: s.function,
                reference: s.reference,
                read_mask: s.mask_read,
            }
        }

        let s = &self.state.stencil;
        if s.front.function == s.back.function
            && s.front.mask_read == s.back.mask_read
            && s.front.reference == s.back.reference
        {
            self.cmd_buffer
                .commands
                .push(make(&s.front, glow::FRONT_AND_BACK));
        } else {
            self.cmd_buffer.commands.push(make(&s.front, glow::FRONT));
            self.cmd_buffer.commands.push(make(&s.back, glow::BACK));
        }
    }

    /// Re-emit bindings for the vertex buffer slots marked in
    /// `state.dirty_vbuf_mask`, offsetting per-instance data by
    /// `first_instance` (used to emulate a non-zero base instance).
    fn rebind_vertex_data(&mut self, first_instance: u32) {
        if self
            .private_caps
            .contains(super::PrivateCapabilities::VERTEX_BUFFER_LAYOUT)
        {
            // Buffer-layout path: one `SetVertexBuffer` per dirty slot.
            for (index, pair) in self.state.vertex_buffers.iter().enumerate() {
                if self.state.dirty_vbuf_mask & (1 << index) == 0 {
                    continue;
                }
                let (buffer_desc, vb) = match *pair {
                    // Not all dirty bindings are necessarily filled. Some may be unused.
                    (_, None) => continue,
                    (ref vb_desc, Some(ref vb)) => (vb_desc.clone(), vb),
                };
                // The emulated first instance is baked into the binding
                // offset of per-instance buffers.
                let instance_offset = match buffer_desc.step {
                    wgt::VertexStepMode::Vertex => 0,
                    wgt::VertexStepMode::Instance => first_instance * buffer_desc.stride,
                };

                self.cmd_buffer.commands.push(C::SetVertexBuffer {
                    index: index as u32,
                    buffer: super::BufferBinding {
                        raw: vb.raw,
                        offset: vb.offset + instance_offset as wgt::BufferAddress,
                    },
                    buffer_desc,
                });
                self.state.dirty_vbuf_mask ^= 1 << index;
            }
        } else {
            // Fallback path: each attribute sourced from a dirty buffer is
            // re-emitted individually, carrying its buffer along.
            let mut vbuf_mask = 0;
            for attribute in self.state.vertex_attributes.iter() {
                if self.state.dirty_vbuf_mask & (1 << attribute.buffer_index) == 0 {
                    continue;
                }
                let (buffer_desc, vb) =
                    match self.state.vertex_buffers[attribute.buffer_index as usize] {
                        // Not all dirty bindings are necessarily filled. Some may be unused.
                        (_, None) => continue,
                        (ref vb_desc, Some(ref vb)) => (vb_desc.clone(), vb),
                    };

                let mut attribute_desc = attribute.clone();
                attribute_desc.offset += vb.offset as u32;
                if buffer_desc.step == wgt::VertexStepMode::Instance {
                    attribute_desc.offset += buffer_desc.stride * first_instance;
                }

                self.cmd_buffer.commands.push(C::SetVertexAttribute {
                    buffer: Some(vb.raw),
                    buffer_desc,
                    attribute_desc,
                });
                vbuf_mask |= 1 << attribute.buffer_index;
            }
            // Clear only the bits that were actually serviced above.
            self.state.dirty_vbuf_mask ^= vbuf_mask;
        }
    }

    /// Re-emit `BindSampler` for every texture slot whose texture binding
    /// changed (`dirty_textures`, indexed by texture slot) or whose
    /// associated sampler changed (`dirty_samplers`, indexed by sampler slot).
    fn rebind_sampler_states(&mut self, dirty_textures: u32, dirty_samplers: u32) {
        for (texture_index, slot) in self.state.texture_slots.iter().enumerate() {
            if dirty_textures & (1 << texture_index) != 0
                || slot
                    .sampler_index
                    .map_or(false, |si| dirty_samplers & (1 << si) != 0)
            {
                // `None` when the slot has no associated sampler.
                let sampler = slot
                    .sampler_index
                    .and_then(|si| self.state.samplers[si as usize]);
                self.cmd_buffer
                    .commands
                    .push(C::BindSampler(texture_index as u32, sampler));
            }
        }
    }

    /// Flush dirty vertex-buffer state before a draw.
    fn prepare_draw(&mut self, first_instance: u32) {
        // If we support fully featured instancing, we want to bind everything as normal
        // and let the draw call sort it out.
        let emulated_first_instance_value = if self
            .private_caps
            .contains(super::PrivateCapabilities::FULLY_FEATURED_INSTANCING)
        {
            0
        } else {
            first_instance
        };

        if emulated_first_instance_value != self.state.active_first_instance {
            // rebind all per-instance buffers on first-instance change
            self.state.dirty_vbuf_mask |= self.state.instance_vbuf_mask;
            self.state.active_first_instance = emulated_first_instance_value;
        }
        if self.state.dirty_vbuf_mask != 0 {
            self.rebind_vertex_data(emulated_first_instance_value);
        }
    }

    /// Bind a pipeline's program and adopt its first-instance uniform and
    /// push-constant descriptors, then rebind the samplers of any texture
    /// slot whose sampler mapping changed with the new program.
    #[allow(clippy::clone_on_copy)] // False positive when cloning glow::UniformLocation
    fn set_pipeline_inner(&mut self, inner: &super::PipelineInner) {
        self.cmd_buffer.commands.push(C::SetProgram(inner.program));

        self.state
            .first_instance_location
            .clone_from(&inner.first_instance_location);
        self.state
            .push_constant_descs
            .clone_from(&inner.push_constant_descs);

        // rebind textures, if needed
        let mut dirty_textures = 0u32;
        for (texture_index, (slot, &sampler_index)) in self
            .state
            .texture_slots
            .iter_mut()
            .zip(inner.sampler_map.iter())
            .enumerate()
        {
            if slot.sampler_index != sampler_index {
                slot.sampler_index = sampler_index;
                dirty_textures |= 1 << texture_index;
            }
        }
        if dirty_textures != 0 {
            self.rebind_sampler_states(dirty_textures, 0);
        }
    }
}
258
259impl crate::CommandEncoder for super::CommandEncoder {
260    type A = super::Api;
261
262    unsafe fn begin_encoding(&mut self, label: crate::Label) -> Result<(), crate::DeviceError> {
263        self.state = State::default();
264        self.cmd_buffer.label = label.map(str::to_string);
265        Ok(())
266    }
    unsafe fn discard_encoding(&mut self) {
        // Throw away everything recorded so far; allocations are kept for reuse.
        self.cmd_buffer.clear();
    }
    unsafe fn end_encoding(&mut self) -> Result<super::CommandBuffer, crate::DeviceError> {
        // Hand the recorded buffer to the caller, leaving a fresh one behind.
        Ok(mem::take(&mut self.cmd_buffer))
    }
    unsafe fn reset_all<I>(&mut self, _command_buffers: I) {
        //TODO: could re-use the allocations in all these command buffers
    }
276
277    unsafe fn transition_buffers<'a, T>(&mut self, barriers: T)
278    where
279        T: Iterator<Item = crate::BufferBarrier<'a, super::Buffer>>,
280    {
281        if !self
282            .private_caps
283            .contains(super::PrivateCapabilities::MEMORY_BARRIERS)
284        {
285            return;
286        }
287        for bar in barriers {
288            // GLES only synchronizes storage -> anything explicitly
289            if !bar
290                .usage
291                .start
292                .contains(crate::BufferUses::STORAGE_READ_WRITE)
293            {
294                continue;
295            }
296            self.cmd_buffer
297                .commands
298                .push(C::BufferBarrier(bar.buffer.raw.unwrap(), bar.usage.end));
299        }
300    }
301
302    unsafe fn transition_textures<'a, T>(&mut self, barriers: T)
303    where
304        T: Iterator<Item = crate::TextureBarrier<'a, super::Texture>>,
305    {
306        if !self
307            .private_caps
308            .contains(super::PrivateCapabilities::MEMORY_BARRIERS)
309        {
310            return;
311        }
312
313        let mut combined_usage = crate::TextureUses::empty();
314        for bar in barriers {
315            // GLES only synchronizes storage -> anything explicitly
316            if !bar
317                .usage
318                .start
319                .contains(crate::TextureUses::STORAGE_READ_WRITE)
320            {
321                continue;
322            }
323            // unlike buffers, there is no need for a concrete texture
324            // object to be bound anywhere for a barrier
325            combined_usage |= bar.usage.end;
326        }
327
328        if !combined_usage.is_empty() {
329            self.cmd_buffer
330                .commands
331                .push(C::TextureBarrier(combined_usage));
332        }
333    }
334
    unsafe fn clear_buffer(&mut self, buffer: &super::Buffer, range: crate::MemoryRange) {
        // Record a clear of `range` within the buffer; executed at playback.
        self.cmd_buffer.commands.push(C::ClearBuffer {
            dst: buffer.clone(),
            dst_target: buffer.target,
            range,
        });
    }
342
343    unsafe fn copy_buffer_to_buffer<T>(
344        &mut self,
345        src: &super::Buffer,
346        dst: &super::Buffer,
347        regions: T,
348    ) where
349        T: Iterator<Item = crate::BufferCopy>,
350    {
351        let (src_target, dst_target) = if src.target == dst.target {
352            (glow::COPY_READ_BUFFER, glow::COPY_WRITE_BUFFER)
353        } else {
354            (src.target, dst.target)
355        };
356        for copy in regions {
357            self.cmd_buffer.commands.push(C::CopyBufferToBuffer {
358                src: src.clone(),
359                src_target,
360                dst: dst.clone(),
361                dst_target,
362                copy,
363            })
364        }
365    }
366
    #[cfg(webgl)]
    unsafe fn copy_external_image_to_texture<T>(
        &mut self,
        src: &wgt::ImageCopyExternalImage,
        dst: &super::Texture,
        dst_premultiplication: bool,
        regions: T,
    ) where
        T: Iterator<Item = crate::TextureCopy>,
    {
        // Resolve the destination's raw GL object and bind target once, then
        // record one copy command per region.
        let (dst_raw, dst_target) = dst.inner.as_native();
        for copy in regions {
            self.cmd_buffer
                .commands
                .push(C::CopyExternalImageToTexture {
                    src: src.clone(),
                    dst: dst_raw,
                    dst_target,
                    dst_format: dst.format,
                    dst_premultiplication,
                    copy,
                })
        }
    }
391
392    unsafe fn copy_texture_to_texture<T>(
393        &mut self,
394        src: &super::Texture,
395        _src_usage: crate::TextureUses,
396        dst: &super::Texture,
397        regions: T,
398    ) where
399        T: Iterator<Item = crate::TextureCopy>,
400    {
401        let (src_raw, src_target) = src.inner.as_native();
402        let (dst_raw, dst_target) = dst.inner.as_native();
403        for mut copy in regions {
404            copy.clamp_size_to_virtual(&src.copy_size, &dst.copy_size);
405            self.cmd_buffer.commands.push(C::CopyTextureToTexture {
406                src: src_raw,
407                src_target,
408                dst: dst_raw,
409                dst_target,
410                copy,
411            })
412        }
413    }
414
    unsafe fn copy_buffer_to_texture<T>(
        &mut self,
        src: &super::Buffer,
        dst: &super::Texture,
        regions: T,
    ) where
        T: Iterator<Item = crate::BufferTextureCopy>,
    {
        let (dst_raw, dst_target) = dst.inner.as_native();

        for mut copy in regions {
            // Keep the copy extent within the destination's virtual size.
            copy.clamp_size_to_virtual(&dst.copy_size);
            self.cmd_buffer.commands.push(C::CopyBufferToTexture {
                src: src.clone(),
                src_target: src.target,
                dst: dst_raw,
                dst_target,
                dst_format: dst.format,
                copy,
            })
        }
    }
437
    unsafe fn copy_texture_to_buffer<T>(
        &mut self,
        src: &super::Texture,
        _src_usage: crate::TextureUses,
        dst: &super::Buffer,
        regions: T,
    ) where
        T: Iterator<Item = crate::BufferTextureCopy>,
    {
        let (src_raw, src_target) = src.inner.as_native();
        for mut copy in regions {
            // Keep the copy extent within the source's virtual size.
            copy.clamp_size_to_virtual(&src.copy_size);
            self.cmd_buffer.commands.push(C::CopyTextureToBuffer {
                src: src_raw,
                src_target,
                src_format: src.format,
                dst: dst.clone(),
                dst_target: dst.target,
                copy,
            })
        }
    }
460
    unsafe fn begin_query(&mut self, set: &super::QuerySet, index: u32) {
        // Panics if `index` is out of bounds for the set (caller contract).
        let query = set.queries[index as usize];
        self.cmd_buffer
            .commands
            .push(C::BeginQuery(query, set.target));
    }
    unsafe fn end_query(&mut self, set: &super::QuerySet, _index: u32) {
        // GL ends the active query per target, so the index is irrelevant.
        self.cmd_buffer.commands.push(C::EndQuery(set.target));
    }
    unsafe fn write_timestamp(&mut self, set: &super::QuerySet, index: u32) {
        // Panics if `index` is out of bounds for the set (caller contract).
        let query = set.queries[index as usize];
        self.cmd_buffer.commands.push(C::TimestampQuery(query));
    }
    unsafe fn reset_queries(&mut self, _set: &super::QuerySet, _range: Range<u32>) {
        //TODO: what do we do here?
    }
    unsafe fn copy_query_results(
        &mut self,
        set: &super::QuerySet,
        range: Range<u32>,
        buffer: &super::Buffer,
        offset: wgt::BufferAddress,
        _stride: wgt::BufferSize,
    ) {
        // Stash the query handles in the command buffer's side-band list and
        // reference them by range; the command is resolved at playback.
        let start = self.cmd_buffer.queries.len();
        self.cmd_buffer
            .queries
            .extend_from_slice(&set.queries[range.start as usize..range.end as usize]);
        let query_range = start as u32..self.cmd_buffer.queries.len() as u32;
        self.cmd_buffer.commands.push(C::CopyQueryResults {
            query_range,
            dst: buffer.clone(),
            dst_target: buffer.target,
            dst_offset: offset,
        });
    }
497
498    // render
499
    unsafe fn begin_render_pass(
        &mut self,
        desc: &crate::RenderPassDescriptor<super::QuerySet, super::TextureView>,
    ) {
        // A previous pass must have consumed its end-of-pass timestamp.
        debug_assert!(self.state.end_of_pass_timestamp.is_none());
        if let Some(ref t) = desc.timestamp_writes {
            if let Some(index) = t.beginning_of_pass_write_index {
                unsafe { self.write_timestamp(t.query_set, index) }
            }
            // The end-of-pass query is stashed and written in `end_render_pass`.
            self.state.end_of_pass_timestamp = t
                .end_of_pass_write_index
                .map(|index| t.query_set.queries[index as usize]);
        }

        self.state.render_size = desc.extent;
        self.state.resolve_attachments.clear();
        self.state.invalidate_attachments.clear();
        if let Some(label) = desc.label {
            let range = self.cmd_buffer.add_marker(label);
            self.cmd_buffer.commands.push(C::PushDebugGroup(range));
            self.state.has_pass_label = true;
        }

        // External framebuffers (WebGL canvas) cannot be combined with other
        // attachments — detect them up front.
        let rendering_to_external_framebuffer = desc
            .color_attachments
            .iter()
            .filter_map(|at| at.as_ref())
            .any(|at| match at.target.view.inner {
                #[cfg(webgl)]
                super::TextureInner::ExternalFramebuffer { .. } => true,
                _ => false,
            });

        if rendering_to_external_framebuffer && desc.color_attachments.len() != 1 {
            panic!("Multiple render attachments with external framebuffers are not supported.");
        }

        // `COLOR_ATTACHMENT0` to `COLOR_ATTACHMENT31` gives 32 possible color attachments.
        assert!(desc.color_attachments.len() <= 32);

        match desc
            .color_attachments
            .first()
            .filter(|at| at.is_some())
            .and_then(|at| at.as_ref().map(|at| &at.target.view.inner))
        {
            // default framebuffer (provided externally)
            Some(&super::TextureInner::DefaultRenderbuffer) => {
                self.cmd_buffer
                    .commands
                    .push(C::ResetFramebuffer { is_default: true });
            }
            _ => {
                // set the framebuffer
                self.cmd_buffer
                    .commands
                    .push(C::ResetFramebuffer { is_default: false });

                for (i, cat) in desc.color_attachments.iter().enumerate() {
                    if let Some(cat) = cat.as_ref() {
                        let attachment = glow::COLOR_ATTACHMENT0 + i as u32;
                        self.cmd_buffer.commands.push(C::BindAttachment {
                            attachment,
                            view: cat.target.view.clone(),
                        });
                        // Resolve targets are handled at end of pass.
                        if let Some(ref rat) = cat.resolve_target {
                            self.state
                                .resolve_attachments
                                .push((attachment, rat.view.clone()));
                        }
                        // Attachments not stored are invalidated at end of pass.
                        if !cat.ops.contains(crate::AttachmentOps::STORE) {
                            self.state.invalidate_attachments.push(attachment);
                        }
                    }
                }
                if let Some(ref dsat) = desc.depth_stencil_attachment {
                    let aspects = dsat.target.view.aspects;
                    // Pick the attachment point matching the view's aspects;
                    // combined depth+stencil uses the combined point.
                    let attachment = match aspects {
                        crate::FormatAspects::DEPTH => glow::DEPTH_ATTACHMENT,
                        crate::FormatAspects::STENCIL => glow::STENCIL_ATTACHMENT,
                        _ => glow::DEPTH_STENCIL_ATTACHMENT,
                    };
                    self.cmd_buffer.commands.push(C::BindAttachment {
                        attachment,
                        view: dsat.target.view.clone(),
                    });
                    // Depth and stencil invalidation are tracked per aspect.
                    if aspects.contains(crate::FormatAspects::DEPTH)
                        && !dsat.depth_ops.contains(crate::AttachmentOps::STORE)
                    {
                        self.state
                            .invalidate_attachments
                            .push(glow::DEPTH_ATTACHMENT);
                    }
                    if aspects.contains(crate::FormatAspects::STENCIL)
                        && !dsat.stencil_ops.contains(crate::AttachmentOps::STORE)
                    {
                        self.state
                            .invalidate_attachments
                            .push(glow::STENCIL_ATTACHMENT);
                    }
                }
            }
        }

        // Default scissor and viewport cover the whole pass extent.
        let rect = crate::Rect {
            x: 0,
            y: 0,
            w: desc.extent.width as i32,
            h: desc.extent.height as i32,
        };
        self.cmd_buffer.commands.push(C::SetScissor(rect.clone()));
        self.cmd_buffer.commands.push(C::SetViewport {
            rect,
            depth: 0.0..1.0,
        });

        if !rendering_to_external_framebuffer {
            // set the draw buffers and states
            self.cmd_buffer
                .commands
                .push(C::SetDrawColorBuffers(desc.color_attachments.len() as u8));
        }

        // issue the clears
        for (i, cat) in desc
            .color_attachments
            .iter()
            .filter_map(|at| at.as_ref())
            .enumerate()
        {
            if !cat.ops.contains(crate::AttachmentOps::LOAD) {
                let c = &cat.clear_value;
                // The clear command variant must match the attachment's
                // sample type (float/uint/sint).
                self.cmd_buffer.commands.push(
                    match cat.target.view.format.sample_type(None, None).unwrap() {
                        wgt::TextureSampleType::Float { .. } => C::ClearColorF {
                            draw_buffer: i as u32,
                            color: [c.r as f32, c.g as f32, c.b as f32, c.a as f32],
                            is_srgb: cat.target.view.format.is_srgb(),
                        },
                        wgt::TextureSampleType::Uint => C::ClearColorU(
                            i as u32,
                            [c.r as u32, c.g as u32, c.b as u32, c.a as u32],
                        ),
                        wgt::TextureSampleType::Sint => C::ClearColorI(
                            i as u32,
                            [c.r as i32, c.g as i32, c.b as i32, c.a as i32],
                        ),
                        wgt::TextureSampleType::Depth => unreachable!(),
                    },
                );
            }
        }

        if let Some(ref dsat) = desc.depth_stencil_attachment {
            let clear_depth = !dsat.depth_ops.contains(crate::AttachmentOps::LOAD);
            let clear_stencil = !dsat.stencil_ops.contains(crate::AttachmentOps::LOAD);

            if clear_depth && clear_stencil {
                self.cmd_buffer.commands.push(C::ClearDepthAndStencil(
                    dsat.clear_value.0,
                    dsat.clear_value.1,
                ));
            } else if clear_depth {
                self.cmd_buffer
                    .commands
                    .push(C::ClearDepth(dsat.clear_value.0));
            } else if clear_stencil {
                self.cmd_buffer
                    .commands
                    .push(C::ClearStencil(dsat.clear_value.1));
            }
        }
    }
    unsafe fn end_render_pass(&mut self) {
        // Resolve MSAA attachments queued up by `begin_render_pass`.
        for (attachment, dst) in self.state.resolve_attachments.drain(..) {
            self.cmd_buffer.commands.push(C::ResolveAttachment {
                attachment,
                dst,
                size: self.state.render_size,
            });
        }
        // Invalidate attachments whose contents are not stored.
        if !self.state.invalidate_attachments.is_empty() {
            self.cmd_buffer.commands.push(C::InvalidateAttachments(
                self.state.invalidate_attachments.clone(),
            ));
            self.state.invalidate_attachments.clear();
        }
        if self.state.has_pass_label {
            self.cmd_buffer.commands.push(C::PopDebugGroup);
            self.state.has_pass_label = false;
        }
        // Reset per-pass vertex state so the next pass starts clean.
        self.state.instance_vbuf_mask = 0;
        self.state.dirty_vbuf_mask = 0;
        self.state.active_first_instance = 0;
        self.state.color_targets.clear();
        for vat in &self.state.vertex_attributes {
            self.cmd_buffer
                .commands
                .push(C::UnsetVertexAttribute(vat.location));
        }
        self.state.vertex_attributes.clear();
        self.state.primitive = super::PrimitiveState::default();

        // Write the end-of-pass timestamp stashed by `begin_render_pass`.
        if let Some(query) = self.state.end_of_pass_timestamp.take() {
            self.cmd_buffer.commands.push(C::TimestampQuery(query));
        }
    }
707
    unsafe fn set_bind_group(
        &mut self,
        layout: &super::PipelineLayout,
        index: u32,
        group: &super::BindGroup,
        dynamic_offsets: &[wgt::DynamicOffset],
    ) {
        // Cursor into `dynamic_offsets`; consumed in binding order by
        // buffers declared with `has_dynamic_offset`.
        let mut do_index = 0;
        let mut dirty_textures = 0u32;
        let mut dirty_samplers = 0u32;
        let group_info = &layout.group_infos[index as usize];

        for (binding_layout, raw_binding) in group_info.entries.iter().zip(group.contents.iter()) {
            // Map the binding number to its flat slot via the layout table.
            let slot = group_info.binding_to_slot[binding_layout.binding as usize] as u32;
            match *raw_binding {
                super::RawBinding::Buffer {
                    raw,
                    offset: base_offset,
                    size,
                } => {
                    let mut offset = base_offset;
                    let target = match binding_layout.ty {
                        wgt::BindingType::Buffer {
                            ty,
                            has_dynamic_offset,
                            min_binding_size: _,
                        } => {
                            if has_dynamic_offset {
                                offset += dynamic_offsets[do_index] as i32;
                                do_index += 1;
                            }
                            match ty {
                                wgt::BufferBindingType::Uniform => glow::UNIFORM_BUFFER,
                                wgt::BufferBindingType::Storage { .. } => {
                                    glow::SHADER_STORAGE_BUFFER
                                }
                            }
                        }
                        _ => unreachable!(),
                    };
                    self.cmd_buffer.commands.push(C::BindBuffer {
                        target,
                        slot,
                        buffer: raw,
                        offset,
                        size,
                    });
                }
                super::RawBinding::Sampler(sampler) => {
                    // Samplers are only tracked here; the GL commands are
                    // emitted by `rebind_sampler_states` below.
                    dirty_samplers |= 1 << slot;
                    self.state.samplers[slot as usize] = Some(sampler);
                }
                super::RawBinding::Texture {
                    raw,
                    target,
                    aspects,
                    ref mip_levels,
                } => {
                    dirty_textures |= 1 << slot;
                    self.state.texture_slots[slot as usize].tex_target = target;
                    self.cmd_buffer.commands.push(C::BindTexture {
                        slot,
                        texture: raw,
                        target,
                        aspects,
                        mip_levels: mip_levels.clone(),
                    });
                }
                super::RawBinding::Image(ref binding) => {
                    self.cmd_buffer.commands.push(C::BindImage {
                        slot,
                        binding: binding.clone(),
                    });
                }
            }
        }

        // Re-emit sampler bindings affected by the texture/sampler changes.
        self.rebind_sampler_states(dirty_textures, dirty_samplers);
    }
787
788    unsafe fn set_push_constants(
789        &mut self,
790        _layout: &super::PipelineLayout,
791        _stages: wgt::ShaderStages,
792        offset_bytes: u32,
793        data: &[u32],
794    ) {
795        // There is nothing preventing the user from trying to update a single value within
796        // a vector or matrix in the set_push_constant call, as to the user, all of this is
797        // just memory. However OpenGL does not allow partial uniform updates.
798        //
799        // As such, we locally keep a copy of the current state of the push constant memory
800        // block. If the user tries to update a single value, we have the data to update the entirety
801        // of the uniform.
802        let start_words = offset_bytes / 4;
803        let end_words = start_words + data.len() as u32;
804        self.state.current_push_constant_data[start_words as usize..end_words as usize]
805            .copy_from_slice(data);
806
807        // We iterate over the uniform list as there may be multiple uniforms that need
808        // updating from the same push constant memory (one for each shader stage).
809        //
810        // Additionally, any statically unused uniform descs will have been removed from this list
811        // by OpenGL, so the uniform list is not contiguous.
812        for uniform in self.state.push_constant_descs.iter().cloned() {
813            let uniform_size_words = uniform.size_bytes / 4;
814            let uniform_start_words = uniform.offset / 4;
815            let uniform_end_words = uniform_start_words + uniform_size_words;
816
817            // Is true if any word within the uniform binding was updated
818            let needs_updating =
819                start_words < uniform_end_words || uniform_start_words <= end_words;
820
821            if needs_updating {
822                let uniform_data = &self.state.current_push_constant_data
823                    [uniform_start_words as usize..uniform_end_words as usize];
824
825                let range = self.cmd_buffer.add_push_constant_data(uniform_data);
826
827                self.cmd_buffer.commands.push(C::SetPushConstants {
828                    uniform,
829                    offset: range.start,
830                });
831            }
832        }
833    }
834
835    unsafe fn insert_debug_marker(&mut self, label: &str) {
836        let range = self.cmd_buffer.add_marker(label);
837        self.cmd_buffer.commands.push(C::InsertDebugMarker(range));
838    }
839    unsafe fn begin_debug_marker(&mut self, group_label: &str) {
840        let range = self.cmd_buffer.add_marker(group_label);
841        self.cmd_buffer.commands.push(C::PushDebugGroup(range));
842    }
    /// Closes the debug group opened by `begin_debug_marker`.
    unsafe fn end_debug_marker(&mut self) {
        self.cmd_buffer.commands.push(C::PopDebugGroup);
    }
846
    /// Records all state changes implied by binding `pipeline`: vertex
    /// layout, program, primitive, depth/stencil/bias, multisampling and
    /// blend state. Where a cached copy exists in `self.state`, new state is
    /// diffed against it so redundant commands are not emitted.
    unsafe fn set_render_pipeline(&mut self, pipeline: &super::RenderPipeline) {
        self.state.topology = conv::map_primitive_topology(pipeline.primitive.topology);

        if self
            .private_caps
            .contains(super::PrivateCapabilities::VERTEX_BUFFER_LAYOUT)
        {
            // Attribute formats can be recorded now with no buffer attached
            // (`buffer: None`); actual buffer bindings come later.
            for vat in pipeline.vertex_attributes.iter() {
                let vb = &pipeline.vertex_buffers[vat.buffer_index as usize];
                // set the layout
                self.cmd_buffer.commands.push(C::SetVertexAttribute {
                    buffer: None,
                    buffer_desc: vb.clone(),
                    attribute_desc: vat.clone(),
                });
            }
        } else {
            // Fallback path: attributes are coupled to buffer bindings, so
            // unset the previous pipeline's attributes and mark each buffer
            // slot used by the new pipeline dirty for rebinding at draw time.
            for vat in &self.state.vertex_attributes {
                self.cmd_buffer
                    .commands
                    .push(C::UnsetVertexAttribute(vat.location));
            }
            self.state.vertex_attributes.clear();

            self.state.dirty_vbuf_mask = 0;
            // copy vertex attributes
            for vat in pipeline.vertex_attributes.iter() {
                //Note: we can invalidate more carefully here.
                self.state.dirty_vbuf_mask |= 1 << vat.buffer_index;
                self.state.vertex_attributes.push(vat.clone());
            }
        }

        self.state.instance_vbuf_mask = 0;
        // copy vertex state
        for (index, (&mut (ref mut state_desc, _), pipe_desc)) in self
            .state
            .vertex_buffers
            .iter_mut()
            .zip(pipeline.vertex_buffers.iter())
            .enumerate()
        {
            if pipe_desc.step == wgt::VertexStepMode::Instance {
                self.state.instance_vbuf_mask |= 1 << index;
            }
            // Only dirty slots whose buffer layout actually changed.
            if state_desc != pipe_desc {
                self.state.dirty_vbuf_mask |= 1 << index;
                *state_desc = pipe_desc.clone();
            }
        }

        self.set_pipeline_inner(&pipeline.inner);

        // set primitive state
        let prim_state = conv::map_primitive_state(&pipeline.primitive);
        if prim_state != self.state.primitive {
            self.cmd_buffer
                .commands
                .push(C::SetPrimitive(prim_state.clone()));
            self.state.primitive = prim_state;
        }

        // set depth/stencil states
        let mut aspects = crate::FormatAspects::empty();
        if pipeline.depth_bias != self.state.depth_bias {
            self.state.depth_bias = pipeline.depth_bias;
            self.cmd_buffer
                .commands
                .push(C::SetDepthBias(pipeline.depth_bias));
        }
        if let Some(ref depth) = pipeline.depth {
            aspects |= crate::FormatAspects::DEPTH;
            self.cmd_buffer.commands.push(C::SetDepth(depth.clone()));
        }
        if let Some(ref stencil) = pipeline.stencil {
            aspects |= crate::FormatAspects::STENCIL;
            self.state.stencil = stencil.clone();
            self.rebind_stencil_func();
            // One FRONT_AND_BACK command suffices when both faces share ops
            // and write mask; otherwise emit a command per face.
            if stencil.front.ops == stencil.back.ops
                && stencil.front.mask_write == stencil.back.mask_write
            {
                self.cmd_buffer.commands.push(C::SetStencilOps {
                    face: glow::FRONT_AND_BACK,
                    write_mask: stencil.front.mask_write,
                    ops: stencil.front.ops.clone(),
                });
            } else {
                self.cmd_buffer.commands.push(C::SetStencilOps {
                    face: glow::FRONT,
                    write_mask: stencil.front.mask_write,
                    ops: stencil.front.ops.clone(),
                });
                self.cmd_buffer.commands.push(C::SetStencilOps {
                    face: glow::BACK,
                    write_mask: stencil.back.mask_write,
                    ops: stencil.back.ops.clone(),
                });
            }
        }
        // Always reconfigured (not diffed), since `aspects` encodes which of
        // depth/stencil the new pipeline uses at all.
        self.cmd_buffer
            .commands
            .push(C::ConfigureDepthStencil(aspects));

        // set multisampling state
        if pipeline.alpha_to_coverage_enabled != self.state.alpha_to_coverage_enabled {
            self.state.alpha_to_coverage_enabled = pipeline.alpha_to_coverage_enabled;
            self.cmd_buffer
                .commands
                .push(C::SetAlphaToCoverage(pipeline.alpha_to_coverage_enabled));
        }

        // set blend states
        if self.state.color_targets[..] != pipeline.color_targets[..] {
            // If every color target shares the first one's state, a single
            // command with `draw_buffer_index: None` covers all draw buffers;
            // otherwise configure each draw buffer individually.
            if pipeline
                .color_targets
                .iter()
                .skip(1)
                .any(|ct| *ct != pipeline.color_targets[0])
            {
                for (index, ct) in pipeline.color_targets.iter().enumerate() {
                    self.cmd_buffer.commands.push(C::SetColorTarget {
                        draw_buffer_index: Some(index as u32),
                        desc: ct.clone(),
                    });
                }
            } else {
                self.cmd_buffer.commands.push(C::SetColorTarget {
                    draw_buffer_index: None,
                    desc: pipeline.color_targets.first().cloned().unwrap_or_default(),
                });
            }
        }
        self.state.color_targets.clear();
        for ct in pipeline.color_targets.iter() {
            self.state.color_targets.push(ct.clone());
        }
    }
984
985    unsafe fn set_index_buffer<'a>(
986        &mut self,
987        binding: crate::BufferBinding<'a, super::Buffer>,
988        format: wgt::IndexFormat,
989    ) {
990        self.state.index_offset = binding.offset;
991        self.state.index_format = format;
992        self.cmd_buffer
993            .commands
994            .push(C::SetIndexBuffer(binding.buffer.raw.unwrap()));
995    }
996    unsafe fn set_vertex_buffer<'a>(
997        &mut self,
998        index: u32,
999        binding: crate::BufferBinding<'a, super::Buffer>,
1000    ) {
1001        self.state.dirty_vbuf_mask |= 1 << index;
1002        let (_, ref mut vb) = self.state.vertex_buffers[index as usize];
1003        *vb = Some(super::BufferBinding {
1004            raw: binding.buffer.raw.unwrap(),
1005            offset: binding.offset,
1006        });
1007    }
1008    unsafe fn set_viewport(&mut self, rect: &crate::Rect<f32>, depth: Range<f32>) {
1009        self.cmd_buffer.commands.push(C::SetViewport {
1010            rect: crate::Rect {
1011                x: rect.x as i32,
1012                y: rect.y as i32,
1013                w: rect.w as i32,
1014                h: rect.h as i32,
1015            },
1016            depth,
1017        });
1018    }
1019    unsafe fn set_scissor_rect(&mut self, rect: &crate::Rect<u32>) {
1020        self.cmd_buffer.commands.push(C::SetScissor(crate::Rect {
1021            x: rect.x as i32,
1022            y: rect.y as i32,
1023            w: rect.w as i32,
1024            h: rect.h as i32,
1025        }));
1026    }
1027    unsafe fn set_stencil_reference(&mut self, value: u32) {
1028        self.state.stencil.front.reference = value;
1029        self.state.stencil.back.reference = value;
1030        self.rebind_stencil_func();
1031    }
1032    unsafe fn set_blend_constants(&mut self, color: &[f32; 4]) {
1033        self.cmd_buffer.commands.push(C::SetBlendConstant(*color));
1034    }
1035
1036    unsafe fn draw(
1037        &mut self,
1038        first_vertex: u32,
1039        vertex_count: u32,
1040        first_instance: u32,
1041        instance_count: u32,
1042    ) {
1043        self.prepare_draw(first_instance);
1044        #[allow(clippy::clone_on_copy)] // False positive when cloning glow::UniformLocation
1045        self.cmd_buffer.commands.push(C::Draw {
1046            topology: self.state.topology,
1047            first_vertex,
1048            vertex_count,
1049            first_instance,
1050            instance_count,
1051            first_instance_location: self.state.first_instance_location.clone(),
1052        });
1053    }
1054    unsafe fn draw_indexed(
1055        &mut self,
1056        first_index: u32,
1057        index_count: u32,
1058        base_vertex: i32,
1059        first_instance: u32,
1060        instance_count: u32,
1061    ) {
1062        self.prepare_draw(first_instance);
1063        let (index_size, index_type) = match self.state.index_format {
1064            wgt::IndexFormat::Uint16 => (2, glow::UNSIGNED_SHORT),
1065            wgt::IndexFormat::Uint32 => (4, glow::UNSIGNED_INT),
1066        };
1067        let index_offset = self.state.index_offset + index_size * first_index as wgt::BufferAddress;
1068        #[allow(clippy::clone_on_copy)] // False positive when cloning glow::UniformLocation
1069        self.cmd_buffer.commands.push(C::DrawIndexed {
1070            topology: self.state.topology,
1071            index_type,
1072            index_offset,
1073            index_count,
1074            base_vertex,
1075            first_instance,
1076            instance_count,
1077            first_instance_location: self.state.first_instance_location.clone(),
1078        });
1079    }
1080    unsafe fn draw_indirect(
1081        &mut self,
1082        buffer: &super::Buffer,
1083        offset: wgt::BufferAddress,
1084        draw_count: u32,
1085    ) {
1086        self.prepare_draw(0);
1087        for draw in 0..draw_count as wgt::BufferAddress {
1088            let indirect_offset =
1089                offset + draw * size_of::<wgt::DrawIndirectArgs>() as wgt::BufferAddress;
1090            #[allow(clippy::clone_on_copy)] // False positive when cloning glow::UniformLocation
1091            self.cmd_buffer.commands.push(C::DrawIndirect {
1092                topology: self.state.topology,
1093                indirect_buf: buffer.raw.unwrap(),
1094                indirect_offset,
1095                first_instance_location: self.state.first_instance_location.clone(),
1096            });
1097        }
1098    }
1099    unsafe fn draw_indexed_indirect(
1100        &mut self,
1101        buffer: &super::Buffer,
1102        offset: wgt::BufferAddress,
1103        draw_count: u32,
1104    ) {
1105        self.prepare_draw(0);
1106        let index_type = match self.state.index_format {
1107            wgt::IndexFormat::Uint16 => glow::UNSIGNED_SHORT,
1108            wgt::IndexFormat::Uint32 => glow::UNSIGNED_INT,
1109        };
1110        for draw in 0..draw_count as wgt::BufferAddress {
1111            let indirect_offset =
1112                offset + draw * size_of::<wgt::DrawIndexedIndirectArgs>() as wgt::BufferAddress;
1113            #[allow(clippy::clone_on_copy)] // False positive when cloning glow::UniformLocation
1114            self.cmd_buffer.commands.push(C::DrawIndexedIndirect {
1115                topology: self.state.topology,
1116                index_type,
1117                indirect_buf: buffer.raw.unwrap(),
1118                indirect_offset,
1119                first_instance_location: self.state.first_instance_location.clone(),
1120            });
1121        }
1122    }
    unsafe fn draw_indirect_count(
        &mut self,
        _buffer: &super::Buffer,
        _offset: wgt::BufferAddress,
        _count_buffer: &super::Buffer,
        _count_offset: wgt::BufferAddress,
        _max_count: u32,
    ) {
        // NOTE(review): indirect-count draws appear unsupported by this
        // backend; presumably upstream validation rejects the call before it
        // reaches here — confirm against the advertised feature flags.
        unreachable!()
    }
    unsafe fn draw_indexed_indirect_count(
        &mut self,
        _buffer: &super::Buffer,
        _offset: wgt::BufferAddress,
        _count_buffer: &super::Buffer,
        _count_offset: wgt::BufferAddress,
        _max_count: u32,
    ) {
        // NOTE(review): indexed indirect-count draws appear unsupported by
        // this backend; presumably upstream validation rejects the call
        // before it reaches here — confirm against the feature flags.
        unreachable!()
    }
1143
1144    // compute
1145
1146    unsafe fn begin_compute_pass(&mut self, desc: &crate::ComputePassDescriptor<super::QuerySet>) {
1147        debug_assert!(self.state.end_of_pass_timestamp.is_none());
1148        if let Some(ref t) = desc.timestamp_writes {
1149            if let Some(index) = t.beginning_of_pass_write_index {
1150                unsafe { self.write_timestamp(t.query_set, index) }
1151            }
1152            self.state.end_of_pass_timestamp = t
1153                .end_of_pass_write_index
1154                .map(|index| t.query_set.queries[index as usize]);
1155        }
1156
1157        if let Some(label) = desc.label {
1158            let range = self.cmd_buffer.add_marker(label);
1159            self.cmd_buffer.commands.push(C::PushDebugGroup(range));
1160            self.state.has_pass_label = true;
1161        }
1162    }
1163    unsafe fn end_compute_pass(&mut self) {
1164        if self.state.has_pass_label {
1165            self.cmd_buffer.commands.push(C::PopDebugGroup);
1166            self.state.has_pass_label = false;
1167        }
1168
1169        if let Some(query) = self.state.end_of_pass_timestamp.take() {
1170            self.cmd_buffer.commands.push(C::TimestampQuery(query));
1171        }
1172    }
1173
    /// Binds the compute pipeline's program via the shared
    /// `set_pipeline_inner` path also used for render pipelines.
    unsafe fn set_compute_pipeline(&mut self, pipeline: &super::ComputePipeline) {
        self.set_pipeline_inner(&pipeline.inner);
    }
1177
    /// Records a compute dispatch with the given `[x, y, z]` workgroup counts.
    unsafe fn dispatch(&mut self, count: [u32; 3]) {
        self.cmd_buffer.commands.push(C::Dispatch(count));
    }
1181    unsafe fn dispatch_indirect(&mut self, buffer: &super::Buffer, offset: wgt::BufferAddress) {
1182        self.cmd_buffer.commands.push(C::DispatchIndirect {
1183            indirect_buf: buffer.raw.unwrap(),
1184            indirect_offset: offset,
1185        });
1186    }
1187
    unsafe fn build_acceleration_structures<'a, T>(
        &mut self,
        _descriptor_count: u32,
        _descriptors: T,
    ) where
        super::Api: 'a,
        T: IntoIterator<
            Item = crate::BuildAccelerationStructureDescriptor<
                'a,
                super::Buffer,
                super::AccelerationStructure,
            >,
        >,
    {
        // NOTE(review): acceleration structures are unimplemented in this
        // backend; presumably the ray-tracing feature is never advertised,
        // so this cannot be reached — confirm.
        unimplemented!()
    }
1204
    unsafe fn place_acceleration_structure_barrier(
        &mut self,
        _barriers: crate::AccelerationStructureBarrier,
    ) {
        // NOTE(review): see `build_acceleration_structures` — acceleration
        // structures are unimplemented in this backend, so this barrier is
        // presumably unreachable; confirm.
        unimplemented!()
    }
1211}