wgpu_hal/vulkan/command.rs

use super::conv;

use arrayvec::ArrayVec;
use ash::vk;

use std::{
    mem::{self, size_of},
    ops::Range,
    slice,
};

const ALLOCATION_GRANULARITY: u32 = 16;
const DST_IMAGE_LAYOUT: vk::ImageLayout = vk::ImageLayout::TRANSFER_DST_OPTIMAL;

impl super::Texture {
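    // Converts HAL buffer<->texture copy regions into `vk::BufferImageCopy` values.
    // Note that `buffer_row_length`/`buffer_image_height` are expressed in texels
    // (block counts scaled by the block dimensions), not bytes, and a value of 0
    // tells Vulkan the data is tightly packed.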
    fn map_buffer_copies<T>(&self, regions: T) -> impl Iterator<Item = vk::BufferImageCopy>
    where
        T: Iterator<Item = crate::BufferTextureCopy>,
    {
        let (block_width, block_height) = self.format.block_dimensions();
        let format = self.format;
        let copy_size = self.copy_size;
        regions.map(move |r| {
            let extent = r.texture_base.max_copy_size(&copy_size).min(&r.size);
            let (image_subresource, image_offset) = conv::map_subresource_layers(&r.texture_base);
            vk::BufferImageCopy {
                buffer_offset: r.buffer_layout.offset,
                buffer_row_length: r.buffer_layout.bytes_per_row.map_or(0, |bpr| {
                    let block_size = format
                        .block_copy_size(Some(r.texture_base.aspect.map()))
                        .unwrap();
                    block_width * (bpr / block_size)
                }),
                buffer_image_height: r
                    .buffer_layout
                    .rows_per_image
                    .map_or(0, |rpi| rpi * block_height),
                image_subresource,
                image_offset,
                image_extent: conv::map_copy_extent(&extent),
            }
        })
    }
}

impl super::CommandEncoder {
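    // Writes the end-of-pass timestamp recorded by `begin_render_pass`/`begin_compute_pass`,
    // if one was requested via `timestamp_writes`, and clears the pending state.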
    fn write_pass_end_timestamp_if_requested(&mut self) {
        if let Some((query_set, index)) = self.end_of_pass_timer_query.take() {
            unsafe {
                self.device.raw.cmd_write_timestamp(
                    self.active,
                    vk::PipelineStageFlags::BOTTOM_OF_PIPE,
                    query_set,
                    index,
                );
            }
        }
    }
}

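// Typical wgpu-hal command-encoder lifecycle implemented below (an illustrative sketch
// only; the real call sites live in wgpu-core):
//
//     encoder.begin_encoding(Some("frame"))?;
//     // ... transition_*, copy_*, render/compute pass recording ...
//     let cmd_buf = encoder.end_encoding()?; // or discard_encoding() on error
//     // ... submit cmd_buf, wait for it to complete, then recycle it:
//     encoder.reset_all(iter::once(cmd_buf));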
impl crate::CommandEncoder for super::CommandEncoder {
    type A = super::Api;

    unsafe fn begin_encoding(&mut self, label: crate::Label) -> Result<(), crate::DeviceError> {
        if self.free.is_empty() {
            let vk_info = vk::CommandBufferAllocateInfo::default()
                .command_pool(self.raw)
                .command_buffer_count(ALLOCATION_GRANULARITY);
            let cmd_buf_vec = unsafe {
                self.device
                    .raw
                    .allocate_command_buffers(&vk_info)
                    .map_err(super::map_host_device_oom_err)?
            };
            self.free.extend(cmd_buf_vec);
        }
        let raw = self.free.pop().unwrap();

        // Set the name unconditionally, since there might be a
        // previous name assigned to this.
        unsafe { self.device.set_object_name(raw, label.unwrap_or_default()) };

        // Reset this in case the last renderpass was never ended.
        self.rpass_debug_marker_active = false;

        let vk_info = vk::CommandBufferBeginInfo::default()
            .flags(vk::CommandBufferUsageFlags::ONE_TIME_SUBMIT);
        unsafe { self.device.raw.begin_command_buffer(raw, &vk_info) }
            .map_err(super::map_host_device_oom_err)?;
        self.active = raw;

        Ok(())
    }

    unsafe fn end_encoding(&mut self) -> Result<super::CommandBuffer, crate::DeviceError> {
        let raw = self.active;
        self.active = vk::CommandBuffer::null();
        unsafe { self.device.raw.end_command_buffer(raw) }.map_err(map_err)?;
        fn map_err(err: vk::Result) -> crate::DeviceError {
            // We don't use VK_KHR_video_encode_queue, so
            // VK_ERROR_INVALID_VIDEO_STD_PARAMETERS_KHR cannot be returned here.
            super::map_host_device_oom_err(err)
        }
        Ok(super::CommandBuffer { raw })
    }

    unsafe fn discard_encoding(&mut self) {
        // Safe use requires this is not called in the "closed" state, so the buffer
        // shouldn't be null. Assert this to make sure we're not pushing null
        // buffers to the discard pile.
        assert_ne!(self.active, vk::CommandBuffer::null());

        self.discarded.push(self.active);
        self.active = vk::CommandBuffer::null();
    }

    unsafe fn reset_all<I>(&mut self, cmd_bufs: I)
    where
        I: Iterator<Item = super::CommandBuffer>,
    {
        self.temp.clear();
        self.free
            .extend(cmd_bufs.into_iter().map(|cmd_buf| cmd_buf.raw));
        self.free.append(&mut self.discarded);
        let _ = unsafe {
            self.device
                .raw
                .reset_command_pool(self.raw, vk::CommandPoolResetFlags::default())
        };
    }

    unsafe fn transition_buffers<'a, T>(&mut self, barriers: T)
    where
        T: Iterator<Item = crate::BufferBarrier<'a, super::Buffer>>,
    {
        // Note: this is done so that we never end up with empty stage flags
        let mut src_stages = vk::PipelineStageFlags::TOP_OF_PIPE;
        let mut dst_stages = vk::PipelineStageFlags::BOTTOM_OF_PIPE;
        let vk_barriers = &mut self.temp.buffer_barriers;
        vk_barriers.clear();

        for bar in barriers {
            let (src_stage, src_access) = conv::map_buffer_usage_to_barrier(bar.usage.start);
            src_stages |= src_stage;
            let (dst_stage, dst_access) = conv::map_buffer_usage_to_barrier(bar.usage.end);
            dst_stages |= dst_stage;

            vk_barriers.push(
                vk::BufferMemoryBarrier::default()
                    .buffer(bar.buffer.raw)
                    .size(vk::WHOLE_SIZE)
                    .src_access_mask(src_access)
                    .dst_access_mask(dst_access),
            )
        }

        if !vk_barriers.is_empty() {
            unsafe {
                self.device.raw.cmd_pipeline_barrier(
                    self.active,
                    src_stages,
                    dst_stages,
                    vk::DependencyFlags::empty(),
                    &[],
                    vk_barriers,
                    &[],
                )
            };
        }
    }

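    // Unlike the buffer path above, the stage masks here start out empty and only the
    // stages contributed by the supplied barriers are accumulated; old/new image layouts
    // are derived from each texture's usage transition.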
    unsafe fn transition_textures<'a, T>(&mut self, barriers: T)
    where
        T: Iterator<Item = crate::TextureBarrier<'a, super::Texture>>,
    {
        let mut src_stages = vk::PipelineStageFlags::empty();
        let mut dst_stages = vk::PipelineStageFlags::empty();
        let vk_barriers = &mut self.temp.image_barriers;
        vk_barriers.clear();

        for bar in barriers {
            let range = conv::map_subresource_range_combined_aspect(
                &bar.range,
                bar.texture.format,
                &self.device.private_caps,
            );
            let (src_stage, src_access) = conv::map_texture_usage_to_barrier(bar.usage.start);
            let src_layout = conv::derive_image_layout(bar.usage.start, bar.texture.format);
            src_stages |= src_stage;
            let (dst_stage, dst_access) = conv::map_texture_usage_to_barrier(bar.usage.end);
            let dst_layout = conv::derive_image_layout(bar.usage.end, bar.texture.format);
            dst_stages |= dst_stage;

            vk_barriers.push(
                vk::ImageMemoryBarrier::default()
                    .image(bar.texture.raw)
                    .subresource_range(range)
                    .src_access_mask(src_access)
                    .dst_access_mask(dst_access)
                    .old_layout(src_layout)
                    .new_layout(dst_layout),
            );
        }

        if !vk_barriers.is_empty() {
            unsafe {
                self.device.raw.cmd_pipeline_barrier(
                    self.active,
                    src_stages,
                    dst_stages,
                    vk::DependencyFlags::empty(),
                    &[],
                    &[],
                    vk_barriers,
                )
            };
        }
    }

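    // `clear_buffer` zero-fills the range with `vkCmdFillBuffer`. On adapters with the
    // FORCE_FILL_BUFFER_WITH_SIZE_GREATER_4096_ALIGNED_OFFSET_16 workaround, large fills
    // (4096 bytes or more) starting at an unaligned offset are split into a small prefix
    // fill plus a 16-byte-aligned remainder.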
    unsafe fn clear_buffer(&mut self, buffer: &super::Buffer, range: crate::MemoryRange) {
        let range_size = range.end - range.start;
        if self.device.workarounds.contains(
            super::Workarounds::FORCE_FILL_BUFFER_WITH_SIZE_GREATER_4096_ALIGNED_OFFSET_16,
        ) && range_size >= 4096
            && range.start % 16 != 0
        {
            let rounded_start = wgt::math::align_to(range.start, 16);
            let prefix_size = rounded_start - range.start;

            unsafe {
                self.device.raw.cmd_fill_buffer(
                    self.active,
                    buffer.raw,
                    range.start,
                    prefix_size,
                    0,
                )
            };

            // This will never be zero, as rounding can only add up to 12 bytes, and the total size is at least 4096 bytes.
            let suffix_size = range.end - rounded_start;

            unsafe {
                self.device.raw.cmd_fill_buffer(
                    self.active,
                    buffer.raw,
                    rounded_start,
                    suffix_size,
                    0,
                )
            };
        } else {
            unsafe {
                self.device
                    .raw
                    .cmd_fill_buffer(self.active, buffer.raw, range.start, range_size, 0)
            };
        }
    }

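    // The copy entry points below collect their region iterators into a stack-allocated
    // `SmallVec<[_; 32]>` so that typical copies with few regions avoid a heap allocation
    // before the slice is handed to the Vulkan command.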
    unsafe fn copy_buffer_to_buffer<T>(
        &mut self,
        src: &super::Buffer,
        dst: &super::Buffer,
        regions: T,
    ) where
        T: Iterator<Item = crate::BufferCopy>,
    {
        let vk_regions_iter = regions.map(|r| vk::BufferCopy {
            src_offset: r.src_offset,
            dst_offset: r.dst_offset,
            size: r.size.get(),
        });

        unsafe {
            self.device.raw.cmd_copy_buffer(
                self.active,
                src.raw,
                dst.raw,
                &smallvec::SmallVec::<[vk::BufferCopy; 32]>::from_iter(vk_regions_iter),
            )
        };
    }

    unsafe fn copy_texture_to_texture<T>(
        &mut self,
        src: &super::Texture,
        src_usage: crate::TextureUses,
        dst: &super::Texture,
        regions: T,
    ) where
        T: Iterator<Item = crate::TextureCopy>,
    {
        let src_layout = conv::derive_image_layout(src_usage, src.format);

        let vk_regions_iter = regions.map(|r| {
            let (src_subresource, src_offset) = conv::map_subresource_layers(&r.src_base);
            let (dst_subresource, dst_offset) = conv::map_subresource_layers(&r.dst_base);
            let extent = r
                .size
                .min(&r.src_base.max_copy_size(&src.copy_size))
                .min(&r.dst_base.max_copy_size(&dst.copy_size));
            vk::ImageCopy {
                src_subresource,
                src_offset,
                dst_subresource,
                dst_offset,
                extent: conv::map_copy_extent(&extent),
            }
        });

        unsafe {
            self.device.raw.cmd_copy_image(
                self.active,
                src.raw,
                src_layout,
                dst.raw,
                DST_IMAGE_LAYOUT,
                &smallvec::SmallVec::<[vk::ImageCopy; 32]>::from_iter(vk_regions_iter),
            )
        };
    }

    unsafe fn copy_buffer_to_texture<T>(
        &mut self,
        src: &super::Buffer,
        dst: &super::Texture,
        regions: T,
    ) where
        T: Iterator<Item = crate::BufferTextureCopy>,
    {
        let vk_regions_iter = dst.map_buffer_copies(regions);

        unsafe {
            self.device.raw.cmd_copy_buffer_to_image(
                self.active,
                src.raw,
                dst.raw,
                DST_IMAGE_LAYOUT,
                &smallvec::SmallVec::<[vk::BufferImageCopy; 32]>::from_iter(vk_regions_iter),
            )
        };
    }

    unsafe fn copy_texture_to_buffer<T>(
        &mut self,
        src: &super::Texture,
        src_usage: crate::TextureUses,
        dst: &super::Buffer,
        regions: T,
    ) where
        T: Iterator<Item = crate::BufferTextureCopy>,
    {
        let src_layout = conv::derive_image_layout(src_usage, src.format);
        let vk_regions_iter = src.map_buffer_copies(regions);

        unsafe {
            self.device.raw.cmd_copy_image_to_buffer(
                self.active,
                src.raw,
                src_layout,
                dst.raw,
                &smallvec::SmallVec::<[vk::BufferImageCopy; 32]>::from_iter(vk_regions_iter),
            )
        };
    }

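    // The query methods map directly onto the corresponding vkCmd* query-pool commands.
    // `copy_query_results` always requests 64-bit results and passes WAIT, so the copy
    // does not execute until the queried results are available.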
    unsafe fn begin_query(&mut self, set: &super::QuerySet, index: u32) {
        unsafe {
            self.device.raw.cmd_begin_query(
                self.active,
                set.raw,
                index,
                vk::QueryControlFlags::empty(),
            )
        };
    }
    unsafe fn end_query(&mut self, set: &super::QuerySet, index: u32) {
        unsafe { self.device.raw.cmd_end_query(self.active, set.raw, index) };
    }
    unsafe fn write_timestamp(&mut self, set: &super::QuerySet, index: u32) {
        unsafe {
            self.device.raw.cmd_write_timestamp(
                self.active,
                vk::PipelineStageFlags::BOTTOM_OF_PIPE,
                set.raw,
                index,
            )
        };
    }
    unsafe fn reset_queries(&mut self, set: &super::QuerySet, range: Range<u32>) {
        unsafe {
            self.device.raw.cmd_reset_query_pool(
                self.active,
                set.raw,
                range.start,
                range.end - range.start,
            )
        };
    }
    unsafe fn copy_query_results(
        &mut self,
        set: &super::QuerySet,
        range: Range<u32>,
        buffer: &super::Buffer,
        offset: wgt::BufferAddress,
        stride: wgt::BufferSize,
    ) {
        unsafe {
            self.device.raw.cmd_copy_query_pool_results(
                self.active,
                set.raw,
                range.start,
                range.end - range.start,
                buffer.raw,
                offset,
                stride.get(),
                vk::QueryResultFlags::TYPE_64 | vk::QueryResultFlags::WAIT,
            )
        };
    }

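    // Acceleration-structure builds happen in two passes: first all geometry and range
    // descriptions are collected into `SmallVec` storage that outlives the loop, then the
    // `AccelerationStructureBuildGeometryInfoKHR` entries are patched with raw pointers
    // into that storage. Keeping the storage alive until the final
    // `cmd_build_acceleration_structures` call is what keeps those pointers valid.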
    unsafe fn build_acceleration_structures<'a, T>(&mut self, descriptor_count: u32, descriptors: T)
    where
        super::Api: 'a,
        T: IntoIterator<
            Item = crate::BuildAccelerationStructureDescriptor<
                'a,
                super::Buffer,
                super::AccelerationStructure,
            >,
        >,
    {
        const CAPACITY_OUTER: usize = 8;
        const CAPACITY_INNER: usize = 1;
        let descriptor_count = descriptor_count as usize;

        let ray_tracing_functions = self
            .device
            .extension_fns
            .ray_tracing
            .as_ref()
            .expect("Feature `RAY_TRACING` not enabled");

        let get_device_address = |buffer: Option<&super::Buffer>| unsafe {
            match buffer {
                Some(buffer) => ray_tracing_functions
                    .buffer_device_address
                    .get_buffer_device_address(
                        &vk::BufferDeviceAddressInfo::default().buffer(buffer.raw),
                    ),
                None => panic!("Buffers are required to build acceleration structures"),
            }
        };

        // storage for all the data required for cmd_build_acceleration_structures
        let mut ranges_storage = smallvec::SmallVec::<
            [smallvec::SmallVec<[vk::AccelerationStructureBuildRangeInfoKHR; CAPACITY_INNER]>;
                CAPACITY_OUTER],
        >::with_capacity(descriptor_count);
        let mut geometries_storage = smallvec::SmallVec::<
            [smallvec::SmallVec<[vk::AccelerationStructureGeometryKHR; CAPACITY_INNER]>;
                CAPACITY_OUTER],
        >::with_capacity(descriptor_count);

        // pointers to all the data required for cmd_build_acceleration_structures
        let mut geometry_infos = smallvec::SmallVec::<
            [vk::AccelerationStructureBuildGeometryInfoKHR; CAPACITY_OUTER],
        >::with_capacity(descriptor_count);
        let mut ranges_ptrs = smallvec::SmallVec::<
            [&[vk::AccelerationStructureBuildRangeInfoKHR]; CAPACITY_OUTER],
        >::with_capacity(descriptor_count);

        for desc in descriptors {
            let (geometries, ranges) = match *desc.entries {
                crate::AccelerationStructureEntries::Instances(ref instances) => {
                    let instance_data = vk::AccelerationStructureGeometryInstancesDataKHR::default(
                    // TODO: Code is so large that rustfmt refuses to treat this... :(
                    )
                    .data(vk::DeviceOrHostAddressConstKHR {
                        device_address: get_device_address(instances.buffer),
                    });

                    let geometry = vk::AccelerationStructureGeometryKHR::default()
                        .geometry_type(vk::GeometryTypeKHR::INSTANCES)
                        .geometry(vk::AccelerationStructureGeometryDataKHR {
                            instances: instance_data,
                        });

                    let range = vk::AccelerationStructureBuildRangeInfoKHR::default()
                        .primitive_count(instances.count)
                        .primitive_offset(instances.offset);

                    (smallvec::smallvec![geometry], smallvec::smallvec![range])
                }
                crate::AccelerationStructureEntries::Triangles(ref in_geometries) => {
                    let mut ranges = smallvec::SmallVec::<
                        [vk::AccelerationStructureBuildRangeInfoKHR; CAPACITY_INNER],
                    >::with_capacity(in_geometries.len());
                    let mut geometries = smallvec::SmallVec::<
                        [vk::AccelerationStructureGeometryKHR; CAPACITY_INNER],
                    >::with_capacity(in_geometries.len());
                    for triangles in in_geometries {
                        let mut triangle_data =
                            vk::AccelerationStructureGeometryTrianglesDataKHR::default()
                                // IndexType::NONE_KHR is not set by default (due to being provided by VK_KHR_acceleration_structure) but unless there is an
                                // index buffer we need to have IndexType::NONE_KHR as our index type.
                                .index_type(vk::IndexType::NONE_KHR)
                                .vertex_data(vk::DeviceOrHostAddressConstKHR {
                                    device_address: get_device_address(triangles.vertex_buffer),
                                })
                                .vertex_format(conv::map_vertex_format(triangles.vertex_format))
                                .max_vertex(triangles.vertex_count)
                                .vertex_stride(triangles.vertex_stride);

                        let mut range = vk::AccelerationStructureBuildRangeInfoKHR::default();

                        if let Some(ref indices) = triangles.indices {
                            triangle_data = triangle_data
                                .index_data(vk::DeviceOrHostAddressConstKHR {
                                    device_address: get_device_address(indices.buffer),
                                })
                                .index_type(conv::map_index_format(indices.format));

                            range = range
                                .primitive_count(indices.count / 3)
                                .primitive_offset(indices.offset)
                                .first_vertex(triangles.first_vertex);
                        } else {
                            range = range
                                .primitive_count(triangles.vertex_count)
                                .first_vertex(triangles.first_vertex);
                        }

                        if let Some(ref transform) = triangles.transform {
                            let transform_device_address = unsafe {
                                ray_tracing_functions
                                    .buffer_device_address
                                    .get_buffer_device_address(
                                        &vk::BufferDeviceAddressInfo::default()
                                            .buffer(transform.buffer.raw),
                                    )
                            };
                            triangle_data =
                                triangle_data.transform_data(vk::DeviceOrHostAddressConstKHR {
                                    device_address: transform_device_address,
                                });

                            range = range.transform_offset(transform.offset);
                        }

                        let geometry = vk::AccelerationStructureGeometryKHR::default()
                            .geometry_type(vk::GeometryTypeKHR::TRIANGLES)
                            .geometry(vk::AccelerationStructureGeometryDataKHR {
                                triangles: triangle_data,
                            })
                            .flags(conv::map_acceleration_structure_geometry_flags(
                                triangles.flags,
                            ));

                        geometries.push(geometry);
                        ranges.push(range);
                    }
                    (geometries, ranges)
                }
                crate::AccelerationStructureEntries::AABBs(ref in_geometries) => {
                    let mut ranges = smallvec::SmallVec::<
                        [vk::AccelerationStructureBuildRangeInfoKHR; CAPACITY_INNER],
                    >::with_capacity(in_geometries.len());
                    let mut geometries = smallvec::SmallVec::<
                        [vk::AccelerationStructureGeometryKHR; CAPACITY_INNER],
                    >::with_capacity(in_geometries.len());
                    for aabb in in_geometries {
                        let aabbs_data = vk::AccelerationStructureGeometryAabbsDataKHR::default()
                            .data(vk::DeviceOrHostAddressConstKHR {
                                device_address: get_device_address(aabb.buffer),
                            })
                            .stride(aabb.stride);

                        let range = vk::AccelerationStructureBuildRangeInfoKHR::default()
                            .primitive_count(aabb.count)
                            .primitive_offset(aabb.offset);

                        let geometry = vk::AccelerationStructureGeometryKHR::default()
                            .geometry_type(vk::GeometryTypeKHR::AABBS)
                            .geometry(vk::AccelerationStructureGeometryDataKHR {
                                aabbs: aabbs_data,
                            })
                            .flags(conv::map_acceleration_structure_geometry_flags(aabb.flags));

                        geometries.push(geometry);
                        ranges.push(range);
                    }
                    (geometries, ranges)
                }
            };

            ranges_storage.push(ranges);
            geometries_storage.push(geometries);

            let scratch_device_address = unsafe {
                ray_tracing_functions
                    .buffer_device_address
                    .get_buffer_device_address(
                        &vk::BufferDeviceAddressInfo::default().buffer(desc.scratch_buffer.raw),
                    )
            };
            let ty = match *desc.entries {
                crate::AccelerationStructureEntries::Instances(_) => {
                    vk::AccelerationStructureTypeKHR::TOP_LEVEL
                }
                _ => vk::AccelerationStructureTypeKHR::BOTTOM_LEVEL,
            };
            let mut geometry_info = vk::AccelerationStructureBuildGeometryInfoKHR::default()
                .ty(ty)
                .mode(conv::map_acceleration_structure_build_mode(desc.mode))
                .flags(conv::map_acceleration_structure_flags(desc.flags))
                .dst_acceleration_structure(desc.destination_acceleration_structure.raw)
                .scratch_data(vk::DeviceOrHostAddressKHR {
                    device_address: scratch_device_address + desc.scratch_buffer_offset,
                });

            if desc.mode == crate::AccelerationStructureBuildMode::Update {
                geometry_info.src_acceleration_structure = desc
                    .source_acceleration_structure
                    .unwrap_or(desc.destination_acceleration_structure)
                    .raw;
            }

            geometry_infos.push(geometry_info);
        }

        for (i, geometry_info) in geometry_infos.iter_mut().enumerate() {
            geometry_info.geometry_count = geometries_storage[i].len() as u32;
            geometry_info.p_geometries = geometries_storage[i].as_ptr();
            ranges_ptrs.push(&ranges_storage[i]);
        }

        unsafe {
            ray_tracing_functions
                .acceleration_structure
                .cmd_build_acceleration_structures(self.active, &geometry_infos, &ranges_ptrs);
        }
    }

    unsafe fn place_acceleration_structure_barrier(
        &mut self,
        barrier: crate::AccelerationStructureBarrier,
    ) {
        let (src_stage, src_access) =
            conv::map_acceleration_structure_usage_to_barrier(barrier.usage.start);
        let (dst_stage, dst_access) =
            conv::map_acceleration_structure_usage_to_barrier(barrier.usage.end);

        unsafe {
            self.device.raw.cmd_pipeline_barrier(
                self.active,
                src_stage | vk::PipelineStageFlags::TOP_OF_PIPE,
                dst_stage | vk::PipelineStageFlags::BOTTOM_OF_PIPE,
                vk::DependencyFlags::empty(),
                &[vk::MemoryBarrier::default()
                    .src_access_mask(src_access)
                    .dst_access_mask(dst_access)],
                &[],
                &[],
            )
        };
    }
    // render

    unsafe fn begin_render_pass(
        &mut self,
        desc: &crate::RenderPassDescriptor<super::QuerySet, super::TextureView>,
    ) {
        let mut vk_clear_values =
            ArrayVec::<vk::ClearValue, { super::MAX_TOTAL_ATTACHMENTS }>::new();
        let mut vk_image_views = ArrayVec::<vk::ImageView, { super::MAX_TOTAL_ATTACHMENTS }>::new();
        let mut rp_key = super::RenderPassKey::default();
        let mut fb_key = super::FramebufferKey {
            attachments: ArrayVec::default(),
            extent: desc.extent,
            sample_count: desc.sample_count,
        };
        let caps = &self.device.private_caps;

        for cat in desc.color_attachments {
            if let Some(cat) = cat.as_ref() {
                vk_clear_values.push(vk::ClearValue {
                    color: unsafe { cat.make_vk_clear_color() },
                });
                vk_image_views.push(cat.target.view.raw);
                let color = super::ColorAttachmentKey {
                    base: cat.target.make_attachment_key(cat.ops, caps),
                    resolve: cat.resolve_target.as_ref().map(|target| {
                        target.make_attachment_key(crate::AttachmentOps::STORE, caps)
                    }),
                };

                rp_key.colors.push(Some(color));
                fb_key.attachments.push(cat.target.view.attachment.clone());
                if let Some(ref at) = cat.resolve_target {
                    vk_clear_values.push(unsafe { mem::zeroed() });
                    vk_image_views.push(at.view.raw);
                    fb_key.attachments.push(at.view.attachment.clone());
                }

                // Assert this attachment is valid for the detected multiview, as a sanity check
                // The driver crash for this is really bad on AMD, so the check is worth it
                if let Some(multiview) = desc.multiview {
                    assert_eq!(cat.target.view.layers, multiview);
                    if let Some(ref resolve_target) = cat.resolve_target {
                        assert_eq!(resolve_target.view.layers, multiview);
                    }
                }
            } else {
                rp_key.colors.push(None);
            }
        }
        if let Some(ref ds) = desc.depth_stencil_attachment {
            vk_clear_values.push(vk::ClearValue {
                depth_stencil: vk::ClearDepthStencilValue {
                    depth: ds.clear_value.0,
                    stencil: ds.clear_value.1,
                },
            });
            vk_image_views.push(ds.target.view.raw);
            rp_key.depth_stencil = Some(super::DepthStencilAttachmentKey {
                base: ds.target.make_attachment_key(ds.depth_ops, caps),
                stencil_ops: ds.stencil_ops,
            });
            fb_key.attachments.push(ds.target.view.attachment.clone());

            // Assert this attachment is valid for the detected multiview, as a sanity check
            // The driver crash for this is really bad on AMD, so the check is worth it
            if let Some(multiview) = desc.multiview {
                assert_eq!(ds.target.view.layers, multiview);
            }
        }
        rp_key.sample_count = fb_key.sample_count;
        rp_key.multiview = desc.multiview;

        let render_area = vk::Rect2D {
            offset: vk::Offset2D { x: 0, y: 0 },
            extent: vk::Extent2D {
                width: desc.extent.width,
                height: desc.extent.height,
            },
        };
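        // The default viewport is Y-flipped (negative height) to map wgpu's coordinate
        // convention onto Vulkan's. When the `flip_y_requires_shift` capability is set,
        // the viewport origin is additionally shifted to the bottom edge so the flipped
        // viewport still covers the render area.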
        let vk_viewports = [vk::Viewport {
            x: 0.0,
            y: if self.device.private_caps.flip_y_requires_shift {
                desc.extent.height as f32
            } else {
                0.0
            },
            width: desc.extent.width as f32,
            height: -(desc.extent.height as f32),
            min_depth: 0.0,
            max_depth: 1.0,
        }];

        let raw_pass = self.device.make_render_pass(rp_key).unwrap();
        let raw_framebuffer = self
            .device
            .make_framebuffer(fb_key, raw_pass, desc.label)
            .unwrap();

        let mut vk_info = vk::RenderPassBeginInfo::default()
            .render_pass(raw_pass)
            .render_area(render_area)
            .clear_values(&vk_clear_values)
            .framebuffer(raw_framebuffer);
        let mut vk_attachment_info = if caps.imageless_framebuffers {
            Some(vk::RenderPassAttachmentBeginInfo::default().attachments(&vk_image_views))
        } else {
            None
        };
        if let Some(attachment_info) = vk_attachment_info.as_mut() {
            vk_info = vk_info.push_next(attachment_info);
        }

        if let Some(label) = desc.label {
            unsafe { self.begin_debug_marker(label) };
            self.rpass_debug_marker_active = true;
        }

        // Start timestamp if any (before all other commands but after debug marker)
        if let Some(timestamp_writes) = desc.timestamp_writes.as_ref() {
            if let Some(index) = timestamp_writes.beginning_of_pass_write_index {
                unsafe {
                    self.write_timestamp(timestamp_writes.query_set, index);
                }
            }
            self.end_of_pass_timer_query = timestamp_writes
                .end_of_pass_write_index
                .map(|index| (timestamp_writes.query_set.raw, index));
        }

        unsafe {
            self.device
                .raw
                .cmd_set_viewport(self.active, 0, &vk_viewports);
            self.device
                .raw
                .cmd_set_scissor(self.active, 0, &[render_area]);
            self.device.raw.cmd_begin_render_pass(
                self.active,
                &vk_info,
                vk::SubpassContents::INLINE,
            );
        };

        self.bind_point = vk::PipelineBindPoint::GRAPHICS;
    }
    unsafe fn end_render_pass(&mut self) {
        unsafe {
            self.device.raw.cmd_end_render_pass(self.active);
        }

        // After all other commands but before debug marker, so this is still seen as part of this pass.
        self.write_pass_end_timestamp_if_requested();

        if self.rpass_debug_marker_active {
            unsafe {
                self.end_debug_marker();
            }
            self.rpass_debug_marker_active = false;
        }
    }

    unsafe fn set_bind_group(
        &mut self,
        layout: &super::PipelineLayout,
        index: u32,
        group: &super::BindGroup,
        dynamic_offsets: &[wgt::DynamicOffset],
    ) {
        let sets = [*group.set.raw()];
        unsafe {
            self.device.raw.cmd_bind_descriptor_sets(
                self.active,
                self.bind_point,
                layout.raw,
                index,
                &sets,
                dynamic_offsets,
            )
        };
    }
    unsafe fn set_push_constants(
        &mut self,
        layout: &super::PipelineLayout,
        stages: wgt::ShaderStages,
        offset_bytes: u32,
        data: &[u32],
    ) {
        unsafe {
            self.device.raw.cmd_push_constants(
                self.active,
                layout.raw,
                conv::map_shader_stage(stages),
                offset_bytes,
                slice::from_raw_parts(data.as_ptr().cast(), data.len() * 4),
            )
        };
    }

    unsafe fn insert_debug_marker(&mut self, label: &str) {
        if let Some(ext) = self.device.extension_fns.debug_utils.as_ref() {
            let cstr = self.temp.make_c_str(label);
            let vk_label = vk::DebugUtilsLabelEXT::default().label_name(cstr);
            unsafe { ext.cmd_insert_debug_utils_label(self.active, &vk_label) };
        }
    }
    unsafe fn begin_debug_marker(&mut self, group_label: &str) {
        if let Some(ext) = self.device.extension_fns.debug_utils.as_ref() {
            let cstr = self.temp.make_c_str(group_label);
            let vk_label = vk::DebugUtilsLabelEXT::default().label_name(cstr);
            unsafe { ext.cmd_begin_debug_utils_label(self.active, &vk_label) };
        }
    }
    unsafe fn end_debug_marker(&mut self) {
        if let Some(ext) = self.device.extension_fns.debug_utils.as_ref() {
            unsafe { ext.cmd_end_debug_utils_label(self.active) };
        }
    }

    unsafe fn set_render_pipeline(&mut self, pipeline: &super::RenderPipeline) {
        unsafe {
            self.device.raw.cmd_bind_pipeline(
                self.active,
                vk::PipelineBindPoint::GRAPHICS,
                pipeline.raw,
            )
        };
    }

    unsafe fn set_index_buffer<'a>(
        &mut self,
        binding: crate::BufferBinding<'a, super::Buffer>,
        format: wgt::IndexFormat,
    ) {
        unsafe {
            self.device.raw.cmd_bind_index_buffer(
                self.active,
                binding.buffer.raw,
                binding.offset,
                conv::map_index_format(format),
            )
        };
    }
    unsafe fn set_vertex_buffer<'a>(
        &mut self,
        index: u32,
        binding: crate::BufferBinding<'a, super::Buffer>,
    ) {
        let vk_buffers = [binding.buffer.raw];
        let vk_offsets = [binding.offset];
        unsafe {
            self.device
                .raw
                .cmd_bind_vertex_buffers(self.active, index, &vk_buffers, &vk_offsets)
        };
    }
    unsafe fn set_viewport(&mut self, rect: &crate::Rect<f32>, depth_range: Range<f32>) {
        let vk_viewports = [vk::Viewport {
            x: rect.x,
            y: if self.device.private_caps.flip_y_requires_shift {
                rect.y + rect.h
            } else {
                rect.y
            },
            width: rect.w,
            height: -rect.h, // flip Y
            min_depth: depth_range.start,
            max_depth: depth_range.end,
        }];
        unsafe {
            self.device
                .raw
                .cmd_set_viewport(self.active, 0, &vk_viewports)
        };
    }
    unsafe fn set_scissor_rect(&mut self, rect: &crate::Rect<u32>) {
        let vk_scissors = [vk::Rect2D {
            offset: vk::Offset2D {
                x: rect.x as i32,
                y: rect.y as i32,
            },
            extent: vk::Extent2D {
                width: rect.w,
                height: rect.h,
            },
        }];
        unsafe {
            self.device
                .raw
                .cmd_set_scissor(self.active, 0, &vk_scissors)
        };
    }
    unsafe fn set_stencil_reference(&mut self, value: u32) {
        unsafe {
            self.device.raw.cmd_set_stencil_reference(
                self.active,
                vk::StencilFaceFlags::FRONT_AND_BACK,
                value,
            )
        };
    }
    unsafe fn set_blend_constants(&mut self, color: &[f32; 4]) {
        unsafe { self.device.raw.cmd_set_blend_constants(self.active, color) };
    }

    unsafe fn draw(
        &mut self,
        first_vertex: u32,
        vertex_count: u32,
        first_instance: u32,
        instance_count: u32,
    ) {
        unsafe {
            self.device.raw.cmd_draw(
                self.active,
                vertex_count,
                instance_count,
                first_vertex,
                first_instance,
            )
        };
    }
    unsafe fn draw_indexed(
        &mut self,
        first_index: u32,
        index_count: u32,
        base_vertex: i32,
        first_instance: u32,
        instance_count: u32,
    ) {
        unsafe {
            self.device.raw.cmd_draw_indexed(
                self.active,
                index_count,
                instance_count,
                first_index,
                base_vertex,
                first_instance,
            )
        };
    }
    unsafe fn draw_indirect(
        &mut self,
        buffer: &super::Buffer,
        offset: wgt::BufferAddress,
        draw_count: u32,
    ) {
        unsafe {
            self.device.raw.cmd_draw_indirect(
                self.active,
                buffer.raw,
                offset,
                draw_count,
                size_of::<wgt::DrawIndirectArgs>() as u32,
            )
        };
    }
    unsafe fn draw_indexed_indirect(
        &mut self,
        buffer: &super::Buffer,
        offset: wgt::BufferAddress,
        draw_count: u32,
    ) {
        unsafe {
            self.device.raw.cmd_draw_indexed_indirect(
                self.active,
                buffer.raw,
                offset,
                draw_count,
                size_of::<wgt::DrawIndexedIndirectArgs>() as u32,
            )
        };
    }
    unsafe fn draw_indirect_count(
        &mut self,
        buffer: &super::Buffer,
        offset: wgt::BufferAddress,
        count_buffer: &super::Buffer,
        count_offset: wgt::BufferAddress,
        max_count: u32,
    ) {
        let stride = size_of::<wgt::DrawIndirectArgs>() as u32;
        match self.device.extension_fns.draw_indirect_count {
            Some(ref t) => {
                unsafe {
                    t.cmd_draw_indirect_count(
                        self.active,
                        buffer.raw,
                        offset,
                        count_buffer.raw,
                        count_offset,
                        max_count,
                        stride,
                    )
                };
            }
            None => panic!("Feature `DRAW_INDIRECT_COUNT` not enabled"),
        }
    }
    unsafe fn draw_indexed_indirect_count(
        &mut self,
        buffer: &super::Buffer,
        offset: wgt::BufferAddress,
        count_buffer: &super::Buffer,
        count_offset: wgt::BufferAddress,
        max_count: u32,
    ) {
        let stride = size_of::<wgt::DrawIndexedIndirectArgs>() as u32;
        match self.device.extension_fns.draw_indirect_count {
            Some(ref t) => {
                unsafe {
                    t.cmd_draw_indexed_indirect_count(
                        self.active,
                        buffer.raw,
                        offset,
                        count_buffer.raw,
                        count_offset,
                        max_count,
                        stride,
                    )
                };
            }
            None => panic!("Feature `DRAW_INDIRECT_COUNT` not enabled"),
        }
    }

    // compute

    unsafe fn begin_compute_pass(
        &mut self,
        desc: &crate::ComputePassDescriptor<'_, super::QuerySet>,
    ) {
        self.bind_point = vk::PipelineBindPoint::COMPUTE;
        if let Some(label) = desc.label {
            unsafe { self.begin_debug_marker(label) };
            self.rpass_debug_marker_active = true;
        }

        if let Some(timestamp_writes) = desc.timestamp_writes.as_ref() {
            if let Some(index) = timestamp_writes.beginning_of_pass_write_index {
                unsafe {
                    self.write_timestamp(timestamp_writes.query_set, index);
                }
            }
            self.end_of_pass_timer_query = timestamp_writes
                .end_of_pass_write_index
                .map(|index| (timestamp_writes.query_set.raw, index));
        }
    }
    unsafe fn end_compute_pass(&mut self) {
        self.write_pass_end_timestamp_if_requested();

        if self.rpass_debug_marker_active {
            unsafe { self.end_debug_marker() };
            self.rpass_debug_marker_active = false
        }
    }

    unsafe fn set_compute_pipeline(&mut self, pipeline: &super::ComputePipeline) {
        unsafe {
            self.device.raw.cmd_bind_pipeline(
                self.active,
                vk::PipelineBindPoint::COMPUTE,
                pipeline.raw,
            )
        };
    }

    unsafe fn dispatch(&mut self, count: [u32; 3]) {
        unsafe {
            self.device
                .raw
                .cmd_dispatch(self.active, count[0], count[1], count[2])
        };
    }
    unsafe fn dispatch_indirect(&mut self, buffer: &super::Buffer, offset: wgt::BufferAddress) {
        unsafe {
            self.device
                .raw
                .cmd_dispatch_indirect(self.active, buffer.raw, offset)
        }
    }
}

#[test]
fn check_dst_image_layout() {
    assert_eq!(
        conv::derive_image_layout(crate::TextureUses::COPY_DST, wgt::TextureFormat::Rgba8Unorm),
        DST_IMAGE_LAYOUT
    );
}