wgpu_hal/vulkan/device.rs

use super::conv;

use arrayvec::ArrayVec;
use ash::{khr, vk};
use parking_lot::Mutex;

use std::{
    borrow::Cow,
    collections::{hash_map::Entry, BTreeMap},
    ffi::{CStr, CString},
    mem::MaybeUninit,
    num::NonZeroU32,
    ptr,
    sync::Arc,
};

impl super::DeviceShared {
    /// Set the name of `object` to `name`.
    ///
    /// If `name` contains an interior null byte, then the name set will be truncated to that byte.
    ///
    /// # Safety
    ///
    /// It must be valid to set `object`'s debug name
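    ///
    /// # Example
    ///
    /// A hypothetical call site (sketch only; `device` is a `DeviceShared`
    /// and `raw_buffer` a live `vk::Buffer` owned by it):
    ///
    /// ```ignore
    /// unsafe { device.set_object_name(raw_buffer, "staging buffer") };
    /// ```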
    pub(super) unsafe fn set_object_name(&self, object: impl vk::Handle, name: &str) {
        let Some(extension) = self.extension_fns.debug_utils.as_ref() else {
            return;
        };

        // Keep variables outside the if-else block to ensure they do not
        // go out of scope while we hold a pointer to them
        let mut buffer: [u8; 64] = [0u8; 64];
        let buffer_vec: Vec<u8>;

        // Append a null terminator to the string
        let name_bytes = if name.len() < buffer.len() {
            // Common case, string is very small. Allocate a copy on the stack.
            buffer[..name.len()].copy_from_slice(name.as_bytes());
            // Add null terminator
            buffer[name.len()] = 0;
            &buffer[..name.len() + 1]
        } else {
            // Less common case, the string is large.
            // This requires a heap allocation.
            buffer_vec = name
                .as_bytes()
                .iter()
                .cloned()
                .chain(std::iter::once(0))
                .collect();
            &buffer_vec
        };

        let name = CStr::from_bytes_until_nul(name_bytes).expect("We have added a null byte");

        let _result = unsafe {
            extension.set_debug_utils_object_name(
                &vk::DebugUtilsObjectNameInfoEXT::default()
                    .object_handle(object)
                    .object_name(name),
            )
        };
    }

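    /// Return a render pass compatible with `key`, creating and caching it
    /// on first use.
    ///
    /// Subsequent calls with an equal key return the cached handle; cached
    /// passes are destroyed in [`Self::free_resources`].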
    pub fn make_render_pass(
        &self,
        key: super::RenderPassKey,
    ) -> Result<vk::RenderPass, crate::DeviceError> {
        Ok(match self.render_passes.lock().entry(key) {
            Entry::Occupied(e) => *e.get(),
            Entry::Vacant(e) => {
                let mut vk_attachments = Vec::new();
                let mut color_refs = Vec::with_capacity(e.key().colors.len());
                let mut resolve_refs = Vec::with_capacity(color_refs.capacity());
                let mut ds_ref = None;
                let samples = vk::SampleCountFlags::from_raw(e.key().sample_count);
                let unused = vk::AttachmentReference {
                    attachment: vk::ATTACHMENT_UNUSED,
                    layout: vk::ImageLayout::UNDEFINED,
                };
                for cat in e.key().colors.iter() {
                    let (color_ref, resolve_ref) = if let Some(cat) = cat.as_ref() {
                        let color_ref = vk::AttachmentReference {
                            attachment: vk_attachments.len() as u32,
                            layout: cat.base.layout,
                        };
                        vk_attachments.push({
                            let (load_op, store_op) = conv::map_attachment_ops(cat.base.ops);
                            vk::AttachmentDescription::default()
                                .format(cat.base.format)
                                .samples(samples)
                                .load_op(load_op)
                                .store_op(store_op)
                                .initial_layout(cat.base.layout)
                                .final_layout(cat.base.layout)
                        });
                        let resolve_ref = if let Some(ref rat) = cat.resolve {
                            let (load_op, store_op) = conv::map_attachment_ops(rat.ops);
                            let vk_attachment = vk::AttachmentDescription::default()
                                .format(rat.format)
                                .samples(vk::SampleCountFlags::TYPE_1)
                                .load_op(load_op)
                                .store_op(store_op)
                                .initial_layout(rat.layout)
                                .final_layout(rat.layout);
                            vk_attachments.push(vk_attachment);

                            vk::AttachmentReference {
                                attachment: vk_attachments.len() as u32 - 1,
                                layout: rat.layout,
                            }
                        } else {
                            unused
                        };

                        (color_ref, resolve_ref)
                    } else {
                        (unused, unused)
                    };

                    color_refs.push(color_ref);
                    resolve_refs.push(resolve_ref);
                }

                if let Some(ref ds) = e.key().depth_stencil {
                    ds_ref = Some(vk::AttachmentReference {
                        attachment: vk_attachments.len() as u32,
                        layout: ds.base.layout,
                    });
                    let (load_op, store_op) = conv::map_attachment_ops(ds.base.ops);
                    let (stencil_load_op, stencil_store_op) =
                        conv::map_attachment_ops(ds.stencil_ops);
                    let vk_attachment = vk::AttachmentDescription::default()
                        .format(ds.base.format)
                        .samples(samples)
                        .load_op(load_op)
                        .store_op(store_op)
                        .stencil_load_op(stencil_load_op)
                        .stencil_store_op(stencil_store_op)
                        .initial_layout(ds.base.layout)
                        .final_layout(ds.base.layout);
                    vk_attachments.push(vk_attachment);
                }

                let vk_subpasses = [{
                    let mut vk_subpass = vk::SubpassDescription::default()
                        .pipeline_bind_point(vk::PipelineBindPoint::GRAPHICS)
                        .color_attachments(&color_refs)
                        .resolve_attachments(&resolve_refs);

                    if self
                        .workarounds
                        .contains(super::Workarounds::EMPTY_RESOLVE_ATTACHMENT_LISTS)
                        && resolve_refs.is_empty()
                    {
                        vk_subpass.p_resolve_attachments = ptr::null();
                    }

                    if let Some(ref reference) = ds_ref {
                        vk_subpass = vk_subpass.depth_stencil_attachment(reference)
                    }
                    vk_subpass
                }];

                let mut vk_info = vk::RenderPassCreateInfo::default()
                    .attachments(&vk_attachments)
                    .subpasses(&vk_subpasses);

                let mut multiview_info;
                let mask;
                if let Some(multiview) = e.key().multiview {
                    // Sanity checks, better to panic here than cause a driver crash
                    assert!(multiview.get() <= 8);
                    assert!(multiview.get() > 1);

                    // Right now we enable all bits on the view masks and correlation masks.
                    // This means we're rendering to all views in the subpass, and that all views
                    // can be rendered concurrently.
                    mask = [(1 << multiview.get()) - 1];

                    // On Vulkan 1.1 or later, this is an alias for core functionality
                    multiview_info = vk::RenderPassMultiviewCreateInfoKHR::default()
                        .view_masks(&mask)
                        .correlation_masks(&mask);
                    vk_info = vk_info.push_next(&mut multiview_info);
                }

                let raw = unsafe {
                    self.raw
                        .create_render_pass(&vk_info, None)
                        .map_err(super::map_host_device_oom_err)?
                };

                *e.insert(raw)
            }
        })
    }

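    /// Return a framebuffer for `key` and `raw_pass`, creating and caching
    /// it on first use.
    ///
    /// When imageless framebuffers are supported, attachments are described
    /// via [`vk::FramebufferAttachmentsCreateInfo`] rather than bound to
    /// concrete image views, so one cached framebuffer can serve any
    /// compatible set of views.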
    pub fn make_framebuffer(
        &self,
        key: super::FramebufferKey,
        raw_pass: vk::RenderPass,
        pass_label: crate::Label,
    ) -> Result<vk::Framebuffer, crate::DeviceError> {
        Ok(match self.framebuffers.lock().entry(key) {
            Entry::Occupied(e) => *e.get(),
            Entry::Vacant(e) => {
                let vk_views = e
                    .key()
                    .attachments
                    .iter()
                    .map(|at| at.raw)
                    .collect::<ArrayVec<_, { super::MAX_TOTAL_ATTACHMENTS }>>();
                let vk_view_formats = e
                    .key()
                    .attachments
                    .iter()
                    .map(|at| self.private_caps.map_texture_format(at.view_format))
                    .collect::<ArrayVec<_, { super::MAX_TOTAL_ATTACHMENTS }>>();
                let vk_view_formats_list = e
                    .key()
                    .attachments
                    .iter()
                    .map(|at| at.raw_view_formats.clone())
                    .collect::<ArrayVec<_, { super::MAX_TOTAL_ATTACHMENTS }>>();

                let vk_image_infos = e
                    .key()
                    .attachments
                    .iter()
                    .enumerate()
                    .map(|(i, at)| {
                        let mut info = vk::FramebufferAttachmentImageInfo::default()
                            .usage(conv::map_texture_usage(at.view_usage))
                            .flags(at.raw_image_flags)
                            .width(e.key().extent.width)
                            .height(e.key().extent.height)
                            .layer_count(e.key().extent.depth_or_array_layers);
                        // https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/VkRenderPassBeginInfo.html#VUID-VkRenderPassBeginInfo-framebuffer-03214
                        if vk_view_formats_list[i].is_empty() {
                            info = info.view_formats(&vk_view_formats[i..i + 1]);
                        } else {
                            info = info.view_formats(&vk_view_formats_list[i]);
                        };
                        info
                    })
                    .collect::<ArrayVec<_, { super::MAX_TOTAL_ATTACHMENTS }>>();

                let mut vk_attachment_info = vk::FramebufferAttachmentsCreateInfo::default()
                    .attachment_image_infos(&vk_image_infos);
                let mut vk_info = vk::FramebufferCreateInfo::default()
                    .render_pass(raw_pass)
                    .width(e.key().extent.width)
                    .height(e.key().extent.height)
                    .layers(e.key().extent.depth_or_array_layers);

                if self.private_caps.imageless_framebuffers {
                    //TODO: https://github.com/MaikKlein/ash/issues/450
                    vk_info = vk_info
                        .flags(vk::FramebufferCreateFlags::IMAGELESS_KHR)
                        .push_next(&mut vk_attachment_info);
                    vk_info.attachment_count = e.key().attachments.len() as u32;
                } else {
                    vk_info = vk_info.attachments(&vk_views);
                }

                *e.insert(unsafe {
                    let raw = self.raw.create_framebuffer(&vk_info, None).unwrap();
                    if let Some(label) = pass_label {
                        self.set_object_name(raw, label);
                    }
                    raw
                })
            }
        })
    }

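    /// Map `ranges` within `buffer` to [`vk::MappedMemoryRange`]s, aligning
    /// each range to the non-coherent atom size as required by
    /// `vkFlushMappedMemoryRanges` and `vkInvalidateMappedMemoryRanges`.
    ///
    /// The offset is rounded down and the size rounded up to the atom mask:
    /// e.g. with a 64-byte atom (mask 63) and a block offset of 0, the range
    /// `64..200` becomes offset 64, size 192. Returns [`None`] for
    /// externally managed buffers, which have no memory block to flush.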
    fn make_memory_ranges<'a, I: 'a + Iterator<Item = crate::MemoryRange>>(
        &self,
        buffer: &'a super::Buffer,
        ranges: I,
    ) -> Option<impl 'a + Iterator<Item = vk::MappedMemoryRange>> {
        let block = buffer.block.as_ref()?.lock();
        let mask = self.private_caps.non_coherent_map_mask;
        Some(ranges.map(move |range| {
            vk::MappedMemoryRange::default()
                .memory(*block.memory())
                .offset((block.offset() + range.start) & !mask)
                .size((range.end - range.start + mask) & !mask)
        }))
    }

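    /// Destroy all cached render passes and framebuffers, then the device
    /// itself unless it is externally owned (`drop_guard` is `Some`).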
    unsafe fn free_resources(&self) {
        for &raw in self.render_passes.lock().values() {
            unsafe { self.raw.destroy_render_pass(raw, None) };
        }
        for &raw in self.framebuffers.lock().values() {
            unsafe { self.raw.destroy_framebuffer(raw, None) };
        }
        if self.drop_guard.is_none() {
            unsafe { self.raw.destroy_device(None) };
        }
    }
}

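// Hook `DeviceShared` up to `gpu-alloc`, which this backend uses to
// suballocate buffer and texture memory. The allocator calls back into
// these raw Vulkan entry points whenever it needs to allocate, map, or
// free a `vk::DeviceMemory` block; flushing and invalidation are handled
// by `flush_mapped_ranges`/`invalidate_mapped_ranges` below instead, so
// the corresponding trait methods are left `unimplemented!()`.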
impl gpu_alloc::MemoryDevice<vk::DeviceMemory> for super::DeviceShared {
    unsafe fn allocate_memory(
        &self,
        size: u64,
        memory_type: u32,
        flags: gpu_alloc::AllocationFlags,
    ) -> Result<vk::DeviceMemory, gpu_alloc::OutOfMemory> {
        let mut info = vk::MemoryAllocateInfo::default()
            .allocation_size(size)
            .memory_type_index(memory_type);

        let mut info_flags;

        if flags.contains(gpu_alloc::AllocationFlags::DEVICE_ADDRESS) {
            info_flags = vk::MemoryAllocateFlagsInfo::default()
                .flags(vk::MemoryAllocateFlags::DEVICE_ADDRESS);
            info = info.push_next(&mut info_flags);
        }

        match unsafe { self.raw.allocate_memory(&info, None) } {
            Ok(memory) => {
                self.memory_allocations_counter.add(1);
                Ok(memory)
            }
            Err(vk::Result::ERROR_OUT_OF_DEVICE_MEMORY) => {
                Err(gpu_alloc::OutOfMemory::OutOfDeviceMemory)
            }
            Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY) => {
                Err(gpu_alloc::OutOfMemory::OutOfHostMemory)
            }
            // We don't use VK_KHR_external_memory
            // VK_ERROR_INVALID_EXTERNAL_HANDLE
            // We don't use VK_KHR_buffer_device_address
            // VK_ERROR_INVALID_OPAQUE_CAPTURE_ADDRESS_KHR
            Err(err) => handle_unexpected(err),
        }
    }

    unsafe fn deallocate_memory(&self, memory: vk::DeviceMemory) {
        self.memory_allocations_counter.sub(1);

        unsafe { self.raw.free_memory(memory, None) };
    }

    unsafe fn map_memory(
        &self,
        memory: &mut vk::DeviceMemory,
        offset: u64,
        size: u64,
    ) -> Result<ptr::NonNull<u8>, gpu_alloc::DeviceMapError> {
        match unsafe {
            self.raw
                .map_memory(*memory, offset, size, vk::MemoryMapFlags::empty())
        } {
            Ok(ptr) => Ok(ptr::NonNull::new(ptr.cast::<u8>())
                .expect("Pointer to memory mapping must not be null")),
            Err(vk::Result::ERROR_OUT_OF_DEVICE_MEMORY) => {
                Err(gpu_alloc::DeviceMapError::OutOfDeviceMemory)
            }
            Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY) => {
                Err(gpu_alloc::DeviceMapError::OutOfHostMemory)
            }
            Err(vk::Result::ERROR_MEMORY_MAP_FAILED) => Err(gpu_alloc::DeviceMapError::MapFailed),
            Err(err) => handle_unexpected(err),
        }
    }

    unsafe fn unmap_memory(&self, memory: &mut vk::DeviceMemory) {
        unsafe { self.raw.unmap_memory(*memory) };
    }

    unsafe fn invalidate_memory_ranges(
        &self,
        _ranges: &[gpu_alloc::MappedMemoryRange<'_, vk::DeviceMemory>],
    ) -> Result<(), gpu_alloc::OutOfMemory> {
        // should never be called
        unimplemented!()
    }

    unsafe fn flush_memory_ranges(
        &self,
        _ranges: &[gpu_alloc::MappedMemoryRange<'_, vk::DeviceMemory>],
    ) -> Result<(), gpu_alloc::OutOfMemory> {
        // should never be called
        unimplemented!()
    }
}
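
// Likewise for `gpu-descriptor`: these callbacks create and destroy the
// underlying descriptor pools and allocate descriptor sets out of them on
// the allocator's behalf.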
impl
    gpu_descriptor::DescriptorDevice<vk::DescriptorSetLayout, vk::DescriptorPool, vk::DescriptorSet>
    for super::DeviceShared
{
    unsafe fn create_descriptor_pool(
        &self,
        descriptor_count: &gpu_descriptor::DescriptorTotalCount,
        max_sets: u32,
        flags: gpu_descriptor::DescriptorPoolCreateFlags,
    ) -> Result<vk::DescriptorPool, gpu_descriptor::CreatePoolError> {
        //Note: ignoring other types, since they can't appear here
        let unfiltered_counts = [
            (vk::DescriptorType::SAMPLER, descriptor_count.sampler),
            (
                vk::DescriptorType::SAMPLED_IMAGE,
                descriptor_count.sampled_image,
            ),
            (
                vk::DescriptorType::STORAGE_IMAGE,
                descriptor_count.storage_image,
            ),
            (
                vk::DescriptorType::UNIFORM_BUFFER,
                descriptor_count.uniform_buffer,
            ),
            (
                vk::DescriptorType::UNIFORM_BUFFER_DYNAMIC,
                descriptor_count.uniform_buffer_dynamic,
            ),
            (
                vk::DescriptorType::STORAGE_BUFFER,
                descriptor_count.storage_buffer,
            ),
            (
                vk::DescriptorType::STORAGE_BUFFER_DYNAMIC,
                descriptor_count.storage_buffer_dynamic,
            ),
        ];

        let filtered_counts = unfiltered_counts
            .iter()
            .cloned()
            .filter(|&(_, count)| count != 0)
            .map(|(ty, count)| vk::DescriptorPoolSize {
                ty,
                descriptor_count: count,
            })
            .collect::<ArrayVec<_, 8>>();

        let mut vk_flags =
            if flags.contains(gpu_descriptor::DescriptorPoolCreateFlags::UPDATE_AFTER_BIND) {
                vk::DescriptorPoolCreateFlags::UPDATE_AFTER_BIND
            } else {
                vk::DescriptorPoolCreateFlags::empty()
            };
        if flags.contains(gpu_descriptor::DescriptorPoolCreateFlags::FREE_DESCRIPTOR_SET) {
            vk_flags |= vk::DescriptorPoolCreateFlags::FREE_DESCRIPTOR_SET;
        }
        let vk_info = vk::DescriptorPoolCreateInfo::default()
            .max_sets(max_sets)
            .flags(vk_flags)
            .pool_sizes(&filtered_counts);

        match unsafe { self.raw.create_descriptor_pool(&vk_info, None) } {
            Ok(pool) => Ok(pool),
            Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY) => {
                Err(gpu_descriptor::CreatePoolError::OutOfHostMemory)
            }
            Err(vk::Result::ERROR_OUT_OF_DEVICE_MEMORY) => {
                Err(gpu_descriptor::CreatePoolError::OutOfDeviceMemory)
            }
            Err(vk::Result::ERROR_FRAGMENTATION) => {
                Err(gpu_descriptor::CreatePoolError::Fragmentation)
            }
            Err(err) => handle_unexpected(err),
        }
    }

    unsafe fn destroy_descriptor_pool(&self, pool: vk::DescriptorPool) {
        unsafe { self.raw.destroy_descriptor_pool(pool, None) }
    }

    unsafe fn alloc_descriptor_sets<'a>(
        &self,
        pool: &mut vk::DescriptorPool,
        layouts: impl ExactSizeIterator<Item = &'a vk::DescriptorSetLayout>,
        sets: &mut impl Extend<vk::DescriptorSet>,
    ) -> Result<(), gpu_descriptor::DeviceAllocationError> {
        let result = unsafe {
            self.raw.allocate_descriptor_sets(
                &vk::DescriptorSetAllocateInfo::default()
                    .descriptor_pool(*pool)
                    .set_layouts(
                        &smallvec::SmallVec::<[vk::DescriptorSetLayout; 32]>::from_iter(
                            layouts.cloned(),
                        ),
                    ),
            )
        };

        match result {
            Ok(vk_sets) => {
                sets.extend(vk_sets);
                Ok(())
            }
            Err(vk::Result::ERROR_OUT_OF_HOST_MEMORY)
            | Err(vk::Result::ERROR_OUT_OF_POOL_MEMORY) => {
                Err(gpu_descriptor::DeviceAllocationError::OutOfHostMemory)
            }
            Err(vk::Result::ERROR_OUT_OF_DEVICE_MEMORY) => {
                Err(gpu_descriptor::DeviceAllocationError::OutOfDeviceMemory)
            }
            Err(vk::Result::ERROR_FRAGMENTED_POOL) => {
                Err(gpu_descriptor::DeviceAllocationError::FragmentedPool)
            }
            Err(err) => handle_unexpected(err),
        }
    }

    unsafe fn dealloc_descriptor_sets<'a>(
        &self,
        pool: &mut vk::DescriptorPool,
        sets: impl Iterator<Item = vk::DescriptorSet>,
    ) {
        let result = unsafe {
            self.raw.free_descriptor_sets(
                *pool,
                &smallvec::SmallVec::<[vk::DescriptorSet; 32]>::from_iter(sets),
            )
        };
        match result {
            Ok(()) => {}
            Err(err) => handle_unexpected(err),
        }
    }
}

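/// A compiled shader stage plus the storage its `create_info` borrows from.
///
/// `create_info.p_name` points into `_entry_point`, so the `CString` must
/// stay alive for as long as `create_info` is used; `temp_raw_module`
/// records a shader module created on the fly so it can be destroyed once
/// the pipeline is built.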
struct CompiledStage {
    create_info: vk::PipelineShaderStageCreateInfo<'static>,
    _entry_point: CString,
    temp_raw_module: Option<vk::ShaderModule>,
}

impl super::Device {
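    /// Create a swapchain matching `config` for `surface`.
    ///
    /// If `provided_old_swapchain` is given, it is passed to Vulkan as the
    /// retired swapchain and destroyed afterwards, whether or not creation
    /// succeeds.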
    pub(super) unsafe fn create_swapchain(
        &self,
        surface: &super::Surface,
        config: &crate::SurfaceConfiguration,
        provided_old_swapchain: Option<super::Swapchain>,
    ) -> Result<super::Swapchain, crate::SurfaceError> {
        profiling::scope!("Device::create_swapchain");
        let functor = khr::swapchain::Device::new(&surface.instance.raw, &self.shared.raw);

        let old_swapchain = match provided_old_swapchain {
            Some(osc) => osc.raw,
            None => vk::SwapchainKHR::null(),
        };

        let color_space = if config.format == wgt::TextureFormat::Rgba16Float {
            // Enable wide color gamut mode
            // Vulkan swapchain for Android only supports DISPLAY_P3_NONLINEAR_EXT and EXTENDED_SRGB_LINEAR_EXT
            vk::ColorSpaceKHR::EXTENDED_SRGB_LINEAR_EXT
        } else {
            vk::ColorSpaceKHR::SRGB_NONLINEAR
        };

        let original_format = self.shared.private_caps.map_texture_format(config.format);
        let mut raw_flags = vk::SwapchainCreateFlagsKHR::empty();
        let mut raw_view_formats: Vec<vk::Format> = vec![];
        let mut wgt_view_formats = vec![];
        if !config.view_formats.is_empty() {
            raw_flags |= vk::SwapchainCreateFlagsKHR::MUTABLE_FORMAT;
            raw_view_formats = config
                .view_formats
                .iter()
                .map(|f| self.shared.private_caps.map_texture_format(*f))
                .collect();
            raw_view_formats.push(original_format);

            wgt_view_formats.clone_from(&config.view_formats);
            wgt_view_formats.push(config.format);
        }

        let mut info = vk::SwapchainCreateInfoKHR::default()
            .flags(raw_flags)
            .surface(surface.raw)
            .min_image_count(config.maximum_frame_latency + 1) // TODO: https://github.com/gfx-rs/wgpu/issues/2869
            .image_format(original_format)
            .image_color_space(color_space)
            .image_extent(vk::Extent2D {
                width: config.extent.width,
                height: config.extent.height,
            })
            .image_array_layers(config.extent.depth_or_array_layers)
            .image_usage(conv::map_texture_usage(config.usage))
            .image_sharing_mode(vk::SharingMode::EXCLUSIVE)
            .pre_transform(vk::SurfaceTransformFlagsKHR::IDENTITY)
            .composite_alpha(conv::map_composite_alpha_mode(config.composite_alpha_mode))
            .present_mode(conv::map_present_mode(config.present_mode))
            .clipped(true)
            .old_swapchain(old_swapchain);

        let mut format_list_info = vk::ImageFormatListCreateInfo::default();
        if !raw_view_formats.is_empty() {
            format_list_info = format_list_info.view_formats(&raw_view_formats);
            info = info.push_next(&mut format_list_info);
        }

        let result = {
            profiling::scope!("vkCreateSwapchainKHR");
            unsafe { functor.create_swapchain(&info, None) }
        };

        // doing this before bailing out with error
        if old_swapchain != vk::SwapchainKHR::null() {
            unsafe { functor.destroy_swapchain(old_swapchain, None) }
        }

        let raw = match result {
            Ok(swapchain) => swapchain,
            Err(error) => {
                return Err(match error {
                    vk::Result::ERROR_SURFACE_LOST_KHR
                    | vk::Result::ERROR_INITIALIZATION_FAILED => crate::SurfaceError::Lost,
                    vk::Result::ERROR_NATIVE_WINDOW_IN_USE_KHR => {
                        crate::SurfaceError::Other("Native window is in use")
                    }
                    // We don't use VK_EXT_image_compression_control
                    // VK_ERROR_COMPRESSION_EXHAUSTED_EXT
                    other => super::map_host_device_oom_and_lost_err(other).into(),
                });
            }
        };

        let images =
            unsafe { functor.get_swapchain_images(raw) }.map_err(super::map_host_device_oom_err)?;

        // NOTE: It's important that we define at least images.len() wait
        // semaphores, since we prospectively need to provide the call to
        // acquire the next image with an unsignaled semaphore.
        let surface_semaphores = (0..=images.len())
            .map(|_| {
                super::SwapchainImageSemaphores::new(&self.shared)
                    .map(Mutex::new)
                    .map(Arc::new)
            })
            .collect::<Result<Vec<_>, _>>()?;

        Ok(super::Swapchain {
            raw,
            raw_flags,
            functor,
            device: Arc::clone(&self.shared),
            images,
            config: config.clone(),
            view_formats: wgt_view_formats,
            surface_semaphores,
            next_semaphore_index: 0,
            next_present_time: None,
        })
    }

    /// # Safety
    ///
    /// - `vk_image` must be created respecting `desc`
    /// - If `drop_callback` is [`None`], wgpu-hal will take ownership of `vk_image`. If
    ///   `drop_callback` is [`Some`], `vk_image` must be valid until the callback is called.
    /// - If the `ImageCreateFlags` does not contain `MUTABLE_FORMAT`, the `view_formats` of `desc` must be empty.
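    ///
    /// A hypothetical import of an externally created image (sketch; with a
    /// `None` drop callback, wgpu-hal takes ownership of `raw_image`):
    ///
    /// ```ignore
    /// let texture = unsafe { Device::texture_from_raw(raw_image, &desc, None) };
    /// ```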
    pub unsafe fn texture_from_raw(
        vk_image: vk::Image,
        desc: &crate::TextureDescriptor,
        drop_callback: Option<crate::DropCallback>,
    ) -> super::Texture {
        let mut raw_flags = vk::ImageCreateFlags::empty();
        let mut view_formats = vec![];
        for tf in desc.view_formats.iter() {
            if *tf == desc.format {
                continue;
            }
            view_formats.push(*tf);
        }
        if !view_formats.is_empty() {
            raw_flags |=
                vk::ImageCreateFlags::MUTABLE_FORMAT | vk::ImageCreateFlags::EXTENDED_USAGE;
            view_formats.push(desc.format)
        }
        if desc.format.is_multi_planar_format() {
            raw_flags |= vk::ImageCreateFlags::MUTABLE_FORMAT;
        }

        let drop_guard = crate::DropGuard::from_option(drop_callback);

        super::Texture {
            raw: vk_image,
            drop_guard,
            external_memory: None,
            block: None,
            usage: desc.usage,
            format: desc.format,
            raw_flags,
            copy_size: desc.copy_extent(),
            view_formats,
        }
    }

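    /// Find the first memory type whose index is allowed by `type_bits_req`
    /// (bit *i* set means memory type *i* is acceptable) and whose property
    /// flags contain all of `flags_req`.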
    #[cfg(windows)]
    fn find_memory_type_index(
        &self,
        type_bits_req: u32,
        flags_req: vk::MemoryPropertyFlags,
    ) -> Option<usize> {
        let mem_properties = unsafe {
            self.shared
                .instance
                .raw
                .get_physical_device_memory_properties(self.shared.physical_device)
        };

        // https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/VkPhysicalDeviceMemoryProperties.html
        for (i, mem_ty) in mem_properties.memory_types_as_slice().iter().enumerate() {
            let types_bits = 1 << i;
            let is_required_memory_type = type_bits_req & types_bits != 0;
            let has_required_properties = mem_ty.property_flags & flags_req == flags_req;
            if is_required_memory_type && has_required_properties {
                return Some(i);
            }
        }

        None
    }

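    /// Create a [`vk::Image`] for `desc` without binding any memory,
    /// returning the raw handle together with its memory requirements.
    ///
    /// Callers allocate or import memory afterwards and bind it with
    /// `vkBindImageMemory`, as `create_texture` and
    /// `texture_from_d3d11_shared_handle` do.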
    fn create_image_without_memory(
        &self,
        desc: &crate::TextureDescriptor,
        external_memory_image_create_info: Option<&mut vk::ExternalMemoryImageCreateInfo>,
    ) -> Result<ImageWithoutMemory, crate::DeviceError> {
        let copy_size = desc.copy_extent();

        let mut raw_flags = vk::ImageCreateFlags::empty();
        if desc.is_cube_compatible() {
            raw_flags |= vk::ImageCreateFlags::CUBE_COMPATIBLE;
        }

        let original_format = self.shared.private_caps.map_texture_format(desc.format);
        let mut vk_view_formats = vec![];
        let mut wgt_view_formats = vec![];
        if !desc.view_formats.is_empty() {
            raw_flags |= vk::ImageCreateFlags::MUTABLE_FORMAT;
            wgt_view_formats.clone_from(&desc.view_formats);
            wgt_view_formats.push(desc.format);

            if self.shared.private_caps.image_format_list {
                vk_view_formats = desc
                    .view_formats
                    .iter()
                    .map(|f| self.shared.private_caps.map_texture_format(*f))
                    .collect();
                vk_view_formats.push(original_format)
            }
        }
        if desc.format.is_multi_planar_format() {
            raw_flags |= vk::ImageCreateFlags::MUTABLE_FORMAT;
        }

        let mut vk_info = vk::ImageCreateInfo::default()
            .flags(raw_flags)
            .image_type(conv::map_texture_dimension(desc.dimension))
            .format(original_format)
            .extent(conv::map_copy_extent(&copy_size))
            .mip_levels(desc.mip_level_count)
            .array_layers(desc.array_layer_count())
            .samples(vk::SampleCountFlags::from_raw(desc.sample_count))
            .tiling(vk::ImageTiling::OPTIMAL)
            .usage(conv::map_texture_usage(desc.usage))
            .sharing_mode(vk::SharingMode::EXCLUSIVE)
            .initial_layout(vk::ImageLayout::UNDEFINED);

        let mut format_list_info = vk::ImageFormatListCreateInfo::default();
        if !vk_view_formats.is_empty() {
            format_list_info = format_list_info.view_formats(&vk_view_formats);
            vk_info = vk_info.push_next(&mut format_list_info);
        }

        if let Some(ext_info) = external_memory_image_create_info {
            vk_info = vk_info.push_next(ext_info);
        }

        let raw = unsafe { self.shared.raw.create_image(&vk_info, None) }.map_err(map_err)?;
        fn map_err(err: vk::Result) -> crate::DeviceError {
            // We don't use VK_EXT_image_compression_control
            // VK_ERROR_COMPRESSION_EXHAUSTED_EXT
            super::map_host_device_oom_and_ioca_err(err)
        }
        let req = unsafe { self.shared.raw.get_image_memory_requirements(raw) };

        Ok(ImageWithoutMemory {
            raw,
            requirements: req,
            copy_size,
            view_formats: wgt_view_formats,
            raw_flags,
        })
    }

    /// # Safety
    ///
    /// - Vulkan 1.1+ (or VK_KHR_external_memory)
    /// - The `d3d11_shared_handle` must be valid and respecting `desc`
    /// - `VK_EXTERNAL_MEMORY_HANDLE_TYPE_D3D11_TEXTURE_BIT` flag is used because we need to hold a reference to the handle
    #[cfg(windows)]
    pub unsafe fn texture_from_d3d11_shared_handle(
        &self,
        d3d11_shared_handle: windows::Win32::Foundation::HANDLE,
        desc: &crate::TextureDescriptor,
    ) -> Result<super::Texture, crate::DeviceError> {
        if !self.shared.private_caps.external_memory_win32 {
            log::error!("VK_KHR_external_memory_win32 extension is required");
            return Err(crate::DeviceError::ResourceCreationFailed);
        }

        let mut external_memory_image_info = vk::ExternalMemoryImageCreateInfo::default()
            .handle_types(vk::ExternalMemoryHandleTypeFlags::D3D11_TEXTURE);

        let image =
            self.create_image_without_memory(desc, Some(&mut external_memory_image_info))?;

        let mut import_memory_info = vk::ImportMemoryWin32HandleInfoKHR::default()
            .handle_type(vk::ExternalMemoryHandleTypeFlags::D3D11_TEXTURE)
            .handle(d3d11_shared_handle.0 as _);

        let mem_type_index = self
            .find_memory_type_index(
                image.requirements.memory_type_bits,
                vk::MemoryPropertyFlags::DEVICE_LOCAL,
            )
            .ok_or(crate::DeviceError::ResourceCreationFailed)?;

        let memory_allocate_info = vk::MemoryAllocateInfo::default()
            .allocation_size(image.requirements.size)
            .memory_type_index(mem_type_index as _)
            .push_next(&mut import_memory_info);
        let memory = unsafe { self.shared.raw.allocate_memory(&memory_allocate_info, None) }
            .map_err(super::map_host_device_oom_err)?;

        unsafe { self.shared.raw.bind_image_memory(image.raw, memory, 0) }
            .map_err(super::map_host_device_oom_err)?;

        if let Some(label) = desc.label {
            unsafe { self.shared.set_object_name(image.raw, label) };
        }

        self.counters.textures.add(1);

        Ok(super::Texture {
            raw: image.raw,
            drop_guard: None,
            external_memory: Some(memory),
            block: None,
            usage: desc.usage,
            format: desc.format,
            raw_flags: image.raw_flags,
            copy_size: image.copy_size,
            view_formats: image.view_formats,
        })
    }

    /// # Safety
    ///
    /// - `vk_buffer`'s memory must be managed by the caller
    /// - Externally imported buffers can't be mapped by `wgpu`
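    ///
    /// A hypothetical wrap of an externally created buffer (sketch; the
    /// caller keeps `raw_buffer`'s memory alive):
    ///
    /// ```ignore
    /// let buffer = unsafe { Device::buffer_from_raw(raw_buffer) };
    /// ```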
    pub unsafe fn buffer_from_raw(vk_buffer: vk::Buffer) -> super::Buffer {
        super::Buffer {
            raw: vk_buffer,
            block: None,
        }
    }

    fn create_shader_module_impl(
        &self,
        spv: &[u32],
    ) -> Result<vk::ShaderModule, crate::DeviceError> {
        let vk_info = vk::ShaderModuleCreateInfo::default()
            .flags(vk::ShaderModuleCreateFlags::empty())
            .code(spv);

        let raw = unsafe {
            profiling::scope!("vkCreateShaderModule");
            self.shared
                .raw
                .create_shader_module(&vk_info, None)
                .map_err(map_err)?
        };
        fn map_err(err: vk::Result) -> crate::DeviceError {
            // We don't use VK_NV_glsl_shader
            // VK_ERROR_INVALID_SHADER_NV
            super::map_host_device_oom_err(err)
        }
        Ok(raw)
    }

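    /// Build the [`vk::PipelineShaderStageCreateInfo`] for `stage`,
    /// translating naga IR to SPIR-V on the fly for
    /// [`super::ShaderModule::Intermediate`] modules.
    ///
    /// Non-default options (disabled runtime checks, a non-empty
    /// `binding_map`, debug info, or skipped workgroup-memory zeroing) are
    /// applied through a temporary copy of `self.naga_options`. Any shader
    /// module created here is recorded in
    /// [`CompiledStage::temp_raw_module`] so the caller can destroy it
    /// after pipeline creation.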
    fn compile_stage(
        &self,
        stage: &crate::ProgrammableStage<super::ShaderModule>,
        naga_stage: naga::ShaderStage,
        binding_map: &naga::back::spv::BindingMap,
    ) -> Result<CompiledStage, crate::PipelineError> {
        let stage_flags = crate::auxil::map_naga_stage(naga_stage);
        let vk_module = match *stage.module {
            super::ShaderModule::Raw(raw) => raw,
            super::ShaderModule::Intermediate {
                ref naga_shader,
                runtime_checks,
            } => {
                let pipeline_options = naga::back::spv::PipelineOptions {
                    entry_point: stage.entry_point.to_string(),
                    shader_stage: naga_stage,
                };
                let needs_temp_options = !runtime_checks
                    || !binding_map.is_empty()
                    || naga_shader.debug_source.is_some()
                    || !stage.zero_initialize_workgroup_memory;
                let mut temp_options;
                let options = if needs_temp_options {
                    temp_options = self.naga_options.clone();
                    if !runtime_checks {
                        temp_options.bounds_check_policies = naga::proc::BoundsCheckPolicies {
                            index: naga::proc::BoundsCheckPolicy::Unchecked,
                            buffer: naga::proc::BoundsCheckPolicy::Unchecked,
                            image_load: naga::proc::BoundsCheckPolicy::Unchecked,
                            binding_array: naga::proc::BoundsCheckPolicy::Unchecked,
                        };
                    }
                    if !binding_map.is_empty() {
                        temp_options.binding_map = binding_map.clone();
                    }

                    if let Some(ref debug) = naga_shader.debug_source {
                        temp_options.debug_info = Some(naga::back::spv::DebugInfo {
                            source_code: &debug.source_code,
                            file_name: debug.file_name.as_ref().as_ref(),
                            language: naga::back::spv::SourceLanguage::WGSL,
                        })
                    }
                    if !stage.zero_initialize_workgroup_memory {
                        temp_options.zero_initialize_workgroup_memory =
                            naga::back::spv::ZeroInitializeWorkgroupMemoryMode::None;
                    }

                    &temp_options
                } else {
                    &self.naga_options
                };

                let (module, info) = naga::back::pipeline_constants::process_overrides(
                    &naga_shader.module,
                    &naga_shader.info,
                    stage.constants,
                )
                .map_err(|e| {
                    crate::PipelineError::PipelineConstants(stage_flags, format!("{e}"))
                })?;

                let spv = {
                    profiling::scope!("naga::spv::write_vec");
                    naga::back::spv::write_vec(&module, &info, options, Some(&pipeline_options))
                }
                .map_err(|e| crate::PipelineError::Linkage(stage_flags, format!("{e}")))?;
                self.create_shader_module_impl(&spv)?
            }
        };

        let mut flags = vk::PipelineShaderStageCreateFlags::empty();
        if self.shared.features.contains(wgt::Features::SUBGROUP) {
            flags |= vk::PipelineShaderStageCreateFlags::ALLOW_VARYING_SUBGROUP_SIZE
        }

        let entry_point = CString::new(stage.entry_point).unwrap();
        let mut create_info = vk::PipelineShaderStageCreateInfo::default()
            .flags(flags)
            .stage(conv::map_shader_stage(stage_flags))
            .module(vk_module);

        // Circumvent struct lifetime check because of a self-reference inside CompiledStage
        create_info.p_name = entry_point.as_ptr();

        Ok(CompiledStage {
            create_info,
            _entry_point: entry_point,
            temp_raw_module: match *stage.module {
                super::ShaderModule::Raw(_) => None,
                super::ShaderModule::Intermediate { .. } => Some(vk_module),
            },
        })
    }

    /// Returns the queue family index of the device's internal queue.
    ///
    /// This is useful for constructing memory barriers needed for queue family ownership transfer when
    /// external memory is involved (from/to `VK_QUEUE_FAMILY_EXTERNAL_KHR` and `VK_QUEUE_FAMILY_FOREIGN_EXT`
    /// for example).
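    ///
    /// For example, a release barrier handing an image to an external
    /// queue family might be set up like this (sketch; `device` is this
    /// [`super::Device`]):
    ///
    /// ```ignore
    /// let barrier = vk::ImageMemoryBarrier::default()
    ///     .src_queue_family_index(device.queue_family_index())
    ///     .dst_queue_family_index(vk::QUEUE_FAMILY_EXTERNAL);
    /// ```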
    pub fn queue_family_index(&self) -> u32 {
        self.shared.family_index
    }

    pub fn queue_index(&self) -> u32 {
        self.shared.queue_index
    }

    pub fn raw_device(&self) -> &ash::Device {
        &self.shared.raw
    }

    pub fn raw_physical_device(&self) -> vk::PhysicalDevice {
        self.shared.physical_device
    }

    pub fn raw_queue(&self) -> vk::Queue {
        self.shared.raw_queue
    }

    pub fn enabled_device_extensions(&self) -> &[&'static CStr] {
        &self.shared.enabled_extensions
    }

    pub fn shared_instance(&self) -> &super::InstanceShared {
        &self.shared.instance
    }
}

impl crate::Device for super::Device {
    type A = super::Api;

    unsafe fn exit(self, queue: super::Queue) {
        unsafe { self.mem_allocator.into_inner().cleanup(&*self.shared) };
        unsafe { self.desc_allocator.into_inner().cleanup(&*self.shared) };
        unsafe {
            queue
                .relay_semaphores
                .into_inner()
                .destroy(&self.shared.raw)
        };
        unsafe { self.shared.free_resources() };
    }

    unsafe fn create_buffer(
        &self,
        desc: &crate::BufferDescriptor,
    ) -> Result<super::Buffer, crate::DeviceError> {
        let vk_info = vk::BufferCreateInfo::default()
            .size(desc.size)
            .usage(conv::map_buffer_usage(desc.usage))
            .sharing_mode(vk::SharingMode::EXCLUSIVE);

        let raw = unsafe {
            self.shared
                .raw
                .create_buffer(&vk_info, None)
                .map_err(super::map_host_device_oom_and_ioca_err)?
        };
        let req = unsafe { self.shared.raw.get_buffer_memory_requirements(raw) };

        let mut alloc_usage = if desc
            .usage
            .intersects(crate::BufferUses::MAP_READ | crate::BufferUses::MAP_WRITE)
        {
            let mut flags = gpu_alloc::UsageFlags::HOST_ACCESS;
            //TODO: find a way to use `crate::MemoryFlags::PREFER_COHERENT`
            flags.set(
                gpu_alloc::UsageFlags::DOWNLOAD,
                desc.usage.contains(crate::BufferUses::MAP_READ),
            );
            flags.set(
                gpu_alloc::UsageFlags::UPLOAD,
                desc.usage.contains(crate::BufferUses::MAP_WRITE),
            );
            flags
        } else {
            gpu_alloc::UsageFlags::FAST_DEVICE_ACCESS
        };
        alloc_usage.set(
            gpu_alloc::UsageFlags::TRANSIENT,
            desc.memory_flags.contains(crate::MemoryFlags::TRANSIENT),
        );

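        // Acceleration structure build inputs get a fixed 16-byte alignment
        // here, which can be stricter than the alignment reported by
        // `vkGetBufferMemoryRequirements` above.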
1078        let alignment_mask = if desc.usage.intersects(
1079            crate::BufferUses::TOP_LEVEL_ACCELERATION_STRUCTURE_INPUT
1080                | crate::BufferUses::BOTTOM_LEVEL_ACCELERATION_STRUCTURE_INPUT,
1081        ) {
1082            16
1083        } else {
1084            req.alignment
1085        } - 1;
1086
1087        let block = unsafe {
1088            self.mem_allocator.lock().alloc(
1089                &*self.shared,
1090                gpu_alloc::Request {
1091                    size: req.size,
1092                    align_mask: alignment_mask,
1093                    usage: alloc_usage,
1094                    memory_types: req.memory_type_bits & self.valid_ash_memory_types,
1095                },
1096            )?
1097        };
1098
1099        unsafe {
1100            self.shared
1101                .raw
1102                .bind_buffer_memory(raw, *block.memory(), block.offset())
1103                .map_err(super::map_host_device_oom_and_ioca_err)?
1104        };
1105
1106        if let Some(label) = desc.label {
1107            unsafe { self.shared.set_object_name(raw, label) };
1108        }
1109
1110        self.counters.buffer_memory.add(block.size() as isize);
1111        self.counters.buffers.add(1);
1112
1113        Ok(super::Buffer {
1114            raw,
1115            block: Some(Mutex::new(block)),
1116        })
1117    }
1118    unsafe fn destroy_buffer(&self, buffer: super::Buffer) {
1119        unsafe { self.shared.raw.destroy_buffer(buffer.raw, None) };
1120        if let Some(block) = buffer.block {
1121            let block = block.into_inner();
1122            self.counters.buffer_memory.sub(block.size() as isize);
1123            unsafe { self.mem_allocator.lock().dealloc(&*self.shared, block) };
1124        }
1125
1126        self.counters.buffers.sub(1);
1127    }
1128
1129    unsafe fn add_raw_buffer(&self, _buffer: &super::Buffer) {
1130        self.counters.buffers.add(1);
1131    }
1132
1133    unsafe fn map_buffer(
1134        &self,
1135        buffer: &super::Buffer,
1136        range: crate::MemoryRange,
1137    ) -> Result<crate::BufferMapping, crate::DeviceError> {
1138        if let Some(ref block) = buffer.block {
1139            let size = range.end - range.start;
1140            let mut block = block.lock();
1141            let ptr = unsafe { block.map(&*self.shared, range.start, size as usize)? };
1142            let is_coherent = block
1143                .props()
1144                .contains(gpu_alloc::MemoryPropertyFlags::HOST_COHERENT);
1145            Ok(crate::BufferMapping { ptr, is_coherent })
1146        } else {
1147            crate::hal_usage_error("tried to map external buffer")
1148        }
1149    }
1150    unsafe fn unmap_buffer(&self, buffer: &super::Buffer) {
1151        if let Some(ref block) = buffer.block {
1152            unsafe { block.lock().unmap(&*self.shared) };
1153        } else {
1154            crate::hal_usage_error("tried to unmap external buffer")
1155        }
1156    }
1157
1158    unsafe fn flush_mapped_ranges<I>(&self, buffer: &super::Buffer, ranges: I)
1159    where
1160        I: Iterator<Item = crate::MemoryRange>,
1161    {
1162        if let Some(vk_ranges) = self.shared.make_memory_ranges(buffer, ranges) {
1163            unsafe {
1164                self.shared
1165                    .raw
1166                    .flush_mapped_memory_ranges(
1167                        &smallvec::SmallVec::<[vk::MappedMemoryRange; 32]>::from_iter(vk_ranges),
1168                    )
1169            }
1170            .unwrap();
1171        }
1172    }
1173    unsafe fn invalidate_mapped_ranges<I>(&self, buffer: &super::Buffer, ranges: I)
1174    where
1175        I: Iterator<Item = crate::MemoryRange>,
1176    {
1177        if let Some(vk_ranges) = self.shared.make_memory_ranges(buffer, ranges) {
1178            unsafe {
1179                self.shared
1180                    .raw
1181                    .invalidate_mapped_memory_ranges(&smallvec::SmallVec::<
1182                        [vk::MappedMemoryRange; 32],
1183                    >::from_iter(vk_ranges))
1184            }
1185            .unwrap();
1186        }
1187    }
1188
1189    unsafe fn create_texture(
1190        &self,
1191        desc: &crate::TextureDescriptor,
1192    ) -> Result<super::Texture, crate::DeviceError> {
1193        let image = self.create_image_without_memory(desc, None)?;
1194
1195        let block = unsafe {
1196            self.mem_allocator.lock().alloc(
1197                &*self.shared,
1198                gpu_alloc::Request {
1199                    size: image.requirements.size,
1200                    align_mask: image.requirements.alignment - 1,
1201                    usage: gpu_alloc::UsageFlags::FAST_DEVICE_ACCESS,
1202                    memory_types: image.requirements.memory_type_bits & self.valid_ash_memory_types,
1203                },
1204            )?
1205        };
1206
1207        self.counters.texture_memory.add(block.size() as isize);
1208
1209        unsafe {
1210            self.shared
1211                .raw
1212                .bind_image_memory(image.raw, *block.memory(), block.offset())
1213                .map_err(super::map_host_device_oom_err)?
1214        };
1215
1216        if let Some(label) = desc.label {
1217            unsafe { self.shared.set_object_name(image.raw, label) };
1218        }
1219
1220        self.counters.textures.add(1);
1221
1222        Ok(super::Texture {
1223            raw: image.raw,
1224            drop_guard: None,
1225            external_memory: None,
1226            block: Some(block),
1227            usage: desc.usage,
1228            format: desc.format,
1229            raw_flags: image.raw_flags,
1230            copy_size: image.copy_size,
1231            view_formats: image.view_formats,
1232        })
1233    }
1234    unsafe fn destroy_texture(&self, texture: super::Texture) {
1235        if texture.drop_guard.is_none() {
1236            unsafe { self.shared.raw.destroy_image(texture.raw, None) };
1237        }
1238        if let Some(memory) = texture.external_memory {
1239            unsafe { self.shared.raw.free_memory(memory, None) };
1240        }
1241        if let Some(block) = texture.block {
1242            self.counters.texture_memory.sub(block.size() as isize);
1243
1244            unsafe { self.mem_allocator.lock().dealloc(&*self.shared, block) };
1245        }
1246
1247        self.counters.textures.sub(1);
1248    }
1249
1250    unsafe fn add_raw_texture(&self, _texture: &super::Texture) {
1251        self.counters.textures.add(1);
1252    }
1253
1254    unsafe fn create_texture_view(
1255        &self,
1256        texture: &super::Texture,
1257        desc: &crate::TextureViewDescriptor,
1258    ) -> Result<super::TextureView, crate::DeviceError> {
1259        let subresource_range = conv::map_subresource_range(&desc.range, texture.format);
1260        let mut vk_info = vk::ImageViewCreateInfo::default()
1261            .flags(vk::ImageViewCreateFlags::empty())
1262            .image(texture.raw)
1263            .view_type(conv::map_view_dimension(desc.dimension))
1264            .format(self.shared.private_caps.map_texture_format(desc.format))
1265            .subresource_range(subresource_range);
1266        let layers =
1267            NonZeroU32::new(subresource_range.layer_count).expect("Unexpected zero layer count");
1268
1269        let mut image_view_info;
1270        let view_usage = if self.shared.private_caps.image_view_usage && !desc.usage.is_empty() {
1271            image_view_info =
1272                vk::ImageViewUsageCreateInfo::default().usage(conv::map_texture_usage(desc.usage));
1273            vk_info = vk_info.push_next(&mut image_view_info);
1274            desc.usage
1275        } else {
1276            texture.usage
1277        };
1278
1279        let raw = unsafe { self.shared.raw.create_image_view(&vk_info, None) }
1280            .map_err(super::map_host_device_oom_and_ioca_err)?;
1281
1282        if let Some(label) = desc.label {
1283            unsafe { self.shared.set_object_name(raw, label) };
1284        }
1285
1286        let attachment = super::FramebufferAttachment {
1287            raw: if self.shared.private_caps.imageless_framebuffers {
1288                vk::ImageView::null()
1289            } else {
1290                raw
1291            },
1292            raw_image_flags: texture.raw_flags,
1293            view_usage,
1294            view_format: desc.format,
1295            raw_view_formats: texture
1296                .view_formats
1297                .iter()
1298                .map(|tf| self.shared.private_caps.map_texture_format(*tf))
1299                .collect(),
1300        };
1301
1302        self.counters.texture_views.add(1);
1303
1304        Ok(super::TextureView {
1305            raw,
1306            layers,
1307            attachment,
1308        })
1309    }
1310    unsafe fn destroy_texture_view(&self, view: super::TextureView) {
1311        if !self.shared.private_caps.imageless_framebuffers {
1312            let mut fbuf_lock = self.shared.framebuffers.lock();
1313            for (key, &raw_fbuf) in fbuf_lock.iter() {
1314                if key.attachments.iter().any(|at| at.raw == view.raw) {
1315                    unsafe { self.shared.raw.destroy_framebuffer(raw_fbuf, None) };
1316                }
1317            }
1318            fbuf_lock.retain(|key, _| !key.attachments.iter().any(|at| at.raw == view.raw));
1319        }
1320        unsafe { self.shared.raw.destroy_image_view(view.raw, None) };
1321
1322        self.counters.texture_views.sub(1);
1323    }
1324
1325    unsafe fn create_sampler(
1326        &self,
1327        desc: &crate::SamplerDescriptor,
1328    ) -> Result<super::Sampler, crate::DeviceError> {
1329        let mut vk_info = vk::SamplerCreateInfo::default()
1330            .flags(vk::SamplerCreateFlags::empty())
1331            .mag_filter(conv::map_filter_mode(desc.mag_filter))
1332            .min_filter(conv::map_filter_mode(desc.min_filter))
1333            .mipmap_mode(conv::map_mip_filter_mode(desc.mipmap_filter))
1334            .address_mode_u(conv::map_address_mode(desc.address_modes[0]))
1335            .address_mode_v(conv::map_address_mode(desc.address_modes[1]))
1336            .address_mode_w(conv::map_address_mode(desc.address_modes[2]))
1337            .min_lod(desc.lod_clamp.start)
1338            .max_lod(desc.lod_clamp.end);
1339
1340        if let Some(fun) = desc.compare {
1341            vk_info = vk_info
1342                .compare_enable(true)
1343                .compare_op(conv::map_comparison(fun));
1344        }
1345
1346        if desc.anisotropy_clamp != 1 {
1347            // We only enable anisotropy if it is supported, and wgpu-hal interface guarantees
1348            // the clamp is in the range [1, 16] which is always supported if anisotropy is.
1349            vk_info = vk_info
1350                .anisotropy_enable(true)
1351                .max_anisotropy(desc.anisotropy_clamp as f32);
1352        }
1353
1354        if let Some(color) = desc.border_color {
1355            vk_info = vk_info.border_color(conv::map_border_color(color));
1356        }
1357
1358        let raw = unsafe {
1359            self.shared
1360                .raw
1361                .create_sampler(&vk_info, None)
1362                .map_err(super::map_host_device_oom_and_ioca_err)?
1363        };
1364
1365        if let Some(label) = desc.label {
1366            unsafe { self.shared.set_object_name(raw, label) };
1367        }
1368
1369        self.counters.samplers.add(1);
1370
1371        Ok(super::Sampler { raw })
1372    }
1373    unsafe fn destroy_sampler(&self, sampler: super::Sampler) {
1374        unsafe { self.shared.raw.destroy_sampler(sampler.raw, None) };
1375
1376        self.counters.samplers.sub(1);
1377    }
1378
1379    unsafe fn create_command_encoder(
1380        &self,
1381        desc: &crate::CommandEncoderDescriptor<super::Queue>,
1382    ) -> Result<super::CommandEncoder, crate::DeviceError> {
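        // TRANSIENT hints to the driver that command buffers allocated from this pool
        // will be short-lived, which can enable cheaper allocation strategies.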
1383        let vk_info = vk::CommandPoolCreateInfo::default()
1384            .queue_family_index(desc.queue.family_index)
1385            .flags(vk::CommandPoolCreateFlags::TRANSIENT);
1386
1387        let raw = unsafe {
1388            self.shared
1389                .raw
1390                .create_command_pool(&vk_info, None)
1391                .map_err(super::map_host_device_oom_err)?
1392        };
1393
1394        self.counters.command_encoders.add(1);
1395
1396        Ok(super::CommandEncoder {
1397            raw,
1398            device: Arc::clone(&self.shared),
1399            active: vk::CommandBuffer::null(),
1400            bind_point: vk::PipelineBindPoint::default(),
1401            temp: super::Temp::default(),
1402            free: Vec::new(),
1403            discarded: Vec::new(),
1404            rpass_debug_marker_active: false,
1405            end_of_pass_timer_query: None,
1406        })
1407    }
1408    unsafe fn destroy_command_encoder(&self, cmd_encoder: super::CommandEncoder) {
1409        unsafe {
1410            // `vkDestroyCommandPool` also frees any command buffers allocated
1411            // from that pool, so there's no need to explicitly call
1412            // `vkFreeCommandBuffers` on `cmd_encoder`'s `free` and `discarded`
1413            // fields.
1414            self.shared.raw.destroy_command_pool(cmd_encoder.raw, None);
1415        }
1416
1417        self.counters.command_encoders.sub(1);
1418    }
1419
1420    unsafe fn create_bind_group_layout(
1421        &self,
1422        desc: &crate::BindGroupLayoutDescriptor,
1423    ) -> Result<super::BindGroupLayout, crate::DeviceError> {
1424        let mut desc_count = gpu_descriptor::DescriptorTotalCount::default();
1425        let mut types = Vec::new();
1426        for entry in desc.entries {
1427            let count = entry.count.map_or(1, |c| c.get());
1428            if entry.binding as usize >= types.len() {
1429                types.resize(
1430                    entry.binding as usize + 1,
1431                    (vk::DescriptorType::INPUT_ATTACHMENT, 0),
1432                );
1433            }
1434            types[entry.binding as usize] = (
1435                conv::map_binding_type(entry.ty),
1436                count,
1437            );
1438
1439            match entry.ty {
1440                wgt::BindingType::Buffer {
1441                    ty,
1442                    has_dynamic_offset,
1443                    ..
1444                } => match ty {
1445                    wgt::BufferBindingType::Uniform => {
1446                        if has_dynamic_offset {
1447                            desc_count.uniform_buffer_dynamic += count;
1448                        } else {
1449                            desc_count.uniform_buffer += count;
1450                        }
1451                    }
1452                    wgt::BufferBindingType::Storage { .. } => {
1453                        if has_dynamic_offset {
1454                            desc_count.storage_buffer_dynamic += count;
1455                        } else {
1456                            desc_count.storage_buffer += count;
1457                        }
1458                    }
1459                },
1460                wgt::BindingType::Sampler { .. } => {
1461                    desc_count.sampler += count;
1462                }
1463                wgt::BindingType::Texture { .. } => {
1464                    desc_count.sampled_image += count;
1465                }
1466                wgt::BindingType::StorageTexture { .. } => {
1467                    desc_count.storage_image += count;
1468                }
1469                wgt::BindingType::AccelerationStructure => {
1470                    desc_count.acceleration_structure += count;
1471                }
1472            }
1473        }
1474
1475        // Note: not bothering with an on-stack array here, as this is a low-frequency path.
1476        let vk_bindings = desc
1477            .entries
1478            .iter()
1479            .map(|entry| vk::DescriptorSetLayoutBinding {
1480                binding: entry.binding,
1481                descriptor_type: types[entry.binding as usize].0,
1482                descriptor_count: types[entry.binding as usize].1,
1483                stage_flags: conv::map_shader_stage(entry.visibility),
1484                p_immutable_samplers: ptr::null(),
1485                _marker: Default::default(),
1486            })
1487            .collect::<Vec<_>>();
1488
1489        let vk_info = vk::DescriptorSetLayoutCreateInfo::default().bindings(&vk_bindings);
1490
1491        let binding_arrays = desc
1492            .entries
1493            .iter()
1494            .enumerate()
1495            .filter_map(|(idx, entry)| entry.count.map(|count| (idx as u32, count)))
1496            .collect();
1497
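        // Declared out here so they stay alive while the create-info chain below holds
        // pointers to them (`push_next` stores a pointer to `binding_flag_info`, which in
        // turn borrows `binding_flag_vec`).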
1498        let mut binding_flag_info;
1499        let binding_flag_vec;
1500
1501        let partially_bound = desc
1502            .flags
1503            .contains(crate::BindGroupLayoutFlags::PARTIALLY_BOUND);
1504
1505        let vk_info = if partially_bound {
1506            binding_flag_vec = desc
1507                .entries
1508                .iter()
1509                .map(|entry| {
1510                    let mut flags = vk::DescriptorBindingFlags::empty();
1511
1512                    if partially_bound && entry.count.is_some() {
1513                        flags |= vk::DescriptorBindingFlags::PARTIALLY_BOUND;
1514                    }
1515
1516                    flags
1517                })
1518                .collect::<Vec<_>>();
1519
1520            binding_flag_info = vk::DescriptorSetLayoutBindingFlagsCreateInfo::default()
1521                .binding_flags(&binding_flag_vec);
1522
1523            vk_info.push_next(&mut binding_flag_info)
1524        } else {
1525            vk_info
1526        };
1527
1528        let raw = unsafe {
1529            self.shared
1530                .raw
1531                .create_descriptor_set_layout(&vk_info, None)
1532                .map_err(super::map_host_device_oom_err)?
1533        };
1534
1535        if let Some(label) = desc.label {
1536            unsafe { self.shared.set_object_name(raw, label) };
1537        }
1538
1539        self.counters.bind_group_layouts.add(1);
1540
1541        Ok(super::BindGroupLayout {
1542            raw,
1543            desc_count,
1544            types: types.into_boxed_slice(),
1545            binding_arrays,
1546        })
1547    }
1548    unsafe fn destroy_bind_group_layout(&self, bg_layout: super::BindGroupLayout) {
1549        unsafe {
1550            self.shared
1551                .raw
1552                .destroy_descriptor_set_layout(bg_layout.raw, None)
1553        };
1554
1555        self.counters.bind_group_layouts.sub(1);
1556    }
1557
1558    unsafe fn create_pipeline_layout(
1559        &self,
1560        desc: &crate::PipelineLayoutDescriptor<super::BindGroupLayout>,
1561    ) -> Result<super::PipelineLayout, crate::DeviceError> {
1562        // Note: not bothering with an on-stack array here, as this is a low-frequency path.
1563        let vk_set_layouts = desc
1564            .bind_group_layouts
1565            .iter()
1566            .map(|bgl| bgl.raw)
1567            .collect::<Vec<_>>();
1568        let vk_push_constant_ranges = desc
1569            .push_constant_ranges
1570            .iter()
1571            .map(|pcr| vk::PushConstantRange {
1572                stage_flags: conv::map_shader_stage(pcr.stages),
1573                offset: pcr.range.start,
1574                size: pcr.range.end - pcr.range.start,
1575            })
1576            .collect::<Vec<_>>();
1577
1578        let vk_info = vk::PipelineLayoutCreateInfo::default()
1579            .flags(vk::PipelineLayoutCreateFlags::empty())
1580            .set_layouts(&vk_set_layouts)
1581            .push_constant_ranges(&vk_push_constant_ranges);
1582
1583        let raw = {
1584            profiling::scope!("vkCreatePipelineLayout");
1585            unsafe {
1586                self.shared
1587                    .raw
1588                    .create_pipeline_layout(&vk_info, None)
1589                    .map_err(super::map_host_device_oom_err)?
1590            }
1591        };
1592
1593        if let Some(label) = desc.label {
1594            unsafe { self.shared.set_object_name(raw, label) };
1595        }
1596
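        // Naga resolves the size of each binding array at SPIR-V generation time, so
        // collect the per-(group, binding) array sizes here for use during shader
        // compilation.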
1597        let mut binding_arrays = BTreeMap::new();
1598        for (group, &layout) in desc.bind_group_layouts.iter().enumerate() {
1599            for &(binding, binding_array_size) in &layout.binding_arrays {
1600                binding_arrays.insert(
1601                    naga::ResourceBinding {
1602                        group: group as u32,
1603                        binding,
1604                    },
1605                    naga::back::spv::BindingInfo {
1606                        binding_array_size: Some(binding_array_size.get()),
1607                    },
1608                );
1609            }
1610        }
1611
1612        self.counters.pipeline_layouts.add(1);
1613
1614        Ok(super::PipelineLayout {
1615            raw,
1616            binding_arrays,
1617        })
1618    }
1619    unsafe fn destroy_pipeline_layout(&self, pipeline_layout: super::PipelineLayout) {
1620        unsafe {
1621            self.shared
1622                .raw
1623                .destroy_pipeline_layout(pipeline_layout.raw, None)
1624        };
1625
1626        self.counters.pipeline_layouts.sub(1);
1627    }
1628
1629    unsafe fn create_bind_group(
1630        &self,
1631        desc: &crate::BindGroupDescriptor<
1632            super::BindGroupLayout,
1633            super::Buffer,
1634            super::Sampler,
1635            super::TextureView,
1636            super::AccelerationStructure,
1637        >,
1638    ) -> Result<super::BindGroup, crate::DeviceError> {
1639        let mut vk_sets = unsafe {
1640            self.desc_allocator.lock().allocate(
1641                &*self.shared,
1642                &desc.layout.raw,
1643                gpu_descriptor::DescriptorSetLayoutCreateFlags::empty(),
1644                &desc.layout.desc_count,
1645                1,
1646            )?
1647        };
1648
1649        let set = vk_sets.pop().unwrap();
1650        if let Some(label) = desc.label {
1651            unsafe { self.shared.set_object_name(*set.raw(), label) };
1652        }
1653
1654        /// Helper for splitting off and initializing a given number of elements on a pre-allocated
1655        /// stack, based on items returned from an [`ExactSizeIterator`]. Typically created from a
1656        /// [`MaybeUninit`] slice (see [`Vec::spare_capacity_mut()`]).
1657        /// The updated [`ExtendStack`] of remaining uninitialized elements is returned, safely
1658        /// representing that the initialized and remaining elements are two independent mutable
1659        /// borrows.
1660        struct ExtendStack<'a, T> {
1661            remainder: &'a mut [MaybeUninit<T>],
1662        }
1663
1664        impl<'a, T> ExtendStack<'a, T> {
1665            fn from_vec_capacity(vec: &'a mut Vec<T>) -> Self {
1666                Self {
1667                    remainder: vec.spare_capacity_mut(),
1668                }
1669            }
1670
1671            fn extend_one(self, value: T) -> (Self, &'a mut T) {
1672                let (to_init, remainder) = self.remainder.split_first_mut().unwrap();
1673                let init = to_init.write(value);
1674                (Self { remainder }, init)
1675            }
1676
1677            fn extend(
1678                self,
1679                iter: impl IntoIterator<Item = T> + ExactSizeIterator,
1680            ) -> (Self, &'a mut [T]) {
1681                let (to_init, remainder) = self.remainder.split_at_mut(iter.len());
1682
1683                for (value, to_init) in iter.into_iter().zip(to_init.iter_mut()) {
1684                    to_init.write(value);
1685                }
1686
1687                // We can't use the safe (but still unstable) MaybeUninit::write_slice() here, because we are writing from an iterator rather than copying from an existing slice.
1688
1689                let init = {
1690                    // SAFETY: The loop above has initialized exactly as many items as to_init is
1691                    // long, so it is safe to cast away the MaybeUninit<T> wrapper into T.
1692
1693                    // Additional safety docs from unstable slice_assume_init_mut
1694                    // SAFETY: similar to safety notes for `slice_get_ref`, but we have a
1695                    // mutable reference which is also guaranteed to be valid for writes.
1696                    unsafe { std::mem::transmute::<&mut [MaybeUninit<T>], &mut [T]>(to_init) }
1697                };
1698                (Self { remainder }, init)
1699            }
1700        }
1701
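        // The `vk::WriteDescriptorSet`s built below store raw pointers into these Vecs,
        // so the Vecs must never reallocate. Reserve the worst-case capacity up front
        // and use `ExtendStack` to hand out initialized sub-slices of the spare capacity.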
1702        let mut writes = Vec::with_capacity(desc.entries.len());
1703        let mut buffer_infos = Vec::with_capacity(desc.buffers.len());
1704        let mut buffer_infos = ExtendStack::from_vec_capacity(&mut buffer_infos);
1705        let mut image_infos = Vec::with_capacity(desc.samplers.len() + desc.textures.len());
1706        let mut image_infos = ExtendStack::from_vec_capacity(&mut image_infos);
1707        // TODO: This length could be reduced to just the number of top-level acceleration
1708        // structure bindings, where multiple consecutive TLAS bindings that are set via
1709        // one `WriteDescriptorSet` count towards one "info" struct, not the total number of
1710        // acceleration structure bindings to write:
1711        let mut acceleration_structure_infos =
1712            Vec::with_capacity(desc.acceleration_structures.len());
1713        let mut acceleration_structure_infos =
1714            ExtendStack::from_vec_capacity(&mut acceleration_structure_infos);
1715        let mut raw_acceleration_structures =
1716            Vec::with_capacity(desc.acceleration_structures.len());
1717        let mut raw_acceleration_structures =
1718            ExtendStack::from_vec_capacity(&mut raw_acceleration_structures);
1719        for entry in desc.entries {
1720            let (ty, size) = desc.layout.types[entry.binding as usize];
1721            if size == 0 {
1722                continue; // empty slot
1723            }
1724            let mut write = vk::WriteDescriptorSet::default()
1725                .dst_set(*set.raw())
1726                .dst_binding(entry.binding)
1727                .descriptor_type(ty);
1728
1729            write = match ty {
1730                vk::DescriptorType::SAMPLER => {
1731                    let start = entry.resource_index;
1732                    let end = start + entry.count;
1733                    let local_image_infos;
1734                    (image_infos, local_image_infos) =
1735                        image_infos.extend(desc.samplers[start as usize..end as usize].iter().map(
1736                            |sampler| vk::DescriptorImageInfo::default().sampler(sampler.raw),
1737                        ));
1738                    write.image_info(local_image_infos)
1739                }
1740                vk::DescriptorType::SAMPLED_IMAGE | vk::DescriptorType::STORAGE_IMAGE => {
1741                    let start = entry.resource_index;
1742                    let end = start + entry.count;
1743                    let local_image_infos;
1744                    (image_infos, local_image_infos) =
1745                        image_infos.extend(desc.textures[start as usize..end as usize].iter().map(
1746                            |binding| {
1747                                let layout = conv::derive_image_layout(
1748                                    binding.usage,
1749                                    binding.view.attachment.view_format,
1750                                );
1751                                vk::DescriptorImageInfo::default()
1752                                    .image_view(binding.view.raw)
1753                                    .image_layout(layout)
1754                            },
1755                        ));
1756                    write.image_info(local_image_infos)
1757                }
1758                vk::DescriptorType::UNIFORM_BUFFER
1759                | vk::DescriptorType::UNIFORM_BUFFER_DYNAMIC
1760                | vk::DescriptorType::STORAGE_BUFFER
1761                | vk::DescriptorType::STORAGE_BUFFER_DYNAMIC => {
1762                    let start = entry.resource_index;
1763                    let end = start + entry.count;
1764                    let local_buffer_infos;
1765                    (buffer_infos, local_buffer_infos) =
1766                        buffer_infos.extend(desc.buffers[start as usize..end as usize].iter().map(
1767                            |binding| {
1768                                vk::DescriptorBufferInfo::default()
1769                                    .buffer(binding.buffer.raw)
1770                                    .offset(binding.offset)
1771                                    .range(
1772                                        binding.size.map_or(vk::WHOLE_SIZE, wgt::BufferSize::get),
1773                                    )
1774                            },
1775                        ));
1776                    write.buffer_info(local_buffer_infos)
1777                }
1778                vk::DescriptorType::ACCELERATION_STRUCTURE_KHR => {
1779                    let start = entry.resource_index;
1780                    let end = start + entry.count;
1781
1782                    let local_raw_acceleration_structures;
1783                    (
1784                        raw_acceleration_structures,
1785                        local_raw_acceleration_structures,
1786                    ) = raw_acceleration_structures.extend(
1787                        desc.acceleration_structures[start as usize..end as usize]
1788                            .iter()
1789                            .map(|acceleration_structure| acceleration_structure.raw),
1790                    );
1791
1792                    let local_acceleration_structure_infos;
1793                    (
1794                        acceleration_structure_infos,
1795                        local_acceleration_structure_infos,
1796                    ) = acceleration_structure_infos.extend_one(
1797                        vk::WriteDescriptorSetAccelerationStructureKHR::default()
1798                            .acceleration_structures(local_raw_acceleration_structures),
1799                    );
1800
1801                    write
1802                        .descriptor_count(entry.count)
1803                        .push_next(local_acceleration_structure_infos)
1804                }
1805                _ => unreachable!(),
1806            };
1807
1808            writes.push(write);
1809        }
1810
1811        unsafe { self.shared.raw.update_descriptor_sets(&writes, &[]) };
1812
1813        self.counters.bind_groups.add(1);
1814
1815        Ok(super::BindGroup { set })
1816    }
1817
1818    unsafe fn destroy_bind_group(&self, group: super::BindGroup) {
1819        unsafe {
1820            self.desc_allocator
1821                .lock()
1822                .free(&*self.shared, Some(group.set))
1823        };
1824
1825        self.counters.bind_groups.sub(1);
1826    }
1827
1828    unsafe fn create_shader_module(
1829        &self,
1830        desc: &crate::ShaderModuleDescriptor,
1831        shader: crate::ShaderInput,
1832    ) -> Result<super::ShaderModule, crate::ShaderError> {
1833        let spv = match shader {
1834            crate::ShaderInput::Naga(naga_shader) => {
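                // If entry points must be compiled separately (driver workaround) or
                // pipeline overrides still need to be resolved, defer SPIR-V generation
                // until pipeline creation and keep the Naga IR around instead.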
1835                if self
1836                    .shared
1837                    .workarounds
1838                    .contains(super::Workarounds::SEPARATE_ENTRY_POINTS)
1839                    || !naga_shader.module.overrides.is_empty()
1840                {
1841                    return Ok(super::ShaderModule::Intermediate {
1842                        naga_shader,
1843                        runtime_checks: desc.runtime_checks,
1844                    });
1845                }
1846                let mut naga_options = self.naga_options.clone();
1847                naga_options.debug_info =
1848                    naga_shader
1849                        .debug_source
1850                        .as_ref()
1851                        .map(|d| naga::back::spv::DebugInfo {
1852                            source_code: d.source_code.as_ref(),
1853                            file_name: d.file_name.as_ref().as_ref(),
1854                            language: naga::back::spv::SourceLanguage::WGSL,
1855                        });
1856                if !desc.runtime_checks {
1857                    naga_options.bounds_check_policies = naga::proc::BoundsCheckPolicies {
1858                        index: naga::proc::BoundsCheckPolicy::Unchecked,
1859                        buffer: naga::proc::BoundsCheckPolicy::Unchecked,
1860                        image_load: naga::proc::BoundsCheckPolicy::Unchecked,
1861                        binding_array: naga::proc::BoundsCheckPolicy::Unchecked,
1862                    };
1863                }
1864                Cow::Owned(
1865                    naga::back::spv::write_vec(
1866                        &naga_shader.module,
1867                        &naga_shader.info,
1868                        &naga_options,
1869                        None,
1870                    )
1871                    .map_err(|e| crate::ShaderError::Compilation(format!("{e}")))?,
1872                )
1873            }
1874            crate::ShaderInput::SpirV(spv) => Cow::Borrowed(spv),
1875        };
1876
1877        let raw = self.create_shader_module_impl(&spv)?;
1878
1879        if let Some(label) = desc.label {
1880            unsafe { self.shared.set_object_name(raw, label) };
1881        }
1882
1883        self.counters.shader_modules.add(1);
1884
1885        Ok(super::ShaderModule::Raw(raw))
1886    }
1887
1888    unsafe fn destroy_shader_module(&self, module: super::ShaderModule) {
1889        match module {
1890            super::ShaderModule::Raw(raw) => {
1891                unsafe { self.shared.raw.destroy_shader_module(raw, None) };
1892            }
1893            super::ShaderModule::Intermediate { .. } => {}
1894        }
1895
1896        self.counters.shader_modules.sub(1);
1897    }
1898
1899    unsafe fn create_render_pipeline(
1900        &self,
1901        desc: &crate::RenderPipelineDescriptor<
1902            super::PipelineLayout,
1903            super::ShaderModule,
1904            super::PipelineCache,
1905        >,
1906    ) -> Result<super::RenderPipeline, crate::PipelineError> {
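        // These states are supplied at encoding time rather than baked into the
        // pipeline, so a single pipeline works for any viewport, scissor, blend-constant,
        // and stencil-reference combination.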
1907        let dynamic_states = [
1908            vk::DynamicState::VIEWPORT,
1909            vk::DynamicState::SCISSOR,
1910            vk::DynamicState::BLEND_CONSTANTS,
1911            vk::DynamicState::STENCIL_REFERENCE,
1912        ];
1913        let mut compatible_rp_key = super::RenderPassKey {
1914            sample_count: desc.multisample.count,
1915            multiview: desc.multiview,
1916            ..Default::default()
1917        };
1918        let mut stages = ArrayVec::<_, { crate::MAX_CONCURRENT_SHADER_STAGES }>::new();
1919        let mut vertex_buffers = Vec::with_capacity(desc.vertex_buffers.len());
1920        let mut vertex_attributes = Vec::new();
1921
1922        for (i, vb) in desc.vertex_buffers.iter().enumerate() {
1923            vertex_buffers.push(vk::VertexInputBindingDescription {
1924                binding: i as u32,
1925                stride: vb.array_stride as u32,
1926                input_rate: match vb.step_mode {
1927                    wgt::VertexStepMode::Vertex => vk::VertexInputRate::VERTEX,
1928                    wgt::VertexStepMode::Instance => vk::VertexInputRate::INSTANCE,
1929                },
1930            });
1931            for at in vb.attributes {
1932                vertex_attributes.push(vk::VertexInputAttributeDescription {
1933                    location: at.shader_location,
1934                    binding: i as u32,
1935                    format: conv::map_vertex_format(at.format),
1936                    offset: at.offset as u32,
1937                });
1938            }
1939        }
1940
1941        let vk_vertex_input = vk::PipelineVertexInputStateCreateInfo::default()
1942            .vertex_binding_descriptions(&vertex_buffers)
1943            .vertex_attribute_descriptions(&vertex_attributes);
1944
1945        let vk_input_assembly = vk::PipelineInputAssemblyStateCreateInfo::default()
1946            .topology(conv::map_topology(desc.primitive.topology))
1947            .primitive_restart_enable(desc.primitive.strip_index_format.is_some());
1948
1949        let compiled_vs = self.compile_stage(
1950            &desc.vertex_stage,
1951            naga::ShaderStage::Vertex,
1952            &desc.layout.binding_arrays,
1953        )?;
1954        stages.push(compiled_vs.create_info);
1955        let compiled_fs = match desc.fragment_stage {
1956            Some(ref stage) => {
1957                let compiled = self.compile_stage(
1958                    stage,
1959                    naga::ShaderStage::Fragment,
1960                    &desc.layout.binding_arrays,
1961                )?;
1962                stages.push(compiled.create_info);
1963                Some(compiled)
1964            }
1965            None => None,
1966        };
1967
1968        let mut vk_rasterization = vk::PipelineRasterizationStateCreateInfo::default()
1969            .polygon_mode(conv::map_polygon_mode(desc.primitive.polygon_mode))
1970            .front_face(conv::map_front_face(desc.primitive.front_face))
1971            .line_width(1.0)
1972            .depth_clamp_enable(desc.primitive.unclipped_depth);
1973        if let Some(face) = desc.primitive.cull_mode {
1974            vk_rasterization = vk_rasterization.cull_mode(conv::map_cull_face(face))
1975        }
1976        let mut vk_rasterization_conservative_state =
1977            vk::PipelineRasterizationConservativeStateCreateInfoEXT::default()
1978                .conservative_rasterization_mode(
1979                    vk::ConservativeRasterizationModeEXT::OVERESTIMATE,
1980                );
1981        if desc.primitive.conservative {
1982            vk_rasterization = vk_rasterization.push_next(&mut vk_rasterization_conservative_state);
1983        }
1984
1985        let mut vk_depth_stencil = vk::PipelineDepthStencilStateCreateInfo::default();
1986        if let Some(ref ds) = desc.depth_stencil {
1987            let vk_format = self.shared.private_caps.map_texture_format(ds.format);
1988            let vk_layout = if ds.is_read_only(desc.primitive.cull_mode) {
1989                vk::ImageLayout::DEPTH_STENCIL_READ_ONLY_OPTIMAL
1990            } else {
1991                vk::ImageLayout::DEPTH_STENCIL_ATTACHMENT_OPTIMAL
1992            };
1993            compatible_rp_key.depth_stencil = Some(super::DepthStencilAttachmentKey {
1994                base: super::AttachmentKey::compatible(vk_format, vk_layout),
1995                stencil_ops: crate::AttachmentOps::all(),
1996            });
1997
1998            if ds.is_depth_enabled() {
1999                vk_depth_stencil = vk_depth_stencil
2000                    .depth_test_enable(true)
2001                    .depth_write_enable(ds.depth_write_enabled)
2002                    .depth_compare_op(conv::map_comparison(ds.depth_compare));
2003            }
2004            if ds.stencil.is_enabled() {
2005                let s = &ds.stencil;
2006                let front = conv::map_stencil_face(&s.front, s.read_mask, s.write_mask);
2007                let back = conv::map_stencil_face(&s.back, s.read_mask, s.write_mask);
2008                vk_depth_stencil = vk_depth_stencil
2009                    .stencil_test_enable(true)
2010                    .front(front)
2011                    .back(back);
2012            }
2013
2014            if ds.bias.is_enabled() {
2015                vk_rasterization = vk_rasterization
2016                    .depth_bias_enable(true)
2017                    .depth_bias_constant_factor(ds.bias.constant as f32)
2018                    .depth_bias_clamp(ds.bias.clamp)
2019                    .depth_bias_slope_factor(ds.bias.slope_scale);
2020            }
2021        }
2022
2023        let vk_viewport = vk::PipelineViewportStateCreateInfo::default()
2024            .flags(vk::PipelineViewportStateCreateFlags::empty())
2025            .scissor_count(1)
2026            .viewport_count(1);
2027
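        // Vulkan's pSampleMask is an array of 32-bit words; split wgpu's 64-bit sample
        // mask into its low and high halves.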
2028        let vk_sample_mask = [
2029            desc.multisample.mask as u32,
2030            (desc.multisample.mask >> 32) as u32,
2031        ];
2032        let vk_multisample = vk::PipelineMultisampleStateCreateInfo::default()
2033            .rasterization_samples(vk::SampleCountFlags::from_raw(desc.multisample.count))
2034            .alpha_to_coverage_enable(desc.multisample.alpha_to_coverage_enabled)
2035            .sample_mask(&vk_sample_mask);
2036
2037        let mut vk_attachments = Vec::with_capacity(desc.color_targets.len());
2038        for cat in desc.color_targets {
2039            let (key, attachment) = if let Some(cat) = cat.as_ref() {
2040                let mut vk_attachment = vk::PipelineColorBlendAttachmentState::default()
2041                    .color_write_mask(vk::ColorComponentFlags::from_raw(cat.write_mask.bits()));
2042                if let Some(ref blend) = cat.blend {
2043                    let (color_op, color_src, color_dst) = conv::map_blend_component(&blend.color);
2044                    let (alpha_op, alpha_src, alpha_dst) = conv::map_blend_component(&blend.alpha);
2045                    vk_attachment = vk_attachment
2046                        .blend_enable(true)
2047                        .color_blend_op(color_op)
2048                        .src_color_blend_factor(color_src)
2049                        .dst_color_blend_factor(color_dst)
2050                        .alpha_blend_op(alpha_op)
2051                        .src_alpha_blend_factor(alpha_src)
2052                        .dst_alpha_blend_factor(alpha_dst);
2053                }
2054
2055                let vk_format = self.shared.private_caps.map_texture_format(cat.format);
2056                (
2057                    Some(super::ColorAttachmentKey {
2058                        base: super::AttachmentKey::compatible(
2059                            vk_format,
2060                            vk::ImageLayout::COLOR_ATTACHMENT_OPTIMAL,
2061                        ),
2062                        resolve: None,
2063                    }),
2064                    vk_attachment,
2065                )
2066            } else {
2067                (None, vk::PipelineColorBlendAttachmentState::default())
2068            };
2069
2070            compatible_rp_key.colors.push(key);
2071            vk_attachments.push(attachment);
2072        }
2073
2074        let vk_color_blend =
2075            vk::PipelineColorBlendStateCreateInfo::default().attachments(&vk_attachments);
2076
2077        let vk_dynamic_state =
2078            vk::PipelineDynamicStateCreateInfo::default().dynamic_states(&dynamic_states);
2079
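        // Graphics pipelines only need a *compatible* render pass, so fetch (or lazily
        // create) one from the shared cache keyed by attachment formats and layouts.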
2080        let raw_pass = self
2081            .shared
2082            .make_render_pass(compatible_rp_key)
2083            .map_err(crate::DeviceError::from)?;
2084
2085        let vk_infos = [{
2086            vk::GraphicsPipelineCreateInfo::default()
2087                .layout(desc.layout.raw)
2088                .stages(&stages)
2089                .vertex_input_state(&vk_vertex_input)
2090                .input_assembly_state(&vk_input_assembly)
2091                .rasterization_state(&vk_rasterization)
2092                .viewport_state(&vk_viewport)
2093                .multisample_state(&vk_multisample)
2094                .depth_stencil_state(&vk_depth_stencil)
2095                .color_blend_state(&vk_color_blend)
2096                .dynamic_state(&vk_dynamic_state)
2097                .render_pass(raw_pass)
2098        }];
2099
2100        let pipeline_cache = desc
2101            .cache
2102            .map(|it| it.raw)
2103            .unwrap_or(vk::PipelineCache::null());
2104
2105        let mut raw_vec = {
2106            profiling::scope!("vkCreateGraphicsPipelines");
2107            unsafe {
2108                self.shared
2109                    .raw
2110                    .create_graphics_pipelines(pipeline_cache, &vk_infos, None)
2111                    .map_err(|(_, e)| super::map_pipeline_err(e))
2112            }?
2113        };
2114
2115        let raw = raw_vec.pop().unwrap();
2116        if let Some(label) = desc.label {
2117            unsafe { self.shared.set_object_name(raw, label) };
2118        }
2119
2120        if let Some(raw_module) = compiled_vs.temp_raw_module {
2121            unsafe { self.shared.raw.destroy_shader_module(raw_module, None) };
2122        }
2123        if let Some(CompiledStage {
2124            temp_raw_module: Some(raw_module),
2125            ..
2126        }) = compiled_fs
2127        {
2128            unsafe { self.shared.raw.destroy_shader_module(raw_module, None) };
2129        }
2130
2131        self.counters.render_pipelines.add(1);
2132
2133        Ok(super::RenderPipeline { raw })
2134    }
2135
2136    unsafe fn destroy_render_pipeline(&self, pipeline: super::RenderPipeline) {
2137        unsafe { self.shared.raw.destroy_pipeline(pipeline.raw, None) };
2138
2139        self.counters.render_pipelines.sub(1);
2140    }
2141
2142    unsafe fn create_compute_pipeline(
2143        &self,
2144        desc: &crate::ComputePipelineDescriptor<
2145            super::PipelineLayout,
2146            super::ShaderModule,
2147            super::PipelineCache,
2148        >,
2149    ) -> Result<super::ComputePipeline, crate::PipelineError> {
2150        let compiled = self.compile_stage(
2151            &desc.stage,
2152            naga::ShaderStage::Compute,
2153            &desc.layout.binding_arrays,
2154        )?;
2155
2156        let vk_infos = [{
2157            vk::ComputePipelineCreateInfo::default()
2158                .layout(desc.layout.raw)
2159                .stage(compiled.create_info)
2160        }];
2161
2162        let pipeline_cache = desc
2163            .cache
2164            .map(|it| it.raw)
2165            .unwrap_or(vk::PipelineCache::null());
2166
2167        let mut raw_vec = {
2168            profiling::scope!("vkCreateComputePipelines");
2169            unsafe {
2170                self.shared
2171                    .raw
2172                    .create_compute_pipelines(pipeline_cache, &vk_infos, None)
2173                    .map_err(|(_, e)| super::map_pipeline_err(e))
2174            }?
2175        };
2176
2177        let raw = raw_vec.pop().unwrap();
2178        if let Some(label) = desc.label {
2179            unsafe { self.shared.set_object_name(raw, label) };
2180        }
2181
2182        if let Some(raw_module) = compiled.temp_raw_module {
2183            unsafe { self.shared.raw.destroy_shader_module(raw_module, None) };
2184        }
2185
2186        self.counters.compute_pipelines.add(1);
2187
2188        Ok(super::ComputePipeline { raw })
2189    }
2190
2191    unsafe fn destroy_compute_pipeline(&self, pipeline: super::ComputePipeline) {
2192        unsafe { self.shared.raw.destroy_pipeline(pipeline.raw, None) };
2193
2194        self.counters.compute_pipelines.sub(1);
2195    }
2196
2197    unsafe fn create_pipeline_cache(
2198        &self,
2199        desc: &crate::PipelineCacheDescriptor<'_>,
2200    ) -> Result<super::PipelineCache, crate::PipelineCacheError> {
2201        let mut info = vk::PipelineCacheCreateInfo::default();
2202        if let Some(data) = desc.data {
2203            info = info.initial_data(data)
2204        }
2205        profiling::scope!("vkCreatePipelineCache");
2206        let raw = unsafe { self.shared.raw.create_pipeline_cache(&info, None) }
2207            .map_err(super::map_host_device_oom_err)?;
2208
2209        Ok(super::PipelineCache { raw })
2210    }
2211    fn pipeline_cache_validation_key(&self) -> Option<[u8; 16]> {
2212        Some(self.shared.pipeline_cache_validation_key)
2213    }
2214    unsafe fn destroy_pipeline_cache(&self, cache: super::PipelineCache) {
2215        unsafe { self.shared.raw.destroy_pipeline_cache(cache.raw, None) }
2216    }
2217    unsafe fn create_query_set(
2218        &self,
2219        desc: &wgt::QuerySetDescriptor<crate::Label>,
2220    ) -> Result<super::QuerySet, crate::DeviceError> {
2221        let (vk_type, pipeline_statistics) = match desc.ty {
2222            wgt::QueryType::Occlusion => (
2223                vk::QueryType::OCCLUSION,
2224                vk::QueryPipelineStatisticFlags::empty(),
2225            ),
2226            wgt::QueryType::PipelineStatistics(statistics) => (
2227                vk::QueryType::PIPELINE_STATISTICS,
2228                conv::map_pipeline_statistics(statistics),
2229            ),
2230            wgt::QueryType::Timestamp => (
2231                vk::QueryType::TIMESTAMP,
2232                vk::QueryPipelineStatisticFlags::empty(),
2233            ),
2234        };
2235
2236        let vk_info = vk::QueryPoolCreateInfo::default()
2237            .query_type(vk_type)
2238            .query_count(desc.count)
2239            .pipeline_statistics(pipeline_statistics);
2240
2241        let raw = unsafe { self.shared.raw.create_query_pool(&vk_info, None) }
2242            .map_err(super::map_host_device_oom_err)?;
2243        if let Some(label) = desc.label {
2244            unsafe { self.shared.set_object_name(raw, label) };
2245        }
2246
2247        self.counters.query_sets.add(1);
2248
2249        Ok(super::QuerySet { raw })
2250    }
2251
2252    unsafe fn destroy_query_set(&self, set: super::QuerySet) {
2253        unsafe { self.shared.raw.destroy_query_pool(set.raw, None) };
2254
2255        self.counters.query_sets.sub(1);
2256    }
2257
2258    unsafe fn create_fence(&self) -> Result<super::Fence, crate::DeviceError> {
2259        self.counters.fences.add(1);
2260
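        // Prefer a timeline semaphore when available; otherwise emulate a monotonically
        // increasing fence value with a pool of binary VkFences, pairing each submitted
        // value with its own fence.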
2261        Ok(if self.shared.private_caps.timeline_semaphores {
2262            let mut sem_type_info =
2263                vk::SemaphoreTypeCreateInfo::default().semaphore_type(vk::SemaphoreType::TIMELINE);
2264            let vk_info = vk::SemaphoreCreateInfo::default().push_next(&mut sem_type_info);
2265            let raw = unsafe { self.shared.raw.create_semaphore(&vk_info, None) }
2266                .map_err(super::map_host_device_oom_err)?;
2267
2268            super::Fence::TimelineSemaphore(raw)
2269        } else {
2270            super::Fence::FencePool {
2271                last_completed: 0,
2272                active: Vec::new(),
2273                free: Vec::new(),
2274            }
2275        })
2276    }
2277    unsafe fn destroy_fence(&self, fence: super::Fence) {
2278        match fence {
2279            super::Fence::TimelineSemaphore(raw) => {
2280                unsafe { self.shared.raw.destroy_semaphore(raw, None) };
2281            }
2282            super::Fence::FencePool {
2283                active,
2284                free,
2285                last_completed: _,
2286            } => {
2287                for (_, raw) in active {
2288                    unsafe { self.shared.raw.destroy_fence(raw, None) };
2289                }
2290                for raw in free {
2291                    unsafe { self.shared.raw.destroy_fence(raw, None) };
2292                }
2293            }
2294        }
2295
2296        self.counters.fences.sub(1);
2297    }
2298    unsafe fn get_fence_value(
2299        &self,
2300        fence: &super::Fence,
2301    ) -> Result<crate::FenceValue, crate::DeviceError> {
2302        fence.get_latest(
2303            &self.shared.raw,
2304            self.shared.extension_fns.timeline_semaphore.as_ref(),
2305        )
2306    }
2307    unsafe fn wait(
2308        &self,
2309        fence: &super::Fence,
2310        wait_value: crate::FenceValue,
2311        timeout_ms: u32,
2312    ) -> Result<bool, crate::DeviceError> {
2313        let timeout_ns = timeout_ms as u64 * super::MILLIS_TO_NANOS;
2314        self.shared.wait_for_fence(fence, wait_value, timeout_ns)
2315    }
2316
2317    unsafe fn start_capture(&self) -> bool {
2318        #[cfg(feature = "renderdoc")]
2319        {
2320            // RenderDoc requires us to hand it the pointer that the vkInstance handle _points to_.
2321            let raw_vk_instance =
2322                vk::Handle::as_raw(self.shared.instance.raw.handle()) as *mut *mut _;
2323            let raw_vk_instance_dispatch_table = unsafe { *raw_vk_instance };
2324            unsafe {
2325                self.render_doc
2326                    .start_frame_capture(raw_vk_instance_dispatch_table, ptr::null_mut())
2327            }
2328        }
2329        #[cfg(not(feature = "renderdoc"))]
2330        false
2331    }
2332    unsafe fn stop_capture(&self) {
2333        #[cfg(feature = "renderdoc")]
2334        {
2335            // RenderDoc requires us to hand it the pointer that the vkInstance handle _points to_.
2336            let raw_vk_instance =
2337                vk::Handle::as_raw(self.shared.instance.raw.handle()) as *mut *mut _;
2338            let raw_vk_instance_dispatch_table = unsafe { *raw_vk_instance };
2339
2340            unsafe {
2341                self.render_doc
2342                    .end_frame_capture(raw_vk_instance_dispatch_table, ptr::null_mut())
2343            }
2344        }
2345    }
2346
2347    unsafe fn pipeline_cache_get_data(&self, cache: &super::PipelineCache) -> Option<Vec<u8>> {
2348        let data = unsafe { self.raw_device().get_pipeline_cache_data(cache.raw) };
2349        data.ok()
2350    }
2351
2352    unsafe fn get_acceleration_structure_build_sizes<'a>(
2353        &self,
2354        desc: &crate::GetAccelerationStructureBuildSizesDescriptor<'a, super::Buffer>,
2355    ) -> crate::AccelerationStructureBuildSizes {
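        // Inline capacity for the SmallVecs below; geometry lists longer than this
        // spill to the heap.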
2356        const CAPACITY: usize = 8;
2357
2358        let ray_tracing_functions = self
2359            .shared
2360            .extension_fns
2361            .ray_tracing
2362            .as_ref()
2363            .expect("Feature `RAY_TRACING` not enabled");
2364
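        // For a size query, Vulkan only inspects geometry types, flags, and maximum
        // primitive counts; the buffer device addresses may be left unset.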
2365        let (geometries, primitive_counts) = match *desc.entries {
2366            crate::AccelerationStructureEntries::Instances(ref instances) => {
2367                let instance_data = vk::AccelerationStructureGeometryInstancesDataKHR::default();
2368
2369                let geometry = vk::AccelerationStructureGeometryKHR::default()
2370                    .geometry_type(vk::GeometryTypeKHR::INSTANCES)
2371                    .geometry(vk::AccelerationStructureGeometryDataKHR {
2372                        instances: instance_data,
2373                    });
2374
2375                (
2376                    smallvec::smallvec![geometry],
2377                    smallvec::smallvec![instances.count],
2378                )
2379            }
2380            crate::AccelerationStructureEntries::Triangles(ref in_geometries) => {
2381                let mut primitive_counts =
2382                    smallvec::SmallVec::<[u32; CAPACITY]>::with_capacity(in_geometries.len());
2383                let mut geometries = smallvec::SmallVec::<
2384                    [vk::AccelerationStructureGeometryKHR; CAPACITY],
2385                >::with_capacity(in_geometries.len());
2386
2387                for triangles in in_geometries {
2388                    let mut triangle_data =
2389                        vk::AccelerationStructureGeometryTrianglesDataKHR::default()
2390                            .vertex_format(conv::map_vertex_format(triangles.vertex_format))
2391                            .max_vertex(triangles.vertex_count)
2392                            .vertex_stride(triangles.vertex_stride);
2393
2394                    let primitive_count = if let Some(ref indices) = triangles.indices {
2395                        triangle_data =
2396                            triangle_data.index_type(conv::map_index_format(indices.format));
2397                        indices.count / 3
2398                    } else {
2399                        triangles.vertex_count
2400                    };
2401
2402                    let geometry = vk::AccelerationStructureGeometryKHR::default()
2403                        .geometry_type(vk::GeometryTypeKHR::TRIANGLES)
2404                        .geometry(vk::AccelerationStructureGeometryDataKHR {
2405                            triangles: triangle_data,
2406                        })
2407                        .flags(conv::map_acceleration_structure_geometry_flags(
2408                            triangles.flags,
2409                        ));
2410
2411                    geometries.push(geometry);
2412                    primitive_counts.push(primitive_count);
2413                }
2414                (geometries, primitive_counts)
2415            }
2416            crate::AccelerationStructureEntries::AABBs(ref in_geometries) => {
2417                let mut primitive_counts =
2418                    smallvec::SmallVec::<[u32; CAPACITY]>::with_capacity(in_geometries.len());
2419                let mut geometries = smallvec::SmallVec::<
2420                    [vk::AccelerationStructureGeometryKHR; CAPACITY],
2421                >::with_capacity(in_geometries.len());
2422                for aabb in in_geometries {
2423                    let aabbs_data = vk::AccelerationStructureGeometryAabbsDataKHR::default()
2424                        .stride(aabb.stride);
2425
2426                    let geometry = vk::AccelerationStructureGeometryKHR::default()
2427                        .geometry_type(vk::GeometryTypeKHR::AABBS)
2428                        .geometry(vk::AccelerationStructureGeometryDataKHR { aabbs: aabbs_data })
2429                        .flags(conv::map_acceleration_structure_geometry_flags(aabb.flags));
2430
2431                    geometries.push(geometry);
2432                    primitive_counts.push(aabb.count);
2433                }
2434                (geometries, primitive_counts)
2435            }
2436        };
2437
2438        let ty = match *desc.entries {
2439            crate::AccelerationStructureEntries::Instances(_) => {
2440                vk::AccelerationStructureTypeKHR::TOP_LEVEL
2441            }
2442            _ => vk::AccelerationStructureTypeKHR::BOTTOM_LEVEL,
2443        };
2444
2445        let geometry_info = vk::AccelerationStructureBuildGeometryInfoKHR::default()
2446            .ty(ty)
2447            .flags(conv::map_acceleration_structure_flags(desc.flags))
2448            .geometries(&geometries);
2449
2450        let mut raw = Default::default();
2451        unsafe {
2452            ray_tracing_functions
2453                .acceleration_structure
2454                .get_acceleration_structure_build_sizes(
2455                    vk::AccelerationStructureBuildTypeKHR::DEVICE,
2456                    &geometry_info,
2457                    &primitive_counts,
2458                    &mut raw,
2459                )
2460        }
2461
2462        crate::AccelerationStructureBuildSizes {
2463            acceleration_structure_size: raw.acceleration_structure_size,
2464            update_scratch_size: raw.update_scratch_size,
2465            build_scratch_size: raw.build_scratch_size,
2466        }
2467    }
2468
2469    unsafe fn get_acceleration_structure_device_address(
2470        &self,
2471        acceleration_structure: &super::AccelerationStructure,
2472    ) -> wgt::BufferAddress {
2473        let ray_tracing_functions = self
2474            .shared
2475            .extension_fns
2476            .ray_tracing
2477            .as_ref()
2478            .expect("Feature `RAY_TRACING` not enabled");
2479
2480        unsafe {
2481            ray_tracing_functions
2482                .acceleration_structure
2483                .get_acceleration_structure_device_address(
2484                    &vk::AccelerationStructureDeviceAddressInfoKHR::default()
2485                        .acceleration_structure(acceleration_structure.raw),
2486                )
2487        }
2488    }
2489
2490    unsafe fn create_acceleration_structure(
2491        &self,
2492        desc: &crate::AccelerationStructureDescriptor,
2493    ) -> Result<super::AccelerationStructure, crate::DeviceError> {
2494        let ray_tracing_functions = self
2495            .shared
2496            .extension_fns
2497            .ray_tracing
2498            .as_ref()
2499            .expect("Feature `RAY_TRACING` not enabled");
2500
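        // An acceleration structure is backed by an ordinary VkBuffer with
        // ACCELERATION_STRUCTURE_STORAGE usage: create the buffer, bind device-local
        // memory to it (via the FAST_DEVICE_ACCESS hint), then create the acceleration
        // structure on top of it.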
2501        let vk_buffer_info = vk::BufferCreateInfo::default()
2502            .size(desc.size)
2503            .usage(
2504                vk::BufferUsageFlags::ACCELERATION_STRUCTURE_STORAGE_KHR
2505                    | vk::BufferUsageFlags::SHADER_DEVICE_ADDRESS,
2506            )
2507            .sharing_mode(vk::SharingMode::EXCLUSIVE);
2508
2509        unsafe {
2510            let raw_buffer = self
2511                .shared
2512                .raw
2513                .create_buffer(&vk_buffer_info, None)
2514                .map_err(super::map_host_device_oom_and_ioca_err)?;
2515            let req = self.shared.raw.get_buffer_memory_requirements(raw_buffer);
2516
2517            let block = self.mem_allocator.lock().alloc(
2518                &*self.shared,
2519                gpu_alloc::Request {
2520                    size: req.size,
2521                    align_mask: req.alignment - 1,
2522                    usage: gpu_alloc::UsageFlags::FAST_DEVICE_ACCESS,
2523                    memory_types: req.memory_type_bits & self.valid_ash_memory_types,
2524                },
2525            )?;
2526
2527            self.shared
2528                .raw
2529                .bind_buffer_memory(raw_buffer, *block.memory(), block.offset())
2530                .map_err(super::map_host_device_oom_and_ioca_err)?;
2531
2532            if let Some(label) = desc.label {
2533                self.shared.set_object_name(raw_buffer, label);
2534            }
2535
2536            let vk_info = vk::AccelerationStructureCreateInfoKHR::default()
2537                .buffer(raw_buffer)
2538                .offset(0)
2539                .size(desc.size)
2540                .ty(conv::map_acceleration_structure_format(desc.format));
2541
2542            let raw_acceleration_structure = ray_tracing_functions
2543                .acceleration_structure
2544                .create_acceleration_structure(&vk_info, None)
2545                .map_err(super::map_host_oom_and_ioca_err)?;
2546
2547            if let Some(label) = desc.label {
2548                self.shared
2549                    .set_object_name(raw_acceleration_structure, label);
2550            }
2551
2552            Ok(super::AccelerationStructure {
2553                raw: raw_acceleration_structure,
2554                buffer: raw_buffer,
2555                block: Mutex::new(block),
2556            })
2557        }
2558    }
2559
2560    unsafe fn destroy_acceleration_structure(
2561        &self,
2562        acceleration_structure: super::AccelerationStructure,
2563    ) {
2564        let ray_tracing_functions = self
2565            .shared
2566            .extension_fns
2567            .ray_tracing
2568            .as_ref()
2569            .expect("Feature `RAY_TRACING` not enabled");
2570
2571        unsafe {
2572            ray_tracing_functions
2573                .acceleration_structure
2574                .destroy_acceleration_structure(acceleration_structure.raw, None);
2575            self.shared
2576                .raw
2577                .destroy_buffer(acceleration_structure.buffer, None);
2578            self.mem_allocator
2579                .lock()
2580                .dealloc(&*self.shared, acceleration_structure.block.into_inner());
2581        }
2582    }
2583
2584    fn get_internal_counters(&self) -> wgt::HalCounters {
2585        self.counters
2586            .memory_allocations
2587            .set(self.shared.memory_allocations_counter.read());
2588
2589        self.counters.clone()
2590    }
2591}
2592
2593impl super::DeviceShared {
2594    pub(super) fn new_binary_semaphore(&self) -> Result<vk::Semaphore, crate::DeviceError> {
2595        unsafe {
2596            self.raw
2597                .create_semaphore(&vk::SemaphoreCreateInfo::default(), None)
2598                .map_err(super::map_host_device_oom_err)
2599        }
2600    }
2601
2602    pub(super) fn wait_for_fence(
2603        &self,
2604        fence: &super::Fence,
2605        wait_value: crate::FenceValue,
2606        timeout_ns: u64,
2607    ) -> Result<bool, crate::DeviceError> {
2608        profiling::scope!("Device::wait");
2609        match *fence {
2610            super::Fence::TimelineSemaphore(raw) => {
2611                let semaphores = [raw];
2612                let values = [wait_value];
2613                let vk_info = vk::SemaphoreWaitInfo::default()
2614                    .semaphores(&semaphores)
2615                    .values(&values);
2616                let result = match self.extension_fns.timeline_semaphore {
2617                    Some(super::ExtensionFn::Extension(ref ext)) => unsafe {
2618                        ext.wait_semaphores(&vk_info, timeout_ns)
2619                    },
2620                    Some(super::ExtensionFn::Promoted) => unsafe {
2621                        self.raw.wait_semaphores(&vk_info, timeout_ns)
2622                    },
2623                    None => unreachable!(),
2624                };
2625                match result {
2626                    Ok(()) => Ok(true),
2627                    Err(vk::Result::TIMEOUT) => Ok(false),
2628                    Err(other) => Err(super::map_host_device_oom_and_lost_err(other)),
2629                }
2630            }
2631            super::Fence::FencePool {
2632                last_completed,
2633                ref active,
2634                free: _,
2635            } => {
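                // Find any active fence whose signal value reaches the requested value;
                // once it is signaled, `wait_value` is known to have completed.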
2636                if wait_value <= last_completed {
2637                    Ok(true)
2638                } else {
2639                    match active.iter().find(|&&(value, _)| value >= wait_value) {
2640                        Some(&(_, raw)) => {
2641                            match unsafe { self.raw.wait_for_fences(&[raw], true, timeout_ns) } {
2642                                Ok(()) => Ok(true),
2643                                Err(vk::Result::TIMEOUT) => Ok(false),
2644                                Err(other) => Err(super::map_host_device_oom_and_lost_err(other)),
2645                            }
2646                        }
2647                        None => {
2648                            crate::hal_usage_error(format!(
2649                                "no signals reached value {wait_value}"
2650                            ));
2651                        }
2652                    }
2653                }
2654            }
2655        }
2656    }
2657}
2658
2659impl From<gpu_alloc::AllocationError> for crate::DeviceError {
2660    fn from(error: gpu_alloc::AllocationError) -> Self {
2661        use gpu_alloc::AllocationError as Ae;
2662        match error {
2663            Ae::OutOfDeviceMemory | Ae::OutOfHostMemory | Ae::TooManyObjects => Self::OutOfMemory,
2664            Ae::NoCompatibleMemoryTypes => crate::hal_usage_error(error),
2665        }
2666    }
2667}
2668impl From<gpu_alloc::MapError> for crate::DeviceError {
2669    fn from(error: gpu_alloc::MapError) -> Self {
2670        use gpu_alloc::MapError as Me;
2671        match error {
2672            Me::OutOfDeviceMemory | Me::OutOfHostMemory | Me::MapFailed => Self::OutOfMemory,
2673            Me::NonHostVisible | Me::AlreadyMapped => crate::hal_usage_error(error),
2674        }
2675    }
2676}
2677impl From<gpu_descriptor::AllocationError> for crate::DeviceError {
2678    fn from(error: gpu_descriptor::AllocationError) -> Self {
2679        use gpu_descriptor::AllocationError as Ae;
2680        match error {
2681            Ae::OutOfDeviceMemory | Ae::OutOfHostMemory | Ae::Fragmentation => Self::OutOfMemory,
2682        }
2683    }
2684}
2685
2686/// We usually map unexpected Vulkan errors to the [`crate::DeviceError::Unexpected`]
2687/// variant to be more robust even in cases where the driver is not
2688/// complying with the spec.
2689///
2690/// However, we implement a few trait methods that don't have an equivalent
2691/// error variant. In those cases we use this function.
2692fn handle_unexpected(err: vk::Result) -> ! {
2693    panic!("Unexpected Vulkan error: `{err}`")
2694}
2695
2696struct ImageWithoutMemory {
2697    raw: vk::Image,
2698    requirements: vk::MemoryRequirements,
2699    copy_size: crate::CopyExtent,
2700    view_formats: Vec<wgt::TextureFormat>,
2701    raw_flags: vk::ImageCreateFlags,
2702}