1#![allow(clippy::trivially_copy_pass_by_ref)]
2use crate::prelude::*;
3use crate::vk;
4use crate::RawPtr;
5use alloc::vec::Vec;
6use core::ffi;
7use core::mem;
8use core::ptr;
9
/// A Vulkan logical device: the raw [`vk::Device`] handle bundled with the
/// device-level function-pointer tables for core Vulkan 1.0 through 1.3.
///
/// `Clone` copies the handle and the function-pointer tables only; all clones
/// refer to the same underlying Vulkan device.
#[derive(Clone)]
pub struct Device {
    // Raw handle passed as the first argument to most device-level calls.
    pub(crate) handle: vk::Device,

    // One loaded function-pointer table per core Vulkan minor version.
    pub(crate) device_fn_1_0: crate::DeviceFnV1_0,
    pub(crate) device_fn_1_1: crate::DeviceFnV1_1,
    pub(crate) device_fn_1_2: crate::DeviceFnV1_2,
    pub(crate) device_fn_1_3: crate::DeviceFnV1_3,
}
20
impl Device {
    /// Loads all device-level function pointers for `device` through
    /// `vkGetDeviceProcAddr` taken from `instance_fn`.
    ///
    /// # Safety
    /// `device` must be a valid [`vk::Device`] belonging to the instance that
    /// `instance_fn` was loaded from.
    pub unsafe fn load(instance_fn: &crate::InstanceFnV1_0, device: vk::Device) -> Self {
        Self::load_with(
            // vkGetDeviceProcAddr returns a generic PFN_vkVoidFunction; each
            // table's `load` expects its concrete fn-pointer type, hence the
            // transmute of the returned pointer.
            |name| mem::transmute((instance_fn.get_device_proc_addr)(device, name.as_ptr())),
            device,
        )
    }

    /// Loads all device-level function pointers using a caller-supplied
    /// loader closure (name -> raw function pointer).
    ///
    /// # Safety
    /// `load_fn` must return pointers that are valid function pointers of the
    /// type matching the queried name (or null), for the given `device`.
    pub unsafe fn load_with(
        mut load_fn: impl FnMut(&ffi::CStr) -> *const ffi::c_void,
        device: vk::Device,
    ) -> Self {
        Self::from_parts_1_3(
            device,
            crate::DeviceFnV1_0::load(&mut load_fn),
            crate::DeviceFnV1_1::load(&mut load_fn),
            crate::DeviceFnV1_2::load(&mut load_fn),
            crate::DeviceFnV1_3::load(&mut load_fn),
        )
    }

    /// Assembles a [`Device`] from a raw handle and pre-loaded function tables
    /// for Vulkan 1.0 through 1.3.
    #[inline]
    pub fn from_parts_1_3(
        handle: vk::Device,
        device_fn_1_0: crate::DeviceFnV1_0,
        device_fn_1_1: crate::DeviceFnV1_1,
        device_fn_1_2: crate::DeviceFnV1_2,
        device_fn_1_3: crate::DeviceFnV1_3,
    ) -> Self {
        Self {
            handle,

            device_fn_1_0,
            device_fn_1_1,
            device_fn_1_2,
            device_fn_1_3,
        }
    }

    /// Returns the raw [`vk::Device`] handle this wrapper was created with.
    #[inline]
    pub fn handle(&self) -> vk::Device {
        self.handle
    }
}
65
/// Vulkan core 1.3
impl Device {
    /// Returns the raw Vulkan 1.3 function-pointer table.
    #[inline]
    pub fn fp_v1_3(&self) -> &crate::DeviceFnV1_3 {
        &self.device_fn_1_3
    }

    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCreatePrivateDataSlot.html>
    #[inline]
    pub unsafe fn create_private_data_slot(
        &self,
        create_info: &vk::PrivateDataSlotCreateInfo<'_>,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) -> VkResult<vk::PrivateDataSlot> {
        // Out-param is only read (via assume_init_on_success) when the call
        // returns SUCCESS, so it is never observed uninitialized.
        let mut private_data_slot = mem::MaybeUninit::uninit();
        (self.device_fn_1_3.create_private_data_slot)(
            self.handle,
            create_info,
            allocation_callbacks.as_raw_ptr(),
            private_data_slot.as_mut_ptr(),
        )
        .assume_init_on_success(private_data_slot)
    }

    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkDestroyPrivateDataSlot.html>
    #[inline]
    pub unsafe fn destroy_private_data_slot(
        &self,
        private_data_slot: vk::PrivateDataSlot,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) {
        (self.device_fn_1_3.destroy_private_data_slot)(
            self.handle,
            private_data_slot,
            allocation_callbacks.as_raw_ptr(),
        )
    }

    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkSetPrivateData.html>
    #[inline]
    pub unsafe fn set_private_data<T: vk::Handle>(
        &self,
        object: T,
        private_data_slot: vk::PrivateDataSlot,
        data: u64,
    ) -> VkResult<()> {
        (self.device_fn_1_3.set_private_data)(
            self.handle,
            T::TYPE,
            object.as_raw(),
            private_data_slot,
            data,
        )
        .result()
    }

    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkGetPrivateData.html>
    #[inline]
    pub unsafe fn get_private_data<T: vk::Handle>(
        &self,
        object: T,
        private_data_slot: vk::PrivateDataSlot,
    ) -> u64 {
        // vkGetPrivateData always writes the out-param (no VkResult), so
        // assume_init afterwards is sound.
        let mut data = mem::MaybeUninit::uninit();
        (self.device_fn_1_3.get_private_data)(
            self.handle,
            T::TYPE,
            object.as_raw(),
            private_data_slot,
            data.as_mut_ptr(),
        );
        data.assume_init()
    }

    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdPipelineBarrier2.html>
    #[inline]
    pub unsafe fn cmd_pipeline_barrier2(
        &self,
        command_buffer: vk::CommandBuffer,
        dependency_info: &vk::DependencyInfo<'_>,
    ) {
        (self.device_fn_1_3.cmd_pipeline_barrier2)(command_buffer, dependency_info)
    }

    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdResetEvent2.html>
    #[inline]
    pub unsafe fn cmd_reset_event2(
        &self,
        command_buffer: vk::CommandBuffer,
        event: vk::Event,
        stage_mask: vk::PipelineStageFlags2,
    ) {
        (self.device_fn_1_3.cmd_reset_event2)(command_buffer, event, stage_mask)
    }

    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdSetEvent2.html>
    #[inline]
    pub unsafe fn cmd_set_event2(
        &self,
        command_buffer: vk::CommandBuffer,
        event: vk::Event,
        dependency_info: &vk::DependencyInfo<'_>,
    ) {
        (self.device_fn_1_3.cmd_set_event2)(command_buffer, event, dependency_info)
    }

    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdWaitEvents2.html>
    ///
    /// `events` and `dependency_infos` are parallel slices and must have the
    /// same length (enforced by the assert below, since the C API takes a
    /// single count for both arrays).
    #[inline]
    pub unsafe fn cmd_wait_events2(
        &self,
        command_buffer: vk::CommandBuffer,
        events: &[vk::Event],
        dependency_infos: &[vk::DependencyInfo<'_>],
    ) {
        assert_eq!(events.len(), dependency_infos.len());
        (self.device_fn_1_3.cmd_wait_events2)(
            command_buffer,
            events.len() as u32,
            events.as_ptr(),
            dependency_infos.as_ptr(),
        )
    }

    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdWriteTimestamp2.html>
    #[inline]
    pub unsafe fn cmd_write_timestamp2(
        &self,
        command_buffer: vk::CommandBuffer,
        stage: vk::PipelineStageFlags2,
        query_pool: vk::QueryPool,
        query: u32,
    ) {
        (self.device_fn_1_3.cmd_write_timestamp2)(command_buffer, stage, query_pool, query)
    }

    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkQueueSubmit2.html>
    #[inline]
    pub unsafe fn queue_submit2(
        &self,
        queue: vk::Queue,
        submits: &[vk::SubmitInfo2<'_>],
        fence: vk::Fence,
    ) -> VkResult<()> {
        (self.device_fn_1_3.queue_submit2)(queue, submits.len() as u32, submits.as_ptr(), fence)
            .result()
    }

    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdCopyBuffer2.html>
    #[inline]
    pub unsafe fn cmd_copy_buffer2(
        &self,
        command_buffer: vk::CommandBuffer,
        copy_buffer_info: &vk::CopyBufferInfo2<'_>,
    ) {
        (self.device_fn_1_3.cmd_copy_buffer2)(command_buffer, copy_buffer_info)
    }
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdCopyImage2.html>
    #[inline]
    pub unsafe fn cmd_copy_image2(
        &self,
        command_buffer: vk::CommandBuffer,
        copy_image_info: &vk::CopyImageInfo2<'_>,
    ) {
        (self.device_fn_1_3.cmd_copy_image2)(command_buffer, copy_image_info)
    }
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdCopyBufferToImage2.html>
    #[inline]
    pub unsafe fn cmd_copy_buffer_to_image2(
        &self,
        command_buffer: vk::CommandBuffer,
        copy_buffer_to_image_info: &vk::CopyBufferToImageInfo2<'_>,
    ) {
        (self.device_fn_1_3.cmd_copy_buffer_to_image2)(command_buffer, copy_buffer_to_image_info)
    }
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdCopyImageToBuffer2.html>
    #[inline]
    pub unsafe fn cmd_copy_image_to_buffer2(
        &self,
        command_buffer: vk::CommandBuffer,
        copy_image_to_buffer_info: &vk::CopyImageToBufferInfo2<'_>,
    ) {
        (self.device_fn_1_3.cmd_copy_image_to_buffer2)(command_buffer, copy_image_to_buffer_info)
    }
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdBlitImage2.html>
    #[inline]
    pub unsafe fn cmd_blit_image2(
        &self,
        command_buffer: vk::CommandBuffer,
        blit_image_info: &vk::BlitImageInfo2<'_>,
    ) {
        (self.device_fn_1_3.cmd_blit_image2)(command_buffer, blit_image_info)
    }
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdResolveImage2.html>
    #[inline]
    pub unsafe fn cmd_resolve_image2(
        &self,
        command_buffer: vk::CommandBuffer,
        resolve_image_info: &vk::ResolveImageInfo2<'_>,
    ) {
        (self.device_fn_1_3.cmd_resolve_image2)(command_buffer, resolve_image_info)
    }

    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdBeginRendering.html>
    #[inline]
    pub unsafe fn cmd_begin_rendering(
        &self,
        command_buffer: vk::CommandBuffer,
        rendering_info: &vk::RenderingInfo<'_>,
    ) {
        (self.device_fn_1_3.cmd_begin_rendering)(command_buffer, rendering_info)
    }

    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdEndRendering.html>
    #[inline]
    pub unsafe fn cmd_end_rendering(&self, command_buffer: vk::CommandBuffer) {
        (self.device_fn_1_3.cmd_end_rendering)(command_buffer)
    }

    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdSetCullMode.html>
    #[inline]
    pub unsafe fn cmd_set_cull_mode(
        &self,
        command_buffer: vk::CommandBuffer,
        cull_mode: vk::CullModeFlags,
    ) {
        (self.device_fn_1_3.cmd_set_cull_mode)(command_buffer, cull_mode)
    }

    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdSetFrontFace.html>
    #[inline]
    pub unsafe fn cmd_set_front_face(
        &self,
        command_buffer: vk::CommandBuffer,
        front_face: vk::FrontFace,
    ) {
        (self.device_fn_1_3.cmd_set_front_face)(command_buffer, front_face)
    }

    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdSetPrimitiveTopology.html>
    #[inline]
    pub unsafe fn cmd_set_primitive_topology(
        &self,
        command_buffer: vk::CommandBuffer,
        primitive_topology: vk::PrimitiveTopology,
    ) {
        (self.device_fn_1_3.cmd_set_primitive_topology)(command_buffer, primitive_topology)
    }

    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdSetViewportWithCount.html>
    #[inline]
    pub unsafe fn cmd_set_viewport_with_count(
        &self,
        command_buffer: vk::CommandBuffer,
        viewports: &[vk::Viewport],
    ) {
        (self.device_fn_1_3.cmd_set_viewport_with_count)(
            command_buffer,
            viewports.len() as u32,
            viewports.as_ptr(),
        )
    }

    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdSetScissorWithCount.html>
    #[inline]
    pub unsafe fn cmd_set_scissor_with_count(
        &self,
        command_buffer: vk::CommandBuffer,
        scissors: &[vk::Rect2D],
    ) {
        (self.device_fn_1_3.cmd_set_scissor_with_count)(
            command_buffer,
            scissors.len() as u32,
            scissors.as_ptr(),
        )
    }

    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdBindVertexBuffers2.html>
    ///
    /// `offsets` must match `buffers` in length; `sizes`/`strides` are
    /// optional parallel arrays (None is forwarded as a null pointer, meaning
    /// "not provided" to Vulkan) and, when present, must also match.
    #[inline]
    pub unsafe fn cmd_bind_vertex_buffers2(
        &self,
        command_buffer: vk::CommandBuffer,
        first_binding: u32,
        buffers: &[vk::Buffer],
        offsets: &[vk::DeviceSize],
        sizes: Option<&[vk::DeviceSize]>,
        strides: Option<&[vk::DeviceSize]>,
    ) {
        assert_eq!(offsets.len(), buffers.len());
        let p_sizes = if let Some(sizes) = sizes {
            assert_eq!(sizes.len(), buffers.len());
            sizes.as_ptr()
        } else {
            ptr::null()
        };
        let p_strides = if let Some(strides) = strides {
            assert_eq!(strides.len(), buffers.len());
            strides.as_ptr()
        } else {
            ptr::null()
        };
        (self.device_fn_1_3.cmd_bind_vertex_buffers2)(
            command_buffer,
            first_binding,
            buffers.len() as u32,
            buffers.as_ptr(),
            offsets.as_ptr(),
            p_sizes,
            p_strides,
        )
    }

    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdSetDepthTestEnable.html>
    #[inline]
    pub unsafe fn cmd_set_depth_test_enable(
        &self,
        command_buffer: vk::CommandBuffer,
        depth_test_enable: bool,
    ) {
        (self.device_fn_1_3.cmd_set_depth_test_enable)(command_buffer, depth_test_enable.into())
    }

    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdSetDepthWriteEnable.html>
    #[inline]
    pub unsafe fn cmd_set_depth_write_enable(
        &self,
        command_buffer: vk::CommandBuffer,
        depth_write_enable: bool,
    ) {
        (self.device_fn_1_3.cmd_set_depth_write_enable)(command_buffer, depth_write_enable.into())
    }

    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdSetDepthCompareOp.html>
    #[inline]
    pub unsafe fn cmd_set_depth_compare_op(
        &self,
        command_buffer: vk::CommandBuffer,
        depth_compare_op: vk::CompareOp,
    ) {
        (self.device_fn_1_3.cmd_set_depth_compare_op)(command_buffer, depth_compare_op)
    }

    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdSetDepthBoundsTestEnable.html>
    #[inline]
    pub unsafe fn cmd_set_depth_bounds_test_enable(
        &self,
        command_buffer: vk::CommandBuffer,
        depth_bounds_test_enable: bool,
    ) {
        (self.device_fn_1_3.cmd_set_depth_bounds_test_enable)(
            command_buffer,
            depth_bounds_test_enable.into(),
        )
    }

    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdSetStencilTestEnable.html>
    #[inline]
    pub unsafe fn cmd_set_stencil_test_enable(
        &self,
        command_buffer: vk::CommandBuffer,
        stencil_test_enable: bool,
    ) {
        (self.device_fn_1_3.cmd_set_stencil_test_enable)(command_buffer, stencil_test_enable.into())
    }

    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdSetStencilOp.html>
    #[inline]
    pub unsafe fn cmd_set_stencil_op(
        &self,
        command_buffer: vk::CommandBuffer,
        face_mask: vk::StencilFaceFlags,
        fail_op: vk::StencilOp,
        pass_op: vk::StencilOp,
        depth_fail_op: vk::StencilOp,
        compare_op: vk::CompareOp,
    ) {
        (self.device_fn_1_3.cmd_set_stencil_op)(
            command_buffer,
            face_mask,
            fail_op,
            pass_op,
            depth_fail_op,
            compare_op,
        )
    }

    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdSetRasterizerDiscardEnable.html>
    #[inline]
    pub unsafe fn cmd_set_rasterizer_discard_enable(
        &self,
        command_buffer: vk::CommandBuffer,
        rasterizer_discard_enable: bool,
    ) {
        (self.device_fn_1_3.cmd_set_rasterizer_discard_enable)(
            command_buffer,
            rasterizer_discard_enable.into(),
        )
    }

    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdSetDepthBiasEnable.html>
    #[inline]
    pub unsafe fn cmd_set_depth_bias_enable(
        &self,
        command_buffer: vk::CommandBuffer,
        depth_bias_enable: bool,
    ) {
        (self.device_fn_1_3.cmd_set_depth_bias_enable)(command_buffer, depth_bias_enable.into())
    }

    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdSetPrimitiveRestartEnable.html>
    #[inline]
    pub unsafe fn cmd_set_primitive_restart_enable(
        &self,
        command_buffer: vk::CommandBuffer,
        primitive_restart_enable: bool,
    ) {
        (self.device_fn_1_3.cmd_set_primitive_restart_enable)(
            command_buffer,
            primitive_restart_enable.into(),
        )
    }

    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkGetDeviceBufferMemoryRequirements.html>
    #[inline]
    pub unsafe fn get_device_buffer_memory_requirements(
        &self,
        memory_requirements: &vk::DeviceBufferMemoryRequirements<'_>,
        out: &mut vk::MemoryRequirements2<'_>,
    ) {
        (self.device_fn_1_3.get_device_buffer_memory_requirements)(
            self.handle,
            memory_requirements,
            out,
        )
    }

    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkGetDeviceImageMemoryRequirements.html>
    #[inline]
    pub unsafe fn get_device_image_memory_requirements(
        &self,
        memory_requirements: &vk::DeviceImageMemoryRequirements<'_>,
        out: &mut vk::MemoryRequirements2<'_>,
    ) {
        (self.device_fn_1_3.get_device_image_memory_requirements)(
            self.handle,
            memory_requirements,
            out,
        )
    }

    /// Queries the number of elements the caller must allocate before calling
    /// [`get_device_image_sparse_memory_requirements()`][Self::get_device_image_sparse_memory_requirements()]
    /// (standard Vulkan two-call enumeration: null output pointer returns the count).
    #[inline]
    pub unsafe fn get_device_image_sparse_memory_requirements_len(
        &self,
        memory_requirements: &vk::DeviceImageMemoryRequirements<'_>,
    ) -> usize {
        let mut count = mem::MaybeUninit::uninit();
        (self
            .device_fn_1_3
            .get_device_image_sparse_memory_requirements)(
            self.handle,
            memory_requirements,
            count.as_mut_ptr(),
            ptr::null_mut(),
        );
        count.assume_init() as usize
    }

    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkGetDeviceImageSparseMemoryRequirements.html>
    ///
    /// `out` must be sized exactly as returned by
    /// [`get_device_image_sparse_memory_requirements_len()`][Self::get_device_image_sparse_memory_requirements_len()];
    /// the trailing assert catches a count that changed between the two calls.
    #[inline]
    pub unsafe fn get_device_image_sparse_memory_requirements(
        &self,
        memory_requirements: &vk::DeviceImageMemoryRequirements<'_>,
        out: &mut [vk::SparseImageMemoryRequirements2<'_>],
    ) {
        let mut count = out.len() as u32;
        (self
            .device_fn_1_3
            .get_device_image_sparse_memory_requirements)(
            self.handle,
            memory_requirements,
            &mut count,
            out.as_mut_ptr(),
        );
        assert_eq!(count as usize, out.len());
    }
}
555
/// Vulkan core 1.2
impl Device {
    /// Returns the raw Vulkan 1.2 function-pointer table.
    #[inline]
    pub fn fp_v1_2(&self) -> &crate::DeviceFnV1_2 {
        &self.device_fn_1_2
    }

    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdDrawIndirectCount.html>
    #[inline]
    pub unsafe fn cmd_draw_indirect_count(
        &self,
        command_buffer: vk::CommandBuffer,
        buffer: vk::Buffer,
        offset: vk::DeviceSize,
        count_buffer: vk::Buffer,
        count_buffer_offset: vk::DeviceSize,
        max_draw_count: u32,
        stride: u32,
    ) {
        (self.device_fn_1_2.cmd_draw_indirect_count)(
            command_buffer,
            buffer,
            offset,
            count_buffer,
            count_buffer_offset,
            max_draw_count,
            stride,
        );
    }

    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdDrawIndexedIndirectCount.html>
    #[inline]
    pub unsafe fn cmd_draw_indexed_indirect_count(
        &self,
        command_buffer: vk::CommandBuffer,
        buffer: vk::Buffer,
        offset: vk::DeviceSize,
        count_buffer: vk::Buffer,
        count_buffer_offset: vk::DeviceSize,
        max_draw_count: u32,
        stride: u32,
    ) {
        (self.device_fn_1_2.cmd_draw_indexed_indirect_count)(
            command_buffer,
            buffer,
            offset,
            count_buffer,
            count_buffer_offset,
            max_draw_count,
            stride,
        );
    }

    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCreateRenderPass2.html>
    #[inline]
    pub unsafe fn create_render_pass2(
        &self,
        create_info: &vk::RenderPassCreateInfo2<'_>,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) -> VkResult<vk::RenderPass> {
        // Out-param is only read when the call reports success.
        let mut renderpass = mem::MaybeUninit::uninit();
        (self.device_fn_1_2.create_render_pass2)(
            self.handle(),
            create_info,
            allocation_callbacks.as_raw_ptr(),
            renderpass.as_mut_ptr(),
        )
        .assume_init_on_success(renderpass)
    }

    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdBeginRenderPass2.html>
    #[inline]
    pub unsafe fn cmd_begin_render_pass2(
        &self,
        command_buffer: vk::CommandBuffer,
        render_pass_begin_info: &vk::RenderPassBeginInfo<'_>,
        subpass_begin_info: &vk::SubpassBeginInfo<'_>,
    ) {
        (self.device_fn_1_2.cmd_begin_render_pass2)(
            command_buffer,
            render_pass_begin_info,
            subpass_begin_info,
        );
    }

    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdNextSubpass2.html>
    #[inline]
    pub unsafe fn cmd_next_subpass2(
        &self,
        command_buffer: vk::CommandBuffer,
        subpass_begin_info: &vk::SubpassBeginInfo<'_>,
        subpass_end_info: &vk::SubpassEndInfo<'_>,
    ) {
        (self.device_fn_1_2.cmd_next_subpass2)(
            command_buffer,
            subpass_begin_info,
            subpass_end_info,
        );
    }

    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdEndRenderPass2.html>
    #[inline]
    pub unsafe fn cmd_end_render_pass2(
        &self,
        command_buffer: vk::CommandBuffer,
        subpass_end_info: &vk::SubpassEndInfo<'_>,
    ) {
        (self.device_fn_1_2.cmd_end_render_pass2)(command_buffer, subpass_end_info);
    }

    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkResetQueryPool.html>
    #[inline]
    pub unsafe fn reset_query_pool(
        &self,
        query_pool: vk::QueryPool,
        first_query: u32,
        query_count: u32,
    ) {
        (self.device_fn_1_2.reset_query_pool)(self.handle(), query_pool, first_query, query_count);
    }

    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkGetSemaphoreCounterValue.html>
    #[inline]
    pub unsafe fn get_semaphore_counter_value(&self, semaphore: vk::Semaphore) -> VkResult<u64> {
        let mut value = mem::MaybeUninit::uninit();
        (self.device_fn_1_2.get_semaphore_counter_value)(
            self.handle(),
            semaphore,
            value.as_mut_ptr(),
        )
        .assume_init_on_success(value)
    }

    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkWaitSemaphores.html>
    #[inline]
    pub unsafe fn wait_semaphores(
        &self,
        wait_info: &vk::SemaphoreWaitInfo<'_>,
        timeout: u64,
    ) -> VkResult<()> {
        (self.device_fn_1_2.wait_semaphores)(self.handle(), wait_info, timeout).result()
    }

    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkSignalSemaphore.html>
    #[inline]
    pub unsafe fn signal_semaphore(
        &self,
        signal_info: &vk::SemaphoreSignalInfo<'_>,
    ) -> VkResult<()> {
        (self.device_fn_1_2.signal_semaphore)(self.handle(), signal_info).result()
    }

    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkGetBufferDeviceAddress.html>
    #[inline]
    pub unsafe fn get_buffer_device_address(
        &self,
        info: &vk::BufferDeviceAddressInfo<'_>,
    ) -> vk::DeviceAddress {
        (self.device_fn_1_2.get_buffer_device_address)(self.handle(), info)
    }

    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkGetBufferOpaqueCaptureAddress.html>
    #[inline]
    pub unsafe fn get_buffer_opaque_capture_address(
        &self,
        info: &vk::BufferDeviceAddressInfo<'_>,
    ) -> u64 {
        (self.device_fn_1_2.get_buffer_opaque_capture_address)(self.handle(), info)
    }

    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkGetDeviceMemoryOpaqueCaptureAddress.html>
    #[inline]
    pub unsafe fn get_device_memory_opaque_capture_address(
        &self,
        info: &vk::DeviceMemoryOpaqueCaptureAddressInfo<'_>,
    ) -> u64 {
        (self.device_fn_1_2.get_device_memory_opaque_capture_address)(self.handle(), info)
    }
}
735
/// Vulkan core 1.1
impl Device {
    /// Returns the raw Vulkan 1.1 function-pointer table.
    #[inline]
    pub fn fp_v1_1(&self) -> &crate::DeviceFnV1_1 {
        &self.device_fn_1_1
    }

    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkBindBufferMemory2.html>
    #[inline]
    pub unsafe fn bind_buffer_memory2(
        &self,
        bind_infos: &[vk::BindBufferMemoryInfo<'_>],
    ) -> VkResult<()> {
        (self.device_fn_1_1.bind_buffer_memory2)(
            self.handle(),
            bind_infos.len() as _,
            bind_infos.as_ptr(),
        )
        .result()
    }

    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkBindImageMemory2.html>
    #[inline]
    pub unsafe fn bind_image_memory2(
        &self,
        bind_infos: &[vk::BindImageMemoryInfo<'_>],
    ) -> VkResult<()> {
        (self.device_fn_1_1.bind_image_memory2)(
            self.handle(),
            bind_infos.len() as _,
            bind_infos.as_ptr(),
        )
        .result()
    }

    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkGetDeviceGroupPeerMemoryFeatures.html>
    #[inline]
    pub unsafe fn get_device_group_peer_memory_features(
        &self,
        heap_index: u32,
        local_device_index: u32,
        remote_device_index: u32,
    ) -> vk::PeerMemoryFeatureFlags {
        // This entry point has no VkResult; it always writes the out-param,
        // so assume_init afterwards is sound.
        let mut peer_memory_features = mem::MaybeUninit::uninit();
        (self.device_fn_1_1.get_device_group_peer_memory_features)(
            self.handle(),
            heap_index,
            local_device_index,
            remote_device_index,
            peer_memory_features.as_mut_ptr(),
        );
        peer_memory_features.assume_init()
    }

    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdSetDeviceMask.html>
    #[inline]
    pub unsafe fn cmd_set_device_mask(&self, command_buffer: vk::CommandBuffer, device_mask: u32) {
        (self.device_fn_1_1.cmd_set_device_mask)(command_buffer, device_mask);
    }

    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdDispatchBase.html>
    #[inline]
    pub unsafe fn cmd_dispatch_base(
        &self,
        command_buffer: vk::CommandBuffer,
        base_group_x: u32,
        base_group_y: u32,
        base_group_z: u32,
        group_count_x: u32,
        group_count_y: u32,
        group_count_z: u32,
    ) {
        (self.device_fn_1_1.cmd_dispatch_base)(
            command_buffer,
            base_group_x,
            base_group_y,
            base_group_z,
            group_count_x,
            group_count_y,
            group_count_z,
        );
    }

    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkGetImageMemoryRequirements2.html>
    #[inline]
    pub unsafe fn get_image_memory_requirements2(
        &self,
        info: &vk::ImageMemoryRequirementsInfo2<'_>,
        out: &mut vk::MemoryRequirements2<'_>,
    ) {
        (self.device_fn_1_1.get_image_memory_requirements2)(self.handle(), info, out);
    }

    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkGetBufferMemoryRequirements2.html>
    #[inline]
    pub unsafe fn get_buffer_memory_requirements2(
        &self,
        info: &vk::BufferMemoryRequirementsInfo2<'_>,
        out: &mut vk::MemoryRequirements2<'_>,
    ) {
        (self.device_fn_1_1.get_buffer_memory_requirements2)(self.handle(), info, out);
    }

    /// Queries the number of elements the caller must allocate before calling
    /// [`get_image_sparse_memory_requirements2()`][Self::get_image_sparse_memory_requirements2()]
    /// (two-call enumeration: null output pointer returns the count).
    #[inline]
    pub unsafe fn get_image_sparse_memory_requirements2_len(
        &self,
        info: &vk::ImageSparseMemoryRequirementsInfo2<'_>,
    ) -> usize {
        let mut count = mem::MaybeUninit::uninit();
        (self.device_fn_1_1.get_image_sparse_memory_requirements2)(
            self.handle(),
            info,
            count.as_mut_ptr(),
            ptr::null_mut(),
        );
        count.assume_init() as usize
    }

    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkGetImageSparseMemoryRequirements2.html>
    ///
    /// `out` must be sized exactly as returned by
    /// [`get_image_sparse_memory_requirements2_len()`][Self::get_image_sparse_memory_requirements2_len()];
    /// the trailing assert catches a count that changed between the two calls.
    #[inline]
    pub unsafe fn get_image_sparse_memory_requirements2(
        &self,
        info: &vk::ImageSparseMemoryRequirementsInfo2<'_>,
        out: &mut [vk::SparseImageMemoryRequirements2<'_>],
    ) {
        let mut count = out.len() as u32;
        (self.device_fn_1_1.get_image_sparse_memory_requirements2)(
            self.handle(),
            info,
            &mut count,
            out.as_mut_ptr(),
        );
        assert_eq!(count as usize, out.len());
    }

    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkTrimCommandPool.html>
    #[inline]
    pub unsafe fn trim_command_pool(
        &self,
        command_pool: vk::CommandPool,
        flags: vk::CommandPoolTrimFlags,
    ) {
        (self.device_fn_1_1.trim_command_pool)(self.handle(), command_pool, flags);
    }

    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkGetDeviceQueue2.html>
    #[inline]
    pub unsafe fn get_device_queue2(&self, queue_info: &vk::DeviceQueueInfo2<'_>) -> vk::Queue {
        let mut queue = mem::MaybeUninit::uninit();
        (self.device_fn_1_1.get_device_queue2)(self.handle(), queue_info, queue.as_mut_ptr());
        queue.assume_init()
    }

    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCreateSamplerYcbcrConversion.html>
    #[inline]
    pub unsafe fn create_sampler_ycbcr_conversion(
        &self,
        create_info: &vk::SamplerYcbcrConversionCreateInfo<'_>,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) -> VkResult<vk::SamplerYcbcrConversion> {
        let mut ycbcr_conversion = mem::MaybeUninit::uninit();
        (self.device_fn_1_1.create_sampler_ycbcr_conversion)(
            self.handle(),
            create_info,
            allocation_callbacks.as_raw_ptr(),
            ycbcr_conversion.as_mut_ptr(),
        )
        .assume_init_on_success(ycbcr_conversion)
    }

    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkDestroySamplerYcbcrConversion.html>
    #[inline]
    pub unsafe fn destroy_sampler_ycbcr_conversion(
        &self,
        ycbcr_conversion: vk::SamplerYcbcrConversion,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) {
        (self.device_fn_1_1.destroy_sampler_ycbcr_conversion)(
            self.handle(),
            ycbcr_conversion,
            allocation_callbacks.as_raw_ptr(),
        );
    }

    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCreateDescriptorUpdateTemplate.html>
    #[inline]
    pub unsafe fn create_descriptor_update_template(
        &self,
        create_info: &vk::DescriptorUpdateTemplateCreateInfo<'_>,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) -> VkResult<vk::DescriptorUpdateTemplate> {
        let mut descriptor_update_template = mem::MaybeUninit::uninit();
        (self.device_fn_1_1.create_descriptor_update_template)(
            self.handle(),
            create_info,
            allocation_callbacks.as_raw_ptr(),
            descriptor_update_template.as_mut_ptr(),
        )
        .assume_init_on_success(descriptor_update_template)
    }

    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkDestroyDescriptorUpdateTemplate.html>
    #[inline]
    pub unsafe fn destroy_descriptor_update_template(
        &self,
        descriptor_update_template: vk::DescriptorUpdateTemplate,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) {
        (self.device_fn_1_1.destroy_descriptor_update_template)(
            self.handle(),
            descriptor_update_template,
            allocation_callbacks.as_raw_ptr(),
        );
    }

    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkUpdateDescriptorSetWithTemplate.html>
    ///
    /// # Safety
    /// `data` is a raw, untyped pointer forwarded directly to Vulkan; it must
    /// point to data laid out as described by `descriptor_update_template`.
    #[inline]
    pub unsafe fn update_descriptor_set_with_template(
        &self,
        descriptor_set: vk::DescriptorSet,
        descriptor_update_template: vk::DescriptorUpdateTemplate,
        data: *const ffi::c_void,
    ) {
        (self.device_fn_1_1.update_descriptor_set_with_template)(
            self.handle(),
            descriptor_set,
            descriptor_update_template,
            data,
        );
    }

    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkGetDescriptorSetLayoutSupport.html>
    #[inline]
    pub unsafe fn get_descriptor_set_layout_support(
        &self,
        create_info: &vk::DescriptorSetLayoutCreateInfo<'_>,
        out: &mut vk::DescriptorSetLayoutSupport<'_>,
    ) {
        (self.device_fn_1_1.get_descriptor_set_layout_support)(self.handle(), create_info, out);
    }
}
981
982impl Device {
    /// Returns the raw Vulkan 1.0 function-pointer table.
    #[inline]
    pub fn fp_v1_0(&self) -> &crate::DeviceFnV1_0 {
        &self.device_fn_1_0
    }
988
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkDestroyDevice.html>
    #[inline]
    pub unsafe fn destroy_device(
        &self,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) {
        (self.device_fn_1_0.destroy_device)(self.handle(), allocation_callbacks.as_raw_ptr());
    }
997
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkDestroySampler.html>
    #[inline]
    pub unsafe fn destroy_sampler(
        &self,
        sampler: vk::Sampler,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) {
        (self.device_fn_1_0.destroy_sampler)(
            self.handle(),
            sampler,
            allocation_callbacks.as_raw_ptr(),
        );
    }
1011
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkFreeMemory.html>
    #[inline]
    pub unsafe fn free_memory(
        &self,
        memory: vk::DeviceMemory,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) {
        (self.device_fn_1_0.free_memory)(self.handle(), memory, allocation_callbacks.as_raw_ptr());
    }
1021
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkFreeCommandBuffers.html>
    #[inline]
    pub unsafe fn free_command_buffers(
        &self,
        command_pool: vk::CommandPool,
        command_buffers: &[vk::CommandBuffer],
    ) {
        (self.device_fn_1_0.free_command_buffers)(
            self.handle(),
            command_pool,
            command_buffers.len() as u32,
            command_buffers.as_ptr(),
        );
    }
1036
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCreateEvent.html>
    #[inline]
    pub unsafe fn create_event(
        &self,
        create_info: &vk::EventCreateInfo<'_>,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) -> VkResult<vk::Event> {
        // Out-param is only read when the call reports success.
        let mut event = mem::MaybeUninit::uninit();
        (self.device_fn_1_0.create_event)(
            self.handle(),
            create_info,
            allocation_callbacks.as_raw_ptr(),
            event.as_mut_ptr(),
        )
        .assume_init_on_success(event)
    }
1053
    /// Returns `Ok(true)` if the event is signaled (`VK_EVENT_SET`),
    /// `Ok(false)` if it is unsignaled (`VK_EVENT_RESET`), and `Err` for any
    /// other result code.
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkGetEventStatus.html>
    #[inline]
    pub unsafe fn get_event_status(&self, event: vk::Event) -> VkResult<bool> {
        let err_code = (self.device_fn_1_0.get_event_status)(self.handle(), event);
        match err_code {
            vk::Result::EVENT_SET => Ok(true),
            vk::Result::EVENT_RESET => Ok(false),
            _ => Err(err_code),
        }
    }
1066
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkSetEvent.html>
    #[inline]
    pub unsafe fn set_event(&self, event: vk::Event) -> VkResult<()> {
        (self.device_fn_1_0.set_event)(self.handle(), event).result()
    }
1072
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkResetEvent.html>
    #[inline]
    pub unsafe fn reset_event(&self, event: vk::Event) -> VkResult<()> {
        (self.device_fn_1_0.reset_event)(self.handle(), event).result()
    }
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdSetEvent.html>
    #[inline]
    pub unsafe fn cmd_set_event(
        &self,
        command_buffer: vk::CommandBuffer,
        event: vk::Event,
        stage_mask: vk::PipelineStageFlags,
    ) {
        (self.device_fn_1_0.cmd_set_event)(command_buffer, event, stage_mask);
    }
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdResetEvent.html>
    #[inline]
    pub unsafe fn cmd_reset_event(
        &self,
        command_buffer: vk::CommandBuffer,
        event: vk::Event,
        stage_mask: vk::PipelineStageFlags,
    ) {
        (self.device_fn_1_0.cmd_reset_event)(command_buffer, event, stage_mask);
    }
1098
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdWaitEvents.html>
    #[inline]
    pub unsafe fn cmd_wait_events(
        &self,
        command_buffer: vk::CommandBuffer,
        events: &[vk::Event],
        src_stage_mask: vk::PipelineStageFlags,
        dst_stage_mask: vk::PipelineStageFlags,
        memory_barriers: &[vk::MemoryBarrier<'_>],
        buffer_memory_barriers: &[vk::BufferMemoryBarrier<'_>],
        image_memory_barriers: &[vk::ImageMemoryBarrier<'_>],
    ) {
        // Each slice is forwarded as its own independent (count, pointer) pair.
        (self.device_fn_1_0.cmd_wait_events)(
            command_buffer,
            events.len() as _,
            events.as_ptr(),
            src_stage_mask,
            dst_stage_mask,
            memory_barriers.len() as _,
            memory_barriers.as_ptr(),
            buffer_memory_barriers.len() as _,
            buffer_memory_barriers.as_ptr(),
            image_memory_barriers.len() as _,
            image_memory_barriers.as_ptr(),
        );
    }
1125
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkDestroyFence.html>
    #[inline]
    pub unsafe fn destroy_fence(
        &self,
        fence: vk::Fence,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) {
        (self.device_fn_1_0.destroy_fence)(self.handle(), fence, allocation_callbacks.as_raw_ptr());
    }
1135
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkDestroyEvent.html>
    #[inline]
    pub unsafe fn destroy_event(
        &self,
        event: vk::Event,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) {
        (self.device_fn_1_0.destroy_event)(self.handle(), event, allocation_callbacks.as_raw_ptr());
    }
1145
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkDestroyImage.html>
    #[inline]
    pub unsafe fn destroy_image(
        &self,
        image: vk::Image,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) {
        (self.device_fn_1_0.destroy_image)(self.handle(), image, allocation_callbacks.as_raw_ptr());
    }
1155
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkDestroyCommandPool.html>
    #[inline]
    pub unsafe fn destroy_command_pool(
        &self,
        pool: vk::CommandPool,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) {
        (self.device_fn_1_0.destroy_command_pool)(
            self.handle(),
            pool,
            allocation_callbacks.as_raw_ptr(),
        );
    }
1169
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkDestroyImageView.html>
    #[inline]
    pub unsafe fn destroy_image_view(
        &self,
        image_view: vk::ImageView,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) {
        (self.device_fn_1_0.destroy_image_view)(
            self.handle(),
            image_view,
            allocation_callbacks.as_raw_ptr(),
        );
    }
1183
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkDestroyRenderPass.html>
    #[inline]
    pub unsafe fn destroy_render_pass(
        &self,
        renderpass: vk::RenderPass,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) {
        (self.device_fn_1_0.destroy_render_pass)(
            self.handle(),
            renderpass,
            allocation_callbacks.as_raw_ptr(),
        );
    }
1197
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkDestroyFramebuffer.html>
    #[inline]
    pub unsafe fn destroy_framebuffer(
        &self,
        framebuffer: vk::Framebuffer,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) {
        (self.device_fn_1_0.destroy_framebuffer)(
            self.handle(),
            framebuffer,
            allocation_callbacks.as_raw_ptr(),
        );
    }
1211
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkDestroyPipelineLayout.html>
    #[inline]
    pub unsafe fn destroy_pipeline_layout(
        &self,
        pipeline_layout: vk::PipelineLayout,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) {
        (self.device_fn_1_0.destroy_pipeline_layout)(
            self.handle(),
            pipeline_layout,
            allocation_callbacks.as_raw_ptr(),
        );
    }
1225
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkDestroyPipelineCache.html>
    #[inline]
    pub unsafe fn destroy_pipeline_cache(
        &self,
        pipeline_cache: vk::PipelineCache,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) {
        (self.device_fn_1_0.destroy_pipeline_cache)(
            self.handle(),
            pipeline_cache,
            allocation_callbacks.as_raw_ptr(),
        );
    }
1239
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkDestroyBuffer.html>
    #[inline]
    pub unsafe fn destroy_buffer(
        &self,
        buffer: vk::Buffer,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) {
        (self.device_fn_1_0.destroy_buffer)(
            self.handle(),
            buffer,
            allocation_callbacks.as_raw_ptr(),
        );
    }
1253
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkDestroyShaderModule.html>
    #[inline]
    pub unsafe fn destroy_shader_module(
        &self,
        shader: vk::ShaderModule,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) {
        (self.device_fn_1_0.destroy_shader_module)(
            self.handle(),
            shader,
            allocation_callbacks.as_raw_ptr(),
        );
    }
1267
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkDestroyPipeline.html>
    #[inline]
    pub unsafe fn destroy_pipeline(
        &self,
        pipeline: vk::Pipeline,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) {
        (self.device_fn_1_0.destroy_pipeline)(
            self.handle(),
            pipeline,
            allocation_callbacks.as_raw_ptr(),
        );
    }
1281
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkDestroySemaphore.html>
    #[inline]
    pub unsafe fn destroy_semaphore(
        &self,
        semaphore: vk::Semaphore,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) {
        (self.device_fn_1_0.destroy_semaphore)(
            self.handle(),
            semaphore,
            allocation_callbacks.as_raw_ptr(),
        );
    }
1295
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkDestroyDescriptorPool.html>
    #[inline]
    pub unsafe fn destroy_descriptor_pool(
        &self,
        pool: vk::DescriptorPool,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) {
        (self.device_fn_1_0.destroy_descriptor_pool)(
            self.handle(),
            pool,
            allocation_callbacks.as_raw_ptr(),
        );
    }
1309
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkDestroyQueryPool.html>
    #[inline]
    pub unsafe fn destroy_query_pool(
        &self,
        pool: vk::QueryPool,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) {
        (self.device_fn_1_0.destroy_query_pool)(
            self.handle(),
            pool,
            allocation_callbacks.as_raw_ptr(),
        );
    }
1323
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkDestroyDescriptorSetLayout.html>
    #[inline]
    pub unsafe fn destroy_descriptor_set_layout(
        &self,
        layout: vk::DescriptorSetLayout,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) {
        (self.device_fn_1_0.destroy_descriptor_set_layout)(
            self.handle(),
            layout,
            allocation_callbacks.as_raw_ptr(),
        );
    }
1337
    /// Frees descriptor sets back to the pool they were allocated from.
    ///
    /// The pool must have been created with `FREE_DESCRIPTOR_SET` — assumed, not
    /// enforced here; TODO confirm at call sites.
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkFreeDescriptorSets.html>
    #[inline]
    pub unsafe fn free_descriptor_sets(
        &self,
        pool: vk::DescriptorPool,
        descriptor_sets: &[vk::DescriptorSet],
    ) -> VkResult<()> {
        (self.device_fn_1_0.free_descriptor_sets)(
            self.handle(),
            pool,
            descriptor_sets.len() as u32,
            descriptor_sets.as_ptr(),
        )
        .result()
    }
1353
    /// Updates the contents of descriptor sets with writes and/or copies.
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkUpdateDescriptorSets.html>
    #[inline]
    pub unsafe fn update_descriptor_sets(
        &self,
        descriptor_writes: &[vk::WriteDescriptorSet<'_>],
        descriptor_copies: &[vk::CopyDescriptorSet<'_>],
    ) {
        (self.device_fn_1_0.update_descriptor_sets)(
            self.handle(),
            descriptor_writes.len() as u32,
            descriptor_writes.as_ptr(),
            descriptor_copies.len() as u32,
            descriptor_copies.as_ptr(),
        );
    }
1369
    /// Creates a new sampler object.
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCreateSampler.html>
    #[inline]
    pub unsafe fn create_sampler(
        &self,
        create_info: &vk::SamplerCreateInfo<'_>,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) -> VkResult<vk::Sampler> {
        // The handle is only read back when the driver reports SUCCESS.
        let mut sampler = mem::MaybeUninit::uninit();
        (self.device_fn_1_0.create_sampler)(
            self.handle(),
            create_info,
            allocation_callbacks.as_raw_ptr(),
            sampler.as_mut_ptr(),
        )
        .assume_init_on_success(sampler)
    }
1386
1387 #[inline]
1389 pub unsafe fn cmd_blit_image(
1390 &self,
1391 command_buffer: vk::CommandBuffer,
1392 src_image: vk::Image,
1393 src_image_layout: vk::ImageLayout,
1394 dst_image: vk::Image,
1395 dst_image_layout: vk::ImageLayout,
1396 regions: &[vk::ImageBlit],
1397 filter: vk::Filter,
1398 ) {
1399 (self.device_fn_1_0.cmd_blit_image)(
1400 command_buffer,
1401 src_image,
1402 src_image_layout,
1403 dst_image,
1404 dst_image_layout,
1405 regions.len() as _,
1406 regions.as_ptr(),
1407 filter,
1408 );
1409 }
1410
    /// Records a multisample-to-single-sample image resolve into the command buffer.
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdResolveImage.html>
    #[inline]
    pub unsafe fn cmd_resolve_image(
        &self,
        command_buffer: vk::CommandBuffer,
        src_image: vk::Image,
        src_image_layout: vk::ImageLayout,
        dst_image: vk::Image,
        dst_image_layout: vk::ImageLayout,
        regions: &[vk::ImageResolve],
    ) {
        (self.device_fn_1_0.cmd_resolve_image)(
            command_buffer,
            src_image,
            src_image_layout,
            dst_image,
            dst_image_layout,
            regions.len() as u32,
            regions.as_ptr(),
        );
    }
1432
    /// Records a fill of a buffer range with a repeated 32-bit value.
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdFillBuffer.html>
    #[inline]
    pub unsafe fn cmd_fill_buffer(
        &self,
        command_buffer: vk::CommandBuffer,
        buffer: vk::Buffer,
        offset: vk::DeviceSize,
        size: vk::DeviceSize,
        data: u32,
    ) {
        (self.device_fn_1_0.cmd_fill_buffer)(command_buffer, buffer, offset, size, data);
    }
1445
1446 #[inline]
1448 pub unsafe fn cmd_update_buffer(
1449 &self,
1450 command_buffer: vk::CommandBuffer,
1451 buffer: vk::Buffer,
1452 offset: vk::DeviceSize,
1453 data: &[u8],
1454 ) {
1455 (self.device_fn_1_0.cmd_update_buffer)(
1456 command_buffer,
1457 buffer,
1458 offset,
1459 data.len() as u64,
1460 data.as_ptr() as _,
1461 );
1462 }
1463
    /// Records a buffer-to-buffer copy into the command buffer.
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdCopyBuffer.html>
    #[inline]
    pub unsafe fn cmd_copy_buffer(
        &self,
        command_buffer: vk::CommandBuffer,
        src_buffer: vk::Buffer,
        dst_buffer: vk::Buffer,
        regions: &[vk::BufferCopy],
    ) {
        (self.device_fn_1_0.cmd_copy_buffer)(
            command_buffer,
            src_buffer,
            dst_buffer,
            regions.len() as u32,
            regions.as_ptr(),
        );
    }
1481
    /// Records an image-to-buffer copy into the command buffer.
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdCopyImageToBuffer.html>
    #[inline]
    pub unsafe fn cmd_copy_image_to_buffer(
        &self,
        command_buffer: vk::CommandBuffer,
        src_image: vk::Image,
        src_image_layout: vk::ImageLayout,
        dst_buffer: vk::Buffer,
        regions: &[vk::BufferImageCopy],
    ) {
        (self.device_fn_1_0.cmd_copy_image_to_buffer)(
            command_buffer,
            src_image,
            src_image_layout,
            dst_buffer,
            regions.len() as u32,
            regions.as_ptr(),
        );
    }
1501
    /// Records a buffer-to-image copy into the command buffer.
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdCopyBufferToImage.html>
    #[inline]
    pub unsafe fn cmd_copy_buffer_to_image(
        &self,
        command_buffer: vk::CommandBuffer,
        src_buffer: vk::Buffer,
        dst_image: vk::Image,
        dst_image_layout: vk::ImageLayout,
        regions: &[vk::BufferImageCopy],
    ) {
        (self.device_fn_1_0.cmd_copy_buffer_to_image)(
            command_buffer,
            src_buffer,
            dst_image,
            dst_image_layout,
            regions.len() as u32,
            regions.as_ptr(),
        );
    }
1521
    /// Records an image-to-image copy (no scaling or format conversion).
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdCopyImage.html>
    #[inline]
    pub unsafe fn cmd_copy_image(
        &self,
        command_buffer: vk::CommandBuffer,
        src_image: vk::Image,
        src_image_layout: vk::ImageLayout,
        dst_image: vk::Image,
        dst_image_layout: vk::ImageLayout,
        regions: &[vk::ImageCopy],
    ) {
        (self.device_fn_1_0.cmd_copy_image)(
            command_buffer,
            src_image,
            src_image_layout,
            dst_image,
            dst_image_layout,
            regions.len() as u32,
            regions.as_ptr(),
        );
    }
1543
    /// Allocates descriptor sets from a pool, one per layout in `allocate_info`.
    ///
    /// The driver writes exactly `descriptor_set_count` handles into the
    /// preallocated vector; its length is only set on SUCCESS.
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkAllocateDescriptorSets.html>
    #[inline]
    pub unsafe fn allocate_descriptor_sets(
        &self,
        allocate_info: &vk::DescriptorSetAllocateInfo<'_>,
    ) -> VkResult<Vec<vk::DescriptorSet>> {
        let mut desc_set = Vec::with_capacity(allocate_info.descriptor_set_count as usize);
        (self.device_fn_1_0.allocate_descriptor_sets)(
            self.handle(),
            allocate_info,
            desc_set.as_mut_ptr(),
        )
        .set_vec_len_on_success(desc_set, allocate_info.descriptor_set_count as usize)
    }
1558
    /// Creates a new descriptor set layout.
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCreateDescriptorSetLayout.html>
    #[inline]
    pub unsafe fn create_descriptor_set_layout(
        &self,
        create_info: &vk::DescriptorSetLayoutCreateInfo<'_>,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) -> VkResult<vk::DescriptorSetLayout> {
        let mut layout = mem::MaybeUninit::uninit();
        (self.device_fn_1_0.create_descriptor_set_layout)(
            self.handle(),
            create_info,
            allocation_callbacks.as_raw_ptr(),
            layout.as_mut_ptr(),
        )
        .assume_init_on_success(layout)
    }
1575
    /// Blocks until all queues of this device are idle.
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkDeviceWaitIdle.html>
    #[inline]
    pub unsafe fn device_wait_idle(&self) -> VkResult<()> {
        (self.device_fn_1_0.device_wait_idle)(self.handle()).result()
    }
1581
    /// Creates a new descriptor pool.
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCreateDescriptorPool.html>
    #[inline]
    pub unsafe fn create_descriptor_pool(
        &self,
        create_info: &vk::DescriptorPoolCreateInfo<'_>,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) -> VkResult<vk::DescriptorPool> {
        let mut pool = mem::MaybeUninit::uninit();
        (self.device_fn_1_0.create_descriptor_pool)(
            self.handle(),
            create_info,
            allocation_callbacks.as_raw_ptr(),
            pool.as_mut_ptr(),
        )
        .assume_init_on_success(pool)
    }
1598
    /// Returns all descriptor sets in the pool to the pool, rather than freeing individually.
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkResetDescriptorPool.html>
    #[inline]
    pub unsafe fn reset_descriptor_pool(
        &self,
        pool: vk::DescriptorPool,
        flags: vk::DescriptorPoolResetFlags,
    ) -> VkResult<()> {
        (self.device_fn_1_0.reset_descriptor_pool)(self.handle(), pool, flags).result()
    }
1608
    /// Resets a command pool, recycling all command buffers allocated from it.
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkResetCommandPool.html>
    #[inline]
    pub unsafe fn reset_command_pool(
        &self,
        command_pool: vk::CommandPool,
        flags: vk::CommandPoolResetFlags,
    ) -> VkResult<()> {
        (self.device_fn_1_0.reset_command_pool)(self.handle(), command_pool, flags).result()
    }
1618
    /// Resets a single command buffer to the initial state.
    ///
    /// Note: the command buffer is dispatchable and carries its own dispatch
    /// pointer, so the device handle is not passed.
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkResetCommandBuffer.html>
    #[inline]
    pub unsafe fn reset_command_buffer(
        &self,
        command_buffer: vk::CommandBuffer,
        flags: vk::CommandBufferResetFlags,
    ) -> VkResult<()> {
        (self.device_fn_1_0.reset_command_buffer)(command_buffer, flags).result()
    }
1628
1629 #[inline]
1631 pub unsafe fn reset_fences(&self, fences: &[vk::Fence]) -> VkResult<()> {
1632 (self.device_fn_1_0.reset_fences)(self.handle(), fences.len() as u32, fences.as_ptr())
1633 .result()
1634 }
1635
    /// Binds an index buffer to the command buffer for subsequent indexed draws.
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdBindIndexBuffer.html>
    #[inline]
    pub unsafe fn cmd_bind_index_buffer(
        &self,
        command_buffer: vk::CommandBuffer,
        buffer: vk::Buffer,
        offset: vk::DeviceSize,
        index_type: vk::IndexType,
    ) {
        (self.device_fn_1_0.cmd_bind_index_buffer)(command_buffer, buffer, offset, index_type);
    }
1647
    /// Records a clear of subresource ranges of a color image.
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdClearColorImage.html>
    #[inline]
    pub unsafe fn cmd_clear_color_image(
        &self,
        command_buffer: vk::CommandBuffer,
        image: vk::Image,
        image_layout: vk::ImageLayout,
        clear_color_value: &vk::ClearColorValue,
        ranges: &[vk::ImageSubresourceRange],
    ) {
        (self.device_fn_1_0.cmd_clear_color_image)(
            command_buffer,
            image,
            image_layout,
            clear_color_value,
            ranges.len() as u32,
            ranges.as_ptr(),
        );
    }
1667
    /// Records a clear of subresource ranges of a depth/stencil image.
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdClearDepthStencilImage.html>
    #[inline]
    pub unsafe fn cmd_clear_depth_stencil_image(
        &self,
        command_buffer: vk::CommandBuffer,
        image: vk::Image,
        image_layout: vk::ImageLayout,
        clear_depth_stencil_value: &vk::ClearDepthStencilValue,
        ranges: &[vk::ImageSubresourceRange],
    ) {
        (self.device_fn_1_0.cmd_clear_depth_stencil_image)(
            command_buffer,
            image,
            image_layout,
            clear_depth_stencil_value,
            ranges.len() as u32,
            ranges.as_ptr(),
        );
    }
1687
    /// Records a clear of regions of bound framebuffer attachments inside a render pass.
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdClearAttachments.html>
    #[inline]
    pub unsafe fn cmd_clear_attachments(
        &self,
        command_buffer: vk::CommandBuffer,
        attachments: &[vk::ClearAttachment],
        rects: &[vk::ClearRect],
    ) {
        (self.device_fn_1_0.cmd_clear_attachments)(
            command_buffer,
            attachments.len() as u32,
            attachments.as_ptr(),
            rects.len() as u32,
            rects.as_ptr(),
        );
    }
1704
    /// Records an indexed draw into the command buffer.
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdDrawIndexed.html>
    #[inline]
    pub unsafe fn cmd_draw_indexed(
        &self,
        command_buffer: vk::CommandBuffer,
        index_count: u32,
        instance_count: u32,
        first_index: u32,
        vertex_offset: i32,
        first_instance: u32,
    ) {
        (self.device_fn_1_0.cmd_draw_indexed)(
            command_buffer,
            index_count,
            instance_count,
            first_index,
            vertex_offset,
            first_instance,
        );
    }
1725
    /// Records an indexed indirect draw whose parameters are read from `buffer`.
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdDrawIndexedIndirect.html>
    #[inline]
    pub unsafe fn cmd_draw_indexed_indirect(
        &self,
        command_buffer: vk::CommandBuffer,
        buffer: vk::Buffer,
        offset: vk::DeviceSize,
        draw_count: u32,
        stride: u32,
    ) {
        (self.device_fn_1_0.cmd_draw_indexed_indirect)(
            command_buffer,
            buffer,
            offset,
            draw_count,
            stride,
        );
    }
1744
    /// Records execution of secondary command buffers inside a primary command buffer.
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdExecuteCommands.html>
    #[inline]
    pub unsafe fn cmd_execute_commands(
        &self,
        primary_command_buffer: vk::CommandBuffer,
        secondary_command_buffers: &[vk::CommandBuffer],
    ) {
        (self.device_fn_1_0.cmd_execute_commands)(
            primary_command_buffer,
            secondary_command_buffers.len() as u32,
            secondary_command_buffers.as_ptr(),
        );
    }
1758
    /// Binds descriptor sets to the command buffer, with dynamic offsets applied
    /// in order to the dynamic bindings of the bound sets.
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdBindDescriptorSets.html>
    #[inline]
    pub unsafe fn cmd_bind_descriptor_sets(
        &self,
        command_buffer: vk::CommandBuffer,
        pipeline_bind_point: vk::PipelineBindPoint,
        layout: vk::PipelineLayout,
        first_set: u32,
        descriptor_sets: &[vk::DescriptorSet],
        dynamic_offsets: &[u32],
    ) {
        (self.device_fn_1_0.cmd_bind_descriptor_sets)(
            command_buffer,
            pipeline_bind_point,
            layout,
            first_set,
            descriptor_sets.len() as u32,
            descriptor_sets.as_ptr(),
            dynamic_offsets.len() as u32,
            dynamic_offsets.as_ptr(),
        );
    }
1781
    /// Records a copy of query results from a query pool into a buffer.
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdCopyQueryPoolResults.html>
    #[inline]
    pub unsafe fn cmd_copy_query_pool_results(
        &self,
        command_buffer: vk::CommandBuffer,
        query_pool: vk::QueryPool,
        first_query: u32,
        query_count: u32,
        dst_buffer: vk::Buffer,
        dst_offset: vk::DeviceSize,
        stride: vk::DeviceSize,
        flags: vk::QueryResultFlags,
    ) {
        (self.device_fn_1_0.cmd_copy_query_pool_results)(
            command_buffer,
            query_pool,
            first_query,
            query_count,
            dst_buffer,
            dst_offset,
            stride,
            flags,
        );
    }
1806
1807 #[inline]
1809 pub unsafe fn cmd_push_constants(
1810 &self,
1811 command_buffer: vk::CommandBuffer,
1812 layout: vk::PipelineLayout,
1813 stage_flags: vk::ShaderStageFlags,
1814 offset: u32,
1815 constants: &[u8],
1816 ) {
1817 (self.device_fn_1_0.cmd_push_constants)(
1818 command_buffer,
1819 layout,
1820 stage_flags,
1821 offset,
1822 constants.len() as _,
1823 constants.as_ptr() as _,
1824 );
1825 }
1826
    /// Records the beginning of a render pass instance.
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdBeginRenderPass.html>
    #[inline]
    pub unsafe fn cmd_begin_render_pass(
        &self,
        command_buffer: vk::CommandBuffer,
        render_pass_begin: &vk::RenderPassBeginInfo<'_>,
        contents: vk::SubpassContents,
    ) {
        (self.device_fn_1_0.cmd_begin_render_pass)(command_buffer, render_pass_begin, contents);
    }
1837
    /// Records a transition to the next subpass of the current render pass.
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdNextSubpass.html>
    #[inline]
    pub unsafe fn cmd_next_subpass(
        &self,
        command_buffer: vk::CommandBuffer,
        contents: vk::SubpassContents,
    ) {
        (self.device_fn_1_0.cmd_next_subpass)(command_buffer, contents);
    }
1847
    /// Binds a pipeline to the command buffer at the given bind point.
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdBindPipeline.html>
    #[inline]
    pub unsafe fn cmd_bind_pipeline(
        &self,
        command_buffer: vk::CommandBuffer,
        pipeline_bind_point: vk::PipelineBindPoint,
        pipeline: vk::Pipeline,
    ) {
        (self.device_fn_1_0.cmd_bind_pipeline)(command_buffer, pipeline_bind_point, pipeline);
    }
1858
    /// Sets dynamic scissor rectangles, starting at `first_scissor`.
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdSetScissor.html>
    #[inline]
    pub unsafe fn cmd_set_scissor(
        &self,
        command_buffer: vk::CommandBuffer,
        first_scissor: u32,
        scissors: &[vk::Rect2D],
    ) {
        (self.device_fn_1_0.cmd_set_scissor)(
            command_buffer,
            first_scissor,
            scissors.len() as u32,
            scissors.as_ptr(),
        );
    }
1874
    /// Sets the dynamic line width state.
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdSetLineWidth.html>
    #[inline]
    pub unsafe fn cmd_set_line_width(&self, command_buffer: vk::CommandBuffer, line_width: f32) {
        (self.device_fn_1_0.cmd_set_line_width)(command_buffer, line_width);
    }
1880
    /// Binds vertex buffers, starting at `first_binding`; `buffers` and
    /// `offsets` must be parallel slices of equal length (checked in debug builds).
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdBindVertexBuffers.html>
    #[inline]
    pub unsafe fn cmd_bind_vertex_buffers(
        &self,
        command_buffer: vk::CommandBuffer,
        first_binding: u32,
        buffers: &[vk::Buffer],
        offsets: &[vk::DeviceSize],
    ) {
        debug_assert_eq!(buffers.len(), offsets.len());
        (self.device_fn_1_0.cmd_bind_vertex_buffers)(
            command_buffer,
            first_binding,
            buffers.len() as u32,
            buffers.as_ptr(),
            offsets.as_ptr(),
        );
    }
1899
    /// Records the end of the current render pass instance.
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdEndRenderPass.html>
    #[inline]
    pub unsafe fn cmd_end_render_pass(&self, command_buffer: vk::CommandBuffer) {
        (self.device_fn_1_0.cmd_end_render_pass)(command_buffer);
    }
1905
    /// Records a non-indexed draw into the command buffer.
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdDraw.html>
    #[inline]
    pub unsafe fn cmd_draw(
        &self,
        command_buffer: vk::CommandBuffer,
        vertex_count: u32,
        instance_count: u32,
        first_vertex: u32,
        first_instance: u32,
    ) {
        (self.device_fn_1_0.cmd_draw)(
            command_buffer,
            vertex_count,
            instance_count,
            first_vertex,
            first_instance,
        );
    }
1924
    /// Records an indirect draw whose parameters are read from `buffer`.
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdDrawIndirect.html>
    #[inline]
    pub unsafe fn cmd_draw_indirect(
        &self,
        command_buffer: vk::CommandBuffer,
        buffer: vk::Buffer,
        offset: vk::DeviceSize,
        draw_count: u32,
        stride: u32,
    ) {
        (self.device_fn_1_0.cmd_draw_indirect)(command_buffer, buffer, offset, draw_count, stride);
    }
1937
    /// Records a compute dispatch with the given workgroup counts.
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdDispatch.html>
    #[inline]
    pub unsafe fn cmd_dispatch(
        &self,
        command_buffer: vk::CommandBuffer,
        group_count_x: u32,
        group_count_y: u32,
        group_count_z: u32,
    ) {
        (self.device_fn_1_0.cmd_dispatch)(
            command_buffer,
            group_count_x,
            group_count_y,
            group_count_z,
        );
    }
1954
    /// Records an indirect compute dispatch whose workgroup counts are read from `buffer`.
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdDispatchIndirect.html>
    #[inline]
    pub unsafe fn cmd_dispatch_indirect(
        &self,
        command_buffer: vk::CommandBuffer,
        buffer: vk::Buffer,
        offset: vk::DeviceSize,
    ) {
        (self.device_fn_1_0.cmd_dispatch_indirect)(command_buffer, buffer, offset);
    }
1965
    /// Sets dynamic viewports, starting at `first_viewport`.
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdSetViewport.html>
    #[inline]
    pub unsafe fn cmd_set_viewport(
        &self,
        command_buffer: vk::CommandBuffer,
        first_viewport: u32,
        viewports: &[vk::Viewport],
    ) {
        (self.device_fn_1_0.cmd_set_viewport)(
            command_buffer,
            first_viewport,
            viewports.len() as u32,
            viewports.as_ptr(),
        );
    }
1981
    /// Sets the dynamic depth bias state.
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdSetDepthBias.html>
    #[inline]
    pub unsafe fn cmd_set_depth_bias(
        &self,
        command_buffer: vk::CommandBuffer,
        constant_factor: f32,
        clamp: f32,
        slope_factor: f32,
    ) {
        (self.device_fn_1_0.cmd_set_depth_bias)(
            command_buffer,
            constant_factor,
            clamp,
            slope_factor,
        );
    }
1998
    /// Sets the dynamic blend constants (RGBA).
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdSetBlendConstants.html>
    #[inline]
    pub unsafe fn cmd_set_blend_constants(
        &self,
        command_buffer: vk::CommandBuffer,
        blend_constants: &[f32; 4],
    ) {
        (self.device_fn_1_0.cmd_set_blend_constants)(command_buffer, blend_constants);
    }
2008
    /// Sets the dynamic depth bounds test range.
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdSetDepthBounds.html>
    #[inline]
    pub unsafe fn cmd_set_depth_bounds(
        &self,
        command_buffer: vk::CommandBuffer,
        min_depth_bounds: f32,
        max_depth_bounds: f32,
    ) {
        (self.device_fn_1_0.cmd_set_depth_bounds)(
            command_buffer,
            min_depth_bounds,
            max_depth_bounds,
        );
    }
2023
    /// Sets the dynamic stencil compare mask for the given faces.
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdSetStencilCompareMask.html>
    #[inline]
    pub unsafe fn cmd_set_stencil_compare_mask(
        &self,
        command_buffer: vk::CommandBuffer,
        face_mask: vk::StencilFaceFlags,
        compare_mask: u32,
    ) {
        (self.device_fn_1_0.cmd_set_stencil_compare_mask)(command_buffer, face_mask, compare_mask);
    }
2034
    /// Sets the dynamic stencil write mask for the given faces.
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdSetStencilWriteMask.html>
    #[inline]
    pub unsafe fn cmd_set_stencil_write_mask(
        &self,
        command_buffer: vk::CommandBuffer,
        face_mask: vk::StencilFaceFlags,
        write_mask: u32,
    ) {
        (self.device_fn_1_0.cmd_set_stencil_write_mask)(command_buffer, face_mask, write_mask);
    }
2045
    /// Sets the dynamic stencil reference value for the given faces.
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdSetStencilReference.html>
    #[inline]
    pub unsafe fn cmd_set_stencil_reference(
        &self,
        command_buffer: vk::CommandBuffer,
        face_mask: vk::StencilFaceFlags,
        reference: u32,
    ) {
        (self.device_fn_1_0.cmd_set_stencil_reference)(command_buffer, face_mask, reference);
    }
2056
    /// Reads back query results into `data`: one element of `T` per query, with
    /// the element size used as the stride and `data.len()` as the query count.
    ///
    /// NOTE(review): `T` is typically `u32` or `u64` depending on
    /// `vk::QueryResultFlags::TYPE_64` — not enforced here; confirm at call sites.
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkGetQueryPoolResults.html>
    #[inline]
    pub unsafe fn get_query_pool_results<T>(
        &self,
        query_pool: vk::QueryPool,
        first_query: u32,
        data: &mut [T],
        flags: vk::QueryResultFlags,
    ) -> VkResult<()> {
        // Total writable byte size of the output slice.
        let data_size = mem::size_of_val(data);
        (self.device_fn_1_0.get_query_pool_results)(
            self.handle(),
            query_pool,
            first_query,
            data.len() as u32,
            data_size,
            data.as_mut_ptr().cast(),
            mem::size_of::<T>() as _,
            flags,
        )
        .result()
    }
2079
    /// Records the beginning of a query.
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdBeginQuery.html>
    #[inline]
    pub unsafe fn cmd_begin_query(
        &self,
        command_buffer: vk::CommandBuffer,
        query_pool: vk::QueryPool,
        query: u32,
        flags: vk::QueryControlFlags,
    ) {
        (self.device_fn_1_0.cmd_begin_query)(command_buffer, query_pool, query, flags);
    }
2091
    /// Records the end of a query.
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdEndQuery.html>
    #[inline]
    pub unsafe fn cmd_end_query(
        &self,
        command_buffer: vk::CommandBuffer,
        query_pool: vk::QueryPool,
        query: u32,
    ) {
        (self.device_fn_1_0.cmd_end_query)(command_buffer, query_pool, query);
    }
2102
    /// Records a reset of a range of queries in a query pool.
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdResetQueryPool.html>
    #[inline]
    pub unsafe fn cmd_reset_query_pool(
        &self,
        command_buffer: vk::CommandBuffer,
        pool: vk::QueryPool,
        first_query: u32,
        query_count: u32,
    ) {
        (self.device_fn_1_0.cmd_reset_query_pool)(command_buffer, pool, first_query, query_count);
    }
2114
    /// Records a timestamp write after the given pipeline stage completes.
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdWriteTimestamp.html>
    #[inline]
    pub unsafe fn cmd_write_timestamp(
        &self,
        command_buffer: vk::CommandBuffer,
        pipeline_stage: vk::PipelineStageFlags,
        query_pool: vk::QueryPool,
        query: u32,
    ) {
        (self.device_fn_1_0.cmd_write_timestamp)(command_buffer, pipeline_stage, query_pool, query);
    }
2126
    /// Creates a new semaphore object.
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCreateSemaphore.html>
    #[inline]
    pub unsafe fn create_semaphore(
        &self,
        create_info: &vk::SemaphoreCreateInfo<'_>,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) -> VkResult<vk::Semaphore> {
        let mut semaphore = mem::MaybeUninit::uninit();
        (self.device_fn_1_0.create_semaphore)(
            self.handle(),
            create_info,
            allocation_callbacks.as_raw_ptr(),
            semaphore.as_mut_ptr(),
        )
        .assume_init_on_success(semaphore)
    }
2143
    /// Creates one graphics pipeline per element of `create_infos`.
    ///
    /// On error the partially-created pipelines are returned alongside the error
    /// code: the spec guarantees the driver writes every element of the output
    /// array (failed entries are `VK_NULL_HANDLE`), so `set_len` is sound even
    /// when `err_code` is not SUCCESS.
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCreateGraphicsPipelines.html>
    #[inline]
    pub unsafe fn create_graphics_pipelines(
        &self,
        pipeline_cache: vk::PipelineCache,
        create_infos: &[vk::GraphicsPipelineCreateInfo<'_>],
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) -> Result<Vec<vk::Pipeline>, (Vec<vk::Pipeline>, vk::Result)> {
        let mut pipelines = Vec::with_capacity(create_infos.len());
        let err_code = (self.device_fn_1_0.create_graphics_pipelines)(
            self.handle(),
            pipeline_cache,
            create_infos.len() as u32,
            create_infos.as_ptr(),
            allocation_callbacks.as_raw_ptr(),
            pipelines.as_mut_ptr(),
        );
        pipelines.set_len(create_infos.len());
        match err_code {
            vk::Result::SUCCESS => Ok(pipelines),
            _ => Err((pipelines, err_code)),
        }
    }
2171
    /// Creates one compute pipeline per element of `create_infos`.
    ///
    /// On error the partially-created pipelines are returned alongside the error
    /// code: the spec guarantees the driver writes every element of the output
    /// array (failed entries are `VK_NULL_HANDLE`), so `set_len` is sound even
    /// when `err_code` is not SUCCESS.
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCreateComputePipelines.html>
    #[inline]
    pub unsafe fn create_compute_pipelines(
        &self,
        pipeline_cache: vk::PipelineCache,
        create_infos: &[vk::ComputePipelineCreateInfo<'_>],
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) -> Result<Vec<vk::Pipeline>, (Vec<vk::Pipeline>, vk::Result)> {
        let mut pipelines = Vec::with_capacity(create_infos.len());
        let err_code = (self.device_fn_1_0.create_compute_pipelines)(
            self.handle(),
            pipeline_cache,
            create_infos.len() as u32,
            create_infos.as_ptr(),
            allocation_callbacks.as_raw_ptr(),
            pipelines.as_mut_ptr(),
        );
        pipelines.set_len(create_infos.len());
        match err_code {
            vk::Result::SUCCESS => Ok(pipelines),
            _ => Err((pipelines, err_code)),
        }
    }
2199
2200 #[inline]
2202 pub unsafe fn create_buffer(
2203 &self,
2204 create_info: &vk::BufferCreateInfo<'_>,
2205 allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
2206 ) -> VkResult<vk::Buffer> {
2207 let mut buffer = mem::MaybeUninit::uninit();
2208 (self.device_fn_1_0.create_buffer)(
2209 self.handle(),
2210 create_info,
2211 allocation_callbacks.as_raw_ptr(),
2212 buffer.as_mut_ptr(),
2213 )
2214 .assume_init_on_success(buffer)
2215 }
2216
    /// Creates a new pipeline layout.
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCreatePipelineLayout.html>
    #[inline]
    pub unsafe fn create_pipeline_layout(
        &self,
        create_info: &vk::PipelineLayoutCreateInfo<'_>,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) -> VkResult<vk::PipelineLayout> {
        let mut pipeline_layout = mem::MaybeUninit::uninit();
        (self.device_fn_1_0.create_pipeline_layout)(
            self.handle(),
            create_info,
            allocation_callbacks.as_raw_ptr(),
            pipeline_layout.as_mut_ptr(),
        )
        .assume_init_on_success(pipeline_layout)
    }
2233
    /// Creates a new pipeline cache.
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCreatePipelineCache.html>
    #[inline]
    pub unsafe fn create_pipeline_cache(
        &self,
        create_info: &vk::PipelineCacheCreateInfo<'_>,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) -> VkResult<vk::PipelineCache> {
        let mut pipeline_cache = mem::MaybeUninit::uninit();
        (self.device_fn_1_0.create_pipeline_cache)(
            self.handle(),
            create_info,
            allocation_callbacks.as_raw_ptr(),
            pipeline_cache.as_mut_ptr(),
        )
        .assume_init_on_success(pipeline_cache)
    }
2250
    /// Retrieves the serialized contents of a pipeline cache, using the usual
    /// two-call (size query, then fill) pattern via `read_into_uninitialized_vector`.
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkGetPipelineCacheData.html>
    #[inline]
    pub unsafe fn get_pipeline_cache_data(
        &self,
        pipeline_cache: vk::PipelineCache,
    ) -> VkResult<Vec<u8>> {
        read_into_uninitialized_vector(|count, data: *mut u8| {
            (self.device_fn_1_0.get_pipeline_cache_data)(
                self.handle(),
                pipeline_cache,
                count,
                data.cast(),
            )
        })
    }
2266
    /// Merges the contents of `src_caches` into `dst_cache`.
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkMergePipelineCaches.html>
    #[inline]
    pub unsafe fn merge_pipeline_caches(
        &self,
        dst_cache: vk::PipelineCache,
        src_caches: &[vk::PipelineCache],
    ) -> VkResult<()> {
        (self.device_fn_1_0.merge_pipeline_caches)(
            self.handle(),
            dst_cache,
            src_caches.len() as u32,
            src_caches.as_ptr(),
        )
        .result()
    }
2282
    /// Maps a range of device memory into host address space and returns the
    /// raw host pointer. The pointer stays valid until [`Self::unmap_memory`].
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkMapMemory.html>
    #[inline]
    pub unsafe fn map_memory(
        &self,
        memory: vk::DeviceMemory,
        offset: vk::DeviceSize,
        size: vk::DeviceSize,
        flags: vk::MemoryMapFlags,
    ) -> VkResult<*mut ffi::c_void> {
        let mut data = mem::MaybeUninit::uninit();
        (self.device_fn_1_0.map_memory)(
            self.handle(),
            memory,
            offset,
            size,
            flags,
            data.as_mut_ptr(),
        )
        .assume_init_on_success(data)
    }
2303
    /// Unmaps previously mapped device memory, invalidating the host pointer.
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkUnmapMemory.html>
    #[inline]
    pub unsafe fn unmap_memory(&self, memory: vk::DeviceMemory) {
        (self.device_fn_1_0.unmap_memory)(self.handle(), memory);
    }
2309
    /// Invalidates mapped memory ranges so device writes become visible to the host.
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkInvalidateMappedMemoryRanges.html>
    #[inline]
    pub unsafe fn invalidate_mapped_memory_ranges(
        &self,
        ranges: &[vk::MappedMemoryRange<'_>],
    ) -> VkResult<()> {
        (self.device_fn_1_0.invalidate_mapped_memory_ranges)(
            self.handle(),
            ranges.len() as u32,
            ranges.as_ptr(),
        )
        .result()
    }
2323
    /// Flushes mapped memory ranges so host writes become visible to the device.
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkFlushMappedMemoryRanges.html>
    #[inline]
    pub unsafe fn flush_mapped_memory_ranges(
        &self,
        ranges: &[vk::MappedMemoryRange<'_>],
    ) -> VkResult<()> {
        (self.device_fn_1_0.flush_mapped_memory_ranges)(
            self.handle(),
            ranges.len() as u32,
            ranges.as_ptr(),
        )
        .result()
    }
2337
    /// Creates a new framebuffer.
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCreateFramebuffer.html>
    #[inline]
    pub unsafe fn create_framebuffer(
        &self,
        create_info: &vk::FramebufferCreateInfo<'_>,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) -> VkResult<vk::Framebuffer> {
        let mut framebuffer = mem::MaybeUninit::uninit();
        (self.device_fn_1_0.create_framebuffer)(
            self.handle(),
            create_info,
            allocation_callbacks.as_raw_ptr(),
            framebuffer.as_mut_ptr(),
        )
        .assume_init_on_success(framebuffer)
    }
2354
    /// Retrieves a queue handle for the given family/index; this query cannot
    /// fail, so the output is unconditionally assumed initialized.
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkGetDeviceQueue.html>
    #[inline]
    pub unsafe fn get_device_queue(&self, queue_family_index: u32, queue_index: u32) -> vk::Queue {
        let mut queue = mem::MaybeUninit::uninit();
        (self.device_fn_1_0.get_device_queue)(
            self.handle(),
            queue_family_index,
            queue_index,
            queue.as_mut_ptr(),
        );
        queue.assume_init()
    }
2367
    /// Records a pipeline barrier with the given memory, buffer, and image barriers.
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCmdPipelineBarrier.html>
    #[inline]
    pub unsafe fn cmd_pipeline_barrier(
        &self,
        command_buffer: vk::CommandBuffer,
        src_stage_mask: vk::PipelineStageFlags,
        dst_stage_mask: vk::PipelineStageFlags,
        dependency_flags: vk::DependencyFlags,
        memory_barriers: &[vk::MemoryBarrier<'_>],
        buffer_memory_barriers: &[vk::BufferMemoryBarrier<'_>],
        image_memory_barriers: &[vk::ImageMemoryBarrier<'_>],
    ) {
        (self.device_fn_1_0.cmd_pipeline_barrier)(
            command_buffer,
            src_stage_mask,
            dst_stage_mask,
            dependency_flags,
            memory_barriers.len() as u32,
            memory_barriers.as_ptr(),
            buffer_memory_barriers.len() as u32,
            buffer_memory_barriers.as_ptr(),
            image_memory_barriers.len() as u32,
            image_memory_barriers.as_ptr(),
        );
    }
2393
    /// Creates a new render pass.
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCreateRenderPass.html>
    #[inline]
    pub unsafe fn create_render_pass(
        &self,
        create_info: &vk::RenderPassCreateInfo<'_>,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) -> VkResult<vk::RenderPass> {
        let mut renderpass = mem::MaybeUninit::uninit();
        (self.device_fn_1_0.create_render_pass)(
            self.handle(),
            create_info,
            allocation_callbacks.as_raw_ptr(),
            renderpass.as_mut_ptr(),
        )
        .assume_init_on_success(renderpass)
    }
2410
    /// Puts a command buffer into the recording state.
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkBeginCommandBuffer.html>
    #[inline]
    pub unsafe fn begin_command_buffer(
        &self,
        command_buffer: vk::CommandBuffer,
        begin_info: &vk::CommandBufferBeginInfo<'_>,
    ) -> VkResult<()> {
        (self.device_fn_1_0.begin_command_buffer)(command_buffer, begin_info).result()
    }
2420
    /// Finishes recording a command buffer, moving it to the executable state.
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkEndCommandBuffer.html>
    #[inline]
    pub unsafe fn end_command_buffer(&self, command_buffer: vk::CommandBuffer) -> VkResult<()> {
        (self.device_fn_1_0.end_command_buffer)(command_buffer).result()
    }
2426
    /// Waits for one or all (`wait_all`) of the given fences, up to `timeout`
    /// nanoseconds; the bool is converted to a Vulkan `Bool32`.
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkWaitForFences.html>
    #[inline]
    pub unsafe fn wait_for_fences(
        &self,
        fences: &[vk::Fence],
        wait_all: bool,
        timeout: u64,
    ) -> VkResult<()> {
        (self.device_fn_1_0.wait_for_fences)(
            self.handle(),
            fences.len() as u32,
            fences.as_ptr(),
            wait_all as u32,
            timeout,
        )
        .result()
    }
2444
2445 #[inline]
2447 pub unsafe fn get_fence_status(&self, fence: vk::Fence) -> VkResult<bool> {
2448 let err_code = (self.device_fn_1_0.get_fence_status)(self.handle(), fence);
2449 match err_code {
2450 vk::Result::SUCCESS => Ok(true),
2451 vk::Result::NOT_READY => Ok(false),
2452 _ => Err(err_code),
2453 }
2454 }
2455
    /// Blocks until the given queue has finished all submitted work.
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkQueueWaitIdle.html>
    #[inline]
    pub unsafe fn queue_wait_idle(&self, queue: vk::Queue) -> VkResult<()> {
        (self.device_fn_1_0.queue_wait_idle)(queue).result()
    }
2461
    /// Submits batches of command buffers to a queue, optionally signaling `fence`.
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkQueueSubmit.html>
    #[inline]
    pub unsafe fn queue_submit(
        &self,
        queue: vk::Queue,
        submits: &[vk::SubmitInfo<'_>],
        fence: vk::Fence,
    ) -> VkResult<()> {
        (self.device_fn_1_0.queue_submit)(queue, submits.len() as u32, submits.as_ptr(), fence)
            .result()
    }
2473
    /// Submits sparse memory binding operations to a queue, optionally signaling `fence`.
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkQueueBindSparse.html>
    #[inline]
    pub unsafe fn queue_bind_sparse(
        &self,
        queue: vk::Queue,
        bind_info: &[vk::BindSparseInfo<'_>],
        fence: vk::Fence,
    ) -> VkResult<()> {
        (self.device_fn_1_0.queue_bind_sparse)(
            queue,
            bind_info.len() as u32,
            bind_info.as_ptr(),
            fence,
        )
        .result()
    }
2490
    /// Creates a new buffer view.
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCreateBufferView.html>
    #[inline]
    pub unsafe fn create_buffer_view(
        &self,
        create_info: &vk::BufferViewCreateInfo<'_>,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) -> VkResult<vk::BufferView> {
        let mut buffer_view = mem::MaybeUninit::uninit();
        (self.device_fn_1_0.create_buffer_view)(
            self.handle(),
            create_info,
            allocation_callbacks.as_raw_ptr(),
            buffer_view.as_mut_ptr(),
        )
        .assume_init_on_success(buffer_view)
    }
2507
    /// Destroys a buffer view.
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkDestroyBufferView.html>
    #[inline]
    pub unsafe fn destroy_buffer_view(
        &self,
        buffer_view: vk::BufferView,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) {
        (self.device_fn_1_0.destroy_buffer_view)(
            self.handle(),
            buffer_view,
            allocation_callbacks.as_raw_ptr(),
        );
    }
2521
    /// Creates a new image view.
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCreateImageView.html>
    #[inline]
    pub unsafe fn create_image_view(
        &self,
        create_info: &vk::ImageViewCreateInfo<'_>,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) -> VkResult<vk::ImageView> {
        let mut image_view = mem::MaybeUninit::uninit();
        (self.device_fn_1_0.create_image_view)(
            self.handle(),
            create_info,
            allocation_callbacks.as_raw_ptr(),
            image_view.as_mut_ptr(),
        )
        .assume_init_on_success(image_view)
    }
2538
    /// Allocates command buffers from a command pool.
    ///
    /// The driver writes exactly `command_buffer_count` handles into the
    /// preallocated vector; its length is only set on SUCCESS.
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkAllocateCommandBuffers.html>
    #[inline]
    pub unsafe fn allocate_command_buffers(
        &self,
        allocate_info: &vk::CommandBufferAllocateInfo<'_>,
    ) -> VkResult<Vec<vk::CommandBuffer>> {
        let mut buffers = Vec::with_capacity(allocate_info.command_buffer_count as usize);
        (self.device_fn_1_0.allocate_command_buffers)(
            self.handle(),
            allocate_info,
            buffers.as_mut_ptr(),
        )
        .set_vec_len_on_success(buffers, allocate_info.command_buffer_count as usize)
    }
2553
    /// Creates a new command pool.
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCreateCommandPool.html>
    #[inline]
    pub unsafe fn create_command_pool(
        &self,
        create_info: &vk::CommandPoolCreateInfo<'_>,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) -> VkResult<vk::CommandPool> {
        let mut pool = mem::MaybeUninit::uninit();
        (self.device_fn_1_0.create_command_pool)(
            self.handle(),
            create_info,
            allocation_callbacks.as_raw_ptr(),
            pool.as_mut_ptr(),
        )
        .assume_init_on_success(pool)
    }
2570
    /// Creates a new query pool.
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCreateQueryPool.html>
    #[inline]
    pub unsafe fn create_query_pool(
        &self,
        create_info: &vk::QueryPoolCreateInfo<'_>,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) -> VkResult<vk::QueryPool> {
        let mut pool = mem::MaybeUninit::uninit();
        (self.device_fn_1_0.create_query_pool)(
            self.handle(),
            create_info,
            allocation_callbacks.as_raw_ptr(),
            pool.as_mut_ptr(),
        )
        .assume_init_on_success(pool)
    }
2587
    /// Creates a new image object.
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkCreateImage.html>
    #[inline]
    pub unsafe fn create_image(
        &self,
        create_info: &vk::ImageCreateInfo<'_>,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) -> VkResult<vk::Image> {
        let mut image = mem::MaybeUninit::uninit();
        (self.device_fn_1_0.create_image)(
            self.handle(),
            create_info,
            allocation_callbacks.as_raw_ptr(),
            image.as_mut_ptr(),
        )
        .assume_init_on_success(image)
    }
2604
    /// Queries the memory layout of an image subresource; infallible, so the
    /// output is unconditionally assumed initialized.
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkGetImageSubresourceLayout.html>
    #[inline]
    pub unsafe fn get_image_subresource_layout(
        &self,
        image: vk::Image,
        subresource: vk::ImageSubresource,
    ) -> vk::SubresourceLayout {
        let mut layout = mem::MaybeUninit::uninit();
        (self.device_fn_1_0.get_image_subresource_layout)(
            self.handle(),
            image,
            &subresource,
            layout.as_mut_ptr(),
        );
        layout.assume_init()
    }
2621
    /// Queries the memory requirements of an image; infallible.
    ///
    /// <https://registry.khronos.org/vulkan/specs/1.3-extensions/man/html/vkGetImageMemoryRequirements.html>
    #[inline]
    pub unsafe fn get_image_memory_requirements(&self, image: vk::Image) -> vk::MemoryRequirements {
        let mut mem_req = mem::MaybeUninit::uninit();
        (self.device_fn_1_0.get_image_memory_requirements)(
            self.handle(),
            image,
            mem_req.as_mut_ptr(),
        );
        mem_req.assume_init()
    }
2633
2634 #[inline]
2636 pub unsafe fn get_buffer_memory_requirements(
2637 &self,
2638 buffer: vk::Buffer,
2639 ) -> vk::MemoryRequirements {
2640 let mut mem_req = mem::MaybeUninit::uninit();
2641 (self.device_fn_1_0.get_buffer_memory_requirements)(
2642 self.handle(),
2643 buffer,
2644 mem_req.as_mut_ptr(),
2645 );
2646 mem_req.assume_init()
2647 }
2648
2649 #[inline]
2651 pub unsafe fn allocate_memory(
2652 &self,
2653 allocate_info: &vk::MemoryAllocateInfo<'_>,
2654 allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
2655 ) -> VkResult<vk::DeviceMemory> {
2656 let mut memory = mem::MaybeUninit::uninit();
2657 (self.device_fn_1_0.allocate_memory)(
2658 self.handle(),
2659 allocate_info,
2660 allocation_callbacks.as_raw_ptr(),
2661 memory.as_mut_ptr(),
2662 )
2663 .assume_init_on_success(memory)
2664 }
2665
2666 #[inline]
2668 pub unsafe fn create_shader_module(
2669 &self,
2670 create_info: &vk::ShaderModuleCreateInfo<'_>,
2671 allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
2672 ) -> VkResult<vk::ShaderModule> {
2673 let mut shader = mem::MaybeUninit::uninit();
2674 (self.device_fn_1_0.create_shader_module)(
2675 self.handle(),
2676 create_info,
2677 allocation_callbacks.as_raw_ptr(),
2678 shader.as_mut_ptr(),
2679 )
2680 .assume_init_on_success(shader)
2681 }
2682
2683 #[inline]
2685 pub unsafe fn create_fence(
2686 &self,
2687 create_info: &vk::FenceCreateInfo<'_>,
2688 allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
2689 ) -> VkResult<vk::Fence> {
2690 let mut fence = mem::MaybeUninit::uninit();
2691 (self.device_fn_1_0.create_fence)(
2692 self.handle(),
2693 create_info,
2694 allocation_callbacks.as_raw_ptr(),
2695 fence.as_mut_ptr(),
2696 )
2697 .assume_init_on_success(fence)
2698 }
2699
2700 #[inline]
2702 pub unsafe fn bind_buffer_memory(
2703 &self,
2704 buffer: vk::Buffer,
2705 device_memory: vk::DeviceMemory,
2706 offset: vk::DeviceSize,
2707 ) -> VkResult<()> {
2708 (self.device_fn_1_0.bind_buffer_memory)(self.handle(), buffer, device_memory, offset)
2709 .result()
2710 }
2711
2712 #[inline]
2714 pub unsafe fn bind_image_memory(
2715 &self,
2716 image: vk::Image,
2717 device_memory: vk::DeviceMemory,
2718 offset: vk::DeviceSize,
2719 ) -> VkResult<()> {
2720 (self.device_fn_1_0.bind_image_memory)(self.handle(), image, device_memory, offset).result()
2721 }
2722
2723 #[inline]
2725 pub unsafe fn get_render_area_granularity(&self, render_pass: vk::RenderPass) -> vk::Extent2D {
2726 let mut granularity = mem::MaybeUninit::uninit();
2727 (self.device_fn_1_0.get_render_area_granularity)(
2728 self.handle(),
2729 render_pass,
2730 granularity.as_mut_ptr(),
2731 );
2732 granularity.assume_init()
2733 }
2734
2735 #[inline]
2737 pub unsafe fn get_device_memory_commitment(&self, memory: vk::DeviceMemory) -> vk::DeviceSize {
2738 let mut committed_memory_in_bytes = mem::MaybeUninit::uninit();
2739 (self.device_fn_1_0.get_device_memory_commitment)(
2740 self.handle(),
2741 memory,
2742 committed_memory_in_bytes.as_mut_ptr(),
2743 );
2744 committed_memory_in_bytes.assume_init()
2745 }
2746
2747 #[inline]
2749 pub unsafe fn get_image_sparse_memory_requirements(
2750 &self,
2751 image: vk::Image,
2752 ) -> Vec<vk::SparseImageMemoryRequirements> {
2753 read_into_uninitialized_vector(|count, data| {
2754 (self.device_fn_1_0.get_image_sparse_memory_requirements)(
2755 self.handle(),
2756 image,
2757 count,
2758 data,
2759 );
2760 vk::Result::SUCCESS
2761 })
2762 .unwrap()
2764 }
2765}