use crate::prelude::*;
use crate::vk;
use crate::RawPtr;
use alloc::vec::Vec;
use core::mem;

impl crate::khr::acceleration_structure::Device {
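    /// Usage sketch (illustrative, not from the upstream docs): creates a bottom-level
    /// acceleration structure backed by an existing `vk::Buffer`. The `accel_device`
    /// loader, `buffer`, and `size` values are assumed to be set up by the caller.
    ///
    /// ```no_run
    /// # use ash::{khr, prelude::VkResult, vk};
    /// # unsafe fn sketch(
    /// #     accel_device: &khr::acceleration_structure::Device,
    /// #     buffer: vk::Buffer,
    /// #     size: vk::DeviceSize,
    /// # ) -> VkResult<vk::AccelerationStructureKHR> {
    /// let create_info = vk::AccelerationStructureCreateInfoKHR::default()
    ///     .buffer(buffer)
    ///     .offset(0)
    ///     .size(size)
    ///     .ty(vk::AccelerationStructureTypeKHR::BOTTOM_LEVEL);
    /// accel_device.create_acceleration_structure(&create_info, None)
    /// # }
    /// ```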
    #[inline]
    pub unsafe fn create_acceleration_structure(
        &self,
        create_info: &vk::AccelerationStructureCreateInfoKHR<'_>,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) -> VkResult<vk::AccelerationStructureKHR> {
        let mut accel_struct = mem::MaybeUninit::uninit();
        (self.fp.create_acceleration_structure_khr)(
            self.handle,
            create_info,
            allocation_callbacks.as_raw_ptr(),
            accel_struct.as_mut_ptr(),
        )
        .assume_init_on_success(accel_struct)
    }

    #[inline]
    pub unsafe fn destroy_acceleration_structure(
        &self,
        accel_struct: vk::AccelerationStructureKHR,
        allocation_callbacks: Option<&vk::AllocationCallbacks<'_>>,
    ) {
        (self.fp.destroy_acceleration_structure_khr)(
            self.handle,
            accel_struct,
            allocation_callbacks.as_raw_ptr(),
        );
    }

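    /// Usage sketch (illustrative, not from the upstream docs): records a build of one
    /// triangle-geometry BLAS. `geometry`, `accel`, `scratch_address`, and
    /// `primitive_count` are assumed to be prepared by the caller. The outer slice of
    /// `build_range_infos` is matched one-to-one with `infos`, and each inner slice
    /// must contain one entry per geometry of its info.
    ///
    /// ```no_run
    /// # use ash::{khr, vk};
    /// # unsafe fn sketch(
    /// #     accel_device: &khr::acceleration_structure::Device,
    /// #     command_buffer: vk::CommandBuffer,
    /// #     geometry: vk::AccelerationStructureGeometryKHR<'_>,
    /// #     accel: vk::AccelerationStructureKHR,
    /// #     scratch_address: vk::DeviceAddress,
    /// #     primitive_count: u32,
    /// # ) {
    /// let geometries = [geometry];
    /// let build_info = vk::AccelerationStructureBuildGeometryInfoKHR::default()
    ///     .ty(vk::AccelerationStructureTypeKHR::BOTTOM_LEVEL)
    ///     .mode(vk::BuildAccelerationStructureModeKHR::BUILD)
    ///     .dst_acceleration_structure(accel)
    ///     .geometries(&geometries)
    ///     .scratch_data(vk::DeviceOrHostAddressKHR {
    ///         device_address: scratch_address,
    ///     });
    /// // One range-info slice per build info, one entry per geometry.
    /// let range_info = vk::AccelerationStructureBuildRangeInfoKHR::default()
    ///     .primitive_count(primitive_count);
    /// accel_device.cmd_build_acceleration_structures(
    ///     command_buffer,
    ///     &[build_info],
    ///     &[&[range_info]],
    /// );
    /// # }
    /// ```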
    #[inline]
    pub unsafe fn cmd_build_acceleration_structures(
        &self,
        command_buffer: vk::CommandBuffer,
        infos: &[vk::AccelerationStructureBuildGeometryInfoKHR<'_>],
        build_range_infos: &[&[vk::AccelerationStructureBuildRangeInfoKHR]],
    ) {
        assert_eq!(infos.len(), build_range_infos.len());

        let build_range_infos = build_range_infos
            .iter()
            .zip(infos.iter())
            .map(|(range_info, info)| {
                assert_eq!(range_info.len(), info.geometry_count as usize);
                range_info.as_ptr()
            })
            .collect::<Vec<_>>();

        (self.fp.cmd_build_acceleration_structures_khr)(
            command_buffer,
            infos.len() as _,
            infos.as_ptr(),
            build_range_infos.as_ptr(),
        );
    }

    #[inline]
    pub unsafe fn cmd_build_acceleration_structures_indirect(
        &self,
        command_buffer: vk::CommandBuffer,
        infos: &[vk::AccelerationStructureBuildGeometryInfoKHR<'_>],
        indirect_device_addresses: &[vk::DeviceAddress],
        indirect_strides: &[u32],
        max_primitive_counts: &[&[u32]],
    ) {
        assert_eq!(infos.len(), indirect_device_addresses.len());
        assert_eq!(infos.len(), indirect_strides.len());
        assert_eq!(infos.len(), max_primitive_counts.len());

        let max_primitive_counts = max_primitive_counts
            .iter()
            .zip(infos.iter())
            .map(|(cnt, info)| {
                assert_eq!(cnt.len(), info.geometry_count as usize);
                cnt.as_ptr()
            })
            .collect::<Vec<_>>();

        (self.fp.cmd_build_acceleration_structures_indirect_khr)(
            command_buffer,
            infos.len() as _,
            infos.as_ptr(),
            indirect_device_addresses.as_ptr(),
            indirect_strides.as_ptr(),
            max_primitive_counts.as_ptr(),
        );
    }

    #[inline]
    pub unsafe fn build_acceleration_structures(
        &self,
        deferred_operation: vk::DeferredOperationKHR,
        infos: &[vk::AccelerationStructureBuildGeometryInfoKHR<'_>],
        build_range_infos: &[&[vk::AccelerationStructureBuildRangeInfoKHR]],
    ) -> VkResult<()> {
        assert_eq!(infos.len(), build_range_infos.len());

        let build_range_infos = build_range_infos
            .iter()
            .zip(infos.iter())
            .map(|(range_info, info)| {
                assert_eq!(range_info.len(), info.geometry_count as usize);
                range_info.as_ptr()
            })
            .collect::<Vec<_>>();

        (self.fp.build_acceleration_structures_khr)(
            self.handle,
            deferred_operation,
            infos.len() as _,
            infos.as_ptr(),
            build_range_infos.as_ptr(),
        )
        .result()
    }

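    /// Usage sketch (illustrative, not from the upstream docs): compacts `src` into
    /// `dst` on the host without a deferred operation. Host copies additionally
    /// require the `accelerationStructureHostCommands` feature; `src`, `dst`, and
    /// `accel_device` are assumed to be prepared by the caller.
    ///
    /// ```no_run
    /// # use ash::{khr, prelude::VkResult, vk};
    /// # unsafe fn sketch(
    /// #     accel_device: &khr::acceleration_structure::Device,
    /// #     src: vk::AccelerationStructureKHR,
    /// #     dst: vk::AccelerationStructureKHR,
    /// # ) -> VkResult<()> {
    /// let info = vk::CopyAccelerationStructureInfoKHR::default()
    ///     .src(src)
    ///     .dst(dst)
    ///     .mode(vk::CopyAccelerationStructureModeKHR::COMPACT);
    /// accel_device.copy_acceleration_structure(vk::DeferredOperationKHR::null(), &info)
    /// # }
    /// ```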
    #[inline]
    pub unsafe fn copy_acceleration_structure(
        &self,
        deferred_operation: vk::DeferredOperationKHR,
        info: &vk::CopyAccelerationStructureInfoKHR<'_>,
    ) -> VkResult<()> {
        (self.fp.copy_acceleration_structure_khr)(self.handle, deferred_operation, info).result()
    }

    #[inline]
    pub unsafe fn copy_acceleration_structure_to_memory(
        &self,
        deferred_operation: vk::DeferredOperationKHR,
        info: &vk::CopyAccelerationStructureToMemoryInfoKHR<'_>,
    ) -> VkResult<()> {
        (self.fp.copy_acceleration_structure_to_memory_khr)(self.handle, deferred_operation, info)
            .result()
    }

    #[inline]
    pub unsafe fn copy_memory_to_acceleration_structure(
        &self,
        deferred_operation: vk::DeferredOperationKHR,
        info: &vk::CopyMemoryToAccelerationStructureInfoKHR<'_>,
    ) -> VkResult<()> {
        (self.fp.copy_memory_to_acceleration_structure_khr)(self.handle, deferred_operation, info)
            .result()
    }

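    /// Usage sketch (illustrative, not from the upstream docs): reads the compacted
    /// size of a single acceleration structure on the host. This path assumes the
    /// `accelerationStructureHostCommands` feature is enabled and that `accel` was
    /// built with `ALLOW_COMPACTION`; `data.len()` and `stride` are both in bytes.
    ///
    /// ```no_run
    /// # use ash::{khr, prelude::VkResult, vk};
    /// # unsafe fn sketch(
    /// #     accel_device: &khr::acceleration_structure::Device,
    /// #     accel: vk::AccelerationStructureKHR,
    /// # ) -> VkResult<vk::DeviceSize> {
    /// let mut data = [0u8; core::mem::size_of::<vk::DeviceSize>()];
    /// accel_device.write_acceleration_structures_properties(
    ///     &[accel],
    ///     vk::QueryType::ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR,
    ///     &mut data,
    ///     core::mem::size_of::<vk::DeviceSize>(),
    /// )?;
    /// Ok(vk::DeviceSize::from_ne_bytes(data))
    /// # }
    /// ```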
    #[inline]
    pub unsafe fn write_acceleration_structures_properties(
        &self,
        acceleration_structures: &[vk::AccelerationStructureKHR],
        query_type: vk::QueryType,
        data: &mut [u8],
        stride: usize,
    ) -> VkResult<()> {
        (self.fp.write_acceleration_structures_properties_khr)(
            self.handle,
            acceleration_structures.len() as _,
            acceleration_structures.as_ptr(),
            query_type,
            data.len(),
            data.as_mut_ptr().cast(),
            stride,
        )
        .result()
    }

    #[inline]
    pub unsafe fn cmd_copy_acceleration_structure(
        &self,
        command_buffer: vk::CommandBuffer,
        info: &vk::CopyAccelerationStructureInfoKHR<'_>,
    ) {
        (self.fp.cmd_copy_acceleration_structure_khr)(command_buffer, info);
    }

    #[inline]
    pub unsafe fn cmd_copy_acceleration_structure_to_memory(
        &self,
        command_buffer: vk::CommandBuffer,
        info: &vk::CopyAccelerationStructureToMemoryInfoKHR<'_>,
    ) {
        (self.fp.cmd_copy_acceleration_structure_to_memory_khr)(command_buffer, info);
    }

    #[inline]
    pub unsafe fn cmd_copy_memory_to_acceleration_structure(
        &self,
        command_buffer: vk::CommandBuffer,
        info: &vk::CopyMemoryToAccelerationStructureInfoKHR<'_>,
    ) {
        (self.fp.cmd_copy_memory_to_acceleration_structure_khr)(command_buffer, info);
    }

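    /// Usage sketch (illustrative, not from the upstream docs): fetches the device
    /// address of a BLAS, e.g. to store in the `acceleration_structure_reference`
    /// field of a `vk::AccelerationStructureInstanceKHR` when building a TLAS.
    ///
    /// ```no_run
    /// # use ash::{khr, vk};
    /// # unsafe fn sketch(
    /// #     accel_device: &khr::acceleration_structure::Device,
    /// #     blas: vk::AccelerationStructureKHR,
    /// # ) -> vk::DeviceAddress {
    /// let info = vk::AccelerationStructureDeviceAddressInfoKHR::default()
    ///     .acceleration_structure(blas);
    /// accel_device.get_acceleration_structure_device_address(&info)
    /// # }
    /// ```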
    #[inline]
    pub unsafe fn get_acceleration_structure_device_address(
        &self,
        info: &vk::AccelerationStructureDeviceAddressInfoKHR<'_>,
    ) -> vk::DeviceAddress {
        (self.fp.get_acceleration_structure_device_address_khr)(self.handle, info)
    }

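    /// Usage sketch (illustrative, not from the upstream docs): queries the compacted
    /// size on the device timeline. `query_pool` is assumed to have been created with
    /// `vk::QueryType::ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR`, and the query is
    /// reset before it is written.
    ///
    /// ```no_run
    /// # use ash::{khr, vk};
    /// # unsafe fn sketch(
    /// #     device: &ash::Device,
    /// #     accel_device: &khr::acceleration_structure::Device,
    /// #     command_buffer: vk::CommandBuffer,
    /// #     query_pool: vk::QueryPool,
    /// #     accel: vk::AccelerationStructureKHR,
    /// # ) {
    /// device.cmd_reset_query_pool(command_buffer, query_pool, 0, 1);
    /// accel_device.cmd_write_acceleration_structures_properties(
    ///     command_buffer,
    ///     &[accel],
    ///     vk::QueryType::ACCELERATION_STRUCTURE_COMPACTED_SIZE_KHR,
    ///     query_pool,
    ///     0,
    /// );
    /// # }
    /// ```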
    #[inline]
    pub unsafe fn cmd_write_acceleration_structures_properties(
        &self,
        command_buffer: vk::CommandBuffer,
        structures: &[vk::AccelerationStructureKHR],
        query_type: vk::QueryType,
        query_pool: vk::QueryPool,
        first_query: u32,
    ) {
        (self.fp.cmd_write_acceleration_structures_properties_khr)(
            command_buffer,
            structures.len() as _,
            structures.as_ptr(),
            query_type,
            query_pool,
            first_query,
        );
    }

    #[inline]
    pub unsafe fn get_device_acceleration_structure_compatibility(
        &self,
        version: &vk::AccelerationStructureVersionInfoKHR<'_>,
    ) -> vk::AccelerationStructureCompatibilityKHR {
        let mut compatibility = mem::MaybeUninit::uninit();
        (self.fp.get_device_acceleration_structure_compatibility_khr)(
            self.handle,
            version,
            compatibility.as_mut_ptr(),
        );
        compatibility.assume_init()
    }

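    /// Usage sketch (illustrative, not from the upstream docs): queries the sizes
    /// needed for a device build before allocating the backing and scratch buffers.
    /// `build_info` is assumed to already reference its geometries, and
    /// `max_primitive_counts` must contain one entry per geometry.
    ///
    /// ```no_run
    /// # use ash::{khr, vk};
    /// # unsafe fn sketch(
    /// #     accel_device: &khr::acceleration_structure::Device,
    /// #     build_info: &vk::AccelerationStructureBuildGeometryInfoKHR<'_>,
    /// #     primitive_count: u32,
    /// # ) -> (vk::DeviceSize, vk::DeviceSize) {
    /// let mut size_info = vk::AccelerationStructureBuildSizesInfoKHR::default();
    /// accel_device.get_acceleration_structure_build_sizes(
    ///     vk::AccelerationStructureBuildTypeKHR::DEVICE,
    ///     build_info,
    ///     &[primitive_count],
    ///     &mut size_info,
    /// );
    /// // The backing buffer needs `acceleration_structure_size` bytes; the scratch
    /// // buffer needs `build_scratch_size` bytes for a fresh build.
    /// (
    ///     size_info.acceleration_structure_size,
    ///     size_info.build_scratch_size,
    /// )
    /// # }
    /// ```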
    #[inline]
    pub unsafe fn get_acceleration_structure_build_sizes(
        &self,
        build_type: vk::AccelerationStructureBuildTypeKHR,
        build_info: &vk::AccelerationStructureBuildGeometryInfoKHR<'_>,
        max_primitive_counts: &[u32],
        size_info: &mut vk::AccelerationStructureBuildSizesInfoKHR<'_>,
    ) {
        assert_eq!(max_primitive_counts.len(), build_info.geometry_count as _);

        (self.fp.get_acceleration_structure_build_sizes_khr)(
            self.handle,
            build_type,
            build_info,
            max_primitive_counts.as_ptr(),
            size_info,
        )
    }
}