1use crate::{
2 api,
3 dispatch::{self, BufferMappedRangeInterface, InterfaceTypes},
4 BindingResource, BufferBinding, BufferDescriptor, CompilationInfo, CompilationMessage,
5 CompilationMessageType, ErrorSource, Features, Label, LoadOp, MapMode, Operations,
6 ShaderSource, SurfaceTargetUnsafe, TextureDescriptor,
7};
8
9use arrayvec::ArrayVec;
10use parking_lot::Mutex;
11use smallvec::SmallVec;
12use std::{
13 borrow::Cow::Borrowed, error::Error, fmt, future::ready, ops::Range, pin::Pin, ptr::NonNull,
14 slice, sync::Arc,
15};
16use wgc::{command::bundle_ffi::*, error::ContextErrorSource, pipeline::CreateShaderModuleError};
17use wgt::WasmNotSendSync;
18
/// The wgpu-core-backed context: a cheaply clonable handle to the shared
/// `wgc::global::Global` that owns all backend state. Equality/ordering are
/// by `Arc` address (see the `impl_eq_ord_hash_arc_address!` invocation).
#[derive(Clone)]
pub struct ContextWgpuCore(Arc<wgc::global::Global>);
21
impl Drop for ContextWgpuCore {
    fn drop(&mut self) {
        // Empty in this snapshot: the inner `Arc<Global>` tears itself down
        // once the last clone is released.
        // NOTE(review): the original line numbering suggests a statement was
        // removed here by extraction -- confirm against upstream history.
    }
}
27
impl fmt::Debug for ContextWgpuCore {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Render a fixed tag; the inner global carries no useful Debug output.
        f.debug_struct("ContextWgpuCore")
            .field("type", &"Native")
            .finish()
    }
}
35
impl ContextWgpuCore {
    /// Wraps a raw HAL instance of backend `A` in a new context.
    ///
    /// # Safety
    /// Caller must uphold the requirements of
    /// `wgc::global::Global::from_hal_instance` for `hal_instance`.
    pub unsafe fn from_hal_instance<A: wgc::hal_api::HalApi>(hal_instance: A::Instance) -> Self {
        Self(unsafe {
            Arc::new(wgc::global::Global::from_hal_instance::<A>(
                "wgpu",
                hal_instance,
            ))
        })
    }

    /// Returns the backend-specific HAL instance, or `None` if the global was
    /// not created for backend `A`.
    ///
    /// # Safety
    /// See `wgc::global::Global::instance_as_hal`.
    pub unsafe fn instance_as_hal<A: wgc::hal_api::HalApi>(&self) -> Option<&A::Instance> {
        unsafe { self.0.instance_as_hal::<A>() }
    }

    /// Builds a context from an already-constructed wgpu-core instance.
    ///
    /// # Safety
    /// See `wgc::global::Global::from_instance`.
    pub unsafe fn from_core_instance(core_instance: wgc::instance::Instance) -> Self {
        Self(unsafe { Arc::new(wgc::global::Global::from_instance(core_instance)) })
    }

    /// Lists adapter ids for the requested backends (native targets only).
    #[cfg(native)]
    pub fn enumerate_adapters(&self, backends: wgt::Backends) -> Vec<wgc::id::AdapterId> {
        self.0.enumerate_adapters(backends)
    }

    /// Registers an adapter exposed by a HAL backend and returns its core id.
    ///
    /// # Safety
    /// See `wgc::global::Global::create_adapter_from_hal`.
    pub unsafe fn create_adapter_from_hal<A: wgc::hal_api::HalApi>(
        &self,
        hal_adapter: hal::ExposedAdapter<A>,
    ) -> wgc::id::AdapterId {
        unsafe { self.0.create_adapter_from_hal(hal_adapter.into(), None) }
    }

    /// Runs `hal_adapter_callback` with the raw HAL adapter (or `None` on
    /// backend mismatch).
    ///
    /// # Safety
    /// See `wgc::global::Global::adapter_as_hal`.
    pub unsafe fn adapter_as_hal<
        A: wgc::hal_api::HalApi,
        F: FnOnce(Option<&A::Adapter>) -> R,
        R,
    >(
        &self,
        adapter: &CoreAdapter,
        hal_adapter_callback: F,
    ) -> R {
        unsafe {
            self.0
                .adapter_as_hal::<A, F, R>(adapter.id, hal_adapter_callback)
        }
    }

    /// Runs `hal_buffer_callback` with the raw HAL buffer (or `None` on
    /// backend mismatch).
    ///
    /// # Safety
    /// See `wgc::global::Global::buffer_as_hal`.
    pub unsafe fn buffer_as_hal<A: wgc::hal_api::HalApi, F: FnOnce(Option<&A::Buffer>) -> R, R>(
        &self,
        buffer: &CoreBuffer,
        hal_buffer_callback: F,
    ) -> R {
        unsafe {
            self.0
                .buffer_as_hal::<A, F, R>(buffer.id, hal_buffer_callback)
        }
    }

    /// Creates a device/queue pair from an already-open HAL device.
    ///
    /// `trace_dir` is currently ignored: API tracing was removed temporarily
    /// (see https://github.com/gfx-rs/wgpu/issues/5974); passing `Some` only
    /// logs an error.
    ///
    /// # Safety
    /// See `wgc::global::Global::create_device_from_hal`.
    pub unsafe fn create_device_from_hal<A: wgc::hal_api::HalApi>(
        &self,
        adapter: &CoreAdapter,
        hal_device: hal::OpenDevice<A>,
        desc: &crate::DeviceDescriptor<'_>,
        trace_dir: Option<&std::path::Path>,
    ) -> Result<(CoreDevice, CoreQueue), crate::RequestDeviceError> {
        if trace_dir.is_some() {
            log::error!("Feature 'trace' has been removed temporarily, see https://github.com/gfx-rs/wgpu/issues/5974");
        }
        let (device_id, queue_id) = unsafe {
            self.0.create_device_from_hal(
                adapter.id,
                hal_device.into(),
                &desc.map_label(|l| l.map(Borrowed)),
                None,
                None,
                None,
            )
        }?;
        // Device and queue share one error sink, so errors from either go
        // through the same scope stack / uncaptured handler.
        let error_sink = Arc::new(Mutex::new(ErrorSinkRaw::new()));
        let device = CoreDevice {
            context: self.clone(),
            id: device_id,
            error_sink: error_sink.clone(),
            features: desc.required_features,
        };
        let queue = CoreQueue {
            context: self.clone(),
            id: queue_id,
            error_sink,
        };
        Ok((device, queue))
    }

    /// Wraps a raw HAL texture in a core texture owned by `device`.
    /// Creation errors are routed into the device's error sink rather than
    /// returned; the handle is produced either way.
    ///
    /// # Safety
    /// See `wgc::global::Global::create_texture_from_hal`.
    pub unsafe fn create_texture_from_hal<A: wgc::hal_api::HalApi>(
        &self,
        hal_texture: A::Texture,
        device: &CoreDevice,
        desc: &TextureDescriptor<'_>,
    ) -> CoreTexture {
        let descriptor = desc.map_label_and_view_formats(|l| l.map(Borrowed), |v| v.to_vec());
        let (id, error) = unsafe {
            self.0
                .create_texture_from_hal(Box::new(hal_texture), device.id, &descriptor, None)
        };
        if let Some(cause) = error {
            self.handle_error(
                &device.error_sink,
                cause,
                desc.label,
                "Device::create_texture_from_hal",
            );
        }
        CoreTexture {
            context: self.clone(),
            id,
            error_sink: Arc::clone(&device.error_sink),
        }
    }

    /// Wraps a raw HAL buffer in a core buffer owned by `device`.
    /// Errors are reported through the device's error sink, like
    /// `create_texture_from_hal`.
    ///
    /// # Safety
    /// See `wgc::global::Global::create_buffer_from_hal`.
    pub unsafe fn create_buffer_from_hal<A: wgc::hal_api::HalApi>(
        &self,
        hal_buffer: A::Buffer,
        device: &CoreDevice,
        desc: &BufferDescriptor<'_>,
    ) -> CoreBuffer {
        let (id, error) = unsafe {
            self.0.create_buffer_from_hal::<A>(
                hal_buffer,
                device.id,
                &desc.map_label(|l| l.map(Borrowed)),
                None,
            )
        };
        if let Some(cause) = error {
            self.handle_error(
                &device.error_sink,
                cause,
                desc.label,
                "Device::create_buffer_from_hal",
            );
        }
        CoreBuffer {
            context: self.clone(),
            id,
            error_sink: Arc::clone(&device.error_sink),
        }
    }

    /// Runs `hal_device_callback` with the raw HAL device (or `None` on
    /// backend mismatch).
    ///
    /// # Safety
    /// See `wgc::global::Global::device_as_hal`.
    pub unsafe fn device_as_hal<A: wgc::hal_api::HalApi, F: FnOnce(Option<&A::Device>) -> R, R>(
        &self,
        device: &CoreDevice,
        hal_device_callback: F,
    ) -> R {
        unsafe {
            self.0
                .device_as_hal::<A, F, R>(device.id, hal_device_callback)
        }
    }

    /// Runs `hal_surface_callback` with the raw HAL surface (or `None` on
    /// backend mismatch).
    ///
    /// # Safety
    /// See `wgc::global::Global::surface_as_hal`.
    pub unsafe fn surface_as_hal<
        A: wgc::hal_api::HalApi,
        F: FnOnce(Option<&A::Surface>) -> R,
        R,
    >(
        &self,
        surface: &CoreSurface,
        hal_surface_callback: F,
    ) -> R {
        unsafe {
            self.0
                .surface_as_hal::<A, F, R>(surface.id, hal_surface_callback)
        }
    }

    /// Runs `hal_texture_callback` with the raw HAL texture (or `None` on
    /// backend mismatch).
    ///
    /// # Safety
    /// See `wgc::global::Global::texture_as_hal`.
    pub unsafe fn texture_as_hal<
        A: wgc::hal_api::HalApi,
        F: FnOnce(Option<&A::Texture>) -> R,
        R,
    >(
        &self,
        texture: &CoreTexture,
        hal_texture_callback: F,
    ) -> R {
        unsafe {
            self.0
                .texture_as_hal::<A, F, R>(texture.id, hal_texture_callback)
        }
    }

    /// Runs `hal_texture_view_callback` with the raw HAL texture view (or
    /// `None` on backend mismatch).
    ///
    /// # Safety
    /// See `wgc::global::Global::texture_view_as_hal`.
    pub unsafe fn texture_view_as_hal<
        A: wgc::hal_api::HalApi,
        F: FnOnce(Option<&A::TextureView>) -> R,
        R,
    >(
        &self,
        texture_view: &CoreTextureView,
        hal_texture_view_callback: F,
    ) -> R {
        unsafe {
            self.0
                .texture_view_as_hal::<A, F, R>(texture_view.id, hal_texture_view_callback)
        }
    }

    /// Runs `hal_command_encoder_callback` with mutable access to the raw HAL
    /// command encoder (or `None` on backend mismatch).
    ///
    /// # Safety
    /// See `wgc::global::Global::command_encoder_as_hal_mut`.
    pub unsafe fn command_encoder_as_hal_mut<
        A: wgc::hal_api::HalApi,
        F: FnOnce(Option<&mut A::CommandEncoder>) -> R,
        R,
    >(
        &self,
        command_encoder: &CoreCommandEncoder,
        hal_command_encoder_callback: F,
    ) -> R {
        unsafe {
            self.0.command_encoder_as_hal_mut::<A, F, R>(
                command_encoder.id,
                hal_command_encoder_callback,
            )
        }
    }

    /// Snapshot of resource counts across the whole global, for diagnostics.
    pub fn generate_report(&self) -> wgc::global::GlobalReport {
        self.0.generate_report()
    }

    /// Shared slow path for error reporting: wraps `source` in a
    /// `ContextError`, classifies it, and hands it to the sink.
    ///
    /// `#[cold]`/`#[inline(never)]` keep this rare path out of the hot
    /// callers' code; `#[track_caller]` preserves the user call site for the
    /// panic in `default_error_handler`.
    #[cold]
    #[track_caller]
    #[inline(never)]
    fn handle_error_inner(
        &self,
        sink_mutex: &Mutex<ErrorSinkRaw>,
        source: ContextErrorSource,
        label: Label<'_>,
        fn_ident: &'static str,
    ) {
        let source_error: ErrorSource = Box::new(wgc::error::ContextError {
            fn_ident,
            source,
            label: label.unwrap_or_default().to_string(),
        });
        let mut sink = sink_mutex.lock();
        // Walk the error's source chain: any DeviceError::OutOfMemory anywhere
        // in the chain classifies the whole error as OOM; otherwise it is
        // reported as a validation error with a formatted description.
        let mut source_opt: Option<&(dyn Error + 'static)> = Some(&*source_error);
        let error = loop {
            if let Some(source) = source_opt {
                if let Some(wgc::device::DeviceError::OutOfMemory) =
                    source.downcast_ref::<wgc::device::DeviceError>()
                {
                    break crate::Error::OutOfMemory {
                        source: source_error,
                    };
                }
                source_opt = source.source();
            } else {
                break crate::Error::Validation {
                    description: self.format_error(&*source_error),
                    source: source_error,
                };
            }
        };
        sink.handle_error(error);
    }

    /// Reports an error with an associated resource label.
    #[inline]
    #[track_caller]
    fn handle_error(
        &self,
        sink_mutex: &Mutex<ErrorSinkRaw>,
        source: impl Error + WasmNotSendSync + 'static,
        label: Label<'_>,
        fn_ident: &'static str,
    ) {
        self.handle_error_inner(sink_mutex, Box::new(source), label, fn_ident)
    }

    /// Reports an error for an operation that has no label.
    #[inline]
    #[track_caller]
    fn handle_error_nolabel(
        &self,
        sink_mutex: &Mutex<ErrorSinkRaw>,
        source: impl Error + WasmNotSendSync + 'static,
        fn_ident: &'static str,
    ) {
        self.handle_error_inner(sink_mutex, Box::new(source), None, fn_ident)
    }

    /// Panics with a formatted error; used for failures that cannot be routed
    /// through an error sink (e.g. instance-level polling).
    #[track_caller]
    #[cold]
    fn handle_error_fatal(
        &self,
        cause: impl Error + WasmNotSendSync + 'static,
        operation: &'static str,
    ) -> ! {
        panic!("Error in {operation}: {f}", f = self.format_error(&cause));
    }

    /// Pretty-prints an error and its full `source()` chain as an indented
    /// "Caused by" tree, expanding `MultiError` into its constituents.
    #[inline(never)]
    fn format_error(&self, err: &(dyn Error + 'static)) -> String {
        let mut output = String::new();
        let mut level = 1;

        // Recursive helper: prints `e` at the current indent level, then its
        // sources one level deeper.
        fn print_tree(output: &mut String, level: &mut usize, e: &(dyn Error + 'static)) {
            let mut print = |e: &(dyn Error + 'static)| {
                use std::fmt::Write;
                writeln!(output, "{}{}", " ".repeat(*level * 2), e).unwrap();

                if let Some(e) = e.source() {
                    *level += 1;
                    print_tree(output, level, e);
                    *level -= 1;
                }
            };
            if let Some(multi) = e.downcast_ref::<wgc::error::MultiError>() {
                for e in multi.errors() {
                    print(e);
                }
            } else {
                print(e);
            }
        }

        print_tree(&mut output, &mut level, err);

        format!("Validation Error\n\nCaused by:\n{output}")
    }
}
364
365fn map_buffer_copy_view(view: crate::TexelCopyBufferInfo<'_>) -> wgc::command::TexelCopyBufferInfo {
366 wgc::command::TexelCopyBufferInfo {
367 buffer: view.buffer.inner.as_core().id,
368 layout: view.layout,
369 }
370}
371
372fn map_texture_copy_view(
373 view: crate::TexelCopyTextureInfo<'_>,
374) -> wgc::command::TexelCopyTextureInfo {
375 wgc::command::TexelCopyTextureInfo {
376 texture: view.texture.inner.as_core().id,
377 mip_level: view.mip_level,
378 origin: view.origin,
379 aspect: view.aspect,
380 }
381}
382
383#[cfg_attr(
384 any(not(target_arch = "wasm32"), target_os = "emscripten"),
385 expect(unused)
386)]
387fn map_texture_tagged_copy_view(
388 view: crate::CopyExternalImageDestInfo<&api::Texture>,
389) -> wgc::command::CopyExternalImageDestInfo {
390 wgc::command::CopyExternalImageDestInfo {
391 texture: view.texture.inner.as_core().id,
392 mip_level: view.mip_level,
393 origin: view.origin,
394 aspect: view.aspect,
395 color_space: view.color_space,
396 premultiplied_alpha: view.premultiplied_alpha,
397 }
398}
399
400fn map_load_op<V: Copy>(load: &LoadOp<V>) -> LoadOp<Option<V>> {
401 match load {
402 LoadOp::Clear(clear_value) => LoadOp::Clear(Some(*clear_value)),
403 LoadOp::Load => LoadOp::Load,
404 }
405}
406
407fn map_pass_channel<V: Copy>(ops: Option<&Operations<V>>) -> wgc::command::PassChannel<Option<V>> {
408 match ops {
409 Some(&Operations { load, store }) => wgc::command::PassChannel {
410 load_op: Some(map_load_op(&load)),
411 store_op: Some(store),
412 read_only: false,
413 },
414 None => wgc::command::PassChannel {
415 load_op: None,
416 store_op: None,
417 read_only: true,
418 },
419 }
420}
421
#[derive(Debug)]
pub struct CoreSurface {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::SurfaceId,
    /// Device this surface is configured against, if any.
    /// NOTE(review): set by the configure path, which is outside this chunk --
    /// confirm.
    configured_device: Mutex<Option<wgc::id::DeviceId>>,
    /// Error sink to report surface errors into; presumably the configured
    /// device's sink -- verify against the configure path.
    error_sink: Mutex<Option<ErrorSink>>,
}
433
// Handle wrappers: each pairs a wgpu-core id with the owning context, plus --
// for objects that can report recoverable errors -- the device's shared
// error sink.

#[derive(Debug)]
pub struct CoreAdapter {
    pub(crate) context: ContextWgpuCore,
    pub(crate) id: wgc::id::AdapterId,
}

#[derive(Debug)]
pub struct CoreDevice {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::DeviceId,
    error_sink: ErrorSink,
    /// Features requested at device creation; used to gate handling of
    /// binding arrays in `create_bind_group`.
    features: Features,
}

#[derive(Debug)]
pub struct CoreBuffer {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::BufferId,
    error_sink: ErrorSink,
}

#[derive(Debug)]
pub struct CoreShaderModule {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::ShaderModuleId,
    /// Diagnostics captured at creation time, served to later queries.
    compilation_info: CompilationInfo,
}

#[derive(Debug)]
pub struct CoreBindGroupLayout {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::BindGroupLayoutId,
}

#[derive(Debug)]
pub struct CoreBindGroup {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::BindGroupId,
}

#[derive(Debug)]
pub struct CoreTexture {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::TextureId,
    error_sink: ErrorSink,
}

#[derive(Debug)]
pub struct CoreTextureView {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::TextureViewId,
}

#[derive(Debug)]
pub struct CoreSampler {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::SamplerId,
}

#[derive(Debug)]
pub struct CoreQuerySet {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::QuerySetId,
}

#[derive(Debug)]
pub struct CorePipelineLayout {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::PipelineLayoutId,
}

#[derive(Debug)]
pub struct CorePipelineCache {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::PipelineCacheId,
}

#[derive(Debug)]
pub struct CoreCommandBuffer {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::CommandBufferId,
}

#[derive(Debug)]
pub struct CoreRenderBundleEncoder {
    pub(crate) context: ContextWgpuCore,
    encoder: wgc::command::RenderBundleEncoder,
    /// Identifier used only for equality/ordering/hashing (the encoder itself
    /// has no core id until finished).
    id: crate::cmp::Identifier,
}

#[derive(Debug)]
pub struct CoreRenderBundle {
    id: wgc::id::RenderBundleId,
}

#[derive(Debug)]
pub struct CoreQueue {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::QueueId,
    /// Shared with the sibling `CoreDevice` so both report into one sink.
    error_sink: ErrorSink,
}
535
#[derive(Debug)]
pub struct CoreComputePipeline {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::ComputePipelineId,
    error_sink: ErrorSink,
}

#[derive(Debug)]
pub struct CoreRenderPipeline {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::RenderPipelineId,
    error_sink: ErrorSink,
}

#[derive(Debug)]
pub struct CoreComputePass {
    pub(crate) context: ContextWgpuCore,
    /// Core-side recording state for the in-flight pass.
    pass: wgc::command::ComputePass,
    error_sink: ErrorSink,
    /// Identifier used only for equality/ordering/hashing.
    id: crate::cmp::Identifier,
}

#[derive(Debug)]
pub struct CoreRenderPass {
    pub(crate) context: ContextWgpuCore,
    /// Core-side recording state for the in-flight pass.
    pass: wgc::command::RenderPass,
    error_sink: ErrorSink,
    /// Identifier used only for equality/ordering/hashing.
    id: crate::cmp::Identifier,
}

#[derive(Debug)]
pub struct CoreCommandEncoder {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::CommandEncoderId,
    error_sink: ErrorSink,
    /// Whether the encoder is still recording; presumably cleared on finish --
    /// the transition is outside this chunk, confirm against the encoder impl.
    open: bool,
}
573
// Ray-tracing acceleration-structure handles (bottom- and top-level).
// NOTE(review): original line numbering suggests fields were removed from
// these two structs by extraction -- confirm against upstream.

#[derive(Debug)]
pub struct CoreBlas {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::BlasId,
}

#[derive(Debug)]
pub struct CoreTlas {
    pub(crate) context: ContextWgpuCore,
    id: wgc::id::TlasId,
}

/// Per-acquired-frame bookkeeping needed to present or discard the output.
#[derive(Debug)]
pub struct CoreSurfaceOutputDetail {
    context: ContextWgpuCore,
    surface_id: wgc::id::SurfaceId,
}
593
/// Shared, mutex-guarded error sink; cloned into every object that reports
/// errors for a device.
type ErrorSink = Arc<Mutex<ErrorSinkRaw>>;
595
/// One entry in the stack of pushed error scopes.
struct ErrorScope {
    /// First error captured by this scope, if any; later matching errors are
    /// dropped (see `ErrorSinkRaw::handle_error`).
    error: Option<crate::Error>,
    /// Category of errors this scope captures.
    filter: crate::ErrorFilter,
}

/// Destination for reported errors: a scope stack, plus an optional handler
/// for errors no scope captures (falling back to `default_error_handler`).
struct ErrorSinkRaw {
    scopes: Vec<ErrorScope>,
    uncaptured_handler: Option<Box<dyn crate::UncapturedErrorHandler>>,
}
605
606impl ErrorSinkRaw {
607 fn new() -> ErrorSinkRaw {
608 ErrorSinkRaw {
609 scopes: Vec::new(),
610 uncaptured_handler: None,
611 }
612 }
613
614 #[track_caller]
615 fn handle_error(&mut self, err: crate::Error) {
616 let filter = match err {
617 crate::Error::OutOfMemory { .. } => crate::ErrorFilter::OutOfMemory,
618 crate::Error::Validation { .. } => crate::ErrorFilter::Validation,
619 crate::Error::Internal { .. } => crate::ErrorFilter::Internal,
620 };
621 match self
622 .scopes
623 .iter_mut()
624 .rev()
625 .find(|scope| scope.filter == filter)
626 {
627 Some(scope) => {
628 if scope.error.is_none() {
629 scope.error = Some(err);
630 }
631 }
632 None => {
633 if let Some(custom_handler) = self.uncaptured_handler.as_ref() {
634 (custom_handler)(err);
635 } else {
636 default_error_handler(err);
638 }
639 }
640 }
641 }
642}
643
644impl fmt::Debug for ErrorSinkRaw {
645 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
646 write!(f, "ErrorSink")
647 }
648}
649
/// Fallback for errors with no matching scope and no user-installed handler:
/// logs, then panics with the formatted error. `#[track_caller]` attributes
/// the panic to the original reporting site.
#[track_caller]
fn default_error_handler(err: crate::Error) {
    log::error!("Handling wgpu errors as fatal by default");
    panic!("wgpu error: {err}\n");
}
655
// Converts a shader-module creation error into user-visible compilation
// diagnostics.
impl From<CreateShaderModuleError> for CompilationInfo {
    fn from(value: CreateShaderModuleError) -> Self {
        match value {
            // Front-end parse/validation errors carry structured diagnostics;
            // reuse their own conversions.
            #[cfg(feature = "wgsl")]
            CreateShaderModuleError::Parsing(v) => v.into(),
            #[cfg(feature = "glsl")]
            CreateShaderModuleError::ParsingGlsl(v) => v.into(),
            #[cfg(feature = "spirv")]
            CreateShaderModuleError::ParsingSpirV(v) => v.into(),
            CreateShaderModuleError::Validation(v) => v.into(),
            // Device loss / id-generation failures are not compilation
            // problems, so they yield an empty message list.
            CreateShaderModuleError::Device(_) | CreateShaderModuleError::Generation => {
                CompilationInfo {
                    messages: Vec::new(),
                }
            }
            // Anything else becomes a single error message with no source
            // location.
            _ => CompilationInfo {
                messages: vec![CompilationMessage {
                    message: value.to_string(),
                    message_type: CompilationMessageType::Error,
                    location: None,
                }],
            },
        }
    }
}
684
/// Staging buffer handed out by `Queue::write_buffer_with`, together with its
/// mapped memory.
#[derive(Debug)]
pub struct CoreQueueWriteBuffer {
    buffer_id: wgc::id::StagingBufferId,
    mapping: CoreBufferMappedRange,
}

/// Raw pointer + length view over a mapped buffer region.
#[derive(Debug)]
pub struct CoreBufferMappedRange {
    ptr: NonNull<u8>,
    size: usize,
}
696
// SAFETY: CoreBufferMappedRange is just a pointer/length pair into a mapped
// buffer allocation. NOTE(review): soundness of sending/sharing it across
// threads rests on wgpu's buffer-mapping exclusivity rules, which are not
// visible in this chunk -- confirm against the mapping contract.
#[cfg(send_sync)]
unsafe impl Send for CoreBufferMappedRange {}
#[cfg(send_sync)]
unsafe impl Sync for CoreBufferMappedRange {}
701
impl Drop for CoreBufferMappedRange {
    fn drop(&mut self) {
        // Empty in this snapshot.
        // NOTE(review): the original line numbering suggests statements were
        // removed here by extraction -- confirm against upstream history.
    }
}
708
// Identity-based comparison/hashing for every wrapper: the context compares
// by Arc address; handles compare by their core id (or, for the render-bundle
// encoder and passes, the locally generated identifier; for staging buffers
// and mapped ranges, the mapped pointer).
crate::cmp::impl_eq_ord_hash_arc_address!(ContextWgpuCore => .0);
crate::cmp::impl_eq_ord_hash_proxy!(CoreAdapter => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreDevice => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreQueue => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreShaderModule => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreBindGroupLayout => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreBindGroup => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreTextureView => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreSampler => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreBuffer => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreTexture => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreBlas => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreTlas => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreQuerySet => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CorePipelineLayout => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreRenderPipeline => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreComputePipeline => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CorePipelineCache => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreCommandEncoder => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreComputePass => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreRenderPass => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreCommandBuffer => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreRenderBundleEncoder => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreRenderBundle => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreSurface => .id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreSurfaceOutputDetail => .surface_id);
crate::cmp::impl_eq_ord_hash_proxy!(CoreQueueWriteBuffer => .mapping.ptr);
crate::cmp::impl_eq_ord_hash_proxy!(CoreBufferMappedRange => .ptr);
737
// Binds every dispatch-layer associated type to its wgpu-core-backed
// implementation for this backend.
impl InterfaceTypes for ContextWgpuCore {
    type Instance = ContextWgpuCore;
    type Adapter = CoreAdapter;
    type Device = CoreDevice;
    type Queue = CoreQueue;
    type ShaderModule = CoreShaderModule;
    type BindGroupLayout = CoreBindGroupLayout;
    type BindGroup = CoreBindGroup;
    type TextureView = CoreTextureView;
    type Sampler = CoreSampler;
    type Buffer = CoreBuffer;
    type Texture = CoreTexture;
    type Blas = CoreBlas;
    type Tlas = CoreTlas;
    type QuerySet = CoreQuerySet;
    type PipelineLayout = CorePipelineLayout;
    type RenderPipeline = CoreRenderPipeline;
    type ComputePipeline = CoreComputePipeline;
    type PipelineCache = CorePipelineCache;
    type CommandEncoder = CoreCommandEncoder;
    type ComputePass = CoreComputePass;
    type RenderPass = CoreRenderPass;
    type CommandBuffer = CoreCommandBuffer;
    type RenderBundleEncoder = CoreRenderBundleEncoder;
    type RenderBundle = CoreRenderBundle;
    type Surface = CoreSurface;
    type SurfaceOutputDetail = CoreSurfaceOutputDetail;
    type QueueWriteBuffer = CoreQueueWriteBuffer;
    type BufferMappedRange = CoreBufferMappedRange;
}
768
impl dispatch::InstanceInterface for ContextWgpuCore {
    fn new(desc: &wgt::InstanceDescriptor) -> Self
    where
        Self: Sized,
    {
        Self(Arc::new(wgc::global::Global::new("wgpu", desc)))
    }

    /// Creates a surface for the given raw target. Backend-specific targets
    /// are only available under the matching cfg (`metal`, `dx12`).
    ///
    /// # Safety
    /// The raw handles in `target` must be valid for the surface's lifetime;
    /// see `wgc::global::Global::instance_create_surface`.
    unsafe fn create_surface(
        &self,
        target: crate::api::SurfaceTargetUnsafe,
    ) -> Result<dispatch::DispatchSurface, crate::CreateSurfaceError> {
        let id = match target {
            SurfaceTargetUnsafe::RawHandle {
                raw_display_handle,
                raw_window_handle,
            } => unsafe {
                self.0
                    .instance_create_surface(raw_display_handle, raw_window_handle, None)
            },

            #[cfg(metal)]
            SurfaceTargetUnsafe::CoreAnimationLayer(layer) => unsafe {
                self.0.instance_create_surface_metal(layer, None)
            },

            #[cfg(dx12)]
            SurfaceTargetUnsafe::CompositionVisual(visual) => unsafe {
                self.0.instance_create_surface_from_visual(visual, None)
            },

            #[cfg(dx12)]
            SurfaceTargetUnsafe::SurfaceHandle(surface_handle) => unsafe {
                self.0
                    .instance_create_surface_from_surface_handle(surface_handle, None)
            },

            #[cfg(dx12)]
            SurfaceTargetUnsafe::SwapChainPanel(swap_chain_panel) => unsafe {
                self.0
                    .instance_create_surface_from_swap_chain_panel(swap_chain_panel, None)
            },
        }?;

        // The surface starts unconfigured; device and sink are filled in when
        // it is configured.
        Ok(CoreSurface {
            context: self.clone(),
            id,
            configured_device: Mutex::default(),
            error_sink: Mutex::default(),
        }
        .into())
    }

    /// Picks an adapter. wgpu-core resolves synchronously, so the returned
    /// future is always ready; failure surfaces as `None`, not an error.
    fn request_adapter(
        &self,
        options: &crate::api::RequestAdapterOptions<'_, '_>,
    ) -> Pin<Box<dyn dispatch::RequestAdapterFuture>> {
        let id = self.0.request_adapter(
            &wgc::instance::RequestAdapterOptions {
                power_preference: options.power_preference,
                force_fallback_adapter: options.force_fallback_adapter,
                compatible_surface: options
                    .compatible_surface
                    .map(|surface| surface.inner.as_core().id),
            },
            wgt::Backends::all(),
            None,
        );
        let adapter = id.map(|id| {
            let core = CoreAdapter {
                context: self.clone(),
                id,
            };
            let generic: dispatch::DispatchAdapter = core.into();
            generic
        });
        Box::pin(ready(adapter.ok()))
    }

    /// Polls every device; returns true when all queues are empty. A polling
    /// failure is treated as fatal (panics via `handle_error_fatal`).
    fn poll_all_devices(&self, force_wait: bool) -> bool {
        match self.0.poll_all_devices(force_wait) {
            Ok(all_queue_empty) => all_queue_empty,
            Err(err) => self.handle_error_fatal(err, "Instance::poll_all_devices"),
        }
    }

    /// Folds naga's implemented WGSL language extensions into the public
    /// bitflag type; the enum currently has no variants, hence the
    /// `unreachable_code` expectation on the closure.
    #[cfg(feature = "wgsl")]
    fn wgsl_language_features(&self) -> crate::WgslLanguageFeatures {
        wgc::naga::front::wgsl::ImplementedLanguageExtension::all()
            .iter()
            .copied()
            .fold(
                crate::WgslLanguageFeatures::empty(),
                #[expect(unreachable_code)]
                |acc, wle| acc | match wle {},
            )
    }
}
867
// Mostly thin delegation to the core global, keyed by this adapter's id.
impl dispatch::AdapterInterface for CoreAdapter {
    /// Requests a device/queue pair. wgpu-core is synchronous, so the future
    /// is always ready. `trace_dir` is currently ignored: API tracing was
    /// removed temporarily (see https://github.com/gfx-rs/wgpu/issues/5974).
    fn request_device(
        &self,
        desc: &crate::DeviceDescriptor<'_>,
        trace_dir: Option<&std::path::Path>,
    ) -> Pin<Box<dyn dispatch::RequestDeviceFuture>> {
        if trace_dir.is_some() {
            log::error!("Feature 'trace' has been removed temporarily, see https://github.com/gfx-rs/wgpu/issues/5974");
        }
        let res = self.context.0.adapter_request_device(
            self.id,
            &desc.map_label(|l| l.map(Borrowed)),
            None,
            None,
            None,
        );
        let (device_id, queue_id) = match res {
            Ok(ids) => ids,
            Err(err) => {
                return Box::pin(ready(Err(err.into())));
            }
        };
        // Device and queue share one error sink, mirroring
        // `ContextWgpuCore::create_device_from_hal`.
        let error_sink = Arc::new(Mutex::new(ErrorSinkRaw::new()));
        let device = CoreDevice {
            context: self.context.clone(),
            id: device_id,
            error_sink: error_sink.clone(),
            features: desc.required_features,
        };
        let queue = CoreQueue {
            context: self.context.clone(),
            id: queue_id,
            error_sink,
        };
        Box::pin(ready(Ok((device.into(), queue.into()))))
    }

    fn is_surface_supported(&self, surface: &dispatch::DispatchSurface) -> bool {
        let surface = surface.as_core();

        self.context
            .0
            .adapter_is_surface_supported(self.id, surface.id)
    }

    fn features(&self) -> crate::Features {
        self.context.0.adapter_features(self.id)
    }

    fn limits(&self) -> crate::Limits {
        self.context.0.adapter_limits(self.id)
    }

    fn downlevel_capabilities(&self) -> crate::DownlevelCapabilities {
        self.context.0.adapter_downlevel_capabilities(self.id)
    }

    fn get_info(&self) -> crate::AdapterInfo {
        self.context.0.adapter_get_info(self.id)
    }

    fn get_texture_format_features(
        &self,
        format: crate::TextureFormat,
    ) -> crate::TextureFormatFeatures {
        self.context
            .0
            .adapter_get_texture_format_features(self.id, format)
    }

    fn get_presentation_timestamp(&self) -> crate::PresentationTimestamp {
        self.context.0.adapter_get_presentation_timestamp(self.id)
    }
}
942
943impl Drop for CoreAdapter {
944 fn drop(&mut self) {
945 self.context.0.adapter_drop(self.id)
946 }
947}
948
949impl dispatch::DeviceInterface for CoreDevice {
950 fn features(&self) -> crate::Features {
951 self.context.0.device_features(self.id)
952 }
953
954 fn limits(&self) -> crate::Limits {
955 self.context.0.device_limits(self.id)
956 }
957
958 #[cfg_attr(
960 not(any(
961 feature = "spirv",
962 feature = "glsl",
963 feature = "wgsl",
964 feature = "naga-ir"
965 )),
966 expect(unused)
967 )]
968 fn create_shader_module(
969 &self,
970 desc: crate::ShaderModuleDescriptor<'_>,
971 shader_bound_checks: wgt::ShaderRuntimeChecks,
972 ) -> dispatch::DispatchShaderModule {
973 let descriptor = wgc::pipeline::ShaderModuleDescriptor {
974 label: desc.label.map(Borrowed),
975 runtime_checks: shader_bound_checks,
976 };
977 let source = match desc.source {
978 #[cfg(feature = "spirv")]
979 ShaderSource::SpirV(ref spv) => {
980 let options = naga::front::spv::Options {
982 adjust_coordinate_space: false, strict_capabilities: true,
984 block_ctx_dump_prefix: None,
985 };
986 wgc::pipeline::ShaderModuleSource::SpirV(Borrowed(spv), options)
987 }
988 #[cfg(feature = "glsl")]
989 ShaderSource::Glsl {
990 ref shader,
991 stage,
992 defines,
993 } => {
994 let options = naga::front::glsl::Options { stage, defines };
995 wgc::pipeline::ShaderModuleSource::Glsl(Borrowed(shader), options)
996 }
997 #[cfg(feature = "wgsl")]
998 ShaderSource::Wgsl(ref code) => wgc::pipeline::ShaderModuleSource::Wgsl(Borrowed(code)),
999 #[cfg(feature = "naga-ir")]
1000 ShaderSource::Naga(module) => wgc::pipeline::ShaderModuleSource::Naga(module),
1001 ShaderSource::Dummy(_) => panic!("found `ShaderSource::Dummy`"),
1002 };
1003 let (id, error) =
1004 self.context
1005 .0
1006 .device_create_shader_module(self.id, &descriptor, source, None);
1007 let compilation_info = match error {
1008 Some(cause) => {
1009 self.context.handle_error(
1010 &self.error_sink,
1011 cause.clone(),
1012 desc.label,
1013 "Device::create_shader_module",
1014 );
1015 CompilationInfo::from(cause)
1016 }
1017 None => CompilationInfo { messages: vec![] },
1018 };
1019
1020 CoreShaderModule {
1021 context: self.context.clone(),
1022 id,
1023 compilation_info,
1024 }
1025 .into()
1026 }
1027
1028 unsafe fn create_shader_module_spirv(
1029 &self,
1030 desc: &crate::ShaderModuleDescriptorSpirV<'_>,
1031 ) -> dispatch::DispatchShaderModule {
1032 let descriptor = wgc::pipeline::ShaderModuleDescriptor {
1033 label: desc.label.map(Borrowed),
1034 runtime_checks: wgt::ShaderRuntimeChecks::unchecked(),
1037 };
1038 let (id, error) = unsafe {
1039 self.context.0.device_create_shader_module_spirv(
1040 self.id,
1041 &descriptor,
1042 Borrowed(&desc.source),
1043 None,
1044 )
1045 };
1046 let compilation_info = match error {
1047 Some(cause) => {
1048 self.context.handle_error(
1049 &self.error_sink,
1050 cause.clone(),
1051 desc.label,
1052 "Device::create_shader_module_spirv",
1053 );
1054 CompilationInfo::from(cause)
1055 }
1056 None => CompilationInfo { messages: vec![] },
1057 };
1058 CoreShaderModule {
1059 context: self.context.clone(),
1060 id,
1061 compilation_info,
1062 }
1063 .into()
1064 }
1065
1066 fn create_bind_group_layout(
1067 &self,
1068 desc: &crate::BindGroupLayoutDescriptor<'_>,
1069 ) -> dispatch::DispatchBindGroupLayout {
1070 let descriptor = wgc::binding_model::BindGroupLayoutDescriptor {
1071 label: desc.label.map(Borrowed),
1072 entries: Borrowed(desc.entries),
1073 };
1074 let (id, error) =
1075 self.context
1076 .0
1077 .device_create_bind_group_layout(self.id, &descriptor, None);
1078 if let Some(cause) = error {
1079 self.context.handle_error(
1080 &self.error_sink,
1081 cause,
1082 desc.label,
1083 "Device::create_bind_group_layout",
1084 );
1085 }
1086 CoreBindGroupLayout {
1087 context: self.context.clone(),
1088 id,
1089 }
1090 .into()
1091 }
1092
1093 fn create_bind_group(
1094 &self,
1095 desc: &crate::BindGroupDescriptor<'_>,
1096 ) -> dispatch::DispatchBindGroup {
1097 use wgc::binding_model as bm;
1098
1099 let mut arrayed_texture_views = Vec::new();
1100 let mut arrayed_samplers = Vec::new();
1101 if self.features.contains(Features::TEXTURE_BINDING_ARRAY) {
1102 for entry in desc.entries.iter() {
1104 if let BindingResource::TextureViewArray(array) = entry.resource {
1105 arrayed_texture_views.extend(array.iter().map(|view| view.inner.as_core().id));
1106 }
1107 if let BindingResource::SamplerArray(array) = entry.resource {
1108 arrayed_samplers.extend(array.iter().map(|sampler| sampler.inner.as_core().id));
1109 }
1110 }
1111 }
1112 let mut remaining_arrayed_texture_views = &arrayed_texture_views[..];
1113 let mut remaining_arrayed_samplers = &arrayed_samplers[..];
1114
1115 let mut arrayed_buffer_bindings = Vec::new();
1116 if self.features.contains(Features::BUFFER_BINDING_ARRAY) {
1117 for entry in desc.entries.iter() {
1119 if let BindingResource::BufferArray(array) = entry.resource {
1120 arrayed_buffer_bindings.extend(array.iter().map(|binding| bm::BufferBinding {
1121 buffer_id: binding.buffer.inner.as_core().id,
1122 offset: binding.offset,
1123 size: binding.size,
1124 }));
1125 }
1126 }
1127 }
1128 let mut remaining_arrayed_buffer_bindings = &arrayed_buffer_bindings[..];
1129
1130 let entries = desc
1131 .entries
1132 .iter()
1133 .map(|entry| bm::BindGroupEntry {
1134 binding: entry.binding,
1135 resource: match entry.resource {
1136 BindingResource::Buffer(BufferBinding {
1137 buffer,
1138 offset,
1139 size,
1140 }) => bm::BindingResource::Buffer(bm::BufferBinding {
1141 buffer_id: buffer.inner.as_core().id,
1142 offset,
1143 size,
1144 }),
1145 BindingResource::BufferArray(array) => {
1146 let slice = &remaining_arrayed_buffer_bindings[..array.len()];
1147 remaining_arrayed_buffer_bindings =
1148 &remaining_arrayed_buffer_bindings[array.len()..];
1149 bm::BindingResource::BufferArray(Borrowed(slice))
1150 }
1151 BindingResource::Sampler(sampler) => {
1152 bm::BindingResource::Sampler(sampler.inner.as_core().id)
1153 }
1154 BindingResource::SamplerArray(array) => {
1155 let slice = &remaining_arrayed_samplers[..array.len()];
1156 remaining_arrayed_samplers = &remaining_arrayed_samplers[array.len()..];
1157 bm::BindingResource::SamplerArray(Borrowed(slice))
1158 }
1159 BindingResource::TextureView(texture_view) => {
1160 bm::BindingResource::TextureView(texture_view.inner.as_core().id)
1161 }
1162 BindingResource::TextureViewArray(array) => {
1163 let slice = &remaining_arrayed_texture_views[..array.len()];
1164 remaining_arrayed_texture_views =
1165 &remaining_arrayed_texture_views[array.len()..];
1166 bm::BindingResource::TextureViewArray(Borrowed(slice))
1167 }
1168 BindingResource::AccelerationStructure(acceleration_structure) => {
1169 bm::BindingResource::AccelerationStructure(
1170 acceleration_structure.shared.inner.as_core().id,
1171 )
1172 }
1173 },
1174 })
1175 .collect::<Vec<_>>();
1176 let descriptor = bm::BindGroupDescriptor {
1177 label: desc.label.as_ref().map(|label| Borrowed(&label[..])),
1178 layout: desc.layout.inner.as_core().id,
1179 entries: Borrowed(&entries),
1180 };
1181
1182 let (id, error) = self
1183 .context
1184 .0
1185 .device_create_bind_group(self.id, &descriptor, None);
1186 if let Some(cause) = error {
1187 self.context.handle_error(
1188 &self.error_sink,
1189 cause,
1190 desc.label,
1191 "Device::create_bind_group",
1192 );
1193 }
1194 CoreBindGroup {
1195 context: self.context.clone(),
1196 id,
1197 }
1198 .into()
1199 }
1200
1201 fn create_pipeline_layout(
1202 &self,
1203 desc: &crate::PipelineLayoutDescriptor<'_>,
1204 ) -> dispatch::DispatchPipelineLayout {
1205 assert!(
1208 desc.bind_group_layouts.len() <= wgc::MAX_BIND_GROUPS,
1209 "Bind group layout count {} exceeds device bind group limit {}",
1210 desc.bind_group_layouts.len(),
1211 wgc::MAX_BIND_GROUPS
1212 );
1213
1214 let temp_layouts = desc
1215 .bind_group_layouts
1216 .iter()
1217 .map(|bgl| bgl.inner.as_core().id)
1218 .collect::<ArrayVec<_, { wgc::MAX_BIND_GROUPS }>>();
1219 let descriptor = wgc::binding_model::PipelineLayoutDescriptor {
1220 label: desc.label.map(Borrowed),
1221 bind_group_layouts: Borrowed(&temp_layouts),
1222 push_constant_ranges: Borrowed(desc.push_constant_ranges),
1223 };
1224
1225 let (id, error) = self
1226 .context
1227 .0
1228 .device_create_pipeline_layout(self.id, &descriptor, None);
1229 if let Some(cause) = error {
1230 self.context.handle_error(
1231 &self.error_sink,
1232 cause,
1233 desc.label,
1234 "Device::create_pipeline_layout",
1235 );
1236 }
1237 CorePipelineLayout {
1238 context: self.context.clone(),
1239 id,
1240 }
1241 .into()
1242 }
1243
    /// Creates a render pipeline, lowering the public descriptor to the
    /// wgpu-core form (resource wrappers become ids, slices are borrowed).
    fn create_render_pipeline(
        &self,
        desc: &crate::RenderPipelineDescriptor<'_>,
    ) -> dispatch::DispatchRenderPipeline {
        use wgc::pipeline as pipe;

        // Vertex buffer layouts, bounded by the core layer's vertex buffer limit.
        let vertex_buffers: ArrayVec<_, { wgc::MAX_VERTEX_BUFFERS }> = desc
            .vertex
            .buffers
            .iter()
            .map(|vbuf| pipe::VertexBufferLayout {
                array_stride: vbuf.array_stride,
                step_mode: vbuf.step_mode,
                attributes: Borrowed(vbuf.attributes),
            })
            .collect();

        let descriptor = pipe::RenderPipelineDescriptor {
            label: desc.label.map(Borrowed),
            layout: desc.layout.map(|layout| layout.inner.as_core().id),
            vertex: pipe::VertexState {
                stage: pipe::ProgrammableStageDescriptor {
                    module: desc.vertex.module.inner.as_core().id,
                    entry_point: desc.vertex.entry_point.map(Borrowed),
                    constants: Borrowed(desc.vertex.compilation_options.constants),
                    zero_initialize_workgroup_memory: desc
                        .vertex
                        .compilation_options
                        .zero_initialize_workgroup_memory,
                },
                buffers: Borrowed(&vertex_buffers),
            },
            primitive: desc.primitive,
            depth_stencil: desc.depth_stencil.clone(),
            multisample: desc.multisample,
            // The fragment stage is optional in the public API.
            fragment: desc.fragment.as_ref().map(|frag| pipe::FragmentState {
                stage: pipe::ProgrammableStageDescriptor {
                    module: frag.module.inner.as_core().id,
                    entry_point: frag.entry_point.map(Borrowed),
                    constants: Borrowed(frag.compilation_options.constants),
                    zero_initialize_workgroup_memory: frag
                        .compilation_options
                        .zero_initialize_workgroup_memory,
                },
                targets: Borrowed(frag.targets),
            }),
            multiview: desc.multiview,
            cache: desc.cache.map(|cache| cache.inner.as_core().id),
        };

        let (id, error) =
            self.context
                .0
                .device_create_render_pipeline(self.id, &descriptor, None, None);
        if let Some(cause) = error {
            // Internal translation failures are logged loudly (they indicate a
            // wgpu bug) before going through the normal error path.
            if let wgc::pipeline::CreateRenderPipelineError::Internal { stage, ref error } = cause {
                log::error!("Shader translation error for stage {:?}: {}", stage, error);
                log::error!("Please report it to https://github.com/gfx-rs/wgpu");
            }
            self.context.handle_error(
                &self.error_sink,
                cause,
                desc.label,
                "Device::create_render_pipeline",
            );
        }
        CoreRenderPipeline {
            context: self.context.clone(),
            id,
            error_sink: Arc::clone(&self.error_sink),
        }
        .into()
    }
1317
    /// Creates a compute pipeline, lowering the public descriptor to the
    /// wgpu-core form.
    fn create_compute_pipeline(
        &self,
        desc: &crate::ComputePipelineDescriptor<'_>,
    ) -> dispatch::DispatchComputePipeline {
        use wgc::pipeline as pipe;

        let descriptor = pipe::ComputePipelineDescriptor {
            label: desc.label.map(Borrowed),
            layout: desc.layout.map(|pll| pll.inner.as_core().id),
            stage: pipe::ProgrammableStageDescriptor {
                module: desc.module.inner.as_core().id,
                entry_point: desc.entry_point.map(Borrowed),
                constants: Borrowed(desc.compilation_options.constants),
                zero_initialize_workgroup_memory: desc
                    .compilation_options
                    .zero_initialize_workgroup_memory,
            },
            cache: desc.cache.map(|cache| cache.inner.as_core().id),
        };

        let (id, error) =
            self.context
                .0
                .device_create_compute_pipeline(self.id, &descriptor, None, None);
        if let Some(cause) = error {
            // Internal translation failures are logged loudly (they indicate a
            // wgpu bug) before going through the normal error path.
            if let wgc::pipeline::CreateComputePipelineError::Internal(ref error) = cause {
                log::error!(
                    "Shader translation error for stage {:?}: {}",
                    wgt::ShaderStages::COMPUTE,
                    error
                );
                log::error!("Please report it to https://github.com/gfx-rs/wgpu");
            }
            self.context.handle_error(
                &self.error_sink,
                cause,
                desc.label,
                "Device::create_compute_pipeline",
            );
        }
        CoreComputePipeline {
            context: self.context.clone(),
            id,
            error_sink: Arc::clone(&self.error_sink),
        }
        .into()
    }
1365
1366 unsafe fn create_pipeline_cache(
1367 &self,
1368 desc: &crate::PipelineCacheDescriptor<'_>,
1369 ) -> dispatch::DispatchPipelineCache {
1370 use wgc::pipeline as pipe;
1371
1372 let descriptor = pipe::PipelineCacheDescriptor {
1373 label: desc.label.map(Borrowed),
1374 data: desc.data.map(Borrowed),
1375 fallback: desc.fallback,
1376 };
1377 let (id, error) = unsafe {
1378 self.context
1379 .0
1380 .device_create_pipeline_cache(self.id, &descriptor, None)
1381 };
1382 if let Some(cause) = error {
1383 self.context.handle_error(
1384 &self.error_sink,
1385 cause,
1386 desc.label,
1387 "Device::device_create_pipeline_cache_init",
1388 );
1389 }
1390 CorePipelineCache {
1391 context: self.context.clone(),
1392 id,
1393 }
1394 .into()
1395 }
1396
1397 fn create_buffer(&self, desc: &crate::BufferDescriptor<'_>) -> dispatch::DispatchBuffer {
1398 let (id, error) = self.context.0.device_create_buffer(
1399 self.id,
1400 &desc.map_label(|l| l.map(Borrowed)),
1401 None,
1402 );
1403 if let Some(cause) = error {
1404 self.context
1405 .handle_error(&self.error_sink, cause, desc.label, "Device::create_buffer");
1406 }
1407
1408 CoreBuffer {
1409 context: self.context.clone(),
1410 id,
1411 error_sink: Arc::clone(&self.error_sink),
1412 }
1413 .into()
1414 }
1415
1416 fn create_texture(&self, desc: &crate::TextureDescriptor<'_>) -> dispatch::DispatchTexture {
1417 let wgt_desc = desc.map_label_and_view_formats(|l| l.map(Borrowed), |v| v.to_vec());
1418 let (id, error) = self
1419 .context
1420 .0
1421 .device_create_texture(self.id, &wgt_desc, None);
1422 if let Some(cause) = error {
1423 self.context.handle_error(
1424 &self.error_sink,
1425 cause,
1426 desc.label,
1427 "Device::create_texture",
1428 );
1429 }
1430
1431 CoreTexture {
1432 context: self.context.clone(),
1433 id,
1434 error_sink: Arc::clone(&self.error_sink),
1435 }
1436 .into()
1437 }
1438
1439 fn create_blas(
1440 &self,
1441 desc: &crate::CreateBlasDescriptor<'_>,
1442 sizes: crate::BlasGeometrySizeDescriptors,
1443 ) -> (Option<u64>, dispatch::DispatchBlas) {
1444 let global = &self.context.0;
1445 let (id, handle, error) =
1446 global.device_create_blas(self.id, &desc.map_label(|l| l.map(Borrowed)), sizes, None);
1447 if let Some(cause) = error {
1448 self.context
1449 .handle_error(&self.error_sink, cause, desc.label, "Device::create_blas");
1450 }
1451 (
1452 handle,
1453 CoreBlas {
1454 context: self.context.clone(),
1455 id,
1456 }
1458 .into(),
1459 )
1460 }
1461
1462 fn create_tlas(&self, desc: &crate::CreateTlasDescriptor<'_>) -> dispatch::DispatchTlas {
1463 let global = &self.context.0;
1464 let (id, error) =
1465 global.device_create_tlas(self.id, &desc.map_label(|l| l.map(Borrowed)), None);
1466 if let Some(cause) = error {
1467 self.context
1468 .handle_error(&self.error_sink, cause, desc.label, "Device::create_tlas");
1469 }
1470 CoreTlas {
1471 context: self.context.clone(),
1472 id,
1473 }
1475 .into()
1476 }
1477
    /// Creates a sampler; the public descriptor maps field-for-field onto the
    /// core one, with the three address modes packed into one array.
    fn create_sampler(&self, desc: &crate::SamplerDescriptor<'_>) -> dispatch::DispatchSampler {
        let descriptor = wgc::resource::SamplerDescriptor {
            label: desc.label.map(Borrowed),
            // Order is u, v, w.
            address_modes: [
                desc.address_mode_u,
                desc.address_mode_v,
                desc.address_mode_w,
            ],
            mag_filter: desc.mag_filter,
            min_filter: desc.min_filter,
            mipmap_filter: desc.mipmap_filter,
            lod_min_clamp: desc.lod_min_clamp,
            lod_max_clamp: desc.lod_max_clamp,
            compare: desc.compare,
            anisotropy_clamp: desc.anisotropy_clamp,
            border_color: desc.border_color,
        };

        let (id, error) = self
            .context
            .0
            .device_create_sampler(self.id, &descriptor, None);
        if let Some(cause) = error {
            self.context.handle_error(
                &self.error_sink,
                cause,
                desc.label,
                "Device::create_sampler",
            );
        }
        CoreSampler {
            context: self.context.clone(),
            id,
        }
        .into()
    }
1514
1515 fn create_query_set(&self, desc: &crate::QuerySetDescriptor<'_>) -> dispatch::DispatchQuerySet {
1516 let (id, error) = self.context.0.device_create_query_set(
1517 self.id,
1518 &desc.map_label(|l| l.map(Borrowed)),
1519 None,
1520 );
1521 if let Some(cause) = error {
1522 self.context
1523 .handle_error_nolabel(&self.error_sink, cause, "Device::create_query_set");
1524 }
1525 CoreQuerySet {
1526 context: self.context.clone(),
1527 id,
1528 }
1529 .into()
1530 }
1531
1532 fn create_command_encoder(
1533 &self,
1534 desc: &crate::CommandEncoderDescriptor<'_>,
1535 ) -> dispatch::DispatchCommandEncoder {
1536 let (id, error) = self.context.0.device_create_command_encoder(
1537 self.id,
1538 &desc.map_label(|l| l.map(Borrowed)),
1539 None,
1540 );
1541 if let Some(cause) = error {
1542 self.context.handle_error(
1543 &self.error_sink,
1544 cause,
1545 desc.label,
1546 "Device::create_command_encoder",
1547 );
1548 }
1549
1550 CoreCommandEncoder {
1551 context: self.context.clone(),
1552 id,
1553 error_sink: Arc::clone(&self.error_sink),
1554 open: true,
1555 }
1556 .into()
1557 }
1558
1559 fn create_render_bundle_encoder(
1560 &self,
1561 desc: &crate::RenderBundleEncoderDescriptor<'_>,
1562 ) -> dispatch::DispatchRenderBundleEncoder {
1563 let descriptor = wgc::command::RenderBundleEncoderDescriptor {
1564 label: desc.label.map(Borrowed),
1565 color_formats: Borrowed(desc.color_formats),
1566 depth_stencil: desc.depth_stencil,
1567 sample_count: desc.sample_count,
1568 multiview: desc.multiview,
1569 };
1570 let encoder = match wgc::command::RenderBundleEncoder::new(&descriptor, self.id, None) {
1571 Ok(encoder) => encoder,
1572 Err(e) => panic!("Error in Device::create_render_bundle_encoder: {e}"),
1573 };
1574
1575 CoreRenderBundleEncoder {
1576 context: self.context.clone(),
1577 encoder,
1578 id: crate::cmp::Identifier::create(),
1579 }
1580 .into()
1581 }
1582
1583 fn set_device_lost_callback(&self, device_lost_callback: dispatch::BoxDeviceLostCallback) {
1584 self.context
1585 .0
1586 .device_set_device_lost_closure(self.id, device_lost_callback);
1587 }
1588
1589 fn on_uncaptured_error(&self, handler: Box<dyn crate::UncapturedErrorHandler>) {
1590 let mut error_sink = self.error_sink.lock();
1591 error_sink.uncaptured_handler = Some(handler);
1592 }
1593
1594 fn push_error_scope(&self, filter: crate::ErrorFilter) {
1595 let mut error_sink = self.error_sink.lock();
1596 error_sink.scopes.push(ErrorScope {
1597 error: None,
1598 filter,
1599 });
1600 }
1601
1602 fn pop_error_scope(&self) -> Pin<Box<dyn dispatch::PopErrorScopeFuture>> {
1603 let mut error_sink = self.error_sink.lock();
1604 let scope = error_sink.scopes.pop().unwrap();
1605 Box::pin(ready(scope.error))
1606 }
1607
1608 fn start_capture(&self) {
1609 self.context.0.device_start_capture(self.id);
1610 }
1611
1612 fn stop_capture(&self) {
1613 self.context.0.device_stop_capture(self.id);
1614 }
1615
1616 fn poll(&self, maintain: crate::Maintain) -> crate::MaintainResult {
1617 let maintain_inner = maintain.map_index(|i| i.index);
1618 match self.context.0.device_poll(self.id, maintain_inner) {
1619 Ok(done) => match done {
1620 true => wgt::MaintainResult::SubmissionQueueEmpty,
1621 false => wgt::MaintainResult::Ok,
1622 },
1623 Err(err) => self.context.handle_error_fatal(err, "Device::poll"),
1624 }
1625 }
1626
1627 fn get_internal_counters(&self) -> crate::InternalCounters {
1628 self.context.0.device_get_internal_counters(self.id)
1629 }
1630
1631 fn generate_allocator_report(&self) -> Option<wgt::AllocatorReport> {
1632 self.context.0.device_generate_allocator_report(self.id)
1633 }
1634
1635 fn destroy(&self) {
1636 self.context.0.device_destroy(self.id);
1637 }
1638}
1639
1640impl Drop for CoreDevice {
1641 fn drop(&mut self) {
1642 self.context.0.device_drop(self.id)
1643 }
1644}
1645
impl dispatch::QueueInterface for CoreQueue {
    /// Schedules `data` to be written into `buffer` at `offset`.
    /// Errors are routed to the device error sink rather than returned.
    fn write_buffer(
        &self,
        buffer: &dispatch::DispatchBuffer,
        offset: crate::BufferAddress,
        data: &[u8],
    ) {
        let buffer = buffer.as_core();

        match self
            .context
            .0
            .queue_write_buffer(self.id, buffer.id, offset, data)
        {
            Ok(()) => (),
            Err(err) => {
                self.context
                    .handle_error_nolabel(&self.error_sink, err, "Queue::write_buffer")
            }
        }
    }

    /// Creates a staging buffer of `size` bytes (backing for
    /// `write_buffer_with`). Returns `None`, after reporting, on failure.
    fn create_staging_buffer(
        &self,
        size: crate::BufferSize,
    ) -> Option<dispatch::DispatchQueueWriteBuffer> {
        match self
            .context
            .0
            .queue_create_staging_buffer(self.id, size, None)
        {
            Ok((buffer_id, ptr)) => Some(
                CoreQueueWriteBuffer {
                    buffer_id,
                    mapping: CoreBufferMappedRange {
                        ptr,
                        size: size.get() as usize,
                    },
                }
                .into(),
            ),
            Err(err) => {
                self.context.handle_error_nolabel(
                    &self.error_sink,
                    err,
                    "Queue::write_buffer_with",
                );
                None
            }
        }
    }

    /// Checks that a `write_buffer_with` of `size` bytes at `offset` would be
    /// valid for `buffer`. Returns `None`, after reporting, if invalid.
    fn validate_write_buffer(
        &self,
        buffer: &dispatch::DispatchBuffer,
        offset: wgt::BufferAddress,
        size: wgt::BufferSize,
    ) -> Option<()> {
        let buffer = buffer.as_core();

        match self
            .context
            .0
            .queue_validate_write_buffer(self.id, buffer.id, offset, size)
        {
            Ok(()) => Some(()),
            Err(err) => {
                self.context.handle_error_nolabel(
                    &self.error_sink,
                    err,
                    "Queue::write_buffer_with",
                );
                None
            }
        }
    }

    /// Copies a previously filled staging buffer into `buffer` at `offset`.
    fn write_staging_buffer(
        &self,
        buffer: &dispatch::DispatchBuffer,
        offset: crate::BufferAddress,
        staging_buffer: &dispatch::DispatchQueueWriteBuffer,
    ) {
        let buffer = buffer.as_core();
        let staging_buffer = staging_buffer.as_core();

        match self.context.0.queue_write_staging_buffer(
            self.id,
            buffer.id,
            offset,
            staging_buffer.buffer_id,
        ) {
            Ok(()) => (),
            Err(err) => {
                self.context.handle_error_nolabel(
                    &self.error_sink,
                    err,
                    "Queue::write_buffer_with",
                );
            }
        }
    }

    /// Schedules `data` to be written into the given texture region.
    fn write_texture(
        &self,
        texture: crate::TexelCopyTextureInfo<'_>,
        data: &[u8],
        data_layout: crate::TexelCopyBufferLayout,
        size: crate::Extent3d,
    ) {
        match self.context.0.queue_write_texture(
            self.id,
            &map_texture_copy_view(texture),
            data,
            &data_layout,
            &size,
        ) {
            Ok(()) => (),
            Err(err) => {
                self.context
                    .handle_error_nolabel(&self.error_sink, err, "Queue::write_texture")
            }
        }
    }

    /// Copies an external image source into a texture; web targets only.
    #[cfg(any(webgpu, webgl))]
    fn copy_external_image_to_texture(
        &self,
        source: &crate::CopyExternalImageSourceInfo,
        dest: crate::CopyExternalImageDestInfo<&crate::api::Texture>,
        size: crate::Extent3d,
    ) {
        match self.context.0.queue_copy_external_image_to_texture(
            self.id,
            source,
            map_texture_tagged_copy_view(dest),
            size,
        ) {
            Ok(()) => (),
            Err(err) => self.context.handle_error_nolabel(
                &self.error_sink,
                err,
                "Queue::copy_external_image_to_texture",
            ),
        }
    }

    /// Submits command buffers for execution and returns the submission index.
    /// On error, the index reported by wgpu-core is still returned.
    fn submit(
        &self,
        command_buffers: &mut dyn Iterator<Item = dispatch::DispatchCommandBuffer>,
    ) -> u64 {
        // Collect first so the wrappers stay alive while their ids are in use.
        let temp_command_buffers = command_buffers.collect::<SmallVec<[_; 4]>>();
        let command_buffer_ids = temp_command_buffers
            .iter()
            .map(|cmdbuf| cmdbuf.as_core().id)
            .collect::<SmallVec<[_; 4]>>();

        let index = match self.context.0.queue_submit(self.id, &command_buffer_ids) {
            Ok(index) => index,
            Err((index, err)) => {
                self.context
                    .handle_error_nolabel(&self.error_sink, err, "Queue::submit");
                index
            }
        };

        // Explicit drop: the command buffers must outlive the submit call.
        drop(temp_command_buffers);

        index
    }

    /// Forwards to wgpu-core's `queue_get_timestamp_period`.
    fn get_timestamp_period(&self) -> f32 {
        self.context.0.queue_get_timestamp_period(self.id)
    }

    /// Registers `callback` to run once currently submitted work completes.
    fn on_submitted_work_done(&self, callback: dispatch::BoxSubmittedWorkDoneCallback) {
        self.context
            .0
            .queue_on_submitted_work_done(self.id, callback);
    }
}
1827
1828impl Drop for CoreQueue {
1829 fn drop(&mut self) {
1830 self.context.0.queue_drop(self.id)
1831 }
1832}
1833
1834impl dispatch::ShaderModuleInterface for CoreShaderModule {
1835 fn get_compilation_info(&self) -> Pin<Box<dyn dispatch::ShaderCompilationInfoFuture>> {
1836 Box::pin(ready(self.compilation_info.clone()))
1837 }
1838}
1839
1840impl Drop for CoreShaderModule {
1841 fn drop(&mut self) {
1842 self.context.0.shader_module_drop(self.id)
1843 }
1844}
1845
// Marker impl: no methods are required from this backend.
impl dispatch::BindGroupLayoutInterface for CoreBindGroupLayout {}
1847
1848impl Drop for CoreBindGroupLayout {
1849 fn drop(&mut self) {
1850 self.context.0.bind_group_layout_drop(self.id)
1851 }
1852}
1853
// Marker impl: no methods are required from this backend.
impl dispatch::BindGroupInterface for CoreBindGroup {}
1855
1856impl Drop for CoreBindGroup {
1857 fn drop(&mut self) {
1858 self.context.0.bind_group_drop(self.id)
1859 }
1860}
1861
// Marker impl: no methods are required from this backend.
impl dispatch::TextureViewInterface for CoreTextureView {}
1863
1864impl Drop for CoreTextureView {
1865 fn drop(&mut self) {
1866 let _ = self.context.0.texture_view_drop(self.id);
1868 }
1869}
1870
// Marker impl: no methods are required from this backend.
impl dispatch::SamplerInterface for CoreSampler {}
1872
1873impl Drop for CoreSampler {
1874 fn drop(&mut self) {
1875 self.context.0.sampler_drop(self.id)
1876 }
1877}
1878
impl dispatch::BufferInterface for CoreBuffer {
    /// Begins an asynchronous map of `range`; `callback` fires when the map
    /// operation resolves.
    fn map_async(
        &self,
        mode: crate::MapMode,
        range: Range<crate::BufferAddress>,
        callback: dispatch::BufferMapCallback,
    ) {
        let operation = wgc::resource::BufferMapOperation {
            host: match mode {
                MapMode::Read => wgc::device::HostMap::Read,
                MapMode::Write => wgc::device::HostMap::Write,
            },
            callback: Some(Box::new(|status| {
                // Core error details are discarded; the public API exposes
                // only the opaque `BufferAsyncError`.
                let res = status.map_err(|_| crate::BufferAsyncError);
                callback(res);
            })),
        };

        // Immediate (synchronous) failures go to the error sink.
        match self.context.0.buffer_map_async(
            self.id,
            range.start,
            Some(range.end - range.start),
            operation,
        ) {
            Ok(_) => (),
            Err(cause) => {
                self.context
                    .handle_error_nolabel(&self.error_sink, cause, "Buffer::map_async")
            }
        }
    }

    /// Returns a pointer/length view over the mapped `sub_range`.
    /// A failure here is unrecoverable and reported via `handle_error_fatal`.
    fn get_mapped_range(
        &self,
        sub_range: Range<crate::BufferAddress>,
    ) -> dispatch::DispatchBufferMappedRange {
        let size = sub_range.end - sub_range.start;
        match self
            .context
            .0
            .buffer_get_mapped_range(self.id, sub_range.start, Some(size))
        {
            Ok((ptr, size)) => CoreBufferMappedRange {
                ptr,
                size: size as usize,
            }
            .into(),
            Err(err) => self
                .context
                .handle_error_fatal(err, "Buffer::get_mapped_range"),
        }
    }

    /// The ArrayBuffer view only exists on the webgpu backend; the native core
    /// backend has no JS-side buffer to expose.
    #[cfg(webgpu)]
    fn get_mapped_range_as_array_buffer(
        &self,
        _sub_range: Range<wgt::BufferAddress>,
    ) -> Option<js_sys::ArrayBuffer> {
        None
    }

    /// Unmaps the buffer; failures go to the error sink.
    fn unmap(&self) {
        match self.context.0.buffer_unmap(self.id) {
            Ok(()) => (),
            Err(cause) => {
                self.context
                    .handle_error_nolabel(&self.error_sink, cause, "Buffer::buffer_unmap")
            }
        }
    }

    /// Eagerly destroys the buffer; the core result is intentionally ignored.
    fn destroy(&self) {
        let _ = self.context.0.buffer_destroy(self.id);
    }
}
1955
1956impl Drop for CoreBuffer {
1957 fn drop(&mut self) {
1958 self.context.0.buffer_drop(self.id)
1959 }
1960}
1961
impl dispatch::TextureInterface for CoreTexture {
    /// Creates a view of this texture from `desc`; failures are reported
    /// through the error sink.
    fn create_view(
        &self,
        desc: &crate::TextureViewDescriptor<'_>,
    ) -> dispatch::DispatchTextureView {
        let descriptor = wgc::resource::TextureViewDescriptor {
            label: desc.label.map(Borrowed),
            format: desc.format,
            dimension: desc.dimension,
            usage: desc.usage,
            // Which mips/layers/aspect the view covers.
            range: wgt::ImageSubresourceRange {
                aspect: desc.aspect,
                base_mip_level: desc.base_mip_level,
                mip_level_count: desc.mip_level_count,
                base_array_layer: desc.base_array_layer,
                array_layer_count: desc.array_layer_count,
            },
        };
        let (id, error) = self
            .context
            .0
            .texture_create_view(self.id, &descriptor, None);
        if let Some(cause) = error {
            self.context
                .handle_error(&self.error_sink, cause, desc.label, "Texture::create_view");
        }
        CoreTextureView {
            context: self.context.clone(),
            id,
        }
        .into()
    }

    /// Eagerly destroys the texture; the core result is intentionally ignored.
    fn destroy(&self) {
        let _ = self.context.0.texture_destroy(self.id);
    }
}
2000
2001impl Drop for CoreTexture {
2002 fn drop(&mut self) {
2003 self.context.0.texture_drop(self.id)
2004 }
2005}
2006
// Marker impl: no methods are required from this backend.
impl dispatch::BlasInterface for CoreBlas {}
2008
2009impl Drop for CoreBlas {
2010 fn drop(&mut self) {
2011 self.context.0.blas_drop(self.id)
2012 }
2013}
2014
// Marker impl: no methods are required from this backend.
impl dispatch::TlasInterface for CoreTlas {}
2016
2017impl Drop for CoreTlas {
2018 fn drop(&mut self) {
2019 self.context.0.tlas_drop(self.id)
2020 }
2021}
2022
// Marker impl: no methods are required from this backend.
impl dispatch::QuerySetInterface for CoreQuerySet {}
2024
2025impl Drop for CoreQuerySet {
2026 fn drop(&mut self) {
2027 self.context.0.query_set_drop(self.id)
2028 }
2029}
2030
// Marker impl: no methods are required from this backend.
impl dispatch::PipelineLayoutInterface for CorePipelineLayout {}
2032
2033impl Drop for CorePipelineLayout {
2034 fn drop(&mut self) {
2035 self.context.0.pipeline_layout_drop(self.id)
2036 }
2037}
2038
2039impl dispatch::RenderPipelineInterface for CoreRenderPipeline {
2040 fn get_bind_group_layout(&self, index: u32) -> dispatch::DispatchBindGroupLayout {
2041 let (id, error) = self
2042 .context
2043 .0
2044 .render_pipeline_get_bind_group_layout(self.id, index, None);
2045 if let Some(err) = error {
2046 self.context.handle_error_nolabel(
2047 &self.error_sink,
2048 err,
2049 "RenderPipeline::get_bind_group_layout",
2050 )
2051 }
2052 CoreBindGroupLayout {
2053 context: self.context.clone(),
2054 id,
2055 }
2056 .into()
2057 }
2058}
2059
2060impl Drop for CoreRenderPipeline {
2061 fn drop(&mut self) {
2062 self.context.0.render_pipeline_drop(self.id)
2063 }
2064}
2065
2066impl dispatch::ComputePipelineInterface for CoreComputePipeline {
2067 fn get_bind_group_layout(&self, index: u32) -> dispatch::DispatchBindGroupLayout {
2068 let (id, error) = self
2069 .context
2070 .0
2071 .compute_pipeline_get_bind_group_layout(self.id, index, None);
2072 if let Some(err) = error {
2073 self.context.handle_error_nolabel(
2074 &self.error_sink,
2075 err,
2076 "ComputePipeline::get_bind_group_layout",
2077 )
2078 }
2079 CoreBindGroupLayout {
2080 context: self.context.clone(),
2081 id,
2082 }
2083 .into()
2084 }
2085}
2086
2087impl Drop for CoreComputePipeline {
2088 fn drop(&mut self) {
2089 self.context.0.compute_pipeline_drop(self.id)
2090 }
2091}
2092
2093impl dispatch::PipelineCacheInterface for CorePipelineCache {
2094 fn get_data(&self) -> Option<Vec<u8>> {
2095 self.context.0.pipeline_cache_get_data(self.id)
2096 }
2097}
2098
2099impl Drop for CorePipelineCache {
2100 fn drop(&mut self) {
2101 self.context.0.pipeline_cache_drop(self.id)
2102 }
2103}
2104
2105impl dispatch::CommandEncoderInterface for CoreCommandEncoder {
2106 fn copy_buffer_to_buffer(
2107 &self,
2108 source: &dispatch::DispatchBuffer,
2109 source_offset: crate::BufferAddress,
2110 destination: &dispatch::DispatchBuffer,
2111 destination_offset: crate::BufferAddress,
2112 copy_size: crate::BufferAddress,
2113 ) {
2114 let source = source.as_core();
2115 let destination = destination.as_core();
2116
2117 if let Err(cause) = self.context.0.command_encoder_copy_buffer_to_buffer(
2118 self.id,
2119 source.id,
2120 source_offset,
2121 destination.id,
2122 destination_offset,
2123 copy_size,
2124 ) {
2125 self.context.handle_error_nolabel(
2126 &self.error_sink,
2127 cause,
2128 "CommandEncoder::copy_buffer_to_buffer",
2129 );
2130 }
2131 }
2132
2133 fn copy_buffer_to_texture(
2134 &self,
2135 source: crate::TexelCopyBufferInfo<'_>,
2136 destination: crate::TexelCopyTextureInfo<'_>,
2137 copy_size: crate::Extent3d,
2138 ) {
2139 if let Err(cause) = self.context.0.command_encoder_copy_buffer_to_texture(
2140 self.id,
2141 &map_buffer_copy_view(source),
2142 &map_texture_copy_view(destination),
2143 ©_size,
2144 ) {
2145 self.context.handle_error_nolabel(
2146 &self.error_sink,
2147 cause,
2148 "CommandEncoder::copy_buffer_to_texture",
2149 );
2150 }
2151 }
2152
2153 fn copy_texture_to_buffer(
2154 &self,
2155 source: crate::TexelCopyTextureInfo<'_>,
2156 destination: crate::TexelCopyBufferInfo<'_>,
2157 copy_size: crate::Extent3d,
2158 ) {
2159 if let Err(cause) = self.context.0.command_encoder_copy_texture_to_buffer(
2160 self.id,
2161 &map_texture_copy_view(source),
2162 &map_buffer_copy_view(destination),
2163 ©_size,
2164 ) {
2165 self.context.handle_error_nolabel(
2166 &self.error_sink,
2167 cause,
2168 "CommandEncoder::copy_texture_to_buffer",
2169 );
2170 }
2171 }
2172
2173 fn copy_texture_to_texture(
2174 &self,
2175 source: crate::TexelCopyTextureInfo<'_>,
2176 destination: crate::TexelCopyTextureInfo<'_>,
2177 copy_size: crate::Extent3d,
2178 ) {
2179 if let Err(cause) = self.context.0.command_encoder_copy_texture_to_texture(
2180 self.id,
2181 &map_texture_copy_view(source),
2182 &map_texture_copy_view(destination),
2183 ©_size,
2184 ) {
2185 self.context.handle_error_nolabel(
2186 &self.error_sink,
2187 cause,
2188 "CommandEncoder::copy_texture_to_texture",
2189 );
2190 }
2191 }
2192
    /// Opens a compute pass on this encoder.
    ///
    /// Creation errors are reported to the error sink; a pass object is
    /// returned regardless.
    fn begin_compute_pass(
        &self,
        desc: &crate::ComputePassDescriptor<'_>,
    ) -> dispatch::DispatchComputePass {
        // Lower the optional timestamp writes to the core representation.
        let timestamp_writes =
            desc.timestamp_writes
                .as_ref()
                .map(|tw| wgc::command::PassTimestampWrites {
                    query_set: tw.query_set.inner.as_core().id,
                    beginning_of_pass_write_index: tw.beginning_of_pass_write_index,
                    end_of_pass_write_index: tw.end_of_pass_write_index,
                });

        let (pass, err) = self.context.0.command_encoder_create_compute_pass(
            self.id,
            &wgc::command::ComputePassDescriptor {
                label: desc.label.map(Borrowed),
                timestamp_writes: timestamp_writes.as_ref(),
            },
        );

        if let Some(cause) = err {
            self.context.handle_error(
                &self.error_sink,
                cause,
                desc.label,
                "CommandEncoder::begin_compute_pass",
            );
        }

        CoreComputePass {
            context: self.context.clone(),
            pass,
            error_sink: self.error_sink.clone(),
            id: crate::cmp::Identifier::create(),
        }
        .into()
    }
2231
    /// Opens a render pass on this encoder, lowering color attachments, the
    /// optional depth/stencil attachment, and timestamp writes to their
    /// wgpu-core forms.
    ///
    /// Creation errors are reported to the error sink; a pass object is
    /// returned regardless.
    fn begin_render_pass(
        &self,
        desc: &crate::RenderPassDescriptor<'_>,
    ) -> dispatch::DispatchRenderPass {
        // Color attachment slots may be `None` (holes are preserved).
        let colors = desc
            .color_attachments
            .iter()
            .map(|ca| {
                ca.as_ref()
                    .map(|at| wgc::command::RenderPassColorAttachment {
                        view: at.view.inner.as_core().id,
                        resolve_target: at.resolve_target.map(|view| view.inner.as_core().id),
                        load_op: at.ops.load,
                        store_op: at.ops.store,
                    })
            })
            .collect::<Vec<_>>();

        let depth_stencil = desc.depth_stencil_attachment.as_ref().map(|dsa| {
            wgc::command::RenderPassDepthStencilAttachment {
                view: dsa.view.inner.as_core().id,
                depth: map_pass_channel(dsa.depth_ops.as_ref()),
                stencil: map_pass_channel(dsa.stencil_ops.as_ref()),
            }
        });

        let timestamp_writes =
            desc.timestamp_writes
                .as_ref()
                .map(|tw| wgc::command::PassTimestampWrites {
                    query_set: tw.query_set.inner.as_core().id,
                    beginning_of_pass_write_index: tw.beginning_of_pass_write_index,
                    end_of_pass_write_index: tw.end_of_pass_write_index,
                });

        let (pass, err) = self.context.0.command_encoder_create_render_pass(
            self.id,
            &wgc::command::RenderPassDescriptor {
                label: desc.label.map(Borrowed),
                timestamp_writes: timestamp_writes.as_ref(),
                color_attachments: std::borrow::Cow::Borrowed(&colors),
                depth_stencil_attachment: depth_stencil.as_ref(),
                occlusion_query_set: desc.occlusion_query_set.map(|qs| qs.inner.as_core().id),
            },
        );

        if let Some(cause) = err {
            self.context.handle_error(
                &self.error_sink,
                cause,
                desc.label,
                "CommandEncoder::begin_render_pass",
            );
        }

        CoreRenderPass {
            context: self.context.clone(),
            pass,
            error_sink: self.error_sink.clone(),
            id: crate::cmp::Identifier::create(),
        }
        .into()
    }
2295
2296 fn finish(&mut self) -> dispatch::DispatchCommandBuffer {
2297 let descriptor = wgt::CommandBufferDescriptor::default();
2298 self.open = false; let (id, error) = self.context.0.command_encoder_finish(self.id, &descriptor);
2300 if let Some(cause) = error {
2301 self.context
2302 .handle_error_nolabel(&self.error_sink, cause, "a CommandEncoder");
2303 }
2304 CoreCommandBuffer {
2305 context: self.context.clone(),
2306 id,
2307 }
2308 .into()
2309 }
2310
2311 fn clear_texture(
2312 &self,
2313 texture: &dispatch::DispatchTexture,
2314 subresource_range: &crate::ImageSubresourceRange,
2315 ) {
2316 let texture = texture.as_core();
2317
2318 if let Err(cause) =
2319 self.context
2320 .0
2321 .command_encoder_clear_texture(self.id, texture.id, subresource_range)
2322 {
2323 self.context.handle_error_nolabel(
2324 &self.error_sink,
2325 cause,
2326 "CommandEncoder::clear_texture",
2327 );
2328 }
2329 }
2330
2331 fn clear_buffer(
2332 &self,
2333 buffer: &dispatch::DispatchBuffer,
2334 offset: crate::BufferAddress,
2335 size: Option<crate::BufferAddress>,
2336 ) {
2337 let buffer = buffer.as_core();
2338
2339 if let Err(cause) = self
2340 .context
2341 .0
2342 .command_encoder_clear_buffer(self.id, buffer.id, offset, size)
2343 {
2344 self.context.handle_error_nolabel(
2345 &self.error_sink,
2346 cause,
2347 "CommandEncoder::fill_buffer",
2348 );
2349 }
2350 }
2351
2352 fn insert_debug_marker(&self, label: &str) {
2353 if let Err(cause) = self
2354 .context
2355 .0
2356 .command_encoder_insert_debug_marker(self.id, label)
2357 {
2358 self.context.handle_error_nolabel(
2359 &self.error_sink,
2360 cause,
2361 "CommandEncoder::insert_debug_marker",
2362 );
2363 }
2364 }
2365
2366 fn push_debug_group(&self, label: &str) {
2367 if let Err(cause) = self
2368 .context
2369 .0
2370 .command_encoder_push_debug_group(self.id, label)
2371 {
2372 self.context.handle_error_nolabel(
2373 &self.error_sink,
2374 cause,
2375 "CommandEncoder::push_debug_group",
2376 );
2377 }
2378 }
2379
2380 fn pop_debug_group(&self) {
2381 if let Err(cause) = self.context.0.command_encoder_pop_debug_group(self.id) {
2382 self.context.handle_error_nolabel(
2383 &self.error_sink,
2384 cause,
2385 "CommandEncoder::pop_debug_group",
2386 );
2387 }
2388 }
2389
2390 fn write_timestamp(&self, query_set: &dispatch::DispatchQuerySet, query_index: u32) {
2391 let query_set = query_set.as_core();
2392
2393 if let Err(cause) =
2394 self.context
2395 .0
2396 .command_encoder_write_timestamp(self.id, query_set.id, query_index)
2397 {
2398 self.context.handle_error_nolabel(
2399 &self.error_sink,
2400 cause,
2401 "CommandEncoder::write_timestamp",
2402 );
2403 }
2404 }
2405
2406 fn resolve_query_set(
2407 &self,
2408 query_set: &dispatch::DispatchQuerySet,
2409 first_query: u32,
2410 query_count: u32,
2411 destination: &dispatch::DispatchBuffer,
2412 destination_offset: crate::BufferAddress,
2413 ) {
2414 let query_set = query_set.as_core();
2415 let destination = destination.as_core();
2416
2417 if let Err(cause) = self.context.0.command_encoder_resolve_query_set(
2418 self.id,
2419 query_set.id,
2420 first_query,
2421 query_count,
2422 destination.id,
2423 destination_offset,
2424 ) {
2425 self.context.handle_error_nolabel(
2426 &self.error_sink,
2427 cause,
2428 "CommandEncoder::resolve_query_set",
2429 );
2430 }
2431 }
2432
    // Records BLAS/TLAS builds using the "unsafe TLAS" entry form, where the
    // caller supplies raw TLAS build entries (instance buffer + count) rather
    // than validated `TlasPackage`s.
    //
    // Both iterators are adapted lazily into the id-based `wgc::ray_tracing`
    // types and handed to wgpu-core in one call; errors go to the encoder's
    // error sink.
    fn build_acceleration_structures_unsafe_tlas<'a>(
        &self,
        blas: &mut dyn Iterator<Item = &'a crate::BlasBuildEntry<'a>>,
        tlas: &mut dyn Iterator<Item = &'a crate::TlasBuildEntry<'a>>,
    ) {
        // Translate each BLAS entry: wgpu buffer handles -> wgc buffer ids.
        let blas = blas.map(|e: &crate::BlasBuildEntry<'_>| {
            let geometries = match e.geometry {
                crate::BlasGeometries::TriangleGeometries(ref triangle_geometries) => {
                    let iter = triangle_geometries.iter().map(|tg| {
                        wgc::ray_tracing::BlasTriangleGeometry {
                            vertex_buffer: tg.vertex_buffer.inner.as_core().id,
                            index_buffer: tg.index_buffer.map(|buf| buf.inner.as_core().id),
                            transform_buffer: tg.transform_buffer.map(|buf| buf.inner.as_core().id),
                            size: tg.size,
                            transform_buffer_offset: tg.transform_buffer_offset,
                            first_vertex: tg.first_vertex,
                            vertex_stride: tg.vertex_stride,
                            first_index: tg.first_index,
                        }
                    });
                    // Boxed because the wgc type erases the iterator's concrete type.
                    wgc::ray_tracing::BlasGeometries::TriangleGeometries(Box::new(iter))
                }
            };
            wgc::ray_tracing::BlasBuildEntry {
                blas_id: e.blas.inner.as_core().id,
                geometries,
            }
        });

        // Translate each raw TLAS entry to its id-based equivalent.
        let tlas = tlas.into_iter().map(|e: &crate::TlasBuildEntry<'a>| {
            wgc::ray_tracing::TlasBuildEntry {
                tlas_id: e.tlas.shared.inner.as_core().id,
                instance_buffer_id: e.instance_buffer.inner.as_core().id,
                instance_count: e.instance_count,
            }
        });

        if let Err(cause) = self
            .context
            .0
            .command_encoder_build_acceleration_structures_unsafe_tlas(self.id, blas, tlas)
        {
            self.context.handle_error_nolabel(
                &self.error_sink,
                cause,
                "CommandEncoder::build_acceleration_structures_unsafe_tlas",
            );
        }
    }
2482
2483 fn build_acceleration_structures<'a>(
2484 &self,
2485 blas: &mut dyn Iterator<Item = &'a crate::BlasBuildEntry<'a>>,
2486 tlas: &mut dyn Iterator<Item = &'a crate::TlasPackage>,
2487 ) {
2488 let blas = blas.map(|e: &crate::BlasBuildEntry<'_>| {
2489 let geometries = match e.geometry {
2490 crate::BlasGeometries::TriangleGeometries(ref triangle_geometries) => {
2491 let iter = triangle_geometries.iter().map(|tg| {
2492 wgc::ray_tracing::BlasTriangleGeometry {
2493 vertex_buffer: tg.vertex_buffer.inner.as_core().id,
2494 index_buffer: tg.index_buffer.map(|buf| buf.inner.as_core().id),
2495 transform_buffer: tg.transform_buffer.map(|buf| buf.inner.as_core().id),
2496 size: tg.size,
2497 transform_buffer_offset: tg.transform_buffer_offset,
2498 first_vertex: tg.first_vertex,
2499 vertex_stride: tg.vertex_stride,
2500 first_index: tg.first_index,
2501 }
2502 });
2503 wgc::ray_tracing::BlasGeometries::TriangleGeometries(Box::new(iter))
2504 }
2505 };
2506 wgc::ray_tracing::BlasBuildEntry {
2507 blas_id: e.blas.inner.as_core().id,
2508 geometries,
2509 }
2510 });
2511
2512 let tlas = tlas.into_iter().map(|e| {
2513 let instances = e
2514 .instances
2515 .iter()
2516 .map(|instance: &Option<crate::TlasInstance>| {
2517 instance
2518 .as_ref()
2519 .map(|instance| wgc::ray_tracing::TlasInstance {
2520 blas_id: instance.blas.as_core().id,
2521 transform: &instance.transform,
2522 custom_index: instance.custom_index,
2523 mask: instance.mask,
2524 })
2525 });
2526 wgc::ray_tracing::TlasPackage {
2527 tlas_id: e.tlas.shared.inner.as_core().id,
2528 instances: Box::new(instances),
2529 lowest_unmodified: e.lowest_unmodified,
2530 }
2531 });
2532
2533 if let Err(cause) = self
2534 .context
2535 .0
2536 .command_encoder_build_acceleration_structures(self.id, blas, tlas)
2537 {
2538 self.context.handle_error_nolabel(
2539 &self.error_sink,
2540 cause,
2541 "CommandEncoder::build_acceleration_structures_unsafe_tlas",
2542 );
2543 }
2544 }
2545}
2546
2547impl Drop for CoreCommandEncoder {
2548 fn drop(&mut self) {
2549 if self.open {
2550 self.context.0.command_encoder_drop(self.id)
2551 }
2552 }
2553}
2554
// Marker impl: no methods are overridden here; `CoreCommandBuffer` only needs
// to satisfy the `CommandBufferInterface` bound for dispatch.
impl dispatch::CommandBufferInterface for CoreCommandBuffer {}
2556
impl Drop for CoreCommandBuffer {
    fn drop(&mut self) {
        // Release the wgpu-core command buffer id when the wrapper dies.
        self.context.0.command_buffer_drop(self.id)
    }
}
2562
2563impl dispatch::ComputePassInterface for CoreComputePass {
2564 fn set_pipeline(&mut self, pipeline: &dispatch::DispatchComputePipeline) {
2565 let pipeline = pipeline.as_core();
2566
2567 if let Err(cause) = self
2568 .context
2569 .0
2570 .compute_pass_set_pipeline(&mut self.pass, pipeline.id)
2571 {
2572 self.context.handle_error(
2573 &self.error_sink,
2574 cause,
2575 self.pass.label(),
2576 "ComputePass::set_pipeline",
2577 );
2578 }
2579 }
2580
2581 fn set_bind_group(
2582 &mut self,
2583 index: u32,
2584 bind_group: Option<&dispatch::DispatchBindGroup>,
2585 offsets: &[crate::DynamicOffset],
2586 ) {
2587 let bg = bind_group.map(|bg| bg.as_core().id);
2588
2589 if let Err(cause) =
2590 self.context
2591 .0
2592 .compute_pass_set_bind_group(&mut self.pass, index, bg, offsets)
2593 {
2594 self.context.handle_error(
2595 &self.error_sink,
2596 cause,
2597 self.pass.label(),
2598 "ComputePass::set_bind_group",
2599 );
2600 }
2601 }
2602
2603 fn set_push_constants(&mut self, offset: u32, data: &[u8]) {
2604 if let Err(cause) =
2605 self.context
2606 .0
2607 .compute_pass_set_push_constants(&mut self.pass, offset, data)
2608 {
2609 self.context.handle_error(
2610 &self.error_sink,
2611 cause,
2612 self.pass.label(),
2613 "ComputePass::set_push_constant",
2614 );
2615 }
2616 }
2617
2618 fn insert_debug_marker(&mut self, label: &str) {
2619 if let Err(cause) =
2620 self.context
2621 .0
2622 .compute_pass_insert_debug_marker(&mut self.pass, label, 0)
2623 {
2624 self.context.handle_error(
2625 &self.error_sink,
2626 cause,
2627 self.pass.label(),
2628 "ComputePass::insert_debug_marker",
2629 );
2630 }
2631 }
2632
2633 fn push_debug_group(&mut self, group_label: &str) {
2634 if let Err(cause) =
2635 self.context
2636 .0
2637 .compute_pass_push_debug_group(&mut self.pass, group_label, 0)
2638 {
2639 self.context.handle_error(
2640 &self.error_sink,
2641 cause,
2642 self.pass.label(),
2643 "ComputePass::push_debug_group",
2644 );
2645 }
2646 }
2647
2648 fn pop_debug_group(&mut self) {
2649 if let Err(cause) = self.context.0.compute_pass_pop_debug_group(&mut self.pass) {
2650 self.context.handle_error(
2651 &self.error_sink,
2652 cause,
2653 self.pass.label(),
2654 "ComputePass::pop_debug_group",
2655 );
2656 }
2657 }
2658
2659 fn write_timestamp(&mut self, query_set: &dispatch::DispatchQuerySet, query_index: u32) {
2660 let query_set = query_set.as_core();
2661
2662 if let Err(cause) =
2663 self.context
2664 .0
2665 .compute_pass_write_timestamp(&mut self.pass, query_set.id, query_index)
2666 {
2667 self.context.handle_error(
2668 &self.error_sink,
2669 cause,
2670 self.pass.label(),
2671 "ComputePass::write_timestamp",
2672 );
2673 }
2674 }
2675
2676 fn begin_pipeline_statistics_query(
2677 &mut self,
2678 query_set: &dispatch::DispatchQuerySet,
2679 query_index: u32,
2680 ) {
2681 let query_set = query_set.as_core();
2682
2683 if let Err(cause) = self.context.0.compute_pass_begin_pipeline_statistics_query(
2684 &mut self.pass,
2685 query_set.id,
2686 query_index,
2687 ) {
2688 self.context.handle_error(
2689 &self.error_sink,
2690 cause,
2691 self.pass.label(),
2692 "ComputePass::begin_pipeline_statistics_query",
2693 );
2694 }
2695 }
2696
2697 fn end_pipeline_statistics_query(&mut self) {
2698 if let Err(cause) = self
2699 .context
2700 .0
2701 .compute_pass_end_pipeline_statistics_query(&mut self.pass)
2702 {
2703 self.context.handle_error(
2704 &self.error_sink,
2705 cause,
2706 self.pass.label(),
2707 "ComputePass::end_pipeline_statistics_query",
2708 );
2709 }
2710 }
2711
2712 fn dispatch_workgroups(&mut self, x: u32, y: u32, z: u32) {
2713 if let Err(cause) = self
2714 .context
2715 .0
2716 .compute_pass_dispatch_workgroups(&mut self.pass, x, y, z)
2717 {
2718 self.context.handle_error(
2719 &self.error_sink,
2720 cause,
2721 self.pass.label(),
2722 "ComputePass::dispatch_workgroups",
2723 );
2724 }
2725 }
2726
2727 fn dispatch_workgroups_indirect(
2728 &mut self,
2729 indirect_buffer: &dispatch::DispatchBuffer,
2730 indirect_offset: crate::BufferAddress,
2731 ) {
2732 let indirect_buffer = indirect_buffer.as_core();
2733
2734 if let Err(cause) = self.context.0.compute_pass_dispatch_workgroups_indirect(
2735 &mut self.pass,
2736 indirect_buffer.id,
2737 indirect_offset,
2738 ) {
2739 self.context.handle_error(
2740 &self.error_sink,
2741 cause,
2742 self.pass.label(),
2743 "ComputePass::dispatch_workgroups_indirect",
2744 );
2745 }
2746 }
2747
2748 fn end(&mut self) {
2749 if let Err(cause) = self.context.0.compute_pass_end(&mut self.pass) {
2750 self.context.handle_error(
2751 &self.error_sink,
2752 cause,
2753 self.pass.label(),
2754 "ComputePass::end",
2755 );
2756 }
2757 }
2758}
2759
impl Drop for CoreComputePass {
    fn drop(&mut self) {
        // Close the pass on drop so the recording is ended even if the user
        // never called `end` explicitly; `end` routes any failure to the
        // pass's error sink.
        dispatch::ComputePassInterface::end(self);
    }
}
2765
// Core (wgpu-core) implementation of render-pass recording. Every method
// forwards to the matching `render_pass_*` entry point on the global and
// reports failures through the pass's error sink, tagged with the pass label
// and the public method name.
impl dispatch::RenderPassInterface for CoreRenderPass {
    fn set_pipeline(&mut self, pipeline: &dispatch::DispatchRenderPipeline) {
        let pipeline = pipeline.as_core();

        if let Err(cause) = self
            .context
            .0
            .render_pass_set_pipeline(&mut self.pass, pipeline.id)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_pipeline",
            );
        }
    }

    fn set_bind_group(
        &mut self,
        index: u32,
        bind_group: Option<&dispatch::DispatchBindGroup>,
        offsets: &[crate::DynamicOffset],
    ) {
        // `None` unbinds the group at `index`.
        let bg = bind_group.map(|bg| bg.as_core().id);

        if let Err(cause) =
            self.context
                .0
                .render_pass_set_bind_group(&mut self.pass, index, bg, offsets)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_bind_group",
            );
        }
    }

    fn set_index_buffer(
        &mut self,
        buffer: &dispatch::DispatchBuffer,
        index_format: crate::IndexFormat,
        offset: crate::BufferAddress,
        size: Option<crate::BufferSize>,
    ) {
        let buffer = buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_set_index_buffer(
            &mut self.pass,
            buffer.id,
            index_format,
            offset,
            size,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_index_buffer",
            );
        }
    }

    fn set_vertex_buffer(
        &mut self,
        slot: u32,
        buffer: &dispatch::DispatchBuffer,
        offset: crate::BufferAddress,
        size: Option<crate::BufferSize>,
    ) {
        let buffer = buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_set_vertex_buffer(
            &mut self.pass,
            slot,
            buffer.id,
            offset,
            size,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_vertex_buffer",
            );
        }
    }

    fn set_push_constants(&mut self, stages: crate::ShaderStages, offset: u32, data: &[u8]) {
        if let Err(cause) =
            self.context
                .0
                .render_pass_set_push_constants(&mut self.pass, stages, offset, data)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_push_constants",
            );
        }
    }

    fn set_blend_constant(&mut self, color: crate::Color) {
        if let Err(cause) = self
            .context
            .0
            .render_pass_set_blend_constant(&mut self.pass, color)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_blend_constant",
            );
        }
    }

    fn set_scissor_rect(&mut self, x: u32, y: u32, width: u32, height: u32) {
        if let Err(cause) =
            self.context
                .0
                .render_pass_set_scissor_rect(&mut self.pass, x, y, width, height)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_scissor_rect",
            );
        }
    }

    fn set_viewport(
        &mut self,
        x: f32,
        y: f32,
        width: f32,
        height: f32,
        min_depth: f32,
        max_depth: f32,
    ) {
        if let Err(cause) = self.context.0.render_pass_set_viewport(
            &mut self.pass,
            x,
            y,
            width,
            height,
            min_depth,
            max_depth,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_viewport",
            );
        }
    }

    fn set_stencil_reference(&mut self, reference: u32) {
        if let Err(cause) = self
            .context
            .0
            .render_pass_set_stencil_reference(&mut self.pass, reference)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::set_stencil_reference",
            );
        }
    }

    fn draw(&mut self, vertices: Range<u32>, instances: Range<u32>) {
        // The public API takes ranges; wgpu-core takes (count, start) pairs.
        if let Err(cause) = self.context.0.render_pass_draw(
            &mut self.pass,
            vertices.end - vertices.start,
            instances.end - instances.start,
            vertices.start,
            instances.start,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::draw",
            );
        }
    }

    fn draw_indexed(&mut self, indices: Range<u32>, base_vertex: i32, instances: Range<u32>) {
        // Ranges are converted to (count, start) pairs, as in `draw`.
        if let Err(cause) = self.context.0.render_pass_draw_indexed(
            &mut self.pass,
            indices.end - indices.start,
            instances.end - instances.start,
            indices.start,
            base_vertex,
            instances.start,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::draw_indexed",
            );
        }
    }

    fn draw_indirect(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
    ) {
        let indirect_buffer = indirect_buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_draw_indirect(
            &mut self.pass,
            indirect_buffer.id,
            indirect_offset,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::draw_indirect",
            );
        }
    }

    fn draw_indexed_indirect(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
    ) {
        let indirect_buffer = indirect_buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_draw_indexed_indirect(
            &mut self.pass,
            indirect_buffer.id,
            indirect_offset,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::draw_indexed_indirect",
            );
        }
    }

    fn multi_draw_indirect(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
        count: u32,
    ) {
        let indirect_buffer = indirect_buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_multi_draw_indirect(
            &mut self.pass,
            indirect_buffer.id,
            indirect_offset,
            count,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::multi_draw_indirect",
            );
        }
    }

    fn multi_draw_indexed_indirect(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
        count: u32,
    ) {
        let indirect_buffer = indirect_buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_multi_draw_indexed_indirect(
            &mut self.pass,
            indirect_buffer.id,
            indirect_offset,
            count,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::multi_draw_indexed_indirect",
            );
        }
    }

    fn multi_draw_indirect_count(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
        count_buffer: &dispatch::DispatchBuffer,
        count_buffer_offset: crate::BufferAddress,
        max_count: u32,
    ) {
        let indirect_buffer = indirect_buffer.as_core();
        let count_buffer = count_buffer.as_core();

        if let Err(cause) = self.context.0.render_pass_multi_draw_indirect_count(
            &mut self.pass,
            indirect_buffer.id,
            indirect_offset,
            count_buffer.id,
            count_buffer_offset,
            max_count,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::multi_draw_indirect_count",
            );
        }
    }

    fn multi_draw_indexed_indirect_count(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
        count_buffer: &dispatch::DispatchBuffer,
        count_buffer_offset: crate::BufferAddress,
        max_count: u32,
    ) {
        let indirect_buffer = indirect_buffer.as_core();
        let count_buffer = count_buffer.as_core();

        if let Err(cause) = self
            .context
            .0
            .render_pass_multi_draw_indexed_indirect_count(
                &mut self.pass,
                indirect_buffer.id,
                indirect_offset,
                count_buffer.id,
                count_buffer_offset,
                max_count,
            )
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::multi_draw_indexed_indirect_count",
            );
        }
    }

    fn insert_debug_marker(&mut self, label: &str) {
        // The trailing `0` is the marker's color argument — presumably unused
        // by wgpu-core; TODO confirm against the wgc API.
        if let Err(cause) = self
            .context
            .0
            .render_pass_insert_debug_marker(&mut self.pass, label, 0)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::insert_debug_marker",
            );
        }
    }

    fn push_debug_group(&mut self, group_label: &str) {
        if let Err(cause) =
            self.context
                .0
                .render_pass_push_debug_group(&mut self.pass, group_label, 0)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::push_debug_group",
            );
        }
    }

    fn pop_debug_group(&mut self) {
        if let Err(cause) = self.context.0.render_pass_pop_debug_group(&mut self.pass) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::pop_debug_group",
            );
        }
    }

    fn write_timestamp(&mut self, query_set: &dispatch::DispatchQuerySet, query_index: u32) {
        let query_set = query_set.as_core();

        if let Err(cause) =
            self.context
                .0
                .render_pass_write_timestamp(&mut self.pass, query_set.id, query_index)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::write_timestamp",
            );
        }
    }

    fn begin_occlusion_query(&mut self, query_index: u32) {
        if let Err(cause) = self
            .context
            .0
            .render_pass_begin_occlusion_query(&mut self.pass, query_index)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::begin_occlusion_query",
            );
        }
    }

    fn end_occlusion_query(&mut self) {
        if let Err(cause) = self
            .context
            .0
            .render_pass_end_occlusion_query(&mut self.pass)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::end_occlusion_query",
            );
        }
    }

    fn begin_pipeline_statistics_query(
        &mut self,
        query_set: &dispatch::DispatchQuerySet,
        query_index: u32,
    ) {
        let query_set = query_set.as_core();

        if let Err(cause) = self.context.0.render_pass_begin_pipeline_statistics_query(
            &mut self.pass,
            query_set.id,
            query_index,
        ) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::begin_pipeline_statistics_query",
            );
        }
    }

    fn end_pipeline_statistics_query(&mut self) {
        if let Err(cause) = self
            .context
            .0
            .render_pass_end_pipeline_statistics_query(&mut self.pass)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::end_pipeline_statistics_query",
            );
        }
    }

    fn execute_bundles(
        &mut self,
        render_bundles: &mut dyn Iterator<Item = &dispatch::DispatchRenderBundle>,
    ) {
        // Bundle ids are collected into a SmallVec (inline for <= 4 bundles,
        // the common case) since wgpu-core needs a slice, not an iterator.
        let temp_render_bundles = render_bundles
            .map(|rb| rb.as_core().id)
            .collect::<SmallVec<[_; 4]>>();
        if let Err(cause) = self
            .context
            .0
            .render_pass_execute_bundles(&mut self.pass, &temp_render_bundles)
        {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::execute_bundles",
            );
        }
    }

    fn end(&mut self) {
        if let Err(cause) = self.context.0.render_pass_end(&mut self.pass) {
            self.context.handle_error(
                &self.error_sink,
                cause,
                self.pass.label(),
                "RenderPass::end",
            );
        }
    }
}
3282
impl Drop for CoreRenderPass {
    fn drop(&mut self) {
        // Close the pass on drop so the recording is ended even if the user
        // never called `end` explicitly; `end` routes any failure to the
        // pass's error sink.
        dispatch::RenderPassInterface::end(self);
    }
}
3288
// Core implementation of render-bundle recording. Unlike the pass impls,
// these forward to the `bundle_ffi` free functions, which record into the
// encoder without reporting recoverable errors; validation happens in
// `finish`.
impl dispatch::RenderBundleEncoderInterface for CoreRenderBundleEncoder {
    fn set_pipeline(&mut self, pipeline: &dispatch::DispatchRenderPipeline) {
        let pipeline = pipeline.as_core();

        wgpu_render_bundle_set_pipeline(&mut self.encoder, pipeline.id)
    }

    fn set_bind_group(
        &mut self,
        index: u32,
        bind_group: Option<&dispatch::DispatchBindGroup>,
        offsets: &[crate::DynamicOffset],
    ) {
        // `None` unbinds the group at `index`.
        let bg = bind_group.map(|bg| bg.as_core().id);

        // SAFETY: `offsets.as_ptr()` and `offsets.len()` describe a valid
        // slice for the duration of the call, satisfying the FFI contract.
        unsafe {
            wgpu_render_bundle_set_bind_group(
                &mut self.encoder,
                index,
                bg,
                offsets.as_ptr(),
                offsets.len(),
            )
        }
    }

    fn set_index_buffer(
        &mut self,
        buffer: &dispatch::DispatchBuffer,
        index_format: crate::IndexFormat,
        offset: crate::BufferAddress,
        size: Option<crate::BufferSize>,
    ) {
        let buffer = buffer.as_core();

        self.encoder
            .set_index_buffer(buffer.id, index_format, offset, size)
    }

    fn set_vertex_buffer(
        &mut self,
        slot: u32,
        buffer: &dispatch::DispatchBuffer,
        offset: crate::BufferAddress,
        size: Option<crate::BufferSize>,
    ) {
        let buffer = buffer.as_core();

        wgpu_render_bundle_set_vertex_buffer(&mut self.encoder, slot, buffer.id, offset, size)
    }

    fn set_push_constants(&mut self, stages: crate::ShaderStages, offset: u32, data: &[u8]) {
        // SAFETY: the pointer/length pair describes the `data` slice, valid
        // for the duration of the call. The length conversion only fails if
        // the slice exceeds the target integer's range, which would be a
        // caller bug (push-constant data is tiny).
        unsafe {
            wgpu_render_bundle_set_push_constants(
                &mut self.encoder,
                stages,
                offset,
                data.len().try_into().unwrap(),
                data.as_ptr(),
            )
        }
    }

    fn draw(&mut self, vertices: Range<u32>, instances: Range<u32>) {
        // Ranges are converted to (count, start) pairs for the FFI layer.
        wgpu_render_bundle_draw(
            &mut self.encoder,
            vertices.end - vertices.start,
            instances.end - instances.start,
            vertices.start,
            instances.start,
        )
    }

    fn draw_indexed(&mut self, indices: Range<u32>, base_vertex: i32, instances: Range<u32>) {
        wgpu_render_bundle_draw_indexed(
            &mut self.encoder,
            indices.end - indices.start,
            instances.end - instances.start,
            indices.start,
            base_vertex,
            instances.start,
        )
    }

    fn draw_indirect(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
    ) {
        let indirect_buffer = indirect_buffer.as_core();

        wgpu_render_bundle_draw_indirect(&mut self.encoder, indirect_buffer.id, indirect_offset)
    }

    fn draw_indexed_indirect(
        &mut self,
        indirect_buffer: &dispatch::DispatchBuffer,
        indirect_offset: crate::BufferAddress,
    ) {
        let indirect_buffer = indirect_buffer.as_core();

        wgpu_render_bundle_draw_indexed_indirect(
            &mut self.encoder,
            indirect_buffer.id,
            indirect_offset,
        )
    }

    // Consumes the encoder and produces the finished bundle. Any recording
    // error surfaces here and is treated as fatal (there is no per-encoder
    // error sink to route it to).
    fn finish(self, desc: &crate::RenderBundleDescriptor<'_>) -> dispatch::DispatchRenderBundle
    where
        Self: Sized,
    {
        let (id, error) = self.context.0.render_bundle_encoder_finish(
            self.encoder,
            &desc.map_label(|l| l.map(Borrowed)),
            None,
        );
        if let Some(err) = error {
            self.context
                .handle_error_fatal(err, "RenderBundleEncoder::finish");
        }
        CoreRenderBundle { id }.into()
    }
}
3413
// Marker impl: no methods are overridden here; `CoreRenderBundle` only needs
// to satisfy the `RenderBundleInterface` bound for dispatch.
impl dispatch::RenderBundleInterface for CoreRenderBundle {}
3415
3416impl dispatch::SurfaceInterface for CoreSurface {
3417 fn get_capabilities(&self, adapter: &dispatch::DispatchAdapter) -> wgt::SurfaceCapabilities {
3418 let adapter = adapter.as_core();
3419
3420 self.context
3421 .0
3422 .surface_get_capabilities(self.id, adapter.id)
3423 .unwrap_or_default()
3424 }
3425
3426 fn configure(&self, device: &dispatch::DispatchDevice, config: &crate::SurfaceConfiguration) {
3427 let device = device.as_core();
3428
3429 let error = self.context.0.surface_configure(self.id, device.id, config);
3430 if let Some(e) = error {
3431 self.context
3432 .handle_error_nolabel(&device.error_sink, e, "Surface::configure");
3433 } else {
3434 *self.configured_device.lock() = Some(device.id);
3435 *self.error_sink.lock() = Some(device.error_sink.clone());
3436 }
3437 }
3438
3439 fn get_current_texture(
3440 &self,
3441 ) -> (
3442 Option<dispatch::DispatchTexture>,
3443 crate::SurfaceStatus,
3444 dispatch::DispatchSurfaceOutputDetail,
3445 ) {
3446 let output_detail = CoreSurfaceOutputDetail {
3447 context: self.context.clone(),
3448 surface_id: self.id,
3449 }
3450 .into();
3451
3452 match self.context.0.surface_get_current_texture(self.id, None) {
3453 Ok(wgc::present::SurfaceOutput { status, texture_id }) => {
3454 let data = texture_id
3455 .map(|id| CoreTexture {
3456 context: self.context.clone(),
3457 id,
3458 error_sink: Arc::new(Mutex::new(ErrorSinkRaw::new())),
3459 })
3460 .map(Into::into);
3461
3462 (data, status, output_detail)
3463 }
3464 Err(err) => {
3465 let error_sink = self.error_sink.lock();
3466 match error_sink.as_ref() {
3467 Some(error_sink) => {
3468 self.context.handle_error_nolabel(
3469 error_sink,
3470 err,
3471 "Surface::get_current_texture_view",
3472 );
3473 (None, crate::SurfaceStatus::Unknown, output_detail)
3474 }
3475 None => self
3476 .context
3477 .handle_error_fatal(err, "Surface::get_current_texture_view"),
3478 }
3479 }
3480 }
3481 }
3482}
3483
impl Drop for CoreSurface {
    fn drop(&mut self) {
        // Release the wgpu-core surface id when the wrapper dies.
        self.context.0.surface_drop(self.id)
    }
}
3489
3490impl dispatch::SurfaceOutputDetailInterface for CoreSurfaceOutputDetail {
3491 fn present(&self) {
3492 match self.context.0.surface_present(self.surface_id) {
3493 Ok(_status) => (),
3494 Err(err) => self.context.handle_error_fatal(err, "Surface::present"),
3495 }
3496 }
3497
3498 fn texture_discard(&self) {
3499 match self.context.0.surface_texture_discard(self.surface_id) {
3500 Ok(_status) => (),
3501 Err(err) => self
3502 .context
3503 .handle_error_fatal(err, "Surface::discard_texture"),
3504 }
3505 }
3506}
impl Drop for CoreSurfaceOutputDetail {
    fn drop(&mut self) {
        // Intentionally empty: nothing is released here. Presumably the
        // surface texture's lifetime is settled by `present`/`texture_discard`
        // (or by the surface itself) — TODO confirm against the present path.
    }
}
3514
3515impl dispatch::QueueWriteBufferInterface for CoreQueueWriteBuffer {
3516 fn slice(&self) -> &[u8] {
3517 panic!()
3518 }
3519
3520 #[inline]
3521 fn slice_mut(&mut self) -> &mut [u8] {
3522 self.mapping.slice_mut()
3523 }
3524}
impl Drop for CoreQueueWriteBuffer {
    fn drop(&mut self) {
        // Intentionally empty: the staging `mapping` cleans itself up via its
        // own Drop — presumably the queue consumes the buffer contents when
        // the write is submitted; TODO confirm against the queue write path.
    }
}
3532
// View over a mapped buffer region, stored as a raw (ptr, size) pair.
impl dispatch::BufferMappedRangeInterface for CoreBufferMappedRange {
    #[inline]
    fn slice(&self) -> &[u8] {
        // SAFETY: `ptr`/`size` describe the mapped region this value was
        // constructed with. NOTE(review): validity for the lifetime of this
        // range is an invariant upheld by the constructing code, which is not
        // visible here — confirm the mapping outlives the range.
        unsafe { slice::from_raw_parts(self.ptr.as_ptr(), self.size) }
    }

    #[inline]
    fn slice_mut(&mut self) -> &mut [u8] {
        // SAFETY: as in `slice`; `&mut self` guarantees no other slice into
        // this range is live at the same time.
        unsafe { slice::from_raw_parts_mut(self.ptr.as_ptr(), self.size) }
    }
}