1use crate::{
2 context::downcast_ref, AdapterInfo, BindGroupDescriptor, BindGroupLayoutDescriptor,
3 BindingResource, BufferBinding, BufferDescriptor, CommandEncoderDescriptor, CompilationInfo,
4 CompilationMessage, CompilationMessageType, ComputePassDescriptor, ComputePipelineDescriptor,
5 DownlevelCapabilities, ErrorSource, Features, Label, Limits, LoadOp, MapMode, Operations,
6 PipelineCacheDescriptor, PipelineLayoutDescriptor, RenderBundleEncoderDescriptor,
7 RenderPipelineDescriptor, SamplerDescriptor, ShaderModuleDescriptor,
8 ShaderModuleDescriptorSpirV, ShaderSource, StoreOp, SurfaceStatus, SurfaceTargetUnsafe,
9 TextureDescriptor, TextureViewDescriptor, UncapturedErrorHandler,
10};
11
12use arrayvec::ArrayVec;
13use parking_lot::Mutex;
14use smallvec::SmallVec;
15use std::{
16 any::Any,
17 borrow::Cow::Borrowed,
18 error::Error,
19 fmt,
20 future::{ready, Ready},
21 ops::Range,
22 ptr::NonNull,
23 slice,
24 sync::Arc,
25};
26use wgc::error::ContextErrorSource;
27use wgc::{command::bundle_ffi::*, device::DeviceLostClosure, pipeline::CreateShaderModuleError};
28use wgt::WasmNotSendSync;
29
/// The native (wgpu-core) backend context: a thin newtype over the
/// `wgc::global::Global` that owns all tracked GPU resources.
pub struct ContextWgpuCore(wgc::global::Global);
31
impl Drop for ContextWgpuCore {
    fn drop(&mut self) {
        // Intentionally empty: the inner `wgc::global::Global` performs its
        // own teardown in its `Drop` impl.
        // NOTE(review): this impl is a no-op — confirm it is still needed
        // (e.g. kept as a hook point) or whether it can be removed.
    }
}
37
38impl fmt::Debug for ContextWgpuCore {
39 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
40 f.debug_struct("ContextWgpuCore")
41 .field("type", &"Native")
42 .finish()
43 }
44}
45
impl ContextWgpuCore {
    /// Wraps an already-initialized HAL instance for backend `A` in a new
    /// core context.
    ///
    /// # Safety
    /// `hal_instance` must be a valid instance for backend `A`; the
    /// contract is that of `wgc::global::Global::from_hal_instance`.
    pub unsafe fn from_hal_instance<A: wgc::hal_api::HalApi>(hal_instance: A::Instance) -> Self {
        Self(unsafe { wgc::global::Global::from_hal_instance::<A>("wgpu", hal_instance) })
    }

    /// Returns a reference to the underlying HAL instance for backend `A`,
    /// if one exists.
    ///
    /// # Safety
    /// Forwards the contract of `wgc::global::Global::instance_as_hal`.
    pub unsafe fn instance_as_hal<A: wgc::hal_api::HalApi>(&self) -> Option<&A::Instance> {
        unsafe { self.0.instance_as_hal::<A>() }
    }

    /// Builds a context from an existing `wgpu-core` instance.
    ///
    /// # Safety
    /// Forwards the contract of `wgc::global::Global::from_instance`.
    pub unsafe fn from_core_instance(core_instance: wgc::instance::Instance) -> Self {
        Self(unsafe { wgc::global::Global::from_instance(core_instance) })
    }

    /// Lists the ids of all adapters available on the requested backends.
    #[cfg(native)]
    pub fn enumerate_adapters(&self, backends: wgt::Backends) -> Vec<wgc::id::AdapterId> {
        self.0.enumerate_adapters(backends)
    }

    /// Registers a HAL adapter with the core hub and returns its id.
    ///
    /// # Safety
    /// Forwards the contract of `Global::create_adapter_from_hal`.
    pub unsafe fn create_adapter_from_hal<A: wgc::hal_api::HalApi>(
        &self,
        hal_adapter: hal::ExposedAdapter<A>,
    ) -> wgc::id::AdapterId {
        unsafe { self.0.create_adapter_from_hal(hal_adapter.into(), None) }
    }

    /// Runs `hal_adapter_callback` with the HAL adapter behind `adapter`
    /// (or `None` if it is not of backend `A`).
    ///
    /// # Safety
    /// Forwards the contract of `Global::adapter_as_hal`.
    pub unsafe fn adapter_as_hal<
        A: wgc::hal_api::HalApi,
        F: FnOnce(Option<&A::Adapter>) -> R,
        R,
    >(
        &self,
        adapter: &wgc::id::AdapterId,
        hal_adapter_callback: F,
    ) -> R {
        unsafe {
            self.0
                .adapter_as_hal::<A, F, R>(*adapter, hal_adapter_callback)
        }
    }

    /// Runs `hal_buffer_callback` with the HAL buffer behind `buffer`.
    ///
    /// # Safety
    /// Forwards the contract of `Global::buffer_as_hal`.
    pub unsafe fn buffer_as_hal<A: wgc::hal_api::HalApi, F: FnOnce(Option<&A::Buffer>) -> R, R>(
        &self,
        buffer: &Buffer,
        hal_buffer_callback: F,
    ) -> R {
        unsafe {
            self.0
                .buffer_as_hal::<A, F, R>(buffer.id, hal_buffer_callback)
        }
    }

    /// Registers an already-open HAL device/queue pair, producing this
    /// backend's `Device` and `Queue` wrappers that share one error sink.
    ///
    /// `trace_dir` is currently ignored (the trace feature was removed,
    /// see the linked issue); passing `Some` only logs an error.
    ///
    /// # Safety
    /// Forwards the contract of `Global::create_device_from_hal`.
    pub unsafe fn create_device_from_hal<A: wgc::hal_api::HalApi>(
        &self,
        adapter: &wgc::id::AdapterId,
        hal_device: hal::OpenDevice<A>,
        desc: &crate::DeviceDescriptor<'_>,
        trace_dir: Option<&std::path::Path>,
    ) -> Result<(Device, Queue), crate::RequestDeviceError> {
        if trace_dir.is_some() {
            log::error!("Feature 'trace' has been removed temporarily, see https://github.com/gfx-rs/wgpu/issues/5974");
        }
        let (device_id, queue_id) = unsafe {
            self.0.create_device_from_hal(
                *adapter,
                hal_device.into(),
                &desc.map_label(|l| l.map(Borrowed)),
                None,
                None,
                None,
            )
        }?;
        // One shared error sink: errors raised on either the device or the
        // queue funnel into the same handler.
        let error_sink = Arc::new(Mutex::new(ErrorSinkRaw::new()));
        let device = Device {
            id: device_id,
            error_sink: error_sink.clone(),
            features: desc.required_features,
        };
        let queue = Queue {
            id: queue_id,
            error_sink,
        };
        Ok((device, queue))
    }

    /// Wraps an existing HAL texture in a core-tracked `Texture` owned by
    /// `device`. Creation errors are routed to the device's error sink.
    ///
    /// # Safety
    /// Forwards the contract of `Global::create_texture_from_hal`.
    pub unsafe fn create_texture_from_hal<A: wgc::hal_api::HalApi>(
        &self,
        hal_texture: A::Texture,
        device: &Device,
        desc: &TextureDescriptor<'_>,
    ) -> Texture {
        let descriptor = desc.map_label_and_view_formats(|l| l.map(Borrowed), |v| v.to_vec());
        let (id, error) = unsafe {
            self.0
                .create_texture_from_hal(Box::new(hal_texture), device.id, &descriptor, None)
        };
        if let Some(cause) = error {
            self.handle_error(
                &device.error_sink,
                cause,
                desc.label,
                "Device::create_texture_from_hal",
            );
        }
        Texture {
            id,
            error_sink: Arc::clone(&device.error_sink),
        }
    }

    /// Wraps an existing HAL buffer in a core-tracked `Buffer` owned by
    /// `device`. Creation errors are routed to the device's error sink.
    ///
    /// # Safety
    /// Forwards the contract of `Global::create_buffer_from_hal`.
    pub unsafe fn create_buffer_from_hal<A: wgc::hal_api::HalApi>(
        &self,
        hal_buffer: A::Buffer,
        device: &Device,
        desc: &BufferDescriptor<'_>,
    ) -> Buffer {
        let (id, error) = unsafe {
            self.0.create_buffer_from_hal::<A>(
                hal_buffer,
                device.id,
                &desc.map_label(|l| l.map(Borrowed)),
                None,
            )
        };
        if let Some(cause) = error {
            self.handle_error(
                &device.error_sink,
                cause,
                desc.label,
                "Device::create_buffer_from_hal",
            );
        }
        Buffer {
            id,
            error_sink: Arc::clone(&device.error_sink),
        }
    }

    /// Runs `hal_device_callback` with the HAL device behind `device`.
    ///
    /// # Safety
    /// Forwards the contract of `Global::device_as_hal`.
    pub unsafe fn device_as_hal<A: wgc::hal_api::HalApi, F: FnOnce(Option<&A::Device>) -> R, R>(
        &self,
        device: &Device,
        hal_device_callback: F,
    ) -> R {
        unsafe {
            self.0
                .device_as_hal::<A, F, R>(device.id, hal_device_callback)
        }
    }

    /// Runs `hal_surface_callback` with the HAL surface behind `surface`.
    ///
    /// # Safety
    /// Forwards the contract of `Global::surface_as_hal`.
    pub unsafe fn surface_as_hal<
        A: wgc::hal_api::HalApi,
        F: FnOnce(Option<&A::Surface>) -> R,
        R,
    >(
        &self,
        surface: &Surface,
        hal_surface_callback: F,
    ) -> R {
        unsafe {
            self.0
                .surface_as_hal::<A, F, R>(surface.id, hal_surface_callback)
        }
    }

    /// Runs `hal_texture_callback` with the HAL texture behind `texture`.
    ///
    /// # Safety
    /// Forwards the contract of `Global::texture_as_hal`.
    pub unsafe fn texture_as_hal<
        A: wgc::hal_api::HalApi,
        F: FnOnce(Option<&A::Texture>) -> R,
        R,
    >(
        &self,
        texture: &Texture,
        hal_texture_callback: F,
    ) -> R {
        unsafe {
            self.0
                .texture_as_hal::<A, F, R>(texture.id, hal_texture_callback)
        }
    }

    /// Runs `hal_texture_view_callback` with the HAL view behind the id.
    ///
    /// # Safety
    /// Forwards the contract of `Global::texture_view_as_hal`.
    pub unsafe fn texture_view_as_hal<
        A: wgc::hal_api::HalApi,
        F: FnOnce(Option<&A::TextureView>) -> R,
        R,
    >(
        &self,
        texture_view_data: &wgc::id::TextureViewId,
        hal_texture_view_callback: F,
    ) -> R {
        unsafe {
            self.0
                .texture_view_as_hal::<A, F, R>(*texture_view_data, hal_texture_view_callback)
        }
    }

    /// Runs `hal_command_encoder_callback` with mutable access to the HAL
    /// command encoder behind `command_encoder`.
    ///
    /// # Safety
    /// Forwards the contract of `Global::command_encoder_as_hal_mut`.
    pub unsafe fn command_encoder_as_hal_mut<
        A: wgc::hal_api::HalApi,
        F: FnOnce(Option<&mut A::CommandEncoder>) -> R,
        R,
    >(
        &self,
        command_encoder: &CommandEncoder,
        hal_command_encoder_callback: F,
    ) -> R {
        unsafe {
            self.0.command_encoder_as_hal_mut::<A, F, R>(
                command_encoder.id,
                hal_command_encoder_callback,
            )
        }
    }

    /// Produces a snapshot report of all resources tracked by the core hub.
    pub fn generate_report(&self) -> wgc::global::GlobalReport {
        self.0.generate_report()
    }

    /// Core error-reporting path: wraps `source` in a `ContextError`
    /// (carrying the function name and label), classifies it, and hands it
    /// to the sink.
    ///
    /// Classification walks the `Error::source()` chain: if any link is
    /// `DeviceError::OutOfMemory` the error is reported as
    /// `crate::Error::OutOfMemory`, otherwise as `crate::Error::Validation`
    /// with a formatted description.
    ///
    /// Marked `#[cold]`/`#[inline(never)]` to keep this off callers' hot
    /// paths; `#[track_caller]` preserves the call site for diagnostics.
    #[cold]
    #[track_caller]
    #[inline(never)]
    fn handle_error_inner(
        &self,
        sink_mutex: &Mutex<ErrorSinkRaw>,
        source: ContextErrorSource,
        label: Label<'_>,
        fn_ident: &'static str,
    ) {
        let source_error: ErrorSource = Box::new(wgc::error::ContextError {
            fn_ident,
            source,
            label: label.unwrap_or_default().to_string(),
        });
        let mut sink = sink_mutex.lock();
        let mut source_opt: Option<&(dyn Error + 'static)> = Some(&*source_error);
        let error = loop {
            if let Some(source) = source_opt {
                // OOM anywhere in the cause chain takes precedence over a
                // generic validation classification.
                if let Some(wgc::device::DeviceError::OutOfMemory) =
                    source.downcast_ref::<wgc::device::DeviceError>()
                {
                    break crate::Error::OutOfMemory {
                        source: source_error,
                    };
                }
                source_opt = source.source();
            } else {
                // Chain exhausted without finding OOM: report as validation.
                break crate::Error::Validation {
                    description: self.format_error(&*source_error),
                    source: source_error,
                };
            }
        };
        sink.handle_error(error);
    }

    /// Reports `source` with an associated resource `label`.
    #[inline]
    #[track_caller]
    fn handle_error(
        &self,
        sink_mutex: &Mutex<ErrorSinkRaw>,
        source: impl Error + WasmNotSendSync + 'static,
        label: Label<'_>,
        fn_ident: &'static str,
    ) {
        self.handle_error_inner(sink_mutex, Box::new(source), label, fn_ident)
    }

    /// Reports `source` for an operation that has no label.
    #[inline]
    #[track_caller]
    fn handle_error_nolabel(
        &self,
        sink_mutex: &Mutex<ErrorSinkRaw>,
        source: impl Error + WasmNotSendSync + 'static,
        fn_ident: &'static str,
    ) {
        self.handle_error_inner(sink_mutex, Box::new(source), None, fn_ident)
    }

    /// Panics with a formatted rendition of `cause`; used for errors that
    /// cannot be surfaced through an error sink (e.g. surface operations).
    #[track_caller]
    #[cold]
    fn handle_error_fatal(
        &self,
        cause: impl Error + WasmNotSendSync + 'static,
        operation: &'static str,
    ) -> ! {
        panic!("Error in {operation}: {f}", f = self.format_error(&cause));
    }

    /// Renders `err` and its full cause chain as an indented,
    /// human-readable "Validation Error / Caused by:" tree.
    #[inline(never)]
    fn format_error(&self, err: &(dyn Error + 'static)) -> String {
        let mut output = String::new();
        let mut level = 1;

        // Recursively prints `e` at the current indent level, then its
        // sources one level deeper. `MultiError`s are expanded so each
        // contained error gets its own branch.
        fn print_tree(output: &mut String, level: &mut usize, e: &(dyn Error + 'static)) {
            let mut print = |e: &(dyn Error + 'static)| {
                use std::fmt::Write;
                writeln!(output, "{}{}", " ".repeat(*level * 2), e).unwrap();

                if let Some(e) = e.source() {
                    *level += 1;
                    print_tree(output, level, e);
                    *level -= 1;
                }
            };
            if let Some(multi) = e.downcast_ref::<wgc::error::MultiError>() {
                for e in multi.errors() {
                    print(e);
                }
            } else {
                print(e);
            }
        }

        print_tree(&mut output, &mut level, err);

        format!("Validation Error\n\nCaused by:\n{output}")
    }
}
365
366fn map_buffer_copy_view(view: crate::ImageCopyBuffer<'_>) -> wgc::command::ImageCopyBuffer {
367 wgc::command::ImageCopyBuffer {
368 buffer: downcast_buffer(view.buffer).id,
369 layout: view.layout,
370 }
371}
372
373fn map_texture_copy_view(view: crate::ImageCopyTexture<'_>) -> wgc::command::ImageCopyTexture {
374 wgc::command::ImageCopyTexture {
375 texture: downcast_texture(view.texture).id,
376 mip_level: view.mip_level,
377 origin: view.origin,
378 aspect: view.aspect,
379 }
380}
381
382#[cfg_attr(
383 any(not(target_arch = "wasm32"), target_os = "emscripten"),
384 allow(unused)
385)]
386fn map_texture_tagged_copy_view(
387 view: crate::ImageCopyTextureTagged<'_>,
388) -> wgc::command::ImageCopyTextureTagged {
389 wgc::command::ImageCopyTextureTagged {
390 texture: downcast_texture(view.texture).id,
391 mip_level: view.mip_level,
392 origin: view.origin,
393 aspect: view.aspect,
394 color_space: view.color_space,
395 premultiplied_alpha: view.premultiplied_alpha,
396 }
397}
398
399fn map_store_op(op: StoreOp) -> wgc::command::StoreOp {
400 match op {
401 StoreOp::Store => wgc::command::StoreOp::Store,
402 StoreOp::Discard => wgc::command::StoreOp::Discard,
403 }
404}
405
406fn map_pass_channel<V: Copy + Default>(
407 ops: Option<&Operations<V>>,
408) -> wgc::command::PassChannel<V> {
409 match ops {
410 Some(&Operations {
411 load: LoadOp::Clear(clear_value),
412 store,
413 }) => wgc::command::PassChannel {
414 load_op: wgc::command::LoadOp::Clear,
415 store_op: map_store_op(store),
416 clear_value,
417 read_only: false,
418 },
419 Some(&Operations {
420 load: LoadOp::Load,
421 store,
422 }) => wgc::command::PassChannel {
423 load_op: wgc::command::LoadOp::Load,
424 store_op: map_store_op(store),
425 clear_value: V::default(),
426 read_only: false,
427 },
428 None => wgc::command::PassChannel {
429 load_op: wgc::command::LoadOp::Load,
430 store_op: wgc::command::StoreOp::Store,
431 clear_value: V::default(),
432 read_only: true,
433 },
434 }
435}
436
/// A presentable surface: the core id plus the device it was last
/// successfully configured with (set in `surface_configure`).
#[derive(Debug)]
pub struct Surface {
    id: wgc::id::SurfaceId,
    /// `None` until `surface_configure` succeeds for some device.
    configured_device: Mutex<Option<wgc::id::DeviceId>>,
}

/// A device handle: core id, shared error sink, and the features it was
/// requested with (consulted e.g. when building binding arrays).
#[derive(Debug)]
pub struct Device {
    id: wgc::id::DeviceId,
    error_sink: ErrorSink,
    features: Features,
}

/// A buffer handle: core id plus the owning device's error sink.
#[derive(Debug)]
pub struct Buffer {
    id: wgc::id::BufferId,
    error_sink: ErrorSink,
}

/// A shader module handle, retaining the compilation messages produced
/// when it was created.
#[derive(Debug)]
pub struct ShaderModule {
    id: wgc::id::ShaderModuleId,
    compilation_info: CompilationInfo,
}

/// A texture handle: core id plus an error sink for reporting.
#[derive(Debug)]
pub struct Texture {
    id: wgc::id::TextureId,
    error_sink: ErrorSink,
}

/// A queue handle sharing its device's error sink.
#[derive(Debug)]
pub struct Queue {
    id: wgc::id::QueueId,
    error_sink: ErrorSink,
}

/// A compute pipeline handle plus the owning device's error sink.
#[derive(Debug)]
pub struct ComputePipeline {
    id: wgc::id::ComputePipelineId,
    error_sink: ErrorSink,
}

/// A render pipeline handle plus the owning device's error sink.
#[derive(Debug)]
pub struct RenderPipeline {
    id: wgc::id::RenderPipelineId,
    error_sink: ErrorSink,
}

/// An open compute pass: the recording state plus an error sink.
#[derive(Debug)]
pub struct ComputePass {
    pass: wgc::command::ComputePass,
    error_sink: ErrorSink,
}

/// An open render pass: the recording state plus an error sink.
#[derive(Debug)]
pub struct RenderPass {
    pass: wgc::command::RenderPass,
    error_sink: ErrorSink,
}

/// A command encoder handle.
#[derive(Debug)]
pub struct CommandEncoder {
    id: wgc::id::CommandEncoderId,
    error_sink: ErrorSink,
    /// Whether the encoder is still recording (not yet finished/consumed).
    open: bool,
}
506
507impl crate::Context for ContextWgpuCore {
    // Concrete per-handle data types for this backend. Handles that never
    // report errors map straight to `wgpu-core` ids; handles that need
    // error routing or extra state use the wrapper structs defined in this
    // file.
    type AdapterData = wgc::id::AdapterId;
    type DeviceData = Device;
    type QueueData = Queue;
    type ShaderModuleData = ShaderModule;
    type BindGroupLayoutData = wgc::id::BindGroupLayoutId;
    type BindGroupData = wgc::id::BindGroupId;
    type TextureViewData = wgc::id::TextureViewId;
    type SamplerData = wgc::id::SamplerId;
    type BufferData = Buffer;
    type TextureData = Texture;
    type QuerySetData = wgc::id::QuerySetId;
    type PipelineLayoutData = wgc::id::PipelineLayoutId;
    type RenderPipelineData = RenderPipeline;
    type ComputePipelineData = ComputePipeline;
    type PipelineCacheData = wgc::id::PipelineCacheId;
    type CommandEncoderData = CommandEncoder;
    type ComputePassData = ComputePass;
    type RenderPassData = RenderPass;
    type CommandBufferData = wgc::id::CommandBufferId;
    type RenderBundleEncoderData = wgc::command::RenderBundleEncoder;
    type RenderBundleData = wgc::id::RenderBundleId;

    type SurfaceData = Surface;
    type SurfaceOutputDetail = SurfaceOutputDetail;
    type SubmissionIndexData = wgc::SubmissionIndex;

    // All futures are `Ready`: this backend resolves requests immediately
    // rather than asynchronously.
    type RequestAdapterFuture = Ready<Option<Self::AdapterData>>;

    #[allow(clippy::type_complexity)]
    type RequestDeviceFuture =
        Ready<Result<(Self::DeviceData, Self::QueueData), crate::RequestDeviceError>>;

    type PopErrorScopeFuture = Ready<Option<crate::Error>>;
    type CompilationInfoFuture = Ready<CompilationInfo>;
542
    /// Creates a new context backed by a fresh `wgpu-core` global.
    fn init(instance_desc: wgt::InstanceDescriptor) -> Self {
        Self(wgc::global::Global::new("wgpu", instance_desc))
    }
546
    /// Creates a surface from a raw platform target, dispatching to the
    /// backend-specific core entry point for each target kind.
    ///
    /// # Safety
    /// The raw handles inside `target` must be valid for the lifetime of
    /// the surface; forwards the contract of the corresponding
    /// `Global::instance_create_surface*` function.
    unsafe fn instance_create_surface(
        &self,
        target: SurfaceTargetUnsafe,
    ) -> Result<Self::SurfaceData, crate::CreateSurfaceError> {
        let id = match target {
            SurfaceTargetUnsafe::RawHandle {
                raw_display_handle,
                raw_window_handle,
            } => unsafe {
                self.0
                    .instance_create_surface(raw_display_handle, raw_window_handle, None)
            },

            #[cfg(metal)]
            SurfaceTargetUnsafe::CoreAnimationLayer(layer) => unsafe {
                self.0.instance_create_surface_metal(layer, None)
            },

            #[cfg(dx12)]
            SurfaceTargetUnsafe::CompositionVisual(visual) => unsafe {
                self.0.instance_create_surface_from_visual(visual, None)
            },

            #[cfg(dx12)]
            SurfaceTargetUnsafe::SurfaceHandle(surface_handle) => unsafe {
                self.0
                    .instance_create_surface_from_surface_handle(surface_handle, None)
            },

            #[cfg(dx12)]
            SurfaceTargetUnsafe::SwapChainPanel(swap_chain_panel) => unsafe {
                self.0
                    .instance_create_surface_from_swap_chain_panel(swap_chain_panel, None)
            },
        }?;

        Ok(Surface {
            id,
            // Not configured with any device yet.
            configured_device: Mutex::default(),
        })
    }
588
    /// Requests an adapter matching `options`, searching all backends.
    /// Resolves immediately; failures are collapsed to `None`.
    fn instance_request_adapter(
        &self,
        options: &crate::RequestAdapterOptions<'_, '_>,
    ) -> Self::RequestAdapterFuture {
        let id = self.0.request_adapter(
            &wgc::instance::RequestAdapterOptions {
                power_preference: options.power_preference,
                force_fallback_adapter: options.force_fallback_adapter,
                // Unwrap the public surface handle down to its core id.
                compatible_surface: options.compatible_surface.map(|surface| {
                    let surface: &<ContextWgpuCore as crate::Context>::SurfaceData =
                        downcast_ref(surface.surface_data.as_ref());
                    surface.id
                }),
            },
            wgt::Backends::all(),
            None,
        );
        ready(id.ok())
    }
608
    /// Requests a device and queue from the adapter. Resolves immediately.
    ///
    /// `trace_dir` is currently ignored (the trace feature was removed,
    /// see the linked issue); passing `Some` only logs an error.
    fn adapter_request_device(
        &self,
        adapter_data: &Self::AdapterData,
        desc: &crate::DeviceDescriptor<'_>,
        trace_dir: Option<&std::path::Path>,
    ) -> Self::RequestDeviceFuture {
        if trace_dir.is_some() {
            log::error!("Feature 'trace' has been removed temporarily, see https://github.com/gfx-rs/wgpu/issues/5974");
        }
        let res = self.0.adapter_request_device(
            *adapter_data,
            &desc.map_label(|l| l.map(Borrowed)),
            None,
            None,
            None,
        );
        let (device_id, queue_id) = match res {
            Ok(ids) => ids,
            Err(err) => {
                return ready(Err(err.into()));
            }
        };
        // Device and queue share one error sink, mirroring
        // `create_device_from_hal`.
        let error_sink = Arc::new(Mutex::new(ErrorSinkRaw::new()));
        let device = Device {
            id: device_id,
            error_sink: error_sink.clone(),
            features: desc.required_features,
        };
        let queue = Queue {
            id: queue_id,
            error_sink,
        };
        ready(Ok((device, queue)))
    }
643
644 fn instance_poll_all_devices(&self, force_wait: bool) -> bool {
645 match self.0.poll_all_devices(force_wait) {
646 Ok(all_queue_empty) => all_queue_empty,
647 Err(err) => self.handle_error_fatal(err, "Device::poll"),
648 }
649 }
650
    /// Checks whether `surface_data` can be presented to by this adapter.
    fn adapter_is_surface_supported(
        &self,
        adapter_data: &Self::AdapterData,
        surface_data: &Self::SurfaceData,
    ) -> bool {
        self.0
            .adapter_is_surface_supported(*adapter_data, surface_data.id)
    }

    /// Returns the features supported by the adapter.
    fn adapter_features(&self, adapter_data: &Self::AdapterData) -> Features {
        self.0.adapter_features(*adapter_data)
    }

    /// Returns the limits supported by the adapter.
    fn adapter_limits(&self, adapter_data: &Self::AdapterData) -> Limits {
        self.0.adapter_limits(*adapter_data)
    }

    /// Returns the adapter's downlevel capabilities.
    fn adapter_downlevel_capabilities(
        &self,
        adapter_data: &Self::AdapterData,
    ) -> DownlevelCapabilities {
        self.0.adapter_downlevel_capabilities(*adapter_data)
    }

    /// Returns identifying information about the adapter.
    fn adapter_get_info(&self, adapter_data: &Self::AdapterData) -> AdapterInfo {
        self.0.adapter_get_info(*adapter_data)
    }

    /// Returns the feature flags the adapter supports for `format`.
    fn adapter_get_texture_format_features(
        &self,
        adapter_data: &Self::AdapterData,
        format: wgt::TextureFormat,
    ) -> wgt::TextureFormatFeatures {
        self.0
            .adapter_get_texture_format_features(*adapter_data, format)
    }

    /// Returns the adapter's current presentation timestamp.
    fn adapter_get_presentation_timestamp(
        &self,
        adapter_data: &Self::AdapterData,
    ) -> wgt::PresentationTimestamp {
        self.0.adapter_get_presentation_timestamp(*adapter_data)
    }

    /// Queries the surface's capabilities on `adapter_data`; a failed
    /// query (e.g. incompatible pair) yields the empty default.
    fn surface_get_capabilities(
        &self,
        surface_data: &Self::SurfaceData,
        adapter_data: &Self::AdapterData,
    ) -> wgt::SurfaceCapabilities {
        match self
            .0
            .surface_get_capabilities(surface_data.id, *adapter_data)
        {
            Ok(caps) => caps,
            Err(_) => wgt::SurfaceCapabilities::default(),
        }
    }
708
    /// Configures the surface for presentation with `device_data`.
    /// On success, records the device id so later operations know which
    /// device the surface belongs to; on failure, panics fatally.
    fn surface_configure(
        &self,
        surface_data: &Self::SurfaceData,
        device_data: &Self::DeviceData,
        config: &crate::SurfaceConfiguration,
    ) {
        let error = self
            .0
            .surface_configure(surface_data.id, device_data.id, config);
        if let Some(e) = error {
            self.handle_error_fatal(e, "Surface::configure");
        } else {
            *surface_data.configured_device.lock() = Some(device_data.id);
        }
    }
724
    /// Acquires the next presentable texture from the surface, together
    /// with the acquisition status and the detail needed to present or
    /// discard it later. Acquisition failure is fatal.
    fn surface_get_current_texture(
        &self,
        surface_data: &Self::SurfaceData,
    ) -> (
        Option<Self::TextureData>,
        SurfaceStatus,
        Self::SurfaceOutputDetail,
    ) {
        match self.0.surface_get_current_texture(surface_data.id, None) {
            Ok(wgc::present::SurfaceOutput { status, texture_id }) => {
                // The swapchain texture gets its own, fresh error sink
                // rather than sharing a device's sink.
                let data = texture_id.map(|id| Texture {
                    id,
                    error_sink: Arc::new(Mutex::new(ErrorSinkRaw::new())),
                });

                (
                    data,
                    status,
                    SurfaceOutputDetail {
                        surface_id: surface_data.id,
                    },
                )
            }
            Err(err) => self.handle_error_fatal(err, "Surface::get_current_texture_view"),
        }
    }
751
    /// Presents the previously acquired surface texture. Failure is fatal.
    fn surface_present(&self, detail: &Self::SurfaceOutputDetail) {
        match self.0.surface_present(detail.surface_id) {
            Ok(_status) => (),
            Err(err) => self.handle_error_fatal(err, "Surface::present"),
        }
    }

    /// Discards the previously acquired surface texture without
    /// presenting it. Failure is fatal.
    fn surface_texture_discard(&self, detail: &Self::SurfaceOutputDetail) {
        match self.0.surface_texture_discard(detail.surface_id) {
            Ok(_status) => (),
            Err(err) => self.handle_error_fatal(err, "Surface::discard_texture"),
        }
    }
765
    /// Returns the features the device was created with.
    fn device_features(&self, device_data: &Self::DeviceData) -> Features {
        self.0.device_features(device_data.id)
    }

    /// Returns the limits the device was created with.
    fn device_limits(&self, device_data: &Self::DeviceData) -> Limits {
        self.0.device_limits(device_data.id)
    }
773
    // With no shader frontend feature enabled, the `match` below has no
    // usable arms; suppress the resulting warnings.
    #[cfg_attr(
        not(any(
            feature = "spirv",
            feature = "glsl",
            feature = "wgsl",
            feature = "naga-ir"
        )),
        allow(unreachable_code, unused_variables)
    )]
    /// Creates a shader module from any of the supported (feature-gated)
    /// source languages. Creation errors are reported to the device's
    /// error sink AND converted into the returned `CompilationInfo`.
    fn device_create_shader_module(
        &self,
        device_data: &Self::DeviceData,
        desc: ShaderModuleDescriptor<'_>,
        shader_bound_checks: wgt::ShaderBoundChecks,
    ) -> Self::ShaderModuleData {
        let descriptor = wgc::pipeline::ShaderModuleDescriptor {
            label: desc.label.map(Borrowed),
            shader_bound_checks,
        };
        let source = match desc.source {
            #[cfg(feature = "spirv")]
            ShaderSource::SpirV(ref spv) => {
                let options = naga::front::spv::Options {
                    // The SPIR-V is assumed to use WebGPU conventions
                    // already, so no coordinate-space flip is applied.
                    adjust_coordinate_space: false,
                    strict_capabilities: true,
                    block_ctx_dump_prefix: None,
                };
                wgc::pipeline::ShaderModuleSource::SpirV(Borrowed(spv), options)
            }
            #[cfg(feature = "glsl")]
            ShaderSource::Glsl {
                ref shader,
                stage,
                defines,
            } => {
                let options = naga::front::glsl::Options { stage, defines };
                wgc::pipeline::ShaderModuleSource::Glsl(Borrowed(shader), options)
            }
            #[cfg(feature = "wgsl")]
            ShaderSource::Wgsl(ref code) => wgc::pipeline::ShaderModuleSource::Wgsl(Borrowed(code)),
            #[cfg(feature = "naga-ir")]
            ShaderSource::Naga(module) => wgc::pipeline::ShaderModuleSource::Naga(module),
            ShaderSource::Dummy(_) => panic!("found `ShaderSource::Dummy`"),
        };
        let (id, error) =
            self.0
                .device_create_shader_module(device_data.id, &descriptor, source, None);
        let compilation_info = match error {
            Some(cause) => {
                // The error is cloned because it both feeds the error sink
                // and becomes the module's compilation info.
                self.handle_error(
                    &device_data.error_sink,
                    cause.clone(),
                    desc.label,
                    "Device::create_shader_module",
                );
                CompilationInfo::from(cause)
            }
            None => CompilationInfo { messages: vec![] },
        };

        ShaderModule {
            id,
            compilation_info,
        }
    }
840
    /// Creates a shader module directly from SPIR-V words, bypassing
    /// bounds-check instrumentation.
    ///
    /// # Safety
    /// Bound checks are explicitly disabled (`ShaderBoundChecks::unchecked`),
    /// so the caller must guarantee the SPIR-V performs no out-of-bounds
    /// accesses; forwards the contract of
    /// `Global::device_create_shader_module_spirv`.
    unsafe fn device_create_shader_module_spirv(
        &self,
        device_data: &Self::DeviceData,
        desc: &ShaderModuleDescriptorSpirV<'_>,
    ) -> Self::ShaderModuleData {
        let descriptor = wgc::pipeline::ShaderModuleDescriptor {
            label: desc.label.map(Borrowed),
            shader_bound_checks: unsafe { wgt::ShaderBoundChecks::unchecked() },
        };
        let (id, error) = unsafe {
            self.0.device_create_shader_module_spirv(
                device_data.id,
                &descriptor,
                Borrowed(&desc.source),
                None,
            )
        };
        let compilation_info = match error {
            Some(cause) => {
                // Cloned: the error feeds both the sink and the returned
                // compilation info.
                self.handle_error(
                    &device_data.error_sink,
                    cause.clone(),
                    desc.label,
                    "Device::create_shader_module_spirv",
                );
                CompilationInfo::from(cause)
            }
            None => CompilationInfo { messages: vec![] },
        };
        ShaderModule {
            id,
            compilation_info,
        }
    }
877
    /// Creates a bind group layout; errors go to the device's error sink
    /// and the (possibly error-marked) id is returned regardless.
    fn device_create_bind_group_layout(
        &self,
        device_data: &Self::DeviceData,
        desc: &BindGroupLayoutDescriptor<'_>,
    ) -> Self::BindGroupLayoutData {
        let descriptor = wgc::binding_model::BindGroupLayoutDescriptor {
            label: desc.label.map(Borrowed),
            entries: Borrowed(desc.entries),
        };
        let (id, error) = self
            .0
            .device_create_bind_group_layout(device_data.id, &descriptor, None);
        if let Some(cause) = error {
            self.handle_error(
                &device_data.error_sink,
                cause,
                desc.label,
                "Device::create_bind_group_layout",
            );
        }
        id
    }
    /// Creates a bind group, translating public binding resources into
    /// `wgpu-core` ones. Errors go to the device's error sink.
    fn device_create_bind_group(
        &self,
        device_data: &Self::DeviceData,
        desc: &BindGroupDescriptor<'_>,
    ) -> Self::BindGroupData {
        use wgc::binding_model as bm;

        // Array bindings in the core descriptor borrow slices of ids, so
        // all ids are first gathered into owned vectors; the per-entry
        // translation below then hands out sub-slices of them.
        let mut arrayed_texture_views = Vec::new();
        let mut arrayed_samplers = Vec::new();
        if device_data
            .features
            .contains(Features::TEXTURE_BINDING_ARRAY)
        {
            for entry in desc.entries.iter() {
                if let BindingResource::TextureViewArray(array) = entry.resource {
                    arrayed_texture_views
                        .extend(array.iter().map(|view| *downcast_texture_view(view)));
                }
                if let BindingResource::SamplerArray(array) = entry.resource {
                    arrayed_samplers.extend(array.iter().map(|sampler| *downcast_sampler(sampler)));
                }
            }
        }
        // Cursors into the gathered ids; each array entry consumes its
        // prefix in the same entry order as the gathering loops above.
        let mut remaining_arrayed_texture_views = &arrayed_texture_views[..];
        let mut remaining_arrayed_samplers = &arrayed_samplers[..];

        let mut arrayed_buffer_bindings = Vec::new();
        if device_data
            .features
            .contains(Features::BUFFER_BINDING_ARRAY)
        {
            for entry in desc.entries.iter() {
                if let BindingResource::BufferArray(array) = entry.resource {
                    arrayed_buffer_bindings.extend(array.iter().map(|binding| bm::BufferBinding {
                        buffer_id: downcast_buffer(binding.buffer).id,
                        offset: binding.offset,
                        size: binding.size,
                    }));
                }
            }
        }
        let mut remaining_arrayed_buffer_bindings = &arrayed_buffer_bindings[..];

        let entries = desc
            .entries
            .iter()
            .map(|entry| bm::BindGroupEntry {
                binding: entry.binding,
                resource: match entry.resource {
                    BindingResource::Buffer(BufferBinding {
                        buffer,
                        offset,
                        size,
                    }) => bm::BindingResource::Buffer(bm::BufferBinding {
                        buffer_id: downcast_buffer(buffer).id,
                        offset,
                        size,
                    }),
                    BindingResource::BufferArray(array) => {
                        // Take this entry's ids off the front of the cursor.
                        let slice = &remaining_arrayed_buffer_bindings[..array.len()];
                        remaining_arrayed_buffer_bindings =
                            &remaining_arrayed_buffer_bindings[array.len()..];
                        bm::BindingResource::BufferArray(Borrowed(slice))
                    }
                    BindingResource::Sampler(sampler) => {
                        bm::BindingResource::Sampler(*downcast_sampler(sampler))
                    }
                    BindingResource::SamplerArray(array) => {
                        let slice = &remaining_arrayed_samplers[..array.len()];
                        remaining_arrayed_samplers = &remaining_arrayed_samplers[array.len()..];
                        bm::BindingResource::SamplerArray(Borrowed(slice))
                    }
                    BindingResource::TextureView(texture_view) => {
                        bm::BindingResource::TextureView(*downcast_texture_view(texture_view))
                    }
                    BindingResource::TextureViewArray(array) => {
                        let slice = &remaining_arrayed_texture_views[..array.len()];
                        remaining_arrayed_texture_views =
                            &remaining_arrayed_texture_views[array.len()..];
                        bm::BindingResource::TextureViewArray(Borrowed(slice))
                    }
                },
            })
            .collect::<Vec<_>>();
        let descriptor = bm::BindGroupDescriptor {
            label: desc.label.as_ref().map(|label| Borrowed(&label[..])),
            layout: *downcast_bind_group_layout(desc.layout),
            entries: Borrowed(&entries),
        };

        let (id, error) = self
            .0
            .device_create_bind_group(device_data.id, &descriptor, None);
        if let Some(cause) = error {
            self.handle_error(
                &device_data.error_sink,
                cause,
                desc.label,
                "Device::create_bind_group",
            );
        }
        id
    }
    /// Creates a pipeline layout from up to `MAX_BIND_GROUPS` bind group
    /// layouts. Errors go to the device's error sink.
    ///
    /// # Panics
    /// Panics if more than `wgc::MAX_BIND_GROUPS` layouts are supplied
    /// (the `ArrayVec` below has exactly that capacity).
    fn device_create_pipeline_layout(
        &self,
        device_data: &Self::DeviceData,
        desc: &PipelineLayoutDescriptor<'_>,
    ) -> Self::PipelineLayoutData {
        assert!(
            desc.bind_group_layouts.len() <= wgc::MAX_BIND_GROUPS,
            "Bind group layout count {} exceeds device bind group limit {}",
            desc.bind_group_layouts.len(),
            wgc::MAX_BIND_GROUPS
        );

        let temp_layouts = desc
            .bind_group_layouts
            .iter()
            .map(|bgl| *downcast_bind_group_layout(bgl))
            .collect::<ArrayVec<_, { wgc::MAX_BIND_GROUPS }>>();
        let descriptor = wgc::binding_model::PipelineLayoutDescriptor {
            label: desc.label.map(Borrowed),
            bind_group_layouts: Borrowed(&temp_layouts),
            push_constant_ranges: Borrowed(desc.push_constant_ranges),
        };

        let (id, error) = self
            .0
            .device_create_pipeline_layout(device_data.id, &descriptor, None);
        if let Some(cause) = error {
            self.handle_error(
                &device_data.error_sink,
                cause,
                desc.label,
                "Device::create_pipeline_layout",
            );
        }
        id
    }
    /// Creates a render pipeline. Internal (translation) errors are
    /// additionally logged with a request to report upstream; all errors
    /// go to the device's error sink.
    fn device_create_render_pipeline(
        &self,
        device_data: &Self::DeviceData,
        desc: &RenderPipelineDescriptor<'_>,
    ) -> Self::RenderPipelineData {
        use wgc::pipeline as pipe;

        let vertex_buffers: ArrayVec<_, { wgc::MAX_VERTEX_BUFFERS }> = desc
            .vertex
            .buffers
            .iter()
            .map(|vbuf| pipe::VertexBufferLayout {
                array_stride: vbuf.array_stride,
                step_mode: vbuf.step_mode,
                attributes: Borrowed(vbuf.attributes),
            })
            .collect();

        let descriptor = pipe::RenderPipelineDescriptor {
            label: desc.label.map(Borrowed),
            // `None` layout requests an implicit layout from the shaders.
            layout: desc.layout.map(downcast_pipeline_layout).copied(),
            vertex: pipe::VertexState {
                stage: pipe::ProgrammableStageDescriptor {
                    module: downcast_shader_module(desc.vertex.module).id,
                    entry_point: desc.vertex.entry_point.map(Borrowed),
                    constants: Borrowed(desc.vertex.compilation_options.constants),
                    zero_initialize_workgroup_memory: desc
                        .vertex
                        .compilation_options
                        .zero_initialize_workgroup_memory,
                },
                buffers: Borrowed(&vertex_buffers),
            },
            primitive: desc.primitive,
            depth_stencil: desc.depth_stencil.clone(),
            multisample: desc.multisample,
            fragment: desc.fragment.as_ref().map(|frag| pipe::FragmentState {
                stage: pipe::ProgrammableStageDescriptor {
                    module: downcast_shader_module(frag.module).id,
                    entry_point: frag.entry_point.map(Borrowed),
                    constants: Borrowed(frag.compilation_options.constants),
                    zero_initialize_workgroup_memory: frag
                        .compilation_options
                        .zero_initialize_workgroup_memory,
                },
                targets: Borrowed(frag.targets),
            }),
            multiview: desc.multiview,
            cache: desc.cache.map(downcast_pipeline_cache).copied(),
        };

        let (id, error) =
            self.0
                .device_create_render_pipeline(device_data.id, &descriptor, None, None);
        if let Some(cause) = error {
            // Internal errors indicate a wgpu bug, not a user error —
            // surface them loudly and ask for an upstream report.
            if let wgc::pipeline::CreateRenderPipelineError::Internal { stage, ref error } = cause {
                log::error!("Shader translation error for stage {:?}: {}", stage, error);
                log::error!("Please report it to https://github.com/gfx-rs/wgpu");
            }
            self.handle_error(
                &device_data.error_sink,
                cause,
                desc.label,
                "Device::create_render_pipeline",
            );
        }
        RenderPipeline {
            id,
            error_sink: Arc::clone(&device_data.error_sink),
        }
    }
    /// Creates a compute pipeline. Internal (translation) errors are
    /// additionally logged with a request to report upstream; all errors
    /// go to the device's error sink.
    fn device_create_compute_pipeline(
        &self,
        device_data: &Self::DeviceData,
        desc: &ComputePipelineDescriptor<'_>,
    ) -> Self::ComputePipelineData {
        use wgc::pipeline as pipe;

        let descriptor = pipe::ComputePipelineDescriptor {
            label: desc.label.map(Borrowed),
            // `None` layout requests an implicit layout from the shader.
            layout: desc.layout.map(downcast_pipeline_layout).copied(),
            stage: pipe::ProgrammableStageDescriptor {
                module: downcast_shader_module(desc.module).id,
                entry_point: desc.entry_point.map(Borrowed),
                constants: Borrowed(desc.compilation_options.constants),
                zero_initialize_workgroup_memory: desc
                    .compilation_options
                    .zero_initialize_workgroup_memory,
            },
            cache: desc.cache.map(downcast_pipeline_cache).copied(),
        };

        let (id, error) =
            self.0
                .device_create_compute_pipeline(device_data.id, &descriptor, None, None);
        if let Some(cause) = error {
            // Internal errors indicate a wgpu bug, not a user error.
            if let wgc::pipeline::CreateComputePipelineError::Internal(ref error) = cause {
                log::error!(
                    "Shader translation error for stage {:?}: {}",
                    wgt::ShaderStages::COMPUTE,
                    error
                );
                log::error!("Please report it to https://github.com/gfx-rs/wgpu");
            }
            self.handle_error(
                &device_data.error_sink,
                cause,
                desc.label,
                "Device::create_compute_pipeline",
            );
        }
        ComputePipeline {
            id,
            error_sink: Arc::clone(&device_data.error_sink),
        }
    }
1159
1160 unsafe fn device_create_pipeline_cache(
1161 &self,
1162 device_data: &Self::DeviceData,
1163 desc: &PipelineCacheDescriptor<'_>,
1164 ) -> Self::PipelineCacheData {
1165 use wgc::pipeline as pipe;
1166
1167 let descriptor = pipe::PipelineCacheDescriptor {
1168 label: desc.label.map(Borrowed),
1169 data: desc.data.map(Borrowed),
1170 fallback: desc.fallback,
1171 };
1172 let (id, error) = unsafe {
1173 self.0
1174 .device_create_pipeline_cache(device_data.id, &descriptor, None)
1175 };
1176 if let Some(cause) = error {
1177 self.handle_error(
1178 &device_data.error_sink,
1179 cause,
1180 desc.label,
1181 "Device::device_create_pipeline_cache_init",
1182 );
1183 }
1184 id
1185 }
1186
1187 fn device_create_buffer(
1188 &self,
1189 device_data: &Self::DeviceData,
1190 desc: &crate::BufferDescriptor<'_>,
1191 ) -> Self::BufferData {
1192 let (id, error) =
1193 self.0
1194 .device_create_buffer(device_data.id, &desc.map_label(|l| l.map(Borrowed)), None);
1195 if let Some(cause) = error {
1196 self.handle_error(
1197 &device_data.error_sink,
1198 cause,
1199 desc.label,
1200 "Device::create_buffer",
1201 );
1202 }
1203
1204 Buffer {
1205 id,
1206 error_sink: Arc::clone(&device_data.error_sink),
1207 }
1208 }
1209 fn device_create_texture(
1210 &self,
1211 device_data: &Self::DeviceData,
1212 desc: &TextureDescriptor<'_>,
1213 ) -> Self::TextureData {
1214 let wgt_desc = desc.map_label_and_view_formats(|l| l.map(Borrowed), |v| v.to_vec());
1215 let (id, error) = self
1216 .0
1217 .device_create_texture(device_data.id, &wgt_desc, None);
1218 if let Some(cause) = error {
1219 self.handle_error(
1220 &device_data.error_sink,
1221 cause,
1222 desc.label,
1223 "Device::create_texture",
1224 );
1225 }
1226
1227 Texture {
1228 id,
1229 error_sink: Arc::clone(&device_data.error_sink),
1230 }
1231 }
1232 fn device_create_sampler(
1233 &self,
1234 device_data: &Self::DeviceData,
1235 desc: &SamplerDescriptor<'_>,
1236 ) -> Self::SamplerData {
1237 let descriptor = wgc::resource::SamplerDescriptor {
1238 label: desc.label.map(Borrowed),
1239 address_modes: [
1240 desc.address_mode_u,
1241 desc.address_mode_v,
1242 desc.address_mode_w,
1243 ],
1244 mag_filter: desc.mag_filter,
1245 min_filter: desc.min_filter,
1246 mipmap_filter: desc.mipmap_filter,
1247 lod_min_clamp: desc.lod_min_clamp,
1248 lod_max_clamp: desc.lod_max_clamp,
1249 compare: desc.compare,
1250 anisotropy_clamp: desc.anisotropy_clamp,
1251 border_color: desc.border_color,
1252 };
1253
1254 let (id, error) = self
1255 .0
1256 .device_create_sampler(device_data.id, &descriptor, None);
1257 if let Some(cause) = error {
1258 self.handle_error(
1259 &device_data.error_sink,
1260 cause,
1261 desc.label,
1262 "Device::create_sampler",
1263 );
1264 }
1265 id
1266 }
1267 fn device_create_query_set(
1268 &self,
1269 device_data: &Self::DeviceData,
1270 desc: &wgt::QuerySetDescriptor<Label<'_>>,
1271 ) -> Self::QuerySetData {
1272 let (id, error) = self.0.device_create_query_set(
1273 device_data.id,
1274 &desc.map_label(|l| l.map(Borrowed)),
1275 None,
1276 );
1277 if let Some(cause) = error {
1278 self.handle_error_nolabel(&device_data.error_sink, cause, "Device::create_query_set");
1279 }
1280 id
1281 }
1282 fn device_create_command_encoder(
1283 &self,
1284 device_data: &Self::DeviceData,
1285 desc: &CommandEncoderDescriptor<'_>,
1286 ) -> Self::CommandEncoderData {
1287 let (id, error) = self.0.device_create_command_encoder(
1288 device_data.id,
1289 &desc.map_label(|l| l.map(Borrowed)),
1290 None,
1291 );
1292 if let Some(cause) = error {
1293 self.handle_error(
1294 &device_data.error_sink,
1295 cause,
1296 desc.label,
1297 "Device::create_command_encoder",
1298 );
1299 }
1300
1301 CommandEncoder {
1302 id,
1303 error_sink: Arc::clone(&device_data.error_sink),
1304 open: true,
1305 }
1306 }
1307 fn device_create_render_bundle_encoder(
1308 &self,
1309 device_data: &Self::DeviceData,
1310 desc: &RenderBundleEncoderDescriptor<'_>,
1311 ) -> Self::RenderBundleEncoderData {
1312 let descriptor = wgc::command::RenderBundleEncoderDescriptor {
1313 label: desc.label.map(Borrowed),
1314 color_formats: Borrowed(desc.color_formats),
1315 depth_stencil: desc.depth_stencil,
1316 sample_count: desc.sample_count,
1317 multiview: desc.multiview,
1318 };
1319 match wgc::command::RenderBundleEncoder::new(&descriptor, device_data.id, None) {
1320 Ok(encoder) => encoder,
1321 Err(e) => panic!("Error in Device::create_render_bundle_encoder: {e}"),
1322 }
1323 }
    /// Drops the device. On native/Emscripten this first waits for all
    /// in-flight submissions so device-owned resources can be reclaimed.
    #[cfg_attr(not(any(native, Emscripten)), allow(unused))]
    fn device_drop(&self, device_data: &Self::DeviceData) {
        #[cfg(any(native, Emscripten))]
        {
            // Best-effort flush; a poll error here is ignored on purpose
            // since we are tearing the device down anyway.
            let _ = self.0.device_poll(device_data.id, wgt::Maintain::wait());
            self.0.device_drop(device_data.id);
        }
    }
    /// Releases the queue id in wgpu-core.
    #[cfg_attr(target_arch = "wasm32", allow(unused))]
    fn queue_drop(&self, queue_data: &Self::QueueData) {
        self.0.queue_drop(queue_data.id);
    }
    /// Installs the callback wgpu-core invokes when the device is lost.
    fn device_set_device_lost_callback(
        &self,
        device_data: &Self::DeviceData,
        device_lost_callback: crate::context::DeviceLostCallback,
    ) {
        // Wrap the Rust callback in the wgpu-core closure type before
        // handing it over.
        let device_lost_closure = DeviceLostClosure::from_rust(device_lost_callback);
        self.0
            .device_set_device_lost_closure(device_data.id, device_lost_closure);
    }
    /// Explicitly destroys the device (as opposed to merely dropping the id).
    fn device_destroy(&self, device_data: &Self::DeviceData) {
        self.0.device_destroy(device_data.id);
    }
1350 fn device_poll(
1351 &self,
1352 device_data: &Self::DeviceData,
1353 maintain: crate::Maintain,
1354 ) -> wgt::MaintainResult {
1355 let maintain_inner = maintain.map_index(|i| *i.data.as_ref().downcast_ref().unwrap());
1356 match self.0.device_poll(device_data.id, maintain_inner) {
1357 Ok(done) => match done {
1358 true => wgt::MaintainResult::SubmissionQueueEmpty,
1359 false => wgt::MaintainResult::Ok,
1360 },
1361 Err(err) => self.handle_error_fatal(err, "Device::poll"),
1362 }
1363 }
    /// Replaces the device's uncaptured-error handler (invoked for errors
    /// that no error scope captures).
    fn device_on_uncaptured_error(
        &self,
        device_data: &Self::DeviceData,
        handler: Box<dyn UncapturedErrorHandler>,
    ) {
        let mut error_sink = device_data.error_sink.lock();
        error_sink.uncaptured_handler = Some(handler);
    }
    /// Pushes a new error scope with the given filter onto the device's
    /// scope stack; subsequent matching errors are captured by it.
    fn device_push_error_scope(&self, device_data: &Self::DeviceData, filter: crate::ErrorFilter) {
        let mut error_sink = device_data.error_sink.lock();
        error_sink.scopes.push(ErrorScope {
            error: None,
            filter,
        });
    }
    /// Pops the innermost error scope and resolves immediately with any
    /// error it captured.
    ///
    /// Panics (via `unwrap`) if called with no scope pushed — an API
    /// misuse the caller is expected to avoid.
    fn device_pop_error_scope(&self, device_data: &Self::DeviceData) -> Self::PopErrorScopeFuture {
        let mut error_sink = device_data.error_sink.lock();
        let scope = error_sink.scopes.pop().unwrap();
        ready(scope.error)
    }
1384
    /// Starts an asynchronous map of `range` of the buffer; `callback`
    /// fires when the mapping resolves (success or failure).
    fn buffer_map_async(
        &self,
        buffer_data: &Self::BufferData,
        mode: MapMode,
        range: Range<wgt::BufferAddress>,
        callback: crate::context::BufferMapCallback,
    ) {
        let operation = wgc::resource::BufferMapOperation {
            host: match mode {
                MapMode::Read => wgc::device::HostMap::Read,
                MapMode::Write => wgc::device::HostMap::Write,
            },
            callback: Some(wgc::resource::BufferMapCallback::from_rust(Box::new(
                |status| {
                    // The public API exposes only an opaque error type, so
                    // the detailed wgpu-core error is discarded here.
                    let res = status.map_err(|_| crate::BufferAsyncError);
                    callback(res);
                },
            ))),
        };

        // Errors raised synchronously (before the callback is scheduled)
        // still go through the buffer's error sink.
        match self.0.buffer_map_async(
            buffer_data.id,
            range.start,
            Some(range.end - range.start),
            operation,
        ) {
            Ok(()) => (),
            Err(cause) => {
                self.handle_error_nolabel(&buffer_data.error_sink, cause, "Buffer::map_async")
            }
        }
    }
    /// Returns a view over the mapped portion of the buffer covering
    /// `sub_range`. Fails fatally (panic path) if the range is invalid or
    /// the buffer is not mapped, since there is no way to return an error.
    fn buffer_get_mapped_range(
        &self,
        buffer_data: &Self::BufferData,
        sub_range: Range<wgt::BufferAddress>,
    ) -> Box<dyn crate::context::BufferMappedRange> {
        let size = sub_range.end - sub_range.start;
        match self
            .0
            .buffer_get_mapped_range(buffer_data.id, sub_range.start, Some(size))
        {
            // wgpu-core hands back a raw pointer plus the actual byte size.
            Ok((ptr, size)) => Box::new(BufferMappedRange {
                ptr,
                size: size as usize,
            }),
            Err(err) => self.handle_error_fatal(err, "Buffer::get_mapped_range"),
        }
    }
1434
1435 fn buffer_unmap(&self, buffer_data: &Self::BufferData) {
1436 match self.0.buffer_unmap(buffer_data.id) {
1437 Ok(()) => (),
1438 Err(cause) => {
1439 self.handle_error_nolabel(&buffer_data.error_sink, cause, "Buffer::buffer_unmap")
1440 }
1441 }
1442 }
1443
    /// Returns the compilation info captured at shader-module creation
    /// time; already resolved, so the future is immediately ready.
    fn shader_get_compilation_info(
        &self,
        shader_data: &Self::ShaderModuleData,
    ) -> Self::CompilationInfoFuture {
        ready(shader_data.compilation_info.clone())
    }
1450
1451 fn texture_create_view(
1452 &self,
1453 texture_data: &Self::TextureData,
1454 desc: &TextureViewDescriptor<'_>,
1455 ) -> Self::TextureViewData {
1456 let descriptor = wgc::resource::TextureViewDescriptor {
1457 label: desc.label.map(Borrowed),
1458 format: desc.format,
1459 dimension: desc.dimension,
1460 range: wgt::ImageSubresourceRange {
1461 aspect: desc.aspect,
1462 base_mip_level: desc.base_mip_level,
1463 mip_level_count: desc.mip_level_count,
1464 base_array_layer: desc.base_array_layer,
1465 array_layer_count: desc.array_layer_count,
1466 },
1467 };
1468 let (id, error) = self
1469 .0
1470 .texture_create_view(texture_data.id, &descriptor, None);
1471 if let Some(cause) = error {
1472 self.handle_error(
1473 &texture_data.error_sink,
1474 cause,
1475 desc.label,
1476 "Texture::create_view",
1477 );
1478 }
1479 id
1480 }
1481
    /// Releases the surface id in wgpu-core.
    fn surface_drop(&self, surface_data: &Self::SurfaceData) {
        self.0.surface_drop(surface_data.id)
    }
1485
    /// Releases the adapter id in wgpu-core.
    fn adapter_drop(&self, adapter_data: &Self::AdapterData) {
        self.0.adapter_drop(*adapter_data)
    }
1489
    /// Explicitly destroys the buffer's backing resource; the result is
    /// deliberately ignored (destroy is best-effort here).
    fn buffer_destroy(&self, buffer_data: &Self::BufferData) {
        let _ = self.0.buffer_destroy(buffer_data.id);
    }
1494
    /// Releases the buffer id in wgpu-core.
    fn buffer_drop(&self, buffer_data: &Self::BufferData) {
        self.0.buffer_drop(buffer_data.id)
    }
1498
    /// Explicitly destroys the texture's backing resource; the result is
    /// deliberately ignored (destroy is best-effort here).
    fn texture_destroy(&self, texture_data: &Self::TextureData) {
        let _ = self.0.texture_destroy(texture_data.id);
    }
1503
    /// Releases the texture id in wgpu-core.
    fn texture_drop(&self, texture_data: &Self::TextureData) {
        self.0.texture_drop(texture_data.id)
    }
1507
    /// Releases the texture-view id; the result is deliberately ignored.
    fn texture_view_drop(&self, texture_view_data: &Self::TextureViewData) {
        let _ = self.0.texture_view_drop(*texture_view_data);
    }
1511
    /// Releases the sampler id in wgpu-core.
    fn sampler_drop(&self, sampler_data: &Self::SamplerData) {
        self.0.sampler_drop(*sampler_data)
    }
1515
    /// Releases the query-set id in wgpu-core.
    fn query_set_drop(&self, query_set_data: &Self::QuerySetData) {
        self.0.query_set_drop(*query_set_data)
    }
1519
    /// Releases the bind-group id in wgpu-core.
    fn bind_group_drop(&self, bind_group_data: &Self::BindGroupData) {
        self.0.bind_group_drop(*bind_group_data)
    }
1523
    /// Releases the bind-group-layout id in wgpu-core.
    fn bind_group_layout_drop(&self, bind_group_layout_data: &Self::BindGroupLayoutData) {
        self.0.bind_group_layout_drop(*bind_group_layout_data)
    }
1527
    /// Releases the pipeline-layout id in wgpu-core.
    fn pipeline_layout_drop(&self, pipeline_layout_data: &Self::PipelineLayoutData) {
        self.0.pipeline_layout_drop(*pipeline_layout_data)
    }
    /// Releases the shader-module id in wgpu-core.
    fn shader_module_drop(&self, shader_module_data: &Self::ShaderModuleData) {
        self.0.shader_module_drop(shader_module_data.id)
    }
    /// Releases the encoder id, but only if the encoder was never finished;
    /// `finish` transfers ownership of the id to the command buffer.
    fn command_encoder_drop(&self, command_encoder_data: &Self::CommandEncoderData) {
        if command_encoder_data.open {
            self.0.command_encoder_drop(command_encoder_data.id)
        }
    }
1539
    /// Releases the command-buffer id in wgpu-core.
    fn command_buffer_drop(&self, command_buffer_data: &Self::CommandBufferData) {
        self.0.command_buffer_drop(*command_buffer_data)
    }
1543
    /// Releases the render-bundle id in wgpu-core.
    fn render_bundle_drop(&self, render_bundle_data: &Self::RenderBundleData) {
        self.0.render_bundle_drop(*render_bundle_data)
    }
1547
    /// Releases the compute-pipeline id in wgpu-core.
    fn compute_pipeline_drop(&self, pipeline_data: &Self::ComputePipelineData) {
        self.0.compute_pipeline_drop(pipeline_data.id)
    }
1551
    /// Releases the render-pipeline id in wgpu-core.
    fn render_pipeline_drop(&self, pipeline_data: &Self::RenderPipelineData) {
        self.0.render_pipeline_drop(pipeline_data.id)
    }
1555
    /// Releases the pipeline-cache id in wgpu-core.
    fn pipeline_cache_drop(&self, cache_data: &Self::PipelineCacheData) {
        self.0.pipeline_cache_drop(*cache_data)
    }
1559
1560 fn compute_pipeline_get_bind_group_layout(
1561 &self,
1562 pipeline_data: &Self::ComputePipelineData,
1563 index: u32,
1564 ) -> Self::BindGroupLayoutData {
1565 let (id, error) =
1566 self.0
1567 .compute_pipeline_get_bind_group_layout(pipeline_data.id, index, None);
1568 if let Some(err) = error {
1569 self.handle_error_nolabel(
1570 &pipeline_data.error_sink,
1571 err,
1572 "ComputePipeline::get_bind_group_layout",
1573 )
1574 }
1575 id
1576 }
1577
1578 fn render_pipeline_get_bind_group_layout(
1579 &self,
1580 pipeline_data: &Self::RenderPipelineData,
1581 index: u32,
1582 ) -> Self::BindGroupLayoutData {
1583 let (id, error) =
1584 self.0
1585 .render_pipeline_get_bind_group_layout(pipeline_data.id, index, None);
1586 if let Some(err) = error {
1587 self.handle_error_nolabel(
1588 &pipeline_data.error_sink,
1589 err,
1590 "RenderPipeline::get_bind_group_layout",
1591 )
1592 }
1593 id
1594 }
1595
    /// Records a buffer-to-buffer copy; validation errors are reported
    /// through the encoder's error sink.
    fn command_encoder_copy_buffer_to_buffer(
        &self,
        encoder_data: &Self::CommandEncoderData,
        source_data: &Self::BufferData,
        source_offset: wgt::BufferAddress,
        destination_data: &Self::BufferData,
        destination_offset: wgt::BufferAddress,
        copy_size: wgt::BufferAddress,
    ) {
        if let Err(cause) = self.0.command_encoder_copy_buffer_to_buffer(
            encoder_data.id,
            source_data.id,
            source_offset,
            destination_data.id,
            destination_offset,
            copy_size,
        ) {
            self.handle_error_nolabel(
                &encoder_data.error_sink,
                cause,
                "CommandEncoder::copy_buffer_to_buffer",
            );
        }
    }
1620
    /// Records a buffer-to-texture copy; validation errors are reported
    /// through the encoder's error sink.
    fn command_encoder_copy_buffer_to_texture(
        &self,
        encoder_data: &Self::CommandEncoderData,
        source: crate::ImageCopyBuffer<'_>,
        destination: crate::ImageCopyTexture<'_>,
        copy_size: wgt::Extent3d,
    ) {
        if let Err(cause) = self.0.command_encoder_copy_buffer_to_texture(
            encoder_data.id,
            &map_buffer_copy_view(source),
            &map_texture_copy_view(destination),
            &copy_size,
        ) {
            self.handle_error_nolabel(
                &encoder_data.error_sink,
                cause,
                "CommandEncoder::copy_buffer_to_texture",
            );
        }
    }
1641
    /// Records a texture-to-buffer copy; validation errors are reported
    /// through the encoder's error sink.
    fn command_encoder_copy_texture_to_buffer(
        &self,
        encoder_data: &Self::CommandEncoderData,
        source: crate::ImageCopyTexture<'_>,
        destination: crate::ImageCopyBuffer<'_>,
        copy_size: wgt::Extent3d,
    ) {
        if let Err(cause) = self.0.command_encoder_copy_texture_to_buffer(
            encoder_data.id,
            &map_texture_copy_view(source),
            &map_buffer_copy_view(destination),
            &copy_size,
        ) {
            self.handle_error_nolabel(
                &encoder_data.error_sink,
                cause,
                "CommandEncoder::copy_texture_to_buffer",
            );
        }
    }
1662
    /// Records a texture-to-texture copy; validation errors are reported
    /// through the encoder's error sink.
    fn command_encoder_copy_texture_to_texture(
        &self,
        encoder_data: &Self::CommandEncoderData,
        source: crate::ImageCopyTexture<'_>,
        destination: crate::ImageCopyTexture<'_>,
        copy_size: wgt::Extent3d,
    ) {
        if let Err(cause) = self.0.command_encoder_copy_texture_to_texture(
            encoder_data.id,
            &map_texture_copy_view(source),
            &map_texture_copy_view(destination),
            &copy_size,
        ) {
            self.handle_error_nolabel(
                &encoder_data.error_sink,
                cause,
                "CommandEncoder::copy_texture_to_texture",
            );
        }
    }
1683
    /// Begins a compute pass on the encoder. Pass-creation errors are
    /// reported via the encoder's error sink; the returned pass data
    /// carries that same sink so recording errors can be reported too.
    fn command_encoder_begin_compute_pass(
        &self,
        encoder_data: &Self::CommandEncoderData,
        desc: &ComputePassDescriptor<'_>,
    ) -> Self::ComputePassData {
        // Translate the optional timestamp-writes block into the
        // wgpu-core form (query-set id plus begin/end write indices).
        let timestamp_writes =
            desc.timestamp_writes
                .as_ref()
                .map(|tw| wgc::command::PassTimestampWrites {
                    query_set: *downcast_query_set(tw.query_set),
                    beginning_of_pass_write_index: tw.beginning_of_pass_write_index,
                    end_of_pass_write_index: tw.end_of_pass_write_index,
                });

        let (pass, err) = self.0.command_encoder_create_compute_pass(
            encoder_data.id,
            &wgc::command::ComputePassDescriptor {
                label: desc.label.map(Borrowed),
                timestamp_writes: timestamp_writes.as_ref(),
            },
        );

        if let Some(cause) = err {
            self.handle_error(
                &encoder_data.error_sink,
                cause,
                desc.label,
                "CommandEncoder::begin_compute_pass",
            );
        }

        Self::ComputePassData {
            pass,
            error_sink: encoder_data.error_sink.clone(),
        }
    }
1720
    /// Begins a render pass on the encoder, converting color, depth/stencil
    /// and timestamp attachments into their wgpu-core forms. Errors are
    /// reported via the encoder's error sink; the returned pass data
    /// carries the same sink for later recording errors.
    fn command_encoder_begin_render_pass(
        &self,
        encoder_data: &Self::CommandEncoderData,
        desc: &crate::RenderPassDescriptor<'_>,
    ) -> Self::RenderPassData {
        // Per-slot color attachments; `None` entries (holes) are preserved.
        let colors = desc
            .color_attachments
            .iter()
            .map(|ca| {
                ca.as_ref()
                    .map(|at| wgc::command::RenderPassColorAttachment {
                        view: *downcast_texture_view(at.view),
                        resolve_target: at.resolve_target.map(downcast_texture_view).copied(),
                        channel: map_pass_channel(Some(&at.ops)),
                    })
            })
            .collect::<Vec<_>>();

        let depth_stencil = desc.depth_stencil_attachment.as_ref().map(|dsa| {
            wgc::command::RenderPassDepthStencilAttachment {
                view: *downcast_texture_view(dsa.view),
                // Depth and stencil load/store ops are independent.
                depth: map_pass_channel(dsa.depth_ops.as_ref()),
                stencil: map_pass_channel(dsa.stencil_ops.as_ref()),
            }
        });

        let timestamp_writes =
            desc.timestamp_writes
                .as_ref()
                .map(|tw| wgc::command::PassTimestampWrites {
                    query_set: *downcast_query_set(tw.query_set),
                    beginning_of_pass_write_index: tw.beginning_of_pass_write_index,
                    end_of_pass_write_index: tw.end_of_pass_write_index,
                });

        let (pass, err) = self.0.command_encoder_create_render_pass(
            encoder_data.id,
            &wgc::command::RenderPassDescriptor {
                label: desc.label.map(Borrowed),
                timestamp_writes: timestamp_writes.as_ref(),
                color_attachments: std::borrow::Cow::Borrowed(&colors),
                depth_stencil_attachment: depth_stencil.as_ref(),
                occlusion_query_set: desc.occlusion_query_set.map(downcast_query_set).copied(),
            },
        );

        if let Some(cause) = err {
            self.handle_error(
                &encoder_data.error_sink,
                cause,
                desc.label,
                "CommandEncoder::begin_render_pass",
            );
        }

        Self::RenderPassData {
            pass,
            error_sink: encoder_data.error_sink.clone(),
        }
    }
1781
1782 fn command_encoder_finish(
1783 &self,
1784 encoder_data: &mut Self::CommandEncoderData,
1785 ) -> Self::CommandBufferData {
1786 let descriptor = wgt::CommandBufferDescriptor::default();
1787 encoder_data.open = false; let (id, error) = self.0.command_encoder_finish(encoder_data.id, &descriptor);
1789 if let Some(cause) = error {
1790 self.handle_error_nolabel(&encoder_data.error_sink, cause, "a CommandEncoder");
1791 }
1792 id
1793 }
1794
    /// Records a clear of the given subresource range of `texture_data`;
    /// errors go through the encoder's error sink.
    fn command_encoder_clear_texture(
        &self,
        encoder_data: &Self::CommandEncoderData,
        texture_data: &Self::TextureData,
        subresource_range: &wgt::ImageSubresourceRange,
    ) {
        if let Err(cause) = self.0.command_encoder_clear_texture(
            encoder_data.id,
            texture_data.id,
            subresource_range,
        ) {
            self.handle_error_nolabel(
                &encoder_data.error_sink,
                cause,
                "CommandEncoder::clear_texture",
            );
        }
    }
1813
1814 fn command_encoder_clear_buffer(
1815 &self,
1816 encoder_data: &Self::CommandEncoderData,
1817 buffer_data: &Self::BufferData,
1818 offset: wgt::BufferAddress,
1819 size: Option<wgt::BufferAddress>,
1820 ) {
1821 if let Err(cause) =
1822 self.0
1823 .command_encoder_clear_buffer(encoder_data.id, buffer_data.id, offset, size)
1824 {
1825 self.handle_error_nolabel(
1826 &encoder_data.error_sink,
1827 cause,
1828 "CommandEncoder::fill_buffer",
1829 );
1830 }
1831 }
1832
    /// Records a one-off debug marker string into the command stream.
    fn command_encoder_insert_debug_marker(
        &self,
        encoder_data: &Self::CommandEncoderData,
        label: &str,
    ) {
        if let Err(cause) = self
            .0
            .command_encoder_insert_debug_marker(encoder_data.id, label)
        {
            self.handle_error_nolabel(
                &encoder_data.error_sink,
                cause,
                "CommandEncoder::insert_debug_marker",
            );
        }
    }
1849
    /// Opens a named debug group; must be balanced by `pop_debug_group`.
    fn command_encoder_push_debug_group(
        &self,
        encoder_data: &Self::CommandEncoderData,
        label: &str,
    ) {
        if let Err(cause) = self
            .0
            .command_encoder_push_debug_group(encoder_data.id, label)
        {
            self.handle_error_nolabel(
                &encoder_data.error_sink,
                cause,
                "CommandEncoder::push_debug_group",
            );
        }
    }
1866
    /// Closes the innermost open debug group on the encoder.
    fn command_encoder_pop_debug_group(&self, encoder_data: &Self::CommandEncoderData) {
        if let Err(cause) = self.0.command_encoder_pop_debug_group(encoder_data.id) {
            self.handle_error_nolabel(
                &encoder_data.error_sink,
                cause,
                "CommandEncoder::pop_debug_group",
            );
        }
    }
1876
    /// Records a timestamp write into `query_set_data` at `query_index`.
    fn command_encoder_write_timestamp(
        &self,
        encoder_data: &Self::CommandEncoderData,
        query_set_data: &Self::QuerySetData,
        query_index: u32,
    ) {
        if let Err(cause) =
            self.0
                .command_encoder_write_timestamp(encoder_data.id, *query_set_data, query_index)
        {
            self.handle_error_nolabel(
                &encoder_data.error_sink,
                cause,
                "CommandEncoder::write_timestamp",
            );
        }
    }
1894
    /// Records a resolve of `query_count` queries (starting at
    /// `first_query`) into `destination_data` at `destination_offset`.
    fn command_encoder_resolve_query_set(
        &self,
        encoder_data: &Self::CommandEncoderData,
        query_set_data: &Self::QuerySetData,
        first_query: u32,
        query_count: u32,
        destination_data: &Self::BufferData,
        destination_offset: wgt::BufferAddress,
    ) {
        if let Err(cause) = self.0.command_encoder_resolve_query_set(
            encoder_data.id,
            *query_set_data,
            first_query,
            query_count,
            destination_data.id,
            destination_offset,
        ) {
            self.handle_error_nolabel(
                &encoder_data.error_sink,
                cause,
                "CommandEncoder::resolve_query_set",
            );
        }
    }
1919
    /// Finishes the bundle encoder into a render bundle. There is no error
    /// sink associated with bundle encoders, so failure is fatal.
    fn render_bundle_encoder_finish(
        &self,
        encoder_data: Self::RenderBundleEncoderData,
        desc: &crate::RenderBundleDescriptor<'_>,
    ) -> Self::RenderBundleData {
        let (id, error) = self.0.render_bundle_encoder_finish(
            encoder_data,
            &desc.map_label(|l| l.map(Borrowed)),
            None,
        );
        if let Some(err) = error {
            self.handle_error_fatal(err, "RenderBundleEncoder::finish");
        }
        id
    }
1935
1936 fn queue_write_buffer(
1937 &self,
1938 queue_data: &Self::QueueData,
1939 buffer_data: &Self::BufferData,
1940 offset: wgt::BufferAddress,
1941 data: &[u8],
1942 ) {
1943 match self
1944 .0
1945 .queue_write_buffer(queue_data.id, buffer_data.id, offset, data)
1946 {
1947 Ok(()) => (),
1948 Err(err) => {
1949 self.handle_error_nolabel(&queue_data.error_sink, err, "Queue::write_buffer")
1950 }
1951 }
1952 }
1953
1954 fn queue_validate_write_buffer(
1955 &self,
1956 queue_data: &Self::QueueData,
1957 buffer_data: &Self::BufferData,
1958 offset: wgt::BufferAddress,
1959 size: wgt::BufferSize,
1960 ) -> Option<()> {
1961 match self
1962 .0
1963 .queue_validate_write_buffer(queue_data.id, buffer_data.id, offset, size)
1964 {
1965 Ok(()) => Some(()),
1966 Err(err) => {
1967 self.handle_error_nolabel(&queue_data.error_sink, err, "Queue::write_buffer_with");
1968 None
1969 }
1970 }
1971 }
1972
    /// Allocates a write-staging buffer of `size` bytes on the queue and
    /// returns it wrapped with its mapped-memory view. On failure the
    /// error is reported via the queue's error sink and `None` is returned.
    fn queue_create_staging_buffer(
        &self,
        queue_data: &Self::QueueData,
        size: wgt::BufferSize,
    ) -> Option<Box<dyn crate::context::QueueWriteBuffer>> {
        match self
            .0
            .queue_create_staging_buffer(queue_data.id, size, None)
        {
            // wgpu-core returns the buffer id plus a pointer to the
            // already-mapped staging memory.
            Ok((buffer_id, ptr)) => Some(Box::new(QueueWriteBuffer {
                buffer_id,
                mapping: BufferMappedRange {
                    ptr,
                    size: size.get() as usize,
                },
            })),
            Err(err) => {
                self.handle_error_nolabel(&queue_data.error_sink, err, "Queue::write_buffer_with");
                None
            }
        }
    }
1995
    /// Flushes a previously filled staging buffer into `buffer_data` at
    /// `offset`; errors go through the queue's error sink.
    fn queue_write_staging_buffer(
        &self,
        queue_data: &Self::QueueData,
        buffer_data: &Self::BufferData,
        offset: wgt::BufferAddress,
        staging_buffer: &dyn crate::context::QueueWriteBuffer,
    ) {
        // The trait object must be the concrete type this backend created
        // in `queue_create_staging_buffer`; anything else is a bug.
        let staging_buffer = staging_buffer
            .as_any()
            .downcast_ref::<QueueWriteBuffer>()
            .unwrap();
        match self.0.queue_write_staging_buffer(
            queue_data.id,
            buffer_data.id,
            offset,
            staging_buffer.buffer_id,
        ) {
            Ok(()) => (),
            Err(err) => {
                self.handle_error_nolabel(&queue_data.error_sink, err, "Queue::write_buffer_with");
            }
        }
    }
2019
2020 fn queue_write_texture(
2021 &self,
2022 queue_data: &Self::QueueData,
2023 texture: crate::ImageCopyTexture<'_>,
2024 data: &[u8],
2025 data_layout: wgt::ImageDataLayout,
2026 size: wgt::Extent3d,
2027 ) {
2028 match self.0.queue_write_texture(
2029 queue_data.id,
2030 &map_texture_copy_view(texture),
2031 data,
2032 &data_layout,
2033 &size,
2034 ) {
2035 Ok(()) => (),
2036 Err(err) => {
2037 self.handle_error_nolabel(&queue_data.error_sink, err, "Queue::write_texture")
2038 }
2039 }
2040 }
2041
    /// (Web targets only) Copies an external image source into a texture
    /// via the queue; errors go through the queue's error sink.
    #[cfg(any(webgpu, webgl))]
    fn queue_copy_external_image_to_texture(
        &self,
        queue_data: &Self::QueueData,
        source: &wgt::ImageCopyExternalImage,
        dest: crate::ImageCopyTextureTagged<'_>,
        size: wgt::Extent3d,
    ) {
        match self.0.queue_copy_external_image_to_texture(
            queue_data.id,
            source,
            map_texture_tagged_copy_view(dest),
            size,
        ) {
            Ok(()) => (),
            Err(err) => self.handle_error_nolabel(
                &queue_data.error_sink,
                err,
                "Queue::copy_external_image_to_texture",
            ),
        }
    }
2064
    /// Submits the command buffers to the queue and returns the submission
    /// index. On failure the error is reported via the queue's error sink
    /// and the index wgpu-core paired with the error is returned instead.
    fn queue_submit<I: Iterator<Item = Self::CommandBufferData>>(
        &self,
        queue_data: &Self::QueueData,
        command_buffers: I,
    ) -> Self::SubmissionIndexData {
        // Small submissions avoid a heap allocation via SmallVec.
        let temp_command_buffers = command_buffers.collect::<SmallVec<[_; 4]>>();

        let index = match self.0.queue_submit(queue_data.id, &temp_command_buffers) {
            Ok(index) => index,
            Err((index, err)) => {
                self.handle_error_nolabel(&queue_data.error_sink, err, "Queue::submit");
                index
            }
        };

        // Submission consumes the command buffers: release their ids even
        // when the submit itself failed.
        for cmdbuf in &temp_command_buffers {
            self.0.command_buffer_drop(*cmdbuf);
        }

        index
    }
2086
    /// Returns the queue's timestamp period (nanoseconds per timestamp tick).
    fn queue_get_timestamp_period(&self, queue_data: &Self::QueueData) -> f32 {
        self.0.queue_get_timestamp_period(queue_data.id)
    }
2090
    /// Registers `callback` to run once all currently submitted work on
    /// the queue has completed.
    fn queue_on_submitted_work_done(
        &self,
        queue_data: &Self::QueueData,
        callback: crate::context::SubmittedWorkDoneCallback,
    ) {
        let closure = wgc::device::queue::SubmittedWorkDoneClosure::from_rust(callback);
        self.0.queue_on_submitted_work_done(queue_data.id, closure);
    }
2099
    /// Starts a GPU debugger/profiler capture for the device (if attached).
    fn device_start_capture(&self, device_data: &Self::DeviceData) {
        self.0.device_start_capture(device_data.id);
    }
2103
    /// Stops a previously started GPU debugger/profiler capture.
    fn device_stop_capture(&self, device_data: &Self::DeviceData) {
        self.0.device_stop_capture(device_data.id);
    }
2107
    /// Returns wgpu-core's internal resource/usage counters for the device.
    fn device_get_internal_counters(
        &self,
        device_data: &Self::DeviceData,
    ) -> wgt::InternalCounters {
        self.0.device_get_internal_counters(device_data.id)
    }
2114
    /// Produces a memory-allocator report for the device, when the backend
    /// supports one (`None` otherwise).
    fn device_generate_allocator_report(
        &self,
        device_data: &Self::DeviceData,
    ) -> Option<wgt::AllocatorReport> {
        self.0.device_generate_allocator_report(device_data.id)
    }
2121
    /// Serializes the pipeline cache's contents for later reuse, when the
    /// backend supports it (`None` otherwise).
    fn pipeline_cache_get_data(
        &self,
        cache_data: &Self::PipelineCacheData,
    ) -> Option<Vec<u8>> {
        self.0.pipeline_cache_get_data(*cache_data)
    }
2129
    /// Binds a compute pipeline on the pass; recording errors are reported
    /// through the pass's error sink, labeled with the pass label.
    fn compute_pass_set_pipeline(
        &self,
        pass_data: &mut Self::ComputePassData,
        pipeline_data: &Self::ComputePipelineData,
    ) {
        if let Err(cause) = self
            .0
            .compute_pass_set_pipeline(&mut pass_data.pass, pipeline_data.id)
        {
            self.handle_error(
                &pass_data.error_sink,
                cause,
                pass_data.pass.label(),
                "ComputePass::set_pipeline",
            );
        }
    }
2147
    /// Binds (or clears, when `None`) the bind group at `index` with the
    /// given dynamic offsets; errors go through the pass's error sink.
    fn compute_pass_set_bind_group(
        &self,
        pass_data: &mut Self::ComputePassData,
        index: u32,
        bind_group_data: Option<&Self::BindGroupData>,
        offsets: &[wgt::DynamicOffset],
    ) {
        // wgpu-core takes the id by value; clone the Option<id>.
        let bg = bind_group_data.cloned();
        if let Err(cause) =
            self.0
                .compute_pass_set_bind_group(&mut pass_data.pass, index, bg, offsets)
        {
            self.handle_error(
                &pass_data.error_sink,
                cause,
                pass_data.pass.label(),
                "ComputePass::set_bind_group",
            );
        }
    }
2168
2169 fn compute_pass_set_push_constants(
2170 &self,
2171 pass_data: &mut Self::ComputePassData,
2172 offset: u32,
2173 data: &[u8],
2174 ) {
2175 if let Err(cause) =
2176 self.0
2177 .compute_pass_set_push_constants(&mut pass_data.pass, offset, data)
2178 {
2179 self.handle_error(
2180 &pass_data.error_sink,
2181 cause,
2182 pass_data.pass.label(),
2183 "ComputePass::set_push_constant",
2184 );
2185 }
2186 }
2187
    /// Records a one-off debug marker inside the compute pass.
    fn compute_pass_insert_debug_marker(&self, pass_data: &mut Self::ComputePassData, label: &str) {
        if let Err(cause) = self
            .0
            // NOTE(review): the trailing 0 appears to be a color/flags
            // argument that this wrapper does not expose — confirm against
            // the wgpu-core API before relying on it.
            .compute_pass_insert_debug_marker(&mut pass_data.pass, label, 0)
        {
            self.handle_error(
                &pass_data.error_sink,
                cause,
                pass_data.pass.label(),
                "ComputePass::insert_debug_marker",
            );
        }
    }
2201
    /// Opens a named debug group inside the compute pass; must be balanced
    /// by `compute_pass_pop_debug_group`.
    fn compute_pass_push_debug_group(
        &self,
        pass_data: &mut Self::ComputePassData,
        group_label: &str,
    ) {
        if let Err(cause) =
            self.0
                .compute_pass_push_debug_group(&mut pass_data.pass, group_label, 0)
        {
            self.handle_error(
                &pass_data.error_sink,
                cause,
                pass_data.pass.label(),
                "ComputePass::push_debug_group",
            );
        }
    }
2219
    /// Closes the innermost open debug group in the compute pass.
    fn compute_pass_pop_debug_group(&self, pass_data: &mut Self::ComputePassData) {
        if let Err(cause) = self.0.compute_pass_pop_debug_group(&mut pass_data.pass) {
            self.handle_error(
                &pass_data.error_sink,
                cause,
                pass_data.pass.label(),
                "ComputePass::pop_debug_group",
            );
        }
    }
2230
    /// Records a timestamp into `query_set_data` at `query_index` from
    /// within the compute pass.
    fn compute_pass_write_timestamp(
        &self,
        pass_data: &mut Self::ComputePassData,
        query_set_data: &Self::QuerySetData,
        query_index: u32,
    ) {
        if let Err(cause) =
            self.0
                .compute_pass_write_timestamp(&mut pass_data.pass, *query_set_data, query_index)
        {
            self.handle_error(
                &pass_data.error_sink,
                cause,
                pass_data.pass.label(),
                "ComputePass::write_timestamp",
            );
        }
    }
2249
    /// Begins a pipeline-statistics query at `query_index`; must be ended
    /// with `compute_pass_end_pipeline_statistics_query`.
    fn compute_pass_begin_pipeline_statistics_query(
        &self,
        pass_data: &mut Self::ComputePassData,
        query_set_data: &Self::QuerySetData,
        query_index: u32,
    ) {
        if let Err(cause) = self.0.compute_pass_begin_pipeline_statistics_query(
            &mut pass_data.pass,
            *query_set_data,
            query_index,
        ) {
            self.handle_error(
                &pass_data.error_sink,
                cause,
                pass_data.pass.label(),
                "ComputePass::begin_pipeline_statistics_query",
            );
        }
    }
2269
    /// Ends the currently open pipeline-statistics query on the pass.
    fn compute_pass_end_pipeline_statistics_query(&self, pass_data: &mut Self::ComputePassData) {
        if let Err(cause) = self
            .0
            .compute_pass_end_pipeline_statistics_query(&mut pass_data.pass)
        {
            self.handle_error(
                &pass_data.error_sink,
                cause,
                pass_data.pass.label(),
                "ComputePass::end_pipeline_statistics_query",
            );
        }
    }
2283
    /// Dispatches `x * y * z` workgroups with the currently bound pipeline.
    fn compute_pass_dispatch_workgroups(
        &self,
        pass_data: &mut Self::ComputePassData,
        x: u32,
        y: u32,
        z: u32,
    ) {
        if let Err(cause) = self
            .0
            .compute_pass_dispatch_workgroups(&mut pass_data.pass, x, y, z)
        {
            self.handle_error(
                &pass_data.error_sink,
                cause,
                pass_data.pass.label(),
                "ComputePass::dispatch_workgroups",
            );
        }
    }
2303
    /// Dispatches workgroups with counts read from `indirect_buffer_data`
    /// at `indirect_offset`.
    fn compute_pass_dispatch_workgroups_indirect(
        &self,
        pass_data: &mut Self::ComputePassData,
        indirect_buffer_data: &Self::BufferData,
        indirect_offset: wgt::BufferAddress,
    ) {
        if let Err(cause) = self.0.compute_pass_dispatch_workgroups_indirect(
            &mut pass_data.pass,
            indirect_buffer_data.id,
            indirect_offset,
        ) {
            self.handle_error(
                &pass_data.error_sink,
                cause,
                pass_data.pass.label(),
                "ComputePass::dispatch_workgroups_indirect",
            );
        }
    }
2323
2324 fn compute_pass_end(&self, pass_data: &mut Self::ComputePassData) {
2325 if let Err(cause) = self.0.compute_pass_end(&mut pass_data.pass) {
2326 self.handle_error(
2327 &pass_data.error_sink,
2328 cause,
2329 pass_data.pass.label(),
2330 "ComputePass::end",
2331 );
2332 }
2333 }
2334
2335 fn render_bundle_encoder_set_pipeline(
2336 &self,
2337 encoder_data: &mut Self::RenderBundleEncoderData,
2338 pipeline_data: &Self::RenderPipelineData,
2339 ) {
2340 wgpu_render_bundle_set_pipeline(encoder_data, pipeline_data.id)
2341 }
2342
2343 fn render_bundle_encoder_set_bind_group(
2344 &self,
2345 encoder_data: &mut Self::RenderBundleEncoderData,
2346 index: u32,
2347 bind_group_data: Option<&Self::BindGroupData>,
2348 offsets: &[wgt::DynamicOffset],
2349 ) {
2350 let bg = bind_group_data.cloned();
2351 unsafe {
2352 wgpu_render_bundle_set_bind_group(
2353 encoder_data,
2354 index,
2355 bg,
2356 offsets.as_ptr(),
2357 offsets.len(),
2358 )
2359 }
2360 }
2361
2362 fn render_bundle_encoder_set_index_buffer(
2363 &self,
2364 encoder_data: &mut Self::RenderBundleEncoderData,
2365 buffer_data: &Self::BufferData,
2366 index_format: wgt::IndexFormat,
2367 offset: wgt::BufferAddress,
2368 size: Option<wgt::BufferSize>,
2369 ) {
2370 encoder_data.set_index_buffer(buffer_data.id, index_format, offset, size)
2371 }
2372
2373 fn render_bundle_encoder_set_vertex_buffer(
2374 &self,
2375 encoder_data: &mut Self::RenderBundleEncoderData,
2376 slot: u32,
2377 buffer_data: &Self::BufferData,
2378 offset: wgt::BufferAddress,
2379 size: Option<wgt::BufferSize>,
2380 ) {
2381 wgpu_render_bundle_set_vertex_buffer(encoder_data, slot, buffer_data.id, offset, size)
2382 }
2383
2384 fn render_bundle_encoder_set_push_constants(
2385 &self,
2386 encoder_data: &mut Self::RenderBundleEncoderData,
2387 stages: wgt::ShaderStages,
2388 offset: u32,
2389 data: &[u8],
2390 ) {
2391 unsafe {
2392 wgpu_render_bundle_set_push_constants(
2393 encoder_data,
2394 stages,
2395 offset,
2396 data.len().try_into().unwrap(),
2397 data.as_ptr(),
2398 )
2399 }
2400 }
2401
2402 fn render_bundle_encoder_draw(
2403 &self,
2404 encoder_data: &mut Self::RenderBundleEncoderData,
2405 vertices: Range<u32>,
2406 instances: Range<u32>,
2407 ) {
2408 wgpu_render_bundle_draw(
2409 encoder_data,
2410 vertices.end - vertices.start,
2411 instances.end - instances.start,
2412 vertices.start,
2413 instances.start,
2414 )
2415 }
2416
2417 fn render_bundle_encoder_draw_indexed(
2418 &self,
2419 encoder_data: &mut Self::RenderBundleEncoderData,
2420 indices: Range<u32>,
2421 base_vertex: i32,
2422 instances: Range<u32>,
2423 ) {
2424 wgpu_render_bundle_draw_indexed(
2425 encoder_data,
2426 indices.end - indices.start,
2427 instances.end - instances.start,
2428 indices.start,
2429 base_vertex,
2430 instances.start,
2431 )
2432 }
2433
2434 fn render_bundle_encoder_draw_indirect(
2435 &self,
2436 encoder_data: &mut Self::RenderBundleEncoderData,
2437 indirect_buffer_data: &Self::BufferData,
2438 indirect_offset: wgt::BufferAddress,
2439 ) {
2440 wgpu_render_bundle_draw_indirect(encoder_data, indirect_buffer_data.id, indirect_offset)
2441 }
2442
2443 fn render_bundle_encoder_draw_indexed_indirect(
2444 &self,
2445 encoder_data: &mut Self::RenderBundleEncoderData,
2446 indirect_buffer_data: &Self::BufferData,
2447 indirect_offset: wgt::BufferAddress,
2448 ) {
2449 wgpu_render_bundle_draw_indexed_indirect(
2450 encoder_data,
2451 indirect_buffer_data.id,
2452 indirect_offset,
2453 )
2454 }
2455
2456 fn render_pass_set_pipeline(
2457 &self,
2458 pass_data: &mut Self::RenderPassData,
2459 pipeline_data: &Self::RenderPipelineData,
2460 ) {
2461 if let Err(cause) = self
2462 .0
2463 .render_pass_set_pipeline(&mut pass_data.pass, pipeline_data.id)
2464 {
2465 self.handle_error(
2466 &pass_data.error_sink,
2467 cause,
2468 pass_data.pass.label(),
2469 "RenderPass::set_pipeline",
2470 );
2471 }
2472 }
2473
2474 fn render_pass_set_bind_group(
2475 &self,
2476 pass_data: &mut Self::RenderPassData,
2477 index: u32,
2478 bind_group_data: Option<&Self::BindGroupData>,
2479 offsets: &[wgt::DynamicOffset],
2480 ) {
2481 let bg = bind_group_data.cloned();
2482 if let Err(cause) =
2483 self.0
2484 .render_pass_set_bind_group(&mut pass_data.pass, index, bg, offsets)
2485 {
2486 self.handle_error(
2487 &pass_data.error_sink,
2488 cause,
2489 pass_data.pass.label(),
2490 "RenderPass::set_bind_group",
2491 );
2492 }
2493 }
2494
2495 fn render_pass_set_index_buffer(
2496 &self,
2497 pass_data: &mut Self::RenderPassData,
2498 buffer_data: &Self::BufferData,
2499 index_format: wgt::IndexFormat,
2500 offset: wgt::BufferAddress,
2501 size: Option<wgt::BufferSize>,
2502 ) {
2503 if let Err(cause) = self.0.render_pass_set_index_buffer(
2504 &mut pass_data.pass,
2505 buffer_data.id,
2506 index_format,
2507 offset,
2508 size,
2509 ) {
2510 self.handle_error(
2511 &pass_data.error_sink,
2512 cause,
2513 pass_data.pass.label(),
2514 "RenderPass::set_index_buffer",
2515 );
2516 }
2517 }
2518
2519 fn render_pass_set_vertex_buffer(
2520 &self,
2521 pass_data: &mut Self::RenderPassData,
2522 slot: u32,
2523 buffer_data: &Self::BufferData,
2524 offset: wgt::BufferAddress,
2525 size: Option<wgt::BufferSize>,
2526 ) {
2527 if let Err(cause) = self.0.render_pass_set_vertex_buffer(
2528 &mut pass_data.pass,
2529 slot,
2530 buffer_data.id,
2531 offset,
2532 size,
2533 ) {
2534 self.handle_error(
2535 &pass_data.error_sink,
2536 cause,
2537 pass_data.pass.label(),
2538 "RenderPass::set_vertex_buffer",
2539 );
2540 }
2541 }
2542
2543 fn render_pass_set_push_constants(
2544 &self,
2545 pass_data: &mut Self::RenderPassData,
2546 stages: wgt::ShaderStages,
2547 offset: u32,
2548 data: &[u8],
2549 ) {
2550 if let Err(cause) =
2551 self.0
2552 .render_pass_set_push_constants(&mut pass_data.pass, stages, offset, data)
2553 {
2554 self.handle_error(
2555 &pass_data.error_sink,
2556 cause,
2557 pass_data.pass.label(),
2558 "RenderPass::set_push_constants",
2559 );
2560 }
2561 }
2562
2563 fn render_pass_draw(
2564 &self,
2565 pass_data: &mut Self::RenderPassData,
2566 vertices: Range<u32>,
2567 instances: Range<u32>,
2568 ) {
2569 if let Err(cause) = self.0.render_pass_draw(
2570 &mut pass_data.pass,
2571 vertices.end - vertices.start,
2572 instances.end - instances.start,
2573 vertices.start,
2574 instances.start,
2575 ) {
2576 self.handle_error(
2577 &pass_data.error_sink,
2578 cause,
2579 pass_data.pass.label(),
2580 "RenderPass::draw",
2581 );
2582 }
2583 }
2584
2585 fn render_pass_draw_indexed(
2586 &self,
2587 pass_data: &mut Self::RenderPassData,
2588 indices: Range<u32>,
2589 base_vertex: i32,
2590 instances: Range<u32>,
2591 ) {
2592 if let Err(cause) = self.0.render_pass_draw_indexed(
2593 &mut pass_data.pass,
2594 indices.end - indices.start,
2595 instances.end - instances.start,
2596 indices.start,
2597 base_vertex,
2598 instances.start,
2599 ) {
2600 self.handle_error(
2601 &pass_data.error_sink,
2602 cause,
2603 pass_data.pass.label(),
2604 "RenderPass::draw_indexed",
2605 );
2606 }
2607 }
2608
2609 fn render_pass_draw_indirect(
2610 &self,
2611 pass_data: &mut Self::RenderPassData,
2612 indirect_buffer_data: &Self::BufferData,
2613 indirect_offset: wgt::BufferAddress,
2614 ) {
2615 if let Err(cause) = self.0.render_pass_draw_indirect(
2616 &mut pass_data.pass,
2617 indirect_buffer_data.id,
2618 indirect_offset,
2619 ) {
2620 self.handle_error(
2621 &pass_data.error_sink,
2622 cause,
2623 pass_data.pass.label(),
2624 "RenderPass::draw_indirect",
2625 );
2626 }
2627 }
2628
2629 fn render_pass_draw_indexed_indirect(
2630 &self,
2631 pass_data: &mut Self::RenderPassData,
2632 indirect_buffer_data: &Self::BufferData,
2633 indirect_offset: wgt::BufferAddress,
2634 ) {
2635 if let Err(cause) = self.0.render_pass_draw_indexed_indirect(
2636 &mut pass_data.pass,
2637 indirect_buffer_data.id,
2638 indirect_offset,
2639 ) {
2640 self.handle_error(
2641 &pass_data.error_sink,
2642 cause,
2643 pass_data.pass.label(),
2644 "RenderPass::draw_indexed_indirect",
2645 );
2646 }
2647 }
2648
2649 fn render_pass_multi_draw_indirect(
2650 &self,
2651 pass_data: &mut Self::RenderPassData,
2652 indirect_buffer_data: &Self::BufferData,
2653 indirect_offset: wgt::BufferAddress,
2654 count: u32,
2655 ) {
2656 if let Err(cause) = self.0.render_pass_multi_draw_indirect(
2657 &mut pass_data.pass,
2658 indirect_buffer_data.id,
2659 indirect_offset,
2660 count,
2661 ) {
2662 self.handle_error(
2663 &pass_data.error_sink,
2664 cause,
2665 pass_data.pass.label(),
2666 "RenderPass::multi_draw_indirect",
2667 );
2668 }
2669 }
2670
2671 fn render_pass_multi_draw_indexed_indirect(
2672 &self,
2673 pass_data: &mut Self::RenderPassData,
2674 indirect_buffer_data: &Self::BufferData,
2675 indirect_offset: wgt::BufferAddress,
2676 count: u32,
2677 ) {
2678 if let Err(cause) = self.0.render_pass_multi_draw_indexed_indirect(
2679 &mut pass_data.pass,
2680 indirect_buffer_data.id,
2681 indirect_offset,
2682 count,
2683 ) {
2684 self.handle_error(
2685 &pass_data.error_sink,
2686 cause,
2687 pass_data.pass.label(),
2688 "RenderPass::multi_draw_indexed_indirect",
2689 );
2690 }
2691 }
2692
2693 fn render_pass_multi_draw_indirect_count(
2694 &self,
2695 pass_data: &mut Self::RenderPassData,
2696 indirect_buffer_data: &Self::BufferData,
2697 indirect_offset: wgt::BufferAddress,
2698 count_buffer_data: &Self::BufferData,
2699 count_buffer_offset: wgt::BufferAddress,
2700 max_count: u32,
2701 ) {
2702 if let Err(cause) = self.0.render_pass_multi_draw_indirect_count(
2703 &mut pass_data.pass,
2704 indirect_buffer_data.id,
2705 indirect_offset,
2706 count_buffer_data.id,
2707 count_buffer_offset,
2708 max_count,
2709 ) {
2710 self.handle_error(
2711 &pass_data.error_sink,
2712 cause,
2713 pass_data.pass.label(),
2714 "RenderPass::multi_draw_indirect_count",
2715 );
2716 }
2717 }
2718
2719 fn render_pass_multi_draw_indexed_indirect_count(
2720 &self,
2721 pass_data: &mut Self::RenderPassData,
2722 indirect_buffer_data: &Self::BufferData,
2723 indirect_offset: wgt::BufferAddress,
2724 count_buffer_data: &Self::BufferData,
2725 count_buffer_offset: wgt::BufferAddress,
2726 max_count: u32,
2727 ) {
2728 if let Err(cause) = self.0.render_pass_multi_draw_indexed_indirect_count(
2729 &mut pass_data.pass,
2730 indirect_buffer_data.id,
2731 indirect_offset,
2732 count_buffer_data.id,
2733 count_buffer_offset,
2734 max_count,
2735 ) {
2736 self.handle_error(
2737 &pass_data.error_sink,
2738 cause,
2739 pass_data.pass.label(),
2740 "RenderPass::multi_draw_indexed_indirect_count",
2741 );
2742 }
2743 }
2744
2745 fn render_pass_set_blend_constant(
2746 &self,
2747 pass_data: &mut Self::RenderPassData,
2748 color: wgt::Color,
2749 ) {
2750 if let Err(cause) = self
2751 .0
2752 .render_pass_set_blend_constant(&mut pass_data.pass, color)
2753 {
2754 self.handle_error(
2755 &pass_data.error_sink,
2756 cause,
2757 pass_data.pass.label(),
2758 "RenderPass::set_blend_constant",
2759 );
2760 }
2761 }
2762
2763 fn render_pass_set_scissor_rect(
2764 &self,
2765 pass_data: &mut Self::RenderPassData,
2766 x: u32,
2767 y: u32,
2768 width: u32,
2769 height: u32,
2770 ) {
2771 if let Err(cause) =
2772 self.0
2773 .render_pass_set_scissor_rect(&mut pass_data.pass, x, y, width, height)
2774 {
2775 self.handle_error(
2776 &pass_data.error_sink,
2777 cause,
2778 pass_data.pass.label(),
2779 "RenderPass::set_scissor_rect",
2780 );
2781 }
2782 }
2783
2784 fn render_pass_set_viewport(
2785 &self,
2786 pass_data: &mut Self::RenderPassData,
2787 x: f32,
2788 y: f32,
2789 width: f32,
2790 height: f32,
2791 min_depth: f32,
2792 max_depth: f32,
2793 ) {
2794 if let Err(cause) = self.0.render_pass_set_viewport(
2795 &mut pass_data.pass,
2796 x,
2797 y,
2798 width,
2799 height,
2800 min_depth,
2801 max_depth,
2802 ) {
2803 self.handle_error(
2804 &pass_data.error_sink,
2805 cause,
2806 pass_data.pass.label(),
2807 "RenderPass::set_viewport",
2808 );
2809 }
2810 }
2811
2812 fn render_pass_set_stencil_reference(
2813 &self,
2814 pass_data: &mut Self::RenderPassData,
2815 reference: u32,
2816 ) {
2817 if let Err(cause) = self
2818 .0
2819 .render_pass_set_stencil_reference(&mut pass_data.pass, reference)
2820 {
2821 self.handle_error(
2822 &pass_data.error_sink,
2823 cause,
2824 pass_data.pass.label(),
2825 "RenderPass::set_stencil_reference",
2826 );
2827 }
2828 }
2829
2830 fn render_pass_insert_debug_marker(&self, pass_data: &mut Self::RenderPassData, label: &str) {
2831 if let Err(cause) = self
2832 .0
2833 .render_pass_insert_debug_marker(&mut pass_data.pass, label, 0)
2834 {
2835 self.handle_error(
2836 &pass_data.error_sink,
2837 cause,
2838 pass_data.pass.label(),
2839 "RenderPass::insert_debug_marker",
2840 );
2841 }
2842 }
2843
2844 fn render_pass_push_debug_group(
2845 &self,
2846 pass_data: &mut Self::RenderPassData,
2847 group_label: &str,
2848 ) {
2849 if let Err(cause) = self
2850 .0
2851 .render_pass_push_debug_group(&mut pass_data.pass, group_label, 0)
2852 {
2853 self.handle_error(
2854 &pass_data.error_sink,
2855 cause,
2856 pass_data.pass.label(),
2857 "RenderPass::push_debug_group",
2858 );
2859 }
2860 }
2861
2862 fn render_pass_pop_debug_group(&self, pass_data: &mut Self::RenderPassData) {
2863 if let Err(cause) = self.0.render_pass_pop_debug_group(&mut pass_data.pass) {
2864 self.handle_error(
2865 &pass_data.error_sink,
2866 cause,
2867 pass_data.pass.label(),
2868 "RenderPass::pop_debug_group",
2869 );
2870 }
2871 }
2872
2873 fn render_pass_write_timestamp(
2874 &self,
2875 pass_data: &mut Self::RenderPassData,
2876 query_set_data: &Self::QuerySetData,
2877 query_index: u32,
2878 ) {
2879 if let Err(cause) =
2880 self.0
2881 .render_pass_write_timestamp(&mut pass_data.pass, *query_set_data, query_index)
2882 {
2883 self.handle_error(
2884 &pass_data.error_sink,
2885 cause,
2886 pass_data.pass.label(),
2887 "RenderPass::write_timestamp",
2888 );
2889 }
2890 }
2891
2892 fn render_pass_begin_occlusion_query(
2893 &self,
2894 pass_data: &mut Self::RenderPassData,
2895 query_index: u32,
2896 ) {
2897 if let Err(cause) = self
2898 .0
2899 .render_pass_begin_occlusion_query(&mut pass_data.pass, query_index)
2900 {
2901 self.handle_error(
2902 &pass_data.error_sink,
2903 cause,
2904 pass_data.pass.label(),
2905 "RenderPass::begin_occlusion_query",
2906 );
2907 }
2908 }
2909
2910 fn render_pass_end_occlusion_query(&self, pass_data: &mut Self::RenderPassData) {
2911 if let Err(cause) = self.0.render_pass_end_occlusion_query(&mut pass_data.pass) {
2912 self.handle_error(
2913 &pass_data.error_sink,
2914 cause,
2915 pass_data.pass.label(),
2916 "RenderPass::end_occlusion_query",
2917 );
2918 }
2919 }
2920
2921 fn render_pass_begin_pipeline_statistics_query(
2922 &self,
2923 pass_data: &mut Self::RenderPassData,
2924 query_set_data: &Self::QuerySetData,
2925 query_index: u32,
2926 ) {
2927 if let Err(cause) = self.0.render_pass_begin_pipeline_statistics_query(
2928 &mut pass_data.pass,
2929 *query_set_data,
2930 query_index,
2931 ) {
2932 self.handle_error(
2933 &pass_data.error_sink,
2934 cause,
2935 pass_data.pass.label(),
2936 "RenderPass::begin_pipeline_statistics_query",
2937 );
2938 }
2939 }
2940
2941 fn render_pass_end_pipeline_statistics_query(&self, pass_data: &mut Self::RenderPassData) {
2942 if let Err(cause) = self
2943 .0
2944 .render_pass_end_pipeline_statistics_query(&mut pass_data.pass)
2945 {
2946 self.handle_error(
2947 &pass_data.error_sink,
2948 cause,
2949 pass_data.pass.label(),
2950 "RenderPass::end_pipeline_statistics_query",
2951 );
2952 }
2953 }
2954
2955 fn render_pass_execute_bundles(
2956 &self,
2957 pass_data: &mut Self::RenderPassData,
2958 render_bundles: &mut dyn Iterator<Item = &Self::RenderBundleData>,
2959 ) {
2960 let temp_render_bundles = render_bundles.copied().collect::<SmallVec<[_; 4]>>();
2961 if let Err(cause) = self
2962 .0
2963 .render_pass_execute_bundles(&mut pass_data.pass, &temp_render_bundles)
2964 {
2965 self.handle_error(
2966 &pass_data.error_sink,
2967 cause,
2968 pass_data.pass.label(),
2969 "RenderPass::execute_bundles",
2970 );
2971 }
2972 }
2973
2974 fn render_pass_end(&self, pass_data: &mut Self::RenderPassData) {
2975 if let Err(cause) = self.0.render_pass_end(&mut pass_data.pass) {
2976 self.handle_error(
2977 &pass_data.error_sink,
2978 cause,
2979 pass_data.pass.label(),
2980 "RenderPass::end",
2981 );
2982 }
2983 }
2984}
2985
/// Backend detail carried alongside an acquired surface texture; records which
/// wgpu-core surface the texture came from (presumably so present/discard can
/// target it — the consumers are outside this chunk, confirm there).
#[derive(Debug)]
pub struct SurfaceOutputDetail {
    // Id of the wgpu-core surface the frame was acquired from.
    surface_id: wgc::id::SurfaceId,
}
2990
/// Shared, thread-safe handle to the error-routing state (`ErrorSinkRaw`).
type ErrorSink = Arc<Mutex<ErrorSinkRaw>>;
2992
/// One entry on the error-scope stack: captures the first error whose
/// category matches `filter` while the scope is open.
struct ErrorScope {
    // First matching error seen by this scope, if any (see ErrorSinkRaw::handle_error).
    error: Option<crate::Error>,
    // Error category this scope captures.
    filter: crate::ErrorFilter,
}
2997
/// Mutable error-routing state: a stack of open error scopes plus an optional
/// user-installed handler for errors that no scope captures.
struct ErrorSinkRaw {
    // Innermost scope is last; searched in reverse by handle_error.
    scopes: Vec<ErrorScope>,
    // Invoked for errors no scope matches; None means use the default handler.
    uncaptured_handler: Option<Box<dyn crate::UncapturedErrorHandler>>,
}
3002
3003impl ErrorSinkRaw {
3004 fn new() -> ErrorSinkRaw {
3005 ErrorSinkRaw {
3006 scopes: Vec::new(),
3007 uncaptured_handler: None,
3008 }
3009 }
3010
3011 #[track_caller]
3012 fn handle_error(&mut self, err: crate::Error) {
3013 let filter = match err {
3014 crate::Error::OutOfMemory { .. } => crate::ErrorFilter::OutOfMemory,
3015 crate::Error::Validation { .. } => crate::ErrorFilter::Validation,
3016 crate::Error::Internal { .. } => crate::ErrorFilter::Internal,
3017 };
3018 match self
3019 .scopes
3020 .iter_mut()
3021 .rev()
3022 .find(|scope| scope.filter == filter)
3023 {
3024 Some(scope) => {
3025 if scope.error.is_none() {
3026 scope.error = Some(err);
3027 }
3028 }
3029 None => {
3030 if let Some(custom_handler) = self.uncaptured_handler.as_ref() {
3031 (custom_handler)(err);
3032 } else {
3033 default_error_handler(err);
3035 }
3036 }
3037 }
3038 }
3039}
3040
3041impl fmt::Debug for ErrorSinkRaw {
3042 fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
3043 write!(f, "ErrorSink")
3044 }
3045}
3046
/// Fallback for errors that escape every error scope when no custom
/// uncaptured-error handler is installed: logs, then aborts via panic so the
/// failure is attributed to the caller's location (via `#[track_caller]`).
#[track_caller]
fn default_error_handler(err: crate::Error) {
    log::error!("Handling wgpu errors as fatal by default");
    panic!("wgpu error: {err}\n");
}
3052
/// Converts a shader-module creation error into WebGPU-style compilation info.
impl From<CreateShaderModuleError> for CompilationInfo {
    fn from(value: CreateShaderModuleError) -> Self {
        match value {
            // Front-end parse errors carry source locations; their own From
            // impls produce per-message diagnostics.
            #[cfg(feature = "wgsl")]
            CreateShaderModuleError::Parsing(v) => v.into(),
            #[cfg(feature = "glsl")]
            CreateShaderModuleError::ParsingGlsl(v) => v.into(),
            #[cfg(feature = "spirv")]
            CreateShaderModuleError::ParsingSpirV(v) => v.into(),
            CreateShaderModuleError::Validation(v) => v.into(),
            // Device loss / id-generation failures are not compilation
            // problems, so they contribute no messages.
            CreateShaderModuleError::Device(_) | CreateShaderModuleError::Generation => {
                CompilationInfo {
                    messages: Vec::new(),
                }
            }
            // Anything else becomes a single location-less error message.
            _ => CompilationInfo {
                messages: vec![CompilationMessage {
                    message: value.to_string(),
                    message_type: CompilationMessageType::Error,
                    location: None,
                }],
            },
        }
    }
}
3081
/// Write-only staging storage handed out by queue write APIs (presumably
/// `Queue::write_buffer_with` — confirm at the allocation site, which is
/// outside this chunk): the staging buffer's id plus its CPU mapping.
#[derive(Debug)]
pub struct QueueWriteBuffer {
    // Id of the backing wgpu-core staging buffer.
    buffer_id: wgc::id::StagingBufferId,
    // CPU-visible mapping of that staging buffer.
    mapping: BufferMappedRange,
}
3087
3088impl crate::context::QueueWriteBuffer for QueueWriteBuffer {
3089 fn slice(&self) -> &[u8] {
3090 panic!()
3091 }
3092
3093 #[inline]
3094 fn slice_mut(&mut self) -> &mut [u8] {
3095 use crate::context::BufferMappedRange;
3096 self.mapping.slice_mut()
3097 }
3098
3099 fn as_any(&self) -> &dyn Any {
3100 self
3101 }
3102}
3103
/// Raw view over a mapped buffer region: a base pointer plus length in bytes.
/// Validity of the (ptr, size) pair is established at the construction sites,
/// which are outside this chunk.
#[derive(Debug)]
pub struct BufferMappedRange {
    // Base address of the mapped region.
    ptr: NonNull<u8>,
    // Length of the mapped region in bytes.
    size: usize,
}
3109
// SAFETY(review): BufferMappedRange is a raw pointer + length. These impls
// assume the underlying mapping may be accessed from any thread when the
// `send_sync` cfg is enabled — confirm against the mapping's ownership rules.
#[cfg(send_sync)]
unsafe impl Send for BufferMappedRange {}
#[cfg(send_sync)]
unsafe impl Sync for BufferMappedRange {}
3114
impl crate::context::BufferMappedRange for BufferMappedRange {
    /// Immutable byte view of the mapped region.
    #[inline]
    fn slice(&self) -> &[u8] {
        // SAFETY: relies on the constructor's invariant that `ptr` is valid for
        // `size` bytes for the lifetime of `self` (constructed outside this
        // chunk — TODO confirm); `&self` prevents concurrent mutation via this handle.
        unsafe { slice::from_raw_parts(self.ptr.as_ptr(), self.size) }
    }

    /// Mutable byte view of the mapped region.
    #[inline]
    fn slice_mut(&mut self) -> &mut [u8] {
        // SAFETY: same validity invariant as `slice`; `&mut self` guarantees
        // this is the only slice handed out through this handle.
        unsafe { slice::from_raw_parts_mut(self.ptr.as_ptr(), self.size) }
    }
}
3126
impl Drop for BufferMappedRange {
    fn drop(&mut self) {
        // Intentionally empty: the mapping itself is released elsewhere
        // (presumably on buffer unmap — confirm against the mapping code, which
        // is outside this chunk). Keeping an explicit Drop also prevents the
        // type from being partially moved out of.
    }
}
3133
/// Borrows the wgpu-core backing data out of a type-erased `crate::Buffer`.
fn downcast_buffer(buffer: &crate::Buffer) -> &<ContextWgpuCore as crate::Context>::BufferData {
    downcast_ref(buffer.data.as_ref())
}
/// Borrows the wgpu-core backing data out of a type-erased `crate::Texture`.
fn downcast_texture(texture: &crate::Texture) -> &<ContextWgpuCore as crate::Context>::TextureData {
    downcast_ref(texture.data.as_ref())
}
/// Borrows the wgpu-core backing data out of a type-erased `crate::TextureView`.
fn downcast_texture_view(
    texture_view: &crate::TextureView,
) -> &<ContextWgpuCore as crate::Context>::TextureViewData {
    downcast_ref(texture_view.data.as_ref())
}
/// Borrows the wgpu-core backing data out of a type-erased `crate::Sampler`.
fn downcast_sampler(sampler: &crate::Sampler) -> &<ContextWgpuCore as crate::Context>::SamplerData {
    downcast_ref(sampler.data.as_ref())
}
/// Borrows the wgpu-core backing data out of a type-erased `crate::QuerySet`.
fn downcast_query_set(
    query_set: &crate::QuerySet,
) -> &<ContextWgpuCore as crate::Context>::QuerySetData {
    downcast_ref(query_set.data.as_ref())
}
/// Borrows the wgpu-core backing data out of a type-erased `crate::BindGroupLayout`.
fn downcast_bind_group_layout(
    bind_group_layout: &crate::BindGroupLayout,
) -> &<ContextWgpuCore as crate::Context>::BindGroupLayoutData {
    downcast_ref(bind_group_layout.data.as_ref())
}
/// Borrows the wgpu-core backing data out of a type-erased `crate::PipelineLayout`.
fn downcast_pipeline_layout(
    pipeline_layout: &crate::PipelineLayout,
) -> &<ContextWgpuCore as crate::Context>::PipelineLayoutData {
    downcast_ref(pipeline_layout.data.as_ref())
}
/// Borrows the wgpu-core backing data out of a type-erased `crate::ShaderModule`.
fn downcast_shader_module(
    shader_module: &crate::ShaderModule,
) -> &<ContextWgpuCore as crate::Context>::ShaderModuleData {
    downcast_ref(shader_module.data.as_ref())
}
/// Borrows the wgpu-core backing data out of a type-erased `crate::PipelineCache`.
fn downcast_pipeline_cache(
    pipeline_cache: &crate::PipelineCache,
) -> &<ContextWgpuCore as crate::Context>::PipelineCacheData {
    downcast_ref(pipeline_cache.data.as_ref())
}