// gpu_alloc_types/device.rs

use {
    crate::types::{MemoryHeap, MemoryType},
    alloc::borrow::Cow,
    core::ptr::NonNull,
};
6
/// Memory exhausted error.
///
/// Returned by [`MemoryDevice`] operations when either the device or the
/// host cannot provide the requested memory.
// Common traits are derived eagerly (Rust API guideline C-COMMON-TRAITS):
// the enum is a field-less plain value, so `Copy` is free and comparing /
// hashing error values costs nothing to support.
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
pub enum OutOfMemory {
    /// Device memory exhausted.
    OutOfDeviceMemory,

    /// Host memory exhausted.
    OutOfHostMemory,
}
16
/// Memory mapping error.
///
/// Returned by [`MemoryDevice::map_memory`] when a region of device memory
/// cannot be mapped into the host address space.
// Same rationale as `OutOfMemory`: field-less value enum, so the common
// traits are derived eagerly and cost nothing (C-COMMON-TRAITS).
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
pub enum DeviceMapError {
    /// Device memory exhausted.
    OutOfDeviceMemory,

    /// Host memory exhausted.
    OutOfHostMemory,

    /// Map failed due to implementation specific error.
    MapFailed,
}
29
/// Specifies range of the mapped memory region.
///
/// Used by [`MemoryDevice::invalidate_memory_ranges`] and
/// [`MemoryDevice::flush_memory_ranges`] to describe sub-regions of mapped
/// memory objects.
#[derive(Debug)]
pub struct MappedMemoryRange<'a, M> {
    /// Memory object reference.
    pub memory: &'a M,

    /// Offset in bytes from start of the memory object.
    pub offset: u64,

    /// Size in bytes of the memory range.
    pub size: u64,
}

// `Clone`/`Copy` are implemented by hand rather than derived: the struct
// only holds a shared reference to `M` plus two `u64`s, so it is always
// copyable, while `#[derive(Clone, Copy)]` would add a spurious
// `M: Clone` / `M: Copy` bound on the impls.
impl<M> Clone for MappedMemoryRange<'_, M> {
    #[inline]
    fn clone(&self) -> Self {
        *self
    }
}

impl<M> Copy for MappedMemoryRange<'_, M> {}
42
/// Properties of the device that will be used for allocating memory objects.
///
/// See `gpu-alloc-<backend>` crate to learn how to obtain one for backend of choice.
#[derive(Debug)]
pub struct DeviceProperties<'a> {
    /// Array of memory types provided by the device.
    /// Index into this array is what `MemoryDevice::allocate_memory` takes
    /// as `memory_type`.
    pub memory_types: Cow<'a, [MemoryType]>,

    /// Array of memory heaps provided by the device.
    pub memory_heaps: Cow<'a, [MemoryHeap]>,

    /// Maximum number of valid memory allocations that can exist simultaneously within the device.
    pub max_memory_allocation_count: u32,

    /// Maximum size in bytes for a single allocation supported by the device.
    pub max_memory_allocation_size: u64,

    /// Atom size for host mappable non-coherent memory.
    /// Flush/invalidate ranges on non-`HOST_COHERENT` memory must be
    /// multiples of this value (see `MemoryDevice::flush_memory_ranges`).
    pub non_coherent_atom_size: u64,

    /// Specifies if feature required to fetch device address is enabled.
    pub buffer_device_address: bool,
}
66
bitflags::bitflags! {
    /// Allocation flags
    ///
    /// Passed to `MemoryDevice::allocate_memory` to request optional
    /// capabilities for the allocated memory object.
    #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
    #[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
    pub struct AllocationFlags : u8 {
        /// Specifies that the memory can be used for buffers created
        /// with flag that allows fetching device address.
        /// Requires `DeviceProperties::buffer_device_address` to be enabled.
        const DEVICE_ADDRESS = 0x1;
    }
}
77
/// Abstract device that can be used to allocate memory objects.
///
/// `M` is the backend-specific memory object handle type.
/// Implemented by `gpu-alloc-<backend>` crates for each backend of choice.
pub trait MemoryDevice<M> {
    /// Allocates new memory object from device.
    /// This function may be expensive and even limit maximum number of memory
    /// objects allocated,
    /// which is the reason for sub-allocation this crate provides.
    ///
    /// # Safety
    ///
    /// `memory_type` must be valid index for memory type associated with this device.
    /// Retrieving this information is implementation specific.
    ///
    /// `flags` must be supported by the device.
    unsafe fn allocate_memory(
        &self,
        size: u64,
        memory_type: u32,
        flags: AllocationFlags,
    ) -> Result<M, OutOfMemory>;

    /// Deallocate memory object.
    ///
    /// # Safety
    ///
    /// Memory object must have been allocated from this device.\
    /// All clones of specified memory handle must be dropped before calling this function.
    unsafe fn deallocate_memory(&self, memory: M);

    /// Map region of device memory to host memory space.
    /// Returns a pointer to the beginning of the mapped region.
    ///
    /// # Safety
    ///
    /// * Memory object must have been allocated from this device.
    /// * Memory object must not be already mapped.
    /// * Memory must be allocated from type with `HOST_VISIBLE` property.
    /// * `offset + size` must not overflow.
    /// * `offset + size` must not be larger than memory object size specified when
    ///   memory object was allocated from this device.
    unsafe fn map_memory(
        &self,
        memory: &mut M,
        offset: u64,
        size: u64,
    ) -> Result<NonNull<u8>, DeviceMapError>;

    /// Unmap previously mapped memory region.
    ///
    /// # Safety
    ///
    /// * Memory object must have been allocated from this device.
    /// * Memory object must be mapped.
    unsafe fn unmap_memory(&self, memory: &mut M);

    /// Invalidates ranges of memory mapped regions,
    /// making device writes visible to the host.
    ///
    /// # Safety
    ///
    /// * Memory objects must have been allocated from this device.
    /// * `offset` and `size` in each element of `ranges` must specify
    ///   subregion of currently mapped memory region.
    /// * If `memory` in some element of `ranges` does not contain `HOST_COHERENT` property
    ///   then `offset` and `size` of that element must be multiple of `non_coherent_atom_size`.
    unsafe fn invalidate_memory_ranges(
        &self,
        ranges: &[MappedMemoryRange<'_, M>],
    ) -> Result<(), OutOfMemory>;

    /// Flushes ranges of memory mapped regions,
    /// making host writes visible to the device.
    ///
    /// # Safety
    ///
    /// * Memory objects must have been allocated from this device.
    /// * `offset` and `size` in each element of `ranges` must specify
    ///   subregion of currently mapped memory region.
    /// * If `memory` in some element of `ranges` does not contain `HOST_COHERENT` property
    ///   then `offset` and `size` of that element must be multiple of `non_coherent_atom_size`.
    unsafe fn flush_memory_ranges(
        &self,
        ranges: &[MappedMemoryRange<'_, M>],
    ) -> Result<(), OutOfMemory>;
}