smithay_client_toolkit/shm/slot.rs

use std::io;

use std::{
    os::unix::io::{AsRawFd, OwnedFd},
    sync::{
        atomic::{AtomicU8, AtomicUsize, Ordering},
        Arc, Mutex, Weak,
    },
};

use wayland_client::{
    protocol::{wl_buffer, wl_shm, wl_surface},
    Proxy,
};

use crate::{globals::ProvidesBoundGlobal, shm::raw::RawPool, shm::CreatePoolError};
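
/// An error that may occur when creating a buffer.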
#[derive(Debug, thiserror::Error)]
pub enum CreateBufferError {
    #[error(transparent)]
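    /// Slot creation failed with an I/O error.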
    Io(#[from] io::Error),

    #[error("Incorrect pool for slot")]
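    /// The slot was allocated from a different pool.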
    PoolMismatch,

    #[error("Requested buffer size is too large for slot")]
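    /// The requested buffer does not fit in the slot.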
    SlotTooSmall,
}
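
/// An error that may occur when marking a buffer as active.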
#[derive(Debug, thiserror::Error)]
pub enum ActivateSlotError {
    #[error("Buffer was already active")]
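    /// The buffer was already marked active.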
    AlreadyActive,
}
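
/// A pool of shared memory from which buffers are allocated in dynamically managed slots.
///
/// A minimal usage sketch; the `pool` and `surface` values, and the crate paths in the
/// hidden setup lines, are illustrative assumptions rather than part of this module:
///
/// ```no_run
/// # fn draw(pool: &mut smithay_client_toolkit::shm::slot::SlotPool,
/// #         surface: &wayland_client::protocol::wl_surface::WlSurface) {
/// use wayland_client::protocol::wl_shm;
///
/// // Allocate a 640x480 ARGB buffer in a fresh slot and get its bytes.
/// let (buffer, canvas) = pool
///     .create_buffer(640, 480, 640 * 4, wl_shm::Format::Argb8888)
///     .expect("create buffer");
/// canvas.fill(0xFF); // opaque white
/// buffer.attach_to(surface).expect("attach");
/// surface.commit();
/// # }
/// ```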
#[derive(Debug)]
pub struct SlotPool {
    pub(crate) inner: RawPool,
    free_list: Arc<Mutex<Vec<FreelistEntry>>>,
}
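
/// A contiguous run of free bytes in the pool, tracked by offset and length.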
#[derive(Debug)]
struct FreelistEntry {
    offset: usize,
    len: usize,
}
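
/// A chunk of the pool's memory that can back one or more buffers.
///
/// Slots are reference-counted: cloning one yields another handle to the same memory, and the
/// memory returns to the pool's free list only once every handle and buffer using it is gone.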
#[derive(Debug)]
pub struct Slot {
    inner: Arc<SlotInner>,
}

#[derive(Debug)]
struct SlotInner {
    free_list: Weak<Mutex<Vec<FreelistEntry>>>,
    offset: usize,
    len: usize,
    active_buffers: AtomicUsize,
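    /// Count of all live references to this slot: every `Slot` handle plus one for each
    /// `BufferData` whose buffer has not yet died.  The memory is returned to the free list
    /// when this count reaches zero.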
    all_refs: AtomicUsize,
}
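
/// A wrapper around a [`wl_buffer::WlBuffer`] allocated from a [`SlotPool`].
///
/// When this object is dropped, the buffer is destroyed immediately if it is inactive, or else
/// once the compositor sends its release event.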
#[derive(Debug)]
pub struct Buffer {
    buffer: wl_buffer::WlBuffer,
    height: i32,
    stride: i32,
    slot: Slot,
}
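
/// Protocol object data for a `WlBuffer`: tracks the buffer's slot and its activation state.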
#[derive(Debug)]
struct BufferData {
    inner: Arc<SlotInner>,
    state: AtomicU8,
}

impl BufferData {
    /// The buffer is attached to a surface (and counted in `active_buffers`).
    const ACTIVE: u8 = 0;

    /// The buffer is not attached; its memory may be reused.
    const INACTIVE: u8 = 1;

    /// The `Buffer` handle was dropped while the buffer was active; destroy the buffer once
    /// the compositor releases it.
    const DESTROY_ON_RELEASE: u8 = 2;

    /// The buffer has been destroyed.
    const DEAD: u8 = 3;

    /// Bit set on the state when the compositor releases the buffer.
    const RELEASE_SET: u8 = 1;

    /// Bit set on the state when the `Buffer` handle is dropped.
    const DESTROY_SET: u8 = 2;

    /// Called after successfully transitioning the state to `DEAD`.
    fn record_death(&self) {
        // Dropping a hand-built Slot decrements all_refs and, if this was the last reference,
        // returns the memory to the pool's free list.
        drop(Slot { inner: self.inner.clone() })
    }
}

impl SlotPool {
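    /// Create a new pool backed by shared memory, with an initial size of `len` bytes.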
    pub fn new(
        len: usize,
        shm: &impl ProvidesBoundGlobal<wl_shm::WlShm, 1>,
    ) -> Result<Self, CreatePoolError> {
        let inner = RawPool::new(len, shm)?;
        let free_list = Arc::new(Mutex::new(vec![FreelistEntry { offset: 0, len: inner.len() }]));
        Ok(SlotPool { inner, free_list })
    }
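
    /// Create a new buffer in a new slot.
    ///
    /// This returns the buffer together with its canvas.  The parameters are:
    ///
    /// - `width`: the width of the buffer, in pixels
    /// - `height`: the height of the buffer, in pixels
    /// - `stride`: the distance, in bytes, between the start of one row and the next
    /// - `format`: the pixel format; using a format the compositor does not advertise can
    ///   cause a protocol error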
    pub fn create_buffer(
        &mut self,
        width: i32,
        height: i32,
        stride: i32,
        format: wl_shm::Format,
    ) -> Result<(Buffer, &mut [u8]), CreateBufferError> {
        let len = (height as usize) * (stride as usize);
        let slot = self.new_slot(len)?;
        let buffer = self.create_buffer_in(&slot, width, height, stride, format)?;
        let canvas = self.raw_data_mut(&slot);
        Ok((buffer, canvas))
    }
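
    /// Get the bytes corresponding to a given slot or buffer, if drawing to it is permitted.
    ///
    /// Returns `None` if the object's slot still has active buffers or came from another pool.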
    pub fn canvas(&mut self, key: &impl CanvasKey) -> Option<&mut [u8]> {
        key.canvas(self)
    }

    #[allow(clippy::len_without_is_empty)]
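    /// The total size of the pool, in bytes.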
    pub fn len(&self) -> usize {
        self.inner.len()
    }
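
    /// Resize the pool.
    ///
    /// This is only useful if the pool was created smaller than needed; note that the
    /// underlying `wl_shm` protocol only permits growing a pool, never shrinking it.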
    pub fn resize(&mut self, size: usize) -> io::Result<()> {
        let old_len = self.inner.len();
        self.inner.resize(size)?;
        let new_len = self.inner.len();
        if old_len == new_len {
            return Ok(());
        }
        // Add the new memory to the free list, extending the final entry if it already ends
        // at the old end of the pool.
        let mut free = self.free_list.lock().unwrap();
        if let Some(FreelistEntry { offset, len }) = free.last_mut() {
            if *offset + *len == old_len {
                *len += new_len - old_len;
                return Ok(());
            }
        }
        free.push(FreelistEntry { offset: old_len, len: new_len - old_len });
        Ok(())
    }

    fn alloc(&mut self, size: usize) -> io::Result<usize> {
        let mut free = self.free_list.lock().unwrap();
        // First fit: take the beginning of the first free-list entry that is large enough.
        for FreelistEntry { offset, len } in free.iter_mut() {
            if *len >= size {
                let rv = *offset;
                *len -= size;
                *offset += size;
                return Ok(rv);
            }
        }
        // No entry fits, so the pool must grow.  If the last free entry ends exactly at the
        // end of the pool, the new allocation can start inside it.
        let mut rv = self.inner.len();
        let mut pop_tail = false;
        if let Some(FreelistEntry { offset, len }) = free.last() {
            if offset + len == self.inner.len() {
                rv -= len;
                pop_tail = true;
            }
        }
        // Grow by at least a factor of two to amortize the cost of repeated allocations.
        let target = std::cmp::max(rv + size, self.inner.len() * 2);
        self.inner.resize(target)?;
        // The tail entry, if any, is consumed by this allocation.
        if pop_tail {
            free.pop();
        }
        // Return any memory past the end of the new allocation to the free list.
        if target > rv + size {
            free.push(FreelistEntry { offset: rv + size, len: target - rv - size });
        }
        Ok(rv)
    }

    /// Return the range `[offset, offset + len)` to the free list, merging with neighbors.
    fn free(free_list: &Mutex<Vec<FreelistEntry>>, mut offset: usize, mut len: usize) {
        let mut free = free_list.lock().unwrap();
        let mut nf = Vec::with_capacity(free.len() + 1);
        // The list is kept sorted by offset; rebuild it, coalescing adjacent entries.
        for &FreelistEntry { offset: ioff, len: ilen } in free.iter() {
            if ioff + ilen == offset {
                // This entry ends where the freed range starts: extend the range backwards.
                offset = ioff;
                len += ilen;
                continue;
            }
            if ioff == offset + len {
                // This entry starts where the freed range ends: extend the range forwards.
                len += ilen;
                continue;
            }
            if ioff > offset + len && len != 0 {
                // Past the insertion point: emit the freed range before this entry.
                nf.push(FreelistEntry { offset, len });
                len = 0;
            }
            if ilen != 0 {
                nf.push(FreelistEntry { offset: ioff, len: ilen });
            }
        }
        if len != 0 {
            nf.push(FreelistEntry { offset, len });
        }
        *free = nf;
    }
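
    /// Allocate a new slot of at least `len` bytes, growing the pool if needed.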
    pub fn new_slot(&mut self, mut len: usize) -> io::Result<Slot> {
        // Round the requested length up to a multiple of 64 bytes.
        len = (len + 63) & !63;
        let offset = self.alloc(len)?;

        Ok(Slot {
            inner: Arc::new(SlotInner {
                free_list: Arc::downgrade(&self.free_list),
                offset,
                len,
                active_buffers: AtomicUsize::new(0),
                all_refs: AtomicUsize::new(1),
            }),
        })
    }
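
    /// Get the raw bytes corresponding to a slot, ignoring any active buffers.
    ///
    /// Returns an empty slice if the slot belongs to a different pool.  Prefer
    /// [`Slot::canvas`], which refuses access while the compositor may still be reading
    /// the memory.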
    pub fn raw_data_mut(&mut self, slot: &Slot) -> &mut [u8] {
        if slot.inner.free_list.as_ptr() == Arc::as_ptr(&self.free_list) {
            &mut self.inner.mmap()[slot.inner.offset..][..slot.inner.len]
        } else {
            &mut []
        }
    }
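
    /// Create a new buffer corresponding to a slot.
    ///
    /// The parameters are:
    ///
    /// - `width`: the width of the buffer, in pixels
    /// - `height`: the height of the buffer, in pixels
    /// - `stride`: the distance, in bytes, between the start of one row and the next
    /// - `format`: the pixel format
    ///
    /// This fails if the slot belongs to a different pool or cannot hold `height * stride`
    /// bytes.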
    pub fn create_buffer_in(
        &mut self,
        slot: &Slot,
        width: i32,
        height: i32,
        stride: i32,
        format: wl_shm::Format,
    ) -> Result<Buffer, CreateBufferError> {
        let offset = slot.inner.offset as i32;
        let len = (height as usize) * (stride as usize);
        if len > slot.inner.len {
            return Err(CreateBufferError::SlotTooSmall);
        }

        if slot.inner.free_list.as_ptr() != Arc::as_ptr(&self.free_list) {
            return Err(CreateBufferError::PoolMismatch);
        }

        let slot = slot.clone();
        // Take an extra reference on behalf of the BufferData; record_death releases it once
        // the buffer is dead.
        slot.inner.all_refs.fetch_add(1, Ordering::Relaxed);
        let data = Arc::new(BufferData {
            inner: slot.inner.clone(),
            state: AtomicU8::new(BufferData::INACTIVE),
        });
        let buffer = self.inner.create_buffer_raw(offset, width, height, stride, format, data);
        Ok(Buffer { buffer, height, stride, slot })
    }
}

impl Clone for Slot {
    fn clone(&self) -> Self {
        let inner = self.inner.clone();
        inner.all_refs.fetch_add(1, Ordering::Relaxed);
        Slot { inner }
    }
}

impl Drop for Slot {
    fn drop(&mut self) {
        // If this was the last reference, return the memory to the free list (the pool itself
        // may already be gone, in which case there is nothing to do).
        if self.inner.all_refs.fetch_sub(1, Ordering::Relaxed) == 1 {
            if let Some(free_list) = self.inner.free_list.upgrade() {
                SlotPool::free(&free_list, self.inner.offset, self.inner.len);
            }
        }
    }
}

impl Drop for SlotInner {
    fn drop(&mut self) {
        debug_assert_eq!(*self.all_refs.get_mut(), 0);
    }
}
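
/// A helper trait for [`SlotPool::canvas`], implemented by both [`Slot`] and [`Buffer`].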
pub trait CanvasKey {
    fn canvas<'pool>(&self, pool: &'pool mut SlotPool) -> Option<&'pool mut [u8]>;
}

impl Slot {
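    /// Returns `true` if any buffer in this slot is still marked active, i.e. attached and not
    /// yet released by the compositor.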
    pub fn has_active_buffers(&self) -> bool {
        self.inner.active_buffers.load(Ordering::Relaxed) != 0
    }

    #[allow(clippy::len_without_is_empty)]
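    /// The size of this slot, in bytes.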
    pub fn len(&self) -> usize {
        self.inner.len
    }
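
    /// Get the bytes corresponding to this slot, if drawing to it is permitted.
    ///
    /// Returns `None` while any buffer in the slot is still active, or if the slot belongs to
    /// a different pool.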
    pub fn canvas<'pool>(&self, pool: &'pool mut SlotPool) -> Option<&'pool mut [u8]> {
        if self.has_active_buffers() {
            return None;
        }
        if self.inner.free_list.as_ptr() == Arc::as_ptr(&pool.free_list) {
            Some(&mut pool.inner.mmap()[self.inner.offset..][..self.inner.len])
        } else {
            None
        }
    }
}

impl CanvasKey for Slot {
    fn canvas<'pool>(&self, pool: &'pool mut SlotPool) -> Option<&'pool mut [u8]> {
        self.canvas(pool)
    }
}

impl Buffer {
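    /// Attach the buffer to a surface.
    ///
    /// This marks the buffer as active until the compositor releases it; attaching a buffer
    /// that is already active fails with [`ActivateSlotError::AlreadyActive`].  The surface
    /// still needs to be committed for the attachment to take effect.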
    pub fn attach_to(&self, surface: &wl_surface::WlSurface) -> Result<(), ActivateSlotError> {
        self.activate()?;
        surface.attach(Some(&self.buffer), 0, 0);
        Ok(())
    }
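
    /// Get the underlying [`wl_buffer::WlBuffer`].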
    pub fn wl_buffer(&self) -> &wl_buffer::WlBuffer {
        &self.buffer
    }

    pub fn height(&self) -> i32 {
        self.height
    }

    pub fn stride(&self) -> i32 {
        self.stride
    }

    fn data(&self) -> Option<&BufferData> {
        self.buffer.object_data()?.downcast_ref()
    }
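
    /// Get the bytes corresponding to this buffer, if drawing to it is permitted.
    ///
    /// Returns `None` while any buffer in this buffer's slot is active, or if the buffer
    /// belongs to a different pool.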
    pub fn canvas<'pool>(&self, pool: &'pool mut SlotPool) -> Option<&'pool mut [u8]> {
        let len = (self.height as usize) * (self.stride as usize);
        if self.slot.inner.active_buffers.load(Ordering::Relaxed) != 0 {
            return None;
        }
        if self.slot.inner.free_list.as_ptr() == Arc::as_ptr(&pool.free_list) {
            Some(&mut pool.inner.mmap()[self.slot.inner.offset..][..len])
        } else {
            None
        }
    }
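
    /// Get a handle to the slot backing this buffer.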
    pub fn slot(&self) -> Slot {
        self.slot.clone()
    }
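
    /// Manually mark the buffer as active.
    ///
    /// This is useful when attaching the raw [`wl_buffer::WlBuffer`] to a surface yourself
    /// instead of going through [`Buffer::attach_to`].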
    pub fn activate(&self) -> Result<(), ActivateSlotError> {
        let data = self.data().expect("UserData type mismatch");

        // INACTIVE is ACTIVE with the RELEASE_SET bit set, so clearing that bit moves INACTIVE
        // to ACTIVE and leaves ACTIVE unchanged; the prior state tells us which one happened.
        match data.state.fetch_and(!BufferData::RELEASE_SET, Ordering::Relaxed) {
            BufferData::INACTIVE => {
                data.inner.active_buffers.fetch_add(1, Ordering::Relaxed);
                Ok(())
            }
            BufferData::ACTIVE => Err(ActivateSlotError::AlreadyActive),
            _ => unreachable!("Invalid state in BufferData"),
        }
    }
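
    /// Manually mark the buffer as inactive.
    ///
    /// This may be needed if the compositor will never send a release event, for example when
    /// the surface it was attached to has been destroyed.  Deactivating a buffer the compositor
    /// is still reading can cause graphical glitches.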
    pub fn deactivate(&self) -> Result<(), ActivateSlotError> {
        let data = self.data().expect("UserData type mismatch");

        // Setting the RELEASE_SET bit acts like a release event: ACTIVE becomes INACTIVE.
        match data.state.fetch_or(BufferData::RELEASE_SET, Ordering::Relaxed) {
            BufferData::ACTIVE => {
                data.inner.active_buffers.fetch_sub(1, Ordering::Relaxed);
                Ok(())
            }
            BufferData::INACTIVE => Err(ActivateSlotError::AlreadyActive),
            _ => unreachable!("Invalid state in BufferData"),
        }
    }
}

impl CanvasKey for Buffer {
    fn canvas<'pool>(&self, pool: &'pool mut SlotPool) -> Option<&'pool mut [u8]> {
        self.canvas(pool)
    }
}

impl Drop for Buffer {
    fn drop(&mut self) {
        if let Some(data) = self.data() {
            match data.state.fetch_or(BufferData::DESTROY_SET, Ordering::Relaxed) {
                BufferData::ACTIVE => {
                    // Still active: the release handler will see DESTROY_ON_RELEASE and finish
                    // the destruction for us.
                }
                BufferData::INACTIVE => {
                    data.record_death();
                    self.buffer.destroy();
                }
                _ => unreachable!("Invalid state in BufferData"),
            }
        }
    }
}

impl wayland_client::backend::ObjectData for BufferData {
    fn event(
        self: Arc<Self>,
        handle: &wayland_client::backend::Backend,
        msg: wayland_backend::protocol::Message<wayland_backend::client::ObjectId, OwnedFd>,
    ) -> Option<Arc<dyn wayland_backend::client::ObjectData>> {
        debug_assert!(wayland_client::backend::protocol::same_interface(
            msg.sender_id.interface(),
            wl_buffer::WlBuffer::interface()
        ));
        // wl_buffer has a single event: release (opcode 0).
        debug_assert!(msg.opcode == 0);

        match self.state.fetch_or(BufferData::RELEASE_SET, Ordering::Relaxed) {
            BufferData::ACTIVE => {
                self.inner.active_buffers.fetch_sub(1, Ordering::Relaxed);
            }
            BufferData::INACTIVE => {
                // The compositor released a buffer that was not marked active.
                log::debug!("Unexpected WlBuffer::Release on an inactive buffer");
            }
            BufferData::DESTROY_ON_RELEASE => {
                self.record_death();
                self.inner.active_buffers.fetch_sub(1, Ordering::Relaxed);

                // The Buffer handle is gone, so destroy the buffer now that it was released.
                // wl_buffer's only request, destroy, is also opcode 0 with no arguments, so
                // the release message can be converted and echoed back as the destroy request.
                handle
                    .send_request(msg.map_fd(|x| x.as_raw_fd()), None, None)
                    .expect("Unexpected invalid ID");
            }
            BufferData::DEAD => {
                // No-op: the buffer was already destroyed.
            }
            _ => unreachable!("Invalid state in BufferData"),
        }

        None
    }

    fn destroyed(&self, _: wayland_backend::client::ObjectId) {}
}

impl Drop for BufferData {
    fn drop(&mut self) {
        let state = *self.state.get_mut();
        if state == BufferData::ACTIVE || state == BufferData::DESTROY_ON_RELEASE {
            // No release event can arrive once the object data is dropped, so drop the active
            // count ourselves.
            self.inner.active_buffers.fetch_sub(1, Ordering::Relaxed);
        }

        if state != BufferData::DEAD {
            // The buffer never recorded its death; do it now so the slot can be freed.
            self.record_death();
        }
    }
}