imago/io_buffers.rs

//! Types for I/O buffers.
//!
//! This module provides:
//! - buffer types that can be allocated with arbitrary alignment,
//! - references to buffers that more or less ensure the content is read only once (because it can
//!   change for buffers owned by VM guests),
//! - buffer vector types.
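//!
//! # Example
//!
//! A minimal usage sketch (doctest ignored; it assumes this module is publicly exported as
//! `imago::io_buffers`, and the size/alignment values are arbitrary):
//!
//! ```ignore
//! use imago::io_buffers::IoBuffer;
//!
//! // Allocate 4096 uninitialized bytes, aligned to 512 bytes.
//! let mut buf = IoBuffer::new(4096, 512).unwrap();
//!
//! // References are consumed when turned into slices, so obtain a fresh one per access.
//! buf.as_mut().into_slice().fill(0);
//! assert!(buf.as_ref().into_slice().iter().all(|&b| b == 0));
//! ```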

use crate::macros::passthrough_trait_fn;
#[cfg(feature = "vm-memory")]
use crate::misc_helpers::ImagoAsRef;
use std::alloc::{self, GlobalAlloc};
use std::fmt::{self, Debug, Formatter};
use std::io::{IoSlice, IoSliceMut};
use std::marker::PhantomData;
#[cfg(unix)]
use std::mem;
use std::mem::{size_of, size_of_val};
use std::ops::Range;
use std::{cmp, io, ptr, slice};

/// Owned memory buffer.
pub struct IoBuffer {
    /// Raw pointer to the start of the buffer.
    pointer: *mut u8,

    /// Size in bytes.
    size: usize,

    /// Allocation layout.  `None` only for null buffers.
    layout: Option<alloc::Layout>,
}

/// Reference to any immutable memory buffer.
pub struct IoBufferRef<'a> {
    /// Raw pointer to the start of the buffer.
    pointer: *const u8,

    /// Size in bytes.
    size: usize,

    /// Lifetime marker.
    _lifetime: PhantomData<&'a [u8]>,
}

/// Reference to any mutable memory buffer.
pub struct IoBufferMut<'a> {
    /// Raw pointer to the start of the buffer.
    pointer: *mut u8,

    /// Size in bytes.
    size: usize,

    /// Lifetime marker.
    _lifetime: PhantomData<&'a mut [u8]>,
}

// Blocked because of the pointer, but we want this to be usable across threads
unsafe impl Send for IoBuffer {}
unsafe impl Sync for IoBuffer {}
unsafe impl Send for IoBufferRef<'_> {}
unsafe impl Sync for IoBufferRef<'_> {}
unsafe impl Send for IoBufferMut<'_> {}
unsafe impl Sync for IoBufferMut<'_> {}
impl IoBuffer {
    /// Create a new owned buffer, containing uninitialized data.
    ///
    /// Note that the returned buffer contains uninitialized data, which is perfectly fine for an
    /// I/O buffer.
    pub fn new(size: usize, alignment: usize) -> io::Result<Self> {
        let layout = alloc::Layout::from_size_align(size, alignment).map_err(io::Error::other)?;
        Self::new_with_layout(layout)
    }

    /// Create a new owned buffer, containing uninitialized data, with the given `layout`.
    pub fn new_with_layout(layout: alloc::Layout) -> io::Result<Self> {
        if layout.size() == 0 {
            return Ok(IoBuffer {
                pointer: ptr::null_mut(),
                size: 0,
                layout: None,
            });
        }

        // We guarantee the size not to be 0 and do not care about the memory being uninitialized,
        // so this is safe
        let pointer = unsafe { alloc::System.alloc(layout) };

        if pointer.is_null() {
            return Err(io::Error::new(
                io::ErrorKind::OutOfMemory,
                format!(
                    "Failed to allocate memory (size={}, alignment={})",
                    layout.size(),
                    layout.align(),
                ),
            ));
        }

        Ok(IoBuffer {
            pointer,
            size: layout.size(),
            layout: Some(layout),
        })
    }

    /// Length in bytes.
    pub fn len(&self) -> usize {
        self.size
    }

    /// Whether this is a null buffer (length is 0).
    pub fn is_empty(&self) -> bool {
        self.size == 0
    }

    /// Generate an immutable reference.
    pub fn as_ref(&self) -> IoBufferRef<'_> {
        IoBufferRef {
            pointer: self.pointer as *const u8,
            size: self.size,
            _lifetime: PhantomData,
        }
    }

    /// Generate an immutable reference to a sub-range.
    pub fn as_ref_range(&self, range: Range<usize>) -> IoBufferRef<'_> {
        IoBufferRef::from_slice(&self.as_ref().into_slice()[range])
    }

    /// Generate a mutable reference.
    pub fn as_mut(&mut self) -> IoBufferMut<'_> {
        IoBufferMut {
            pointer: self.pointer,
            size: self.size,
            _lifetime: PhantomData,
        }
    }

    /// Generate a mutable reference to a sub-range.
    pub fn as_mut_range(&mut self, range: Range<usize>) -> IoBufferMut<'_> {
        (&mut self.as_mut().into_slice()[range]).into()
    }
}

impl Drop for IoBuffer {
    /// Free this buffer.
    fn drop(&mut self) {
        if let Some(layout) = self.layout {
            // Safe because we have allocated this buffer using `alloc::System`
            unsafe {
                alloc::System.dealloc(self.pointer, layout);
            }
        }
    }
}

/// Common functions for both `IoBufferRef` and `IoBufferMut`.
#[allow(dead_code)]
pub(crate) trait IoBufferRefTrait<'a>: Sized {
    /// `&[T]` or `&mut [T]`.
    type SliceType<T: Copy + Sized + 'a>;

    /// `*const T` or `*mut T`.
    type PointerType<T: Copy + Sized + 'a>;

    /// Create a reference to a slice.
    fn from_slice(slice: Self::SliceType<u8>) -> Self;

    /// Create an owned [`IoBuffer`] with the same data (copied).
    fn try_into_owned(self, alignment: usize) -> io::Result<IoBuffer>;

    /// Size in bytes.
    fn len(&self) -> usize;

    /// Whether the length is 0.
    fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// Return the pointer to the start of the buffer.
    fn as_ptr(&self) -> Self::PointerType<u8>;

    /// Turn this reference into a slice.
    ///
    /// References to `IoBuffer`s must not be copied/cloned (so they can only be accessed once;
    /// they are considered volatile due to potential VM guest accesses), so this consumes the
    /// object.
    fn into_slice(self) -> Self::SliceType<u8> {
        // Alignment requirement is always met, resulting data is pure binary data
        unsafe { self.into_typed_slice::<u8>() }
    }

    /// Turn this reference into a slice with the given element type.
    ///
    /// # Safety
    /// Caller must ensure that alignment and length requirements are met and that the resulting
    /// data is valid.
    unsafe fn into_typed_slice<T: Copy + Sized>(self) -> Self::SliceType<T>;

    /// Split the buffer at `mid`.
    ///
    /// Return `&self[..mid]` and `&self[mid..]`.
    ///
    /// If `mid > self.len()`, return `&self[..]` and `[]`.
    fn split_at(self, mid: usize) -> (Self, Self);

    /// Make this reference immutable.
    fn into_ref(self) -> IoBufferRef<'a>;
}

impl<'a> IoBufferRef<'a> {
    /// Create a reference to a slice.
    pub fn from_slice(slice: &'a [u8]) -> Self {
        IoBufferRef {
            pointer: slice.as_ptr(),
            size: size_of_val(slice),
            _lifetime: PhantomData,
        }
    }

    /// Create an owned [`IoBuffer`] with the same data (copied).
    pub fn try_into_owned(self, alignment: usize) -> io::Result<IoBuffer> {
        let mut new_buf = IoBuffer::new(self.len(), alignment)?;
        new_buf
            .as_mut()
            .into_slice()
            .copy_from_slice(self.into_slice());
        Ok(new_buf)
    }

    /// Size in bytes.
    pub fn len(&self) -> usize {
        self.size
    }

    /// Whether the length is 0.
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// Return the pointer to the start of the buffer.
    pub fn as_ptr(&self) -> *const u8 {
        self.pointer
    }

    /// Turn this reference into a slice.
    ///
    /// References to `IoBuffer`s must not be copied/cloned (so they can only be accessed once;
    /// they are considered volatile due to potential VM guest accesses), so this consumes the
    /// object.
    pub fn into_slice(self) -> &'a [u8] {
        // Alignment requirement is always met, resulting data is pure binary data
        unsafe { self.into_typed_slice::<u8>() }
    }

    /// Turn this reference into a slice with the given element type.
    ///
    /// # Safety
    /// Caller must ensure that alignment and length requirements are met and that the resulting
    /// data is valid.
    pub unsafe fn into_typed_slice<T: Copy + Sized>(self) -> &'a [T] {
        if self.is_empty() {
            return &[];
        }

        // Safety ensured by the caller; we ensure that nothing outside of this buffer will be part
        // of the slice
        unsafe { slice::from_raw_parts(self.as_ptr() as *const T, self.len() / size_of::<T>()) }
    }

    /// Split the buffer at `mid`.
    ///
    /// Return `&self[..mid]` and `&self[mid..]`.
    ///
    /// If `mid > self.len()`, return `&self[..]` and `[]`.
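    ///
    /// A short illustration of these semantics (doctest ignored; it assumes this module is
    /// publicly reachable from the crate root):
    ///
    /// ```ignore
    /// let data = [1u8, 2, 3, 4];
    /// let (head, tail) = IoBufferRef::from_slice(&data).split_at(1);
    /// assert_eq!(head.into_slice(), &[1]);
    /// assert_eq!(tail.into_slice(), &[2, 3, 4]);
    ///
    /// // A `mid` beyond the end yields the full buffer and an empty tail.
    /// let (head, tail) = IoBufferRef::from_slice(&data).split_at(8);
    /// assert_eq!(head.len(), 4);
    /// assert!(tail.is_empty());
    /// ```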
    pub fn split_at(self, mid: usize) -> (IoBufferRef<'a>, IoBufferRef<'a>) {
        let head_len = cmp::min(mid, self.size);

        (
            IoBufferRef {
                pointer: self.pointer,
                size: head_len,
                _lifetime: PhantomData,
            },
            IoBufferRef {
                // Safe because we have limited this to `self.size`
                pointer: unsafe { self.pointer.add(head_len) },
                size: self.size - head_len,
                _lifetime: PhantomData,
            },
        )
    }

    /// Make this reference immutable.
    pub fn into_ref(self) -> IoBufferRef<'a> {
        self
    }
}

impl<'a> IoBufferRefTrait<'a> for IoBufferRef<'a> {
    type SliceType<T: Copy + Sized + 'a> = &'a [T];
    type PointerType<T: Copy + Sized + 'a> = *const T;

    passthrough_trait_fn! { fn from_slice(slice: Self::SliceType<u8>) -> Self; }
    passthrough_trait_fn! { fn try_into_owned(self, alignment: usize) -> io::Result<IoBuffer>; }
    passthrough_trait_fn! { fn len(&self) -> usize; }
    passthrough_trait_fn! { fn as_ptr(&self) -> Self::PointerType<u8>; }
    passthrough_trait_fn! { fn split_at(self, mid: usize) -> (Self, Self); }
    passthrough_trait_fn! { fn into_ref(self) -> IoBufferRef<'a>; }

    unsafe fn into_typed_slice<T: Copy + Sized>(self) -> Self::SliceType<T> {
        // Safety ensured by caller
        unsafe { Self::into_typed_slice(self) }
    }
}

impl<'a> From<IoSlice<'a>> for IoBufferRef<'a> {
    fn from(slice: IoSlice<'a>) -> Self {
        IoBufferRef {
            pointer: slice.as_ptr(),
            size: slice.len(),
            _lifetime: PhantomData,
        }
    }
}

impl<'a> From<IoBufferRef<'a>> for IoSlice<'a> {
    fn from(buf: IoBufferRef<'a>) -> Self {
        IoSlice::new(buf.into_slice())
    }
}

impl<'a> IoBufferMut<'a> {
    /// Create a reference to a slice.
    pub fn from_slice(slice: &'a mut [u8]) -> Self {
        IoBufferMut {
            pointer: slice.as_mut_ptr(),
            size: size_of_val(slice),
            _lifetime: PhantomData,
        }
    }

    /// Create an owned [`IoBuffer`] with the same data (copied).
    pub fn try_into_owned(self, alignment: usize) -> io::Result<IoBuffer> {
        let mut new_buf = IoBuffer::new(self.len(), alignment)?;
        new_buf
            .as_mut()
            .into_slice()
            .copy_from_slice(self.into_slice());
        Ok(new_buf)
    }

    /// Size in bytes.
    pub fn len(&self) -> usize {
        self.size
    }

    /// Whether the length is 0.
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// Return the pointer to the start of the buffer.
    pub fn as_ptr(&self) -> *mut u8 {
        self.pointer
    }

    /// Turn this reference into a slice.
    ///
    /// References to `IoBuffer`s must not be copied/cloned (so they can only be accessed once;
    /// they are considered volatile due to potential VM guest accesses), so this consumes the
    /// object.
    pub fn into_slice(self) -> &'a mut [u8] {
        // Alignment requirement is always met, resulting data is pure binary data
        unsafe { self.into_typed_slice::<u8>() }
    }

    /// Turn this reference into a slice with the given element type.
    ///
    /// # Safety
    /// Caller must ensure that alignment and length requirements are met and that the resulting
    /// data is valid.
    pub unsafe fn into_typed_slice<T: Copy + Sized>(self) -> &'a mut [T] {
        if self.is_empty() {
            return &mut [];
        }

        // Safety ensured by the caller; we ensure that nothing outside of this buffer will be part
        // of the slice
        unsafe { slice::from_raw_parts_mut(self.as_ptr() as *mut T, self.len() / size_of::<T>()) }
    }

    /// Split the buffer at `mid`.
    ///
    /// Return `&self[..mid]` and `&self[mid..]`.
    ///
    /// If `mid > self.len()`, return `&self[..]` and `[]`.
    pub fn split_at(self, mid: usize) -> (IoBufferMut<'a>, IoBufferMut<'a>) {
        let head_len = cmp::min(mid, self.size);

        (
            IoBufferMut {
                pointer: self.pointer,
                size: head_len,
                _lifetime: PhantomData,
            },
            IoBufferMut {
                // Safe because we have limited this to `self.size`
                pointer: unsafe { self.pointer.add(head_len) },
                size: self.size - head_len,
                _lifetime: PhantomData,
            },
        )
    }

    /// Make this reference immutable.
    pub fn into_ref(self) -> IoBufferRef<'a> {
        IoBufferRef {
            pointer: self.pointer,
            size: self.size,
            _lifetime: PhantomData,
        }
    }
}

impl<'a> IoBufferRefTrait<'a> for IoBufferMut<'a> {
    type SliceType<T: Copy + Sized + 'a> = &'a mut [T];
    type PointerType<T: Copy + Sized + 'a> = *mut T;

    passthrough_trait_fn! { fn from_slice(slice: Self::SliceType<u8>) -> Self; }
    passthrough_trait_fn! { fn try_into_owned(self, alignment: usize) -> io::Result<IoBuffer>; }
    passthrough_trait_fn! { fn len(&self) -> usize; }
    passthrough_trait_fn! { fn as_ptr(&self) -> Self::PointerType<u8>; }
    passthrough_trait_fn! { fn split_at(self, mid: usize) -> (Self, Self); }
    passthrough_trait_fn! { fn into_ref(self) -> IoBufferRef<'a>; }

    unsafe fn into_typed_slice<T: Copy + Sized>(self) -> Self::SliceType<T> {
        // Safety ensured by caller
        unsafe { Self::into_typed_slice(self) }
    }
}

impl<'a, T: Sized> From<&'a mut [T]> for IoBufferMut<'a> {
    fn from(slice: &'a mut [T]) -> Self {
        IoBufferMut {
            pointer: slice.as_mut_ptr() as *mut u8,
            size: size_of_val(slice),
            _lifetime: PhantomData,
        }
    }
}

impl<'a> From<IoSliceMut<'a>> for IoBufferMut<'a> {
    fn from(mut slice: IoSliceMut<'a>) -> Self {
        IoBufferMut {
            pointer: slice.as_mut_ptr(),
            size: slice.len(),
            _lifetime: PhantomData,
        }
    }
}

impl<'a> From<IoBufferMut<'a>> for IoSliceMut<'a> {
    fn from(buf: IoBufferMut<'a>) -> Self {
        IoSliceMut::new(buf.into_slice())
    }
}

/// Common functions for both `IoVector` and `IoVectorMut`.
#[allow(dead_code)]
pub(crate) trait IoVectorTrait: Sized {
    /// `&[u8]` or `&mut [u8]`.
    type SliceType;

    /// `IoSlice` or `IoSliceMut`.
    type BufferType;

    /// Create an empty vector.
    fn new() -> Self;

    /// Create an empty vector, pre-allocating space for `cap` buffers.
    ///
    /// This does not allocate a memory buffer, only space in the buffer vector.
    fn with_capacity(cap: usize) -> Self;

    /// Append a slice.
    fn push(&mut self, slice: Self::SliceType);

    /// Append a slice.
    fn push_ioslice(&mut self, ioslice: Self::BufferType);

    /// Insert a slice at the given `index` in the buffer vector.
    fn insert(&mut self, index: usize, slice: Self::SliceType);

    /// Return the sum total length in bytes of all buffers in this vector.
    fn len(&self) -> u64;

    /// Return the number of buffers in this vector.
    fn buffer_count(&self) -> usize;

    /// Return `true` if and only if this vector’s length is zero.
    ///
    /// Synonymous with whether this vector’s buffer count is zero.
    fn is_empty(&self) -> bool {
        debug_assert!((self.len() == 0) == (self.buffer_count() == 0));
        self.len() == 0
    }

    /// Append all buffers from the given other vector to this vector.
    fn append(&mut self, other: Self);

    /// Split the vector into two.
    ///
    /// The first returned vector contains the bytes in the `[..mid]` range, and the second one
    /// covers the `[mid..]` range.
    fn split_at(self, mid: u64) -> (Self, Self);

    /// Like [`IoVectorTrait::split_at()`], but discards the head, only returning the tail.
    ///
    /// More efficient than using `self.split_at(mid).1`, which would require creating a new `Vec`
    /// object for the head; this version skips that.
    fn split_tail_at(self, mid: u64) -> Self;

    /// Copy the data from `self` into `slice`.
    ///
    /// Both must have the same length.
    fn copy_into_slice(&self, slice: &mut [u8]);

    /// Create a single owned [`IoBuffer`] with the same data (copied).
    fn try_into_owned(self, alignment: usize) -> io::Result<IoBuffer>;

    /// Return a corresponding `&[libc::iovec]`.
    ///
    /// # Safety
    /// `iovec` has no lifetime information.  Callers must ensure no elements in the returned slice
    /// are used beyond the lifetime `'_`.
    #[cfg(unix)]
    unsafe fn as_iovec<'a>(&'a self) -> &'a [libc::iovec]
    where
        Self: 'a;

    /// Check whether `self` is aligned.
    ///
    /// Each buffer must be aligned to `mem_alignment`, and each buffer’s length must be aligned to
    /// both `mem_alignment` and `req_alignment` (the I/O request offset/size alignment).
    fn is_aligned(&self, mem_alignment: usize, req_alignment: usize) -> bool;

    /// Return the internal vector of `IoSlice` objects.
    fn into_inner(self) -> Vec<Self::BufferType>;
}

/// Implement most of both `IoVector` and `IoVectorMut`.
macro_rules! impl_io_vector {
    ($type:tt, $inner_type:tt, $buffer_type:tt, $slice_type:ty, $slice_type_lifetime_b:ty) => {
        /// Vector of memory buffers.
        pub struct $type<'a> {
            /// Buffer list.
            vector: Vec<$inner_type<'a>>,

            /// Complete size in bytes.
            total_size: u64,
        }

        impl<'a> $type<'a> {
            /// Create an empty vector.
            pub fn new() -> Self {
                Self::default()
            }

            /// Create an empty vector, pre-allocating space for `cap` buffers.
            ///
            /// This does not allocate a memory buffer, only space in the buffer vector.
            pub fn with_capacity(cap: usize) -> Self {
                $type {
                    vector: Vec::with_capacity(cap),
                    total_size: 0,
                }
            }

            /// Append a slice.
            pub fn push(&mut self, slice: $slice_type) {
                debug_assert!(!slice.is_empty());
                self.total_size += slice.len() as u64;
                self.vector.push($inner_type::new(slice));
            }

            /// Append a slice.
            pub fn push_ioslice(&mut self, ioslice: $inner_type<'a>) {
                debug_assert!(!ioslice.is_empty());
                self.total_size += ioslice.len() as u64;
                self.vector.push(ioslice);
            }

            /// Insert a slice at the given `index` in the buffer vector.
            pub fn insert(&mut self, index: usize, slice: $slice_type) {
                debug_assert!(!slice.is_empty());
                self.total_size += slice.len() as u64;
                self.vector.insert(index, $inner_type::new(slice));
            }

            /// Return the sum total length in bytes of all buffers in this vector.
            pub fn len(&self) -> u64 {
                self.total_size
            }

            /// Return the number of buffers in this vector.
            pub fn buffer_count(&self) -> usize {
                self.vector.len()
            }

            /// Return `true` if and only if this vector’s length is zero.
            ///
            /// Synonymous with whether this vector’s buffer count is zero.
            pub fn is_empty(&self) -> bool {
                debug_assert!((self.len() == 0) == (self.buffer_count() == 0));
                self.len() == 0
            }

            /// Append all buffers from the given other vector to this vector.
            pub fn append(&mut self, mut other: Self) {
                self.total_size += other.total_size;
                self.vector.append(&mut other.vector);
            }

            /// Split the vector into two.
            ///
            /// The first returned vector contains the bytes in the `[..mid]` range, and the second
            /// one covers the `[mid..]` range.
            pub fn split_at(self, mid: u64) -> (Self, Self) {
                let (head, tail) = self.do_split_at(mid, true);
                (head.unwrap(), tail)
            }

            /// Like [`Self::split_at()`], but discards the head, only returning the tail.
            ///
            /// More efficient than using `self.split_at(mid).1`, which would require creating a
            /// new `Vec` object for the head; this version skips that.
            pub fn split_tail_at(self, mid: u64) -> Self {
                self.do_split_at(mid, false).1
            }

            /// Copy the data from `self` into `slice`.
            ///
            /// Both must have the same length.
            pub fn copy_into_slice(&self, slice: &mut [u8]) {
                if slice.len() as u64 != self.total_size {
                    panic!("IoVectorTrait::copy_into_slice() called on a slice of different length from the vector");
                }

                assert!(self.total_size <= usize::MAX as u64);

                let mut offset = 0usize;
                for elem in self.vector.iter() {
                    let next_offset = offset + elem.len();
                    slice[offset..next_offset].copy_from_slice(&elem[..]);
                    offset = next_offset;
                }
            }

            /// Create a single owned [`IoBuffer`] with the same data (copied).
            pub fn try_into_owned(self, alignment: usize) -> io::Result<IoBuffer> {
                let size = self.total_size.try_into().map_err(|_| {
                    io::Error::other(format!("Buffer is too big ({})", self.total_size))
                })?;
                let mut new_buf = IoBuffer::new(size, alignment)?;
                self.copy_into_slice(new_buf.as_mut().into_slice());
                Ok(new_buf)
            }

            /// Return a corresponding `&[libc::iovec]`.
            ///
            /// # Safety
            /// `iovec` has no lifetime information.  Callers must ensure no elements in the
            /// returned slice are used beyond the lifetime `'_`.
            #[cfg(unix)]
            pub unsafe fn as_iovec<'b>(&'b self) -> &'b [libc::iovec]
            where
                Self: 'b
            {
                // IoSlice and IoSliceMut are defined to have the same representation in memory as
                // libc::iovec does
                unsafe {
                    mem::transmute::<&'b [$inner_type<'b>], &'b [libc::iovec]>(&self.vector[..])
                }
            }

            /// Check whether `self` is aligned.
            ///
            /// Each buffer must be aligned to `mem_alignment`, and each buffer’s length must be
            /// aligned to both `mem_alignment` and `req_alignment` (the I/O request offset/size
            /// alignment).
            pub fn is_aligned(&self, mem_alignment: usize, req_alignment: usize) -> bool {
                // Trivial case
                if mem_alignment == 1 && req_alignment == 1 {
                    return true;
                }

                debug_assert!(mem_alignment.is_power_of_two() && req_alignment.is_power_of_two());
                let base_align_mask = mem_alignment - 1;
                let len_align_mask = base_align_mask | (req_alignment - 1);

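                // Worked example (hypothetical values): mem_alignment = 512 and req_alignment =
                // 4096 give base_align_mask = 0x1ff and len_align_mask = 0xfff, i.e. every buffer
                // must start on a 512-byte boundary and have a length that is a multiple of 4096.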
                self.vector.iter().all(|buf| {
                    buf.as_ptr() as usize & base_align_mask == 0 &&
                        buf.len() & len_align_mask == 0
                })
            }

            /// Return the internal vector of `IoSlice` objects.
            pub fn into_inner(self) -> Vec<$inner_type<'a>> {
                self.vector
            }

            /// Same as [`Self::push()`], but takes ownership of `self`.
            ///
            /// By taking ownership of `self` and returning it, this method allows reducing the
            /// lifetime of `self` to that of `slice`, if necessary.
            pub fn with_pushed<'b>(self, slice: $slice_type_lifetime_b) -> $type<'b>
            where
                'a: 'b,
            {
                let mut vec: $type<'b> = self;
                vec.push(slice);
                vec
            }

            /// Same as [`Self::insert()`], but takes ownership of `self`.
            ///
            /// By taking ownership of `self` and returning it, this method allows reducing the
            /// lifetime of `self` to that of `slice`, if necessary.
            pub fn with_inserted<'b>(self, index: usize, slice: $slice_type_lifetime_b) -> $type<'b>
            where
                'a: 'b,
            {
                let mut vec: $type<'b> = self;
                vec.insert(index, slice);
                vec
            }

            /// Implementation for [`Self::split_at()`] and [`Self::split_tail_at()`].
            ///
            /// If `keep_head` is true, both head and tail are returned ([`Self::split_at()`]).
            /// Otherwise, the head is discarded ([`Self::split_tail_at()`]).
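            ///
            /// For example (hypothetical values): with buffers of lengths `[3, 4]` and `mid == 5`,
            /// the second buffer is split after its second byte, so the head covers `3 + 2` bytes
            /// and the tail covers the remaining `2` bytes.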
            fn do_split_at(mut self, mid: u64, keep_head: bool) -> (Option<$type<'a>>, $type<'a>) {
                if mid >= self.total_size {
                    // Special case: Empty tail
                    return (
                        keep_head.then_some(self),
                        $type {
                            vector: Vec::new(),
                            total_size: 0,
                        },
                    );
                }

                let mut i = 0; // Current element index
                let mut offset = 0u64; // Current element offset
                let (vec_head, vec_tail) = loop {
                    if offset == mid {
                        // Clean split: `i` is fully behind `mid`, the rest is fully ahead
                        if keep_head {
                            let mut vec_head = self.vector;
                            let vec_tail = vec_head.split_off(i);
                            break (Some(vec_head), vec_tail);
                        } else {
                            break (None, self.vector.split_off(i));
                        }
                    }

                    let post_elm_offset = offset + self.vector[i].len() as u64;

                    if post_elm_offset > mid {
                        // Not so clean split: The beginning of this element was before `mid`, the end is
                        // behind it, so we must split this element between head and tail
                        let mut vec_head = self.vector;
                        let mut tail_iter = vec_head.drain(i..);

                        // This is the current element (at `i`), which must be present
                        let mid_elm = tail_iter.next().unwrap();
                        let mid_elm: $buffer_type<'a> = mid_elm.into();

                        // Each element's length is of type usize, so this must fit into usize
                        let mid_elm_head_len: usize = (mid - offset).try_into().unwrap();
                        let (mid_head, mid_tail) = mid_elm.split_at(mid_elm_head_len);

                        let mut vec_tail: Vec<$inner_type<'a>> = vec![mid_tail.into()];
                        vec_tail.extend(tail_iter);

                        if keep_head {
                            vec_head.push(mid_head.into());
                            break (Some(vec_head), vec_tail);
                        } else {
                            break (None, vec_tail);
                        }
                    }

                    offset = post_elm_offset;

                    i += 1;
                    // We know that `mid < self.total_size`, so we must encounter `mid` before the
                    // end of the vector
                    assert!(i < self.vector.len());
                };

                let head = keep_head.then(|| $type {
                    vector: vec_head.unwrap(),
                    total_size: mid,
                });
                let tail = $type {
                    vector: vec_tail,
                    total_size: self.total_size - mid,
                };

                (head, tail)
            }
        }

        impl<'a> IoVectorTrait for $type<'a> {
            type SliceType = $slice_type;
            type BufferType = $inner_type<'a>;

            passthrough_trait_fn! { fn new() -> Self; }
            passthrough_trait_fn! { fn with_capacity(cap: usize) -> Self; }
            passthrough_trait_fn! { fn push(&mut self, slice: Self::SliceType); }
            passthrough_trait_fn! { fn push_ioslice(&mut self, ioslice: Self::BufferType); }
            passthrough_trait_fn! { fn insert(&mut self, index: usize, slice: Self::SliceType); }
            passthrough_trait_fn! { fn len(&self) -> u64; }
            passthrough_trait_fn! { fn buffer_count(&self) -> usize; }
            passthrough_trait_fn! { fn append(&mut self, other: Self); }
            passthrough_trait_fn! { fn split_at(self, mid: u64) -> (Self, Self); }
            passthrough_trait_fn! { fn split_tail_at(self, mid: u64) -> Self; }
            passthrough_trait_fn! { fn copy_into_slice(&self, slice: &mut [u8]); }
            passthrough_trait_fn! { fn try_into_owned(self, alignment: usize) -> io::Result<IoBuffer>; }
            passthrough_trait_fn! { fn is_aligned(&self, mem_alignment: usize, req_alignment: usize) -> bool; }
            passthrough_trait_fn! { fn into_inner(self) -> Vec<Self::BufferType>; }

            #[cfg(unix)]
            unsafe fn as_iovec<'b>(&'b self) -> &'b [libc::iovec]
            where
                Self: 'b
            {
                // Safety ensured by caller
                unsafe { Self::as_iovec(self) }
            }
        }

        impl<'a> From<Vec<$inner_type<'a>>> for $type<'a> {
            fn from(vector: Vec<$inner_type<'a>>) -> Self {
                let total_size = vector
                    .iter()
                    .map(|e| e.len())
                    .fold(0u64, |sum, e| sum + e as u64);

                $type { vector, total_size }
            }
        }

        impl<'a> From<$buffer_type<'a>> for $type<'a> {
            fn from(buffer: $buffer_type<'a>) -> Self {
                let total_size = buffer.len() as u64;
                if total_size > 0 {
                    $type {
                        vector: vec![buffer.into()],
                        total_size,
                    }
                } else {
                    $type {
                        vector: Vec::new(),
                        total_size: 0,
                    }
                }
            }
        }

        impl<'a> From<$slice_type> for $type<'a> {
            fn from(slice: $slice_type) -> Self {
                let total_size = slice.len() as u64;
                if total_size > 0 {
                    $type {
                        vector: vec![$inner_type::new(slice)],
                        total_size,
                    }
                } else {
                    $type {
                        vector: Vec::new(),
                        total_size: 0,
                    }
                }
            }
        }

        impl<'a> Default for $type<'a> {
            fn default() -> Self {
                $type {
                    vector: Vec::new(),
                    total_size: 0,
                }
            }
        }

        impl Debug for $type<'_> {
            fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
                f.debug_struct(std::stringify!($type))
                    .field("vector.len()", &self.vector.len())
                    .field("total_size", &self.total_size)
                    .finish()
            }
        }
    };
}

impl_io_vector!(IoVector, IoSlice, IoBufferRef, &'a [u8], &'b [u8]);
impl_io_vector!(
    IoVectorMut,
    IoSliceMut,
    IoBufferMut,
    &'a mut [u8],
    &'b mut [u8]
);

#[cfg(feature = "vm-memory")]
impl<'a> IoVector<'a> {
    /// Convert a `VolatileSlice` array (from vm-memory) into an `IoVector`.
    ///
    /// In addition to the vector, return a guard that ensures that the memory in `slices` is
    /// indeed mapped while in use.  This guard must not be dropped while this vector is in use!
    pub fn from_volatile_slice<
        B: vm_memory::bitmap::BitmapSlice,
        I: IntoIterator<
            Item: ImagoAsRef<'a, vm_memory::VolatileSlice<'a, B>>,
            IntoIter: ExactSizeIterator,
        >,
    >(
        slices: I,
    ) -> (
        Self,
        VolatileSliceGuard<'a, vm_memory::volatile_memory::PtrGuard, B>,
    ) {
        let ptr_guards = slices
            .into_iter()
            .map(|slice| slice.as_ref().ptr_guard())
            .collect::<Vec<_>>();
        let buffers = ptr_guards
            .iter()
            .map(|pg| {
                let slice = if pg.len() == 0 {
                    &[]
                } else {
                    // Safe because this whole module basically exists to follow the same design concepts
                    // as `VolatileSlice`.
                    unsafe { std::slice::from_raw_parts(pg.as_ptr(), pg.len()) }
                };
                IoSlice::new(slice)
            })
            .collect::<Vec<_>>();

        let vector = IoVector::from(buffers);
        let guard = VolatileSliceGuard {
            _ptr_guards: ptr_guards,
            // `IoVector` is immutable, so no need to dirty
            dirty_on_drop: None,
        };

        (vector, guard)
    }
}

impl IoVectorMut<'_> {
    /// Fill all buffers in the vector with the given byte pattern.
    pub fn fill(&mut self, value: u8) {
        for slice in self.vector.iter_mut() {
            slice.fill(value);
        }
    }

    /// Copy data from `slice` into the buffers in this vector.
    ///
    /// The vector and the slice must have the same total length.
    pub fn copy_from_slice(&mut self, slice: &[u8]) {
        if slice.len() as u64 != self.total_size {
            panic!("IoVectorMut::copy_from_slice() called on a slice of different length from the vector");
        }

        assert!(self.total_size <= usize::MAX as u64);

        let mut offset = 0usize;
        for elem in self.vector.iter_mut() {
            let next_offset = offset + elem.len();
            elem.copy_from_slice(&slice[offset..next_offset]);
            offset = next_offset;
        }
    }
}

#[cfg(feature = "vm-memory")]
impl<'a> IoVectorMut<'a> {
    /// Convert a `VolatileSlice` array (from vm-memory) into an `IoVectorMut`.
    ///
    /// In addition to the vector, return a guard that ensures that the memory in `slices` is
    /// indeed mapped while in use.  This guard must not be dropped while this vector is in use!
    pub fn from_volatile_slice<
        B: vm_memory::bitmap::BitmapSlice,
        I: IntoIterator<
            Item: ImagoAsRef<'a, vm_memory::VolatileSlice<'a, B>>,
            IntoIter: ExactSizeIterator,
        >,
    >(
        slices: I,
    ) -> (
        Self,
        VolatileSliceGuard<'a, vm_memory::volatile_memory::PtrGuardMut, B>,
    ) {
        let slices = slices.into_iter();
        let slice_count = slices.len();
        let mut ptr_guards = Vec::with_capacity(slice_count);
        let mut dirty_on_drop = Vec::with_capacity(slice_count);

        for slice in slices {
            let slice = slice.as_ref();
            ptr_guards.push(slice.ptr_guard_mut());
            // `IoVectorMut` is mutable, so we can assume it will all be written
            dirty_on_drop.push((slice.bitmap(), slice.len()));
        }

        let buffers = ptr_guards
            .iter()
            .map(|pg| {
                let slice = if pg.len() == 0 {
                    &mut []
                } else {
                    // Safe because this whole module basically exists to follow the same design concepts
                    // as `VolatileSlice`.
                    unsafe { std::slice::from_raw_parts_mut(pg.as_ptr(), pg.len()) }
                };
                IoSliceMut::new(slice)
            })
            .collect::<Vec<_>>();

        let vector = IoVectorMut::from(buffers);
        let guard = VolatileSliceGuard {
            _ptr_guards: ptr_guards,
            dirty_on_drop: Some(dirty_on_drop),
        };

        (vector, guard)
    }
}

impl<'a> From<&'a Vec<u8>> for IoVector<'a> {
    fn from(vec: &'a Vec<u8>) -> Self {
        vec.as_slice().into()
    }
}

impl<'a> From<&'a IoBuffer> for IoVector<'a> {
    fn from(buf: &'a IoBuffer) -> Self {
        buf.as_ref().into_slice().into()
    }
}

impl<'a> From<&'a mut Vec<u8>> for IoVectorMut<'a> {
    fn from(vec: &'a mut Vec<u8>) -> Self {
        vec.as_mut_slice().into()
    }
}

impl<'a> From<&'a mut IoBuffer> for IoVectorMut<'a> {
    fn from(buf: &'a mut IoBuffer) -> Self {
        buf.as_mut().into_slice().into()
    }
}

/// Ensures an I/O vector’s validity when created from `[VolatileSlice]`.
///
/// `[VolatileSlice]` arrays may require being explicitly mapped before use (and unmapped after),
/// and this guard ensures that the memory is mapped until it is dropped.
///
/// Further, for mutable vectors ([`IoVectorMut`]), it will also dirty the corresponding bitmap
/// slices when dropped, assuming the whole vector has been written.
#[cfg(feature = "vm-memory")]
pub struct VolatileSliceGuard<'a, PtrGuardType, BitmapType: vm_memory::bitmap::Bitmap> {
    /// vm-memory’s pointer guards ensuring the memory remains mapped while used.
    _ptr_guards: Vec<PtrGuardType>,

    /// If given, mark the given dirty bitmap range as dirty when dropping this guard.
    ///
    /// `.1` is the length of the respective `VolatileSlice` (i.e. the length of the area to
    /// dirty).
    dirty_on_drop: Option<Vec<(&'a BitmapType, usize)>>,
}

#[cfg(feature = "vm-memory")]
impl<P, B: vm_memory::bitmap::Bitmap> Drop for VolatileSliceGuard<'_, P, B> {
    fn drop(&mut self) {
        if let Some(dirty_on_drop) = self.dirty_on_drop.take() {
            for (bitmap, len) in dirty_on_drop {
                // Every bitmap is a window into the full bitmap for its specific `VolatileSlice`,
                // so marking the whole thing as dirty is correct.
                bitmap.mark_dirty(0, len);
            }
        }
    }
}

#[cfg(all(test, feature = "vm-memory"))]
mod vm_memory_test {
    use crate::io_buffers::{IoVector, IoVectorMut};
    use vm_memory::bitmap::BitmapSlice;
    use vm_memory::VolatileSlice;

    pub fn do_test_volatile_slice_owned<B: BitmapSlice>(slices: &[VolatileSlice<B>]) {
        {
            let _vec = IoVector::from_volatile_slice(slices);
        }
        {
            let _vec = IoVectorMut::from_volatile_slice(slices);
        }
    }

    #[test]
    fn test_volatile_slice_owned() {
        let empty: Vec<VolatileSlice<()>> = Vec::new();
        do_test_volatile_slice_owned(&empty);
    }

    pub fn do_test_volatile_slice_ref<B: BitmapSlice>(slices: &[&VolatileSlice<B>]) {
        {
            let _vec = IoVector::from_volatile_slice(slices);
        }
        {
            let _vec = IoVectorMut::from_volatile_slice(slices);
        }
    }

    #[test]
    fn test_volatile_slice_ref() {
        let empty: Vec<&vm_memory::VolatileSlice<()>> = Vec::new();
        do_test_volatile_slice_ref(&empty);
    }
}
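
// The following test module is an illustrative sketch added alongside this file; it only
// exercises functionality defined above and does not rely on any external feature.
#[cfg(test)]
mod example_tests {
    use super::*;

    #[test]
    fn vector_split_and_copy() {
        let a = [1u8, 2, 3];
        let b = [4u8, 5, 6, 7];

        let mut v = IoVector::new();
        v.push(&a[..]);
        v.push(&b[..]);
        assert_eq!(v.len(), 7);
        assert_eq!(v.buffer_count(), 2);

        // Split in the middle of the second buffer; the head gets 5 bytes, the tail the rest.
        let (head, tail) = v.split_at(5);
        assert_eq!(head.len(), 5);
        assert_eq!(tail.len(), 2);

        let mut flat = vec![0u8; 5];
        head.copy_into_slice(&mut flat);
        assert_eq!(flat, [1, 2, 3, 4, 5]);
    }

    #[test]
    fn vector_mut_fill_and_copy() {
        let mut a = [0u8; 3];
        let mut b = [0u8; 4];

        let mut v = IoVectorMut::new();
        v.push(&mut a[..]);
        v.push(&mut b[..]);

        // `fill()` writes the pattern everywhere; `copy_from_slice()` then overwrites it.
        v.fill(0xaa);
        v.copy_from_slice(&[1, 2, 3, 4, 5, 6, 7]);
        drop(v);

        assert_eq!(a, [1, 2, 3]);
        assert_eq!(b, [4, 5, 6, 7]);
    }
}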