// compio_buf/io_buf.rs

1#[cfg(feature = "allocator_api")]
2use std::alloc::Allocator;
3use std::{error::Error, fmt::Display, mem::MaybeUninit, ops::RangeBounds, rc::Rc, sync::Arc};
4
5use crate::*;
6
7/// A trait for immutable buffers.
8///
9/// The `IoBuf` trait is implemented by buffer types that can be passed to
10/// immutable completion-based IO operations, like writing its content to a
11/// file. This trait will only take initialized bytes of a buffer into account.
12pub trait IoBuf: 'static {
13    /// Get the slice of initialized bytes.
14    fn as_init(&self) -> &[u8];
15
16    /// Length of initialized bytes in the buffer.
17    fn buf_len(&self) -> usize {
18        self.as_init().len()
19    }
20
21    /// Raw pointer to the buffer.
22    fn buf_ptr(&self) -> *const u8 {
23        self.as_init().as_ptr()
24    }
25
26    /// Check if the buffer is empty.
27    fn is_empty(&self) -> bool {
28        self.buf_len() == 0
29    }
30
31    /// Returns a view of the buffer with the specified range.
32    ///
33    /// This method is similar to Rust's slicing (`&buf[..]`), but takes
34    /// ownership of the buffer.
35    ///
36    /// # Examples
37    ///
38    /// ```
39    /// use compio_buf::IoBuf;
40    ///
41    /// let buf = b"hello world";
42    /// assert_eq!(buf.slice(6..).as_init(), b"world");
43    /// ```
44    ///
45    /// # Panics
46    /// Panics if:
47    /// * begin > buf_len()
48    /// * end < begin
49    fn slice(self, range: impl std::ops::RangeBounds<usize>) -> Slice<Self>
50    where
51        Self: Sized,
52    {
53        use std::ops::Bound;
54
55        let begin = match range.start_bound() {
56            Bound::Included(&n) => n,
57            Bound::Excluded(&n) => n + 1,
58            Bound::Unbounded => 0,
59        };
60
61        let end = match range.end_bound() {
62            Bound::Included(&n) => Some(n.checked_add(1).expect("out of range")),
63            Bound::Excluded(&n) => Some(n),
64            Bound::Unbounded => None,
65        };
66
67        assert!(begin <= self.buf_len());
68
69        if let Some(end) = end {
70            assert!(begin <= end);
71        }
72
73        // SAFETY: begin <= self.buf_len()
74        unsafe { Slice::new(self, begin, end) }
75    }
76
77    /// Create a [`Reader`] from this buffer, which implements
78    /// [`std::io::Read`].
79    fn into_reader(self) -> Reader<Self>
80    where
81        Self: Sized,
82    {
83        Reader::new(self)
84    }
85
86    /// Create a [`ReaderRef`] from a reference of the buffer, which
87    /// implements [`std::io::Read`].
88    fn as_reader(&self) -> ReaderRef<'_, Self> {
89        ReaderRef::new(self)
90    }
91}
92
93impl<B: IoBuf + ?Sized> IoBuf for &'static B {
94    fn as_init(&self) -> &[u8] {
95        (**self).as_init()
96    }
97}
98
99impl<B: IoBuf + ?Sized> IoBuf for &'static mut B {
100    fn as_init(&self) -> &[u8] {
101        (**self).as_init()
102    }
103}
104
105impl<B: IoBuf + ?Sized, #[cfg(feature = "allocator_api")] A: Allocator + 'static> IoBuf
106    for t_alloc!(Box, B, A)
107{
108    fn as_init(&self) -> &[u8] {
109        (**self).as_init()
110    }
111}
112
113impl<B: IoBuf + ?Sized, #[cfg(feature = "allocator_api")] A: Allocator + 'static> IoBuf
114    for t_alloc!(Rc, B, A)
115{
116    fn as_init(&self) -> &[u8] {
117        (**self).as_init()
118    }
119}
120
121impl IoBuf for [u8] {
122    fn as_init(&self) -> &[u8] {
123        self
124    }
125}
126
127impl<const N: usize> IoBuf for [u8; N] {
128    fn as_init(&self) -> &[u8] {
129        self
130    }
131}
132
133impl<#[cfg(feature = "allocator_api")] A: Allocator + 'static> IoBuf for t_alloc!(Vec, u8, A) {
134    fn as_init(&self) -> &[u8] {
135        self
136    }
137}
138
139impl IoBuf for str {
140    fn as_init(&self) -> &[u8] {
141        self.as_bytes()
142    }
143}
144
145impl IoBuf for String {
146    fn as_init(&self) -> &[u8] {
147        self.as_bytes()
148    }
149}
150
151impl<B: IoBuf + ?Sized, #[cfg(feature = "allocator_api")] A: Allocator + 'static> IoBuf
152    for t_alloc!(Arc, B, A)
153{
154    fn as_init(&self) -> &[u8] {
155        (**self).as_init()
156    }
157}
158
#[cfg(feature = "bytes")]
impl IoBuf for bytes::Bytes {
    fn as_init(&self) -> &[u8] {
        // Bytes derefs to its initialized byte contents.
        &self[..]
    }
}
165
#[cfg(feature = "bytes")]
impl IoBuf for bytes::BytesMut {
    fn as_init(&self) -> &[u8] {
        // BytesMut derefs to its initialized `len` prefix.
        &self[..]
    }
}
172
#[cfg(feature = "read_buf")]
impl IoBuf for std::io::BorrowedBuf<'static> {
    fn as_init(&self) -> &[u8] {
        // The filled portion is exactly the initialized-and-written prefix.
        self.filled()
    }
}
179
#[cfg(feature = "arrayvec")]
impl<const N: usize> IoBuf for arrayvec::ArrayVec<u8, N> {
    fn as_init(&self) -> &[u8] {
        // Only the `len` prefix of an ArrayVec is initialized.
        self.as_slice()
    }
}
186
#[cfg(feature = "smallvec")]
impl<const N: usize> IoBuf for smallvec::SmallVec<[u8; N]>
where
    [u8; N]: smallvec::Array<Item = u8>,
{
    fn as_init(&self) -> &[u8] {
        // Only the `len` prefix of a SmallVec is initialized.
        self.as_slice()
    }
}
196
#[cfg(feature = "memmap2")]
impl IoBuf for memmap2::Mmap {
    fn as_init(&self) -> &[u8] {
        // Mmap derefs to the entire mapped byte range.
        &self[..]
    }
}
203
#[cfg(feature = "memmap2")]
impl IoBuf for memmap2::MmapMut {
    fn as_init(&self) -> &[u8] {
        // MmapMut derefs to the entire mapped byte range.
        &self[..]
    }
}
210
/// An error indicating that reserving capacity for a buffer failed.
#[must_use]
#[derive(Debug)]
pub enum ReserveError {
    /// Reservation is not supported.
    NotSupported,

    /// Reservation failed.
    ///
    /// This is usually caused by out-of-memory.
    // Boxed so the enum stays small and any error type can be the source.
    ReserveFailed(Box<dyn Error + Send + Sync>),
}
223
224impl ReserveError {
225    /// Check if the error is [`NotSupported`].
226    ///
227    /// [`NotSupported`]: ReserveError::NotSupported
228    pub fn is_not_supported(&self) -> bool {
229        matches!(self, ReserveError::NotSupported)
230    }
231}
232
233impl Display for ReserveError {
234    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
235        match self {
236            ReserveError::NotSupported => write!(f, "reservation is not supported"),
237            ReserveError::ReserveFailed(src) => write!(f, "reservation failed: {src}"),
238        }
239    }
240}
241
242impl Error for ReserveError {
243    fn source(&self) -> Option<&(dyn Error + 'static)> {
244        match self {
245            ReserveError::ReserveFailed(src) => Some(src.as_ref()),
246            _ => None,
247        }
248    }
249}
250
251impl From<ReserveError> for std::io::Error {
252    fn from(value: ReserveError) -> Self {
253        match value {
254            ReserveError::NotSupported => {
255                std::io::Error::new(std::io::ErrorKind::Unsupported, "reservation not supported")
256            }
257            ReserveError::ReserveFailed(src) => {
258                std::io::Error::new(std::io::ErrorKind::OutOfMemory, src)
259            }
260        }
261    }
262}
263
/// An error indicating that reserving exact capacity for a buffer failed.
#[must_use]
#[derive(Debug)]
pub enum ReserveExactError {
    /// Reservation is not supported.
    NotSupported,

    /// Reservation failed.
    ///
    /// This is usually caused by out-of-memory.
    // Boxed so the enum stays small and any error type can be the source.
    ReserveFailed(Box<dyn Error + Send + Sync>),

    /// Reserved size does not match the expected size.
    ExactSizeMismatch {
        /// Expected size to reserve
        expected: usize,

        /// Actual size reserved
        reserved: usize,
    },
}
285
286impl ReserveExactError {
287    /// Check if the error is [`NotSupported`]
288    ///
289    /// [`NotSupported`]: ReserveExactError::NotSupported
290    pub fn is_not_supported(&self) -> bool {
291        matches!(self, ReserveExactError::NotSupported)
292    }
293}
294
295impl Display for ReserveExactError {
296    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
297        match self {
298            ReserveExactError::NotSupported => write!(f, "reservation is not supported"),
299            ReserveExactError::ReserveFailed(src) => write!(f, "reservation failed: {src}"),
300            ReserveExactError::ExactSizeMismatch { reserved, expected } => {
301                write!(
302                    f,
303                    "reserved size mismatch: expected {}, reserved {}",
304                    expected, reserved
305                )
306            }
307        }
308    }
309}
310
311impl From<ReserveError> for ReserveExactError {
312    fn from(err: ReserveError) -> Self {
313        match err {
314            ReserveError::NotSupported => ReserveExactError::NotSupported,
315            ReserveError::ReserveFailed(src) => ReserveExactError::ReserveFailed(src),
316        }
317    }
318}
319
320impl Error for ReserveExactError {
321    fn source(&self) -> Option<&(dyn Error + 'static)> {
322        match self {
323            ReserveExactError::ReserveFailed(src) => Some(src.as_ref()),
324            _ => None,
325        }
326    }
327}
328
329impl From<ReserveExactError> for std::io::Error {
330    fn from(value: ReserveExactError) -> Self {
331        match value {
332            ReserveExactError::NotSupported => {
333                std::io::Error::new(std::io::ErrorKind::Unsupported, "reservation not supported")
334            }
335            ReserveExactError::ReserveFailed(src) => {
336                std::io::Error::new(std::io::ErrorKind::OutOfMemory, src)
337            }
338            ReserveExactError::ExactSizeMismatch { expected, reserved } => std::io::Error::other(
339                format!("reserved size mismatch: expected {expected}, reserved {reserved}",),
340            ),
341        }
342    }
343}
344
#[cfg(feature = "smallvec")]
mod smallvec_err {
    use std::{error::Error, fmt::Display};

    use smallvec::CollectionAllocErr;

    /// Newtype wrapper: `CollectionAllocErr` does not implement
    /// [`std::error::Error`], so wrap it to box it as `dyn Error`.
    #[derive(Debug)]
    pub(super) struct SmallVecErr(pub CollectionAllocErr);

    impl Display for SmallVecErr {
        fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
            // Same output as `write!(f, "SmallVec allocation error: {}", self.0)`.
            f.write_str("SmallVec allocation error: ")?;
            Display::fmt(&self.0, f)
        }
    }

    impl Error for SmallVecErr {}
}
362
/// A trait for mutable buffers.
///
/// The `IoBufMut` trait is implemented by buffer types that can be passed to
/// mutable completion-based IO operations, like reading content from a file and
/// write to the buffer. This trait will take all space of a buffer into
/// account, including uninitialized bytes.
pub trait IoBufMut: IoBuf + SetLen {
    /// Get the full mutable slice of the buffer, including both initialized
    /// and uninitialized bytes.
    fn as_uninit(&mut self) -> &mut [MaybeUninit<u8>];

    /// Total capacity of the buffer, including both initialized and
    /// uninitialized bytes.
    fn buf_capacity(&mut self) -> usize {
        self.as_uninit().len()
    }

    /// Get the raw mutable pointer to the buffer.
    fn buf_mut_ptr(&mut self) -> *mut MaybeUninit<u8> {
        self.as_uninit().as_mut_ptr()
    }

    /// Get the mutable slice of initialized bytes. The content is the same as
    /// [`IoBuf::as_init`], but mutable.
    fn as_mut_slice(&mut self) -> &mut [u8] {
        let len = (*self).buf_len();
        let ptr = (*self).buf_mut_ptr();
        // SAFETY:
        // - lifetime of the returned slice is bounded by &mut self
        // - bytes within `len` are guaranteed to be initialized
        // - the pointer is derived from a unique `&mut self` borrow, so no
        //   other live reference can alias the returned slice
        unsafe { std::slice::from_raw_parts_mut(ptr as *mut u8, len) }
    }

    /// Extend the buffer by copying bytes from `src`.
    ///
    /// The buffer will reserve additional capacity if necessary, and return an
    /// error when reservation failed.
    ///
    /// Notice that this may move the memory of the buffer, so it's UB to
    /// call this after the buffer is being pinned.
    // FIXME: Change to `slice::write_copy_of_slice` when stabilized
    fn extend_from_slice(&mut self, src: &[u8]) -> Result<(), ReserveError> {
        let len = src.len();
        let init = (*self).buf_len();
        self.reserve(len)?;
        // First byte past the initialized prefix, i.e. the write destination.
        let ptr = self.buf_mut_ptr().wrapping_add(init);

        unsafe {
            // SAFETY:
            // - we have reserved enough capacity so the ptr and len stays in one allocation
            // - src is valid for len bytes
            // - ptr is valid for len bytes
            // - &mut self guarantees that src cannot overlap with dst
            std::ptr::copy_nonoverlapping(src.as_ptr() as _, ptr, len);

            // SAFETY: the bytes in range [init, init + len) are initialized now
            self.advance_to(init + len);
        }

        Ok(())
    }

    /// Like [`slice::copy_within`], copy a range of bytes within the buffer to
    /// another location in the same buffer. This will count in both initialized
    /// and uninitialized bytes.
    ///
    /// # Panics
    ///
    /// This method will panic if the source or destination range is out of
    /// bounds.
    ///
    /// [`slice::copy_within`]: https://doc.rust-lang.org/std/primitive.slice.html#method.copy_within
    fn copy_within<R>(&mut self, src: R, dest: usize)
    where
        R: RangeBounds<usize>,
    {
        // Operates on the whole (init + uninit) region via `as_uninit`.
        self.as_uninit().copy_within(src, dest);
    }

    /// Reserve additional capacity for the buffer.
    ///
    /// By default, this checks if the spare capacity is enough to fit in
    /// `len`-bytes. If it does, returns `Ok(())`, and otherwise returns
    /// [`Err(ReserveError::NotSupported)`]. Types that support dynamic
    /// resizing (like `Vec<u8>`) will override this method to actually
    /// reserve capacity. The return value indicates whether the reservation
    /// succeeded. See [`ReserveError`] for details.
    ///
    /// Notice that this may move the memory of the buffer, so it's UB to
    /// call this after the buffer is being pinned.
    ///
    /// [`Err(ReserveError::NotSupported)`]: ReserveError::NotSupported
    fn reserve(&mut self, len: usize) -> Result<(), ReserveError> {
        let init = (*self).buf_len();
        // Spare capacity = total capacity minus the initialized prefix.
        if len <= self.buf_capacity() - init {
            return Ok(());
        }
        Err(ReserveError::NotSupported)
    }

    /// Reserve exactly `len` additional capacity for the buffer.
    ///
    /// By default this falls back to [`IoBufMut::reserve`]. Types that support
    /// dynamic resizing (like `Vec<u8>`) will override this method to
    /// actually reserve capacity. The return value indicates whether the
    /// exact reservation succeeded. See [`ReserveExactError`] for details.
    ///
    /// Notice that this may move the memory of the buffer, so it's UB to
    /// call this after the buffer is being pinned.
    fn reserve_exact(&mut self, len: usize) -> Result<(), ReserveExactError> {
        self.reserve(len)?;
        Ok(())
    }

    /// Returns an [`Uninit`], which is a [`Slice`] that only exposes
    /// uninitialized bytes.
    ///
    /// It will always point to the uninitialized area of a [`IoBufMut`] even
    /// after reading in some bytes, which is done by [`SetLen`]. This
    /// is useful for writing data into buffer without overwriting any
    /// existing bytes.
    ///
    /// # Examples
    ///
    /// ```
    /// use compio_buf::{IoBuf, IoBufMut};
    ///
    /// let mut buf = Vec::from(b"hello world");
    /// buf.reserve_exact(10);
    /// let mut slice = buf.uninit();
    ///
    /// assert_eq!(slice.as_init(), b"");
    /// assert_eq!(slice.buf_capacity(), 10);
    /// ```
    fn uninit(self) -> Uninit<Self>
    where
        Self: Sized,
    {
        Uninit::new(self)
    }

    /// Create a [`Writer`] from this buffer, which implements
    /// [`std::io::Write`].
    fn into_writer(self) -> Writer<Self>
    where
        Self: Sized,
    {
        Writer::new(self)
    }

    /// Create a [`Writer`] from a mutable reference of the buffer, which
    /// implements [`std::io::Write`].
    fn as_writer(&mut self) -> WriterRef<'_, Self> {
        WriterRef::new(self)
    }

    /// Indicate whether the buffer has been filled (uninit portion is empty)
    fn is_filled(&mut self) -> bool {
        let len = (*self).as_init().len();
        let cap = (*self).buf_capacity();
        len == cap
    }
}
527
528impl<B: IoBufMut + ?Sized> IoBufMut for &'static mut B {
529    fn as_uninit(&mut self) -> &mut [MaybeUninit<u8>] {
530        (**self).as_uninit()
531    }
532
533    fn reserve(&mut self, len: usize) -> Result<(), ReserveError> {
534        (**self).reserve(len)
535    }
536
537    fn reserve_exact(&mut self, len: usize) -> Result<(), ReserveExactError> {
538        (**self).reserve_exact(len)
539    }
540}
541
542impl<B: IoBufMut + ?Sized, #[cfg(feature = "allocator_api")] A: Allocator + 'static> IoBufMut
543    for t_alloc!(Box, B, A)
544{
545    fn as_uninit(&mut self) -> &mut [MaybeUninit<u8>] {
546        (**self).as_uninit()
547    }
548
549    fn reserve(&mut self, len: usize) -> Result<(), ReserveError> {
550        (**self).reserve(len)
551    }
552
553    fn reserve_exact(&mut self, len: usize) -> Result<(), ReserveExactError> {
554        (**self).reserve_exact(len)
555    }
556}
557
558impl<#[cfg(feature = "allocator_api")] A: Allocator + 'static> IoBufMut for t_alloc!(Vec, u8, A) {
559    fn as_uninit(&mut self) -> &mut [MaybeUninit<u8>] {
560        let ptr = self.as_mut_ptr() as *mut MaybeUninit<u8>;
561        let cap = self.capacity();
562        // SAFETY: Vec guarantees that the pointer is valid for `capacity` bytes
563        unsafe { std::slice::from_raw_parts_mut(ptr, cap) }
564    }
565
566    fn reserve(&mut self, len: usize) -> Result<(), ReserveError> {
567        if let Err(e) = Vec::try_reserve(self, len) {
568            return Err(ReserveError::ReserveFailed(Box::new(e)));
569        }
570
571        Ok(())
572    }
573
574    fn reserve_exact(&mut self, len: usize) -> Result<(), ReserveExactError> {
575        if self.capacity() - self.len() >= len {
576            return Ok(());
577        }
578
579        if let Err(e) = Vec::try_reserve_exact(self, len) {
580            return Err(ReserveExactError::ReserveFailed(Box::new(e)));
581        }
582
583        if self.capacity() - self.len() != len {
584            return Err(ReserveExactError::ExactSizeMismatch {
585                reserved: self.capacity() - self.len(),
586                expected: len,
587            });
588        }
589        Ok(())
590    }
591}
592
593impl IoBufMut for [u8] {
594    fn as_uninit(&mut self) -> &mut [MaybeUninit<u8>] {
595        let ptr = self.as_mut_ptr() as *mut MaybeUninit<u8>;
596        let len = self.len();
597        // SAFETY: slice is fully initialized, so treating it as MaybeUninit is safe
598        unsafe { std::slice::from_raw_parts_mut(ptr, len) }
599    }
600}
601
602impl<const N: usize> IoBufMut for [u8; N] {
603    fn as_uninit(&mut self) -> &mut [MaybeUninit<u8>] {
604        let ptr = self.as_mut_ptr() as *mut MaybeUninit<u8>;
605        // SAFETY: array is fully initialized, so treating it as MaybeUninit is safe
606        unsafe { std::slice::from_raw_parts_mut(ptr, N) }
607    }
608}
609
#[cfg(feature = "bytes")]
impl IoBufMut for bytes::BytesMut {
    fn as_uninit(&mut self) -> &mut [MaybeUninit<u8>] {
        // SAFETY: BytesMut guarantees that the pointer is valid for `capacity` bytes
        unsafe {
            std::slice::from_raw_parts_mut(
                self.as_mut_ptr() as *mut MaybeUninit<u8>,
                self.capacity(),
            )
        }
    }

    fn reserve(&mut self, len: usize) -> Result<(), ReserveError> {
        // BytesMut::reserve is infallible from the caller's perspective.
        bytes::BytesMut::reserve(self, len);
        Ok(())
    }

    fn reserve_exact(&mut self, len: usize) -> Result<(), ReserveExactError> {
        // Existing spare capacity already satisfies the request.
        if self.capacity() - self.len() >= len {
            return Ok(());
        }

        bytes::BytesMut::reserve(self, len);

        // BytesMut may over-allocate; report anything other than an exact fit.
        let reserved = self.capacity() - self.len();
        if reserved == len {
            Ok(())
        } else {
            Err(ReserveExactError::ExactSizeMismatch {
                reserved,
                expected: len,
            })
        }
    }
}
641
#[cfg(feature = "read_buf")]
impl IoBufMut for std::io::BorrowedBuf<'static> {
    fn as_uninit(&mut self) -> &mut [MaybeUninit<u8>] {
        let total_cap = self.capacity();

        // SAFETY: We reconstruct the full buffer from the filled portion pointer.
        // BorrowedBuf guarantees that the underlying buffer has capacity bytes.
        // NOTE(review): the pointer comes from `filled()`, a shared borrow, and
        // is cast to `*mut` — confirm this is sound under Stacked/Tree Borrows
        // (the cursor API exposes no direct mutable full-buffer accessor).
        unsafe {
            let filled_ptr = self.filled().as_ptr() as *mut MaybeUninit<u8>;
            std::slice::from_raw_parts_mut(filled_ptr, total_cap)
        }
    }
}
655
#[cfg(feature = "arrayvec")]
impl<const N: usize> IoBufMut for arrayvec::ArrayVec<u8, N> {
    fn as_uninit(&mut self) -> &mut [MaybeUninit<u8>] {
        let ptr: *mut MaybeUninit<u8> = self.as_mut_ptr().cast();
        // SAFETY: ArrayVec's backing storage is always exactly N bytes.
        unsafe { std::slice::from_raw_parts_mut(ptr, N) }
    }
}
664
#[cfg(feature = "smallvec")]
impl<const N: usize> IoBufMut for smallvec::SmallVec<[u8; N]>
where
    [u8; N]: smallvec::Array<Item = u8>,
{
    fn as_uninit(&mut self) -> &mut [MaybeUninit<u8>] {
        // SAFETY: SmallVec guarantees that the pointer is valid for `capacity` bytes
        unsafe {
            std::slice::from_raw_parts_mut(
                self.as_mut_ptr() as *mut MaybeUninit<u8>,
                self.capacity(),
            )
        }
    }

    fn reserve(&mut self, len: usize) -> Result<(), ReserveError> {
        // CollectionAllocErr is not `Error`, so wrap it in the local adapter.
        smallvec::SmallVec::try_reserve(self, len)
            .map_err(|e| ReserveError::ReserveFailed(Box::new(smallvec_err::SmallVecErr(e))))
    }

    fn reserve_exact(&mut self, len: usize) -> Result<(), ReserveExactError> {
        // Existing spare capacity already satisfies the request.
        if self.capacity() - self.len() >= len {
            return Ok(());
        }

        smallvec::SmallVec::try_reserve_exact(self, len)
            .map_err(|e| ReserveExactError::ReserveFailed(Box::new(smallvec_err::SmallVecErr(e))))?;

        // The allocator may have handed back more than requested; that breaks
        // the "exact" contract and is reported as a mismatch.
        let reserved = self.capacity() - self.len();
        if reserved == len {
            Ok(())
        } else {
            Err(ReserveExactError::ExactSizeMismatch {
                reserved,
                expected: len,
            })
        }
    }
}
706
#[cfg(feature = "memmap2")]
impl IoBufMut for memmap2::MmapMut {
    /// Expose the whole mapping as (possibly uninitialized) bytes.
    fn as_uninit(&mut self) -> &mut [MaybeUninit<u8>] {
        // Go through `DerefMut<Target = [u8]>` explicitly. The previous
        // `self.as_mut_slice()` call appears to resolve to the default
        // `IoBufMut::as_mut_slice` (MmapMut/[u8] seem to have no inherent
        // method of that name), which calls `buf_mut_ptr` -> `as_uninit`
        // again — infinite recursion.
        let bytes: &mut [u8] = &mut *self;
        // SAFETY: `[u8]` and `[MaybeUninit<u8>]` have identical layout, and
        // viewing initialized bytes as possibly-uninitialized is sound.
        unsafe { &mut *(bytes as *mut [u8] as *mut [MaybeUninit<u8>]) }
    }
}
714
/// A helper trait for `set_len` like methods.
pub trait SetLen {
    /// Set the buffer length.
    ///
    /// # Safety
    ///
    /// * `len` must be less or equal than `as_uninit().len()`.
    /// * The bytes in the range `[buf_len(), len)` must be initialized.
    unsafe fn set_len(&mut self, len: usize);

    /// Advance the buffer length by `len`.
    ///
    /// # Safety
    ///
    /// * The bytes in the range `[buf_len(), buf_len() + len)` must be
    ///   initialized.
    unsafe fn advance(&mut self, len: usize)
    where
        Self: IoBuf,
    {
        let current_len = (*self).buf_len();
        // Overflow means the caller's `len` cannot possibly be valid.
        let new_len = current_len.checked_add(len).expect("length overflow");
        unsafe { self.set_len(new_len) };
    }

    /// Set the buffer length to `len`. If `len` is less than the current
    /// length, this operation is a no-op.
    ///
    /// # Safety
    ///
    /// * `len` must be less or equal than `as_uninit().len()`.
    /// * The bytes in the range `[buf_len(), len)` must be initialized.
    unsafe fn advance_to(&mut self, len: usize)
    where
        Self: IoBuf,
    {
        let current_len = (*self).buf_len();
        // Grow-only: never shrink an already-initialized prefix.
        if len > current_len {
            unsafe { self.set_len(len) };
        }
    }

    /// Set the vector buffer's total length to `len`. If `len` is less than the
    /// current total length, this operation is a no-op.
    ///
    /// # Safety
    ///
    /// * `len` must be less or equal than `total_len()`.
    /// * The bytes in the range `[total_len(), len)` must be initialized.
    unsafe fn advance_vec_to(&mut self, len: usize)
    where
        Self: IoVectoredBuf,
    {
        let current_len = (*self).total_len();
        // Grow-only, against the summed length of all buffers.
        if len > current_len {
            unsafe { self.set_len(len) };
        }
    }

    /// Clear the buffer, setting its length to 0 without touching its content
    /// or capacity.
    fn clear(&mut self)
    where
        Self: IoBuf,
    {
        // SAFETY: setting length to 0 is always valid
        unsafe { self.set_len(0) };
    }
}
784
785impl<B: SetLen + ?Sized> SetLen for &'static mut B {
786    unsafe fn set_len(&mut self, len: usize) {
787        unsafe { (**self).set_len(len) }
788    }
789}
790
791impl<B: SetLen + ?Sized, #[cfg(feature = "allocator_api")] A: Allocator + 'static> SetLen
792    for t_alloc!(Box, B, A)
793{
794    unsafe fn set_len(&mut self, len: usize) {
795        unsafe { (**self).set_len(len) }
796    }
797}
798
799impl<#[cfg(feature = "allocator_api")] A: Allocator + 'static> SetLen for t_alloc!(Vec, u8, A) {
800    unsafe fn set_len(&mut self, len: usize) {
801        unsafe { self.set_len(len) };
802    }
803}
804
805impl SetLen for [u8] {
806    unsafe fn set_len(&mut self, len: usize) {
807        debug_assert!(len <= self.len());
808    }
809}
810
811impl<const N: usize> SetLen for [u8; N] {
812    unsafe fn set_len(&mut self, len: usize) {
813        debug_assert!(len <= N);
814    }
815}
816
#[cfg(feature = "bytes")]
impl SetLen for bytes::BytesMut {
    unsafe fn set_len(&mut self, len: usize) {
        // SAFETY: forwarded — delegate to the inherent `BytesMut::set_len`.
        unsafe { bytes::BytesMut::set_len(self, len) };
    }
}
823
#[cfg(feature = "read_buf")]
impl SetLen for std::io::BorrowedBuf<'static> {
    unsafe fn set_len(&mut self, len: usize) {
        debug_assert!(self.capacity() >= len);

        // SAFETY: `len` range is initialized guaranteed by invariant of `set_len`.
        // `clear` resets the filled/init cursors; advancing the unfilled cursor
        // then marks the first `len` bytes as filled again.
        self.clear().unfilled().advance(len);
    }
}
833
#[cfg(feature = "arrayvec")]
impl<const N: usize> SetLen for arrayvec::ArrayVec<u8, N> {
    unsafe fn set_len(&mut self, len: usize) {
        // Grow-only: delegates to the inherent (unsafe) `ArrayVec::set_len`
        // only when `len` exceeds the current initialized length.
        // NOTE(review): shrinking is a no-op here, so `SetLen::clear()` does
        // not reset the length for `ArrayVec` (unlike the `Vec` impl) —
        // confirm this asymmetry is intentional.
        if (**self).buf_len() < len {
            unsafe { self.set_len(len) };
        }
    }
}
842
#[cfg(feature = "smallvec")]
impl<const N: usize> SetLen for smallvec::SmallVec<[u8; N]>
where
    [u8; N]: smallvec::Array<Item = u8>,
{
    unsafe fn set_len(&mut self, len: usize) {
        // Grow-only: delegates to the inherent (unsafe) `SmallVec::set_len`
        // only when `len` exceeds the current initialized length.
        // NOTE(review): shrinking is a no-op here, so `SetLen::clear()` does
        // not reset the length for `SmallVec` (unlike the `Vec` impl) —
        // confirm this asymmetry is intentional.
        if (**self).buf_len() < len {
            unsafe { self.set_len(len) };
        }
    }
}
854
#[cfg(feature = "memmap2")]
impl SetLen for memmap2::MmapMut {
    unsafe fn set_len(&mut self, len: usize) {
        // The mapping length is fixed; only validate the request.
        debug_assert!(self.len() >= len)
    }
}
861
862impl<T: IoBufMut> SetLen for [T] {
863    unsafe fn set_len(&mut self, len: usize) {
864        unsafe { default_set_len(self.iter_mut(), len) }
865    }
866}
867
868impl<T: IoBufMut, const N: usize> SetLen for [T; N] {
869    unsafe fn set_len(&mut self, len: usize) {
870        unsafe { default_set_len(self.iter_mut(), len) }
871    }
872}
873
874impl<T: IoBufMut, #[cfg(feature = "allocator_api")] A: Allocator + 'static> SetLen
875    for t_alloc!(Vec, T, A)
876{
877    unsafe fn set_len(&mut self, len: usize) {
878        unsafe { default_set_len(self.iter_mut(), len) }
879    }
880}
881
#[cfg(feature = "arrayvec")]
impl<T: IoBufMut, const N: usize> SetLen for arrayvec::ArrayVec<T, N> {
    unsafe fn set_len(&mut self, len: usize) {
        // SAFETY: forwarded — distributes `len` over the buffers in order.
        unsafe { default_set_len(self.as_mut_slice(), len) }
    }
}
888
#[cfg(feature = "smallvec")]
impl<T: IoBufMut, const N: usize> SetLen for smallvec::SmallVec<[T; N]>
where
    [T; N]: smallvec::Array<Item = T>,
{
    unsafe fn set_len(&mut self, len: usize) {
        // SAFETY: forwarded — distributes `len` over the buffers in order.
        unsafe { default_set_len(self.as_mut_slice(), len) }
    }
}
898
899/// # Safety
900/// * `len` should be less or equal than the sum of `buf_capacity()` of all
901///   buffers.
902/// * The bytes in the range `[buf_len(), new_len)` of each buffer must be
903///   initialized
904unsafe fn default_set_len<'a, B: IoBufMut>(
905    iter: impl IntoIterator<Item = &'a mut B>,
906    mut len: usize,
907) {
908    let mut iter = iter.into_iter();
909    while len > 0 {
910        let Some(curr) = iter.next() else { return };
911        let sub = (*curr).buf_capacity().min(len);
912        unsafe { curr.set_len(sub) };
913        len -= sub;
914    }
915}
916
#[cfg(test)]
mod test {
    use crate::IoBufMut;

    // Growable buffers: `reserve` gives at least the requested spare
    // capacity, `reserve_exact` exactly the requested amount (also through a
    // Box indirection).
    #[test]
    fn test_vec_reserve() {
        let mut buf = Vec::new();
        IoBufMut::reserve(&mut buf, 10).unwrap();
        assert!(buf.capacity() >= 10);

        let mut buf = Vec::new();
        IoBufMut::reserve_exact(&mut buf, 10).unwrap();
        assert!(buf.capacity() == 10);

        let mut buf = Box::new(Vec::new());
        IoBufMut::reserve_exact(&mut buf, 10).unwrap();
        assert!(buf.capacity() == 10);
    }

    // BytesMut reservation is infallible and may over-allocate.
    #[test]
    #[cfg(feature = "bytes")]
    fn test_bytes_reserve() {
        let mut buf = bytes::BytesMut::new();
        IoBufMut::reserve(&mut buf, 10).unwrap();
        assert!(buf.capacity() >= 10);
    }

    // Reserving beyond the inline capacity (8) forces a heap spill.
    #[test]
    #[cfg(feature = "smallvec")]
    fn test_smallvec_reserve() {
        let mut buf = smallvec::SmallVec::<[u8; 8]>::new();
        IoBufMut::reserve(&mut buf, 10).unwrap();
        assert!(buf.capacity() >= 10);
    }

    // Round-trips file contents through a read-only mapping and checks that
    // `IoBuf::as_init` exposes the whole mapped range.
    #[test]
    #[cfg(feature = "memmap2")]
    fn tests_memmap2() {
        use std::{
            fs::{OpenOptions, remove_file},
            io::{Seek, SeekFrom, Write},
        };

        use memmap2::MmapOptions;

        use super::*;

        let path = std::env::temp_dir().join("compio_buf_mmap_mut_test");

        let mut file = OpenOptions::new()
            .read(true)
            .write(true)
            .create(true)
            .truncate(true)
            .open(&path)
            .unwrap();
        let data = b"hello memmap2";
        file.write_all(data).unwrap();
        file.flush().unwrap();
        file.seek(SeekFrom::Start(0)).unwrap();
        let mmap = unsafe { MmapOptions::new().map(&file).unwrap() };

        let uninit_slice = mmap.as_init();
        assert_eq!(uninit_slice, data);

        remove_file(path).unwrap();
    }

    // Fixed-size buffers cannot grow: the default `reserve` reports
    // `NotSupported` and leaves the capacity untouched.
    #[test]
    fn test_other_reserve() {
        let mut buf = [1, 1, 4, 5, 1, 4];
        let res = IoBufMut::reserve(&mut buf, 10);
        assert!(res.is_err_and(|x| x.is_not_supported()));
        assert!(buf.buf_capacity() == 6);
    }

    // `extend_from_slice` appends into growable buffers and fails with
    // `NotSupported` when the buffer cannot reserve space.
    #[test]
    fn test_extend() {
        let mut buf = Vec::from(b"hello");
        IoBufMut::extend_from_slice(&mut buf, b" world").unwrap();
        assert_eq!(buf.as_slice(), b"hello world");

        let mut buf = [];
        let res = IoBufMut::extend_from_slice(&mut buf, b" ");
        assert!(res.is_err_and(|x| x.is_not_supported()));
    }
}
1003}