@@ -7,6 +7,7 @@ use core::{cmp, fmt, hash, isize, slice, usize};
 use alloc::{
     borrow::{Borrow, BorrowMut},
     boxed::Box,
+    collections::TryReserveError,
     string::String,
     vec,
     vec::Vec,
@@ -526,6 +527,8 @@ impl BytesMut {
     /// and the original buffer is large enough to fit the requested additional
     /// capacity, then reallocations will never happen.
     ///
+    /// See also [`Self::try_reserve()`].
+    ///
     /// # Examples
     ///
     /// In the following example, a new buffer is allocated.
@@ -564,21 +567,89 @@ impl BytesMut {
     /// Panics if the new capacity overflows `usize`.
     #[inline]
     pub fn reserve(&mut self, additional: usize) {
+        match self.try_reserve(additional) {
+            Err(err) => panic!("failed to reserve: {}", err),
+            Ok(_) => {}
+        }
+    }
+
+    /// Tries to reserve capacity for at least `additional` more bytes to be inserted
+    /// into the given `BytesMut`.
+    ///
+    /// More than `additional` bytes may be reserved in order to avoid frequent
+    /// reallocations. A call to `try_reserve` may result in an allocation.
+    ///
+    /// Before allocating new buffer space, the function will attempt to reclaim
+    /// space in the existing buffer. If the current handle references a view
+    /// into a larger original buffer, and all other handles referencing part
+    /// of the same original buffer have been dropped, then the current view
+    /// can be copied/shifted to the front of the buffer and the handle can take
+    /// ownership of the full buffer, provided that the full buffer is large
+    /// enough to fit the requested additional capacity.
+    ///
+    /// This optimization will only happen if shifting the data from the current
+    /// view to the front of the buffer is not too expensive in terms of the
+    /// (amortized) time required. The precise condition is subject to change;
+    /// as of now, the length of the data being shifted needs to be at least as
+    /// large as the distance that it's shifted by. If the current view is empty
+    /// and the original buffer is large enough to fit the requested additional
+    /// capacity, then reallocations will never happen.
+    ///
+    /// # Errors
+    ///
+    /// If the capacity overflows, or the allocator reports a failure, then an error is returned.
+    ///
+    /// # Examples
+    ///
+    /// In the following example, a new buffer is allocated.
+    ///
+    /// ```
+    /// use bytes::BytesMut;
+    ///
+    /// let mut buf = BytesMut::from(&b"hello"[..]);
+    /// let res = buf.try_reserve(64);
+    /// assert!(res.is_ok());
+    /// assert!(buf.capacity() >= 69);
+    /// ```
+    ///
+    /// In the following example, the existing buffer is reclaimed.
+    ///
+    /// ```
+    /// use bytes::{BytesMut, BufMut};
+    ///
+    /// let mut buf = BytesMut::with_capacity(128);
+    /// buf.put(&[0; 64][..]);
+    ///
+    /// let ptr = buf.as_ptr();
+    /// let other = buf.split();
+    ///
+    /// assert!(buf.is_empty());
+    /// assert_eq!(buf.capacity(), 64);
+    ///
+    /// drop(other);
+    /// let res = buf.try_reserve(128);
+    ///
+    /// assert!(res.is_ok());
+    /// assert_eq!(buf.capacity(), 128);
+    /// assert_eq!(buf.as_ptr(), ptr);
+    /// ```
+    #[inline]
+    pub fn try_reserve(&mut self, additional: usize) -> Result<(), TryReserveError> {
         let len = self.len();
         let rem = self.capacity() - len;

         if additional <= rem {
             // The handle can already store at least `additional` more bytes, so
             // there is no further work needed to be done.
-            return;
+            return Ok(());
         }

-        self.reserve_inner(additional);
+        self.reserve_inner(additional)
     }

-    // In separate function to allow the short-circuits in `reserve` to
+    // In separate function to allow the short-circuits in `try_reserve` to
     // be inline-able. Significantly helps performance.
-    fn reserve_inner(&mut self, additional: usize) {
+    fn reserve_inner(&mut self, additional: usize) -> Result<(), TryReserveError> {
         let len = self.len();
         let kind = self.kind();

@@ -627,15 +698,15 @@ impl BytesMut {
                     // allocate more space!
                     let mut v =
                         ManuallyDrop::new(rebuild_vec(self.ptr.as_ptr(), self.len, self.cap, off));
-                    v.reserve(additional);
+                    v.try_reserve(additional)?;

                     // Update the info
                     self.ptr = vptr(v.as_mut_ptr().add(off));
                     self.len = v.len() - off;
                     self.cap = v.capacity() - off;
                 }

-                return;
+                return Ok(());
             }
         }

@@ -714,21 +785,23 @@ impl BytesMut {
                     // care about in the unused capacity before calling `reserve`.
                     debug_assert!(off + len <= v.capacity());
                     v.set_len(off + len);
-                    v.reserve(new_cap - v.len());
+                    v.try_reserve(new_cap - v.len())?;

                     // Update the info
                     self.ptr = vptr(v.as_mut_ptr().add(off));
                     self.cap = v.capacity() - off;
                 }

-                return;
+                return Ok(());
             } else {
                 new_cap = cmp::max(new_cap, original_capacity);
             }
         }

         // Create a new vector to store the data
-        let mut v = ManuallyDrop::new(Vec::with_capacity(new_cap));
+        let mut v = Vec::new();
+        v.try_reserve(new_cap)?;
+        let mut v = ManuallyDrop::new(v);

         // Copy the bytes
         v.extend_from_slice(self.as_ref());
@@ -743,6 +816,8 @@ impl BytesMut {
         self.ptr = vptr(v.as_mut_ptr());
         self.len = v.len();
         self.cap = v.capacity();
+
+        Ok(())
     }

     /// Appends given bytes to this `BytesMut`.
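
A minimal caller-side sketch of what this patch enables, assuming it is applied: `BytesMut::try_reserve` returns a `TryReserveError` where `reserve` would panic, so allocation failure can be propagated instead of aborting. The helper `append_fallible` below is hypothetical and not part of the crate; it only uses `try_reserve` from this diff plus existing `BytesMut` methods.

```rust
use bytes::BytesMut;
use std::collections::TryReserveError;

// Hypothetical helper: grow `buf` fallibly before copying `data` in,
// so a failed or overflowing allocation surfaces as an error, not a panic.
fn append_fallible(buf: &mut BytesMut, data: &[u8]) -> Result<(), TryReserveError> {
    // With this patch, overflow or allocator failure comes back as `Err(_)`.
    buf.try_reserve(data.len())?;
    // Capacity is now sufficient, so this append will not reallocate.
    buf.extend_from_slice(data);
    Ok(())
}

fn main() {
    let mut buf = BytesMut::with_capacity(8);
    append_fallible(&mut buf, b"hello world").expect("small allocation should succeed");
    assert_eq!(&buf[..], b"hello world");

    // An absurd reservation near `usize::MAX` is rejected gracefully
    // instead of panicking.
    assert!(buf.try_reserve(usize::MAX - buf.capacity()).is_err());
}
```

The same motivation explains the `Vec::new()` + `try_reserve` + `ManuallyDrop::new(v)` sequence in the last hunk: `Vec::with_capacity` aborts on allocation failure, whereas `Vec::try_reserve` lets `reserve_inner` report the failure through its new `Result` return type.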