@@ -2199,7 +2199,8 @@ impl<T> SpecFromIter<T, IntoIter<T>> for Vec<T> {
         // But it is a conservative choice.
         let has_advanced = iterator.buf.as_ptr() as *const _ != iterator.ptr;
         if !has_advanced || iterator.len() >= iterator.cap / 2 {
-            return iterator.into_vec();
+            // SAFETY: passing 0 is always valid
+            return unsafe { iterator.into_vec(0) };
         }

         let mut vec = Vec::new();
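The new `into_vec(0)` call keeps the old fast path: the existing allocation is reused, rather than copied into a fresh Vec, when the iterator has not advanced or at least half its buffer is still occupied. A minimal standalone sketch of that decision rule (illustrative code, not the std internals):

    fn should_reuse_allocation(has_advanced: bool, remaining: usize, cap: usize) -> bool {
        // Reuse is conservative: keep the buffer only if the iterator never advanced,
        // or if at least half of its capacity still holds live elements.
        !has_advanced || remaining >= cap / 2
    }

    fn main() {
        assert!(should_reuse_allocation(false, 0, 16)); // untouched iterator: always reuse
        assert!(should_reuse_allocation(true, 8, 16)); // half occupied: reuse
        assert!(!should_reuse_allocation(true, 3, 16)); // mostly consumed: copy instead
    }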
@@ -2384,8 +2385,20 @@ where

 impl<T> SpecExtend<T, IntoIter<T>> for Vec<T> {
     fn spec_extend(&mut self, iterator: IntoIter<T>) {
-        if mem::size_of::<T>() > 0 && self.len == 0 && self.capacity() < iterator.len() {
-            *self = iterator.into_vec();
+        // Avoid reallocating if we can use the iterator's storage instead. This takes
+        // 1 memcpy and 0-1 memmoves; reallocating would take 1 alloc, 1-2 memcpys and 1-2 frees.
+        if mem::size_of::<T>() > 0
+            && self.capacity() - self.len() < iterator.len()
+            && iterator.cap - iterator.len() >= self.len()
+        {
+            // SAFETY: we just checked that the IntoIter has sufficient spare capacity to
+            // prepend our elements; prepending will then fill the uninitialized prefix.
+            *self = unsafe {
+                let mut v = iterator.into_vec(self.len() as isize);
+                ptr::copy_nonoverlapping(self.as_ptr(), v.as_mut_ptr(), self.len);
+                self.set_len(0);
+                v
+            };
             return;
         }
         iterator.move_to(self);
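This branch targets the case where `self` would have to reallocate anyway, but the iterator's buffer has enough spare room to hold `self`'s elements in front of its own. A safe-code model of the resulting prepend (a hypothetical helper; it pays an extra allocation that the real code avoids by writing directly into the IntoIter's existing buffer):

    fn prepend_into<T>(head: &mut Vec<T>, mut tail: Vec<T>) -> Vec<T> {
        // std performs this with one memmove plus one memcpy inside `tail`'s buffer;
        // this model allocates fresh storage purely for clarity.
        let mut v = Vec::with_capacity(head.len() + tail.len());
        v.append(head); // empties `head`, mirroring the `set_len(0)` in the diff
        v.append(&mut tail);
        v
    }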
@@ -2922,14 +2935,25 @@ impl<T> IntoIter<T> {
         self.end = self.buf.as_ptr();
     }

-    /// Shifts the remaining elements to the front and then converts the whole allocation to a Vec
-    fn into_vec(self) -> Vec<T> {
-        if self.ptr != self.buf.as_ptr() as *const _ {
-            unsafe { ptr::copy(self.ptr, self.buf.as_ptr(), self.len()) }
+    /// Shifts the remaining elements to `offset` and then converts the whole allocation
+    /// into a Vec with `vec.len() == offset + self.len()`.
+    ///
+    /// # Safety
+    ///
+    /// When a non-zero offset is passed, the resulting Vec will have an uninitialized
+    /// prefix that needs to be filled before the Vec is valid again.
+    ///
+    /// * `offset + self.len()` must not exceed `self.cap`
+    /// * `offset == 0` is always valid
+    /// * `offset` must be non-negative
+    unsafe fn into_vec(self, offset: isize) -> Vec<T> {
+        let dst = unsafe { self.buf.as_ptr().offset(offset) };
+        if self.ptr != dst as *const _ {
+            unsafe { ptr::copy(self.ptr, dst, self.len()) }
         }

         let iter = ManuallyDrop::new(self);
-        unsafe { Vec::from_raw_parts(iter.buf.as_ptr(), iter.len(), iter.cap) }
+        unsafe { Vec::from_raw_parts(iter.buf.as_ptr(), offset as usize + iter.len(), iter.cap) }
     }

     fn move_to(mut self, dest: &mut Vec<T>) {
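The offset parameter generalizes the old shift-to-front behavior: the remaining elements are moved so they start at index `offset`, and the returned Vec's length covers the still-uninitialized prefix plus those elements. A safe-code illustration of that length/layout contract, using a zero-filled Vec as a stand-in for the uninitialized prefix:

    fn main() {
        let remaining = vec![10, 20, 30]; // plays the role of the iterator's leftover elements
        let offset = 2;
        let mut v = vec![0; offset]; // stand-in for the uninitialized prefix
        v.extend(remaining);
        assert_eq!(v.len(), offset + 3); // matches `vec.len() == offset + self.len()`
        assert_eq!(&v[offset..], &[10, 20, 30]);
    }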