diff --git a/library/alloc/src/vec.rs b/library/alloc/src/vec.rs
index b20ccd388d1f1..c97e2bbea321c 100644
--- a/library/alloc/src/vec.rs
+++ b/library/alloc/src/vec.rs
@@ -2199,19 +2199,12 @@ impl<T> SpecFromIter<T, IntoIter<T>> for Vec<T> {
         // But it is a conservative choice.
         let has_advanced = iterator.buf.as_ptr() as *const _ != iterator.ptr;
         if !has_advanced || iterator.len() >= iterator.cap / 2 {
-            unsafe {
-                let it = ManuallyDrop::new(iterator);
-                if has_advanced {
-                    ptr::copy(it.ptr, it.buf.as_ptr(), it.len());
-                }
-                return Vec::from_raw_parts(it.buf.as_ptr(), it.len(), it.cap);
-            }
+            // SAFETY: passing an offset of 0 is always valid; it leaves no uninitialized prefix.
+            return unsafe { iterator.into_vec_with_uninit_prefix(0) };
         }
 
         let mut vec = Vec::new();
-        // must delegate to spec_extend() since extend() itself delegates
-        // to spec_from for empty Vecs
-        vec.spec_extend(iterator);
+        iterator.move_into(&mut vec);
         vec
     }
 }
@@ -2391,11 +2384,62 @@ where
 }
 
 impl<T> SpecExtend<T, IntoIter<T>> for Vec<T> {
-    fn spec_extend(&mut self, mut iterator: IntoIter<T>) {
-        unsafe {
-            self.append_elements(iterator.as_slice() as _);
+    fn spec_extend(&mut self, iterator: IntoIter<T>) {
+        // Avoid reallocation if we can use the iterator's storage instead. This takes at most
+        // 1 memcpy and 1 memmove, while reallocating would take 1 alloc, 1-2 memcpys and 1-2 frees.
+        //
+        // # illustration of some extend scenarios (not exhaustive)
+        //
+        //     == step ==             == memory ==              == self ==         == iter / v ==
+        //                    0123456789abcdef0123456789abcdef
+        //                    0---------------1---------------
+        //
+        // ## non-empty self, partially consumed iterator
+        //
+        //     [initial]      AAAA_-----__BBB___--------------  Vec(0x00, 4, 5)    IntoIter(0x0a, 0x0c, 0x0f, 8)
+        // ³   into_vec       AAAA_-----____BBB_--------------  Vec(0x00, 4, 5)    Vec(0x0a, 7, 8)
+        // ²   prepend        _____-----AAAABBB_--------------  Vec(0x00, 0, 5)    Vec(0x0a, 7, 8)
+        // ⁴   *self = v      ----------AAAABBB_--------------  Vec(0x0a, 7, 8)
+        //
+        // ## empty self, partially consumed iterator
+        //
+        //     [initial]      ___-------__BBBB__--------------  Vec(0x00, 0, 3)    IntoIter(0x0a, 0x0c, 0x10, 8)
+        // ³   into_vec       ___-------BBBB____--------------  Vec(0x00, 0, 3)    Vec(0x0a, 4, 8)
+        // ⁴   *self = v      ----------BBBB____--------------  Vec(0x0a, 4, 8)
+        //
+        // ## empty self, pristine iterator
+        //
+        //     [initial]      ----------BBBB____--------------  Vec(0x00, 0, 0)    IntoIter(0x0a, 0x0a, 0x0e, 8)
+        //     *self = v      ----------BBBB____--------------  Vec(0x0a, 4, 8)
+        //
+        // ## insufficient capacity
+        //
+        //     [initial]      AAAAA-----BBBBBB__--------------  Vec(0x00, 5,  5)   IntoIter(0x0a, 0x0a, 0x10, 8)
+        // ¹²⁴ reserve(6)     ----------BBBBBB__--AAAAA______-  Vec(0x14, 5,  11)  IntoIter(0x0a, 0x0a, 0x10, 8)
+        // ²   ptr::copy_n    ----------________--AAAAABBBBBB-  Vec(0x14, 11, 11)  IntoIter(0x0a, 0x10, 0x10, 8)
+        // ⁴   drop           --------------------AAAAABBBBBB-  Vec(0x14, 11, 11)
+        //
+        //  ¹ malloc
+        //  ² memcpy
+        //  ³ memmove
+        //  ⁴ free
+        //
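+        // ## caller-level sketch (illustrative; allocation reuse is not a guaranteed property)
+        //
+        // Assuming `vec![...]` allocates exactly `len` elements, this corresponds to the first
+        // scenario above:
+        //
+        //     let mut a = vec![1, 2];           // no spare capacity
+        //     let b = vec![0, 0, 3, 4];
+        //     let p = b.as_ptr();
+        //     let mut it = b.into_iter();
+        //     it.next();
+        //     it.next();                        // partially consume the iterator
+        //     a.extend(it);                     // a == [1, 2, 3, 4], and a.as_ptr()
+        //                                       // may now equal p
+        //
+        // The prepend path below is taken when the iterator's spare capacity can hold all of
+        // self's elements, self's own spare capacity cannot already hold the iterator's remaining
+        // elements, and T is not a ZST: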
+        if mem::size_of::<T>() > 0
+            && self.capacity() - self.len() < iterator.len()
+            && iterator.cap - iterator.len() >= self.len()
+        {
+            // SAFETY: we just checked that the IntoIter's spare capacity is large enough to
+            // prepend our elements, i.e. `self.len() + iterator.len() <= iterator.cap`.
+            // The copy below then fills the uninitialized prefix.
+            let v = unsafe {
+                let mut v = iterator.into_vec_with_uninit_prefix(self.len() as isize);
+                ptr::copy_nonoverlapping(self.as_ptr(), v.as_mut_ptr(), self.len);
+                self.set_len(0);
+                v
+            };
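+            // Dropping the old self frees its buffer; its elements were moved out above.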
+            *self = v;
+            return;
         }
-        iterator.ptr = iterator.end;
+        iterator.move_into(self);
     }
 }
 
@@ -2928,6 +2972,39 @@ impl<T> IntoIter<T> {
         self.ptr = self.buf.as_ptr();
         self.end = self.buf.as_ptr();
     }
+
+    /// Shifts the remaining elements so that they start at `offset` and then converts the whole
+    /// allocation into a Vec with `vec.len() == offset + self.len()`.
+    ///
+    /// # Safety
+    ///
+    /// When a non-zero offset is passed, the resulting Vec has an uninitialized prefix of
+    /// `offset` elements that needs to be filled before the Vec is valid again. Conversely,
+    /// passing `offset = 0` results in a Vec that is immediately valid.
+    ///
+    /// * `offset + self.len()` must not exceed `self.cap`
+    /// * `offset` must not be negative
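+    ///
+    /// # Example
+    ///
+    /// A sketch of the intended call pattern (mirroring the use in `spec_extend` above);
+    /// `iter`, `src` and `prefix_len` are placeholder names:
+    ///
+    /// ```ignore (illustrative-sketch-of-a-private-unsafe-helper)
+    /// // make room for `prefix_len` elements in front of the remaining iterator elements ...
+    /// let mut v = unsafe { iter.into_vec_with_uninit_prefix(prefix_len as isize) };
+    /// // ... then fill the uninitialized prefix; only now is `v` fully initialized
+    /// unsafe { ptr::copy_nonoverlapping(src, v.as_mut_ptr(), prefix_len) };
+    /// ```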
+    unsafe fn into_vec_with_uninit_prefix(self, offset: isize) -> Vec<T> {
+        debug_assert!(offset >= 0);
+        debug_assert!(offset as usize + self.len() <= self.cap);
+        let dst = unsafe { self.buf.as_ptr().offset(offset) };
+        if self.ptr != dst as *const _ {
+            // Move the remaining elements to `offset`. Even when `offset == 0` it can still be
+            // necessary to move the data if the iterator was partially consumed.
+            unsafe { ptr::copy(self.ptr, dst, self.len()) }
+        }
+
+        let iter = ManuallyDrop::new(self);
+        unsafe { Vec::from_raw_parts(iter.buf.as_ptr(), offset as usize + iter.len(), iter.cap) }
+    }
+
+    /// Moves the remaining elements to the end of `dest`, leaving this iterator empty.
+    fn move_into(mut self, dest: &mut Vec<T>) {
+        unsafe {
+            dest.append_elements(self.as_slice() as _);
+        }
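+        // The elements were copied into `dest`; skip them in Drop to avoid double drops.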
+        self.ptr = self.end;
+    }
 }
 
 #[stable(feature = "vec_intoiter_as_ref", since = "1.46.0")]