Add #[track_caller] to allocating methods of Vec & VecDeque #126557

Merged 2 commits on Oct 14, 2024
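
For context: #[track_caller] makes a panic that originates inside an annotated function get reported at the function's call site rather than inside the standard library's own source. A minimal illustrative sketch of the mechanism (not code from this PR; the function name is made up):

#[track_caller]
fn checked_capacity(len: usize, additional: usize) -> usize {
    // Because of #[track_caller], this panic is attributed to the caller's
    // file and line, not to this function body.
    len.checked_add(additional).expect("capacity overflow")
}

fn main() {
    // The panic message points at this line.
    let _ = checked_capacity(usize::MAX, 1);
}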
24 changes: 24 additions & 0 deletions library/alloc/src/collections/vec_deque/mod.rs
@@ -103,6 +103,7 @@ pub struct VecDeque<

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Clone, A: Allocator + Clone> Clone for VecDeque<T, A> {
#[track_caller]
fn clone(&self) -> Self {
let mut deq = Self::with_capacity_in(self.len(), self.allocator().clone());
deq.extend(self.iter().cloned());
@@ -113,6 +114,7 @@ impl<T: Clone, A: Allocator + Clone> Clone for VecDeque<T, A> {
///
/// This method is preferred over simply assigning `source.clone()` to `self`,
/// as it avoids reallocation if possible.
#[track_caller]
fn clone_from(&mut self, source: &Self) {
self.clear();
self.extend(source.iter().cloned());
@@ -570,6 +572,7 @@ impl<T> VecDeque<T> {
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
#[must_use]
#[track_caller]
pub fn with_capacity(capacity: usize) -> VecDeque<T> {
Self::with_capacity_in(capacity, Global)
}
@@ -625,6 +628,7 @@ impl<T, A: Allocator> VecDeque<T, A> {
/// let deque: VecDeque<u32> = VecDeque::with_capacity(10);
/// ```
#[unstable(feature = "allocator_api", issue = "32838")]
#[track_caller]
pub fn with_capacity_in(capacity: usize, alloc: A) -> VecDeque<T, A> {
VecDeque { head: 0, len: 0, buf: RawVec::with_capacity_in(capacity, alloc) }
}
@@ -789,6 +793,7 @@ impl<T, A: Allocator> VecDeque<T, A> {
///
/// [`reserve`]: VecDeque::reserve
#[stable(feature = "rust1", since = "1.0.0")]
#[track_caller]
pub fn reserve_exact(&mut self, additional: usize) {
let new_cap = self.len.checked_add(additional).expect("capacity overflow");
let old_cap = self.capacity();
@@ -818,6 +823,7 @@ impl<T, A: Allocator> VecDeque<T, A> {
/// assert!(buf.capacity() >= 11);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[track_caller]
pub fn reserve(&mut self, additional: usize) {
let new_cap = self.len.checked_add(additional).expect("capacity overflow");
let old_cap = self.capacity();
@@ -949,6 +955,7 @@ impl<T, A: Allocator> VecDeque<T, A> {
/// assert!(buf.capacity() >= 4);
/// ```
#[stable(feature = "deque_extras_15", since = "1.5.0")]
#[track_caller]
pub fn shrink_to_fit(&mut self) {
self.shrink_to(0);
}
@@ -974,6 +981,7 @@ impl<T, A: Allocator> VecDeque<T, A> {
/// assert!(buf.capacity() >= 4);
/// ```
#[stable(feature = "shrink_to", since = "1.56.0")]
#[track_caller]
pub fn shrink_to(&mut self, min_capacity: usize) {
let target_cap = min_capacity.max(self.len);

@@ -1740,6 +1748,7 @@ impl<T, A: Allocator> VecDeque<T, A> {
/// assert_eq!(d.front(), Some(&2));
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[track_caller]
pub fn push_front(&mut self, value: T) {
if self.is_full() {
self.grow();
@@ -1767,6 +1776,7 @@ impl<T, A: Allocator> VecDeque<T, A> {
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_confusables("push", "put", "append")]
#[track_caller]
pub fn push_back(&mut self, value: T) {
if self.is_full() {
self.grow();
@@ -1876,6 +1886,7 @@ impl<T, A: Allocator> VecDeque<T, A> {
/// assert_eq!(vec_deque, &['a', 'd', 'b', 'c']);
/// ```
#[stable(feature = "deque_extras_15", since = "1.5.0")]
#[track_caller]
pub fn insert(&mut self, index: usize, value: T) {
assert!(index <= self.len(), "index out of bounds");
if self.is_full() {
@@ -1979,6 +1990,7 @@ impl<T, A: Allocator> VecDeque<T, A> {
#[inline]
#[must_use = "use `.truncate()` if you don't need the other half"]
#[stable(feature = "split_off", since = "1.4.0")]
#[track_caller]
pub fn split_off(&mut self, at: usize) -> Self
where
A: Clone,
@@ -2045,6 +2057,7 @@ impl<T, A: Allocator> VecDeque<T, A> {
/// ```
#[inline]
#[stable(feature = "append", since = "1.4.0")]
#[track_caller]
pub fn append(&mut self, other: &mut Self) {
if T::IS_ZST {
self.len = self.len.checked_add(other.len).expect("capacity overflow");
@@ -2167,6 +2180,7 @@ impl<T, A: Allocator> VecDeque<T, A> {
// be called in cold paths.
// This may panic or abort
#[inline(never)]
#[track_caller]
fn grow(&mut self) {
// Extend or possibly remove this assertion when valid use-cases for growing the
// buffer without it being full emerge
@@ -2205,6 +2219,7 @@ impl<T, A: Allocator> VecDeque<T, A> {
/// assert_eq!(buf, [5, 10, 101, 102, 103]);
/// ```
#[stable(feature = "vec_resize_with", since = "1.33.0")]
#[track_caller]
pub fn resize_with(&mut self, new_len: usize, generator: impl FnMut() -> T) {
let len = self.len;

@@ -2751,6 +2766,7 @@ impl<T: Clone, A: Allocator> VecDeque<T, A> {
/// assert_eq!(buf, [5, 10, 20, 20, 20]);
/// ```
#[stable(feature = "deque_extras", since = "1.16.0")]
#[track_caller]
pub fn resize(&mut self, new_len: usize, value: T) {
if new_len > self.len() {
let extra = new_len - self.len();
@@ -2870,6 +2886,7 @@ impl<T, A: Allocator> IndexMut<usize> for VecDeque<T, A> {

#[stable(feature = "rust1", since = "1.0.0")]
impl<T> FromIterator<T> for VecDeque<T> {
#[track_caller]
fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> VecDeque<T> {
SpecFromIter::spec_from_iter(iter.into_iter())
}
@@ -2909,16 +2926,19 @@ impl<'a, T, A: Allocator> IntoIterator for &'a mut VecDeque<T, A> {

#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> Extend<T> for VecDeque<T, A> {
#[track_caller]
fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
<Self as SpecExtend<T, I::IntoIter>>::spec_extend(self, iter.into_iter());
}

#[inline]
#[track_caller]
fn extend_one(&mut self, elem: T) {
self.push_back(elem);
}

#[inline]
#[track_caller]
fn extend_reserve(&mut self, additional: usize) {
self.reserve(additional);
}
@@ -2934,16 +2954,19 @@ impl<T, A: Allocator> Extend<T> for VecDeque<T, A> {

#[stable(feature = "extend_ref", since = "1.2.0")]
impl<'a, T: 'a + Copy, A: Allocator> Extend<&'a T> for VecDeque<T, A> {
#[track_caller]
fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
self.spec_extend(iter.into_iter());
}

#[inline]
#[track_caller]
fn extend_one(&mut self, &elem: &'a T) {
self.push_back(elem);
}

#[inline]
#[track_caller]
fn extend_reserve(&mut self, additional: usize) {
self.reserve(additional);
}
@@ -3041,6 +3064,7 @@ impl<T, const N: usize> From<[T; N]> for VecDeque<T> {
/// let deq2: VecDeque<_> = [1, 2, 3, 4].into();
/// assert_eq!(deq1, deq2);
/// ```
#[track_caller]
fn from(arr: [T; N]) -> Self {
let mut deq = VecDeque::with_capacity(N);
let arr = ManuallyDrop::new(arr);
6 changes: 6 additions & 0 deletions library/alloc/src/collections/vec_deque/spec_extend.rs
@@ -7,13 +7,15 @@ use crate::vec;

// Specialization trait used for VecDeque::extend
pub(super) trait SpecExtend<T, I> {
#[track_caller]
fn spec_extend(&mut self, iter: I);
}

impl<T, I, A: Allocator> SpecExtend<T, I> for VecDeque<T, A>
where
I: Iterator<Item = T>,
{
#[track_caller]
default fn spec_extend(&mut self, mut iter: I) {
// This function should be the moral equivalent of:
//
@@ -44,6 +46,7 @@ impl<T, I, A: Allocator> SpecExtend<T, I> for VecDeque<T, A>
where
I: TrustedLen<Item = T>,
{
#[track_caller]
default fn spec_extend(&mut self, iter: I) {
// This is the case for a TrustedLen iterator.
let (low, high) = iter.size_hint();
@@ -76,6 +79,7 @@ where
}

impl<T, A: Allocator> SpecExtend<T, vec::IntoIter<T>> for VecDeque<T, A> {
#[track_caller]
fn spec_extend(&mut self, mut iterator: vec::IntoIter<T>) {
let slice = iterator.as_slice();
self.reserve(slice.len());
@@ -93,6 +97,7 @@ where
I: Iterator<Item = &'a T>,
T: Copy,
{
#[track_caller]
default fn spec_extend(&mut self, iterator: I) {
self.spec_extend(iterator.copied())
}
@@ -102,6 +107,7 @@ impl<'a, T: 'a, A: Allocator> SpecExtend<&'a T, slice::Iter<'a, T>> for VecDeque
where
T: Copy,
{
#[track_caller]
fn spec_extend(&mut self, iterator: slice::Iter<'a, T>) {
let slice = iterator.as_slice();
self.reserve(slice.len());
1 change: 1 addition & 0 deletions library/alloc/src/collections/vec_deque/spec_from_iter.rs
@@ -9,6 +9,7 @@ impl<T, I> SpecFromIter<T, I> for VecDeque<T>
where
I: Iterator<Item = T>,
{
#[track_caller]
default fn spec_from_iter(iterator: I) -> Self {
// Since converting is O(1) now, just re-use the `Vec` logic for
// anything where we can't do something extra-special for `VecDeque`,
17 changes: 17 additions & 0 deletions library/alloc/src/raw_vec.rs
@@ -20,6 +20,7 @@ mod tests;
// only one location which panics rather than a bunch throughout the module.
#[cfg(not(no_global_oom_handling))]
#[cfg_attr(not(feature = "panic_immediate_abort"), inline(never))]
#[track_caller]
fn capacity_overflow() -> ! {
panic!("capacity overflow");
}
@@ -125,6 +126,7 @@ impl<T> RawVec<T, Global> {
#[cfg(not(any(no_global_oom_handling, test)))]
#[must_use]
#[inline]
#[track_caller]
pub fn with_capacity(capacity: usize) -> Self {
Self { inner: RawVecInner::with_capacity(capacity, T::LAYOUT), _marker: PhantomData }
}
@@ -133,6 +135,7 @@ impl<T> RawVec<T, Global> {
#[cfg(not(any(no_global_oom_handling, test)))]
#[must_use]
#[inline]
#[track_caller]
pub fn with_capacity_zeroed(capacity: usize) -> Self {
Self {
inner: RawVecInner::with_capacity_zeroed_in(capacity, Global, T::LAYOUT),
@@ -145,6 +148,7 @@ impl RawVecInner<Global> {
#[cfg(not(any(no_global_oom_handling, test)))]
#[must_use]
#[inline]
#[track_caller]
fn with_capacity(capacity: usize, elem_layout: Layout) -> Self {
match Self::try_allocate_in(capacity, AllocInit::Uninitialized, Global, elem_layout) {
Ok(res) => res,
@@ -184,6 +188,7 @@ impl<T, A: Allocator> RawVec<T, A> {
/// allocator for the returned `RawVec`.
#[cfg(not(no_global_oom_handling))]
#[inline]
#[track_caller]
pub fn with_capacity_in(capacity: usize, alloc: A) -> Self {
Self {
inner: RawVecInner::with_capacity_in(capacity, alloc, T::LAYOUT),
@@ -205,6 +210,7 @@ impl<T, A: Allocator> RawVec<T, A> {
/// of allocator for the returned `RawVec`.
#[cfg(not(no_global_oom_handling))]
#[inline]
#[track_caller]
pub fn with_capacity_zeroed_in(capacity: usize, alloc: A) -> Self {
Self {
inner: RawVecInner::with_capacity_zeroed_in(capacity, alloc, T::LAYOUT),
@@ -324,6 +330,7 @@ impl<T, A: Allocator> RawVec<T, A> {
/// Aborts on OOM.
#[cfg(not(no_global_oom_handling))]
#[inline]
#[track_caller]
pub fn reserve(&mut self, len: usize, additional: usize) {
self.inner.reserve(len, additional, T::LAYOUT)
}
@@ -332,6 +339,7 @@ impl<T, A: Allocator> RawVec<T, A> {
/// caller to ensure `len == self.capacity()`.
#[cfg(not(no_global_oom_handling))]
#[inline(never)]
#[track_caller]
pub fn grow_one(&mut self) {
self.inner.grow_one(T::LAYOUT)
}
@@ -359,6 +367,7 @@ impl<T, A: Allocator> RawVec<T, A> {
///
/// Aborts on OOM.
#[cfg(not(no_global_oom_handling))]
#[track_caller]
pub fn reserve_exact(&mut self, len: usize, additional: usize) {
self.inner.reserve_exact(len, additional, T::LAYOUT)
}
@@ -383,6 +392,7 @@ impl<T, A: Allocator> RawVec<T, A> {
///
/// Aborts on OOM.
#[cfg(not(no_global_oom_handling))]
#[track_caller]
#[inline]
pub fn shrink_to_fit(&mut self, cap: usize) {
self.inner.shrink_to_fit(cap, T::LAYOUT)
@@ -408,6 +418,7 @@ impl<A: Allocator> RawVecInner<A> {

#[cfg(not(no_global_oom_handling))]
#[inline]
#[track_caller]
fn with_capacity_in(capacity: usize, alloc: A, elem_layout: Layout) -> Self {
match Self::try_allocate_in(capacity, AllocInit::Uninitialized, alloc, elem_layout) {
Ok(this) => {
@@ -432,6 +443,7 @@ impl<A: Allocator> RawVecInner<A> {

#[cfg(not(no_global_oom_handling))]
#[inline]
#[track_caller]
fn with_capacity_zeroed_in(capacity: usize, alloc: A, elem_layout: Layout) -> Self {
match Self::try_allocate_in(capacity, AllocInit::Zeroed, alloc, elem_layout) {
Ok(res) => res,
@@ -526,6 +538,7 @@ impl<A: Allocator> RawVecInner<A> {

#[cfg(not(no_global_oom_handling))]
#[inline]
#[track_caller]
fn reserve(&mut self, len: usize, additional: usize, elem_layout: Layout) {
// Callers expect this function to be very cheap when there is already sufficient capacity.
// Therefore, we move all the resizing and error-handling logic from grow_amortized and
@@ -550,6 +563,7 @@ impl<A: Allocator> RawVecInner<A> {

#[cfg(not(no_global_oom_handling))]
#[inline]
#[track_caller]
fn grow_one(&mut self, elem_layout: Layout) {
if let Err(err) = self.grow_amortized(self.cap.0, 1, elem_layout) {
handle_error(err);
@@ -573,6 +587,7 @@ impl<A: Allocator> RawVecInner<A> {
}

#[cfg(not(no_global_oom_handling))]
#[track_caller]
fn reserve_exact(&mut self, len: usize, additional: usize, elem_layout: Layout) {
if let Err(err) = self.try_reserve_exact(len, additional, elem_layout) {
handle_error(err);
@@ -597,6 +612,7 @@ impl<A: Allocator> RawVecInner<A> {

#[cfg(not(no_global_oom_handling))]
#[inline]
#[track_caller]
fn shrink_to_fit(&mut self, cap: usize, elem_layout: Layout) {
if let Err(err) = self.shrink(cap, elem_layout) {
handle_error(err);
@@ -770,6 +786,7 @@ where
#[cfg(not(no_global_oom_handling))]
#[cold]
#[optimize(size)]
#[track_caller]
fn handle_error(e: TryReserveError) -> ! {
match e.kind() {
CapacityOverflow => capacity_overflow(),
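
Note on the RawVec/RawVecInner changes above: #[track_caller] only preserves the original call site if every function between the public method and the final panic is annotated, which is why the internal helpers (reserve, grow_one, handle_error, capacity_overflow) carry the attribute as well. A minimal sketch of that propagation, with illustrative names:

use std::panic::Location;

#[track_caller]
fn public_api() {
    // Calling another #[track_caller] function forwards the location of
    // public_api's own caller rather than this line.
    internal_helper();
}

#[track_caller]
fn internal_helper() {
    // Prints the line in main(), because every frame in between is annotated.
    println!("called from {}", Location::caller());
}

fn main() {
    public_api();
}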
1 change: 1 addition & 0 deletions library/alloc/src/vec/cow.rs
@@ -58,6 +58,7 @@ impl<'a, T> FromIterator<T> for Cow<'a, [T]>
where
T: Clone,
{
#[track_caller]
fn from_iter<I: IntoIterator<Item = T>>(it: I) -> Cow<'a, [T]> {
Cow::Owned(FromIterator::from_iter(it))
}
2 changes: 2 additions & 0 deletions library/alloc/src/vec/in_place_collect.rs
@@ -229,6 +229,7 @@ where
I: Iterator<Item = T> + InPlaceCollect,
<I as SourceIter>::Source: AsVecIntoIter,
{
#[track_caller]
default fn from_iter(iterator: I) -> Self {
// Select the implementation in const eval to avoid codegen of the dead branch to improve compile times.
let fun: fn(I) -> Vec<T> = const {
@@ -246,6 +247,7 @@ where
}
}

#[track_caller]
fn from_iter_in_place<I, T>(mut iterator: I) -> Vec<T>
where
I: Iterator<Item = T> + InPlaceCollect,
38 changes: 38 additions & 0 deletions library/alloc/src/vec/mod.rs
@@ -478,6 +478,7 @@ impl<T> Vec<T> {
#[stable(feature = "rust1", since = "1.0.0")]
#[must_use]
#[cfg_attr(not(test), rustc_diagnostic_item = "vec_with_capacity")]
#[track_caller]
pub fn with_capacity(capacity: usize) -> Self {
Self::with_capacity_in(capacity, Global)
}
@@ -797,6 +798,7 @@ impl<T, A: Allocator> Vec<T, A> {
#[cfg(not(no_global_oom_handling))]
#[inline]
#[unstable(feature = "allocator_api", issue = "32838")]
#[track_caller]
pub fn with_capacity_in(capacity: usize, alloc: A) -> Self {
Vec { buf: RawVec::with_capacity_in(capacity, alloc), len: 0 }
}
@@ -1263,6 +1265,7 @@ impl<T, A: Allocator> Vec<T, A> {
/// ```
#[cfg(not(no_global_oom_handling))]
#[stable(feature = "rust1", since = "1.0.0")]
#[track_caller]
pub fn reserve(&mut self, additional: usize) {
self.buf.reserve(self.len, additional);
}
@@ -1293,6 +1296,7 @@ impl<T, A: Allocator> Vec<T, A> {
/// ```
#[cfg(not(no_global_oom_handling))]
#[stable(feature = "rust1", since = "1.0.0")]
#[track_caller]
pub fn reserve_exact(&mut self, additional: usize) {
self.buf.reserve_exact(self.len, additional);
}
@@ -1396,6 +1400,7 @@ impl<T, A: Allocator> Vec<T, A> {
/// ```
#[cfg(not(no_global_oom_handling))]
#[stable(feature = "rust1", since = "1.0.0")]
#[track_caller]
#[inline]
pub fn shrink_to_fit(&mut self) {
// The capacity is never less than the length, and there's nothing to do when
@@ -1426,6 +1431,7 @@ impl<T, A: Allocator> Vec<T, A> {
/// ```
#[cfg(not(no_global_oom_handling))]
#[stable(feature = "shrink_to", since = "1.56.0")]
#[track_caller]
pub fn shrink_to(&mut self, min_capacity: usize) {
if self.capacity() > min_capacity {
self.buf.shrink_to_fit(cmp::max(self.len, min_capacity));
@@ -1459,6 +1465,7 @@ impl<T, A: Allocator> Vec<T, A> {
/// ```
#[cfg(not(no_global_oom_handling))]
#[stable(feature = "rust1", since = "1.0.0")]
#[track_caller]
pub fn into_boxed_slice(mut self) -> Box<[T], A> {
unsafe {
self.shrink_to_fit();
@@ -1860,6 +1867,7 @@ impl<T, A: Allocator> Vec<T, A> {
/// the insertion index is 0.
#[cfg(not(no_global_oom_handling))]
#[stable(feature = "rust1", since = "1.0.0")]
#[track_caller]
pub fn insert(&mut self, index: usize, element: T) {
#[cold]
#[cfg_attr(not(feature = "panic_immediate_abort"), inline(never))]
@@ -2299,6 +2307,7 @@ impl<T, A: Allocator> Vec<T, A> {
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_confusables("push_back", "put", "append")]
#[track_caller]
pub fn push(&mut self, value: T) {
// Inform codegen that the length does not change across grow_one().
let len = self.len;
@@ -2440,6 +2449,7 @@ impl<T, A: Allocator> Vec<T, A> {
#[cfg(not(no_global_oom_handling))]
#[inline]
#[stable(feature = "append", since = "1.4.0")]
#[track_caller]
pub fn append(&mut self, other: &mut Self) {
unsafe {
self.append_elements(other.as_slice() as _);
@@ -2450,6 +2460,7 @@ impl<T, A: Allocator> Vec<T, A> {
/// Appends elements to `self` from other buffer.
#[cfg(not(no_global_oom_handling))]
#[inline]
#[track_caller]
unsafe fn append_elements(&mut self, other: *const [T]) {
let count = unsafe { (*other).len() };
self.reserve(count);
@@ -2611,6 +2622,7 @@ impl<T, A: Allocator> Vec<T, A> {
#[inline]
#[must_use = "use `.truncate()` if you don't need the other half"]
#[stable(feature = "split_off", since = "1.4.0")]
#[track_caller]
pub fn split_off(&mut self, at: usize) -> Self
where
A: Clone,
@@ -2668,6 +2680,7 @@ impl<T, A: Allocator> Vec<T, A> {
/// ```
#[cfg(not(no_global_oom_handling))]
#[stable(feature = "vec_resize_with", since = "1.33.0")]
#[track_caller]
pub fn resize_with<F>(&mut self, new_len: usize, f: F)
where
F: FnMut() -> T,
@@ -2873,6 +2886,7 @@ impl<T: Clone, A: Allocator> Vec<T, A> {
/// ```
#[cfg(not(no_global_oom_handling))]
#[stable(feature = "vec_resize", since = "1.5.0")]
#[track_caller]
pub fn resize(&mut self, new_len: usize, value: T) {
let len = self.len();

@@ -2904,6 +2918,7 @@ impl<T: Clone, A: Allocator> Vec<T, A> {
/// [`extend`]: Vec::extend
#[cfg(not(no_global_oom_handling))]
#[stable(feature = "vec_extend_from_slice", since = "1.6.0")]
#[track_caller]
pub fn extend_from_slice(&mut self, other: &[T]) {
self.spec_extend(other.iter())
}
@@ -2931,6 +2946,7 @@ impl<T: Clone, A: Allocator> Vec<T, A> {
/// ```
#[cfg(not(no_global_oom_handling))]
#[stable(feature = "vec_extend_from_within", since = "1.53.0")]
#[track_caller]
pub fn extend_from_within<R>(&mut self, src: R)
where
R: RangeBounds<usize>,
@@ -2991,6 +3007,7 @@ impl<T, A: Allocator, const N: usize> Vec<[T; N], A> {

impl<T: Clone, A: Allocator> Vec<T, A> {
#[cfg(not(no_global_oom_handling))]
#[track_caller]
/// Extend the vector by `n` clones of value.
fn extend_with(&mut self, n: usize, value: T) {
self.reserve(n);
@@ -3051,13 +3068,15 @@ impl<T: PartialEq, A: Allocator> Vec<T, A> {
#[cfg(not(no_global_oom_handling))]
#[stable(feature = "rust1", since = "1.0.0")]
#[cfg_attr(not(test), rustc_diagnostic_item = "vec_from_elem")]
#[track_caller]
pub fn from_elem<T: Clone>(elem: T, n: usize) -> Vec<T> {
<T as SpecFromElem>::from_elem(elem, n, Global)
}

#[doc(hidden)]
#[cfg(not(no_global_oom_handling))]
#[unstable(feature = "allocator_api", issue = "32838")]
#[track_caller]
pub fn from_elem_in<T: Clone, A: Allocator>(elem: T, n: usize, alloc: A) -> Vec<T, A> {
<T as SpecFromElem>::from_elem(elem, n, alloc)
}
@@ -3149,6 +3168,7 @@ unsafe impl<T, A: Allocator> ops::DerefPure for Vec<T, A> {}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Clone, A: Allocator + Clone> Clone for Vec<T, A> {
#[cfg(not(test))]
#[track_caller]
fn clone(&self) -> Self {
let alloc = self.allocator().clone();
<[T]>::to_vec_in(&**self, alloc)
@@ -3186,6 +3206,7 @@ impl<T: Clone, A: Allocator + Clone> Clone for Vec<T, A> {
/// // And no reallocation occurred
/// assert_eq!(yp, y.as_ptr());
/// ```
#[track_caller]
fn clone_from(&mut self, source: &Self) {
crate::slice::SpecCloneIntoVec::clone_into(source.as_slice(), self);
}
@@ -3284,6 +3305,7 @@ impl<T, I: SliceIndex<[T]>, A: Allocator> IndexMut<I> for Vec<T, A> {
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> FromIterator<T> for Vec<T> {
#[inline]
#[track_caller]
fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Vec<T> {
<Self as SpecFromIter<T, I::IntoIter>>::from_iter(iter.into_iter())
}
@@ -3352,16 +3374,19 @@ impl<'a, T, A: Allocator> IntoIterator for &'a mut Vec<T, A> {
#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> Extend<T> for Vec<T, A> {
#[inline]
#[track_caller]
fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
<Self as SpecExtend<T, I::IntoIter>>::spec_extend(self, iter.into_iter())
}

#[inline]
#[track_caller]
fn extend_one(&mut self, item: T) {
self.push(item);
}

#[inline]
#[track_caller]
fn extend_reserve(&mut self, additional: usize) {
self.reserve(additional);
}
@@ -3381,6 +3406,7 @@ impl<T, A: Allocator> Vec<T, A> {
// leaf method to which various SpecFrom/SpecExtend implementations delegate when
// they have no further optimizations to apply
#[cfg(not(no_global_oom_handling))]
#[track_caller]
fn extend_desugared<I: Iterator<Item = T>>(&mut self, mut iterator: I) {
// This is the case for a general iterator.
//
@@ -3408,6 +3434,7 @@ impl<T, A: Allocator> Vec<T, A> {
// specific extend for `TrustedLen` iterators, called both by the specializations
// and internal places where resolving specialization makes compilation slower
#[cfg(not(no_global_oom_handling))]
#[track_caller]
fn extend_trusted(&mut self, iterator: impl iter::TrustedLen<Item = T>) {
let (low, high) = iterator.size_hint();
if let Some(additional) = high {
@@ -3558,16 +3585,19 @@ impl<T, A: Allocator> Vec<T, A> {
#[cfg(not(no_global_oom_handling))]
#[stable(feature = "extend_ref", since = "1.2.0")]
impl<'a, T: Copy + 'a, A: Allocator> Extend<&'a T> for Vec<T, A> {
#[track_caller]
fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
self.spec_extend(iter.into_iter())
}

#[inline]
#[track_caller]
fn extend_one(&mut self, &item: &'a T) {
self.push(item);
}

#[inline]
#[track_caller]
fn extend_reserve(&mut self, additional: usize) {
self.reserve(additional);
}
@@ -3678,6 +3708,7 @@ impl<T: Clone> From<&[T]> for Vec<T> {
/// assert_eq!(Vec::from(&[1, 2, 3][..]), vec![1, 2, 3]);
/// ```
#[cfg(not(test))]
#[track_caller]
fn from(s: &[T]) -> Vec<T> {
s.to_vec()
}
@@ -3698,6 +3729,7 @@ impl<T: Clone> From<&mut [T]> for Vec<T> {
/// assert_eq!(Vec::from(&mut [1, 2, 3][..]), vec![1, 2, 3]);
/// ```
#[cfg(not(test))]
#[track_caller]
fn from(s: &mut [T]) -> Vec<T> {
s.to_vec()
}
@@ -3717,6 +3749,7 @@ impl<T: Clone, const N: usize> From<&[T; N]> for Vec<T> {
/// ```
/// assert_eq!(Vec::from(&[1, 2, 3]), vec![1, 2, 3]);
/// ```
#[track_caller]
fn from(s: &[T; N]) -> Vec<T> {
Self::from(s.as_slice())
}
@@ -3732,6 +3765,7 @@ impl<T: Clone, const N: usize> From<&mut [T; N]> for Vec<T> {
/// ```
/// assert_eq!(Vec::from(&mut [1, 2, 3]), vec![1, 2, 3]);
/// ```
#[track_caller]
fn from(s: &mut [T; N]) -> Vec<T> {
Self::from(s.as_mut_slice())
}
@@ -3748,6 +3782,7 @@ impl<T, const N: usize> From<[T; N]> for Vec<T> {
/// assert_eq!(Vec::from([1, 2, 3]), vec![1, 2, 3]);
/// ```
#[cfg(not(test))]
#[track_caller]
fn from(s: [T; N]) -> Vec<T> {
<[T]>::into_vec(Box::new(s))
}
@@ -3777,6 +3812,7 @@ where
/// let b: Cow<'_, [i32]> = Cow::Borrowed(&[1, 2, 3]);
/// assert_eq!(Vec::from(o), Vec::from(b));
/// ```
#[track_caller]
fn from(s: Cow<'a, [T]>) -> Vec<T> {
s.into_owned()
}
@@ -3825,6 +3861,7 @@ impl<T, A: Allocator> From<Vec<T, A>> for Box<[T], A> {
///
/// assert_eq!(Box::from(vec), vec![1, 2, 3].into_boxed_slice());
/// ```
#[track_caller]
fn from(v: Vec<T, A>) -> Self {
v.into_boxed_slice()
}
@@ -3840,6 +3877,7 @@ impl From<&str> for Vec<u8> {
/// ```
/// assert_eq!(Vec::from("123"), vec![b'1', b'2', b'3']);
/// ```
#[track_caller]
fn from(s: &str) -> Vec<u8> {
From::from(s.as_bytes())
}
6 changes: 6 additions & 0 deletions library/alloc/src/vec/spec_extend.rs
@@ -6,13 +6,15 @@ use crate::alloc::Allocator;

// Specialization trait used for Vec::extend
pub(super) trait SpecExtend<T, I> {
#[track_caller]
fn spec_extend(&mut self, iter: I);
}

impl<T, I, A: Allocator> SpecExtend<T, I> for Vec<T, A>
where
I: Iterator<Item = T>,
{
#[track_caller]
default fn spec_extend(&mut self, iter: I) {
self.extend_desugared(iter)
}
@@ -22,12 +24,14 @@ impl<T, I, A: Allocator> SpecExtend<T, I> for Vec<T, A>
where
I: TrustedLen<Item = T>,
{
#[track_caller]
default fn spec_extend(&mut self, iterator: I) {
self.extend_trusted(iterator)
}
}

impl<T, A: Allocator> SpecExtend<T, IntoIter<T>> for Vec<T, A> {
#[track_caller]
fn spec_extend(&mut self, mut iterator: IntoIter<T>) {
unsafe {
self.append_elements(iterator.as_slice() as _);
@@ -41,6 +45,7 @@ where
I: Iterator<Item = &'a T>,
T: Clone,
{
#[track_caller]
default fn spec_extend(&mut self, iterator: I) {
self.spec_extend(iterator.cloned())
}
@@ -50,6 +55,7 @@ impl<'a, T: 'a, A: Allocator> SpecExtend<&'a T, slice::Iter<'a, T>> for Vec<T, A
where
T: Copy,
{
#[track_caller]
fn spec_extend(&mut self, iterator: slice::Iter<'a, T>) {
let slice = iterator.as_slice();
unsafe { self.append_elements(slice) };
4 changes: 4 additions & 0 deletions library/alloc/src/vec/spec_from_elem.rs
@@ -10,6 +10,7 @@ pub(super) trait SpecFromElem: Sized {
}

impl<T: Clone> SpecFromElem for T {
#[track_caller]
default fn from_elem<A: Allocator>(elem: Self, n: usize, alloc: A) -> Vec<Self, A> {
let mut v = Vec::with_capacity_in(n, alloc);
v.extend_with(n, elem);
@@ -19,6 +20,7 @@ impl<T: Clone> SpecFromElem for T {

impl<T: Clone + IsZero> SpecFromElem for T {
#[inline]
#[track_caller]
default fn from_elem<A: Allocator>(elem: T, n: usize, alloc: A) -> Vec<T, A> {
if elem.is_zero() {
return Vec { buf: RawVec::with_capacity_zeroed_in(n, alloc), len: n };
@@ -31,6 +33,7 @@ impl<T: Clone + IsZero> SpecFromElem for T {

impl SpecFromElem for i8 {
#[inline]
#[track_caller]
fn from_elem<A: Allocator>(elem: i8, n: usize, alloc: A) -> Vec<i8, A> {
if elem == 0 {
return Vec { buf: RawVec::with_capacity_zeroed_in(n, alloc), len: n };
@@ -46,6 +49,7 @@ impl SpecFromElem for i8 {

impl SpecFromElem for u8 {
#[inline]
#[track_caller]
fn from_elem<A: Allocator>(elem: u8, n: usize, alloc: A) -> Vec<u8, A> {
if elem == 0 {
return Vec { buf: RawVec::with_capacity_zeroed_in(n, alloc), len: n };
2 changes: 2 additions & 0 deletions library/alloc/src/vec/spec_from_iter.rs
@@ -29,12 +29,14 @@ impl<T, I> SpecFromIter<T, I> for Vec<T>
where
I: Iterator<Item = T>,
{
#[track_caller]
default fn from_iter(iterator: I) -> Self {
SpecFromIterNested::from_iter(iterator)
}
}

impl<T> SpecFromIter<T, IntoIter<T>> for Vec<T> {
#[track_caller]
fn from_iter(iterator: IntoIter<T>) -> Self {
// A common case is passing a vector into a function which immediately
// re-collects into a vector. We can short circuit this if the IntoIter
2 changes: 2 additions & 0 deletions library/alloc/src/vec/spec_from_iter_nested.rs
@@ -15,6 +15,7 @@ impl<T, I> SpecFromIterNested<T, I> for Vec<T>
where
I: Iterator<Item = T>,
{
#[track_caller]
default fn from_iter(mut iterator: I) -> Self {
// Unroll the first iteration, as the vector is going to be
// expanded on this iteration in every case when the iterable is not
@@ -47,6 +48,7 @@ impl<T, I> SpecFromIterNested<T, I> for Vec<T>
where
I: TrustedLen<Item = T>,
{
#[track_caller]
fn from_iter(iterator: I) -> Self {
let mut vector = match iterator.size_hint() {
(_, Some(upper)) => Vec::with_capacity(upper),
2 changes: 2 additions & 0 deletions library/alloc/src/vec/splice.rs
@@ -52,6 +52,7 @@ impl<I: Iterator, A: Allocator> ExactSizeIterator for Splice<'_, I, A> {}

#[stable(feature = "vec_splice", since = "1.21.0")]
impl<I: Iterator, A: Allocator> Drop for Splice<'_, I, A> {
#[track_caller]
fn drop(&mut self) {
self.drain.by_ref().for_each(drop);
// At this point draining is done and the only remaining tasks are splicing
@@ -123,6 +124,7 @@ impl<T, A: Allocator> Drain<'_, T, A> {
}

/// Makes room for inserting more elements before the tail.
#[track_caller]
unsafe fn move_tail(&mut self, additional: usize) {
let vec = unsafe { self.vec.as_mut() };
let len = self.tail_start + self.tail_len;
3 changes: 1 addition & 2 deletions tests/ui/hygiene/panic-location.rs
@@ -4,8 +4,7 @@
//@ normalize-stderr-test: ".rs:\d+:\d+" -> ".rs:LL:CC"
//
// Regression test for issue #70963
-// The captured stderr from this test reports a location
-// inside `VecDeque::with_capacity`, instead of `<::core::macros::panic macros>`
+// The reported panic location should not be `<::core::macros::panic macros>`.
fn main() {
std::collections::VecDeque::<String>::with_capacity(!0);
}
2 changes: 1 addition & 1 deletion tests/ui/hygiene/panic-location.run.stderr
@@ -1,3 +1,3 @@
-thread 'main' panicked at alloc/src/raw_vec.rs:LL:CC:
+thread 'main' panicked at $DIR/panic-location.rs:LL:CC:
capacity overflow
note: run with `RUST_BACKTRACE=1` environment variable to display a backtrace
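
The updated expectation above shows the user-visible effect: the capacity-overflow panic is now attributed to the caller's source file instead of alloc/src/raw_vec.rs. A small program mirroring the UI test (illustrative, not part of the PR):

fn main() {
    // With this change, the panic message names this file and line,
    // e.g. "thread 'main' panicked at src/main.rs:4:43: capacity overflow".
    std::collections::VecDeque::<String>::with_capacity(!0);
}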