Add #[track_caller] to allocating methods of Vec & VecDeque #126557

Merged: 2 commits, Oct 14, 2024
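This change threads `#[track_caller]` through the allocating paths of `VecDeque` and the shared `RawVec` plumbing (also used by `Vec`), so allocation-related panics such as "capacity overflow" are attributed to the caller's line rather than to internals of `library/alloc`. A minimal sketch of the user-visible effect (standalone example, not part of the diff below):

```rust
use std::collections::VecDeque;

fn main() {
    // Requesting an impossibly large capacity panics with "capacity overflow".
    // With #[track_caller] on `with_capacity` and the RawVec methods it calls,
    // the panic is reported at this line instead of inside raw_vec.rs.
    let _dq: VecDeque<u8> = VecDeque::with_capacity(usize::MAX);
}
```

The diff below annotates the public entry points and the internal helpers they call.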
24 changes: 24 additions & 0 deletions library/alloc/src/collections/vec_deque/mod.rs
@@ -103,6 +103,7 @@ pub struct VecDeque<

#[stable(feature = "rust1", since = "1.0.0")]
impl<T: Clone, A: Allocator + Clone> Clone for VecDeque<T, A> {
#[track_caller]
fn clone(&self) -> Self {
let mut deq = Self::with_capacity_in(self.len(), self.allocator().clone());
deq.extend(self.iter().cloned());
@@ -113,6 +114,7 @@ impl<T: Clone, A: Allocator + Clone> Clone for VecDeque<T, A> {
///
/// This method is preferred over simply assigning `source.clone()` to `self`,
/// as it avoids reallocation if possible.
#[track_caller]
fn clone_from(&mut self, source: &Self) {
self.clear();
self.extend(source.iter().cloned());
@@ -570,6 +572,7 @@ impl<T> VecDeque<T> {
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
#[must_use]
#[track_caller]
pub fn with_capacity(capacity: usize) -> VecDeque<T> {
Self::with_capacity_in(capacity, Global)
}
@@ -625,6 +628,7 @@ impl<T, A: Allocator> VecDeque<T, A> {
/// let deque: VecDeque<u32> = VecDeque::with_capacity(10);
/// ```
#[unstable(feature = "allocator_api", issue = "32838")]
#[track_caller]
pub fn with_capacity_in(capacity: usize, alloc: A) -> VecDeque<T, A> {
VecDeque { head: 0, len: 0, buf: RawVec::with_capacity_in(capacity, alloc) }
}
@@ -789,6 +793,7 @@ impl<T, A: Allocator> VecDeque<T, A> {
///
/// [`reserve`]: VecDeque::reserve
#[stable(feature = "rust1", since = "1.0.0")]
#[track_caller]
pub fn reserve_exact(&mut self, additional: usize) {
let new_cap = self.len.checked_add(additional).expect("capacity overflow");
let old_cap = self.capacity();
@@ -818,6 +823,7 @@ impl<T, A: Allocator> VecDeque<T, A> {
/// assert!(buf.capacity() >= 11);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[track_caller]
pub fn reserve(&mut self, additional: usize) {
let new_cap = self.len.checked_add(additional).expect("capacity overflow");
let old_cap = self.capacity();
@@ -949,6 +955,7 @@ impl<T, A: Allocator> VecDeque<T, A> {
/// assert!(buf.capacity() >= 4);
/// ```
#[stable(feature = "deque_extras_15", since = "1.5.0")]
#[track_caller]
pub fn shrink_to_fit(&mut self) {
self.shrink_to(0);
}
@@ -974,6 +981,7 @@ impl<T, A: Allocator> VecDeque<T, A> {
/// assert!(buf.capacity() >= 4);
/// ```
#[stable(feature = "shrink_to", since = "1.56.0")]
#[track_caller]
pub fn shrink_to(&mut self, min_capacity: usize) {
let target_cap = min_capacity.max(self.len);

@@ -1740,6 +1748,7 @@ impl<T, A: Allocator> VecDeque<T, A> {
/// assert_eq!(d.front(), Some(&2));
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[track_caller]
pub fn push_front(&mut self, value: T) {
if self.is_full() {
self.grow();
@@ -1767,6 +1776,7 @@ impl<T, A: Allocator> VecDeque<T, A> {
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_confusables("push", "put", "append")]
#[track_caller]
pub fn push_back(&mut self, value: T) {
if self.is_full() {
self.grow();
@@ -1876,6 +1886,7 @@ impl<T, A: Allocator> VecDeque<T, A> {
/// assert_eq!(vec_deque, &['a', 'd', 'b', 'c']);
/// ```
#[stable(feature = "deque_extras_15", since = "1.5.0")]
#[track_caller]
pub fn insert(&mut self, index: usize, value: T) {
assert!(index <= self.len(), "index out of bounds");
if self.is_full() {
@@ -1979,6 +1990,7 @@ impl<T, A: Allocator> VecDeque<T, A> {
#[inline]
#[must_use = "use `.truncate()` if you don't need the other half"]
#[stable(feature = "split_off", since = "1.4.0")]
#[track_caller]
pub fn split_off(&mut self, at: usize) -> Self
where
A: Clone,
@@ -2045,6 +2057,7 @@ impl<T, A: Allocator> VecDeque<T, A> {
/// ```
#[inline]
#[stable(feature = "append", since = "1.4.0")]
#[track_caller]
pub fn append(&mut self, other: &mut Self) {
if T::IS_ZST {
self.len = self.len.checked_add(other.len).expect("capacity overflow");
@@ -2167,6 +2180,7 @@ impl<T, A: Allocator> VecDeque<T, A> {
// be called in cold paths.
// This may panic or abort
#[inline(never)]
#[track_caller]
fn grow(&mut self) {
// Extend or possibly remove this assertion when valid use-cases for growing the
// buffer without it being full emerge
@@ -2205,6 +2219,7 @@ impl<T, A: Allocator> VecDeque<T, A> {
/// assert_eq!(buf, [5, 10, 101, 102, 103]);
/// ```
#[stable(feature = "vec_resize_with", since = "1.33.0")]
#[track_caller]
pub fn resize_with(&mut self, new_len: usize, generator: impl FnMut() -> T) {
let len = self.len;

@@ -2751,6 +2766,7 @@ impl<T: Clone, A: Allocator> VecDeque<T, A> {
/// assert_eq!(buf, [5, 10, 20, 20, 20]);
/// ```
#[stable(feature = "deque_extras", since = "1.16.0")]
#[track_caller]
pub fn resize(&mut self, new_len: usize, value: T) {
if new_len > self.len() {
let extra = new_len - self.len();
@@ -2870,6 +2886,7 @@ impl<T, A: Allocator> IndexMut<usize> for VecDeque<T, A> {

#[stable(feature = "rust1", since = "1.0.0")]
impl<T> FromIterator<T> for VecDeque<T> {
#[track_caller]
fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> VecDeque<T> {
SpecFromIter::spec_from_iter(iter.into_iter())
}
@@ -2909,16 +2926,19 @@ impl<'a, T, A: Allocator> IntoIterator for &'a mut VecDeque<T, A> {

#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> Extend<T> for VecDeque<T, A> {
#[track_caller]
fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
<Self as SpecExtend<T, I::IntoIter>>::spec_extend(self, iter.into_iter());
}

#[inline]
#[track_caller]
fn extend_one(&mut self, elem: T) {
self.push_back(elem);
}

#[inline]
#[track_caller]
fn extend_reserve(&mut self, additional: usize) {
self.reserve(additional);
}
@@ -2934,16 +2954,19 @@ impl<T, A: Allocator> Extend<T> for VecDeque<T, A> {

#[stable(feature = "extend_ref", since = "1.2.0")]
impl<'a, T: 'a + Copy, A: Allocator> Extend<&'a T> for VecDeque<T, A> {
#[track_caller]
fn extend<I: IntoIterator<Item = &'a T>>(&mut self, iter: I) {
self.spec_extend(iter.into_iter());
}

#[inline]
#[track_caller]
fn extend_one(&mut self, &elem: &'a T) {
self.push_back(elem);
}

#[inline]
#[track_caller]
fn extend_reserve(&mut self, additional: usize) {
self.reserve(additional);
}
@@ -3041,6 +3064,7 @@ impl<T, const N: usize> From<[T; N]> for VecDeque<T> {
/// let deq2: VecDeque<_> = [1, 2, 3, 4].into();
/// assert_eq!(deq1, deq2);
/// ```
#[track_caller]
fn from(arr: [T; N]) -> Self {
let mut deq = VecDeque::with_capacity(N);
let arr = ManuallyDrop::new(arr);
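The attribute only forwards the caller's `Location` through frames that themselves carry it, which is why the whole chain is annotated above (e.g. `push_back` → `grow` → `RawVec`). A standalone sketch of that propagation rule, using hypothetical functions rather than the std implementation:

```rust
use std::panic::Location;

#[track_caller]
fn public_api() {
    grow(); // both frames carry #[track_caller], so the original call site flows through
}

#[track_caller]
fn grow() {
    // Location::caller() here is the line in main() that called public_api(),
    // because every intermediate frame is #[track_caller]; a panic! raised
    // here would be reported at that same location.
    println!("would panic at {}", Location::caller());
}

fn main() {
    public_api();
}
```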
6 changes: 6 additions & 0 deletions library/alloc/src/collections/vec_deque/spec_extend.rs
@@ -7,13 +7,15 @@ use crate::vec;

// Specialization trait used for VecDeque::extend
pub(super) trait SpecExtend<T, I> {
#[track_caller]
fn spec_extend(&mut self, iter: I);
}

impl<T, I, A: Allocator> SpecExtend<T, I> for VecDeque<T, A>
where
I: Iterator<Item = T>,
{
#[track_caller]
default fn spec_extend(&mut self, mut iter: I) {
// This function should be the moral equivalent of:
//
@@ -44,6 +46,7 @@ impl<T, I, A: Allocator> SpecExtend<T, I> for VecDeque<T, A>
where
I: TrustedLen<Item = T>,
{
#[track_caller]
default fn spec_extend(&mut self, iter: I) {
// This is the case for a TrustedLen iterator.
let (low, high) = iter.size_hint();
@@ -76,6 +79,7 @@ where
}

impl<T, A: Allocator> SpecExtend<T, vec::IntoIter<T>> for VecDeque<T, A> {
#[track_caller]
fn spec_extend(&mut self, mut iterator: vec::IntoIter<T>) {
let slice = iterator.as_slice();
self.reserve(slice.len());
@@ -93,6 +97,7 @@ where
I: Iterator<Item = &'a T>,
T: Copy,
{
#[track_caller]
default fn spec_extend(&mut self, iterator: I) {
self.spec_extend(iterator.copied())
}
@@ -102,6 +107,7 @@ impl<'a, T: 'a, A: Allocator> SpecExtend<&'a T, slice::Iter<'a, T>> for VecDeque
where
T: Copy,
{
#[track_caller]
fn spec_extend(&mut self, iterator: slice::Iter<'a, T>) {
let slice = iterator.as_slice();
self.reserve(slice.len());
1 change: 1 addition & 0 deletions library/alloc/src/collections/vec_deque/spec_from_iter.rs
@@ -9,6 +9,7 @@ impl<T, I> SpecFromIter<T, I> for VecDeque<T>
where
I: Iterator<Item = T>,
{
#[track_caller]
default fn spec_from_iter(iterator: I) -> Self {
// Since converting is O(1) now, just re-use the `Vec` logic for
// anything where we can't do something extra-special for `VecDeque`,
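These specialization impls sit behind `Extend::extend` and `FromIterator::from_iter`, so iterator-driven growth reports the caller as well. A small usage sketch (assuming the attribute chain added in this PR):

```rust
use std::collections::VecDeque;

fn main() {
    // `extend` dispatches to one of the annotated spec_extend impls; any
    // reserve/grow panic inside it is attributed to this call site.
    let mut dq: VecDeque<u32> = VecDeque::new();
    dq.extend([1, 2, 3]);

    // `collect` goes through the annotated spec_from_iter path.
    let collected: VecDeque<u32> = (1..=3).collect();
    assert_eq!(dq, collected);
}
```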
17 changes: 17 additions & 0 deletions library/alloc/src/raw_vec.rs
@@ -20,6 +20,7 @@ mod tests;
// only one location which panics rather than a bunch throughout the module.
#[cfg(not(no_global_oom_handling))]
#[cfg_attr(not(feature = "panic_immediate_abort"), inline(never))]
#[track_caller]
fn capacity_overflow() -> ! {
panic!("capacity overflow");
}
@@ -125,6 +126,7 @@ impl<T> RawVec<T, Global> {
#[cfg(not(any(no_global_oom_handling, test)))]
#[must_use]
#[inline]
#[track_caller]
pub fn with_capacity(capacity: usize) -> Self {
Self { inner: RawVecInner::with_capacity(capacity, T::LAYOUT), _marker: PhantomData }
}
@@ -133,6 +135,7 @@ impl<T> RawVec<T, Global> {
#[cfg(not(any(no_global_oom_handling, test)))]
#[must_use]
#[inline]
#[track_caller]
pub fn with_capacity_zeroed(capacity: usize) -> Self {
Self {
inner: RawVecInner::with_capacity_zeroed_in(capacity, Global, T::LAYOUT),
@@ -145,6 +148,7 @@ impl RawVecInner<Global> {
#[cfg(not(any(no_global_oom_handling, test)))]
#[must_use]
#[inline]
#[track_caller]
fn with_capacity(capacity: usize, elem_layout: Layout) -> Self {
match Self::try_allocate_in(capacity, AllocInit::Uninitialized, Global, elem_layout) {
Ok(res) => res,
@@ -184,6 +188,7 @@ impl<T, A: Allocator> RawVec<T, A> {
/// allocator for the returned `RawVec`.
#[cfg(not(no_global_oom_handling))]
#[inline]
#[track_caller]
pub fn with_capacity_in(capacity: usize, alloc: A) -> Self {
Self {
inner: RawVecInner::with_capacity_in(capacity, alloc, T::LAYOUT),
@@ -205,6 +210,7 @@ impl<T, A: Allocator> RawVec<T, A> {
/// of allocator for the returned `RawVec`.
#[cfg(not(no_global_oom_handling))]
#[inline]
#[track_caller]
pub fn with_capacity_zeroed_in(capacity: usize, alloc: A) -> Self {
Self {
inner: RawVecInner::with_capacity_zeroed_in(capacity, alloc, T::LAYOUT),
@@ -324,6 +330,7 @@ impl<T, A: Allocator> RawVec<T, A> {
/// Aborts on OOM.
#[cfg(not(no_global_oom_handling))]
#[inline]
#[track_caller]
pub fn reserve(&mut self, len: usize, additional: usize) {
self.inner.reserve(len, additional, T::LAYOUT)
}
@@ -332,6 +339,7 @@ impl<T, A: Allocator> RawVec<T, A> {
/// caller to ensure `len == self.capacity()`.
#[cfg(not(no_global_oom_handling))]
#[inline(never)]
#[track_caller]
pub fn grow_one(&mut self) {
self.inner.grow_one(T::LAYOUT)
}
@@ -359,6 +367,7 @@ impl<T, A: Allocator> RawVec<T, A> {
///
/// Aborts on OOM.
#[cfg(not(no_global_oom_handling))]
#[track_caller]
pub fn reserve_exact(&mut self, len: usize, additional: usize) {
self.inner.reserve_exact(len, additional, T::LAYOUT)
}
@@ -383,6 +392,7 @@ impl<T, A: Allocator> RawVec<T, A> {
///
/// Aborts on OOM.
#[cfg(not(no_global_oom_handling))]
#[track_caller]
#[inline]
pub fn shrink_to_fit(&mut self, cap: usize) {
self.inner.shrink_to_fit(cap, T::LAYOUT)
@@ -408,6 +418,7 @@ impl<A: Allocator> RawVecInner<A> {

#[cfg(not(no_global_oom_handling))]
#[inline]
#[track_caller]
fn with_capacity_in(capacity: usize, alloc: A, elem_layout: Layout) -> Self {
match Self::try_allocate_in(capacity, AllocInit::Uninitialized, alloc, elem_layout) {
Ok(this) => {
@@ -432,6 +443,7 @@ impl<A: Allocator> RawVecInner<A> {

#[cfg(not(no_global_oom_handling))]
#[inline]
#[track_caller]
fn with_capacity_zeroed_in(capacity: usize, alloc: A, elem_layout: Layout) -> Self {
match Self::try_allocate_in(capacity, AllocInit::Zeroed, alloc, elem_layout) {
Ok(res) => res,
@@ -526,6 +538,7 @@ impl<A: Allocator> RawVecInner<A> {

#[cfg(not(no_global_oom_handling))]
#[inline]
#[track_caller]
fn reserve(&mut self, len: usize, additional: usize, elem_layout: Layout) {
// Callers expect this function to be very cheap when there is already sufficient capacity.
// Therefore, we move all the resizing and error-handling logic from grow_amortized and
@@ -550,6 +563,7 @@ impl<A: Allocator> RawVecInner<A> {

#[cfg(not(no_global_oom_handling))]
#[inline]
#[track_caller]
fn grow_one(&mut self, elem_layout: Layout) {
if let Err(err) = self.grow_amortized(self.cap.0, 1, elem_layout) {
handle_error(err);
@@ -573,6 +587,7 @@ impl<A: Allocator> RawVecInner<A> {
}

#[cfg(not(no_global_oom_handling))]
#[track_caller]
fn reserve_exact(&mut self, len: usize, additional: usize, elem_layout: Layout) {
if let Err(err) = self.try_reserve_exact(len, additional, elem_layout) {
handle_error(err);
@@ -597,6 +612,7 @@ impl<A: Allocator> RawVecInner<A> {

#[cfg(not(no_global_oom_handling))]
#[inline]
#[track_caller]
fn shrink_to_fit(&mut self, cap: usize, elem_layout: Layout) {
if let Err(err) = self.shrink(cap, elem_layout) {
handle_error(err);
@@ -770,6 +786,7 @@ where
#[cfg(not(no_global_oom_handling))]
#[cold]
#[optimize(size)]
#[track_caller]
fn handle_error(e: TryReserveError) -> ! {
match e.kind() {
CapacityOverflow => capacity_overflow(),
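raw_vec.rs deliberately funnels failures into cold, `#[inline(never)]` helpers (`capacity_overflow`, `handle_error`); annotating those as well keeps the location from being lost at the final hop. A simplified sketch of that pattern (illustrative names, not the real code):

```rust
#[cold]
#[inline(never)]
#[track_caller]
fn capacity_overflow_demo() -> ! {
    // Being #[track_caller], this panic is attributed to the caller of the
    // (also annotated) public method, not to this helper.
    panic!("capacity overflow");
}

#[track_caller]
fn reserve_demo(len: usize, additional: usize) -> usize {
    // Keep the hot path small; push the panic into the cold helper.
    match len.checked_add(additional) {
        Some(cap) => cap,
        None => capacity_overflow_demo(),
    }
}

fn main() {
    // The panic is reported at this line, because the whole chain is annotated.
    let _cap = reserve_demo(usize::MAX, 1);
}
```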
1 change: 1 addition & 0 deletions library/alloc/src/vec/cow.rs
@@ -58,6 +58,7 @@ impl<'a, T> FromIterator<T> for Cow<'a, [T]>
where
T: Clone,
{
#[track_caller]
fn from_iter<I: IntoIterator<Item = T>>(it: I) -> Cow<'a, [T]> {
Cow::Owned(FromIterator::from_iter(it))
}
2 changes: 2 additions & 0 deletions library/alloc/src/vec/in_place_collect.rs
@@ -229,6 +229,7 @@ where
I: Iterator<Item = T> + InPlaceCollect,
<I as SourceIter>::Source: AsVecIntoIter,
{
#[track_caller]
default fn from_iter(iterator: I) -> Self {
// Select the implementation in const eval to avoid codegen of the dead branch to improve compile times.
let fun: fn(I) -> Vec<T> = const {
@@ -246,6 +247,7 @@ where
}
}

#[track_caller]
fn from_iter_in_place<I, T>(mut iterator: I) -> Vec<T>
where
I: Iterator<Item = T> + InPlaceCollect,
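To see the change end to end, one option is a panic hook that prints the captured location (a standalone sketch, not part of the PR):

```rust
use std::collections::VecDeque;

fn main() {
    std::panic::set_hook(Box::new(|info| {
        if let Some(loc) = info.location() {
            // With this PR, the captured location is the `reserve` call below,
            // not a line inside library/alloc/src/raw_vec.rs.
            eprintln!("panicked at {}:{}", loc.file(), loc.line());
        }
    }));

    let mut dq: VecDeque<u8> = VecDeque::new();
    dq.reserve(usize::MAX); // panics with "capacity overflow"
}
```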