diff --git a/library/alloc/src/lib.rs b/library/alloc/src/lib.rs
index 91b83cfe011f2..4ac0c9b15be7a 100644
--- a/library/alloc/src/lib.rs
+++ b/library/alloc/src/lib.rs
@@ -160,6 +160,7 @@
 #![feature(tuple_trait)]
 #![feature(unicode_internals)]
 #![feature(unsize)]
+#![feature(unwrap_infallible)]
 #![feature(vec_pop_if)]
 // tidy-alphabetical-end
 //
diff --git a/library/alloc/src/vec/in_place_collect.rs b/library/alloc/src/vec/in_place_collect.rs
index 88aa1b1b0e081..22541a2b9d82f 100644
--- a/library/alloc/src/vec/in_place_collect.rs
+++ b/library/alloc/src/vec/in_place_collect.rs
@@ -259,7 +259,8 @@ where
             inner.cap,
             inner.buf.cast::<T>(),
             inner.end as *const T,
-            inner.cap * mem::size_of::<I::Src>() / mem::size_of::<T>(),
+            // SAFETY: the multiplication can not overflow, since `inner.cap * size_of::<I::Src>()` is the size of the allocation.
+            inner.cap.unchecked_mul(mem::size_of::<I::Src>()) / mem::size_of::<T>(),
         )
     };
 
@@ -374,7 +375,7 @@ where
         // - it lets us thread the write pointer through its innards and get it back in the end
         let sink = InPlaceDrop { inner: dst_buf, dst: dst_buf };
         let sink =
-            self.try_fold::<_, _, Result<_, !>>(sink, write_in_place_with_drop(end)).unwrap();
+            self.try_fold::<_, _, Result<_, !>>(sink, write_in_place_with_drop(end)).into_ok();
         // iteration succeeded, don't drop head
         unsafe { ManuallyDrop::new(sink).dst.sub_ptr(dst_buf) }
     }
diff --git a/library/alloc/src/vec/into_iter.rs b/library/alloc/src/vec/into_iter.rs
index b0226c848332c..c47989337708f 100644
--- a/library/alloc/src/vec/into_iter.rs
+++ b/library/alloc/src/vec/into_iter.rs
@@ -289,6 +289,60 @@ impl<T, A: Allocator> Iterator for IntoIter<T, A> {
         };
     }
 
+    fn fold<B, F>(mut self, mut accum: B, mut f: F) -> B
+    where
+        F: FnMut(B, Self::Item) -> B,
+    {
+        if T::IS_ZST {
+            while self.ptr.as_ptr() != self.end.cast_mut() {
+                // SAFETY: we just checked that `self.ptr` is in bounds.
+                let tmp = unsafe { self.ptr.read() };
+                // See `next` for why we subtract from `end` here.
+                self.end = self.end.wrapping_byte_sub(1);
+                accum = f(accum, tmp);
+            }
+        } else {
+            // SAFETY: `self.end` can only be null if `T` is a ZST.
+            while self.ptr != non_null!(self.end, T) {
+                // SAFETY: we just checked that `self.ptr` is in bounds.
+                let tmp = unsafe { self.ptr.read() };
+                // SAFETY: the maximum this can be is `self.end`.
+                // Increment `self.ptr` first to avoid double dropping in the event of a panic.
+                self.ptr = unsafe { self.ptr.add(1) };
+                accum = f(accum, tmp);
+            }
+        }
+        accum
+    }
+
+    fn try_fold<B, F, R>(&mut self, mut accum: B, mut f: F) -> R
+    where
+        Self: Sized,
+        F: FnMut(B, Self::Item) -> R,
+        R: core::ops::Try<Output = B>,
+    {
+        if T::IS_ZST {
+            while self.ptr.as_ptr() != self.end.cast_mut() {
+                // SAFETY: we just checked that `self.ptr` is in bounds.
+                let tmp = unsafe { self.ptr.read() };
+                // See `next` for why we subtract from `end` here.
+                self.end = self.end.wrapping_byte_sub(1);
+                accum = f(accum, tmp)?;
+            }
+        } else {
+            // SAFETY: `self.end` can only be null if `T` is a ZST.
+            while self.ptr != non_null!(self.end, T) {
+                // SAFETY: we just checked that `self.ptr` is in bounds.
+                let tmp = unsafe { self.ptr.read() };
+                // SAFETY: the maximum this can be is `self.end`.
+                // Increment `self.ptr` first to avoid double dropping in the event of a panic.
+                self.ptr = unsafe { self.ptr.add(1) };
+                accum = f(accum, tmp)?;
+            }
+        }
+        R::from_output(accum)
+    }
+
     unsafe fn __iterator_get_unchecked(&mut self, i: usize) -> Self::Item
     where
         Self: TrustedRandomAccessNoCoerce,
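
Not part of the patch: a minimal sketch, with illustrative values and closures, of code that exercises the new `fold` and `try_fold` specializations through the public `Vec::into_iter` API, assuming a toolchain where this change has landed.

fn main() {
    // `fold` consumes the iterator; the specialization advances `ptr` before calling
    // the closure, so a panicking closure cannot cause a double drop of the element.
    let sum = vec![1u32, 2, 3, 4].into_iter().fold(0u32, |acc, x| acc + x);
    assert_eq!(sum, 10);

    // `try_fold` only borrows the iterator, so iteration can resume after an early exit.
    let mut it = vec![1u32, 2, 3, 4].into_iter();
    let partial: Option<u32> =
        it.try_fold(0u32, |acc, x| if x > 2 { None } else { Some(acc + x) });
    assert_eq!(partial, None);
    // The element that triggered the early exit (3) was already consumed; 4 remains.
    assert_eq!(it.next(), Some(4));
}

The `.into_ok()` call in in_place_collect.rs relies on the unstable `unwrap_infallible` feature enabled in lib.rs above: on a `Result<_, !>` it behaves like `.unwrap()` but cannot panic, since the error type is uninhabited.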