
Commit 39dbe23

Committed Aug 4, 2022
rust: import upstream alloc crate
This is a subset of the Rust standard library `alloc` crate, version 1.62.0, licensed under "Apache-2.0 OR MIT", from:

    https://github.com/rust-lang/rust/tree/1.62.0/library/alloc/src

The files are copied as-is, with no modifications whatsoever (not even adding the SPDX identifiers). For copyright details, please see:

    https://github.com/rust-lang/rust/blob/1.62.0/COPYRIGHT

The next patch modifies these files as needed for use within the kernel. This patch split allows reviewers to double-check the import and to clearly see the differences introduced.

Vendoring `alloc`, at least for the moment, allows us to have fallible allocations support (i.e. the `try_*` versions of methods which return a `Result` instead of panicking) early on. It also gives a bit more freedom to experiment with new interfaces and to iterate quickly.

Eventually, the goal is to have everything the kernel needs in upstream `alloc` and drop it from the kernel tree. For a summary of work on `alloc` happening upstream, please see: Rust-for-Linux#408

Co-developed-by: Alex Gaynor <alex.gaynor@gmail.com>
Signed-off-by: Alex Gaynor <alex.gaynor@gmail.com>
Co-developed-by: Wedson Almeida Filho <wedsonaf@google.com>
Signed-off-by: Wedson Almeida Filho <wedsonaf@google.com>
Signed-off-by: Miguel Ojeda <ojeda@kernel.org>
1 parent f1870fe commit 39dbe23

13 files changed: 9,037 additions, 0 deletions
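The main motivation above is fallible allocation. As a rough illustration of the `try_*` pattern (written against the stable `std` re-exports, not this vendored crate), a caller can request capacity up front and handle failure as a `Result` instead of panicking:

// Illustrative sketch only: `Vec::try_reserve` is the stable fallible API;
// the kernel-side `try_*` additions come in the follow-up patch.
use std::collections::TryReserveError;

fn store_packet(buf: &mut Vec<u8>, packet: &[u8]) -> Result<(), TryReserveError> {
    // Ask for capacity first; on failure we get an error value back rather
    // than an abort, so the caller can recover (e.g. drop the packet).
    buf.try_reserve(packet.len())?;
    // The copy below cannot allocate, so it cannot fail.
    buf.extend_from_slice(packet);
    Ok(())
}

fn main() {
    let mut buf = Vec::new();
    store_packet(&mut buf, b"hello").expect("tiny allocation should succeed");
    assert_eq!(buf, b"hello");
}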
 

‎rust/alloc/alloc.rs

+438 lines (large diff not rendered by default)

‎rust/alloc/borrow.rs

+496 lines (large diff not rendered by default)

‎rust/alloc/boxed.rs

+2,024 lines (large diff not rendered by default)

‎rust/alloc/collections/mod.rs

+154 lines

@@ -0,0 +1,154 @@
//! Collection types.

#![stable(feature = "rust1", since = "1.0.0")]

#[cfg(not(no_global_oom_handling))]
pub mod binary_heap;
#[cfg(not(no_global_oom_handling))]
mod btree;
#[cfg(not(no_global_oom_handling))]
pub mod linked_list;
#[cfg(not(no_global_oom_handling))]
pub mod vec_deque;

#[cfg(not(no_global_oom_handling))]
#[stable(feature = "rust1", since = "1.0.0")]
pub mod btree_map {
    //! An ordered map based on a B-Tree.
    #[stable(feature = "rust1", since = "1.0.0")]
    pub use super::btree::map::*;
}

#[cfg(not(no_global_oom_handling))]
#[stable(feature = "rust1", since = "1.0.0")]
pub mod btree_set {
    //! An ordered set based on a B-Tree.
    #[stable(feature = "rust1", since = "1.0.0")]
    pub use super::btree::set::*;
}

#[cfg(not(no_global_oom_handling))]
#[stable(feature = "rust1", since = "1.0.0")]
#[doc(no_inline)]
pub use binary_heap::BinaryHeap;

#[cfg(not(no_global_oom_handling))]
#[stable(feature = "rust1", since = "1.0.0")]
#[doc(no_inline)]
pub use btree_map::BTreeMap;

#[cfg(not(no_global_oom_handling))]
#[stable(feature = "rust1", since = "1.0.0")]
#[doc(no_inline)]
pub use btree_set::BTreeSet;

#[cfg(not(no_global_oom_handling))]
#[stable(feature = "rust1", since = "1.0.0")]
#[doc(no_inline)]
pub use linked_list::LinkedList;

#[cfg(not(no_global_oom_handling))]
#[stable(feature = "rust1", since = "1.0.0")]
#[doc(no_inline)]
pub use vec_deque::VecDeque;

use crate::alloc::{Layout, LayoutError};
use core::fmt::Display;

/// The error type for `try_reserve` methods.
#[derive(Clone, PartialEq, Eq, Debug)]
#[stable(feature = "try_reserve", since = "1.57.0")]
pub struct TryReserveError {
    kind: TryReserveErrorKind,
}

impl TryReserveError {
    /// Details about the allocation that caused the error
    #[inline]
    #[must_use]
    #[unstable(
        feature = "try_reserve_kind",
        reason = "Uncertain how much info should be exposed",
        issue = "48043"
    )]
    pub fn kind(&self) -> TryReserveErrorKind {
        self.kind.clone()
    }
}

/// Details of the allocation that caused a `TryReserveError`
#[derive(Clone, PartialEq, Eq, Debug)]
#[unstable(
    feature = "try_reserve_kind",
    reason = "Uncertain how much info should be exposed",
    issue = "48043"
)]
pub enum TryReserveErrorKind {
    /// Error due to the computed capacity exceeding the collection's maximum
    /// (usually `isize::MAX` bytes).
    CapacityOverflow,

    /// The memory allocator returned an error
    AllocError {
        /// The layout of allocation request that failed
        layout: Layout,

        #[doc(hidden)]
        #[unstable(
            feature = "container_error_extra",
            issue = "none",
            reason = "\
            Enable exposing the allocator’s custom error value \
            if an associated type is added in the future: \
            https://github.com/rust-lang/wg-allocators/issues/23"
        )]
        non_exhaustive: (),
    },
}

#[unstable(
    feature = "try_reserve_kind",
    reason = "Uncertain how much info should be exposed",
    issue = "48043"
)]
impl From<TryReserveErrorKind> for TryReserveError {
    #[inline]
    fn from(kind: TryReserveErrorKind) -> Self {
        Self { kind }
    }
}

#[unstable(feature = "try_reserve_kind", reason = "new API", issue = "48043")]
impl From<LayoutError> for TryReserveErrorKind {
    /// Always evaluates to [`TryReserveErrorKind::CapacityOverflow`].
    #[inline]
    fn from(_: LayoutError) -> Self {
        TryReserveErrorKind::CapacityOverflow
    }
}

#[stable(feature = "try_reserve", since = "1.57.0")]
impl Display for TryReserveError {
    fn fmt(
        &self,
        fmt: &mut core::fmt::Formatter<'_>,
    ) -> core::result::Result<(), core::fmt::Error> {
        fmt.write_str("memory allocation failed")?;
        let reason = match self.kind {
            TryReserveErrorKind::CapacityOverflow => {
                " because the computed capacity exceeded the collection's maximum"
            }
            TryReserveErrorKind::AllocError { .. } => {
                " because the memory allocator returned an error"
            }
        };
        fmt.write_str(reason)
    }
}

/// An intermediate trait for specialization of `Extend`.
#[doc(hidden)]
trait SpecExtend<I: IntoIterator> {
    /// Extends `self` with the contents of the given iterator.
    fn spec_extend(&mut self, iter: I);
}
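For context, a small sketch of how the `TryReserveError` defined in this file reaches users through the stable `Vec::try_reserve` (illustrative only, not part of the import; it relies on the `Display` impl above):

fn grow_or_report(v: &mut Vec<u64>, extra: usize) -> Result<(), String> {
    // Map the error through its `Display` impl ("memory allocation failed ...").
    v.try_reserve(extra).map_err(|e| e.to_string())
}

fn main() {
    let mut v = Vec::new();
    assert!(grow_or_report(&mut v, 16).is_ok());

    // An absurd request overflows the capacity computation and is reported
    // as the `CapacityOverflow` variant rather than aborting the process.
    let err = grow_or_report(&mut v, usize::MAX).unwrap_err();
    assert!(err.starts_with("memory allocation failed"));
}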

‎rust/alloc/lib.rs

+236 lines

@@ -0,0 +1,236 @@
//! # The Rust core allocation and collections library
//!
//! This library provides smart pointers and collections for managing
//! heap-allocated values.
//!
//! This library, like libcore, normally doesn’t need to be used directly
//! since its contents are re-exported in the [`std` crate](../std/index.html).
//! Crates that use the `#![no_std]` attribute however will typically
//! not depend on `std`, so they’d use this crate instead.
//!
//! ## Boxed values
//!
//! The [`Box`] type is a smart pointer type. There can only be one owner of a
//! [`Box`], and the owner can decide to mutate the contents, which live on the
//! heap.
//!
//! This type can be sent among threads efficiently as the size of a `Box` value
//! is the same as that of a pointer. Tree-like data structures are often built
//! with boxes because each node often has only one owner, the parent.
//!
//! ## Reference counted pointers
//!
//! The [`Rc`] type is a non-threadsafe reference-counted pointer type intended
//! for sharing memory within a thread. An [`Rc`] pointer wraps a type, `T`, and
//! only allows access to `&T`, a shared reference.
//!
//! This type is useful when inherited mutability (such as using [`Box`]) is too
//! constraining for an application, and is often paired with the [`Cell`] or
//! [`RefCell`] types in order to allow mutation.
//!
//! ## Atomically reference counted pointers
//!
//! The [`Arc`] type is the threadsafe equivalent of the [`Rc`] type. It
//! provides all the same functionality of [`Rc`], except it requires that the
//! contained type `T` is shareable. Additionally, [`Arc<T>`][`Arc`] is itself
//! sendable while [`Rc<T>`][`Rc`] is not.
//!
//! This type allows for shared access to the contained data, and is often
//! paired with synchronization primitives such as mutexes to allow mutation of
//! shared resources.
//!
//! ## Collections
//!
//! Implementations of the most common general purpose data structures are
//! defined in this library. They are re-exported through the
//! [standard collections library](../std/collections/index.html).
//!
//! ## Heap interfaces
//!
//! The [`alloc`](alloc/index.html) module defines the low-level interface to the
//! default global allocator. It is not compatible with the libc allocator API.
//!
//! [`Arc`]: sync
//! [`Box`]: boxed
//! [`Cell`]: core::cell
//! [`Rc`]: rc
//! [`RefCell`]: core::cell

// To run liballoc tests without x.py without ending up with two copies of liballoc, Miri needs to be
// able to "empty" this crate. See <https://github.com/rust-lang/miri-test-libstd/issues/4>.
// rustc itself never sets the feature, so this line has no affect there.
#![cfg(any(not(feature = "miri-test-libstd"), test, doctest))]
#![allow(unused_attributes)]
#![stable(feature = "alloc", since = "1.36.0")]
#![doc(
    html_playground_url = "https://play.rust-lang.org/",
    issue_tracker_base_url = "https://github.com/rust-lang/rust/issues/",
    test(no_crate_inject, attr(allow(unused_variables), deny(warnings)))
)]
#![doc(cfg_hide(
    not(test),
    not(any(test, bootstrap)),
    any(not(feature = "miri-test-libstd"), test, doctest),
    no_global_oom_handling,
    not(no_global_oom_handling),
    target_has_atomic = "ptr"
))]
#![no_std]
#![needs_allocator]
//
// Lints:
#![deny(unsafe_op_in_unsafe_fn)]
#![warn(deprecated_in_future)]
#![warn(missing_debug_implementations)]
#![warn(missing_docs)]
#![allow(explicit_outlives_requirements)]
//
// Library features:
#![cfg_attr(not(no_global_oom_handling), feature(alloc_c_string))]
#![feature(alloc_layout_extra)]
#![feature(allocator_api)]
#![feature(array_chunks)]
#![feature(array_methods)]
#![feature(array_windows)]
#![feature(assert_matches)]
#![feature(async_iterator)]
#![feature(coerce_unsized)]
#![cfg_attr(not(no_global_oom_handling), feature(const_alloc_error))]
#![feature(const_box)]
#![cfg_attr(not(no_global_oom_handling), feature(const_btree_new))]
#![feature(const_cow_is_borrowed)]
#![feature(const_convert)]
#![feature(const_size_of_val)]
#![feature(const_align_of_val)]
#![feature(const_ptr_read)]
#![feature(const_maybe_uninit_write)]
#![feature(const_maybe_uninit_as_mut_ptr)]
#![feature(const_refs_to_cell)]
#![feature(core_c_str)]
#![feature(core_intrinsics)]
#![feature(core_ffi_c)]
#![feature(const_eval_select)]
#![feature(const_pin)]
#![feature(cstr_from_bytes_until_nul)]
#![feature(dispatch_from_dyn)]
#![feature(exact_size_is_empty)]
#![feature(extend_one)]
#![feature(fmt_internals)]
#![feature(fn_traits)]
#![feature(hasher_prefixfree_extras)]
#![feature(inplace_iteration)]
#![feature(iter_advance_by)]
#![feature(layout_for_ptr)]
#![feature(maybe_uninit_slice)]
#![cfg_attr(test, feature(new_uninit))]
#![feature(nonnull_slice_from_raw_parts)]
#![feature(pattern)]
#![feature(ptr_internals)]
#![feature(ptr_metadata)]
#![feature(ptr_sub_ptr)]
#![feature(receiver_trait)]
#![feature(set_ptr_value)]
#![feature(slice_group_by)]
#![feature(slice_ptr_get)]
#![feature(slice_ptr_len)]
#![feature(slice_range)]
#![feature(str_internals)]
#![feature(strict_provenance)]
#![feature(trusted_len)]
#![feature(trusted_random_access)]
#![feature(try_trait_v2)]
#![feature(unchecked_math)]
#![feature(unicode_internals)]
#![feature(unsize)]
//
// Language features:
#![feature(allocator_internals)]
#![feature(allow_internal_unstable)]
#![feature(associated_type_bounds)]
#![feature(box_syntax)]
#![feature(cfg_sanitize)]
#![feature(const_deref)]
#![feature(const_mut_refs)]
#![feature(const_ptr_write)]
#![feature(const_precise_live_drops)]
#![feature(const_trait_impl)]
#![feature(const_try)]
#![feature(dropck_eyepatch)]
#![feature(exclusive_range_pattern)]
#![feature(fundamental)]
#![cfg_attr(not(test), feature(generator_trait))]
#![feature(hashmap_internals)]
#![feature(lang_items)]
#![feature(let_else)]
#![feature(min_specialization)]
#![feature(negative_impls)]
#![feature(never_type)]
#![feature(nll)] // Not necessary, but here to test the `nll` feature.
#![feature(rustc_allow_const_fn_unstable)]
#![feature(rustc_attrs)]
#![feature(slice_internals)]
#![feature(staged_api)]
#![cfg_attr(test, feature(test))]
#![feature(unboxed_closures)]
#![feature(unsized_fn_params)]
#![feature(c_unwind)]
//
// Rustdoc features:
#![feature(doc_cfg)]
#![feature(doc_cfg_hide)]
// Technically, this is a bug in rustdoc: rustdoc sees the documentation on `#[lang = slice_alloc]`
// blocks is for `&[T]`, which also has documentation using this feature in `core`, and gets mad
// that the feature-gate isn't enabled. Ideally, it wouldn't check for the feature gate for docs
// from other crates, but since this can only appear for lang items, it doesn't seem worth fixing.
#![feature(intra_doc_pointers)]

// Allow testing this library
#[cfg(test)]
#[macro_use]
extern crate std;
#[cfg(test)]
extern crate test;

// Module with internal macros used by other modules (needs to be included before other modules).
#[macro_use]
mod macros;

mod raw_vec;

// Heaps provided for low-level allocation strategies

pub mod alloc;

// Primitive types using the heaps above

// Need to conditionally define the mod from `boxed.rs` to avoid
// duplicating the lang-items when building in test cfg; but also need
// to allow code to have `use boxed::Box;` declarations.
#[cfg(not(test))]
pub mod boxed;
#[cfg(test)]
mod boxed {
    pub use std::boxed::Box;
}
pub mod borrow;
pub mod collections;
#[cfg(not(no_global_oom_handling))]
pub mod ffi;
pub mod fmt;
pub mod rc;
pub mod slice;
pub mod str;
pub mod string;
#[cfg(target_has_atomic = "ptr")]
pub mod sync;
#[cfg(all(not(no_global_oom_handling), target_has_atomic = "ptr"))]
pub mod task;
#[cfg(test)]
mod tests;
pub mod vec;

#[doc(hidden)]
#[unstable(feature = "liballoc_internals", issue = "none", reason = "implementation detail")]
pub mod __export {
    pub use core::format_args;
}
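The module documentation above describes `Box`, `Rc` and `Arc`; a compact illustration of those three ownership models (written against `std`'s re-exports, not this vendored crate):

use std::cell::RefCell;
use std::rc::Rc;
use std::sync::Arc;
use std::thread;

fn main() {
    // One owner; the value lives on the heap.
    let boxed: Box<i32> = Box::new(41);
    assert_eq!(*boxed + 1, 42);

    // Shared ownership within a thread; RefCell provides interior mutability.
    let shared = Rc::new(RefCell::new(vec![1, 2]));
    let alias = Rc::clone(&shared);
    alias.borrow_mut().push(3);
    assert_eq!(shared.borrow().len(), 3);

    // Shared ownership across threads; Arc<T> is sendable when T is Send + Sync.
    let counted = Arc::new(String::from("kernel"));
    let handle = {
        let counted = Arc::clone(&counted);
        thread::spawn(move || counted.len())
    };
    assert_eq!(handle.join().unwrap(), 6);
}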

‎rust/alloc/raw_vec.rs

+518 lines (large diff not rendered by default)

‎rust/alloc/slice.rs

+1,202 lines (large diff not rendered by default)

‎rust/alloc/vec/drain.rs

+184 lines

@@ -0,0 +1,184 @@
use crate::alloc::{Allocator, Global};
use core::fmt;
use core::iter::{FusedIterator, TrustedLen};
use core::mem;
use core::ptr::{self, NonNull};
use core::slice::{self};

use super::Vec;

/// A draining iterator for `Vec<T>`.
///
/// This `struct` is created by [`Vec::drain`].
/// See its documentation for more.
///
/// # Example
///
/// ```
/// let mut v = vec![0, 1, 2];
/// let iter: std::vec::Drain<_> = v.drain(..);
/// ```
#[stable(feature = "drain", since = "1.6.0")]
pub struct Drain<
    'a,
    T: 'a,
    #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator + 'a = Global,
> {
    /// Index of tail to preserve
    pub(super) tail_start: usize,
    /// Length of tail
    pub(super) tail_len: usize,
    /// Current remaining range to remove
    pub(super) iter: slice::Iter<'a, T>,
    pub(super) vec: NonNull<Vec<T, A>>,
}

#[stable(feature = "collection_debug", since = "1.17.0")]
impl<T: fmt::Debug, A: Allocator> fmt::Debug for Drain<'_, T, A> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_tuple("Drain").field(&self.iter.as_slice()).finish()
    }
}

impl<'a, T, A: Allocator> Drain<'a, T, A> {
    /// Returns the remaining items of this iterator as a slice.
    ///
    /// # Examples
    ///
    /// ```
    /// let mut vec = vec!['a', 'b', 'c'];
    /// let mut drain = vec.drain(..);
    /// assert_eq!(drain.as_slice(), &['a', 'b', 'c']);
    /// let _ = drain.next().unwrap();
    /// assert_eq!(drain.as_slice(), &['b', 'c']);
    /// ```
    #[must_use]
    #[stable(feature = "vec_drain_as_slice", since = "1.46.0")]
    pub fn as_slice(&self) -> &[T] {
        self.iter.as_slice()
    }

    /// Returns a reference to the underlying allocator.
    #[unstable(feature = "allocator_api", issue = "32838")]
    #[must_use]
    #[inline]
    pub fn allocator(&self) -> &A {
        unsafe { self.vec.as_ref().allocator() }
    }
}

#[stable(feature = "vec_drain_as_slice", since = "1.46.0")]
impl<'a, T, A: Allocator> AsRef<[T]> for Drain<'a, T, A> {
    fn as_ref(&self) -> &[T] {
        self.as_slice()
    }
}

#[stable(feature = "drain", since = "1.6.0")]
unsafe impl<T: Sync, A: Sync + Allocator> Sync for Drain<'_, T, A> {}
#[stable(feature = "drain", since = "1.6.0")]
unsafe impl<T: Send, A: Send + Allocator> Send for Drain<'_, T, A> {}

#[stable(feature = "drain", since = "1.6.0")]
impl<T, A: Allocator> Iterator for Drain<'_, T, A> {
    type Item = T;

    #[inline]
    fn next(&mut self) -> Option<T> {
        self.iter.next().map(|elt| unsafe { ptr::read(elt as *const _) })
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        self.iter.size_hint()
    }
}

#[stable(feature = "drain", since = "1.6.0")]
impl<T, A: Allocator> DoubleEndedIterator for Drain<'_, T, A> {
    #[inline]
    fn next_back(&mut self) -> Option<T> {
        self.iter.next_back().map(|elt| unsafe { ptr::read(elt as *const _) })
    }
}

#[stable(feature = "drain", since = "1.6.0")]
impl<T, A: Allocator> Drop for Drain<'_, T, A> {
    fn drop(&mut self) {
        /// Moves back the un-`Drain`ed elements to restore the original `Vec`.
        struct DropGuard<'r, 'a, T, A: Allocator>(&'r mut Drain<'a, T, A>);

        impl<'r, 'a, T, A: Allocator> Drop for DropGuard<'r, 'a, T, A> {
            fn drop(&mut self) {
                if self.0.tail_len > 0 {
                    unsafe {
                        let source_vec = self.0.vec.as_mut();
                        // memmove back untouched tail, update to new length
                        let start = source_vec.len();
                        let tail = self.0.tail_start;
                        if tail != start {
                            let src = source_vec.as_ptr().add(tail);
                            let dst = source_vec.as_mut_ptr().add(start);
                            ptr::copy(src, dst, self.0.tail_len);
                        }
                        source_vec.set_len(start + self.0.tail_len);
                    }
                }
            }
        }

        let iter = mem::replace(&mut self.iter, (&mut []).iter());
        let drop_len = iter.len();

        let mut vec = self.vec;

        if mem::size_of::<T>() == 0 {
            // ZSTs have no identity, so we don't need to move them around, we only need to drop the correct amount.
            // this can be achieved by manipulating the Vec length instead of moving values out from `iter`.
            unsafe {
                let vec = vec.as_mut();
                let old_len = vec.len();
                vec.set_len(old_len + drop_len + self.tail_len);
                vec.truncate(old_len + self.tail_len);
            }

            return;
        }

        // ensure elements are moved back into their appropriate places, even when drop_in_place panics
        let _guard = DropGuard(self);

        if drop_len == 0 {
            return;
        }

        // as_slice() must only be called when iter.len() is > 0 because
        // vec::Splice modifies vec::Drain fields and may grow the vec which would invalidate
        // the iterator's internal pointers. Creating a reference to deallocated memory
        // is invalid even when it is zero-length
        let drop_ptr = iter.as_slice().as_ptr();

        unsafe {
            // drop_ptr comes from a slice::Iter which only gives us a &[T] but for drop_in_place
            // a pointer with mutable provenance is necessary. Therefore we must reconstruct
            // it from the original vec but also avoid creating a &mut to the front since that could
            // invalidate raw pointers to it which some unsafe code might rely on.
            let vec_ptr = vec.as_mut().as_mut_ptr();
            let drop_offset = drop_ptr.sub_ptr(vec_ptr);
            let to_drop = ptr::slice_from_raw_parts_mut(vec_ptr.add(drop_offset), drop_len);
            ptr::drop_in_place(to_drop);
        }
    }
}

#[stable(feature = "drain", since = "1.6.0")]
impl<T, A: Allocator> ExactSizeIterator for Drain<'_, T, A> {
    fn is_empty(&self) -> bool {
        self.iter.is_empty()
    }
}

#[unstable(feature = "trusted_len", issue = "37572")]
unsafe impl<T, A: Allocator> TrustedLen for Drain<'_, T, A> {}

#[stable(feature = "fused", since = "1.26.0")]
impl<T, A: Allocator> FusedIterator for Drain<'_, T, A> {}
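A short usage sketch of the behavior implemented above (illustrative, not part of the import): draining a sub-range yields those elements, and the preserved tail is shifted back into place when the `Drain` is dropped:

fn main() {
    let mut v = vec![1, 2, 3, 4, 5];
    // Remove and collect indices 1..3.
    let drained: Vec<i32> = v.drain(1..3).collect();
    assert_eq!(drained, [2, 3]);
    // The preserved tail (4, 5) was moved back next to the head (1).
    assert_eq!(v, [1, 4, 5]);
}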

‎rust/alloc/vec/drain_filter.rs

+143 lines

@@ -0,0 +1,143 @@
use crate::alloc::{Allocator, Global};
use core::ptr::{self};
use core::slice::{self};

use super::Vec;

/// An iterator which uses a closure to determine if an element should be removed.
///
/// This struct is created by [`Vec::drain_filter`].
/// See its documentation for more.
///
/// # Example
///
/// ```
/// #![feature(drain_filter)]
///
/// let mut v = vec![0, 1, 2];
/// let iter: std::vec::DrainFilter<_, _> = v.drain_filter(|x| *x % 2 == 0);
/// ```
#[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
#[derive(Debug)]
pub struct DrainFilter<
    'a,
    T,
    F,
    #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
> where
    F: FnMut(&mut T) -> bool,
{
    pub(super) vec: &'a mut Vec<T, A>,
    /// The index of the item that will be inspected by the next call to `next`.
    pub(super) idx: usize,
    /// The number of items that have been drained (removed) thus far.
    pub(super) del: usize,
    /// The original length of `vec` prior to draining.
    pub(super) old_len: usize,
    /// The filter test predicate.
    pub(super) pred: F,
    /// A flag that indicates a panic has occurred in the filter test predicate.
    /// This is used as a hint in the drop implementation to prevent consumption
    /// of the remainder of the `DrainFilter`. Any unprocessed items will be
    /// backshifted in the `vec`, but no further items will be dropped or
    /// tested by the filter predicate.
    pub(super) panic_flag: bool,
}

impl<T, F, A: Allocator> DrainFilter<'_, T, F, A>
where
    F: FnMut(&mut T) -> bool,
{
    /// Returns a reference to the underlying allocator.
    #[unstable(feature = "allocator_api", issue = "32838")]
    #[inline]
    pub fn allocator(&self) -> &A {
        self.vec.allocator()
    }
}

#[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
impl<T, F, A: Allocator> Iterator for DrainFilter<'_, T, F, A>
where
    F: FnMut(&mut T) -> bool,
{
    type Item = T;

    fn next(&mut self) -> Option<T> {
        unsafe {
            while self.idx < self.old_len {
                let i = self.idx;
                let v = slice::from_raw_parts_mut(self.vec.as_mut_ptr(), self.old_len);
                self.panic_flag = true;
                let drained = (self.pred)(&mut v[i]);
                self.panic_flag = false;
                // Update the index *after* the predicate is called. If the index
                // is updated prior and the predicate panics, the element at this
                // index would be leaked.
                self.idx += 1;
                if drained {
                    self.del += 1;
                    return Some(ptr::read(&v[i]));
                } else if self.del > 0 {
                    let del = self.del;
                    let src: *const T = &v[i];
                    let dst: *mut T = &mut v[i - del];
                    ptr::copy_nonoverlapping(src, dst, 1);
                }
            }
            None
        }
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        (0, Some(self.old_len - self.idx))
    }
}

#[unstable(feature = "drain_filter", reason = "recently added", issue = "43244")]
impl<T, F, A: Allocator> Drop for DrainFilter<'_, T, F, A>
where
    F: FnMut(&mut T) -> bool,
{
    fn drop(&mut self) {
        struct BackshiftOnDrop<'a, 'b, T, F, A: Allocator>
        where
            F: FnMut(&mut T) -> bool,
        {
            drain: &'b mut DrainFilter<'a, T, F, A>,
        }

        impl<'a, 'b, T, F, A: Allocator> Drop for BackshiftOnDrop<'a, 'b, T, F, A>
        where
            F: FnMut(&mut T) -> bool,
        {
            fn drop(&mut self) {
                unsafe {
                    if self.drain.idx < self.drain.old_len && self.drain.del > 0 {
                        // This is a pretty messed up state, and there isn't really an
                        // obviously right thing to do. We don't want to keep trying
                        // to execute `pred`, so we just backshift all the unprocessed
                        // elements and tell the vec that they still exist. The backshift
                        // is required to prevent a double-drop of the last successfully
                        // drained item prior to a panic in the predicate.
                        let ptr = self.drain.vec.as_mut_ptr();
                        let src = ptr.add(self.drain.idx);
                        let dst = src.sub(self.drain.del);
                        let tail_len = self.drain.old_len - self.drain.idx;
                        src.copy_to(dst, tail_len);
                    }
                    self.drain.vec.set_len(self.drain.old_len - self.drain.del);
                }
            }
        }

        let backshift = BackshiftOnDrop { drain: self };

        // Attempt to consume any remaining elements if the filter predicate
        // has not yet panicked. We'll backshift any remaining elements
        // whether we've already panicked or if the consumption here panics.
        if !backshift.drain.panic_flag {
            backshift.drain.for_each(drop);
        }
    }
}
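A usage sketch of `drain_filter` (illustrative, nightly-only since the feature is unstable as the attributes above note): elements matching the predicate are removed and yielded, while the rest are backshifted to stay in the vector:

#![feature(drain_filter)]

fn main() {
    let mut v = vec![1, 2, 3, 4, 5, 6];
    // Remove the even elements, keeping the odd ones in place.
    let evens: Vec<i32> = v.drain_filter(|x| *x % 2 == 0).collect();
    assert_eq!(evens, [2, 4, 6]);
    assert_eq!(v, [1, 3, 5]);
}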

‎rust/alloc/vec/into_iter.rs

+362 lines

@@ -0,0 +1,362 @@
#[cfg(not(no_global_oom_handling))]
use super::AsVecIntoIter;
use crate::alloc::{Allocator, Global};
use crate::raw_vec::RawVec;
use core::fmt;
use core::intrinsics::arith_offset;
use core::iter::{
    FusedIterator, InPlaceIterable, SourceIter, TrustedLen, TrustedRandomAccessNoCoerce,
};
use core::marker::PhantomData;
use core::mem::{self, ManuallyDrop};
use core::ops::Deref;
use core::ptr::{self, NonNull};
use core::slice::{self};

/// An iterator that moves out of a vector.
///
/// This `struct` is created by the `into_iter` method on [`Vec`](super::Vec)
/// (provided by the [`IntoIterator`] trait).
///
/// # Example
///
/// ```
/// let v = vec![0, 1, 2];
/// let iter: std::vec::IntoIter<_> = v.into_iter();
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_insignificant_dtor]
pub struct IntoIter<
    T,
    #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global,
> {
    pub(super) buf: NonNull<T>,
    pub(super) phantom: PhantomData<T>,
    pub(super) cap: usize,
    // the drop impl reconstructs a RawVec from buf, cap and alloc
    // to avoid dropping the allocator twice we need to wrap it into ManuallyDrop
    pub(super) alloc: ManuallyDrop<A>,
    pub(super) ptr: *const T,
    pub(super) end: *const T,
}

#[stable(feature = "vec_intoiter_debug", since = "1.13.0")]
impl<T: fmt::Debug, A: Allocator> fmt::Debug for IntoIter<T, A> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_tuple("IntoIter").field(&self.as_slice()).finish()
    }
}

impl<T, A: Allocator> IntoIter<T, A> {
    /// Returns the remaining items of this iterator as a slice.
    ///
    /// # Examples
    ///
    /// ```
    /// let vec = vec!['a', 'b', 'c'];
    /// let mut into_iter = vec.into_iter();
    /// assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']);
    /// let _ = into_iter.next().unwrap();
    /// assert_eq!(into_iter.as_slice(), &['b', 'c']);
    /// ```
    #[stable(feature = "vec_into_iter_as_slice", since = "1.15.0")]
    pub fn as_slice(&self) -> &[T] {
        unsafe { slice::from_raw_parts(self.ptr, self.len()) }
    }

    /// Returns the remaining items of this iterator as a mutable slice.
    ///
    /// # Examples
    ///
    /// ```
    /// let vec = vec!['a', 'b', 'c'];
    /// let mut into_iter = vec.into_iter();
    /// assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']);
    /// into_iter.as_mut_slice()[2] = 'z';
    /// assert_eq!(into_iter.next().unwrap(), 'a');
    /// assert_eq!(into_iter.next().unwrap(), 'b');
    /// assert_eq!(into_iter.next().unwrap(), 'z');
    /// ```
    #[stable(feature = "vec_into_iter_as_slice", since = "1.15.0")]
    pub fn as_mut_slice(&mut self) -> &mut [T] {
        unsafe { &mut *self.as_raw_mut_slice() }
    }

    /// Returns a reference to the underlying allocator.
    #[unstable(feature = "allocator_api", issue = "32838")]
    #[inline]
    pub fn allocator(&self) -> &A {
        &self.alloc
    }

    fn as_raw_mut_slice(&mut self) -> *mut [T] {
        ptr::slice_from_raw_parts_mut(self.ptr as *mut T, self.len())
    }

    /// Drops remaining elements and relinquishes the backing allocation.
    ///
    /// This is roughly equivalent to the following, but more efficient
    ///
    /// ```
    /// # let mut into_iter = Vec::<u8>::with_capacity(10).into_iter();
    /// (&mut into_iter).for_each(core::mem::drop);
    /// unsafe { core::ptr::write(&mut into_iter, Vec::new().into_iter()); }
    /// ```
    ///
    /// This method is used by in-place iteration, refer to the vec::in_place_collect
    /// documentation for an overview.
    #[cfg(not(no_global_oom_handling))]
    pub(super) fn forget_allocation_drop_remaining(&mut self) {
        let remaining = self.as_raw_mut_slice();

        // overwrite the individual fields instead of creating a new
        // struct and then overwriting &mut self.
        // this creates less assembly
        self.cap = 0;
        self.buf = unsafe { NonNull::new_unchecked(RawVec::NEW.ptr()) };
        self.ptr = self.buf.as_ptr();
        self.end = self.buf.as_ptr();

        unsafe {
            ptr::drop_in_place(remaining);
        }
    }

    /// Forgets to Drop the remaining elements while still allowing the backing allocation to be freed.
    pub(crate) fn forget_remaining_elements(&mut self) {
        self.ptr = self.end;
    }
}

#[stable(feature = "vec_intoiter_as_ref", since = "1.46.0")]
impl<T, A: Allocator> AsRef<[T]> for IntoIter<T, A> {
    fn as_ref(&self) -> &[T] {
        self.as_slice()
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<T: Send, A: Allocator + Send> Send for IntoIter<T, A> {}
#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<T: Sync, A: Allocator + Sync> Sync for IntoIter<T, A> {}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> Iterator for IntoIter<T, A> {
    type Item = T;

    #[inline]
    fn next(&mut self) -> Option<T> {
        if self.ptr as *const _ == self.end {
            None
        } else if mem::size_of::<T>() == 0 {
            // purposefully don't use 'ptr.offset' because for
            // vectors with 0-size elements this would return the
            // same pointer.
            self.ptr = unsafe { arith_offset(self.ptr as *const i8, 1) as *mut T };

            // Make up a value of this ZST.
            Some(unsafe { mem::zeroed() })
        } else {
            let old = self.ptr;
            self.ptr = unsafe { self.ptr.offset(1) };

            Some(unsafe { ptr::read(old) })
        }
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        let exact = if mem::size_of::<T>() == 0 {
            self.end.addr().wrapping_sub(self.ptr.addr())
        } else {
            unsafe { self.end.sub_ptr(self.ptr) }
        };
        (exact, Some(exact))
    }

    #[inline]
    fn advance_by(&mut self, n: usize) -> Result<(), usize> {
        let step_size = self.len().min(n);
        let to_drop = ptr::slice_from_raw_parts_mut(self.ptr as *mut T, step_size);
        if mem::size_of::<T>() == 0 {
            // SAFETY: due to unchecked casts of unsigned amounts to signed offsets the wraparound
            // effectively results in unsigned pointers representing positions 0..usize::MAX,
            // which is valid for ZSTs.
            self.ptr = unsafe { arith_offset(self.ptr as *const i8, step_size as isize) as *mut T }
        } else {
            // SAFETY: the min() above ensures that step_size is in bounds
            self.ptr = unsafe { self.ptr.add(step_size) };
        }
        // SAFETY: the min() above ensures that step_size is in bounds
        unsafe {
            ptr::drop_in_place(to_drop);
        }
        if step_size < n {
            return Err(step_size);
        }
        Ok(())
    }

    #[inline]
    fn count(self) -> usize {
        self.len()
    }

    unsafe fn __iterator_get_unchecked(&mut self, i: usize) -> Self::Item
    where
        Self: TrustedRandomAccessNoCoerce,
    {
        // SAFETY: the caller must guarantee that `i` is in bounds of the
        // `Vec<T>`, so `i` cannot overflow an `isize`, and the `self.ptr.add(i)`
        // is guaranteed to pointer to an element of the `Vec<T>` and
        // thus guaranteed to be valid to dereference.
        //
        // Also note the implementation of `Self: TrustedRandomAccess` requires
        // that `T: Copy` so reading elements from the buffer doesn't invalidate
        // them for `Drop`.
        unsafe {
            if mem::size_of::<T>() == 0 { mem::zeroed() } else { ptr::read(self.ptr.add(i)) }
        }
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> DoubleEndedIterator for IntoIter<T, A> {
    #[inline]
    fn next_back(&mut self) -> Option<T> {
        if self.end == self.ptr {
            None
        } else if mem::size_of::<T>() == 0 {
            // See above for why 'ptr.offset' isn't used
            self.end = unsafe { arith_offset(self.end as *const i8, -1) as *mut T };

            // Make up a value of this ZST.
            Some(unsafe { mem::zeroed() })
        } else {
            self.end = unsafe { self.end.offset(-1) };

            Some(unsafe { ptr::read(self.end) })
        }
    }

    #[inline]
    fn advance_back_by(&mut self, n: usize) -> Result<(), usize> {
        let step_size = self.len().min(n);
        if mem::size_of::<T>() == 0 {
            // SAFETY: same as for advance_by()
            self.end = unsafe {
                arith_offset(self.end as *const i8, step_size.wrapping_neg() as isize) as *mut T
            }
        } else {
            // SAFETY: same as for advance_by()
            self.end = unsafe { self.end.offset(step_size.wrapping_neg() as isize) };
        }
        let to_drop = ptr::slice_from_raw_parts_mut(self.end as *mut T, step_size);
        // SAFETY: same as for advance_by()
        unsafe {
            ptr::drop_in_place(to_drop);
        }
        if step_size < n {
            return Err(step_size);
        }
        Ok(())
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
impl<T, A: Allocator> ExactSizeIterator for IntoIter<T, A> {
    fn is_empty(&self) -> bool {
        self.ptr == self.end
    }
}

#[stable(feature = "fused", since = "1.26.0")]
impl<T, A: Allocator> FusedIterator for IntoIter<T, A> {}

#[unstable(feature = "trusted_len", issue = "37572")]
unsafe impl<T, A: Allocator> TrustedLen for IntoIter<T, A> {}

#[doc(hidden)]
#[unstable(issue = "none", feature = "std_internals")]
#[rustc_unsafe_specialization_marker]
pub trait NonDrop {}

// T: Copy as approximation for !Drop since get_unchecked does not advance self.ptr
// and thus we can't implement drop-handling
#[unstable(issue = "none", feature = "std_internals")]
impl<T: Copy> NonDrop for T {}

#[doc(hidden)]
#[unstable(issue = "none", feature = "std_internals")]
// TrustedRandomAccess (without NoCoerce) must not be implemented because
// subtypes/supertypes of `T` might not be `NonDrop`
unsafe impl<T, A: Allocator> TrustedRandomAccessNoCoerce for IntoIter<T, A>
where
    T: NonDrop,
{
    const MAY_HAVE_SIDE_EFFECT: bool = false;
}

#[cfg(not(no_global_oom_handling))]
#[stable(feature = "vec_into_iter_clone", since = "1.8.0")]
impl<T: Clone, A: Allocator + Clone> Clone for IntoIter<T, A> {
    #[cfg(not(test))]
    fn clone(&self) -> Self {
        self.as_slice().to_vec_in(self.alloc.deref().clone()).into_iter()
    }
    #[cfg(test)]
    fn clone(&self) -> Self {
        crate::slice::to_vec(self.as_slice(), self.alloc.deref().clone()).into_iter()
    }
}

#[stable(feature = "rust1", since = "1.0.0")]
unsafe impl<#[may_dangle] T, A: Allocator> Drop for IntoIter<T, A> {
    fn drop(&mut self) {
        struct DropGuard<'a, T, A: Allocator>(&'a mut IntoIter<T, A>);

        impl<T, A: Allocator> Drop for DropGuard<'_, T, A> {
            fn drop(&mut self) {
                unsafe {
                    // `IntoIter::alloc` is not used anymore after this and will be dropped by RawVec
                    let alloc = ManuallyDrop::take(&mut self.0.alloc);
                    // RawVec handles deallocation
                    let _ = RawVec::from_raw_parts_in(self.0.buf.as_ptr(), self.0.cap, alloc);
                }
            }
        }

        let guard = DropGuard(self);
        // destroy the remaining elements
        unsafe {
            ptr::drop_in_place(guard.0.as_raw_mut_slice());
        }
        // now `guard` will be dropped and do the rest
    }
}

// In addition to the SAFETY invariants of the following three unsafe traits
// also refer to the vec::in_place_collect module documentation to get an overview
#[unstable(issue = "none", feature = "inplace_iteration")]
#[doc(hidden)]
unsafe impl<T, A: Allocator> InPlaceIterable for IntoIter<T, A> {}

#[unstable(issue = "none", feature = "inplace_iteration")]
#[doc(hidden)]
unsafe impl<T, A: Allocator> SourceIter for IntoIter<T, A> {
    type Source = Self;

    #[inline]
    unsafe fn as_inner(&mut self) -> &mut Self::Source {
        self
    }
}

#[cfg(not(no_global_oom_handling))]
unsafe impl<T> AsVecIntoIter for IntoIter<T> {
    type Item = T;

    fn as_into_iter(&mut self) -> &mut IntoIter<Self::Item> {
        self
    }
}
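A usage sketch of `IntoIter` (illustrative, not part of the import): because the iterator owns the buffer, it can yield elements by value from either end and expose the remainder through `as_slice`:

fn main() {
    let v = vec![String::from("a"), String::from("b"), String::from("c")];
    let mut it = v.into_iter();
    assert_eq!(it.next().unwrap(), "a");      // take from the front
    assert_eq!(it.next_back().unwrap(), "c"); // take from the back (DoubleEndedIterator)
    assert_eq!(it.as_slice().len(), 1);       // what remains, as a slice
    assert_eq!(it.as_slice()[0], "b");
    assert_eq!(it.len(), 1);                  // ExactSizeIterator
}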

‎rust/alloc/vec/is_zero.rs

+118 lines

@@ -0,0 +1,118 @@
use crate::boxed::Box;

#[rustc_specialization_trait]
pub(super) unsafe trait IsZero {
    /// Whether this value's representation is all zeros
    fn is_zero(&self) -> bool;
}

macro_rules! impl_is_zero {
    ($t:ty, $is_zero:expr) => {
        unsafe impl IsZero for $t {
            #[inline]
            fn is_zero(&self) -> bool {
                $is_zero(*self)
            }
        }
    };
}

impl_is_zero!(i16, |x| x == 0);
impl_is_zero!(i32, |x| x == 0);
impl_is_zero!(i64, |x| x == 0);
impl_is_zero!(i128, |x| x == 0);
impl_is_zero!(isize, |x| x == 0);

impl_is_zero!(u16, |x| x == 0);
impl_is_zero!(u32, |x| x == 0);
impl_is_zero!(u64, |x| x == 0);
impl_is_zero!(u128, |x| x == 0);
impl_is_zero!(usize, |x| x == 0);

impl_is_zero!(bool, |x| x == false);
impl_is_zero!(char, |x| x == '\0');

impl_is_zero!(f32, |x: f32| x.to_bits() == 0);
impl_is_zero!(f64, |x: f64| x.to_bits() == 0);

unsafe impl<T> IsZero for *const T {
    #[inline]
    fn is_zero(&self) -> bool {
        (*self).is_null()
    }
}

unsafe impl<T> IsZero for *mut T {
    #[inline]
    fn is_zero(&self) -> bool {
        (*self).is_null()
    }
}

unsafe impl<T: IsZero, const N: usize> IsZero for [T; N] {
    #[inline]
    fn is_zero(&self) -> bool {
        // Because this is generated as a runtime check, it's not obvious that
        // it's worth doing if the array is really long. The threshold here
        // is largely arbitrary, but was picked because as of 2022-05-01 LLVM
        // can const-fold the check in `vec![[0; 32]; n]` but not in
        // `vec![[0; 64]; n]`: https://godbolt.org/z/WTzjzfs5b
        // Feel free to tweak if you have better evidence.

        N <= 32 && self.iter().all(IsZero::is_zero)
    }
}

// `Option<&T>` and `Option<Box<T>>` are guaranteed to represent `None` as null.
// For fat pointers, the bytes that would be the pointer metadata in the `Some`
// variant are padding in the `None` variant, so ignoring them and
// zero-initializing instead is ok.
// `Option<&mut T>` never implements `Clone`, so there's no need for an impl of
// `SpecFromElem`.

unsafe impl<T: ?Sized> IsZero for Option<&T> {
    #[inline]
    fn is_zero(&self) -> bool {
        self.is_none()
    }
}

unsafe impl<T: ?Sized> IsZero for Option<Box<T>> {
    #[inline]
    fn is_zero(&self) -> bool {
        self.is_none()
    }
}

// `Option<num::NonZeroU32>` and similar have a representation guarantee that
// they're the same size as the corresponding `u32` type, as well as a guarantee
// that transmuting between `NonZeroU32` and `Option<num::NonZeroU32>` works.
// While the documentation officially makes it UB to transmute from `None`,
// we're the standard library so we can make extra inferences, and we know that
// the only niche available to represent `None` is the one that's all zeros.

macro_rules! impl_is_zero_option_of_nonzero {
    ($($t:ident,)+) => {$(
        unsafe impl IsZero for Option<core::num::$t> {
            #[inline]
            fn is_zero(&self) -> bool {
                self.is_none()
            }
        }
    )+};
}

impl_is_zero_option_of_nonzero!(
    NonZeroU8,
    NonZeroU16,
    NonZeroU32,
    NonZeroU64,
    NonZeroU128,
    NonZeroI8,
    NonZeroI16,
    NonZeroI32,
    NonZeroI64,
    NonZeroI128,
    NonZeroUsize,
    NonZeroIsize,
);
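For context, the `vec![elem; n]` path that this `IsZero` specialization serves (illustrative, not part of the import): when the element's representation is all zeros, the buffer can be obtained as already-zeroed memory rather than written element by element:

fn main() {
    // All of these qualify for the zeroed-allocation fast path:
    let ints = vec![0u64; 1024];
    let pairs = vec![[0u32; 2]; 512]; // arrays qualify up to length 32, per the check above
    let opts: Vec<Option<&str>> = vec![None; 256]; // `None::<&T>` is all zero bits
    assert!(ints.iter().all(|&x| x == 0));
    assert_eq!(pairs.len(), 512);
    assert!(opts.iter().all(|o| o.is_none()));
}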

‎rust/alloc/vec/mod.rs

+3,115 lines (large diff not rendered by default)

‎rust/alloc/vec/partial_eq.rs

+47 lines

@@ -0,0 +1,47 @@
use crate::alloc::Allocator;
#[cfg(not(no_global_oom_handling))]
use crate::borrow::Cow;

use super::Vec;

macro_rules! __impl_slice_eq1 {
    ([$($vars:tt)*] $lhs:ty, $rhs:ty $(where $ty:ty: $bound:ident)?, #[$stability:meta]) => {
        #[$stability]
        impl<T, U, $($vars)*> PartialEq<$rhs> for $lhs
        where
            T: PartialEq<U>,
            $($ty: $bound)?
        {
            #[inline]
            fn eq(&self, other: &$rhs) -> bool { self[..] == other[..] }
            #[inline]
            fn ne(&self, other: &$rhs) -> bool { self[..] != other[..] }
        }
    }
}

__impl_slice_eq1! { [A1: Allocator, A2: Allocator] Vec<T, A1>, Vec<U, A2>, #[stable(feature = "rust1", since = "1.0.0")] }
__impl_slice_eq1! { [A: Allocator] Vec<T, A>, &[U], #[stable(feature = "rust1", since = "1.0.0")] }
__impl_slice_eq1! { [A: Allocator] Vec<T, A>, &mut [U], #[stable(feature = "rust1", since = "1.0.0")] }
__impl_slice_eq1! { [A: Allocator] &[T], Vec<U, A>, #[stable(feature = "partialeq_vec_for_ref_slice", since = "1.46.0")] }
__impl_slice_eq1! { [A: Allocator] &mut [T], Vec<U, A>, #[stable(feature = "partialeq_vec_for_ref_slice", since = "1.46.0")] }
__impl_slice_eq1! { [A: Allocator] Vec<T, A>, [U], #[stable(feature = "partialeq_vec_for_slice", since = "1.48.0")] }
__impl_slice_eq1! { [A: Allocator] [T], Vec<U, A>, #[stable(feature = "partialeq_vec_for_slice", since = "1.48.0")] }
#[cfg(not(no_global_oom_handling))]
__impl_slice_eq1! { [A: Allocator] Cow<'_, [T]>, Vec<U, A> where T: Clone, #[stable(feature = "rust1", since = "1.0.0")] }
#[cfg(not(no_global_oom_handling))]
__impl_slice_eq1! { [] Cow<'_, [T]>, &[U] where T: Clone, #[stable(feature = "rust1", since = "1.0.0")] }
#[cfg(not(no_global_oom_handling))]
__impl_slice_eq1! { [] Cow<'_, [T]>, &mut [U] where T: Clone, #[stable(feature = "rust1", since = "1.0.0")] }
__impl_slice_eq1! { [A: Allocator, const N: usize] Vec<T, A>, [U; N], #[stable(feature = "rust1", since = "1.0.0")] }
__impl_slice_eq1! { [A: Allocator, const N: usize] Vec<T, A>, &[U; N], #[stable(feature = "rust1", since = "1.0.0")] }

// NOTE: some less important impls are omitted to reduce code bloat
// FIXME(Centril): Reconsider this?
//__impl_slice_eq1! { [const N: usize] Vec<A>, &mut [B; N], }
//__impl_slice_eq1! { [const N: usize] [A; N], Vec<B>, }
//__impl_slice_eq1! { [const N: usize] &[A; N], Vec<B>, }
//__impl_slice_eq1! { [const N: usize] &mut [A; N], Vec<B>, }
//__impl_slice_eq1! { [const N: usize] Cow<'a, [A]>, [B; N], }
//__impl_slice_eq1! { [const N: usize] Cow<'a, [A]>, &[B; N], }
//__impl_slice_eq1! { [const N: usize] Cow<'a, [A]>, &mut [B; N], }
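A usage sketch of the comparisons these macro invocations enable (illustrative, not part of the import):

use std::borrow::Cow;

fn main() {
    let v = vec![1i32, 2, 3];
    assert!(v == [1, 2, 3]);          // Vec<T, A> == [U; N]
    assert!(v == &[1, 2, 3][..]);     // Vec<T, A> == &[U]
    assert!(&[1, 2, 3][..] == v);     // &[T] == Vec<U, A>
    let c: Cow<'_, [i32]> = Cow::Borrowed(&[1, 2, 3]);
    assert!(c == v);                  // Cow<'_, [T]> == Vec<U, A>
}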
