@@ -93,7 +93,7 @@ impl<T> ArenaChunk<T> {
     #[inline]
     fn end(&mut self) -> *mut T {
         unsafe {
-            if mem::size_of::<T>() == 0 {
+            if size_of::<T>() == 0 {
                 // A pointer as large as possible for zero-sized elements.
                 ptr::without_provenance_mut(!0)
             } else {
@@ -151,7 +151,7 @@ impl<T> TypedArena<T> {
         }
 
         unsafe {
-            if mem::size_of::<T>() == 0 {
+            if size_of::<T>() == 0 {
                 self.ptr.set(self.ptr.get().wrapping_byte_add(1));
                 let ptr = ptr::NonNull::<T>::dangling().as_ptr();
                 // Don't drop the object. This `write` is equivalent to `forget`.
@@ -173,13 +173,13 @@ impl<T> TypedArena<T> {
         // FIXME: this should *likely* use `offset_from`, but more
         // investigation is needed (including running tests in miri).
         let available_bytes = self.end.get().addr() - self.ptr.get().addr();
-        let additional_bytes = additional.checked_mul(mem::size_of::<T>()).unwrap();
+        let additional_bytes = additional.checked_mul(size_of::<T>()).unwrap();
         available_bytes >= additional_bytes
     }
 
     #[inline]
     fn alloc_raw_slice(&self, len: usize) -> *mut T {
-        assert!(mem::size_of::<T>() != 0);
+        assert!(size_of::<T>() != 0);
         assert!(len != 0);
 
         // Ensure the current chunk can fit `len` objects.
@@ -213,7 +213,7 @@ impl<T> TypedArena<T> {
         // So we collect all the elements beforehand, which takes care of reentrancy and panic
         // safety. This function is much less hot than `DroplessArena::alloc_from_iter`, so it
         // doesn't need to be hyper-optimized.
-        assert!(mem::size_of::<T>() != 0);
+        assert!(size_of::<T>() != 0);
 
         let mut vec: SmallVec<[_; 8]> = iter.into_iter().collect();
         if vec.is_empty() {
@@ -236,7 +236,7 @@ impl<T> TypedArena<T> {
         unsafe {
             // We need the element size to convert chunk sizes (ranging from
             // PAGE to HUGE_PAGE bytes) to element counts.
-            let elem_size = cmp::max(1, mem::size_of::<T>());
+            let elem_size = cmp::max(1, size_of::<T>());
             let mut chunks = self.chunks.borrow_mut();
             let mut new_cap;
             if let Some(last_chunk) = chunks.last_mut() {
@@ -246,7 +246,7 @@ impl<T> TypedArena<T> {
                     // FIXME: this should *likely* use `offset_from`, but more
                     // investigation is needed (including running tests in miri).
                     let used_bytes = self.ptr.get().addr() - last_chunk.start().addr();
-                    last_chunk.entries = used_bytes / mem::size_of::<T>();
+                    last_chunk.entries = used_bytes / size_of::<T>();
                 }
 
                 // If the previous chunk's len is less than HUGE_PAGE
@@ -276,15 +276,15 @@ impl<T> TypedArena<T> {
         let end = self.ptr.get().addr();
         // We then calculate the number of elements to be dropped in the last chunk,
         // which is the filled area's length.
-        let diff = if mem::size_of::<T>() == 0 {
+        let diff = if size_of::<T>() == 0 {
             // `T` is ZST. It can't have a drop flag, so the value here doesn't matter. We get
             // the number of zero-sized values in the last and only chunk, just out of caution.
             // Recall that `end` was incremented for each allocated value.
             end - start
         } else {
             // FIXME: this should *likely* use `offset_from`, but more
             // investigation is needed (including running tests in miri).
-            (end - start) / mem::size_of::<T>()
+            (end - start) / size_of::<T>()
         };
         // Pass that to the `destroy` method.
         unsafe {
@@ -329,7 +329,7 @@ fn align_up(val: usize, align: usize) -> usize {
 
 // Pointer alignment is common in compiler types, so keep `DroplessArena` aligned to them
 // to optimize away alignment code.
-const DROPLESS_ALIGNMENT: usize = mem::align_of::<usize>();
+const DROPLESS_ALIGNMENT: usize = align_of::<usize>();
 
 /// An arena that can hold objects of multiple different types that impl `Copy`
 /// and/or satisfy `!mem::needs_drop`.
@@ -447,7 +447,7 @@ impl DroplessArena {
     #[inline]
     pub fn alloc<T>(&self, object: T) -> &mut T {
         assert!(!mem::needs_drop::<T>());
-        assert!(mem::size_of::<T>() != 0);
+        assert!(size_of::<T>() != 0);
 
         let mem = self.alloc_raw(Layout::new::<T>()) as *mut T;
 
@@ -471,7 +471,7 @@ impl DroplessArena {
         T: Copy,
     {
         assert!(!mem::needs_drop::<T>());
-        assert!(mem::size_of::<T>() != 0);
+        assert!(size_of::<T>() != 0);
         assert!(!slice.is_empty());
 
         let mem = self.alloc_raw(Layout::for_value::<[T]>(slice)) as *mut T;
@@ -546,7 +546,7 @@ impl DroplessArena {
         // Warning: this function is reentrant: `iter` could hold a reference to `&self` and
         // allocate additional elements while we're iterating.
         let iter = iter.into_iter();
-        assert!(mem::size_of::<T>() != 0);
+        assert!(size_of::<T>() != 0);
         assert!(!mem::needs_drop::<T>());
 
         let size_hint = iter.size_hint();
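Every hunk above is the same mechanical cleanup: dropping the `mem::` qualifier from `size_of` and `align_of` calls in `rustc_arena`. As a minimal sketch of why the unqualified calls resolve, assuming the crate builds with a toolchain where these functions are part of the standard prelude (they were added to the prelude in Rust 1.80), the `probe_layout` helper below is purely illustrative and not part of the arena code:

// Illustrative only: on recent toolchains, unqualified `size_of`/`align_of`
// come from the prelude, so no `use std::mem::{size_of, align_of};` is needed.
fn probe_layout<T>() -> (usize, usize) {
    // Same call shape as the arena code after this change: no `mem::` prefix.
    (size_of::<T>(), align_of::<T>())
}

fn main() {
    let (size, align) = probe_layout::<u64>();
    assert_eq!(size, 8);
    assert_eq!(align, align_of::<u64>());
    println!("u64: size = {size}, align = {align}");
}

Note that the diff leaves `mem::needs_drop::<T>()` fully qualified, which is consistent with `needs_drop` not being part of that prelude addition.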