@@ -148,6 +148,8 @@ impl<T> RawVec<T, Global> {
 }
 
 impl<T, A: AllocRef> RawVec<T, A> {
+    const ELEMENT_SIZE: usize = mem::size_of::<T>();
+
     /// Like `new`, but parameterized over the choice of allocator for
     /// the returned `RawVec`.
     pub const fn new_in(alloc: A) -> Self {
@@ -170,7 +172,7 @@ impl<T, A: AllocRef> RawVec<T, A> {
     }
 
     fn allocate_in(capacity: usize, init: AllocInit, mut alloc: A) -> Self {
-        if mem::size_of::<T>() == 0 {
+        if Self::ELEMENT_SIZE == 0 {
             Self::new_in(alloc)
         } else {
             // We avoid `unwrap_or_else` here because it bloats the amount of
@@ -227,7 +229,7 @@ impl<T, A: AllocRef> RawVec<T, A> {
     /// This will always be `usize::MAX` if `T` is zero-sized.
     #[inline(always)]
     pub fn capacity(&self) -> usize {
-        if mem::size_of::<T>() == 0 { usize::MAX } else { self.cap }
+        if Self::ELEMENT_SIZE == 0 { usize::MAX } else { self.cap }
     }
 
     /// Returns a shared reference to the allocator backing this `RawVec`.
@@ -241,14 +243,14 @@ impl<T, A: AllocRef> RawVec<T, A> {
     }
 
     fn current_memory(&self) -> Option<(NonNull<u8>, Layout)> {
-        if mem::size_of::<T>() == 0 || self.cap == 0 {
+        if Self::ELEMENT_SIZE == 0 || self.cap == 0 {
             None
         } else {
             // We have an allocated chunk of memory, so we can bypass runtime
             // checks to get our current layout.
             unsafe {
                 let align = mem::align_of::<T>();
-                let size = mem::size_of::<T>() * self.cap;
+                let size = Self::ELEMENT_SIZE * self.cap;
                 let layout = Layout::from_size_align_unchecked(size, align);
                 Some((self.ptr.cast().into(), layout))
             }
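
Aside: `current_memory` can use `from_size_align_unchecked` because a live allocation is known to have had a valid layout when it was created. For illustration only, a checked counterpart built on the safe `Layout::array` API might look like this minimal sketch (the `layout_for` helper is hypothetical, not part of the patch):

use std::alloc::Layout;

// Checked counterpart of the `size * cap` computation above:
// `Layout::array` multiplies with overflow checking and validates
// the alignment, returning an error instead of risking UB.
fn layout_for<T>(cap: usize) -> Option<Layout> {
    Layout::array::<T>(cap).ok()
}
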
@@ -383,15 +385,28 @@ impl<T, A: AllocRef> RawVec<T, A> {
     }
 
     fn capacity_from_bytes(excess: usize) -> usize {
-        debug_assert_ne!(mem::size_of::<T>(), 0);
-        excess / mem::size_of::<T>()
+        debug_assert_ne!(Self::ELEMENT_SIZE, 0);
+        excess / Self::ELEMENT_SIZE
     }
 
     fn set_ptr(&mut self, ptr: NonNull<[u8]>) {
         self.ptr = unsafe { Unique::new_unchecked(ptr.cast().as_ptr()) };
         self.cap = Self::capacity_from_bytes(ptr.len());
     }
 
+    // Tiny Vecs are dumb. Skip to:
+    // - 8 if the element size is 1, because any heap allocator is likely
+    //   to round up a request of less than 8 bytes to at least 8 bytes.
+    // - 4 if elements are moderate-sized (<= 1 KiB).
+    // - 1 otherwise, to avoid wasting too much space for very short Vecs.
+    const MIN_NON_ZERO_CAP: usize = if Self::ELEMENT_SIZE == 1 {
+        8
+    } else if Self::ELEMENT_SIZE <= 1024 {
+        4
+    } else {
+        1
+    };
+
     // This method is usually instantiated many times. So we want it to be as
     // small as possible, to improve compile times. But we also want as much of
     // its contents to be statically computable as possible, to make the
@@ -403,7 +418,7 @@ impl<T, A: AllocRef> RawVec<T, A> {
         // This is ensured by the calling contexts.
         debug_assert!(additional > 0);
 
-        if mem::size_of::<T>() == 0 {
+        if Self::ELEMENT_SIZE == 0 {
             // Since we return a capacity of `usize::MAX` when `elem_size` is
             // 0, getting to here necessarily means the `RawVec` is overfull.
             return Err(CapacityOverflow);
@@ -416,21 +431,7 @@ impl<T, A: AllocRef> RawVec<T, A> {
         // because `cap <= isize::MAX` and the type of `cap` is `usize`.
         let cap = cmp::max(self.cap * 2, required_cap);
 
-        // Tiny Vecs are dumb. Skip to:
-        // - 8 if the element size is 1, because any heap allocators is likely
-        //   to round up a request of less than 8 bytes to at least 8 bytes.
-        // - 4 if elements are moderate-sized (<= 1 KiB).
-        // - 1 otherwise, to avoid wasting too much space for very short Vecs.
-        // Note that `min_non_zero_cap` is computed statically.
-        let elem_size = mem::size_of::<T>();
-        let min_non_zero_cap = if elem_size == 1 {
-            8
-        } else if elem_size <= 1024 {
-            4
-        } else {
-            1
-        };
-        let cap = cmp::max(min_non_zero_cap, cap);
+        let cap = cmp::max(Self::MIN_NON_ZERO_CAP, cap);
 
         let new_layout = Layout::array::<T>(cap);
 
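The growth policy above composes three inputs: doubling of the current capacity, the caller's required capacity, and the static minimum. A minimal sketch of the same arithmetic as a standalone function (`next_cap` is a hypothetical name for illustration; the real code reads `self.cap` and `Self::MIN_NON_ZERO_CAP`):

use std::cmp;

// next_cap(0, 1, 8)   == 8   (tiny first allocation is bumped up to 8)
// next_cap(8, 9, 8)   == 16  (amortized doubling takes over)
// next_cap(16, 40, 8) == 40  (a large request wins over doubling)
fn next_cap(cur_cap: usize, required_cap: usize, min_non_zero_cap: usize) -> usize {
    let cap = cmp::max(cur_cap * 2, required_cap);
    cmp::max(min_non_zero_cap, cap)
}
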
@@ -444,7 +445,7 @@ impl<T, A: AllocRef> RawVec<T, A> {
     // `grow_amortized`, but this method is usually instantiated less often so
     // it's less critical.
     fn grow_exact(&mut self, len: usize, additional: usize) -> Result<(), TryReserveError> {
-        if mem::size_of::<T>() == 0 {
+        if Self::ELEMENT_SIZE == 0 {
             // Since we return a capacity of `usize::MAX` when the type size is
             // 0, getting to here necessarily means the `RawVec` is overfull.
             return Err(CapacityOverflow);
@@ -463,7 +464,7 @@ impl<T, A: AllocRef> RawVec<T, A> {
         assert!(amount <= self.capacity(), "Tried to shrink to a larger capacity");
 
         let (ptr, layout) = if let Some(mem) = self.current_memory() { mem } else { return Ok(()) };
-        let new_size = amount * Self::ELEMENT_SIZE;
 
         let ptr = unsafe {
             let new_layout = Layout::from_size_align_unchecked(new_size, layout.align());
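The net effect of the patch is to hoist `elem_size` and `min_non_zero_cap` out of every instantiation of `grow_amortized` and into associated constants evaluated at compile time. A self-contained sketch of the same pattern, assuming a hypothetical `Demo` type rather than the real `RawVec`:

use std::marker::PhantomData;
use std::mem;

struct Demo<T>(PhantomData<T>);

impl<T> Demo<T> {
    const ELEMENT_SIZE: usize = mem::size_of::<T>();

    // Same policy as the patch: minimum non-zero capacity by element size.
    const MIN_NON_ZERO_CAP: usize = if Self::ELEMENT_SIZE == 1 {
        8
    } else if Self::ELEMENT_SIZE <= 1024 {
        4
    } else {
        1
    };
}

fn main() {
    assert_eq!(Demo::<u8>::MIN_NON_ZERO_CAP, 8); // 1-byte elements
    assert_eq!(Demo::<u64>::MIN_NON_ZERO_CAP, 4); // moderate-sized elements
    assert_eq!(Demo::<[u8; 2048]>::MIN_NON_ZERO_CAP, 1); // large elements
}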