
Commit 83494b1

Committed Sep 22, 2020
use constants to generate less llvm-ir for raw_vec functions
1 parent b01326a · commit 83494b1

File tree: 1 file changed (+25 −24 lines)

 

library/alloc/src/raw_vec.rs (+25 −24)
@@ -148,6 +148,8 @@ impl<T> RawVec<T, Global> {
 }

 impl<T, A: AllocRef> RawVec<T, A> {
+    const ELEMENT_SIZE: usize = mem::size_of::<T>();
+
     /// Like `new`, but parameterized over the choice of allocator for
     /// the returned `RawVec`.
     pub const fn new_in(alloc: A) -> Self {
@@ -170,7 +172,7 @@ impl<T, A: AllocRef> RawVec<T, A> {
     }

     fn allocate_in(capacity: usize, init: AllocInit, mut alloc: A) -> Self {
-        if mem::size_of::<T>() == 0 {
+        if Self::ELEMENT_SIZE == 0 {
             Self::new_in(alloc)
         } else {
             // We avoid `unwrap_or_else` here because it bloats the amount of
@@ -227,7 +229,7 @@ impl<T, A: AllocRef> RawVec<T, A> {
     /// This will always be `usize::MAX` if `T` is zero-sized.
     #[inline(always)]
     pub fn capacity(&self) -> usize {
-        if mem::size_of::<T>() == 0 { usize::MAX } else { self.cap }
+        if Self::ELEMENT_SIZE == 0 { usize::MAX } else { self.cap }
     }

     /// Returns a shared reference to the allocator backing this `RawVec`.
@@ -241,14 +243,14 @@ impl<T, A: AllocRef> RawVec<T, A> {
     }

     fn current_memory(&self) -> Option<(NonNull<u8>, Layout)> {
-        if mem::size_of::<T>() == 0 || self.cap == 0 {
+        if Self::ELEMENT_SIZE == 0 || self.cap == 0 {
             None
         } else {
             // We have an allocated chunk of memory, so we can bypass runtime
             // checks to get our current layout.
             unsafe {
                 let align = mem::align_of::<T>();
-                let size = mem::size_of::<T>() * self.cap;
+                let size = Self::ELEMENT_SIZE * self.cap;
                 let layout = Layout::from_size_align_unchecked(size, align);
                 Some((self.ptr.cast().into(), layout))
             }
@@ -383,15 +385,28 @@ impl<T, A: AllocRef> RawVec<T, A> {
     }

     fn capacity_from_bytes(excess: usize) -> usize {
-        debug_assert_ne!(mem::size_of::<T>(), 0);
-        excess / mem::size_of::<T>()
+        debug_assert_ne!(Self::ELEMENT_SIZE, 0);
+        excess / Self::ELEMENT_SIZE
     }

     fn set_ptr(&mut self, ptr: NonNull<[u8]>) {
         self.ptr = unsafe { Unique::new_unchecked(ptr.cast().as_ptr()) };
         self.cap = Self::capacity_from_bytes(ptr.len());
     }

+    // Tiny Vecs are dumb. Skip to:
+    // - 8 if the element size is 1, because any heap allocators is likely
+    //   to round up a request of less than 8 bytes to at least 8 bytes.
+    // - 4 if elements are moderate-sized (<= 1 KiB).
+    // - 1 otherwise, to avoid wasting too much space for very short Vecs.
+    const MIN_NON_ZERO_CAP: usize = if Self::ELEMENT_SIZE == 1 {
+        8
+    } else if Self::ELEMENT_SIZE <= 1024 {
+        4
+    } else {
+        1
+    };
+
     // This method is usually instantiated many times. So we want it to be as
     // small as possible, to improve compile times. But we also want as much of
     // its contents to be statically computable as possible, to make the
@@ -403,7 +418,7 @@ impl<T, A: AllocRef> RawVec<T, A> {
         // This is ensured by the calling contexts.
         debug_assert!(additional > 0);

-        if mem::size_of::<T>() == 0 {
+        if Self::ELEMENT_SIZE == 0 {
             // Since we return a capacity of `usize::MAX` when `elem_size` is
             // 0, getting to here necessarily means the `RawVec` is overfull.
             return Err(CapacityOverflow);
@@ -416,21 +431,7 @@ impl<T, A: AllocRef> RawVec<T, A> {
         // because `cap <= isize::MAX` and the type of `cap` is `usize`.
         let cap = cmp::max(self.cap * 2, required_cap);

-        // Tiny Vecs are dumb. Skip to:
-        // - 8 if the element size is 1, because any heap allocators is likely
-        //   to round up a request of less than 8 bytes to at least 8 bytes.
-        // - 4 if elements are moderate-sized (<= 1 KiB).
-        // - 1 otherwise, to avoid wasting too much space for very short Vecs.
-        // Note that `min_non_zero_cap` is computed statically.
-        let elem_size = mem::size_of::<T>();
-        let min_non_zero_cap = if elem_size == 1 {
-            8
-        } else if elem_size <= 1024 {
-            4
-        } else {
-            1
-        };
-        let cap = cmp::max(min_non_zero_cap, cap);
+        let cap = cmp::max(Self::MIN_NON_ZERO_CAP, cap);

         let new_layout = Layout::array::<T>(cap);

@@ -444,7 +445,7 @@ impl<T, A: AllocRef> RawVec<T, A> {
     // `grow_amortized`, but this method is usually instantiated less often so
     // it's less critical.
     fn grow_exact(&mut self, len: usize, additional: usize) -> Result<(), TryReserveError> {
-        if mem::size_of::<T>() == 0 {
+        if Self::ELEMENT_SIZE == 0 {
             // Since we return a capacity of `usize::MAX` when the type size is
             // 0, getting to here necessarily means the `RawVec` is overfull.
             return Err(CapacityOverflow);
@@ -463,7 +464,7 @@ impl<T, A: AllocRef> RawVec<T, A> {
         assert!(amount <= self.capacity(), "Tried to shrink to a larger capacity");

         let (ptr, layout) = if let Some(mem) = self.current_memory() { mem } else { return Ok(()) };
-        let new_size = amount * mem::size_of::<T>();
+        let new_size = amount * Self::ELEMENT_SIZE;

         let ptr = unsafe {
             let new_layout = Layout::from_size_align_unchecked(new_size, layout.align());
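
The change follows one pattern throughout: values that depend only on `T` are hoisted out of the method bodies into associated constants (`ELEMENT_SIZE`, `MIN_NON_ZERO_CAP`) on the generic impl, so each monomorphized copy of these functions carries a pre-folded integer instead of repeating the `mem::size_of::<T>()` expression and the branchy minimum-capacity calculation, which trims the LLVM IR emitted per instantiation. Below is a minimal sketch of the same pattern on a hypothetical `RawBuf` type; the type, its fields, and `grow_hint` are illustrative only and not part of the actual `raw_vec.rs` code.

use std::marker::PhantomData;
use std::mem;

// Hypothetical stand-in for a RawVec-like buffer; not the real type.
struct RawBuf<T> {
    cap: usize,
    _marker: PhantomData<T>,
}

impl<T> RawBuf<T> {
    // Evaluated once per monomorphization; every use below is already a
    // folded integer by the time LLVM sees it.
    const ELEMENT_SIZE: usize = mem::size_of::<T>();

    // Same shape as the MIN_NON_ZERO_CAP constant added in the diff above.
    const MIN_NON_ZERO_CAP: usize = if Self::ELEMENT_SIZE == 1 {
        8
    } else if Self::ELEMENT_SIZE <= 1024 {
        4
    } else {
        1
    };

    // Toy growth logic: the zero-size branch and the minimum-capacity
    // clamp both reduce to constants for any concrete T.
    fn grow_hint(&self, required: usize) -> usize {
        if Self::ELEMENT_SIZE == 0 {
            usize::MAX
        } else {
            required.max(self.cap * 2).max(Self::MIN_NON_ZERO_CAP)
        }
    }
}

fn main() {
    let buf = RawBuf::<u8> { cap: 1, _marker: PhantomData };
    // For u8, ELEMENT_SIZE is 1 and MIN_NON_ZERO_CAP is 8, so the hint
    // never drops below 8.
    assert_eq!(buf.grow_hint(2), 8);
}

One rough way to observe the effect (not something this commit documents) is to compile a small crate that instantiates such a type at several element types with rustc's `--emit=llvm-ir` flag and compare the size of the generated `.ll` output before and after hoisting the constants.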
