From 07a9c5353320c22596241dd0699f209b4567fc2f Mon Sep 17 00:00:00 2001
From: Huon Wilson
Date: Wed, 20 May 2015 19:18:03 +1000
Subject: [PATCH 1/3] Make `align_of` behave like `min_align_of`.

This removes a footgun, since it is a reasonable assumption to make
that pointers to `T` will be aligned to `align_of::<T>()`. This also
matches the behaviour of C/C++. `min_align_of` is now deprecated.

Closes #21611.
---
 src/liballoc/arc.rs                  |  6 +++---
 src/liballoc/rc.rs                   |  8 ++++----
 src/libarena/lib.rs                  | 12 ++++++------
 src/libcollections/btree/node.rs     | 10 +++++-----
 src/libcollections/vec.rs            | 14 +++++++-------
 src/libcollections/vec_deque.rs      |  8 ++++----
 src/libcore/mem.rs                   | 20 +++++++-------------
 src/libstd/collections/hash/table.rs | 22 +++++++++++-----------
 8 files changed, 47 insertions(+), 53 deletions(-)
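For context, a sketch of the footgun being removed (not part of the patch;
the concrete numbers assume a 32-bit x86 target, where the preferred
alignment of `f64` is 8 but its ABI-required minimum is 4):

    use std::mem;

    fn main() {
        // Before this patch, align_of returned the *preferred* alignment
        // (8 for f64 on 32-bit x86), while struct layout and the
        // allocator only guarantee the ABI *minimum* (4 there), so the
        // natural assumption below could fail. After it, the two agree.
        assert_eq!(mem::align_of::<f64>(), mem::min_align_of::<f64>());

        // The assumption the old `align_of` broke: a reference to T
        // really is aligned to align_of::<T>().
        let x = 1.0_f64;
        assert_eq!(&x as *const f64 as usize % mem::align_of::<f64>(), 0);
    }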
diff --git a/src/liballoc/arc.rs b/src/liballoc/arc.rs
index 7bfeaec36d729..dd9c1d1fd1885 100644
--- a/src/liballoc/arc.rs
+++ b/src/liballoc/arc.rs
@@ -77,7 +77,7 @@ use core::atomic;
 use core::atomic::Ordering::{Relaxed, Release, Acquire, SeqCst};
 use core::fmt;
 use core::cmp::Ordering;
-use core::mem::{min_align_of_val, size_of_val};
+use core::mem::{align_of_val, size_of_val};
 use core::intrinsics::drop_in_place;
 use core::mem;
 use core::nonzero::NonZero;
@@ -241,7 +241,7 @@ impl<T: ?Sized> Arc<T> {
         if self.inner().weak.fetch_sub(1, Release) == 1 {
             atomic::fence(Acquire);
-            deallocate(ptr as *mut u8, size_of_val(&*ptr), min_align_of_val(&*ptr))
+            deallocate(ptr as *mut u8, size_of_val(&*ptr), align_of_val(&*ptr))
         }
     }
 }
@@ -565,7 +565,7 @@ impl<T: ?Sized> Drop for Weak<T> {
             atomic::fence(Acquire);
             unsafe { deallocate(ptr as *mut u8, size_of_val(&*ptr),
-                                min_align_of_val(&*ptr)) }
+                                align_of_val(&*ptr)) }
         }
     }
 }
diff --git a/src/liballoc/rc.rs b/src/liballoc/rc.rs
index d5b6c86ef359a..3dfafd0a378b9 100644
--- a/src/liballoc/rc.rs
+++ b/src/liballoc/rc.rs
@@ -162,7 +162,7 @@ use core::fmt;
 use core::hash::{Hasher, Hash};
 use core::intrinsics::{assume, drop_in_place};
 use core::marker::{self, Unsize};
-use core::mem::{self, min_align_of, size_of, min_align_of_val, size_of_val, forget};
+use core::mem::{self, align_of, size_of, align_of_val, size_of_val, forget};
 use core::nonzero::NonZero;
 use core::ops::{CoerceUnsized, Deref};
 use core::ptr;
@@ -246,7 +246,7 @@ impl<T> Rc<T> {
                 // destruct the box and skip our Drop
                 // we can ignore the refcounts because we know we're unique
                 deallocate(*rc._ptr as *mut u8, size_of::<RcBox<T>>(),
-                           min_align_of::<RcBox<T>>());
+                           align_of::<RcBox<T>>());
                 forget(rc);
                 Ok(val)
             }
@@ -496,7 +496,7 @@ impl<T: ?Sized> Drop for Rc<T> {
                     if self.weak() == 0 {
                         deallocate(ptr as *mut u8,
                                    size_of_val(&*ptr),
-                                   min_align_of_val(&*ptr))
+                                   align_of_val(&*ptr))
                     }
                 }
             }
@@ -805,7 +805,7 @@ impl<T: ?Sized> Drop for Weak<T> {
                 // the strong pointers have disappeared.
                 if self.weak() == 0 {
                     deallocate(ptr as *mut u8, size_of_val(&*ptr),
-                               min_align_of_val(&*ptr))
+                               align_of_val(&*ptr))
                 }
             }
         }
diff --git a/src/libarena/lib.rs b/src/libarena/lib.rs
index 109ad8a942c84..4d064b16ad027 100644
--- a/src/libarena/lib.rs
+++ b/src/libarena/lib.rs
@@ -244,7 +244,7 @@ impl<'longer_than_self> Arena<'longer_than_self> {
     fn alloc_copy<T, F>(&self, op: F) -> &mut T where F: FnOnce() -> T {
         unsafe {
             let ptr = self.alloc_copy_inner(mem::size_of::<T>(),
-                                            mem::min_align_of::<T>());
+                                            mem::align_of::<T>());
             let ptr = ptr as *mut T;
             ptr::write(&mut (*ptr), op());
             return &mut *ptr;
@@ -300,7 +300,7 @@ impl<'longer_than_self> Arena<'longer_than_self> {
         let tydesc = get_tydesc::<T>();
         let (ty_ptr, ptr) = self.alloc_noncopy_inner(mem::size_of::<T>(),
-                                                     mem::min_align_of::<T>());
+                                                     mem::align_of::<T>());
         let ty_ptr = ty_ptr as *mut usize;
         let ptr = ptr as *mut T;
         // Write in our tydesc along with a bit indicating that it
@@ -393,7 +393,7 @@ struct TypedArenaChunk<T> {
 fn calculate_size<T>(capacity: usize) -> usize {
     let mut size = mem::size_of::<TypedArenaChunk<T>>();
-    size = round_up(size, mem::min_align_of::<T>());
+    size = round_up(size, mem::align_of::<T>());
     let elem_size = mem::size_of::<T>();
     let elems_size = elem_size.checked_mul(capacity).unwrap();
     size = size.checked_add(elems_size).unwrap();
@@ -405,7 +405,7 @@ impl<T> TypedArenaChunk<T> {
     unsafe fn new(next: *mut TypedArenaChunk<T>, capacity: usize)
            -> *mut TypedArenaChunk<T> {
         let size = calculate_size::<T>(capacity);
-        let chunk = allocate(size, mem::min_align_of::<TypedArenaChunk<T>>())
+        let chunk = allocate(size, mem::align_of::<TypedArenaChunk<T>>())
                     as *mut TypedArenaChunk<T>;
         if chunk.is_null() { alloc::oom() }
         (*chunk).next = next;
@@ -431,7 +431,7 @@ impl<T> TypedArenaChunk<T> {
             let size = calculate_size::<T>(self.capacity);
             let self_ptr: *mut TypedArenaChunk<T> = self;
             deallocate(self_ptr as *mut u8, size,
-                       mem::min_align_of::<TypedArenaChunk<T>>());
+                       mem::align_of::<TypedArenaChunk<T>>());
             if !next.is_null() {
                 let capacity = (*next).capacity;
                 (*next).destroy(capacity);
@@ -444,7 +444,7 @@ impl<T> TypedArenaChunk<T> {
         let this: *const TypedArenaChunk<T> = self;
         unsafe {
             mem::transmute(round_up(this.offset(1) as usize,
-                                    mem::min_align_of::<T>()))
+                                    mem::align_of::<T>()))
         }
     }
diff --git a/src/libcollections/btree/node.rs b/src/libcollections/btree/node.rs
index 2d8335d373473..4d76a986700a2 100644
--- a/src/libcollections/btree/node.rs
+++ b/src/libcollections/btree/node.rs
@@ -163,12 +163,12 @@ fn test_offset_calculation() {
 }
 
 fn calculate_allocation_generic<K, V>(capacity: usize, is_leaf: bool) -> (usize, usize) {
-    let (keys_size, keys_align) = (capacity * mem::size_of::<K>(), mem::min_align_of::<K>());
-    let (vals_size, vals_align) = (capacity * mem::size_of::<V>(), mem::min_align_of::<V>());
+    let (keys_size, keys_align) = (capacity * mem::size_of::<K>(), mem::align_of::<K>());
+    let (vals_size, vals_align) = (capacity * mem::size_of::<V>(), mem::align_of::<V>());
     let (edges_size, edges_align) = if is_leaf {
         (0, 1)
     } else {
-        ((capacity + 1) * mem::size_of::<Node<K, V>>(), mem::min_align_of::<Node<K, V>>())
+        ((capacity + 1) * mem::size_of::<Node<K, V>>(), mem::align_of::<Node<K, V>>())
     };
 
     calculate_allocation(
@@ -181,11 +181,11 @@ fn calculate_allocation_generic<K, V>(capacity: usize,
 fn calculate_offsets_generic<K, V>(capacity: usize, is_leaf: bool) -> (usize, usize) {
     let keys_size = capacity * mem::size_of::<K>();
     let vals_size = capacity * mem::size_of::<V>();
-    let vals_align = mem::min_align_of::<V>();
+    let vals_align = mem::align_of::<V>();
     let edges_align = if is_leaf {
         1
     } else {
-        mem::min_align_of::<Node<K, V>>()
+        mem::align_of::<Node<K, V>>()
     };
 
     calculate_offsets(
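For readers following the btree changes above (and the hash-table changes
later in this patch), a self-contained sketch of the offset arithmetic
that helpers like `calculate_allocation_generic` rely on, using made-up
key/value sizes: each array starts at the next multiple of its alignment,
and the block as a whole is allocated at the maximum alignment.

    use std::cmp;

    // Round `offset` up to the next multiple of `align` (a power of two).
    fn round_up(offset: usize, align: usize) -> usize {
        (offset + align - 1) & !(align - 1)
    }

    fn main() {
        // Hypothetical node: 3 keys of u64 (size 8, align 8) followed by
        // 3 values of u16 (size 2, align 2).
        let (keys_size, keys_align) = (3 * 8, 8);
        let (vals_size, vals_align) = (3 * 2, 2);

        let vals_offset = round_up(keys_size, vals_align);
        let align = cmp::max(keys_align, vals_align);

        assert_eq!(vals_offset, 24); // values start right after the keys
        assert_eq!((vals_offset + vals_size, align), (30, 8));
    }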
diff --git a/src/libcollections/vec.rs b/src/libcollections/vec.rs
index 54528c50f1d1e..6918668dcaaec 100644
--- a/src/libcollections/vec.rs
+++ b/src/libcollections/vec.rs
@@ -219,7 +219,7 @@ impl<T> Vec<T> {
         } else {
             let size = capacity.checked_mul(mem::size_of::<T>())
                                .expect("capacity overflow");
-            let ptr = unsafe { allocate(size, mem::min_align_of::<T>()) };
+            let ptr = unsafe { allocate(size, mem::align_of::<T>()) };
             if ptr.is_null() { ::alloc::oom() }
             unsafe { Vec::from_raw_parts(ptr as *mut T, 0, capacity) }
         }
@@ -393,7 +393,7 @@ impl<T> Vec<T> {
                 let ptr = reallocate(*self.ptr as *mut u8,
                                      self.cap * mem::size_of::<T>(),
                                      self.len * mem::size_of::<T>(),
-                                     mem::min_align_of::<T>()) as *mut T;
+                                     mem::align_of::<T>()) as *mut T;
                 if ptr.is_null() { ::alloc::oom() }
                 self.ptr = Unique::new(ptr);
             }
@@ -866,9 +866,9 @@ impl<T> Vec<T> {
             // FIXME: Assert statically that the types `T` and `U` have the
             // same minimal alignment in case they are not zero-sized.
 
-            // These asserts are necessary because the `min_align_of` of the
+            // These asserts are necessary because the `align_of` of the
             // types are passed to the allocator by `Vec`.
-            assert!(mem::min_align_of::<T>() == mem::min_align_of::<U>());
+            assert!(mem::align_of::<T>() == mem::align_of::<U>());
 
             // This `as isize` cast is safe, because the size of the elements of the
             // vector is not 0, and:
@@ -1269,9 +1269,9 @@ impl<T> Vec<T> {
 #[inline(never)]
 unsafe fn alloc_or_realloc<T>(ptr: *mut T, old_size: usize, size: usize) -> *mut T {
     if old_size == 0 {
-        allocate(size, mem::min_align_of::<T>()) as *mut T
+        allocate(size, mem::align_of::<T>()) as *mut T
     } else {
-        reallocate(ptr as *mut u8, old_size, size, mem::min_align_of::<T>()) as *mut T
+        reallocate(ptr as *mut u8, old_size, size, mem::align_of::<T>()) as *mut T
     }
 }
 
@@ -1280,7 +1280,7 @@ unsafe fn dealloc<T>(ptr: *mut T, len: usize) {
     if mem::size_of::<T>() != 0 {
         deallocate(ptr as *mut u8,
                    len * mem::size_of::<T>(),
-                   mem::min_align_of::<T>())
+                   mem::align_of::<T>())
     }
 }
diff --git a/src/libcollections/vec_deque.rs b/src/libcollections/vec_deque.rs
index edcd1008747fd..ed47c06e7cd08 100644
--- a/src/libcollections/vec_deque.rs
+++ b/src/libcollections/vec_deque.rs
@@ -67,7 +67,7 @@ impl<T> Drop for VecDeque<T> {
         if mem::size_of::<T>() != 0 {
             heap::deallocate(*self.ptr as *mut u8,
                              self.cap * mem::size_of::<T>(),
-                             mem::min_align_of::<T>())
+                             mem::align_of::<T>())
         }
     }
 }
@@ -172,7 +172,7 @@ impl<T> VecDeque<T> {
         let ptr = unsafe {
             if mem::size_of::<T>() != 0 {
-                let ptr = heap::allocate(size, mem::min_align_of::<T>()) as *mut T;;
+                let ptr = heap::allocate(size, mem::align_of::<T>()) as *mut T;;
                 if ptr.is_null() { ::alloc::oom() }
                 Unique::new(ptr)
             } else {
@@ -340,7 +340,7 @@ impl<T> VecDeque<T> {
                 let ptr = heap::reallocate(*self.ptr as *mut u8,
                                            old,
                                            new,
-                                           mem::min_align_of::<T>()) as *mut T;
+                                           mem::align_of::<T>()) as *mut T;
                 if ptr.is_null() { ::alloc::oom() }
                 self.ptr = Unique::new(ptr);
             }
@@ -460,7 +460,7 @@ impl<T> VecDeque<T> {
                     let ptr = heap::reallocate(*self.ptr as *mut u8,
                                                old,
                                                new_size,
-                                               mem::min_align_of::<T>()) as *mut T;
+                                               mem::align_of::<T>()) as *mut T;
                     if ptr.is_null() { ::alloc::oom() }
                     self.ptr = Unique::new(ptr);
                 }
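One invariant worth spelling out before the `mem.rs` diff: `deallocate`
must be passed the same size and alignment as the matching `allocate`,
which is why every container switches from `min_align_of` to `align_of`
at every call site in the same commit. A minimal sketch against the
era's unstable `alloc::heap` API (the feature-gate names here are an
assumption; the `allocate`/`deallocate`/`oom` signatures are as used in
the diffs above):

    #![feature(alloc, oom)]
    extern crate alloc;

    use std::mem;
    use alloc::heap::{allocate, deallocate};

    // Allocate and free an array of T (n > 0, T not zero-sized),
    // passing the *same* alignment to both calls; mixing align_of and
    // min_align_of across the pair would be undefined behaviour.
    unsafe fn alloc_array<T>(n: usize) -> *mut T {
        let ptr = allocate(n * mem::size_of::<T>(), mem::align_of::<T>());
        if ptr.is_null() { alloc::oom() }
        ptr as *mut T
    }

    unsafe fn dealloc_array<T>(ptr: *mut T, n: usize) {
        // Must mirror alloc_array exactly.
        deallocate(ptr as *mut u8, n * mem::size_of::<T>(), mem::align_of::<T>())
    }

    fn main() {
        unsafe {
            let p = alloc_array::<u64>(4);
            dealloc_array(p, 4);
        }
    }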
instead", since = "1.1.0")] pub fn min_align_of_val(val: &T) -> usize { unsafe { intrinsics::min_align_of_val(val) } } /// Returns the alignment in memory for a type. /// -/// This function will return the alignment, in bytes, of a type in memory. If the alignment -/// returned is adhered to, then the type is guaranteed to function properly. +/// This is the alignment used for struct fields. It may be smaller than the preferred alignment. /// /// # Examples /// @@ -189,17 +190,10 @@ pub fn min_align_of_val(val: &T) -> usize { #[inline] #[stable(feature = "rust1", since = "1.0.0")] pub fn align_of() -> usize { - // We use the preferred alignment as the default alignment for a type. This - // appears to be what clang migrated towards as well: - // - // http://lists.cs.uiuc.edu/pipermail/cfe-commits/Week-of-Mon-20110725/044411.html - unsafe { intrinsics::pref_align_of::() } + unsafe { intrinsics::min_align_of::() } } -/// Returns the alignment of the type of the value that `_val` points to. -/// -/// This is similar to `align_of`, but function will properly handle types such as trait objects -/// (in the future), returning the alignment for an arbitrary value at runtime. +/// Returns the ABI-required minimum alignment of the type of the value that `val` points to /// /// # Examples /// @@ -210,8 +204,8 @@ pub fn align_of() -> usize { /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] -pub fn align_of_val(_val: &T) -> usize { - align_of::() +pub fn align_of_val(val: &T) -> usize { + unsafe { intrinsics::min_align_of_val(val) } } /// Creates a value initialized to zero. diff --git a/src/libstd/collections/hash/table.rs b/src/libstd/collections/hash/table.rs index 2616bc5278589..349462aebe31b 100644 --- a/src/libstd/collections/hash/table.rs +++ b/src/libstd/collections/hash/table.rs @@ -15,7 +15,7 @@ use cmp; use hash::{Hash, Hasher}; use iter::{Iterator, ExactSizeIterator}; use marker::{Copy, Send, Sync, Sized, self}; -use mem::{min_align_of, size_of}; +use mem::{align_of, size_of}; use mem; use num::wrapping::OverflowingOps; use ops::{Deref, DerefMut, Drop}; @@ -553,9 +553,9 @@ fn calculate_allocation(hash_size: usize, hash_align: usize, vals_align); let (end_of_vals, oflo2) = vals_offset.overflowing_add(vals_size); - let min_align = cmp::max(hash_align, cmp::max(keys_align, vals_align)); + let align = cmp::max(hash_align, cmp::max(keys_align, vals_align)); - (min_align, hash_offset, end_of_vals, oflo || oflo2) + (align, hash_offset, end_of_vals, oflo || oflo2) } #[test] @@ -597,9 +597,9 @@ impl RawTable { // factored out into a different function. 
diff --git a/src/libstd/collections/hash/table.rs b/src/libstd/collections/hash/table.rs
index 2616bc5278589..349462aebe31b 100644
--- a/src/libstd/collections/hash/table.rs
+++ b/src/libstd/collections/hash/table.rs
@@ -15,7 +15,7 @@ use cmp;
 use hash::{Hash, Hasher};
 use iter::{Iterator, ExactSizeIterator};
 use marker::{Copy, Send, Sync, Sized, self};
-use mem::{min_align_of, size_of};
+use mem::{align_of, size_of};
 use mem;
 use num::wrapping::OverflowingOps;
 use ops::{Deref, DerefMut, Drop};
@@ -553,9 +553,9 @@ fn calculate_allocation(hash_size: usize, hash_align: usize,
                                               vals_align);
     let (end_of_vals, oflo2) = vals_offset.overflowing_add(vals_size);
 
-    let min_align = cmp::max(hash_align, cmp::max(keys_align, vals_align));
+    let align = cmp::max(hash_align, cmp::max(keys_align, vals_align));
 
-    (min_align, hash_offset, end_of_vals, oflo || oflo2)
+    (align, hash_offset, end_of_vals, oflo || oflo2)
 }
 
 #[test]
@@ -597,9 +597,9 @@ impl<K, V> RawTable<K, V> {
         //   factored out into a different function.
         let (malloc_alignment, hash_offset, size, oflo) =
             calculate_allocation(
-                hashes_size, min_align_of::<u64>(),
-                keys_size,   min_align_of::< K >(),
-                vals_size,   min_align_of::< V >());
+                hashes_size, align_of::<u64>(),
+                keys_size,   align_of::< K >(),
+                vals_size,   align_of::< V >());
 
         assert!(!oflo, "capacity overflow");
@@ -630,8 +630,8 @@ impl<K, V> RawTable<K, V> {
         let buffer = *self.hashes as *mut u8;
         let (keys_offset, vals_offset, oflo) =
             calculate_offsets(hashes_size,
-                              keys_size, min_align_of::<K>(),
-                              min_align_of::<V>());
+                              keys_size, align_of::<K>(),
+                              align_of::<V>());
         debug_assert!(!oflo, "capacity overflow");
         unsafe {
             RawBucket {
@@ -1005,9 +1005,9 @@ impl<K, V> Drop for RawTable<K, V> {
         let keys_size = self.capacity * size_of::<K>();
         let vals_size = self.capacity * size_of::<V>();
         let (align, _, size, oflo) =
-            calculate_allocation(hashes_size, min_align_of::<u64>(),
-                                 keys_size, min_align_of::<K>(),
-                                 vals_size, min_align_of::<V>());
+            calculate_allocation(hashes_size, align_of::<u64>(),
+                                 keys_size, align_of::<K>(),
+                                 vals_size, align_of::<V>());
 
         debug_assert!(!oflo, "should be impossible");

From 8d26dedecb16cc97a1d9897ce7afc99de63e7815 Mon Sep 17 00:00:00 2001
From: Steven Fackler
Date: Wed, 24 Jun 2015 22:47:56 -0700
Subject: [PATCH 2/3] Avoid overflow in Vec::from_iter

Closes #26550
---
 src/libcollections/vec.rs | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/src/libcollections/vec.rs b/src/libcollections/vec.rs
index 6918668dcaaec..4ea26509fd9e8 100644
--- a/src/libcollections/vec.rs
+++ b/src/libcollections/vec.rs
@@ -1482,7 +1482,7 @@ impl<T> FromIterator<T> for Vec<T> {
             None => return Vec::new(),
             Some(element) => {
                 let (lower, _) = iterator.size_hint();
-                let mut vector = Vec::with_capacity(1 + lower);
+                let mut vector = Vec::with_capacity(lower.saturating_add(1));
                 unsafe {
                     ptr::write(vector.get_unchecked_mut(0), element);
                     vector.set_len(1);
@@ -1570,10 +1570,11 @@ impl<T> Vec<T> {
             let len = self.len();
             if len == self.capacity() {
                 let (lower, _) = iterator.size_hint();
-                self.reserve(lower + 1);
+                self.reserve(lower.saturating_add(1));
             }
             unsafe {
                 ptr::write(self.get_unchecked_mut(len), element);
+                // NB can't overflow since we would have had to alloc the address space
                 self.set_len(len + 1);
             }
         }

From 22946db133ae4713224bf9d655d31a2bcae94989 Mon Sep 17 00:00:00 2001
From: Huon Wilson
Date: Fri, 26 Jun 2015 15:00:31 -0700
Subject: [PATCH 3/3] core: fix deprecation since version of align_of_min.

These will first be deprecated in 1.2.0, not 1.1.0.
---
 src/libcore/mem.rs | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/libcore/mem.rs b/src/libcore/mem.rs
index 4177e0666e3a5..b53b61e517397 100644
--- a/src/libcore/mem.rs
+++ b/src/libcore/mem.rs
@@ -155,7 +155,7 @@ pub fn size_of_val<T: ?Sized>(val: &T) -> usize {
 /// ```
 #[inline]
 #[stable(feature = "rust1", since = "1.0.0")]
-#[deprecated(reason = "use `align_of` instead", since = "1.1.0")]
+#[deprecated(reason = "use `align_of` instead", since = "1.2.0")]
 pub fn min_align_of<T>() -> usize {
     unsafe { intrinsics::min_align_of::<T>() }
 }
@@ -171,7 +171,7 @@ pub fn min_align_of<T>() -> usize {
 /// ```
 #[inline]
 #[stable(feature = "rust1", since = "1.0.0")]
-#[deprecated(reason = "use `align_of_val` instead", since = "1.1.0")]
+#[deprecated(reason = "use `align_of_val` instead", since = "1.2.0")]
 pub fn min_align_of_val<T: ?Sized>(val: &T) -> usize {
     unsafe { intrinsics::min_align_of_val(val) }
 }
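A closing note on the second patch: `size_hint()` may legally report a
lower bound as large as `usize::MAX`, so computing `1 + lower` can
overflow. A minimal sketch of the difference the fix makes:

    fn main() {
        let lower = std::usize::MAX; // a legal size_hint() lower bound
        // `1 + lower` panics in debug builds and wraps to 0 in release
        // builds, silently requesting a tiny allocation:
        assert_eq!(lower.wrapping_add(1), 0);
        // saturating_add pins the request at usize::MAX instead, so
        // with_capacity/reserve fail cleanly with "capacity overflow":
        assert_eq!(lower.saturating_add(1), std::usize::MAX);
    }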