Skip to content

Commit c666287

Browse files
authored
Rollup merge of rust-lang#138040 - thaliaarchi:use-prelude-size-of.compiler, r=compiler-errors
compiler: Use `size_of` from the prelude instead of imported. Use `std::mem::{size_of, size_of_val, align_of, align_of_val}` from the prelude instead of importing or qualifying them, and apply this change across the compiler. These functions were added to all preludes in Rust 1.80. r? ``@compiler-errors``
2 parents cdd97ba + 38fad98 commit c666287

File tree

30 files changed

+64
-74
lines changed

30 files changed

+64
-74
lines changed

compiler/rustc_arena/src/lib.rs

+13-13
Original file line numberDiff line numberDiff line change
@@ -93,7 +93,7 @@ impl<T> ArenaChunk<T> {
9393
#[inline]
9494
fn end(&mut self) -> *mut T {
9595
unsafe {
96-
if mem::size_of::<T>() == 0 {
96+
if size_of::<T>() == 0 {
9797
// A pointer as large as possible for zero-sized elements.
9898
ptr::without_provenance_mut(!0)
9999
} else {
@@ -151,7 +151,7 @@ impl<T> TypedArena<T> {
151151
}
152152

153153
unsafe {
154-
if mem::size_of::<T>() == 0 {
154+
if size_of::<T>() == 0 {
155155
self.ptr.set(self.ptr.get().wrapping_byte_add(1));
156156
let ptr = ptr::NonNull::<T>::dangling().as_ptr();
157157
// Don't drop the object. This `write` is equivalent to `forget`.
@@ -173,13 +173,13 @@ impl<T> TypedArena<T> {
173173
// FIXME: this should *likely* use `offset_from`, but more
174174
// investigation is needed (including running tests in miri).
175175
let available_bytes = self.end.get().addr() - self.ptr.get().addr();
176-
let additional_bytes = additional.checked_mul(mem::size_of::<T>()).unwrap();
176+
let additional_bytes = additional.checked_mul(size_of::<T>()).unwrap();
177177
available_bytes >= additional_bytes
178178
}
179179

180180
#[inline]
181181
fn alloc_raw_slice(&self, len: usize) -> *mut T {
182-
assert!(mem::size_of::<T>() != 0);
182+
assert!(size_of::<T>() != 0);
183183
assert!(len != 0);
184184

185185
// Ensure the current chunk can fit `len` objects.
@@ -213,7 +213,7 @@ impl<T> TypedArena<T> {
213213
// So we collect all the elements beforehand, which takes care of reentrancy and panic
214214
// safety. This function is much less hot than `DroplessArena::alloc_from_iter`, so it
215215
// doesn't need to be hyper-optimized.
216-
assert!(mem::size_of::<T>() != 0);
216+
assert!(size_of::<T>() != 0);
217217

218218
let mut vec: SmallVec<[_; 8]> = iter.into_iter().collect();
219219
if vec.is_empty() {
@@ -236,7 +236,7 @@ impl<T> TypedArena<T> {
236236
unsafe {
237237
// We need the element size to convert chunk sizes (ranging from
238238
// PAGE to HUGE_PAGE bytes) to element counts.
239-
let elem_size = cmp::max(1, mem::size_of::<T>());
239+
let elem_size = cmp::max(1, size_of::<T>());
240240
let mut chunks = self.chunks.borrow_mut();
241241
let mut new_cap;
242242
if let Some(last_chunk) = chunks.last_mut() {
@@ -246,7 +246,7 @@ impl<T> TypedArena<T> {
246246
// FIXME: this should *likely* use `offset_from`, but more
247247
// investigation is needed (including running tests in miri).
248248
let used_bytes = self.ptr.get().addr() - last_chunk.start().addr();
249-
last_chunk.entries = used_bytes / mem::size_of::<T>();
249+
last_chunk.entries = used_bytes / size_of::<T>();
250250
}
251251

252252
// If the previous chunk's len is less than HUGE_PAGE
@@ -276,15 +276,15 @@ impl<T> TypedArena<T> {
276276
let end = self.ptr.get().addr();
277277
// We then calculate the number of elements to be dropped in the last chunk,
278278
// which is the filled area's length.
279-
let diff = if mem::size_of::<T>() == 0 {
279+
let diff = if size_of::<T>() == 0 {
280280
// `T` is ZST. It can't have a drop flag, so the value here doesn't matter. We get
281281
// the number of zero-sized values in the last and only chunk, just out of caution.
282282
// Recall that `end` was incremented for each allocated value.
283283
end - start
284284
} else {
285285
// FIXME: this should *likely* use `offset_from`, but more
286286
// investigation is needed (including running tests in miri).
287-
(end - start) / mem::size_of::<T>()
287+
(end - start) / size_of::<T>()
288288
};
289289
// Pass that to the `destroy` method.
290290
unsafe {
@@ -329,7 +329,7 @@ fn align_up(val: usize, align: usize) -> usize {
329329

330330
// Pointer alignment is common in compiler types, so keep `DroplessArena` aligned to them
331331
// to optimize away alignment code.
332-
const DROPLESS_ALIGNMENT: usize = mem::align_of::<usize>();
332+
const DROPLESS_ALIGNMENT: usize = align_of::<usize>();
333333

334334
/// An arena that can hold objects of multiple different types that impl `Copy`
335335
/// and/or satisfy `!mem::needs_drop`.
@@ -447,7 +447,7 @@ impl DroplessArena {
447447
#[inline]
448448
pub fn alloc<T>(&self, object: T) -> &mut T {
449449
assert!(!mem::needs_drop::<T>());
450-
assert!(mem::size_of::<T>() != 0);
450+
assert!(size_of::<T>() != 0);
451451

452452
let mem = self.alloc_raw(Layout::new::<T>()) as *mut T;
453453

@@ -471,7 +471,7 @@ impl DroplessArena {
471471
T: Copy,
472472
{
473473
assert!(!mem::needs_drop::<T>());
474-
assert!(mem::size_of::<T>() != 0);
474+
assert!(size_of::<T>() != 0);
475475
assert!(!slice.is_empty());
476476

477477
let mem = self.alloc_raw(Layout::for_value::<[T]>(slice)) as *mut T;
@@ -546,7 +546,7 @@ impl DroplessArena {
546546
// Warning: this function is reentrant: `iter` could hold a reference to `&self` and
547547
// allocate additional elements while we're iterating.
548548
let iter = iter.into_iter();
549-
assert!(mem::size_of::<T>() != 0);
549+
assert!(size_of::<T>() != 0);
550550
assert!(!mem::needs_drop::<T>());
551551

552552
let size_hint = iter.size_hint();

compiler/rustc_codegen_gcc/src/builder.rs

+1-5
Original file line numberDiff line numberDiff line change
@@ -2439,9 +2439,5 @@ fn get_maybe_pointer_size(value: RValue<'_>) -> u32 {
24392439
#[cfg(not(feature = "master"))]
24402440
fn get_maybe_pointer_size(value: RValue<'_>) -> u32 {
24412441
let type_ = value.get_type();
2442-
if type_.get_pointee().is_some() {
2443-
std::mem::size_of::<*const ()>() as _
2444-
} else {
2445-
type_.get_size()
2446-
}
2442+
if type_.get_pointee().is_some() { size_of::<*const ()>() as _ } else { type_.get_size() }
24472443
}

compiler/rustc_codegen_ssa/src/back/link.rs

+1-1
Original file line numberDiff line numberDiff line change
@@ -1177,7 +1177,7 @@ mod win {
11771177
let mut cp: u32 = 0;
11781178
// We're using the `LOCALE_RETURN_NUMBER` flag to return a u32.
11791179
// But the API requires us to pass the data as though it's a [u16] string.
1180-
let len = std::mem::size_of::<u32>() / std::mem::size_of::<u16>();
1180+
let len = size_of::<u32>() / size_of::<u16>();
11811181
let data = std::slice::from_raw_parts_mut(&mut cp as *mut u32 as *mut u16, len);
11821182
let len_written = GetLocaleInfoEx(
11831183
LOCALE_NAME_SYSTEM_DEFAULT,

compiler/rustc_data_structures/src/aligned.rs

+3-5
Original file line numberDiff line numberDiff line change
@@ -2,10 +2,8 @@ use std::ptr::Alignment;
22

33
/// Returns the ABI-required minimum alignment of a type in bytes.
44
///
5-
/// This is equivalent to [`mem::align_of`], but also works for some unsized
5+
/// This is equivalent to [`align_of`], but also works for some unsized
66
/// types (e.g. slices or rustc's `List`s).
7-
///
8-
/// [`mem::align_of`]: std::mem::align_of
97
pub const fn align_of<T: ?Sized + Aligned>() -> Alignment {
108
T::ALIGN
119
}
@@ -15,10 +13,10 @@ pub const fn align_of<T: ?Sized + Aligned>() -> Alignment {
1513
/// # Safety
1614
///
1715
/// `Self::ALIGN` must be equal to the alignment of `Self`. For sized types it
18-
/// is [`mem::align_of<Self>()`], for unsized types it depends on the type, for
16+
/// is [`align_of::<Self>()`], for unsized types it depends on the type, for
1917
/// example `[T]` has alignment of `T`.
2018
///
21-
/// [`mem::align_of<Self>()`]: std::mem::align_of
19+
/// [`align_of::<Self>()`]: align_of
2220
pub unsafe trait Aligned {
2321
/// Alignment of `Self`.
2422
const ALIGN: Alignment;

compiler/rustc_data_structures/src/profiling.rs

+2-4
Original file line numberDiff line numberDiff line change
@@ -863,15 +863,13 @@ fn get_thread_id() -> u32 {
863863
cfg_match! {
864864
windows => {
865865
pub fn get_resident_set_size() -> Option<usize> {
866-
use std::mem;
867-
868866
use windows::{
869867
Win32::System::ProcessStatus::{K32GetProcessMemoryInfo, PROCESS_MEMORY_COUNTERS},
870868
Win32::System::Threading::GetCurrentProcess,
871869
};
872870

873871
let mut pmc = PROCESS_MEMORY_COUNTERS::default();
874-
let pmc_size = mem::size_of_val(&pmc);
872+
let pmc_size = size_of_val(&pmc);
875873
unsafe {
876874
K32GetProcessMemoryInfo(
877875
GetCurrentProcess(),
@@ -889,7 +887,7 @@ cfg_match! {
889887
pub fn get_resident_set_size() -> Option<usize> {
890888
use libc::{c_int, c_void, getpid, proc_pidinfo, proc_taskinfo, PROC_PIDTASKINFO};
891889
use std::mem;
892-
const PROC_TASKINFO_SIZE: c_int = mem::size_of::<proc_taskinfo>() as c_int;
890+
const PROC_TASKINFO_SIZE: c_int = size_of::<proc_taskinfo>() as c_int;
893891

894892
unsafe {
895893
let mut info: proc_taskinfo = mem::zeroed();

compiler/rustc_data_structures/src/sharded.rs

+2-2
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
use std::borrow::Borrow;
22
use std::collections::hash_map::RawEntryMut;
33
use std::hash::{Hash, Hasher};
4-
use std::{iter, mem};
4+
use std::iter;
55

66
use either::Either;
77

@@ -221,7 +221,7 @@ pub fn make_hash<K: Hash + ?Sized>(val: &K) -> u64 {
221221
/// consistently for each `Sharded` instance.
222222
#[inline]
223223
fn get_shard_hash(hash: u64) -> usize {
224-
let hash_len = mem::size_of::<usize>();
224+
let hash_len = size_of::<usize>();
225225
// Ignore the top 7 bits as hashbrown uses these and get the next SHARD_BITS highest bits.
226226
// hashbrown also uses the lowest bits, so we can't use those
227227
(hash >> (hash_len * 8 - 7 - SHARD_BITS)) as usize

compiler/rustc_errors/src/diagnostic.rs

+1-1
Original file line numberDiff line numberDiff line change
@@ -490,7 +490,7 @@ pub struct Diag<'a, G: EmissionGuarantee = ErrorGuaranteed> {
490490
// would be bad.
491491
impl<G> !Clone for Diag<'_, G> {}
492492

493-
rustc_data_structures::static_assert_size!(Diag<'_, ()>, 3 * std::mem::size_of::<usize>());
493+
rustc_data_structures::static_assert_size!(Diag<'_, ()>, 3 * size_of::<usize>());
494494

495495
impl<G: EmissionGuarantee> Deref for Diag<'_, G> {
496496
type Target = DiagInner;

compiler/rustc_hir/src/def.rs

+2-2
Original file line numberDiff line numberDiff line change
@@ -435,7 +435,7 @@ pub enum Res<Id = hir::HirId> {
435435
/// mention any generic parameters to allow the following with `min_const_generics`:
436436
/// ```
437437
/// # struct Foo;
438-
/// impl Foo { fn test() -> [u8; std::mem::size_of::<Self>()] { todo!() } }
438+
/// impl Foo { fn test() -> [u8; size_of::<Self>()] { todo!() } }
439439
///
440440
/// struct Bar([u8; baz::<Self>()]);
441441
/// const fn baz<T>() -> usize { 10 }
@@ -445,7 +445,7 @@ pub enum Res<Id = hir::HirId> {
445445
/// compat lint:
446446
/// ```
447447
/// fn foo<T>() {
448-
/// let _bar = [1_u8; std::mem::size_of::<*mut T>()];
448+
/// let _bar = [1_u8; size_of::<*mut T>()];
449449
/// }
450450
/// ```
451451
// FIXME(generic_const_exprs): Remove this bodge once that feature is stable.

compiler/rustc_incremental/src/persist/file_format.rs

+1-1
Original file line numberDiff line numberDiff line change
@@ -123,7 +123,7 @@ pub(crate) fn read_file(
123123

124124
// Check HEADER_FORMAT_VERSION
125125
{
126-
debug_assert!(::std::mem::size_of_val(&HEADER_FORMAT_VERSION) == 2);
126+
debug_assert!(size_of_val(&HEADER_FORMAT_VERSION) == 2);
127127
let mut header_format_version = [0u8; 2];
128128
file.read_exact(&mut header_format_version)?;
129129
let header_format_version =

compiler/rustc_index/src/bit_set.rs

+4-2
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,9 @@
11
use std::marker::PhantomData;
2+
#[cfg(not(feature = "nightly"))]
3+
use std::mem;
24
use std::ops::{BitAnd, BitAndAssign, BitOrAssign, Bound, Not, Range, RangeBounds, Shl};
35
use std::rc::Rc;
4-
use std::{fmt, iter, mem, slice};
6+
use std::{fmt, iter, slice};
57

68
use Chunk::*;
79
#[cfg(feature = "nightly")]
@@ -14,7 +16,7 @@ use crate::{Idx, IndexVec};
1416
mod tests;
1517

1618
type Word = u64;
17-
const WORD_BYTES: usize = mem::size_of::<Word>();
19+
const WORD_BYTES: usize = size_of::<Word>();
1820
const WORD_BITS: usize = WORD_BYTES * 8;
1921

2022
// The choice of chunk size has some trade-offs.

compiler/rustc_index/src/vec/tests.rs

-2
Original file line numberDiff line numberDiff line change
@@ -9,8 +9,6 @@ crate::newtype_index! {
99

1010
#[test]
1111
fn index_size_is_optimized() {
12-
use std::mem::size_of;
13-
1412
assert_eq!(size_of::<MyIdx>(), 4);
1513
// Uses 0xFFFF_FFFB
1614
assert_eq!(size_of::<Option<MyIdx>>(), 4);

compiler/rustc_lint_defs/src/builtin.rs

+1-1
Original file line numberDiff line numberDiff line change
@@ -2673,7 +2673,7 @@ declare_lint! {
26732673
///
26742674
/// ```rust
26752675
/// const fn foo<T>() -> usize {
2676-
/// if std::mem::size_of::<*mut T>() < 8 { // size of *mut T does not depend on T
2676+
/// if size_of::<*mut T>() < 8 { // size of *mut T does not depend on T
26772677
/// 4
26782678
/// } else {
26792679
/// 8

compiler/rustc_middle/src/mir/interpret/allocation/init_mask.rs

+2-2
Original file line numberDiff line numberDiff line change
@@ -223,8 +223,8 @@ impl<D: TyDecoder> Decodable<D> for InitMaskMaterialized {
223223
// large.
224224
impl hash::Hash for InitMaskMaterialized {
225225
fn hash<H: hash::Hasher>(&self, state: &mut H) {
226-
const MAX_BLOCKS_TO_HASH: usize = super::MAX_BYTES_TO_HASH / std::mem::size_of::<Block>();
227-
const MAX_BLOCKS_LEN: usize = super::MAX_HASHED_BUFFER_LEN / std::mem::size_of::<Block>();
226+
const MAX_BLOCKS_TO_HASH: usize = super::MAX_BYTES_TO_HASH / size_of::<Block>();
227+
const MAX_BLOCKS_LEN: usize = super::MAX_HASHED_BUFFER_LEN / size_of::<Block>();
228228

229229
// Partially hash the `blocks` buffer when it is large. To limit collisions with common
230230
// prefixes and suffixes, we hash the length and some slices of the buffer.

compiler/rustc_middle/src/mir/interpret/mod.rs

+1-1
Original file line numberDiff line numberDiff line change
@@ -573,7 +573,7 @@ pub fn write_target_uint(
573573
#[inline]
574574
pub fn read_target_uint(endianness: Endian, mut source: &[u8]) -> Result<u128, io::Error> {
575575
// This u128 holds an "any-size uint" (since smaller uints can fits in it)
576-
let mut buf = [0u8; std::mem::size_of::<u128>()];
576+
let mut buf = [0u8; size_of::<u128>()];
577577
// So we do not read exactly 16 bytes into the u128, just the "payload".
578578
let uint = match endianness {
579579
Endian::Little => {

compiler/rustc_middle/src/mir/mod.rs

+2-2
Original file line numberDiff line numberDiff line change
@@ -332,13 +332,13 @@ pub struct Body<'tcx> {
332332
///
333333
/// ```rust
334334
/// fn test<T>() {
335-
/// let _ = [0; std::mem::size_of::<*mut T>()];
335+
/// let _ = [0; size_of::<*mut T>()];
336336
/// }
337337
/// ```
338338
///
339339
/// **WARNING**: Do not change this flags after the MIR was originally created, even if an optimization
340340
/// removed the last mention of all generic params. We do not want to rely on optimizations and
341-
/// potentially allow things like `[u8; std::mem::size_of::<T>() * 0]` due to this.
341+
/// potentially allow things like `[u8; size_of::<T>() * 0]` due to this.
342342
pub is_polymorphic: bool,
343343

344344
/// The phase at which this MIR should be "injected" into the compilation process.

compiler/rustc_middle/src/query/erase.rs

+1-1
Original file line numberDiff line numberDiff line change
@@ -27,7 +27,7 @@ pub type Erase<T: EraseType> = Erased<impl Copy>;
2727
pub fn erase<T: EraseType>(src: T) -> Erase<T> {
2828
// Ensure the sizes match
2929
const {
30-
if std::mem::size_of::<T>() != std::mem::size_of::<T::Result>() {
30+
if size_of::<T>() != size_of::<T::Result>() {
3131
panic!("size of T must match erased type T::Result")
3232
}
3333
};

compiler/rustc_middle/src/query/plumbing.rs

+2-2
Original file line numberDiff line numberDiff line change
@@ -370,7 +370,7 @@ macro_rules! define_callbacks {
370370
// Increase this limit if necessary, but do try to keep the size low if possible
371371
#[cfg(target_pointer_width = "64")]
372372
const _: () = {
373-
if mem::size_of::<Key<'static>>() > 88 {
373+
if size_of::<Key<'static>>() > 88 {
374374
panic!("{}", concat!(
375375
"the query `",
376376
stringify!($name),
@@ -386,7 +386,7 @@ macro_rules! define_callbacks {
386386
#[cfg(target_pointer_width = "64")]
387387
#[cfg(not(feature = "rustc_randomized_layouts"))]
388388
const _: () = {
389-
if mem::size_of::<Value<'static>>() > 64 {
389+
if size_of::<Value<'static>>() > 64 {
390390
panic!("{}", concat!(
391391
"the query `",
392392
stringify!($name),

compiler/rustc_middle/src/ty/consts/int.rs

+2-2
Original file line numberDiff line numberDiff line change
@@ -408,7 +408,7 @@ macro_rules! from_x_for_scalar_int {
408408
fn from(u: $ty) -> Self {
409409
Self {
410410
data: u128::from(u),
411-
size: NonZero::new(std::mem::size_of::<$ty>() as u8).unwrap(),
411+
size: NonZero::new(size_of::<$ty>() as u8).unwrap(),
412412
}
413413
}
414414
}
@@ -424,7 +424,7 @@ macro_rules! from_scalar_int_for_x {
424424
fn from(int: ScalarInt) -> Self {
425425
// The `unwrap` cannot fail because to_bits (if it succeeds)
426426
// is guaranteed to return a value that fits into the size.
427-
int.to_bits(Size::from_bytes(std::mem::size_of::<$ty>()))
427+
int.to_bits(Size::from_bytes(size_of::<$ty>()))
428428
.try_into().unwrap()
429429
}
430430
}

compiler/rustc_middle/src/ty/generic_args.rs

+3-4
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,6 @@
22

33
use core::intrinsics;
44
use std::marker::PhantomData;
5-
use std::mem;
65
use std::num::NonZero;
76
use std::ptr::NonNull;
87

@@ -176,17 +175,17 @@ impl<'tcx> GenericArgKind<'tcx> {
176175
let (tag, ptr) = match self {
177176
GenericArgKind::Lifetime(lt) => {
178177
// Ensure we can use the tag bits.
179-
assert_eq!(mem::align_of_val(&*lt.0.0) & TAG_MASK, 0);
178+
assert_eq!(align_of_val(&*lt.0.0) & TAG_MASK, 0);
180179
(REGION_TAG, NonNull::from(lt.0.0).cast())
181180
}
182181
GenericArgKind::Type(ty) => {
183182
// Ensure we can use the tag bits.
184-
assert_eq!(mem::align_of_val(&*ty.0.0) & TAG_MASK, 0);
183+
assert_eq!(align_of_val(&*ty.0.0) & TAG_MASK, 0);
185184
(TYPE_TAG, NonNull::from(ty.0.0).cast())
186185
}
187186
GenericArgKind::Const(ct) => {
188187
// Ensure we can use the tag bits.
189-
assert_eq!(mem::align_of_val(&*ct.0.0) & TAG_MASK, 0);
188+
assert_eq!(align_of_val(&*ct.0.0) & TAG_MASK, 0);
190189
(CONST_TAG, NonNull::from(ct.0.0).cast())
191190
}
192191
};

0 commit comments

Comments (0)