Remove explicit use of core::mem:: functions
marxin committed Nov 24, 2024
1 parent a39b88a commit e14834b
Showing 16 changed files with 85 additions and 81 deletions.
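
In short, the commit drops the `core::mem::` path prefix wherever the function is re-exported from the standard prelude (Rust 1.80 added `size_of`, `size_of_val`, `align_of`, and `align_of_val` to the prelude), and adds an explicit `use std::mem::...` import where the item is not in the prelude (`offset_of!` in eh_frame_diff.rs, `replace` in grouping.rs). The sketch below only illustrates that pattern; the `Header` struct and `main` function are invented for the example and are not taken from the wild codebase.

```rust
// Illustrative sketch only: `Header` and `main` are made up for this example.
use std::mem::{offset_of, replace};

#[allow(dead_code)]
#[repr(C)]
struct Header {
    version: u8,
    length: u32,
}

fn main() {
    // Before this commit: core::mem::size_of::<Header>()
    // After:              size_of::<Header>()   (in the prelude since Rust 1.80)
    let header_size = size_of::<Header>();

    // `offset_of!` is not in the prelude, hence the new `use std::mem::offset_of;`.
    let length_offset = offset_of!(Header, length);

    // `replace` is not in the prelude either, hence the new `use std::mem::replace;`;
    // it swaps in a new value and returns the old one.
    let mut group = vec![1_u32, 2, 3];
    let finished_group = replace(&mut group, Vec::new());

    println!("size={header_size} offset={length_offset} previous={finished_group:?}");
}
```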
6 changes: 3 additions & 3 deletions linker-diff/src/asm_diff.rs
@@ -1332,7 +1332,7 @@ impl<'data> AddressIndex<'data> {
let phoff = header.e_phoff.get(e);
let phnum = header.e_phnum.get(e);
let file_header_size =
- core::mem::size_of::<object::elf::FileHeader64<LittleEndian>>() as u64;
+ size_of::<object::elf::FileHeader64<LittleEndian>>() as u64;
for raw_seg in elf_file.elf_program_headers() {
if raw_seg.p_type(e) != object::elf::PT_LOAD {
continue;
@@ -1347,7 +1347,7 @@ impl<'data> AddressIndex<'data> {
if file_range.contains(&phoff) {
let mem_start = phoff - file_offset + seg_address;
let byte_len = u64::from(phnum)
- * core::mem::size_of::<object::elf::ProgramHeader64<LittleEndian>>() as u64;
+ * size_of::<object::elf::ProgramHeader64<LittleEndian>>() as u64;
self.program_header_addresses = mem_start..(mem_start + byte_len);
}
}
@@ -1364,7 +1364,7 @@ impl<'data> AddressIndex<'data> {
return Ok(());
};
let data = got.data()?;
- let entry_size = core::mem::size_of::<u64>();
+ let entry_size = size_of::<u64>();
let entries: &[u64] = object::slice_from_bytes(data, data.len() / entry_size)
.unwrap()
.0;
12 changes: 6 additions & 6 deletions linker-diff/src/eh_frame_diff.rs
@@ -15,6 +15,7 @@ use object::ObjectSymbol;
use object::SymbolKind;
use std::collections::HashMap;
use std::collections::HashSet;
+ use std::mem::offset_of;

pub(crate) fn report_diffs(report: &mut crate::Report, objects: &[crate::Object]) {
report.add_diffs(crate::header_diff::diff_fields(
@@ -45,9 +46,8 @@ fn read_eh_frame_hdr_fields(object: &crate::Object) -> Result<FieldValues> {
}

let data = section.data()?;
- let header: &EhFrameHdr = bytemuck::from_bytes(&data[..core::mem::size_of::<EhFrameHdr>()]);
- let header_entries: &[EhFrameHdrEntry] =
-     bytemuck::cast_slice(&data[core::mem::size_of::<EhFrameHdr>()..]);
+ let header: &EhFrameHdr = bytemuck::from_bytes(&data[..size_of::<EhFrameHdr>()]);
+ let header_entries: &[EhFrameHdrEntry] = bytemuck::cast_slice(&data[size_of::<EhFrameHdr>()..]);

values.insert("version", header.version, Converter::None, object);
values.insert(
@@ -71,7 +71,7 @@ fn read_eh_frame_hdr_fields(object: &crate::Object) -> Result<FieldValues> {
values.insert(
"frame_pointer",
(address1 as i64 + i64::from(header.frame_pointer)) as u64
- + core::mem::offset_of!(EhFrameHdr, frame_pointer) as u64,
+ + offset_of!(EhFrameHdr, frame_pointer) as u64,
Converter::SectionAddress,
object,
);
@@ -117,7 +117,7 @@ fn verify_frames(
let eh_frame_base = eh_frame_section.address();
let eh_frame_data = eh_frame_section.data()?;
let mut offset = 0;
- const PREFIX_LEN: usize = core::mem::size_of::<EhFrameEntryPrefix>();
+ const PREFIX_LEN: usize = size_of::<EhFrameEntryPrefix>();
while offset + PREFIX_LEN <= eh_frame_data.len() {
let prefix: EhFrameEntryPrefix =
bytemuck::pod_read_unaligned(&eh_frame_data[offset..offset + PREFIX_LEN]);
@@ -139,7 +139,7 @@ fn verify_frames(
);
}
}
- offset += core::mem::size_of_val(&prefix.length) + prefix.length as usize;
+ offset += size_of_val(&prefix.length) + prefix.length as usize;
}

// TODO: Enable this or clean it it up.
2 changes: 1 addition & 1 deletion linker-diff/src/gnu_hash.rs
@@ -123,7 +123,7 @@ fn lookup_symbol(
let e = LittleEndian;
let symbol_base = header.symbol_base.get(e) as usize;
let hash = object::elf::gnu_hash(sym_name);
- let elf_class_bits = core::mem::size_of::<u64>() as u32 * 8;
+ let elf_class_bits = size_of::<u64>() as u32 * 8;
let bloom_shift = header.bloom_shift.get(e);
let bloom_count = bloom_values.len() as u32;
let bucket_count = buckets.len() as u32;
2 changes: 1 addition & 1 deletion linker-diff/src/lib.rs
@@ -662,7 +662,7 @@ impl<'data> NameIndex<'data> {
}

fn slice_from_all_bytes<T: object::Pod>(data: &[u8]) -> &[T] {
- object::slice_from_bytes(data, data.len() / core::mem::size_of::<T>())
+ object::slice_from_bytes(data, data.len() / size_of::<T>())
.unwrap()
.0
}
4 changes: 2 additions & 2 deletions wild_lib/src/archive.rs
@@ -69,10 +69,10 @@ struct EntryHeader {
}

const _ASSERTS: () = {
- assert!(core::mem::size_of::<EntryHeader>() == 60);
+ assert!(size_of::<EntryHeader>() == 60);
};

- const HEADER_SIZE: usize = core::mem::size_of::<EntryHeader>();
+ const HEADER_SIZE: usize = size_of::<EntryHeader>();

impl<'data> ArchiveIterator<'data> {
/// Create an iterator from the bytes of the whole archive. The supplied bytes should start with
14 changes: 7 additions & 7 deletions wild_lib/src/elf.rs
@@ -221,7 +221,7 @@ impl<'data> File<'data> {
return get_entries(
self.data,
header.p_offset(e) as usize,
- header.p_filesz(e) as usize / core::mem::size_of::<DynamicEntry>(),
+ header.p_filesz(e) as usize / size_of::<DynamicEntry>(),
)
.context("Failed to read dynamic table");
}
@@ -261,7 +261,7 @@ pub(crate) fn get_entries<T: object::Pod>(
offset: usize,
entry_count: usize,
) -> Result<&[T]> {
- debug_assert_eq!(core::mem::align_of::<T>(), 1);
+ debug_assert_eq!(align_of::<T>(), 1);
if offset >= data.len() {
bail!("Invalid offset 0x{offset}");
}
@@ -270,7 +270,7 @@
anyhow!(
"Tried to extract 0x{:x} entries of size 0x{:x} from 0x{:x}",
entry_count,
- core::mem::size_of::<T>(),
+ size_of::<T>(),
data.len(),
)
})?
@@ -351,14 +351,14 @@ pub(crate) const FILE_HEADER_SIZE: u16 = 0x40;
pub(crate) const PROGRAM_HEADER_SIZE: u16 = 0x38;
pub(crate) const SECTION_HEADER_SIZE: u16 = 0x40;
pub(crate) const COMPRESSION_HEADER_SIZE: usize =
- core::mem::size_of::<object::elf::CompressionHeader64<LittleEndian>>();
+ size_of::<object::elf::CompressionHeader64<LittleEndian>>();

pub(crate) const GOT_ENTRY_SIZE: u64 = 0x8;
pub(crate) const PLT_ENTRY_SIZE: u64 = PLT_ENTRY_TEMPLATE.len() as u64;
pub(crate) const RELA_ENTRY_SIZE: u64 = 0x18;

- pub(crate) const SYMTAB_ENTRY_SIZE: u64 = core::mem::size_of::<SymtabEntry>() as u64;
- pub(crate) const GNU_VERSION_ENTRY_SIZE: u64 = core::mem::size_of::<Versym>() as u64;
+ pub(crate) const SYMTAB_ENTRY_SIZE: u64 = size_of::<SymtabEntry>() as u64;
+ pub(crate) const GNU_VERSION_ENTRY_SIZE: u64 = size_of::<Versym>() as u64;

pub(crate) const PLT_ENTRY_TEMPLATE: &[u8] = &[
0xf3, 0x0f, 0x1e, 0xfa, // endbr64
@@ -473,7 +473,7 @@ impl RelocationKindInfo {
}

pub(crate) fn slice_from_all_bytes_mut<T: object::Pod>(data: &mut [u8]) -> &mut [T] {
- object::slice_from_bytes_mut(data, data.len() / core::mem::size_of::<T>())
+ object::slice_from_bytes_mut(data, data.len() / size_of::<T>())
.unwrap()
.0
}
34 changes: 17 additions & 17 deletions wild_lib/src/elf_writer.rs
@@ -389,7 +389,7 @@ fn split_output_into_sections<'out, S: StorageModel>(

#[tracing::instrument(skip_all, name = "Sort .eh_frame_hdr")]
fn sort_eh_frame_hdr_entries(eh_frame_hdr: &mut [u8]) {
- let entry_bytes = &mut eh_frame_hdr[core::mem::size_of::<elf::EhFrameHdr>()..];
+ let entry_bytes = &mut eh_frame_hdr[size_of::<elf::EhFrameHdr>()..];
let entries: &mut [elf::EhFrameHdrEntry] = bytemuck::cast_slice_mut(entry_bytes);
entries.sort_by_key(|e| e.frame_ptr);
}
@@ -535,15 +535,15 @@ impl<'out> VersionWriter<'out> {
}

fn take_verneed(&mut self) -> Result<&'out mut Verneed> {
- let bytes = self.take_bytes(core::mem::size_of::<Verneed>())?;
+ let bytes = self.take_bytes(size_of::<Verneed>())?;
Ok(object::from_bytes_mut(bytes)
.map_err(|_| anyhow!("Incorrect .gnu.version_r alignment"))?
.0)
}

fn take_auxes(&mut self, version_count: u16) -> Result<&'out mut [Vernaux]> {
let bytes =
- self.take_bytes(core::mem::size_of::<Vernaux>() * usize::from(version_count))?;
+ self.take_bytes(size_of::<Vernaux>() * usize::from(version_count))?;
object::slice_from_all_bytes_mut::<Vernaux>(bytes)
.map_err(|_| anyhow!("Invalid .gnu.version_r allocation"))
}
@@ -938,7 +938,7 @@ impl<'data, 'layout, 'out> TableWriter<'data, 'layout, 'out> {
fn take_eh_frame_hdr(&mut self) -> &'out mut EhFrameHdr {
let entry_bytes = crate::slice::slice_take_prefix_mut(
&mut self.eh_frame_hdr,
- core::mem::size_of::<EhFrameHdr>(),
+ size_of::<EhFrameHdr>(),
);
bytemuck::from_bytes_mut(entry_bytes)
}
@@ -949,7 +949,7 @@ impl<'data, 'layout, 'out> TableWriter<'data, 'layout, 'out> {
}
let entry_bytes = crate::slice::slice_take_prefix_mut(
&mut self.eh_frame_hdr,
- core::mem::size_of::<EhFrameHdrEntry>(),
+ size_of::<EhFrameHdrEntry>(),
);
Some(bytemuck::from_bytes_mut(entry_bytes))
}
@@ -1420,7 +1420,7 @@ impl<'data> ObjectLayout<'data> {
) -> Result {
let eh_frame_section = self.object.section(eh_frame_section_index)?;
let data = self.object.raw_section_data(eh_frame_section)?;
- const PREFIX_LEN: usize = core::mem::size_of::<elf::EhFrameEntryPrefix>();
+ const PREFIX_LEN: usize = size_of::<elf::EhFrameEntryPrefix>();
let e = LittleEndian;
let section_flags = SectionFlags::from_header(eh_frame_section);
let mut relocations = self
@@ -1439,7 +1439,7 @@ impl<'data> ObjectLayout<'data> {
while input_pos + PREFIX_LEN <= data.len() {
let prefix: elf::EhFrameEntryPrefix =
bytemuck::pod_read_unaligned(&data[input_pos..input_pos + PREFIX_LEN]);
- let size = core::mem::size_of_val(&prefix.length) + prefix.length as usize;
+ let size = size_of_val(&prefix.length) + prefix.length as usize;
let next_input_pos = input_pos + size;
let next_output_pos = output_pos + size;
if next_input_pos > data.len() {
@@ -2075,7 +2075,7 @@ fn write_gnu_hash_tables(

let mut sym_defs = epilogue.dynamic_symbol_definitions.iter().peekable();

- let elf_class_bits = core::mem::size_of::<u64>() as u32 * 8;
+ let elf_class_bits = size_of::<u64>() as u32 * 8;

let mut start_of_chain = true;
for (i, chain_out) in chains.iter_mut().enumerate() {
@@ -2309,8 +2309,8 @@ fn write_eh_frame_hdr<S: StorageModel>(
fn eh_frame_hdr_entry_count<S: StorageModel>(layout: &Layout<S>) -> Result<u32> {
let hdr_sec = layout.section_layouts.get(output_section_id::EH_FRAME_HDR);
u32::try_from(
- (hdr_sec.mem_size - core::mem::size_of::<elf::EhFrameHdr>() as u64)
-     / core::mem::size_of::<elf::EhFrameHdrEntry>() as u64,
+ (hdr_sec.mem_size - size_of::<elf::EhFrameHdr>() as u64)
+     / size_of::<elf::EhFrameHdrEntry>() as u64,
)
.context(".eh_frame_hdr entries overflowed 32 bits")
}
@@ -2371,7 +2371,7 @@ const EPILOGUE_DYNAMIC_ENTRY_WRITERS: &[DynamicEntryWriter] = &[
inputs.vma_of_section(output_section_id::DYNSYM)
}),
DynamicEntryWriter::new(object::elf::DT_SYMENT, |_inputs| {
- core::mem::size_of::<elf::SymtabEntry>() as u64
+ size_of::<elf::SymtabEntry>() as u64
}),
DynamicEntryWriter::optional(
object::elf::DT_VERNEED,
@@ -2449,7 +2449,7 @@ const EPILOGUE_DYNAMIC_ENTRY_WRITERS: &[DynamicEntryWriter] = &[
.section_part_layouts
.get(part_id::RELA_DYN_RELATIVE)
.mem_size
- / core::mem::size_of::<elf::Rela>() as u64
+ / size_of::<elf::Rela>() as u64
}),
DynamicEntryWriter::new(object::elf::DT_GNU_HASH, |inputs| {
inputs.vma_of_section(output_section_id::GNU_HASH)
@@ -2731,7 +2731,7 @@ impl<'data> DynamicLayout<'data> {
let next_verneed_offset = if self.is_last_verneed {
0
} else {
- (core::mem::size_of::<Verneed>()
-     + core::mem::size_of::<Vernaux>() * verdef_info.version_count as usize)
+ (size_of::<Verneed>()
+     + size_of::<Vernaux>() * verdef_info.version_count as usize)
as u32
};
ver_need.vn_version.set(e, 1);
ver_need.vn_cnt.set(e, verdef_info.version_count);
ver_need
.vn_aux
- .set(e, core::mem::size_of::<Verneed>() as u32);
+ .set(e, size_of::<Verneed>() as u32);
ver_need.vn_next.set(e, next_verneed_offset);

let auxes = table_writer
@@ -2778,7 +2778,7 @@ impl<'data> DynamicLayout<'data> {
let vna_next = if is_last_aux {
0
} else {
- core::mem::size_of::<Vernaux>() as u32
+ size_of::<Vernaux>() as u32
};
aux_out.vna_next.set(e, vna_next);
aux_out.vna_other.set(e, output_version);
@@ -2884,7 +2884,7 @@ pub(crate) fn verify_resolution_allocation(
total_bytes_allocated = alignment.align_up(total_bytes_allocated) + size;
});
total_bytes_allocated = crate::alignment::USIZE.align_up(total_bytes_allocated);
- let mut all_mem = vec![0_u64; total_bytes_allocated as usize / core::mem::size_of::<u64>()];
+ let mut all_mem = vec![0_u64; total_bytes_allocated as usize / size_of::<u64>()];
let mut all_mem: &mut [u8] = bytemuck::cast_slice_mut(all_mem.as_mut_slice());
let mut offset = 0;
let mut buffers = mem_sizes.output_order_map(output_sections, |_part_id, alignment, &size| {
3 changes: 2 additions & 1 deletion wild_lib/src/grouping.rs
@@ -3,6 +3,7 @@ use crate::input_data::FileId;
use crate::parsing::ParsedInput;
use crate::sharding::ShardKey as _;
use crate::symbol_db::SymbolId;
+ use std::mem::replace;

pub(crate) struct Group<'data> {
pub(crate) files: Vec<ParsedInput<'data>>,
@@ -39,7 +40,7 @@ pub(crate) fn group_files<'data>(files: Vec<ParsedInput<'data>>, args: &Args) ->
|| (!group.files.is_empty() && num_symbols_with_file > symbols_per_group)
{
// Start a new group.
- groups.push(core::mem::replace(&mut group, Group::empty()));
+ groups.push(replace(&mut group, Group::empty()));
num_symbols_with_file = file.symbol_id_range().len();
}
num_symbols = num_symbols_with_file;