diff --git a/src/dlmalloc.rs b/src/dlmalloc.rs
index f11a64b..573e8ea 100644
--- a/src/dlmalloc.rs
+++ b/src/dlmalloc.rs
@@ -240,7 +240,7 @@ impl Dlmalloc {
     fn align_as_chunk(&self, ptr: *mut u8) -> *mut Chunk {
         unsafe {
             let chunk = Chunk::to_mem(ptr.cast());
-            ptr.add(self.align_offset(chunk)).cast()
+            ptr.wrapping_add(self.align_offset(chunk)).cast()
         }
     }
 
@@ -428,7 +428,7 @@ impl Dlmalloc {
         } else {
             self.least_addr = cmp::min(tbase, self.least_addr);
             let mut sp: *mut Segment = &mut self.seg;
-            while !sp.is_null() && (*sp).base != tbase.add(tsize) {
+            while !sp.is_null() && (*sp).base != tbase.wrapping_add(tsize) {
                 sp = (*sp).next;
             }
             if !sp.is_null() && !Segment::is_extern(sp) && Segment::sys_flags(sp) == flags {
@@ -571,7 +571,7 @@ impl Dlmalloc {
             let newmmsize =
                 self.mmap_align(nb + 6 * mem::size_of::<usize>() + self.malloc_alignment() - 1);
             let ptr = self.system_allocator.remap(
-                oldp.cast::<u8>().sub(offset),
+                oldp.cast::<u8>().wrapping_sub(offset),
                 oldmmsize,
                 newmmsize,
                 can_move,
@@ -579,7 +579,7 @@ impl Dlmalloc {
             if ptr.is_null() {
                 return ptr::null_mut();
             }
-            let newp = ptr.add(offset).cast::<Chunk>();
+            let newp = ptr.wrapping_add(offset).cast::<Chunk>();
             let psize = newmmsize - offset - self.mmap_foot_pad();
             (*newp).head = psize;
             (*Chunk::plus_offset(newp, psize)).head = Chunk::fencepost_head();
@@ -622,7 +622,7 @@ impl Dlmalloc {
             let pos = if (br as usize - p as usize) > self.min_chunk_size() {
                 br.cast::<u8>()
             } else {
-                br.cast::<u8>().add(alignment)
+                br.cast::<u8>().wrapping_add(alignment)
             };
             let newp = pos.cast::<Chunk>();
             let leadsize = pos as usize - p as usize;
@@ -670,7 +670,7 @@ impl Dlmalloc {
                 psize += prevsize + self.mmap_foot_pad();
                 if self
                     .system_allocator
-                    .free(p.cast::<u8>().sub(prevsize), psize)
+                    .free(p.cast::<u8>().wrapping_sub(prevsize), psize)
                 {
                     self.footprint -= psize;
                 }
@@ -794,10 +794,10 @@ impl Dlmalloc {
         let old_end = Segment::top(oldsp);
         let ssize = self.pad_request(mem::size_of::<Segment>());
         let offset = ssize + mem::size_of::<usize>() * 4 + self.malloc_alignment() - 1;
-        let rawsp = old_end.sub(offset);
+        let rawsp = old_end.wrapping_sub(offset);
         let offset = self.align_offset(Chunk::to_mem(rawsp.cast()));
-        let asp = rawsp.add(offset);
-        let csp = if asp < old_top.add(self.min_chunk_size()) {
+        let asp = rawsp.wrapping_add(offset);
+        let csp = if asp < old_top.wrapping_add(self.min_chunk_size()) {
             old_top
         } else {
             asp
@@ -969,13 +969,13 @@ impl Dlmalloc {
     unsafe fn smallbin_at(&mut self, idx: u32) -> *mut Chunk {
         let idx = usize::try_from(idx * 2).unwrap();
         debug_assert!(idx < self.smallbins.len());
-        self.smallbins.as_mut_ptr().add(idx).cast()
+        self.smallbins.as_mut_ptr().wrapping_add(idx).cast()
     }
 
     unsafe fn treebin_at(&mut self, idx: u32) -> *mut *mut TreeChunk {
         let idx = usize::try_from(idx).unwrap();
         debug_assert!(idx < self.treebins.len());
-        self.treebins.as_mut_ptr().add(idx)
+        self.treebins.as_mut_ptr().wrapping_add(idx)
     }
 
     fn compute_tree_index(&self, size: usize) -> u32 {
@@ -1221,7 +1221,7 @@ impl Dlmalloc {
                 psize += prevsize + self.mmap_foot_pad();
                 if self
                     .system_allocator
-                    .free(p.cast::<u8>().sub(prevsize), psize)
+                    .free(p.cast::<u8>().wrapping_sub(prevsize), psize)
                 {
                     self.footprint -= psize;
                 }
@@ -1364,8 +1364,8 @@ impl Dlmalloc {
                 let psize = Chunk::size(p);
                 // We can unmap if the first chunk holds the entire segment and
                 // isn't pinned.
-                let chunk_top = p.cast::<u8>().add(psize);
-                let top = base.add(size - self.top_foot_size());
+                let chunk_top = p.cast::<u8>().wrapping_add(psize);
+                let top = base.wrapping_add(size - self.top_foot_size());
                 if !Chunk::inuse(p) && chunk_top >= top {
                     let tp = p.cast::<TreeChunk>();
                     debug_assert!(Segment::holds(sp, sp.cast()));
@@ -1727,11 +1727,11 @@ impl Chunk {
    }
 
     unsafe fn next(me: *mut Chunk) -> *mut Chunk {
-        me.cast::<u8>().add((*me).head & !FLAG_BITS).cast()
+        me.cast::<u8>().wrapping_add((*me).head & !FLAG_BITS).cast()
     }
 
     unsafe fn prev(me: *mut Chunk) -> *mut Chunk {
-        me.cast::<u8>().sub((*me).prev_foot).cast()
+        me.cast::<u8>().wrapping_sub((*me).prev_foot).cast()
     }
 
     unsafe fn cinuse(me: *mut Chunk) -> bool {
@@ -1786,15 +1786,15 @@ impl Chunk {
     }
 
     unsafe fn plus_offset(me: *mut Chunk, offset: usize) -> *mut Chunk {
-        me.cast::<u8>().add(offset).cast()
+        me.cast::<u8>().wrapping_add(offset).cast()
     }
 
     unsafe fn minus_offset(me: *mut Chunk, offset: usize) -> *mut Chunk {
-        me.cast::<u8>().sub(offset).cast()
+        me.cast::<u8>().wrapping_sub(offset).cast()
     }
 
     unsafe fn to_mem(me: *mut Chunk) -> *mut u8 {
-        me.cast::<u8>().add(Chunk::mem_offset())
+        me.cast::<u8>().wrapping_add(Chunk::mem_offset())
     }
 
     fn mem_offset() -> usize {
@@ -1802,7 +1802,7 @@ impl Chunk {
     }
 
     unsafe fn from_mem(mem: *mut u8) -> *mut Chunk {
-        mem.sub(2 * mem::size_of::<usize>()).cast()
+        mem.wrapping_sub(2 * mem::size_of::<usize>()).cast()
     }
 }
@@ -1849,7 +1849,7 @@ impl Segment {
     }
 
     unsafe fn top(seg: *mut Segment) -> *mut u8 {
-        (*seg).base.add((*seg).size)
+        (*seg).base.wrapping_add((*seg).size)
     }
 }
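Background on the pattern this diff applies, for reviewers: `<*mut T>::add`/`sub` require the computed pointer to stay within (or one past the end of) the same allocated object, so even computing an out-of-bounds address is undefined behavior; `wrapping_add`/`wrapping_sub` make the address computation itself always defined, leaving only the dereference of an invalid pointer as UB. A minimal standalone sketch of the distinction (the buffer and variable names below are illustrative, not identifiers from this diff):

    // `add` vs `wrapping_add` on raw pointers: a minimal illustration.
    fn main() {
        let buf = [0u8; 16];
        let base = buf.as_ptr();

        // `add` is only defined while the result stays within `buf`,
        // or exactly one byte past its end.
        let one_past_end = unsafe { base.add(16) }; // allowed: one-past-the-end

        // `wrapping_add` places no such requirement on the computation;
        // the result is just an address, and only dereferencing it
        // (if out of bounds) would be undefined behavior.
        let far_away = base.wrapping_add(1 << 20); // defined to compute, never dereferenced

        println!("{one_past_end:p} {far_away:p}");
    }

This is why the conversions above are safe to make mechanically: every `wrapping_*` result is either validated or used only as an address before any dereference.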