diff --git a/src/librustc/mir/interpret/allocation.rs b/src/librustc/mir/interpret/allocation.rs
index f849361e08be9..22dafaf4e0fba 100644
--- a/src/librustc/mir/interpret/allocation.rs
+++ b/src/librustc/mir/interpret/allocation.rs
@@ -695,6 +695,12 @@ impl Allocation {
     }
 }
 
+impl AllocationDefinedness {
+    pub fn all_bytes_undef(&self) -> bool {
+        self.initial == false && self.ranges.len() == 1
+    }
+}
+
 /// Relocations.
 #[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, RustcEncodable, RustcDecodable)]
 pub struct Relocations<Tag = (), Id = AllocId>(SortedMap<Size, Vec<(Tag, Id)>>);
diff --git a/src/librustc_mir/interpret/memory.rs b/src/librustc_mir/interpret/memory.rs
index ee7fb18fd05a5..dd9f769ead451 100644
--- a/src/librustc_mir/interpret/memory.rs
+++ b/src/librustc_mir/interpret/memory.rs
@@ -855,6 +855,22 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
         let relocations = self.get_raw(src.alloc_id)?
             .prepare_relocation_copy(self, src, size, dest, length);
 
+        // Prepare a copy of the undef mask.
+        let compressed = self.get_raw(src.alloc_id)?.compress_undef_range(src, size);
+
+        if compressed.all_bytes_undef() {
+            // Fast path: if all bytes are `undef`, then there is nothing to copy. The target
+            // range is marked as undef, but we otherwise omit changing the byte representation,
+            // which may be arbitrary for undef bytes.
+            // This also avoids writing to the target bytes, so the backing allocation is never
+            // touched if the bytes stay undef for the whole interpreter execution. On
+            // contemporary operating systems this can avoid physically allocating the page.
+            let dest_alloc = self.get_raw_mut(dest.alloc_id)?;
+            dest_alloc.mark_definedness(dest, size * length, false);
+            dest_alloc.mark_relocation_range(relocations);
+            return Ok(());
+        }
+
         let tcx = self.tcx.tcx;
 
         // This checks relocation edges on the src.
@@ -897,8 +913,10 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
             }
         }
 
-        // copy definedness to the destination
-        self.copy_undef_mask(src, dest, size, length)?;
+        // now copy over the undef data
+        self.get_raw_mut(dest.alloc_id)?
+            .mark_compressed_undef_range(&compressed, dest, size, length);
+
         // copy the relocations to the destination
         self.get_raw_mut(dest.alloc_id)?.mark_relocation_range(relocations);
 
@@ -906,29 +924,8 @@
     }
 }
 
-/// Undefined bytes
+/// Machine pointer introspection.
 impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'mir, 'tcx, M> {
-    // FIXME: Add a fast version for the common, nonoverlapping case
-    fn copy_undef_mask(
-        &mut self,
-        src: Pointer<M::PointerTag>,
-        dest: Pointer<M::PointerTag>,
-        size: Size,
-        repeat: u64,
-    ) -> InterpResult<'tcx> {
-        // The bits have to be saved locally before writing to dest in case src and dest overlap.
-        assert_eq!(size.bytes() as usize as u64, size.bytes());
-
-        let src_alloc = self.get_raw(src.alloc_id)?;
-        let compressed = src_alloc.compress_undef_range(src, size);
-
-        // now fill in all the data
-        let dest_allocation = self.get_raw_mut(dest.alloc_id)?;
-        dest_allocation.mark_compressed_undef_range(&compressed, dest, size, repeat);
-
-        Ok(())
-    }
-
     pub fn force_ptr(
         &self,
         scalar: Scalar<M::PointerTag>,
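
A note on the new `all_bytes_undef` check: `AllocationDefinedness` is a run-length encoding of the definedness mask, i.e. an initial state plus a list of alternating run lengths, so a mask that starts undef and contains exactly one run is undef for the whole range. The following is a minimal standalone sketch of that reasoning; `DefinednessRle` and its exact layout are illustrative assumptions, not the rustc definitions.

```rust
/// Illustrative stand-in for `AllocationDefinedness` (assumed layout).
struct DefinednessRle {
    /// Definedness of the first run of bytes.
    initial: bool,
    /// Lengths of the alternating runs: `ranges[0]` has definedness
    /// `initial`, `ranges[1]` the opposite, and so on.
    ranges: Vec<u64>,
}

impl DefinednessRle {
    /// Mirrors the diff's check (written there as `self.initial == false &&
    /// self.ranges.len() == 1`): the mask starts undef and consists of a
    /// single run, so every byte in the range is undef.
    fn all_bytes_undef(&self) -> bool {
        !self.initial && self.ranges.len() == 1
    }
}

fn main() {
    // 16 undef bytes compress to a single run: the fast path applies.
    let all_undef = DefinednessRle { initial: false, ranges: vec![16] };
    assert!(all_undef.all_bytes_undef());

    // 4 defined bytes followed by 12 undef ones: two runs, so the copy
    // has to take the slow path and actually move bytes.
    let mixed = DefinednessRle { initial: true, ranges: vec![4, 12] };
    assert!(!mixed.all_bytes_undef());
}
```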
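
Likewise, a sketch of the control flow the `memory.rs` change introduces, under simplifying assumptions: a plain `Vec<bool>` stands in for rustc's bit-packed undef mask, and `Alloc`/`copy_repeatedly` here are hypothetical stand-ins rather than the interpreter's actual types.

```rust
/// Hypothetical allocation: byte contents plus one definedness flag per byte.
struct Alloc {
    bytes: Vec<u8>,
    defined: Vec<bool>,
}

/// Copies `size` bytes from `src` into `dest`, repeated `length` times,
/// taking the all-undef fast path from the diff when it applies.
fn copy_repeatedly(src: &Alloc, dest: &mut Alloc, size: usize, length: usize) {
    if src.defined[..size].iter().all(|&d| !d) {
        // Fast path: only flip the definedness metadata. `dest.bytes` is
        // never written, so an untouched page of a freshly reserved backing
        // buffer can stay physically unallocated by the OS.
        for d in &mut dest.defined[..size * length] {
            *d = false;
        }
        return;
    }
    // Slow path: copy the bytes `length` times, then the definedness mask.
    for i in 0..length {
        let off = i * size;
        dest.bytes[off..off + size].copy_from_slice(&src.bytes[..size]);
        dest.defined[off..off + size].copy_from_slice(&src.defined[..size]);
    }
}

fn main() {
    let src = Alloc { bytes: vec![0; 8], defined: vec![false; 8] };
    let mut dest = Alloc { bytes: vec![0xAA; 32], defined: vec![true; 32] };
    copy_repeatedly(&src, &mut dest, 8, 4);
    assert!(dest.defined.iter().all(|&d| !d));
    // The byte contents of `dest` were deliberately left untouched.
    assert_eq!(dest.bytes[0], 0xAA);
}
```

Note that the diff's fast path still calls `mark_relocation_range(relocations)`: relocation (pointer-provenance) metadata has to be updated for the target range even when the byte contents are left alone.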