[NativeAOT] Remove unused native memcopy helpers. (#84314)
* Remove unused (and in some cases broken) RhpCopy helpers.

* PR feedback
VSadov authored Apr 4, 2023
1 parent 337999d commit 70d00e4
Showing 11 changed files with 20 additions and 864 deletions.
@@ -155,8 +155,8 @@ internal static int RhEndNoGCRegion()
internal static extern unsafe void RhpAssignRef(ref object address, object obj);

[MethodImplAttribute(MethodImplOptions.InternalCall)]
- [RuntimeImport(Redhawk.BaseName, "RhpInitMultibyte")]
- internal static extern unsafe ref byte RhpInitMultibyte(ref byte dmem, int c, nuint size);
+ [RuntimeImport(Redhawk.BaseName, "RhpGcSafeZeroMemory")]
+ internal static extern unsafe ref byte RhpGcSafeZeroMemory(ref byte dmem, nuint size);

[MethodImplAttribute(MethodImplOptions.InternalCall)]
[RuntimeImport(Redhawk.BaseName, "memmove")]
@@ -234,9 +234,8 @@ public static unsafe void RhUnbox(object? obj, ref byte data, MethodTable* pUnboxToEEType)
Debug.Assert(pUnboxToEEType != null && pUnboxToEEType->IsNullable);

// Set HasValue to false and clear the value (in case there were GC references we wish to stop reporting).
- InternalCalls.RhpInitMultibyte(
+ InternalCalls.RhpGcSafeZeroMemory(
ref data,
- 0,
pUnboxToEEType->ValueTypeSize);

return;
8 changes: 0 additions & 8 deletions src/coreclr/nativeaot/Runtime/EHHelpers.cpp
@@ -286,14 +286,6 @@ EXTERN_C void * RhpCheckedLockCmpXchgAVLocation;
EXTERN_C void * RhpCheckedXchgAVLocation;
EXTERN_C void * RhpLockCmpXchg32AVLocation;
EXTERN_C void * RhpLockCmpXchg64AVLocation;
EXTERN_C void * RhpCopyMultibyteDestAVLocation;
EXTERN_C void * RhpCopyMultibyteSrcAVLocation;
EXTERN_C void * RhpCopyMultibyteNoGCRefsDestAVLocation;
EXTERN_C void * RhpCopyMultibyteNoGCRefsSrcAVLocation;
EXTERN_C void * RhpCopyMultibyteWithWriteBarrierDestAVLocation;
EXTERN_C void * RhpCopyMultibyteWithWriteBarrierSrcAVLocation;
EXTERN_C void * RhpCopyAnyWithWriteBarrierDestAVLocation;
EXTERN_C void * RhpCopyAnyWithWriteBarrierSrcAVLocation;

static bool InWriteBarrierHelper(uintptr_t faultingIP)
{
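These AV-location symbols exist so the exception machinery can recognize a hardware fault that happened inside one of the hand-written helpers and attribute it to the managed caller. With the copy stubs gone, their probe labels can be dropped from that bookkeeping. A minimal sketch of the kind of lookup involved, with hypothetical placeholder labels (not the actual body of InWriteBarrierHelper):

```cpp
#include <cstddef>
#include <cstdint>

// Placeholder "labels"; in the runtime these are addresses inside the
// hand-written assembly helpers (for example RhpAssignRefAVLocation).
static char RhpAssignRefAVLocation_placeholder;
static char RhpCheckedAssignRefAVLocation_placeholder;

// Sketch of the lookup a helper such as InWriteBarrierHelper performs: a
// faulting instruction pointer counts as "inside a helper" if it matches one
// of the known probe addresses. Removing the RhpCopyMultibyte* stubs means
// their AV-location entries no longer need to appear in such a table.
static bool InHelperProbe(uintptr_t faultingIP)
{
    const uintptr_t probeSites[] =
    {
        (uintptr_t)&RhpAssignRefAVLocation_placeholder,
        (uintptr_t)&RhpCheckedAssignRefAVLocation_placeholder,
    };

    for (size_t i = 0; i < sizeof(probeSites) / sizeof(probeSites[0]); i++)
    {
        if (faultingIP == probeSites[i])
            return true;
    }
    return false;
}
```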
95 changes: 7 additions & 88 deletions src/coreclr/nativeaot/Runtime/GCMemoryHelpers.cpp
@@ -13,99 +13,24 @@
#include "GCMemoryHelpers.h"
#include "GCMemoryHelpers.inl"

- // This function clears a piece of memory in a GC safe way. It makes the guarantee that it will clear memory in at
- // least pointer sized chunks whenever possible. Unaligned memory at the beginning and remaining bytes at the end are
- // written bytewise. We must make this guarantee whenever we clear memory in the GC heap that could contain object
+ // This function clears a piece of memory in a GC safe way.
+ // Object-aligned memory is zeroed with no smaller than pointer-size granularity.
+ // We must make this guarantee whenever we clear memory in the GC heap that could contain object
// references. The GC or other user threads can read object references at any time, clearing them bytewise can result
// in a read on another thread getting incorrect data.
//
- // USAGE: The caller is responsible for hoisting any null reference exceptions to a place where the hardware exception
- // can be properly translated to a managed exception.
- COOP_PINVOKE_CDECL_HELPER(void *, RhpInitMultibyte, (void * mem, int c, size_t size))
+ // Unaligned memory at the beginning and remaining bytes at the end are written bytewise.
+ // USAGE: The caller is responsible for null-checking the reference.
+ COOP_PINVOKE_CDECL_HELPER(void *, RhpGcSafeZeroMemory, (void * mem, size_t size))
{
// The caller must do the null-check because we cannot take an AV in the runtime and translate it to managed.
ASSERT(mem != nullptr);

- uintptr_t bv = (uint8_t)c;
- uintptr_t pv = 0;
-
- if (bv != 0)
- {
- pv =
- #if (POINTER_SIZE == 8)
- bv << 7*8 | bv << 6*8 | bv << 5*8 | bv << 4*8 |
- #endif
- bv << 3*8 | bv << 2*8 | bv << 1*8 | bv;
- }
-
- InlineGCSafeFillMemory(mem, size, pv);
+ InlineGcSafeZeroMemory(mem, size);

// memset returns the destination buffer
return mem;
}
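The pointer-size guarantee exists because clearing a reference slot byte by byte briefly leaves a mixed value in it; the GC or another thread reading the slot concurrently could observe a pointer that is neither the original reference nor null. A minimal illustration of the difference (not runtime code):

```cpp
#include <cstring>

// A slot that may hold an object reference and can be read concurrently.
struct Slot { void* volatile ref; };

// Unsafe: memset carries no atomicity guarantee and may clear the slot one
// byte at a time, so a concurrent reader can observe a half-cleared pointer.
void ClearBytewise(Slot* s)
{
    memset((void*)&s->ref, 0, sizeof(s->ref));
}

// Safe: a single pointer-sized store; a concurrent reader sees either the old
// reference or null, never a torn value. This is the property the GC-safe
// helpers preserve for aligned, pointer-sized chunks.
void ClearPointerSized(Slot* s)
{
    s->ref = nullptr;
}
```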


// This is a GC-safe variant of memcpy. It guarantees that the object references in the GC heap are updated atomically.
// This is required for type safety and proper operation of the background GC.
//
// USAGE: 1) The caller is responsible for performing the appropriate bulk write barrier.
// 2) The caller is responsible for hoisting any null reference exceptions to a place where the hardware
// exception can be properly translated to a managed exception. This is handled by RhpCopyMultibyte.
// 3) The caller must ensure that all three parameters are pointer-size-aligned. This should be the case for
// value types which contain GC refs anyway, so if you want to copy structs without GC refs which might be
// unaligned, then you must use RhpCopyMultibyteNoGCRefs.
COOP_PINVOKE_CDECL_HELPER(void *, memcpyGCRefs, (void * dest, const void *src, size_t len))
{
// null pointers are not allowed (they are checked by RhpCopyMultibyte)
ASSERT(dest != nullptr);
ASSERT(src != nullptr);

InlineForwardGCSafeCopy(dest, src, len);

// memcpy returns the destination buffer
return dest;
}
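InlineForwardGCSafeCopy itself is not part of this diff. Conceptually, for pointer-size-aligned source, destination, and length, it copies in whole pointer-sized units so every object reference moves as one atomic store. A simplified sketch of that contract (assumes aligned, non-overlapping ranges; not the runtime's actual implementation):

```cpp
#include <cstddef>
#include <cstdint>

// Copy `len` bytes in pointer-sized units. Each reference slot is read and
// written as a whole word, so a concurrent reader never sees it half-copied.
// Assumes dest, src, and len are all pointer-size aligned and the ranges do
// not overlap.
static void ForwardCopyPointerSized(void* dest, const void* src, size_t len)
{
    size_t words = len / sizeof(uintptr_t);
    volatile uintptr_t* d = (uintptr_t*)dest;
    const uintptr_t* s = (const uintptr_t*)src;

    for (size_t i = 0; i < words; i++)
        d[i] = s[i];
}
```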

// This is a GC-safe variant of memcpy. It guarantees that the object references in the GC heap are updated atomically.
// This is required for type safety and proper operation of the background GC.
// Writebarrier is included.
//
// USAGE:
// 1) The caller is responsible for hoisting any null reference exceptions to a place where the hardware
// exception can be properly translated to a managed exception. This is handled by RhpCopyMultibyte.
// 2) The caller must ensure that all three parameters are pointer-size-aligned. This should be the case for
// value types which contain GC refs anyway, so if you want to copy structs without GC refs which might be
// unaligned, then you must use RhpCopyMultibyteNoGCRefs.
COOP_PINVOKE_CDECL_HELPER(void *, memcpyGCRefsWithWriteBarrier, (void * dest, const void *src, size_t len))
{
// null pointers are not allowed (they are checked by RhpCopyMultibyteWithWriteBarrier)
ASSERT(dest != nullptr);
ASSERT(src != nullptr);

InlineForwardGCSafeCopy(dest, src, len);
InlinedBulkWriteBarrier(dest, len);

// memcpy returns the destination buffer
return dest;
}
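The bulk write barrier after the copy records that the destination range may now hold references the GC has not tracked yet; in card-table terms it marks every card byte covering [dest, dest + len) so that range gets rescanned. A rough sketch under an assumed layout (hypothetical cardTable/heapBase parameters; the runtime's InlinedBulkWriteBarrier works against its own globals and adds range checks):

```cpp
#include <cstddef>
#include <cstdint>

static const int card_byte_shift = 10;   // one card byte per 1 KB of heap (value visible in GCMemoryHelpers.inl)

// Mark every card byte covering [dest, dest + len) so the GC rescans that
// range for cross-generation references. Assumes len != 0 and that cardTable
// and heapBase were set up elsewhere.
static void BulkCardMark(uint8_t* cardTable, uint8_t* heapBase, void* dest, size_t len)
{
    size_t firstCard = (size_t)(((uint8_t*)dest - heapBase) >> card_byte_shift);
    size_t lastCard  = (size_t)(((uint8_t*)dest + len - 1 - heapBase) >> card_byte_shift);

    for (size_t card = firstCard; card <= lastCard; card++)
        cardTable[card] = 0xFF;
}
```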

// Same as memcpyGCRefsWithWriteBarrier, except it checks if memory might contain GC pointers
// and if so dispatches to memcpyGCRefsWithWriteBarrier and if not uses traditional memcpy
COOP_PINVOKE_CDECL_HELPER(void *, memcpyAnyWithWriteBarrier, (void * dest, const void *src, size_t len))
{
// null pointers are not allowed (they are checked by RhpCopyMultibyteWithWriteBarrier)
ASSERT(dest != nullptr);
ASSERT(src != nullptr);

// Use GC safe copy whenever there might be GC pointers
if (IS_ALIGNED(dest, sizeof(size_t)) && IS_ALIGNED(src, sizeof(size_t)) && IS_ALIGNED(len, sizeof(size_t)))
{
return memcpyGCRefsWithWriteBarrier(dest, src, len);
}

return memcpy(dest, src, len);
}

// Move memory, in a way that is compatible with a move onto the heap, but
// does not require the destination pointer to be on the heap.

@@ -119,12 +44,6 @@ COOP_PINVOKE_HELPER(void, RhBulkMoveWithWriteBarrier, (uint8_t* pDest, uint8_t*
InlinedBulkWriteBarrier(pDest, cbDest);
}

void GCSafeCopyMemoryWithWriteBarrier(void * dest, const void *src, size_t len)
{
InlineForwardGCSafeCopy(dest, src, len);
InlinedBulkWriteBarrier(dest, len);
}

void REDHAWK_CALLCONV RhpBulkWriteBarrier(void* pMemStart, uint32_t cbMemSize)
{
InlinedBulkWriteBarrier(pMemStart, cbMemSize);
2 changes: 0 additions & 2 deletions src/coreclr/nativeaot/Runtime/GCMemoryHelpers.h
@@ -5,6 +5,4 @@
// Unmanaged GC memory helpers
//

void GCSafeCopyMemoryWithWriteBarrier(void * dest, const void *src, size_t len);

EXTERN_C void REDHAWK_CALLCONV RhpBulkWriteBarrier(void* pMemStart, uint32_t cbMemSize);
19 changes: 10 additions & 9 deletions src/coreclr/nativeaot/Runtime/GCMemoryHelpers.inl
@@ -19,32 +19,33 @@ static const int card_byte_shift = 10;
#endif


- // This function fills a piece of memory in a GC safe way. It makes the guarantee
- // that it will fill memory in at least pointer sized chunks whenever possible.
+ // This function clears a piece of memory in a GC safe way.
+ // Object-aligned memory is zeroed with no smaller than pointer-size granularity.
+ // We must make this guarantee whenever we clear memory in the GC heap that could contain object
+ // references. The GC or other user threads can read object references at any time, clearing them bytewise can result
+ // in a read on another thread getting incorrect data.
// Unaligned memory at the beginning and remaining bytes at the end are written bytewise.
- // We must make this guarantee whenever we clear memory in the GC heap that could contain
- // object references. The GC or other user threads can read object references at any time,
- // clearing them bytewise can result in a read on another thread getting incorrect data.
- FORCEINLINE void InlineGCSafeFillMemory(void * mem, size_t size, size_t pv)
+ // USAGE: The caller is responsible for null-checking the reference.
+ FORCEINLINE void InlineGcSafeZeroMemory(void * mem, size_t size)
{
uint8_t * memBytes = (uint8_t *)mem;
uint8_t * endBytes = &memBytes[size];

// handle unaligned bytes at the beginning
while (!IS_ALIGNED(memBytes, sizeof(void *)) && (memBytes < endBytes))
- *memBytes++ = (uint8_t)pv;
+ *memBytes++ = 0;

// now write pointer sized pieces
// volatile ensures that this doesn't get optimized back into a memset call
size_t nPtrs = (endBytes - memBytes) / sizeof(void *);
volatile uintptr_t* memPtr = (uintptr_t*)memBytes;
for (size_t i = 0; i < nPtrs; i++)
- *memPtr++ = pv;
+ *memPtr++ = 0;

// handle remaining bytes at the end
memBytes = (uint8_t*)memPtr;
while (memBytes < endBytes)
- *memBytes++ = (uint8_t)pv;
+ *memBytes++ = 0;
}
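A concrete instance of the head/word/tail split above, assuming 8-byte pointers: zeroing 23 bytes starting 3 bytes past an 8-byte boundary writes 5 unaligned head bytes, then 2 full words, then 2 trailing bytes. A small self-contained check of that arithmetic (illustration only):

```cpp
#include <cassert>
#include <cstddef>
#include <cstdint>

// Head/word/tail decomposition that InlineGcSafeZeroMemory effectively
// performs, assuming 8-byte pointers.
struct Split { size_t headBytes; size_t words; size_t tailBytes; };

static Split ComputeSplit(uintptr_t addr, size_t size)
{
    size_t head = (sizeof(void*) - (addr % sizeof(void*))) % sizeof(void*);
    if (head > size)
        head = size;
    size_t words = (size - head) / sizeof(void*);
    size_t tail = size - head - words * sizeof(void*);
    return { head, words, tail };
}

int main()
{
    // 23 bytes starting 3 bytes past an 8-byte boundary: 5 + 2*8 + 2 = 23.
    Split s = ComputeSplit(0x1003, 23);
    assert(s.headBytes == 5 && s.words == 2 && s.tailBytes == 2);
    return 0;
}
```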

// These functions copy memory in a GC safe way. They makes the guarantee
142 changes: 0 additions & 142 deletions src/coreclr/nativeaot/Runtime/amd64/MiscStubs.asm
@@ -3,150 +3,8 @@

include AsmMacros.inc

EXTERN memcpy : PROC
EXTERN memcpyGCRefs : PROC
EXTERN memcpyGCRefsWithWriteBarrier : PROC
EXTERN memcpyAnyWithWriteBarrier : PROC
EXTERN RhpGetThreadStaticBaseForTypeSlow : PROC

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;
;; void* RhpCopyMultibyteNoGCRefs(void*, void*, size_t)
;;
;; The purpose of this wrapper is to hoist the potential null reference exceptions of copying memory up to a place where
;; the stack unwinder and exception dispatch can properly transform the exception into a managed exception and dispatch
;; it to managed code.
;;
LEAF_ENTRY RhpCopyMultibyteNoGCRefs, _TEXT

; rcx dest
; rdx src
; r8 count

test r8, r8 ; check for a zero-length copy
jz NothingToCopy

; Now check the dest and src pointers. If they AV, the EH subsystem will recognize the address of the AV,
; unwind the frame, and fixup the stack to make it look like the (managed) caller AV'ed, which will be
; translated to a managed exception as usual.
ALTERNATE_ENTRY RhpCopyMultibyteNoGCRefsDestAVLocation
cmp byte ptr [rcx], 0
ALTERNATE_ENTRY RhpCopyMultibyteNoGCRefsSrcAVLocation
cmp byte ptr [rdx], 0

; tail-call to plain-old-memcpy
jmp memcpy

NothingToCopy:
mov rax, rcx ; return dest
ret

LEAF_END RhpCopyMultibyteNoGCRefs, _TEXT
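In C-like terms, each of these wrappers boiled down to: probe one byte of dest and src at a labeled instruction, so a null or bad argument faults at a spot the EH code can map back to the managed caller, then tail-call the real copy routine. A rough equivalent, for illustration only (the labels exist only in the assembly versions):

```cpp
#include <cstddef>
#include <cstdint>
#include <cstring>

// Roughly what RhpCopyMultibyteNoGCRefs did, minus the EH bookkeeping: touch
// one byte of each pointer so an invalid argument faults at a known spot,
// then defer to the ordinary memcpy.
static void* CopyMultibyteNoGCRefsSketch(void* dest, const void* src, size_t count)
{
    if (count == 0)
        return dest;                          // nothing to copy

    // Probe reads; in the assembly these sit at ALTERNATE_ENTRY labels so a
    // fault here can be attributed to the managed caller.
    *(volatile uint8_t*)dest;
    *(volatile const uint8_t*)src;

    return memcpy(dest, src, count);
}
```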

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;
;; void* RhpCopyMultibyte(void*, void*, size_t)
;;
;; The purpose of this wrapper is to hoist the potential null reference exceptions of copying memory up to a place where
;; the stack unwinder and exception dispatch can properly transform the exception into a managed exception and dispatch
;; it to managed code.
;;
LEAF_ENTRY RhpCopyMultibyte, _TEXT

; rcx dest
; rdx src
; r8 count

test r8, r8 ; check for a zero-length copy
jz NothingToCopy

; Now check the dest and src pointers. If they AV, the EH subsystem will recognize the address of the AV,
; unwind the frame, and fixup the stack to make it look like the (managed) caller AV'ed, which will be
; translated to a managed exception as usual.
ALTERNATE_ENTRY RhpCopyMultibyteDestAVLocation
cmp byte ptr [rcx], 0
ALTERNATE_ENTRY RhpCopyMultibyteSrcAVLocation
cmp byte ptr [rdx], 0

; tail-call to the GC-safe memcpy implementation
jmp memcpyGCRefs

NothingToCopy:
mov rax, rcx ; return dest
ret

LEAF_END RhpCopyMultibyte, _TEXT

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;
;; void* RhpCopyMultibyteWithWriteBarrier(void*, void*, size_t)
;;
;; The purpose of this wrapper is to hoist the potential null reference exceptions of copying memory up to a place where
;; the stack unwinder and exception dispatch can properly transform the exception into a managed exception and dispatch
;; it to managed code.
;; Runs a card table update via RhpBulkWriteBarrier after the copy
;;
LEAF_ENTRY RhpCopyMultibyteWithWriteBarrier, _TEXT

; rcx dest
; rdx src
; r8 count

test r8, r8 ; check for a zero-length copy
jz NothingToCopy

; Now check the dest and src pointers. If they AV, the EH subsystem will recognize the address of the AV,
; unwind the frame, and fixup the stack to make it look like the (managed) caller AV'ed, which will be
; translated to a managed exception as usual.
ALTERNATE_ENTRY RhpCopyMultibyteWithWriteBarrierDestAVLocation
cmp byte ptr [rcx], 0
ALTERNATE_ENTRY RhpCopyMultibyteWithWriteBarrierSrcAVLocation
cmp byte ptr [rdx], 0

; tail-call to the GC-safe memcpy implementation
jmp memcpyGCRefsWithWriteBarrier

NothingToCopy:
mov rax, rcx ; return dest
ret

LEAF_END RhpCopyMultibyteWithWriteBarrier, _TEXT

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;
;; void* RhpCopyAnyWithWriteBarrier(void*, void*, size_t)
;;
;; The purpose of this wrapper is to hoist the potential null reference exceptions of copying memory up to a place where
;; the stack unwinder and exception dispatch can properly transform the exception into a managed exception and dispatch
;; it to managed code.
;; Runs a card table update via RhpBulkWriteBarrier after the copy if the copy may contain GC pointers
;;
LEAF_ENTRY RhpCopyAnyWithWriteBarrier, _TEXT

; rcx dest
; rdx src
; r8 count

test r8, r8 ; check for a zero-length copy
jz NothingToCopy

; Now check the dest and src pointers. If they AV, the EH subsystem will recognize the address of the AV,
; unwind the frame, and fixup the stack to make it look like the (managed) caller AV'ed, which will be
; translated to a managed exception as usual.
ALTERNATE_ENTRY RhpCopyAnyWithWriteBarrierDestAVLocation
cmp byte ptr [rcx], 0
ALTERNATE_ENTRY RhpCopyAnyWithWriteBarrierSrcAVLocation
cmp byte ptr [rdx], 0

; tail-call to the GC-safe memcpy implementation
jmp memcpyAnyWithWriteBarrier

NothingToCopy:
mov rax, rcx ; return dest
ret

LEAF_END RhpCopyAnyWithWriteBarrier, _TEXT

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
; The following helper will access ("probe") a word on each page of the stack
; starting with the page right beneath rsp down to the one pointed to by r11.