This repository has been archived by the owner on Nov 1, 2020. It is now read-only.

Merge pull request #816 from jkotas/nmirror-merge
Merge nmirror to master
jkotas committed Feb 6, 2016
2 parents faaddc6 + 56c6dfd commit 068d57c
Showing 29 changed files with 175 additions and 3,732 deletions.
4 changes: 4 additions & 0 deletions src/Native/Runtime/EHHelpers.cpp
@@ -346,6 +346,8 @@ EXTERN_C void * RhpCopyMultibyteNoGCRefsDestAVLocation;
EXTERN_C void * RhpCopyMultibyteNoGCRefsSrcAVLocation;
EXTERN_C void * RhpCopyMultibyteWithWriteBarrierDestAVLocation;
EXTERN_C void * RhpCopyMultibyteWithWriteBarrierSrcAVLocation;
EXTERN_C void * RhpCopyAnyWithWriteBarrierDestAVLocation;
EXTERN_C void * RhpCopyAnyWithWriteBarrierSrcAVLocation;

static bool InWriteBarrierHelper(UIntNative faultingIP)
{
@@ -366,6 +368,8 @@ static bool InWriteBarrierHelper(UIntNative faultingIP)
(UIntNative)&RhpCopyMultibyteNoGCRefsSrcAVLocation,
(UIntNative)&RhpCopyMultibyteWithWriteBarrierDestAVLocation,
(UIntNative)&RhpCopyMultibyteWithWriteBarrierSrcAVLocation,
(UIntNative)&RhpCopyAnyWithWriteBarrierDestAVLocation,
(UIntNative)&RhpCopyAnyWithWriteBarrierSrcAVLocation,
#endif
};
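
For illustration only (not part of this commit): the body of InWriteBarrierHelper is collapsed out of the hunk above, but a table of faulting-IP addresses like this one is typically consulted with a simple address match. A minimal hedged C++ sketch with stand-in types and names:

#include <cstddef>
#include <cstdint>

// uintptr_t stands in for UIntNative; the entries would be the AVLocation addresses above.
static bool IsRegisteredAVLocation(uintptr_t faultingIP, const uintptr_t* locations, size_t count)
{
    for (size_t i = 0; i < count; i++)
    {
        if (locations[i] == faultingIP)
            return true;   // the fault happened at a known copy/write-barrier helper instruction
    }
    return false;
}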

17 changes: 17 additions & 0 deletions src/Native/Runtime/GCMemoryHelpers.cpp
@@ -90,6 +90,23 @@ COOP_PINVOKE_CDECL_HELPER(void *, memcpyGCRefsWithWriteBarrier, (void * dest, const void *src, size_t len))
return dest;
}

// Same as memcpyGCRefsWithWriteBarrier, except that it first checks whether the memory might contain
// GC pointers: if it might, it dispatches to memcpyGCRefsWithWriteBarrier; otherwise it uses a plain memcpy.
COOP_PINVOKE_CDECL_HELPER(void *, memcpyAnyWithWriteBarrier, (void * dest, const void *src, size_t len))
{
// null pointers are not allowed (they are checked by RhpCopyAnyWithWriteBarrier)
ASSERT(dest != nullptr);
ASSERT(src != nullptr);

// Use GC safe copy whenever there might be GC pointers
if (IS_ALIGNED(dest, sizeof(size_t)) && IS_ALIGNED(src, sizeof(size_t)) && IS_ALIGNED(len, sizeof(size_t)))
{
return memcpyGCRefsWithWriteBarrier(dest, src, len);
}

return memcpy(dest, src, len);
}
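
For illustration only (not part of this commit), a minimal standalone sketch of the alignment test that drives the dispatch above; IsAligned stands in for the runtime's IS_ALIGNED macro, which is assumed to test divisibility by the alignment:

#include <cstdint>
#include <cstdio>

// Stand-in for IS_ALIGNED: true when value is a multiple of alignment (assumed semantics).
static bool IsAligned(uintptr_t value, size_t alignment) { return (value % alignment) == 0; }

int main()
{
    alignas(sizeof(size_t)) unsigned char buf[32] = {};
    uintptr_t dest = reinterpret_cast<uintptr_t>(buf);
    uintptr_t src  = reinterpret_cast<uintptr_t>(buf + sizeof(size_t));

    // dest, src and len all pointer-size aligned: the region may carry object references,
    // so memcpyAnyWithWriteBarrier would route this through memcpyGCRefsWithWriteBarrier.
    printf("aligned 16-byte copy -> GC-safe path: %d\n",
           IsAligned(dest, sizeof(size_t)) && IsAligned(src, sizeof(size_t)) && IsAligned(16, sizeof(size_t)));

    // A 3-byte copy cannot move a properly aligned object reference, so plain memcpy suffices.
    printf("3-byte copy -> plain memcpy: %d\n", !IsAligned(3, sizeof(size_t)));
    return 0;
}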

// Move memory, in a way that is compatible with a move onto the heap, but
// does not require the destination pointer to be on the heap.

5 changes: 0 additions & 5 deletions src/Native/Runtime/MiscHelpers.cpp
@@ -134,11 +134,6 @@ COOP_PINVOKE_HELPER(HANDLE, RhGetModuleFromPointer, (PTR_VOID pPointerVal))

COOP_PINVOKE_HELPER(HANDLE, RhGetModuleFromEEType, (EEType * pEEType))
{
// Runtime allocated EETypes have no associated module, but class libraries shouldn't be able to get to
// any of these since they're currently only used for the canonical version of a generic EEType and we
// provide no means to go from the cloned version to the canonical version.
ASSERT(!pEEType->IsRuntimeAllocated());

// For dynamically created types, return the module handle that contains the template type
if (pEEType->IsDynamicType())
pEEType = pEEType->get_DynamicTemplateType();
743 changes: 22 additions & 721 deletions src/Native/Runtime/RuntimeInstance.cpp

Large diffs are not rendered by default.

34 changes: 2 additions & 32 deletions src/Native/Runtime/RuntimeInstance.h
@@ -6,7 +6,6 @@ typedef DPTR(ThreadStore) PTR_ThreadStore;
class Module;
typedef DPTR(Module) PTR_Module;
class ICodeManager;
class VirtualCallStubManager;
struct GenericInstanceDesc;
typedef SPTR(GenericInstanceDesc) PTR_GenericInstanceDesc;
struct ModuleHeader;
@@ -45,14 +44,9 @@ class RuntimeInstance
CodeManagerList m_CodeManagerList;
#endif

#ifdef FEATURE_VSD
VirtualCallStubManager * m_pVSDManager;
#endif

// Indicates whether the runtime is in standalone exe mode where the only Redhawk module that will be
// loaded into the process (besides the runtime's own module) is the exe itself. The most important aspect
// of this is that generic types don't need to be unified. This flag will be correctly initialized once
// the exe module has loaded.
// loaded into the process (besides the runtime's own module) is the exe itself. This flag will be
// correctly initialized once the exe module has loaded.
bool m_fStandaloneExeMode;

// If m_fStandaloneExeMode is set this contains a pointer to the exe module. Otherwise it's null.
@@ -66,16 +60,6 @@
bool m_fProfileThreadCreated;
#endif

// Generic type unification. Used only if we're not in single standalone exe mode.
UnifiedGenericInstance ** m_genericInstHashtab;
UnifiedGenericInstance ** m_genericInstHashtabUpdates;
UInt32 m_genericInstHashtabCount;
UInt32 m_genericInstHashtabEntries;
CrstStatic m_genericInstHashtabLock;
#ifdef _DEBUG
bool m_genericInstHashUpdateInProgress;
#endif

// List of generic instances that have GC references to report. This list is updated under the hash table
// lock above and enumerated without lock during garbage collections (when updates cannot occur). This
// list is only used in non-standalone exe mode, i.e. when we're unifying generic types. In standalone
@@ -91,10 +75,6 @@

RuntimeInstance();

#ifdef FEATURE_VSD
static bool CreateVSD(VirtualCallStubManager ** ppVSD);
#endif

SList<Module>* GetModuleList();

bool BuildGenericTypeHashTable();
@@ -144,19 +124,9 @@
void EnumGenericStaticGCRefs(PTR_GenericInstanceDesc pInst, void * pfnCallback, void * pvCallbackData, Module *pModule);
void EnumAllStaticGCRefs(void * pfnCallback, void * pvCallbackData);

#ifdef FEATURE_VSD
VirtualCallStubManager * GetVSDManager() { return m_pVSDManager; }
#endif

bool ShouldHijackCallsiteForGcStress(UIntNative CallsiteIP);
bool ShouldHijackLoopForGcStress(UIntNative CallsiteIP);

bool StartGenericUnification(UInt32 cInstances);
UnifiedGenericInstance *UnifyGenericInstance(GenericInstanceDesc *genericInstance, UInt32 uiLocalTlsIndex);
void EndGenericUnification();

void ReleaseGenericInstance(GenericInstanceDesc * pInst);

void EnableGcPollStress();
void UnsychronizedResetHijackedLoops();

8 changes: 1 addition & 7 deletions src/Native/Runtime/SyncClean.cpp
@@ -12,9 +12,6 @@
#include "holder.h"
#include "SpinLock.h"
#include "rhbinder.h"
#ifdef FEATURE_VSD
#include "virtualcallstub.h"
#endif // FEATURE_VSD
#include "CachedInterfaceDispatch.h"

#include "SyncClean.hpp"
@@ -26,10 +23,7 @@ void SyncClean::Terminate()

void SyncClean::CleanUp ()
{
#ifdef FEATURE_VSD
// Give others we want to reclaim during the GC sync point a chance to do it
VirtualCallStubManager::ReclaimAll();
#elif defined(FEATURE_CACHED_INTERFACE_DISPATCH)
#ifdef FEATURE_CACHED_INTERFACE_DISPATCH
// Update any interface dispatch caches that were unsafe to modify outside of this GC.
ReclaimUnusedInterfaceDispatchCaches();
#endif
36 changes: 36 additions & 0 deletions src/Native/Runtime/amd64/MiscStubs.asm
@@ -9,6 +9,7 @@ EXTERN GetClasslibCCtorCheck : PROC
EXTERN memcpy : PROC
EXTERN memcpyGCRefs : PROC
EXTERN memcpyGCRefsWithWriteBarrier : PROC
EXTERN memcpyAnyWithWriteBarrier : PROC

;;
;; Currently called only from a managed executable once Main returns, this routine does whatever is needed to
@@ -267,4 +268,39 @@ NothingToCopy:

LEAF_END RhpCopyMultibyteWithWriteBarrier, _TEXT

;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;
;; void* RhpCopyAnyWithWriteBarrier(void*, void*, size_t)
;;
;; The purpose of this wrapper is to hoist the potential null reference exceptions of copying memory up to a place where
;; the stack unwinder and exception dispatch can properly transform the exception into a managed exception and dispatch
;; it to managed code.
;; Runs a card table update via RhpBulkWriteBarrier after the copy if the copy may contain GC pointers
;;
LEAF_ENTRY RhpCopyAnyWithWriteBarrier, _TEXT

; rcx dest
; rdx src
; r8 count

test r8, r8 ; check for a zero-length copy
jz NothingToCopy

; Now check the dest and src pointers. If they AV, the EH subsystem will recognize the address of the AV,
; unwind the frame, and fixup the stack to make it look like the (managed) caller AV'ed, which will be
; translated to a managed exception as usual.
ALTERNATE_ENTRY RhpCopyAnyWithWriteBarrierDestAVLocation
cmp byte ptr [rcx], 0
ALTERNATE_ENTRY RhpCopyAnyWithWriteBarrierSrcAVLocation
cmp byte ptr [rdx], 0

; tail-call to the GC-safe memcpy implementation
jmp memcpyAnyWithWriteBarrier

NothingToCopy:
mov rax, rcx ; return dest
ret

LEAF_END RhpCopyAnyWithWriteBarrier, _TEXT

end
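
For illustration only (not part of this commit), a hedged C++ paraphrase of the stub above; the MASM code is the actual implementation, and the extern "C" declaration is an assumption about the helper's linkage:

#include <cstddef>

extern "C" void* memcpyAnyWithWriteBarrier(void* dest, const void* src, size_t len);  // from GCMemoryHelpers.cpp

extern "C" void* RhpCopyAnyWithWriteBarrier_Sketch(void* dest, const void* src, size_t len)
{
    if (len == 0)
        return dest;                                 // NothingToCopy: return dest unchanged

    // Probe one byte of each buffer first (the cmp byte ptr instructions above). A bad
    // pointer faults here, at an address registered in EHHelpers.cpp, before any bytes
    // are copied, so EH can turn the AV into a managed exception in the caller.
    volatile char probe;
    probe = *static_cast<volatile char*>(dest);
    probe = *static_cast<const volatile char*>(src);
    (void)probe;

    // Tail-call to the GC-aware copy, which applies the bulk write barrier when the
    // memory may contain GC pointers.
    return memcpyAnyWithWriteBarrier(dest, src, len);
}
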
127 changes: 0 additions & 127 deletions src/Native/Runtime/amd64/StubDispatch.asm
@@ -4,133 +4,6 @@

include AsmMacros.inc

ifdef FEATURE_VSD

RESOLVE_WORKER_STATIC equ ?ResolveWorkerStatic@VirtualCallStubManager@@CAPEAEPEAEPEAVEEType@@PEAPEBE@Z
BACKPATCH_WORKER_STATIC equ ?BackPatchWorkerStatic@VirtualCallStubManager@@CAXPEBEPEAPEBE@Z

;; VirtualStubDispatch
EXTERN RESOLVE_WORKER_STATIC : PROC
EXTERN BACKPATCH_WORKER_STATIC : PROC

;; This is the initial and failure entrypoint for VSD. All interface call sites
;; start off pointing here, and all failed mono- and poly-morphic target lookups
;; end up here. The purpose is to save the right registers and pass control to
;; VirtualCallStubManager to find the correct target, change the indirection cell
;; (if necessary) and populate the poly-morphic cache (if necessary).

VSDRWAS_ReservedStack equ 20h + 8h + 40h ;; Scratch space, padding, and room for the FP args

NESTED_ENTRY VSDResolveWorkerAsmStub, _TEXT
;; Figure out the return address
mov r10, [rsp]

alloc_stack VSDRWAS_ReservedStack

;; preserve the argument registers in the scratch space across the helper call.
save_reg_postrsp rcx, (VSDRWAS_ReservedStack + 8*1)
save_reg_postrsp rdx, (VSDRWAS_ReservedStack + 8*2)
save_reg_postrsp r8, (VSDRWAS_ReservedStack + 8*3)
save_reg_postrsp r9, (VSDRWAS_ReservedStack + 8*4)
save_xmm128_postrsp xmm0, (20h + 16*0)
save_xmm128_postrsp xmm1, (20h + 16*1)
save_xmm128_postrsp xmm2, (20h + 16*2)
save_xmm128_postrsp xmm3, (20h + 16*3)
END_PROLOGUE
;; what was in rax (could be address of indirection in shared generic instantiation case)
;; gets passed in r8
mov r8, r11

;; load the eetype from the object instance in rcx
mov rdx, [rcx]

;; return address is first argument
mov rcx, r10

call RESOLVE_WORKER_STATIC

;; Restore the argument registers.
movdqa xmm0, [rsp + 20h + 16*0]
movdqa xmm1, [rsp + 20h + 16*1]
movdqa xmm2, [rsp + 20h + 16*2]
movdqa xmm3, [rsp + 20h + 16*3]
mov r9, [rsp + VSDRWAS_ReservedStack + 8*4]
mov r8, [rsp + VSDRWAS_ReservedStack + 8*3]
mov rdx, [rsp + VSDRWAS_ReservedStack + 8*2]
mov rcx, [rsp + VSDRWAS_ReservedStack + 8*1]

add rsp, VSDRWAS_ReservedStack
TAILJMP_RAX
NESTED_END VSDResolveWorkerAsmStub, _TEXT


;; Call the callsite back patcher. The fail stub piece of the resolver is being
;; called too often, i.e. dispatch stubs are failing the expected EEType test too often.

VSDBPWAS_ReservedStack equ 20h + 40h + 10h ;; Scratch space, room for the FP args, and space to save R11

NESTED_ENTRY VSDBackPatchWorkerAsmStub, _TEXT
;; Account for the return address in our unwind info. We've been called from
;; the ResolveStub, and it does not have any unwind info, nor has it reserved
;; any scratch space. We make it okay to be called from a leaf function by
;; accounting for the stack space it used making the call to us in our frame.
;; i.e., this fools the unwinder into using the return address of the previous
;; frame for the return address of this frame, thereby skipping the leaf frame.
.allocstack 8
;; Figure out the return address of the method that has called the ResolveStub.
;; It is at +8 because the entry at the top of the stack is the return address
;; to the ResolveStub.
mov r10, [rsp + 8]
alloc_stack VSDBPWAS_ReservedStack

;; Preserve the argument registers across our helper call. We use the scratch space
;; allocated by the caller of the ResolveStub, thus the extra 8 bytes in the offsets.
save_reg_postrsp rcx, (VSDBPWAS_ReservedStack + 8 + 8*1)
save_reg_postrsp rdx, (VSDBPWAS_ReservedStack + 8 + 8*2)
save_reg_postrsp r8, (VSDBPWAS_ReservedStack + 8 + 8*3)
save_reg_postrsp r9, (VSDBPWAS_ReservedStack + 8 + 8*4)

;; save xmm regs just after the scratch space for our callees
save_xmm128_postrsp xmm0, (20h + 10h*0)
save_xmm128_postrsp xmm1, (20h + 10h*1)
save_xmm128_postrsp xmm2, (20h + 10h*2)
save_xmm128_postrsp xmm3, (20h + 10h*3)
;; save the indirection cell address for shared generic instantiation case after the xmm regs
save_reg_postrsp r11, (20h + 10h*4)
END_PROLOGUE

;; whatever was in r11 is second argument - this is the
;; address of the indirection cell in the shared generic instantiation case
mov rdx, r11
;; return address is first argument
mov rcx, r10

call BACKPATCH_WORKER_STATIC

;; restore shared generic instantiation indirection cell
mov r11, [rsp + 20h + 10h*4]

;; Restore the argument registers.
movdqa xmm0, [rsp + 20h + 10h*0]
movdqa xmm1, [rsp + 20h + 10h*1]
movdqa xmm2, [rsp + 20h + 10h*2]
movdqa xmm3, [rsp + 20h + 10h*3]
mov r9, [rsp + VSDBPWAS_ReservedStack + 8 + 8*4]
mov r8, [rsp + VSDBPWAS_ReservedStack + 8 + 8*3]
mov rdx, [rsp + VSDBPWAS_ReservedStack + 8 + 8*2]
mov rcx, [rsp + VSDBPWAS_ReservedStack + 8 + 8*1]

add rsp, VSDBPWAS_ReservedStack
ret
NESTED_END VSDBackPatchWorkerAsmStub, _TEXT

endif ;; FEATURE_VSD

ifdef FEATURE_CACHED_INTERFACE_DISPATCH
