Skip to content
This repository has been archived by the owner on Jan 23, 2023. It is now read-only.

Commit

Permalink
[x86/Linux] Add UMThunkStub
Browse files Browse the repository at this point in the history
Add UMThunkStub method with logic from that of AMD64
  • Loading branch information
seanshpark committed Dec 14, 2016
1 parent 3019808 commit bafbd62
Show file tree
Hide file tree
Showing 3 changed files with 119 additions and 5 deletions.
4 changes: 4 additions & 0 deletions src/vm/i386/asmconstants.h
Original file line number Diff line number Diff line change
Expand Up @@ -449,6 +449,10 @@ ASMCONSTANTS_C_ASSERT(CallDescrData__fpReturnSize == offsetof(CallDescrD
ASMCONSTANTS_C_ASSERT(CallDescrData__pTarget == offsetof(CallDescrData, pTarget))
ASMCONSTANTS_C_ASSERT(CallDescrData__returnValue == offsetof(CallDescrData, returnValue))

// Hand-maintained field offsets consumed by the i386 UMThunkStub assembly
// (src/vm/i386/umthunkstub.S): UMEntryThunk -> UMThunkMarshInfo -> IL stub.
// NOTE(review): unlike the constants above, these have no
// ASMCONSTANTS_C_ASSERT guarding them against C++ layout drift — consider
// adding offsetof-based asserts for each.
#define UMEntryThunk_m_pUMThunkMarshInfo 0x0C
#define UMThunkMarshInfo_m_pILStub 0x00
#define UMThunkMarshInfo_m_cbActualArgSize 0x04

#undef ASMCONSTANTS_C_ASSERT
#undef ASMCONSTANTS_RUNTIME_ASSERT

Expand Down
118 changes: 114 additions & 4 deletions src/vm/i386/umthunkstub.S
Original file line number Diff line number Diff line change
Expand Up @@ -11,22 +11,132 @@
//
//
// TheUMEntryPrestub
// In:  eax = UMEntryThunk*
// Resolves the real managed target for a reverse-P/Invoke entry point via
// TheUMEntryPrestubWorker, then tail-jumps to it with the original argument
// registers intact so the target sees the caller's arguments unchanged.
//
NESTED_ENTRY TheUMEntryPrestub, _TEXT, UnhandledExceptionHandlerUnix
    // Preserve argument registers around the C++ call
    push    ecx
    push    ebx
    push    edx

    push    eax                             // arg: UMEntryThunk*
    call    C_FUNC(TheUMEntryPrestubWorker)
    add     esp, 4                          // cdecl: caller pops the argument
    // eax = PCODE of the resolved target

    // Restore argument registers in LIFO order.
    // (Was: a duplicated "push eax" left the stack unbalanced at the tail
    // jump, and the restores ran pop edx/ecx/ebx, swapping ecx and ebx.)
    pop     edx
    pop     ebx
    pop     ecx

    jmp     eax                             // tail jump; caller's return address stays on top
NESTED_END TheUMEntryPrestub, _TEXT


// Layout of UMThunkStub's fixed frame, addressed relative to ebp after the
// prologue. Slot meanings are established by the stores in the stub body:
#define UMThunkStub_FIXEDALLOCSIZE 0x20                       // bytes of fixed local scratch below ebp
#define UMThunkStub_UMENTRYTHUNK_OFFSET 0x04                  // [ebp-0x04] = saved UMEntryThunk*
#define UMThunkStub_THREAD_OFFSET 0x08                        // [ebp-0x08] = saved Thread*
#define UMThunkStub_INT_ARG_OFFSET 0x00                       // base of the four spilled arg registers (edi/esi/ebx/ecx)
#define UMThunkStub_EBP_OFFSET (UMThunkStub_FIXEDALLOCSIZE)   // distance from ebp down to the spill area


//
// UMThunkStub: generic reverse-P/Invoke (unmanaged -> managed) entry stub.
// In: eax = UMEntryThunk*; edi/esi/ebx/ecx are spilled and reloaded around
// the runtime calls below, so they appear to carry live argument values —
// NOTE(review): confirm their exact roles against the i386 managed calling
// convention. Remaining arguments live on the caller's stack at [ebp+8].
// Flow: ensure a Thread*, switch to cooperative GC mode, copy any stack
// arguments, call the IL marshaling stub, switch back to preemptive mode.
//
NESTED_ENTRY UMThunkStub, _TEXT, UnhandledExceptionHandlerUnix
    int 3 // NOTE(review): leftover "implement here" breakpoint — executes on every call; confirm it should be removed
    PROLOG_BEG
    PROLOG_END
    // Allocate the fixed scratch area; the extra +4 is presumably alignment
    // padding matched by the "lea esp, [ebp - 4]" in the epilogue — TODO confirm
    // against the PROLOG_BEG/EPILOG_BEG macro expansions.
    sub esp, UMThunkStub_FIXEDALLOCSIZE + 4

    // Spill the four integer argument registers so they survive the runtime
    // calls (GetThread etc.) and the stack-argument copy loop below.
    mov dword ptr [ebp - UMThunkStub_EBP_OFFSET + UMThunkStub_INT_ARG_OFFSET + 0x00], edi
    mov dword ptr [ebp - UMThunkStub_EBP_OFFSET + UMThunkStub_INT_ARG_OFFSET + 0x04], esi
    mov dword ptr [ebp - UMThunkStub_EBP_OFFSET + UMThunkStub_INT_ARG_OFFSET + 0x08], ebx
    mov dword ptr [ebp - UMThunkStub_EBP_OFFSET + UMThunkStub_INT_ARG_OFFSET + 0x0c], ecx

    mov dword ptr [ebp - UMThunkStub_UMENTRYTHUNK_OFFSET], eax // save UMEntryThunk*

    // eax = current Thread*, or NULL if this OS thread has no runtime Thread yet
    call C_FUNC(GetThread)
    test eax, eax
    jz LOCAL_LABEL(DoThreadSetup)

LOCAL_LABEL(HaveThread):

    mov dword ptr [ebp - UMThunkStub_THREAD_OFFSET], eax // save Thread*

    // FailFast if a native callable method invoked via ldftn and calli.
    // (Already in cooperative mode here means an illegal re-entry.)
    cmp dword ptr [eax + Thread_m_fPreemptiveGCDisabled], 1
    jz LOCAL_LABEL(InvalidTransition)

    // disable preemptive GC (enter cooperative mode)
    mov dword ptr [eax + Thread_m_fPreemptiveGCDisabled], 1

    // catch returning thread here if a GC is in progress
    PREPARE_EXTERNAL_VAR g_TrapReturningThreads, eax
    // NOTE(review): PREPARE_EXTERNAL_VAR conventionally yields the variable's
    // *address* in eax, which would make this compare always non-zero —
    // confirm whether "cmp dword ptr [eax], 0" was intended.
    cmp eax, 0
    jnz LOCAL_LABEL(DoTrapReturningThreadsTHROW)

LOCAL_LABEL(InCooperativeMode):

    // eax = UMEntryThunk* -> ebx = UMThunkMarshInfo* -> eax = stack-arg byte count
    mov eax, dword ptr [ebp - UMThunkStub_UMENTRYTHUNK_OFFSET]
    mov ebx, dword ptr [eax + UMEntryThunk_m_pUMThunkMarshInfo]
    mov eax, dword ptr [ebx + UMThunkMarshInfo_m_cbActualArgSize]
    test eax, eax
    jnz LOCAL_LABEL(UMThunkStub_CopyStackArgs) // non-zero: copy caller's stack args first

LOCAL_LABEL(UMThunkStub_ArgumentsSetup):

    // Re-fetch the IL stub entry point: eax = UMEntryThunk* -> marshal info -> Stub*
    mov eax, dword ptr [ebp - UMThunkStub_UMENTRYTHUNK_OFFSET]
    mov ebx, dword ptr [eax + UMEntryThunk_m_pUMThunkMarshInfo]
    mov eax, dword ptr [ebx + UMThunkMarshInfo_m_pILStub] // eax <- Stub*

    // Reload the spilled argument registers (clobbered above and by CopyLoop)
    mov edi, dword ptr [ebp - UMThunkStub_EBP_OFFSET + UMThunkStub_INT_ARG_OFFSET + 0x00]
    mov esi, dword ptr [ebp - UMThunkStub_EBP_OFFSET + UMThunkStub_INT_ARG_OFFSET + 0x04]
    mov ebx, dword ptr [ebp - UMThunkStub_EBP_OFFSET + UMThunkStub_INT_ARG_OFFSET + 0x08]
    mov ecx, dword ptr [ebp - UMThunkStub_EBP_OFFSET + UMThunkStub_INT_ARG_OFFSET + 0x0c]

    // Invoke the IL marshaling stub
    call eax

LOCAL_LABEL(PostCall):

    // Re-enable preemptive GC (leave cooperative mode)
    mov eax, dword ptr [ebp - UMThunkStub_THREAD_OFFSET]
    mov dword ptr [eax + Thread_m_fPreemptiveGCDisabled], 0

    // NOTE(review): pops back to 4 bytes below the saved ebp — presumably
    // matched by the EPILOG_BEG/EPILOG_END expansion; confirm. Plain "ret"
    // means any copied stack args are the native caller's to clean (cdecl);
    // a stdcall-exposed thunk would need "ret imm16" — TODO confirm.
    lea esp, [ebp - 4] // deallocate arguments
    EPILOG_BEG
    EPILOG_END
    ret

LOCAL_LABEL(DoThreadSetup):

    // Create a runtime Thread for this OS thread; presumably returns the new
    // Thread* in eax (throws on failure) — verify against its declaration.
    call C_FUNC(CreateThreadBlockThrow)
    jmp LOCAL_LABEL(HaveThread)

LOCAL_LABEL(InvalidTransition):

    // No arguments to set up; ReversePInvokeBadTransition will failfast (no return)
    call C_FUNC(ReversePInvokeBadTransition)

LOCAL_LABEL(DoTrapReturningThreadsTHROW):

    // extern "C" VOID STDCALL UMThunkStubRareDisableWorker(Thread *pThread, UMEntryThunk *pUMEntryThunk)
    // STDCALL: args pushed right-to-left (pUMEntryThunk first), callee pops them.
    mov eax, dword ptr [ebp - UMThunkStub_UMENTRYTHUNK_OFFSET]
    push eax
    mov eax, dword ptr [ebp - UMThunkStub_THREAD_OFFSET]
    push eax
    call C_FUNC(UMThunkStubRareDisableWorker)

    jmp LOCAL_LABEL(InCooperativeMode)

LOCAL_LABEL(UMThunkStub_CopyStackArgs):

    // eax = m_cbActualArgSize
    // Open space below esp and copy the caller's stack args ([ebp+8] upward,
    // just above saved ebp and the return address) into it, back to front.
    sub esp, eax
    lea esi, [ebp + 0x08]
    lea edi, [esp]

LOCAL_LABEL(CopyLoop):

    // eax = number of bytes remaining (multiple of 4, > 0 on entry)
    // esi = src base (caller's args)
    // edi = dest base (new outgoing arg area)
    // edx = scratch
    // Loop condition: the two movs preserve flags, so jnz tests the ZF
    // produced by "add eax, -4".
    add eax, -4
    mov edx, dword ptr [esi + eax]
    mov dword ptr [edi + eax], edx
    jnz LOCAL_LABEL(CopyLoop)

    jmp LOCAL_LABEL(UMThunkStub_ArgumentsSetup)

NESTED_END UMThunkStub, _TEXT
2 changes: 1 addition & 1 deletion src/vm/stackwalk.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -2677,7 +2677,7 @@ StackWalkAction StackFrameIterator::NextRaw(void)

// We are transitioning from unmanaged code to managed code... lets do some validation of our
// EH mechanism on platforms that we can.
#if defined(_DEBUG) && !defined(DACCESS_COMPILE) && defined(_TARGET_X86_)
#if defined(_DEBUG) && !defined(DACCESS_COMPILE) && (defined(_TARGET_X86_) && !defined(FEATURE_STUBS_AS_IL))
VerifyValidTransitionFromManagedCode(m_crawl.pThread, &m_crawl);
#endif // _DEBUG && !DACCESS_COMPILE && _TARGET_X86_
}
Expand Down

0 comments on commit bafbd62

Please sign in to comment.