diff --git a/src/vm/i386/asmconstants.h b/src/vm/i386/asmconstants.h
index 5fd39d6897af..454745ff6291 100644
--- a/src/vm/i386/asmconstants.h
+++ b/src/vm/i386/asmconstants.h
@@ -449,6 +449,10 @@ ASMCONSTANTS_C_ASSERT(CallDescrData__fpReturnSize == offsetof(CallDescrD
 ASMCONSTANTS_C_ASSERT(CallDescrData__pTarget == offsetof(CallDescrData, pTarget))
 ASMCONSTANTS_C_ASSERT(CallDescrData__returnValue == offsetof(CallDescrData, returnValue))
 
+#define UMEntryThunk_m_pUMThunkMarshInfo 0x0C
+#define UMThunkMarshInfo_m_pILStub 0x00
+#define UMThunkMarshInfo_m_cbActualArgSize 0x04
+
 #undef ASMCONSTANTS_C_ASSERT
 #undef ASMCONSTANTS_RUNTIME_ASSERT
diff --git a/src/vm/i386/umthunkstub.S b/src/vm/i386/umthunkstub.S
index 2bc6fb702fd8..4378b710b3b0 100644
--- a/src/vm/i386/umthunkstub.S
+++ b/src/vm/i386/umthunkstub.S
@@ -14,9 +14,10 @@ NESTED_ENTRY TheUMEntryPrestub, _TEXT, UnhandledExceptionHandlerUnix
     push ecx
     push edx
-    push eax
+    push eax // UMEntryThunk*
     call C_FUNC(TheUMEntryPrestubWorker)
     pop edx
+    // eax = PCODE
 
     // Restore argument registers
     pop edx
@@ -25,8 +26,125 @@ NESTED_ENTRY TheUMEntryPrestub, _TEXT, UnhandledExceptionHandlerUnix
     jmp eax // Tail Jmp
 NESTED_END TheUMEntryPrestub, _TEXT
 
-
+//
+// eax: UMEntryThunk*
+//
 NESTED_ENTRY UMThunkStub, _TEXT, UnhandledExceptionHandlerUnix
-    int 3 // implement here
+
+#define UMThunkStub_LOCALVARS (2*4) // UMEntryThunk*, Thread*
+#define UMThunkStub_UMENTRYTHUNK_OFFSET 0x04
+#define UMThunkStub_THREAD_OFFSET 0x08
+#define UMThunkStub_INT_ARG_SPILL (2*4) // spill slots for ecx, edx
+#define UMThunkStub_FIXEDALLOCSIZE (UMThunkStub_LOCALVARS+UMThunkStub_INT_ARG_SPILL)
+#define UMThunkStub_INT_ARG_OFFSET (UMThunkStub_LOCALVARS+4)
+
+// Frame layout:
+//
+// return address                           <-- entry ESP
+// saved ebp                                <-- EBP
+// UMEntryThunk*
+// Thread*
+// saved ecx
+// saved edx
+// {optional stack args passed to callee}   <-- new ESP
+
+    PROLOG_BEG
+    PROLOG_END
+    sub esp, UMThunkStub_FIXEDALLOCSIZE
+
+    mov dword ptr [ebp - UMThunkStub_INT_ARG_OFFSET], ecx
+    mov dword ptr [ebp - UMThunkStub_INT_ARG_OFFSET - 4], edx
+
+    mov dword ptr [ebp - UMThunkStub_UMENTRYTHUNK_OFFSET], eax
+
+    call C_FUNC(GetThread)
+    test eax, eax
+    jz LOCAL_LABEL(DoThreadSetup)
+
+LOCAL_LABEL(HaveThread):
+
+    mov dword ptr [ebp - UMThunkStub_THREAD_OFFSET], eax
+
+    // FailFast if a native callable method is invoked via ldftn and calli.
+    cmp dword ptr [eax + Thread_m_fPreemptiveGCDisabled], 1
+    jz LOCAL_LABEL(InvalidTransition)
+
+    // disable preemptive GC
+    mov dword ptr [eax + Thread_m_fPreemptiveGCDisabled], 1
+
+    // catch the returning thread here if a GC is in progress
+    PREPARE_EXTERNAL_VAR g_TrapReturningThreads, eax
+    cmp dword ptr [eax], 0 // PREPARE_EXTERNAL_VAR yields the address, so test the value
+    jnz LOCAL_LABEL(DoTrapReturningThreadsTHROW)
+
+LOCAL_LABEL(InCooperativeMode):
+
+    // stay on eax/ecx/edx: ebx/esi/edi are callee-saved and this stub does not spill them
+    mov eax, dword ptr [ebp - UMThunkStub_UMENTRYTHUNK_OFFSET]
+    mov eax, dword ptr [eax + UMEntryThunk_m_pUMThunkMarshInfo]
+    mov eax, dword ptr [eax + UMThunkMarshInfo_m_cbActualArgSize]
+    test eax, eax
+    jnz LOCAL_LABEL(UMThunkStub_CopyStackArgs)
+
+LOCAL_LABEL(UMThunkStub_ArgumentsSetup):
+
+    mov eax, dword ptr [ebp - UMThunkStub_UMENTRYTHUNK_OFFSET]
+    mov eax, dword ptr [eax + UMEntryThunk_m_pUMThunkMarshInfo]
+    mov eax, dword ptr [eax + UMThunkMarshInfo_m_pILStub] // eax <- Stub*
+
+    mov ecx, dword ptr [ebp - UMThunkStub_INT_ARG_OFFSET]
+    mov edx, dword ptr [ebp - UMThunkStub_INT_ARG_OFFSET - 4]
+
+    call eax
+
+LOCAL_LABEL(PostCall):
+
+    mov eax, dword ptr [ebp - UMThunkStub_THREAD_OFFSET]
+    mov dword ptr [eax + Thread_m_fPreemptiveGCDisabled], 0
+
+    mov esp, ebp // deallocate arguments
+    EPILOG_BEG
+    EPILOG_END
+    ret
+
+LOCAL_LABEL(DoThreadSetup):
+
+    call C_FUNC(CreateThreadBlockThrow)
+    jmp LOCAL_LABEL(HaveThread)
+
+LOCAL_LABEL(InvalidTransition):
+
+    // no arguments to set up; ReversePInvokeBadTransition will fail fast
+    call C_FUNC(ReversePInvokeBadTransition)
+
+LOCAL_LABEL(DoTrapReturningThreadsTHROW):
+
+    // extern "C" VOID STDCALL UMThunkStubRareDisableWorker(Thread *pThread, UMEntryThunk *pUMEntryThunk)
+    mov eax, dword ptr [ebp - UMThunkStub_UMENTRYTHUNK_OFFSET]
+    push eax
+    mov eax, dword ptr [ebp - UMThunkStub_THREAD_OFFSET]
+    push eax
+    call C_FUNC(UMThunkStubRareDisableWorker)
+
+    jmp LOCAL_LABEL(InCooperativeMode)
+
+LOCAL_LABEL(UMThunkStub_CopyStackArgs):
+
+    // eax = m_cbActualArgSize (nonzero)
+    sub esp, eax
+    and esp, -16 // align the outgoing argument area to 16 bytes
+
+LOCAL_LABEL(CopyLoop):
+
+    // eax = remaining byte count
+    // edx = scratch
+    // copy from the incoming stack args at [ebp + 8] to the outgoing area at [esp]
+    add eax, -4
+    mov edx, dword ptr [ebp + 0x08 + eax]
+    mov dword ptr [esp + eax], edx
+    jnz LOCAL_LABEL(CopyLoop) // ZF comes from the add; mov does not affect flags
+
+    jmp LOCAL_LABEL(UMThunkStub_ArgumentsSetup)
+
 NESTED_END UMThunkStub, _TEXT
diff --git a/src/vm/stackwalk.cpp b/src/vm/stackwalk.cpp
index dbc83f4a0156..18a890003952 100644
--- a/src/vm/stackwalk.cpp
+++ b/src/vm/stackwalk.cpp
@@ -2677,7 +2677,7 @@ StackWalkAction StackFrameIterator::NextRaw(void)
             // We are transitioning from unmanaged code to managed code... lets do some validation of our
             // EH mechanism on platforms that we can.
-#if defined(_DEBUG) && !defined(DACCESS_COMPILE) && defined(_TARGET_X86_)
+#if defined(_DEBUG) && !defined(DACCESS_COMPILE) && (defined(_TARGET_X86_) && !defined(FEATURE_STUBS_AS_IL))
             VerifyValidTransitionFromManagedCode(m_crawl.pThread, &m_crawl);
 #endif // _DEBUG && !DACCESS_COMPILE && _TARGET_X86_
         }
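For reviewers, here is a rough C++ rendering of the control flow UMThunkStub implements above; it may help when checking the label structure. The struct layouts and extern declarations below are simplified stand-ins for the real VM types and helpers, UMThunkStubSketch and ILStub are hypothetical names used only for this sketch, and alloca/memcpy stand in for the aligned outgoing-argument area and the 4-byte CopyLoop. It compiles, but it is an illustration of the transition logic, not code that belongs in the patch.

// Illustrative sketch only; declarations are simplified stand-ins and it
// will not link outside the VM.
#include <alloca.h>
#include <cstddef>
#include <cstring>

struct Thread           { int m_fPreemptiveGCDisabled; };            // simplified
struct UMThunkMarshInfo { void* m_pILStub; size_t m_cbActualArgSize; };
struct UMEntryThunk     { UMThunkMarshInfo* m_pUMThunkMarshInfo; };

extern "C" Thread* GetThread();
extern "C" Thread* CreateThreadBlockThrow();
extern "C" void    ReversePInvokeBadTransition();                    // fails fast
extern "C" void    UMThunkStubRareDisableWorker(Thread*, UMEntryThunk*);
extern "C" int     g_TrapReturningThreads;                           // simplified type

using ILStub = void* (*)(void* ecxArg, void* edxArg);                // hypothetical

void* UMThunkStubSketch(UMEntryThunk* pThunk, void* ecxArg, void* edxArg,
                        void* stackArgs)
{
    Thread* pThread = GetThread();
    if (pThread == nullptr)
        pThread = CreateThreadBlockThrow();                 // DoThreadSetup

    if (pThread->m_fPreemptiveGCDisabled)                   // InvalidTransition:
        ReversePInvokeBadTransition();                      // already cooperative

    pThread->m_fPreemptiveGCDisabled = 1;                   // enter cooperative mode

    if (g_TrapReturningThreads)                             // DoTrapReturningThreadsTHROW:
        UMThunkStubRareDisableWorker(pThread, pThunk);      // rendezvous with the GC

    UMThunkMarshInfo* pInfo = pThunk->m_pUMThunkMarshInfo;
    size_t cb = pInfo->m_cbActualArgSize;
    if (cb != 0)                                            // UMThunkStub_CopyStackArgs
    {
        // The stub 16-byte aligns esp and copies 4 bytes at a time;
        // alloca + memcpy stand in for that here.
        void* outgoing = alloca(cb);
        std::memcpy(outgoing, stackArgs, cb);
    }

    void* result = ((ILStub)pInfo->m_pILStub)(ecxArg, edxArg);

    pThread->m_fPreemptiveGCDisabled = 0;                   // PostCall: back to preemptive
    return result;
}

What the sketch cannot express is the register contract the assembly satisfies: the UMEntryThunk* arrives in eax, ecx/edx and the copied stack slots must end up exactly where the IL stub expects them, and ebx/esi/edi must be preserved for the native caller, which is why the stub above stays on eax/ecx/edx throughout.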