Delete some dead code (#87249)
I went through RuntimeImports looking for things that looked unnecessary. The rest of this PR is basically "stuff that came out after pulling on a loose thread of a sweater".
MichalStrehovsky authored Jun 8, 2023
1 parent 90b416a commit 157996f
Showing 21 changed files with 4 additions and 934 deletions.
10 changes: 0 additions & 10 deletions src/coreclr/nativeaot/Runtime/AsmOffsets.h
@@ -72,16 +72,6 @@ ASM_OFFSET( 10, 20, InterfaceDispatchCache, m_rgEntries)
ASM_SIZEOF( 8, 10, InterfaceDispatchCacheEntry)
#endif

#ifdef FEATURE_DYNAMIC_CODE
ASM_OFFSET( 0, 0, CallDescrData, pSrc)
ASM_OFFSET( 4, 8, CallDescrData, numStackSlots)
ASM_OFFSET( 8, C, CallDescrData, fpReturnSize)
ASM_OFFSET( C, 10, CallDescrData, pArgumentRegisters)
ASM_OFFSET( 10, 18, CallDescrData, pFloatArgumentRegisters)
ASM_OFFSET( 14, 20, CallDescrData, pTarget)
ASM_OFFSET( 18, 28, CallDescrData, pReturnBuffer)
#endif

// Undefine macros that are only used in this header for convenience.
#undef ASM_OFFSET
#undef ASM_SIZEOF
1 change: 0 additions & 1 deletion src/coreclr/nativeaot/Runtime/AsmOffsetsVerify.cpp
@@ -16,7 +16,6 @@
#include "RuntimeInstance.h"
#include "CachedInterfaceDispatch.h"
#include "shash.h"
#include "CallDescr.h"

class AsmOffsets
{
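For context on why these two removals travel together: entries in AsmOffsets.h are consumed by the assembly code and, as the file name suggests, re-checked by AsmOffsetsVerify.cpp against the real C++ struct layouts, so once the CallDescrData entries are gone there is nothing left for the verification file to check and the CallDescr.h include above can go too. Below is a minimal sketch of that pattern, assuming a static_assert-based expansion; the Example struct and the exact macro body are illustrative stand-ins, not the runtime's actual code.

// Illustrative sketch only -- simplified stand-ins for the AsmOffsets machinery.
#include <cstddef>
#include <cstdint>

struct Example                // stand-in for a runtime struct such as CallDescrData
{
    void*    pSrc;
    uint32_t numStackSlots;
};

// Each header entry records the expected 32-bit and 64-bit offsets (in hex).
// The assembly build turns ASM_OFFSET into named constants; the verification
// translation unit turns it into a compile-time layout check instead.
#define ASM_OFFSET(offset32, offset64, cls, member) \
    static_assert(offsetof(cls, member) == (sizeof(void*) == 8 ? 0x##offset64 : 0x##offset32), \
                  "asm offset mismatch for " #cls "." #member);

ASM_OFFSET( 0, 0, Example, pSrc)
ASM_OFFSET( 4, 8, Example, numStackSlots)

#undef ASM_OFFSET

int main() { return 0; }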
1 change: 0 additions & 1 deletion src/coreclr/nativeaot/Runtime/CMakeLists.txt
@@ -196,7 +196,6 @@ endif (CLR_CMAKE_TARGET_ARCH_AMD64 AND CLR_CMAKE_TARGET_WIN32)

list(APPEND RUNTIME_SOURCES_ARCH_ASM
${ARCH_SOURCES_DIR}/AllocFast.${ASM_SUFFIX}
${ARCH_SOURCES_DIR}/CallDescrWorker.${ASM_SUFFIX}
${ARCH_SOURCES_DIR}/ExceptionHandling.${ASM_SUFFIX}
${ARCH_SOURCES_DIR}/GcProbe.${ASM_SUFFIX}
${ARCH_SOURCES_DIR}/Interlocked.${ASM_SUFFIX}
13 changes: 0 additions & 13 deletions src/coreclr/nativeaot/Runtime/CallDescr.h

This file was deleted.

22 changes: 0 additions & 22 deletions src/coreclr/nativeaot/Runtime/MiscHelpers.cpp
@@ -121,16 +121,6 @@ COOP_PINVOKE_HELPER(HANDLE, RhGetOSModuleFromPointer, (PTR_VOID pPointerVal))
return NULL;
}

COOP_PINVOKE_HELPER(HANDLE, RhGetOSModuleFromEEType, (MethodTable * pEEType))
{
return pEEType->GetTypeManagerPtr()->AsTypeManager()->GetOsModuleHandle();
}

COOP_PINVOKE_HELPER(TypeManagerHandle, RhGetModuleFromEEType, (MethodTable * pEEType))
{
return *pEEType->GetTypeManagerPtr();
}

COOP_PINVOKE_HELPER(FC_BOOL_RET, RhFindBlob, (TypeManagerHandle *pTypeManagerHandle, uint32_t blobId, uint8_t ** ppbBlob, uint32_t * pcbBlob))
{
TypeManagerHandle typeManagerHandle = *pTypeManagerHandle;
@@ -350,18 +340,6 @@ COOP_PINVOKE_HELPER(uint8_t *, RhGetCodeTarget, (uint8_t * pCodeOrg))
return pCodeOrg;
}

// Get the universal transition thunk. If the universal transition stub is called through
// the normal PE static linkage model, a jump stub would be used which may interfere with
// the custom calling convention of the universal transition thunk. So instead, a special
// api just for getting the thunk address is needed.
// TODO: On ARM this may still result in a jump stub that trashes R12. Determine if anything
// needs to be done about that when we implement the stub for ARM.
extern "C" void RhpUniversalTransition();
COOP_PINVOKE_HELPER(void*, RhGetUniversalTransitionThunk, ())
{
return (void*)RhpUniversalTransition;
}

extern CrstStatic g_ThunkPoolLock;

EXTERN_C NATIVEAOT_API void __cdecl RhpAcquireThunkPoolLock()
2 changes: 0 additions & 2 deletions src/coreclr/nativeaot/Runtime/PalRedhawk.h
@@ -752,8 +752,6 @@ REDHAWK_PALIMPORT uint32_t REDHAWK_PALAPI PalEventUnregister(REGHANDLE arg1);
REDHAWK_PALIMPORT uint32_t REDHAWK_PALAPI PalEventWrite(REGHANDLE arg1, const EVENT_DESCRIPTOR * arg2, uint32_t arg3, EVENT_DATA_DESCRIPTOR * arg4);
#endif

REDHAWK_PALIMPORT _Ret_maybenull_ void* REDHAWK_PALAPI PalSetWerDataBuffer(_In_ void* pNewBuffer);

REDHAWK_PALIMPORT UInt32_BOOL REDHAWK_PALAPI PalAllocateThunksFromTemplate(_In_ HANDLE hTemplateModule, uint32_t templateRva, size_t templateSize, _Outptr_result_bytebuffer_(templateSize) void** newThunksOut);
REDHAWK_PALIMPORT UInt32_BOOL REDHAWK_PALAPI PalFreeThunksFromTemplate(_In_ void *pBaseAddress);

8 changes: 0 additions & 8 deletions src/coreclr/nativeaot/Runtime/RuntimeInstance.cpp
@@ -35,14 +35,6 @@ bool ShouldHijackForGcStress(uintptr_t CallsiteIP, HijackType ht);

#include "shash.inl"

#ifndef DACCESS_COMPILE
COOP_PINVOKE_HELPER(uint8_t *, RhSetErrorInfoBuffer, (uint8_t * pNewBuffer))
{
return (uint8_t *) PalSetWerDataBuffer(pNewBuffer);
}
#endif // DACCESS_COMPILE


ThreadStore * RuntimeInstance::GetThreadStore()
{
return m_pThreadStore;
140 changes: 2 additions & 138 deletions src/coreclr/nativeaot/Runtime/StackFrameIterator.cpp
@@ -44,9 +44,6 @@ GVAL_IMPL_INIT(PTR_VOID, g_ReturnFromUniversalTransitionAddr, PointerToReturnFro

EXTERN_C PTR_VOID PointerToReturnFromUniversalTransition_DebugStepTailCall;
GVAL_IMPL_INIT(PTR_VOID, g_ReturnFromUniversalTransition_DebugStepTailCallAddr, PointerToReturnFromUniversalTransition_DebugStepTailCall);

EXTERN_C PTR_VOID PointerToReturnFromCallDescrThunk;
GVAL_IMPL_INIT(PTR_VOID, g_ReturnFromCallDescrThunkAddr, PointerToReturnFromCallDescrThunk);
#endif

#ifdef TARGET_X86
@@ -1200,130 +1197,6 @@ void StackFrameIterator::UnwindUniversalTransitionThunk()
#define STACK_ALIGN_SIZE 4
#endif

#ifdef TARGET_AMD64
struct CALL_DESCR_CONTEXT
{
uintptr_t Rbp;
uintptr_t Rsi;
uintptr_t Rbx;
uintptr_t IP;
};
#elif defined(TARGET_ARM)
struct CALL_DESCR_CONTEXT
{
uintptr_t R4;
uintptr_t R5;
uintptr_t R7;
uintptr_t IP;
};
#elif defined(TARGET_ARM64)
struct CALL_DESCR_CONTEXT
{
uintptr_t FP;
uintptr_t IP;
uintptr_t X19;
uintptr_t X20;
};
#elif defined(TARGET_X86)
struct CALL_DESCR_CONTEXT
{
uintptr_t Rbx;
uintptr_t Rbp;
uintptr_t IP;
};
#elif defined (TARGET_WASM)
struct CALL_DESCR_CONTEXT
{
uintptr_t IP;
};
#else
#error NYI - For this arch
#endif

typedef DPTR(CALL_DESCR_CONTEXT) PTR_CALL_DESCR_CONTEXT;

void StackFrameIterator::UnwindCallDescrThunk()
{
ASSERT((m_dwFlags & MethodStateCalculated) == 0);

#if defined(USE_PORTABLE_HELPERS) // @TODO: Corresponding helper code is only defined in assembly code
return;
#else // defined(USE_PORTABLE_HELPERS)
ASSERT(CategorizeUnadjustedReturnAddress(m_ControlPC) == InCallDescrThunk);

uintptr_t newSP;
#ifdef TARGET_AMD64
// RBP points to the SP that we want to capture. (This arrangement allows for
// the arguments from this function to be loaded into memory with an adjustment
// to SP, like an alloca
newSP = *(PTR_UIntNative)m_RegDisplay.pRbp;

PTR_CALL_DESCR_CONTEXT pContext = (PTR_CALL_DESCR_CONTEXT)newSP;

m_RegDisplay.pRbp = PTR_TO_MEMBER(CALL_DESCR_CONTEXT, pContext, Rbp);
m_RegDisplay.pRsi = PTR_TO_MEMBER(CALL_DESCR_CONTEXT, pContext, Rsi);
m_RegDisplay.pRbx = PTR_TO_MEMBER(CALL_DESCR_CONTEXT, pContext, Rbx);

// And adjust SP to be the state that it should be in just after returning from
// the CallDescrFunction
newSP += sizeof(CALL_DESCR_CONTEXT);
#elif defined(TARGET_ARM)
// R7 points to the SP that we want to capture. (This arrangement allows for
// the arguments from this function to be loaded into memory with an adjustment
// to SP, like an alloca
newSP = *(PTR_UIntNative)m_RegDisplay.pR7;
PTR_CALL_DESCR_CONTEXT pContext = (PTR_CALL_DESCR_CONTEXT)newSP;

m_RegDisplay.pR4 = PTR_TO_MEMBER(CALL_DESCR_CONTEXT, pContext, R4);
m_RegDisplay.pR5 = PTR_TO_MEMBER(CALL_DESCR_CONTEXT, pContext, R5);
m_RegDisplay.pR7 = PTR_TO_MEMBER(CALL_DESCR_CONTEXT, pContext, R7);

// And adjust SP to be the state that it should be in just after returning from
// the CallDescrFunction
newSP += sizeof(CALL_DESCR_CONTEXT);

#elif defined(TARGET_ARM64)
// pFP points to the SP that we want to capture. (This arrangement allows for
// the arguments from this function to be loaded into memory with an adjustment
// to SP, like an alloca
newSP = *(PTR_UIntNative)m_RegDisplay.pFP;
PTR_CALL_DESCR_CONTEXT pContext = (PTR_CALL_DESCR_CONTEXT)newSP;

m_RegDisplay.pX19 = PTR_TO_MEMBER(CALL_DESCR_CONTEXT, pContext, X19);
m_RegDisplay.pX20 = PTR_TO_MEMBER(CALL_DESCR_CONTEXT, pContext, X20);

// And adjust SP to be the state that it should be in just after returning from
// the CallDescrFunction
newSP += sizeof(CALL_DESCR_CONTEXT);

#elif defined(TARGET_X86)
// RBP points to the SP that we want to capture. (This arrangement allows for
// the arguments from this function to be loaded into memory with an adjustment
// to SP, like an alloca
newSP = *(PTR_UIntNative)m_RegDisplay.pRbp;

PTR_CALL_DESCR_CONTEXT pContext = (PTR_CALL_DESCR_CONTEXT)(newSP - offsetof(CALL_DESCR_CONTEXT, Rbp));

m_RegDisplay.pRbp = PTR_TO_MEMBER(CALL_DESCR_CONTEXT, pContext, Rbp);
m_RegDisplay.pRbx = PTR_TO_MEMBER(CALL_DESCR_CONTEXT, pContext, Rbx);

// And adjust SP to be the state that it should be in just after returning from
// the CallDescrFunction
newSP += sizeof(CALL_DESCR_CONTEXT) - offsetof(CALL_DESCR_CONTEXT, Rbp);

#else
PORTABILITY_ASSERT("UnwindCallDescrThunk");
PTR_CALL_DESCR_CONTEXT pContext = NULL;
#endif

m_RegDisplay.SetAddrOfIP(PTR_TO_MEMBER(CALL_DESCR_CONTEXT, pContext, IP));
m_RegDisplay.SetIP(pContext->IP);
m_RegDisplay.SetSP(newSP);
SetControlPC(dac_cast<PTR_VOID>(pContext->IP));

#endif // defined(USE_PORTABLE_HELPERS)
}

void StackFrameIterator::UnwindThrowSiteThunk()
{
ASSERT((m_dwFlags & MethodStateCalculated) == 0);
@@ -1659,11 +1532,7 @@ void StackFrameIterator::UnwindNonEHThunkSequence()
{
ASSERT(m_pConservativeStackRangeLowerBound == NULL);

if (category == InCallDescrThunk)
{
UnwindCallDescrThunk();
}
else if (category == InUniversalTransitionThunk)
if (category == InUniversalTransitionThunk)
{
UnwindUniversalTransitionThunk();
ASSERT(m_pConservativeStackRangeLowerBound != NULL);
@@ -1856,7 +1725,6 @@ bool StackFrameIterator::IsNonEHThunk(ReturnAddressCategory category)
default:
return false;
case InUniversalTransitionThunk:
case InCallDescrThunk:
return true;
}
}
@@ -1926,11 +1794,7 @@ StackFrameIterator::ReturnAddressCategory StackFrameIterator::CategorizeUnadjust
#else // defined(USE_PORTABLE_HELPERS)

#if defined(FEATURE_DYNAMIC_CODE)
if (EQUALS_RETURN_ADDRESS(returnAddress, ReturnFromCallDescrThunk))
{
return InCallDescrThunk;
}
else if (EQUALS_RETURN_ADDRESS(returnAddress, ReturnFromUniversalTransition) ||
if (EQUALS_RETURN_ADDRESS(returnAddress, ReturnFromUniversalTransition) ||
EQUALS_RETURN_ADDRESS(returnAddress, ReturnFromUniversalTransition_DebugStepTailCall))
{
return InUniversalTransitionThunk;
5 changes: 0 additions & 5 deletions src/coreclr/nativeaot/Runtime/StackFrameIterator.h
@@ -79,10 +79,6 @@ class StackFrameIterator
// NOTE: This function always publishes a non-NULL conservative stack range lower bound.
void UnwindUniversalTransitionThunk();

// If our control PC indicates that we're in the call descr thunk that we use to call an arbitrary managed
// function with an arbitrary signature from a normal managed function handle the stack walk specially.
void UnwindCallDescrThunk();

void EnterInitialInvalidState(Thread * pThreadToWalk);

void InternalInit(Thread * pThreadToWalk, PTR_PInvokeTransitionFrame pFrame, uint32_t dwFlags); // GC stackwalk
@@ -115,7 +111,6 @@ class StackFrameIterator
InThrowSiteThunk,
InFuncletInvokeThunk,
InFilterFuncletInvokeThunk,
InCallDescrThunk,
InUniversalTransitionThunk,
};

13 changes: 0 additions & 13 deletions src/coreclr/nativeaot/Runtime/amd64/CallDescrWorker.S

This file was deleted.

