From 997f9373ae3ced2981d64bf3fe8b737eddff7886 Mon Sep 17 00:00:00 2001 From: Milos Kotlar Date: Tue, 13 Jan 2026 18:01:13 +0100 Subject: [PATCH 01/33] Add Load/Store SwiftSelf routines and update CallStubGenerator for Swift calling convention --- src/coreclr/vm/arm64/asmhelpers.S | 12 ++++ src/coreclr/vm/arm64/asmhelpers.asm | 12 ++++ src/coreclr/vm/callstubgenerator.cpp | 85 +++++++++++++++++++++++++++- src/coreclr/vm/callstubgenerator.h | 4 +- 4 files changed, 109 insertions(+), 4 deletions(-) diff --git a/src/coreclr/vm/arm64/asmhelpers.S b/src/coreclr/vm/arm64/asmhelpers.S index 046482ea6e996c..afb3fa666b5248 100644 --- a/src/coreclr/vm/arm64/asmhelpers.S +++ b/src/coreclr/vm/arm64/asmhelpers.S @@ -1165,6 +1165,18 @@ Store_Ref X5 Store_Ref X6 Store_Ref X7 +LEAF_ENTRY Load_SwiftSelf + ldr x20, [x9], #8 + ldr x11, [x10], #8 + EPILOG_BRANCH_REG x11 +LEAF_END Load_SwiftSelf + +LEAF_ENTRY Store_SwiftSelf + str x20, [x9], #8 + ldr x11, [x10], #8 + EPILOG_BRANCH_REG x11 +LEAF_END Store_SwiftSelf + LEAF_ENTRY Store_X0 str x0, [x9], #8 ldr x11, [x10], #8 diff --git a/src/coreclr/vm/arm64/asmhelpers.asm b/src/coreclr/vm/arm64/asmhelpers.asm index 281ca3bd0e85bc..968383fbe0b91d 100644 --- a/src/coreclr/vm/arm64/asmhelpers.asm +++ b/src/coreclr/vm/arm64/asmhelpers.asm @@ -1445,6 +1445,18 @@ RefCopyDone$argReg Store_Ref X6 Store_Ref X7 + LEAF_ENTRY Load_SwiftSelf + ldr x20, [x9], #8 + ldr x11, [x10], #8 + EPILOG_BRANCH_REG x11 + LEAF_END Load_SwiftSelf + + LEAF_ENTRY Store_SwiftSelf + str x20, [x9], #8 + ldr x11, [x10], #8 + EPILOG_BRANCH_REG x11 + LEAF_END Store_SwiftSelf + LEAF_ENTRY Store_X0 str x0, [x9], #8 ldr x11, [x10], #8 diff --git a/src/coreclr/vm/callstubgenerator.cpp b/src/coreclr/vm/callstubgenerator.cpp index 68adf3fe859bc8..f26a13b108d021 100644 --- a/src/coreclr/vm/callstubgenerator.cpp +++ b/src/coreclr/vm/callstubgenerator.cpp @@ -592,6 +592,9 @@ extern "C" void Store_X6(); extern "C" void Store_X6_X7(); extern "C" void Store_X7(); +extern "C" void Load_SwiftSelf(); +extern "C" void Store_SwiftSelf(); + extern "C" void Load_Ref_X0(); extern "C" void Load_Ref_X1(); extern "C" void Load_Ref_X2(); @@ -1968,6 +1971,14 @@ PCODE CallStubGenerator::GetFPReg32RangeRoutine(int x1, int x2) int index = x1 * NUM_FLOAT_ARGUMENT_REGISTERS + x2; return m_interpreterToNative ? FPRegs32LoadRoutines[index] : FPRegs32StoreRoutines[index]; } + +PCODE CallStubGenerator::GetSwiftSelfRoutine() +{ +#if LOG_COMPUTE_CALL_STUB + printf("GetSwiftSelfRoutine\n"); +#endif + return m_interpreterToNative ? 
(PCODE)Load_SwiftSelf : (PCODE)Store_SwiftSelf; +} #endif // TARGET_ARM64 extern "C" void CallJittedMethodRetVoid(PCODE *routines, int8_t*pArgs, int8_t*pRet, int totalStackSize, PTR_PTR_Object pContinuation); @@ -2441,6 +2452,11 @@ void CallStubGenerator::TerminateCurrentRoutineIfNotOfNewType(RoutineType type, m_x1 = NoRange; m_currentRoutineType = RoutineType::None; } + else if ((m_currentRoutineType == RoutineType::SwiftSelf) && (type != RoutineType::SwiftSelf)) + { + pRoutines[m_routineIndex++] = GetSwiftSelfRoutine(); + m_currentRoutineType = RoutineType::None; + } #endif // TARGET_ARM64 else if ((m_currentRoutineType == RoutineType::Stack) && (type != RoutineType::Stack)) { @@ -2487,6 +2503,37 @@ bool isNativePrimitiveStructType(MethodTable* pMT) return strcmp(typeName, "CLong") == 0 || strcmp(typeName, "CULong") == 0 || strcmp(typeName, "NFloat") == 0; } +#ifdef TARGET_ARM64 +//--------------------------------------------------------------------------- +// isSwiftSelfType: +// Check if the given type is SwiftSelf or SwiftSelf. +// +// Arguments: +// pMT - the handle for the type. +// +// Return Value: +// true if the given type is SwiftSelf or SwiftSelf, +// false otherwise. +// +bool isSwiftSelfType(MethodTable* pMT) +{ + const char* namespaceName = nullptr; + const char* typeName = pMT->GetFullyQualifiedNameInfo(&namespaceName); + + if ((namespaceName == NULL) || (typeName == NULL)) + { + return false; + } + + if (strcmp(namespaceName, "System.Runtime.InteropServices.Swift") != 0) + { + return false; + } + + return strcmp(typeName, "SwiftSelf") == 0 || strcmp(typeName, "SwiftSelf`1") == 0; +} +#endif // TARGET_ARM64 + void CallStubGenerator::ComputeCallStub(MetaSig &sig, PCODE *pRoutines, MethodDesc *pMD) { bool rewriteMetaSigFromExplicitThisToHasThis = false; @@ -2532,6 +2579,10 @@ void CallStubGenerator::ComputeCallStub(MetaSig &sig, PCODE *pRoutines, MethodDe } } +#ifdef TARGET_ARM64 + bool isSwiftCallConv = false; +#endif + if (hasUnmanagedCallConv) { switch (unmanagedCallConv) @@ -2542,6 +2593,11 @@ void CallStubGenerator::ComputeCallStub(MetaSig &sig, PCODE *pRoutines, MethodDe case CorInfoCallConvExtension::FastcallMemberFunction: unmanagedThisCallConv = true; break; +#ifdef TARGET_ARM64 + case CorInfoCallConvExtension::Swift: + isSwiftCallConv = true; + break; +#endif default: break; } @@ -2696,7 +2752,30 @@ void CallStubGenerator::ComputeCallStub(MetaSig &sig, PCODE *pRoutines, MethodDe // Each entry on the interpreter stack is always aligned to at least 8 bytes, but some arguments are 16 byte aligned TypeHandle thArgTypeHandle; - if ((argIt.GetArgType(&thArgTypeHandle) == ELEMENT_TYPE_VALUETYPE) && thArgTypeHandle.GetSize() > 8) + CorElementType argCorType = argIt.GetArgType(&thArgTypeHandle); + +#ifdef TARGET_ARM64 + if (isSwiftCallConv) + { + if (argCorType == ELEMENT_TYPE_VALUETYPE && !thArgTypeHandle.IsNull()) + { + MethodTable* pArgMT = thArgTypeHandle.IsTypeDesc() ? 
nullptr : thArgTypeHandle.AsMethodTable(); + if (pArgMT != nullptr && isSwiftSelfType(pArgMT)) + { +#if LOG_COMPUTE_CALL_STUB + printf("Swift Self argument detected\n"); +#endif + + TerminateCurrentRoutineIfNotOfNewType(RoutineType::SwiftSelf, pRoutines); + m_currentRoutineType = RoutineType::SwiftSelf; + interpreterStackOffset += interpStackSlotSize; + continue; + } + } + } +#endif // TARGET_ARM64 + + if ((argCorType == ELEMENT_TYPE_VALUETYPE) && thArgTypeHandle.GetSize() > 8) { unsigned align = CEEInfo::getClassAlignmentRequirementStatic(thArgTypeHandle); if (align < INTERP_STACK_SLOT_SIZE) @@ -2836,7 +2915,7 @@ void CallStubGenerator::ProcessArgument(ArgIterator *pArgIt, ArgLocDesc& argLocD { argType = RoutineType::Stack; } - + TerminateCurrentRoutineIfNotOfNewType(argType, pRoutines); if (argLocDesc.m_cGenReg != 0) @@ -2907,7 +2986,7 @@ void CallStubGenerator::ProcessArgument(ArgIterator *pArgIt, ArgLocDesc& argLocD { // HFA Arguments using odd number of 32 bit FP registers cannot be merged with further ranges due to the // interpreter stack slot size alignment needs. The range copy routines for these registers - // ensure that the interpreter stack is properly aligned after the odd number of registers are + // ensure that the interpreter stack is properly aligned after the odd number of registers are // loaded / stored. pRoutines[m_routineIndex++] = GetFPReg32RangeRoutine(m_x1, m_x2); argType = RoutineType::None; diff --git a/src/coreclr/vm/callstubgenerator.h b/src/coreclr/vm/callstubgenerator.h index 292801d6c62fe5..cdcbf98a8d8cb2 100644 --- a/src/coreclr/vm/callstubgenerator.h +++ b/src/coreclr/vm/callstubgenerator.h @@ -117,6 +117,7 @@ class CallStubGenerator #ifdef TARGET_ARM64 FPReg32, FPReg128, + SwiftSelf, #endif Stack }; @@ -157,7 +158,8 @@ class CallStubGenerator #ifdef TARGET_ARM64 PCODE GetFPReg128RangeRoutine(int x1, int x2); PCODE GetFPReg32RangeRoutine(int x1, int x2); -#endif + PCODE GetSwiftSelfRoutine(); +#endif PCODE GetGPRegRangeRoutine(int r1, int r2); ReturnType GetReturnType(ArgIterator *pArgIt); CallStubHeader::InvokeFunctionPtr GetInvokeFunctionPtr(ReturnType returnType); From dbb976a368a7101be8e9130fd1c67a1a963d3435 Mon Sep 17 00:00:00 2001 From: Milos Kotlar Date: Wed, 14 Jan 2026 14:58:22 +0100 Subject: [PATCH 02/33] Add support for SwiftError --- src/coreclr/vm/arm64/asmhelpers.S | 202 +++++++++++++++++++++------ src/coreclr/vm/callstubgenerator.cpp | 66 ++++++++- src/coreclr/vm/callstubgenerator.h | 2 + 3 files changed, 225 insertions(+), 45 deletions(-) diff --git a/src/coreclr/vm/arm64/asmhelpers.S b/src/coreclr/vm/arm64/asmhelpers.S index afb3fa666b5248..08100255dfabe2 100644 --- a/src/coreclr/vm/arm64/asmhelpers.S +++ b/src/coreclr/vm/arm64/asmhelpers.S @@ -695,7 +695,7 @@ NESTED_ENTRY InterpreterStub, _TEXT, NoHandler mov x19, METHODDESC_REGISTER #ifdef TARGET_APPLE - mov x21, x0 + mov x22, x0 #endif INLINE_GETTHREAD x20 // thrashes x0 on Apple OSes (and possibly other arg registers on other Unixes) cbz x20, LOCAL_LABEL(NoManagedThreadOrCallStub) @@ -708,7 +708,7 @@ LOCAL_LABEL(NoManagedThreadOrCallStub): // GetInterpThreadContextWithPossiblyMissingThreadOrCallStub can destroy all argument registers, so we // need to save them. 
For non-Apple, they have been already saved in the PROLOG_WITH_TRANSITION_BLOCK // Restore x0 thrashed by the INLINE_GETTHREAD - mov x0, x21 + mov x0, x22 SAVE_ARGUMENT_REGISTERS sp, __PWTB_ArgumentRegisters SAVE_FLOAT_ARGUMENT_REGISTERS sp, __PWTB_FloatArgumentRegisters #endif @@ -729,7 +729,7 @@ LOCAL_LABEL(HaveInterpThreadContext): LOCAL_LABEL(HaveInterpThreadContext): // On Apple, the INLINE_GETTHREAD is guaranteed to not to thrash argument registers other than x0 // So we restore just the x0 - mov x0, x21 + mov x0, x22 #endif // TARGET_APPLE ldr x9, [x19] // InterpMethod* @@ -744,6 +744,8 @@ LOCAL_LABEL(HaveInterpThreadContext): // Fill in the ContinuationContext register ldr x2, [sp, #(__PWTB_ArgumentRegister_FirstArg + 16)] + // Restore SwiftError to scratch reg + mov x10, x21 EPILOG_WITH_TRANSITION_BLOCK_RETURN NESTED_END InterpreterStub, _TEXT @@ -1177,6 +1179,20 @@ LEAF_ENTRY Store_SwiftSelf EPILOG_BRANCH_REG x11 LEAF_END Store_SwiftSelf +LEAF_ENTRY Load_SwiftError + ldr x11, [x9], #8 + str x11, [fp, #40] + mov x21, xzr + ldr x11, [x10], #8 + EPILOG_BRANCH_REG x11 +LEAF_END Load_SwiftError + +LEAF_ENTRY Store_SwiftError + str x21, [x9], #8 + ldr x11, [x10], #8 + EPILOG_BRANCH_REG x11 +LEAF_END Store_SwiftError + LEAF_ENTRY Store_X0 str x0, [x9], #8 ldr x11, [x10], #8 @@ -2315,17 +2331,22 @@ LEAF_END Load_Q1_Q2_Q3_Q4_Q5_Q6_Q7 // X3 - stack arguments size (properly aligned) // X4 - address of continuation return value NESTED_ENTRY CallJittedMethodRetVoid, _TEXT, NoHandler - PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -32 + PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -48 str x4, [fp, #16] + str xzr, [fp, #40] sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 + ldr x11, [fp, #40] + cbz x11, LOCAL_LABEL(CallJittedMethodRetVoid_NoSwiftError) + str x10, [x11] +LOCAL_LABEL(CallJittedMethodRetVoid_NoSwiftError): ldr x4, [fp, #16] str x2, [x4] EPILOG_STACK_RESTORE - EPILOG_RESTORE_REG_PAIR_INDEXED fp, lr, 32 + EPILOG_RESTORE_REG_PAIR_INDEXED fp, lr, 48 EPILOG_RETURN NESTED_END CallJittedMethodRetVoid, _TEXT @@ -2335,18 +2356,23 @@ NESTED_END CallJittedMethodRetVoid, _TEXT // X3 - stack arguments size (properly aligned) // X4 - address of continuation return value NESTED_ENTRY CallJittedMethodRetBuff, _TEXT, NoHandler - PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -32 + PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -48 str x4, [fp, #16] + str xzr, [fp, #40] sub sp, sp, x3 mov x10, x0 mov x9, x1 mov x8, x2 ldr x11, [x10], #8 blr x11 + ldr x11, [fp, #40] + cbz x11, LOCAL_LABEL(CallJittedMethodRetBuff_NoSwiftError) + str x10, [x11] +LOCAL_LABEL(CallJittedMethodRetBuff_NoSwiftError): ldr x4, [fp, #16] str x2, [x4] EPILOG_STACK_RESTORE - EPILOG_RESTORE_REG_PAIR_INDEXED fp, lr, 32 + EPILOG_RESTORE_REG_PAIR_INDEXED fp, lr, 48 EPILOG_RETURN NESTED_END CallJittedMethodRetBuff, _TEXT @@ -2356,19 +2382,24 @@ NESTED_END CallJittedMethodRetBuff, _TEXT // X3 - stack arguments size (properly aligned) // X4 - address of continuation return value NESTED_ENTRY CallJittedMethodRetI8, _TEXT, NoHandler - PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -32 + PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -48 stp x2, x4, [fp, #16] + str xzr, [fp, #40] sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 + ldr x11, [fp, #40] + cbz x11, LOCAL_LABEL(CallJittedMethodRetI8_NoSwiftError) + str x10, [x11] +LOCAL_LABEL(CallJittedMethodRetI8_NoSwiftError): ldr x9, [fp, #24] str x2, [x9] ldr x9, [fp, #16] str x0, [x9] EPILOG_STACK_RESTORE - EPILOG_RESTORE_REG_PAIR_INDEXED fp, lr, 32 + EPILOG_RESTORE_REG_PAIR_INDEXED fp, lr, 48 EPILOG_RETURN 
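    // SwiftError handling shared by every CallJittedMethodRet* helper in this
    // file: the prolog zeroes the spill slot at [fp, #40]; when Load_SwiftError
    // appears in the routine chain, it overwrites that slot with the destination
    // pointer read from the interpreter stack and clears x21, the Swift error
    // register. After the call, a non-null slot means the signature carried a
    // byref SwiftError, and the error value (staged in x10 on the return path)
    // is stored through the saved pointer before the return value is written.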
NESTED_END CallJittedMethodRetI8, _TEXT @@ -2378,19 +2409,24 @@ NESTED_END CallJittedMethodRetI8, _TEXT // X3 - stack arguments size (properly aligned) // X4 - address of continuation return value NESTED_ENTRY CallJittedMethodRet2I8, _TEXT, NoHandler - PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -32 + PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -48 stp x2, x4, [fp, #16] + str xzr, [fp, #40] sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 + ldr x11, [fp, #40] + cbz x11, LOCAL_LABEL(CallJittedMethodRet2I8_NoSwiftError) + str x10, [x11] +LOCAL_LABEL(CallJittedMethodRet2I8_NoSwiftError): ldr x9, [fp, #24] str x2, [x9] ldr x9, [fp, #16] stp x0, x1, [x9] EPILOG_STACK_RESTORE - EPILOG_RESTORE_REG_PAIR_INDEXED fp, lr, 32 + EPILOG_RESTORE_REG_PAIR_INDEXED fp, lr, 48 EPILOG_RETURN NESTED_END CallJittedMethodRet2I8, _TEXT @@ -2400,19 +2436,24 @@ NESTED_END CallJittedMethodRet2I8, _TEXT // X3 - stack arguments size (properly aligned) // X4 - address of continuation return value NESTED_ENTRY CallJittedMethodRetDouble, _TEXT, NoHandler - PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -32 + PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -48 stp x2, x4, [fp, #16] + str xzr, [fp, #40] sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 + ldr x11, [fp, #40] + cbz x11, LOCAL_LABEL(CallJittedMethodRetDouble_NoSwiftError) + str x10, [x11] +LOCAL_LABEL(CallJittedMethodRetDouble_NoSwiftError): ldr x9, [fp, #24] str x2, [x9] ldr x9, [fp, #16] str d0, [x9] EPILOG_STACK_RESTORE - EPILOG_RESTORE_REG_PAIR_INDEXED fp, lr, 32 + EPILOG_RESTORE_REG_PAIR_INDEXED fp, lr, 48 EPILOG_RETURN NESTED_END CallJittedMethodRetDouble, _TEXT @@ -2422,19 +2463,24 @@ NESTED_END CallJittedMethodRetDouble, _TEXT // X3 - stack arguments size (properly aligned) // X4 - address of continuation return value NESTED_ENTRY CallJittedMethodRet2Double, _TEXT, NoHandler - PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -32 + PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -48 stp x2, x4, [fp, #16] + str xzr, [fp, #40] sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 + ldr x11, [fp, #40] + cbz x11, LOCAL_LABEL(CallJittedMethodRet2Double_NoSwiftError) + str x10, [x11] +LOCAL_LABEL(CallJittedMethodRet2Double_NoSwiftError): ldr x9, [fp, #24] str x2, [x9] ldr x9, [fp, #16] stp d0, d1, [x9] EPILOG_STACK_RESTORE - EPILOG_RESTORE_REG_PAIR_INDEXED fp, lr, 32 + EPILOG_RESTORE_REG_PAIR_INDEXED fp, lr, 48 EPILOG_RETURN NESTED_END CallJittedMethodRet2Double, _TEXT @@ -2444,20 +2490,25 @@ NESTED_END CallJittedMethodRet2Double, _TEXT // X3 - stack arguments size (properly aligned) // X4 - address of continuation return value NESTED_ENTRY CallJittedMethodRet3Double, _TEXT, NoHandler - PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -32 + PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -48 stp x2, x4, [fp, #16] + str xzr, [fp, #40] sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 + ldr x11, [fp, #40] + cbz x11, LOCAL_LABEL(CallJittedMethodRet3Double_NoSwiftError) + str x10, [x11] +LOCAL_LABEL(CallJittedMethodRet3Double_NoSwiftError): ldr x9, [fp, #24] str x2, [x9] ldr x9, [fp, #16] stp d0, d1, [x9], #16 str d2, [x9] EPILOG_STACK_RESTORE - EPILOG_RESTORE_REG_PAIR_INDEXED fp, lr, 32 + EPILOG_RESTORE_REG_PAIR_INDEXED fp, lr, 48 EPILOG_RETURN NESTED_END CallJittedMethodRet3Double, _TEXT @@ -2467,20 +2518,25 @@ NESTED_END CallJittedMethodRet3Double, _TEXT // X3 - stack arguments size (properly aligned) // X4 - address of continuation return value NESTED_ENTRY CallJittedMethodRet4Double, _TEXT, NoHandler - PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -32 + PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, 
-48 stp x2, x4, [fp, #16] + str xzr, [fp, #40] sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 + ldr x11, [fp, #40] + cbz x11, LOCAL_LABEL(CallJittedMethodRet4Double_NoSwiftError) + str x10, [x11] +LOCAL_LABEL(CallJittedMethodRet4Double_NoSwiftError): ldr x9, [fp, #24] str x2, [x9] ldr x9, [fp, #16] stp d0, d1, [x9], #16 stp d2, d3, [x9] EPILOG_STACK_RESTORE - EPILOG_RESTORE_REG_PAIR_INDEXED fp, lr, 32 + EPILOG_RESTORE_REG_PAIR_INDEXED fp, lr, 48 EPILOG_RETURN NESTED_END CallJittedMethodRet4Double, _TEXT @@ -2490,19 +2546,24 @@ NESTED_END CallJittedMethodRet4Double, _TEXT // X3 - stack arguments size (properly aligned) // X4 - address of continuation return value NESTED_ENTRY CallJittedMethodRetFloat, _TEXT, NoHandler - PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -32 + PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -48 stp x2, x4, [fp, #16] + str xzr, [fp, #40] sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 + ldr x11, [fp, #40] + cbz x11, LOCAL_LABEL(CallJittedMethodRetFloat_NoSwiftError) + str x10, [x11] +LOCAL_LABEL(CallJittedMethodRetFloat_NoSwiftError): ldr x9, [fp, #24] str x2, [x9] ldr x9, [fp, #16] str s0, [x9] EPILOG_STACK_RESTORE - EPILOG_RESTORE_REG_PAIR_INDEXED fp, lr, 32 + EPILOG_RESTORE_REG_PAIR_INDEXED fp, lr, 48 EPILOG_RETURN NESTED_END CallJittedMethodRetFloat, _TEXT @@ -2512,19 +2573,24 @@ NESTED_END CallJittedMethodRetFloat, _TEXT // X3 - stack arguments size (properly aligned) // X4 - address of continuation return value NESTED_ENTRY CallJittedMethodRet2Float, _TEXT, NoHandler - PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -32 + PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -48 stp x2, x4, [fp, #16] + str xzr, [fp, #40] sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 + ldr x11, [fp, #40] + cbz x11, LOCAL_LABEL(CallJittedMethodRet2Float_NoSwiftError) + str x10, [x11] +LOCAL_LABEL(CallJittedMethodRet2Float_NoSwiftError): ldr x9, [fp, #24] str x2, [x9] ldr x9, [fp, #16] stp s0, s1, [x9] EPILOG_STACK_RESTORE - EPILOG_RESTORE_REG_PAIR_INDEXED fp, lr, 32 + EPILOG_RESTORE_REG_PAIR_INDEXED fp, lr, 48 EPILOG_RETURN NESTED_END CallJittedMethodRet2Float, _TEXT @@ -2534,20 +2600,25 @@ NESTED_END CallJittedMethodRet2Float, _TEXT // X3 - stack arguments size (properly aligned) // X4 - address of continuation return value NESTED_ENTRY CallJittedMethodRet3Float, _TEXT, NoHandler - PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -32 + PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -48 stp x2, x4, [fp, #16] + str xzr, [fp, #40] sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 + ldr x11, [fp, #40] + cbz x11, LOCAL_LABEL(CallJittedMethodRet3Float_NoSwiftError) + str x10, [x11] +LOCAL_LABEL(CallJittedMethodRet3Float_NoSwiftError): ldr x9, [fp, #24] str x2, [x9] ldr x9, [fp, #16] stp s0, s1, [x9], #8 str s2, [x9] EPILOG_STACK_RESTORE - EPILOG_RESTORE_REG_PAIR_INDEXED fp, lr, 32 + EPILOG_RESTORE_REG_PAIR_INDEXED fp, lr, 48 EPILOG_RETURN NESTED_END CallJittedMethodRet3Float, _TEXT @@ -2557,20 +2628,25 @@ NESTED_END CallJittedMethodRet3Float, _TEXT // X3 - stack arguments size (properly aligned) // X4 - address of continuation return value NESTED_ENTRY CallJittedMethodRet4Float, _TEXT, NoHandler - PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -32 + PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -48 stp x2, x4, [fp, #16] + str xzr, [fp, #40] sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 + ldr x11, [fp, #40] + cbz x11, LOCAL_LABEL(CallJittedMethodRet4Float_NoSwiftError) + str x10, [x11] +LOCAL_LABEL(CallJittedMethodRet4Float_NoSwiftError): ldr x9, [fp, #24] str x2, [x9] ldr x9, [fp, 
#16] stp s0, s1, [x9], #8 stp s2, s3, [x9] EPILOG_STACK_RESTORE - EPILOG_RESTORE_REG_PAIR_INDEXED fp, lr, 32 + EPILOG_RESTORE_REG_PAIR_INDEXED fp, lr, 48 EPILOG_RETURN NESTED_END CallJittedMethodRet4Float, _TEXT @@ -2580,19 +2656,24 @@ NESTED_END CallJittedMethodRet4Float, _TEXT // X3 - stack arguments size (properly aligned) // X4 - address of continuation return value NESTED_ENTRY CallJittedMethodRetVector64, _TEXT, NoHandler - PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -32 + PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -48 stp x2, x4, [fp, #16] + str xzr, [fp, #40] sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 + ldr x11, [fp, #40] + cbz x11, LOCAL_LABEL(CallJittedMethodRetVector64_NoSwiftError) + str x10, [x11] +LOCAL_LABEL(CallJittedMethodRetVector64_NoSwiftError): ldr x9, [fp, #24] str x2, [x9] ldr x9, [fp, #16] str d0, [x9] EPILOG_STACK_RESTORE - EPILOG_RESTORE_REG_PAIR_INDEXED fp, lr, 32 + EPILOG_RESTORE_REG_PAIR_INDEXED fp, lr, 48 EPILOG_RETURN NESTED_END CallJittedMethodRetVector64, _TEXT @@ -2602,20 +2683,25 @@ NESTED_END CallJittedMethodRetVector64, _TEXT // X3 - stack arguments size (properly aligned) // X4 - address of continuation return value NESTED_ENTRY CallJittedMethodRet2Vector64, _TEXT, NoHandler - PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -32 + PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -48 stp x2, x4, [fp, #16] + str xzr, [fp, #40] sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 + ldr x11, [fp, #40] + cbz x11, LOCAL_LABEL(CallJittedMethodRet2Vector64_NoSwiftError) + str x10, [x11] +LOCAL_LABEL(CallJittedMethodRet2Vector64_NoSwiftError): ldr x9, [fp, #24] str x2, [x9] ldr x9, [fp, #16] str d0, [x9], #8 str d1, [x9] EPILOG_STACK_RESTORE - EPILOG_RESTORE_REG_PAIR_INDEXED fp, lr, 32 + EPILOG_RESTORE_REG_PAIR_INDEXED fp, lr, 48 EPILOG_RETURN NESTED_END CallJittedMethodRet2Vector64, _TEXT @@ -2625,13 +2711,18 @@ NESTED_END CallJittedMethodRet2Vector64, _TEXT // X3 - stack arguments size (properly aligned) // X4 - address of continuation return value NESTED_ENTRY CallJittedMethodRet3Vector64, _TEXT, NoHandler - PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -32 + PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -48 stp x2, x4, [fp, #16] + str xzr, [fp, #40] sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 + ldr x11, [fp, #40] + cbz x11, LOCAL_LABEL(CallJittedMethodRet3Vector64_NoSwiftError) + str x10, [x11] +LOCAL_LABEL(CallJittedMethodRet3Vector64_NoSwiftError): ldr x9, [fp, #24] str x2, [x9] ldr x9, [fp, #16] @@ -2639,7 +2730,7 @@ NESTED_ENTRY CallJittedMethodRet3Vector64, _TEXT, NoHandler str d1, [x9], #8 str d2, [x9] EPILOG_STACK_RESTORE - EPILOG_RESTORE_REG_PAIR_INDEXED fp, lr, 32 + EPILOG_RESTORE_REG_PAIR_INDEXED fp, lr, 48 EPILOG_RETURN NESTED_END CallJittedMethodRet3Vector64, _TEXT @@ -2649,13 +2740,18 @@ NESTED_END CallJittedMethodRet3Vector64, _TEXT // X3 - stack arguments size (properly aligned) // X4 - address of continuation return value NESTED_ENTRY CallJittedMethodRet4Vector64, _TEXT, NoHandler - PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -32 + PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -48 stp x2, x4, [fp, #16] + str xzr, [fp, #40] sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 + ldr x11, [fp, #40] + cbz x11, LOCAL_LABEL(CallJittedMethodRet4Vector64_NoSwiftError) + str x10, [x11] +LOCAL_LABEL(CallJittedMethodRet4Vector64_NoSwiftError): ldr x9, [fp, #24] str x2, [x9] ldr x9, [fp, #16] @@ -2664,7 +2760,7 @@ NESTED_ENTRY CallJittedMethodRet4Vector64, _TEXT, NoHandler str d2, [x9], #8 str d3, [x9] EPILOG_STACK_RESTORE - EPILOG_RESTORE_REG_PAIR_INDEXED 
fp, lr, 32 + EPILOG_RESTORE_REG_PAIR_INDEXED fp, lr, 48 EPILOG_RETURN NESTED_END CallJittedMethodRet4Vector64, _TEXT @@ -2674,19 +2770,24 @@ NESTED_END CallJittedMethodRet4Vector64, _TEXT // X3 - stack arguments size (properly aligned) // X4 - address of continuation return value NESTED_ENTRY CallJittedMethodRetVector128, _TEXT, NoHandler - PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -32 + PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -48 stp x2, x4, [fp, #16] + str xzr, [fp, #40] sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 + ldr x11, [fp, #40] + cbz x11, LOCAL_LABEL(CallJittedMethodRetVector128_NoSwiftError) + str x10, [x11] +LOCAL_LABEL(CallJittedMethodRetVector128_NoSwiftError): ldr x9, [fp, #24] str x2, [x9] ldr x9, [fp, #16] str q0, [x9] EPILOG_STACK_RESTORE - EPILOG_RESTORE_REG_PAIR_INDEXED fp, lr, 32 + EPILOG_RESTORE_REG_PAIR_INDEXED fp, lr, 48 EPILOG_RETURN NESTED_END CallJittedMethodRetVector128, _TEXT @@ -2696,20 +2797,25 @@ NESTED_END CallJittedMethodRetVector128, _TEXT // X3 - stack arguments size (properly aligned) // X4 - address of continuation return value NESTED_ENTRY CallJittedMethodRet2Vector128, _TEXT, NoHandler - PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -32 + PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -48 stp x2, x4, [fp, #16] + str xzr, [fp, #40] sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 + ldr x11, [fp, #40] + cbz x11, LOCAL_LABEL(CallJittedMethodRet2Vector128_NoSwiftError) + str x10, [x11] +LOCAL_LABEL(CallJittedMethodRet2Vector128_NoSwiftError): ldr x9, [fp, #24] str x2, [x9] ldr x9, [fp, #16] str q0, [x9], #16 str q1, [x9] EPILOG_STACK_RESTORE - EPILOG_RESTORE_REG_PAIR_INDEXED fp, lr, 32 + EPILOG_RESTORE_REG_PAIR_INDEXED fp, lr, 48 EPILOG_RETURN NESTED_END CallJittedMethodRet2Vector128, _TEXT @@ -2719,13 +2825,18 @@ NESTED_END CallJittedMethodRet2Vector128, _TEXT // X3 - stack arguments size (properly aligned) // X4 - address of continuation return value NESTED_ENTRY CallJittedMethodRet3Vector128, _TEXT, NoHandler - PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -32 + PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -48 stp x2, x4, [fp, #16] + str xzr, [fp, #40] sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 + ldr x11, [fp, #40] + cbz x11, LOCAL_LABEL(CallJittedMethodRet3Vector128_NoSwiftError) + str x10, [x11] +LOCAL_LABEL(CallJittedMethodRet3Vector128_NoSwiftError): ldr x9, [fp, #24] str x2, [x9] ldr x9, [fp, #16] @@ -2733,7 +2844,7 @@ NESTED_ENTRY CallJittedMethodRet3Vector128, _TEXT, NoHandler str q1, [x9], #16 str q2, [x9] EPILOG_STACK_RESTORE - EPILOG_RESTORE_REG_PAIR_INDEXED fp, lr, 32 + EPILOG_RESTORE_REG_PAIR_INDEXED fp, lr, 48 EPILOG_RETURN NESTED_END CallJittedMethodRet3Vector128, _TEXT @@ -2743,13 +2854,18 @@ NESTED_END CallJittedMethodRet3Vector128, _TEXT // X3 - stack arguments size (properly aligned) // X4 - address of continuation return value NESTED_ENTRY CallJittedMethodRet4Vector128, _TEXT, NoHandler - PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -32 + PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -48 stp x2, x4, [fp, #16] + str xzr, [fp, #40] sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 + ldr x11, [fp, #40] + cbz x11, LOCAL_LABEL(CallJittedMethodRet4Vector128_NoSwiftError) + str x10, [x11] +LOCAL_LABEL(CallJittedMethodRet4Vector128_NoSwiftError): ldr x9, [fp, #24] str x2, [x9] ldr x9, [fp, #16] @@ -2758,7 +2874,7 @@ NESTED_ENTRY CallJittedMethodRet4Vector128, _TEXT, NoHandler str q2, [x9], #16 str q3, [x9] EPILOG_STACK_RESTORE - EPILOG_RESTORE_REG_PAIR_INDEXED fp, lr, 32 + EPILOG_RESTORE_REG_PAIR_INDEXED fp, lr, 48 EPILOG_RETURN 
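    // Frame layout note: these helpers grow their frame from 32 to 48 bytes.
    // [fp, #16] and [fp, #24] still hold the saved return-value and
    // continuation addresses, [fp, #40] is the new SwiftError spill slot, and
    // [fp, #32] appears unused, keeping the frame a multiple of 16 bytes as
    // required for sp alignment.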
NESTED_END CallJittedMethodRet4Vector128, _TEXT diff --git a/src/coreclr/vm/callstubgenerator.cpp b/src/coreclr/vm/callstubgenerator.cpp index f26a13b108d021..7bad225dcfaa8e 100644 --- a/src/coreclr/vm/callstubgenerator.cpp +++ b/src/coreclr/vm/callstubgenerator.cpp @@ -595,6 +595,9 @@ extern "C" void Store_X7(); extern "C" void Load_SwiftSelf(); extern "C" void Store_SwiftSelf(); +extern "C" void Load_SwiftError(); +extern "C" void Store_SwiftError(); + extern "C" void Load_Ref_X0(); extern "C" void Load_Ref_X1(); extern "C" void Load_Ref_X2(); @@ -1873,7 +1876,7 @@ PCODE GPRegsRefStoreRoutines[] = #endif // TARGET_RISCV64 -#define LOG_COMPUTE_CALL_STUB 0 +#define LOG_COMPUTE_CALL_STUB 1 PCODE CallStubGenerator::GetStackRoutine() { @@ -1979,6 +1982,14 @@ PCODE CallStubGenerator::GetSwiftSelfRoutine() #endif return m_interpreterToNative ? (PCODE)Load_SwiftSelf : (PCODE)Store_SwiftSelf; } + +PCODE CallStubGenerator::GetSwiftErrorRoutine() +{ +#if LOG_COMPUTE_CALL_STUB + printf("GetSwiftErrorRoutine\n"); +#endif + return m_interpreterToNative ? (PCODE)Load_SwiftError : (PCODE)Store_SwiftError; +} #endif // TARGET_ARM64 extern "C" void CallJittedMethodRetVoid(PCODE *routines, int8_t*pArgs, int8_t*pRet, int totalStackSize, PTR_PTR_Object pContinuation); @@ -2457,6 +2468,11 @@ void CallStubGenerator::TerminateCurrentRoutineIfNotOfNewType(RoutineType type, pRoutines[m_routineIndex++] = GetSwiftSelfRoutine(); m_currentRoutineType = RoutineType::None; } + else if ((m_currentRoutineType == RoutineType::SwiftError) && (type != RoutineType::SwiftError)) + { + pRoutines[m_routineIndex++] = GetSwiftErrorRoutine(); + m_currentRoutineType = RoutineType::None; + } #endif // TARGET_ARM64 else if ((m_currentRoutineType == RoutineType::Stack) && (type != RoutineType::Stack)) { @@ -2532,6 +2548,36 @@ bool isSwiftSelfType(MethodTable* pMT) return strcmp(typeName, "SwiftSelf") == 0 || strcmp(typeName, "SwiftSelf`1") == 0; } + + +//--------------------------------------------------------------------------- +// isSwiftErrorType: +// Check if the given type is SwiftError. +// +// Arguments: +// pMT - the handle for the type. +// +// Return Value: +// true if the given type is SwiftError +// false otherwise. +// +bool isSwiftErrorType(MethodTable* pMT) +{ + const char* namespaceName = nullptr; + const char* typeName = pMT->GetFullyQualifiedNameInfo(&namespaceName); + + if ((namespaceName == NULL) || (typeName == NULL)) + { + return false; + } + + if (strcmp(namespaceName, "System.Runtime.InteropServices.Swift") != 0) + { + return false; + } + + return strcmp(typeName, "SwiftError") == 0; +} #endif // TARGET_ARM64 void CallStubGenerator::ComputeCallStub(MetaSig &sig, PCODE *pRoutines, MethodDesc *pMD) @@ -2757,7 +2803,12 @@ void CallStubGenerator::ComputeCallStub(MetaSig &sig, PCODE *pRoutines, MethodDe #ifdef TARGET_ARM64 if (isSwiftCallConv) { - if (argCorType == ELEMENT_TYPE_VALUETYPE && !thArgTypeHandle.IsNull()) + if (argCorType == ELEMENT_TYPE_BYREF) + { + sig.GetByRefType(&thArgTypeHandle); + } + + if ((argCorType == ELEMENT_TYPE_VALUETYPE || argCorType == ELEMENT_TYPE_BYREF) && !thArgTypeHandle.IsNull()) { MethodTable* pArgMT = thArgTypeHandle.IsTypeDesc() ? 
nullptr : thArgTypeHandle.AsMethodTable(); if (pArgMT != nullptr && isSwiftSelfType(pArgMT)) @@ -2771,6 +2822,17 @@ void CallStubGenerator::ComputeCallStub(MetaSig &sig, PCODE *pRoutines, MethodDe interpreterStackOffset += interpStackSlotSize; continue; } + if (pArgMT != nullptr && isSwiftErrorType(pArgMT)) + { +#if LOG_COMPUTE_CALL_STUB + printf("Swift Error argument detected\n"); +#endif + + TerminateCurrentRoutineIfNotOfNewType(RoutineType::SwiftError, pRoutines); + m_currentRoutineType = RoutineType::SwiftError; + interpreterStackOffset += interpStackSlotSize; + continue; + } } } #endif // TARGET_ARM64 diff --git a/src/coreclr/vm/callstubgenerator.h b/src/coreclr/vm/callstubgenerator.h index cdcbf98a8d8cb2..115c07fc8307dc 100644 --- a/src/coreclr/vm/callstubgenerator.h +++ b/src/coreclr/vm/callstubgenerator.h @@ -118,6 +118,7 @@ class CallStubGenerator FPReg32, FPReg128, SwiftSelf, + SwiftError, #endif Stack }; @@ -159,6 +160,7 @@ class CallStubGenerator PCODE GetFPReg128RangeRoutine(int x1, int x2); PCODE GetFPReg32RangeRoutine(int x1, int x2); PCODE GetSwiftSelfRoutine(); + PCODE GetSwiftErrorRoutine(); #endif PCODE GetGPRegRangeRoutine(int r1, int r2); ReturnType GetReturnType(ArgIterator *pArgIt); From ed96eec8f0605960bddc2a30b261c7b7473fdb1b Mon Sep 17 00:00:00 2001 From: Milos Kotlar Date: Wed, 14 Jan 2026 16:50:31 +0100 Subject: [PATCH 03/33] Fix build --- src/coreclr/vm/arm64/asmhelpers.asm | 14 +++ src/coreclr/vm/callstubgenerator.cpp | 154 ++++++++++++++++++++++++++- 2 files changed, 167 insertions(+), 1 deletion(-) diff --git a/src/coreclr/vm/arm64/asmhelpers.asm b/src/coreclr/vm/arm64/asmhelpers.asm index 968383fbe0b91d..dd03c276242538 100644 --- a/src/coreclr/vm/arm64/asmhelpers.asm +++ b/src/coreclr/vm/arm64/asmhelpers.asm @@ -1457,6 +1457,20 @@ RefCopyDone$argReg EPILOG_BRANCH_REG x11 LEAF_END Store_SwiftSelf + LEAF_ENTRY Load_SwiftError + ldr x11, [x9], #8 + str x11, [fp, #40] + mov x21, xzr + ldr x11, [x10], #8 + EPILOG_BRANCH_REG x11 + LEAF_END Load_SwiftError + + LEAF_ENTRY Store_SwiftError + str x21, [x9], #8 + ldr x11, [x10], #8 + EPILOG_BRANCH_REG x11 + LEAF_END Store_SwiftError + LEAF_ENTRY Store_X0 str x0, [x9], #8 ldr x11, [x10], #8 diff --git a/src/coreclr/vm/callstubgenerator.cpp b/src/coreclr/vm/callstubgenerator.cpp index 7bad225dcfaa8e..7b334b257fde06 100644 --- a/src/coreclr/vm/callstubgenerator.cpp +++ b/src/coreclr/vm/callstubgenerator.cpp @@ -3,6 +3,7 @@ #if defined(FEATURE_INTERPRETER) && !defined(TARGET_WASM) +#include "common.h" #include "callstubgenerator.h" #include "callconvbuilder.hpp" #include "ecall.h" @@ -2546,7 +2547,7 @@ bool isSwiftSelfType(MethodTable* pMT) return false; } - return strcmp(typeName, "SwiftSelf") == 0 || strcmp(typeName, "SwiftSelf`1") == 0; + return strcmp(typeName, "SwiftSelf") == 0; } @@ -2578,6 +2579,156 @@ bool isSwiftErrorType(MethodTable* pMT) return strcmp(typeName, "SwiftError") == 0; } + +//--------------------------------------------------------------------------- +// isSwiftIndirectResultType: +// Check if the given type is SwiftIndirectResult. +// +// Arguments: +// pMT - the handle for the type. +// +// Return Value: +// true if the given type is SwiftIndirectResult, +// false otherwise. 
+// +bool isSwiftIndirectResultType(MethodTable* pMT) +{ + const char* namespaceName = nullptr; + const char* typeName = pMT->GetFullyQualifiedNameInfo(&namespaceName); + + if ((namespaceName == NULL) || (typeName == NULL)) + { + return false; + } + + if (strcmp(namespaceName, "System.Runtime.InteropServices.Swift") != 0) + { + return false; + } + + return strcmp(typeName, "SwiftIndirectResult") == 0; +} + +//--------------------------------------------------------------------------- +// isIntrinsicSIMDType: +// Check if the given type is a SIMD type (Vector, Vector64, Vector128, etc.). +// +// Arguments: +// pMT - the handle for the type. +// +// Return Value: +// true if the given type is a SIMD type, +// false otherwise. +// +bool isIntrinsicSIMDType(MethodTable* pMT) +{ + if (!pMT->IsIntrinsicType()) + { + return false; + } + + const char* namespaceName = nullptr; + const char* typeName = pMT->GetFullyQualifiedNameInfo(&namespaceName); + + if ((namespaceName == NULL) || (typeName == NULL)) + { + return false; + } + + if (strcmp(namespaceName, "System.Runtime.Intrinsics") == 0) + { + return true; + } + + if (strcmp(namespaceName, "System.Numerics") == 0) + { + return true; + } + + return false; +} + +//--------------------------------------------------------------------------- +// ValidateSwiftCallSignature: +// Validates that a Swift calling convention signature is valid. +// Throws InvalidProgramException if the signature is invalid. +// +// Arguments: +// sig - the method signature to validate. +// +// Throws: +// InvalidProgramException +// +void ValidateSwiftCallSignature(MetaSig &sig) +{ + int swiftSelfCount = 0; + int swiftErrorCount = 0; + int swiftIndirectResultCount = 0; + + sig.Reset(); + + CorElementType argCorType; + while ((argCorType = sig.NextArg()) != ELEMENT_TYPE_END) + { + TypeHandle thArgTypeHandle = sig.GetLastTypeHandleThrowing(); + bool isByRef = (argCorType == ELEMENT_TYPE_BYREF); + + if (isByRef) + { + sig.GetByRefType(&thArgTypeHandle); + } + + if (thArgTypeHandle.IsNull() || thArgTypeHandle.IsTypeDesc()) + { + continue; + } + + MethodTable* pArgMT = thArgTypeHandle.AsMethodTable(); + + if (!pArgMT->IsValueType()) + { + COMPlusThrow(kInvalidProgramException); + } + + if (isIntrinsicSIMDType(pArgMT)) + { + COMPlusThrow(kInvalidProgramException); + } + + if (isSwiftSelfType(pArgMT)) + { + swiftSelfCount++; + if (swiftSelfCount > 1) + { + COMPlusThrow(kInvalidProgramException); + } + } + + if (isSwiftErrorType(pArgMT)) + { + if (!isByRef) + { + COMPlusThrow(kInvalidProgramException); + } + swiftErrorCount++; + if (swiftErrorCount > 1) + { + COMPlusThrow(kInvalidProgramException); + } + } + + if (isSwiftIndirectResultType(pArgMT)) + { + swiftIndirectResultCount++; + if (swiftIndirectResultCount > 1) + { + COMPlusThrow(kInvalidProgramException); + } + } + } + + sig.Reset(); +} #endif // TARGET_ARM64 void CallStubGenerator::ComputeCallStub(MetaSig &sig, PCODE *pRoutines, MethodDesc *pMD) @@ -2642,6 +2793,7 @@ void CallStubGenerator::ComputeCallStub(MetaSig &sig, PCODE *pRoutines, MethodDe #ifdef TARGET_ARM64 case CorInfoCallConvExtension::Swift: isSwiftCallConv = true; + ValidateSwiftCallSignature(sig); break; #endif default: From 7942050233515ad48bba4dcceee74fdcea78a6f3 Mon Sep 17 00:00:00 2001 From: Milos Kotlar Date: Thu, 15 Jan 2026 16:46:01 +0100 Subject: [PATCH 04/33] Add support for Swift struct lowering --- src/coreclr/vm/arm64/asmhelpers.S | 112 +++++++- src/coreclr/vm/arm64/asmhelpers.asm | 18 +- src/coreclr/vm/callstubgenerator.cpp | 383 
++++++++++++++++++++++++++- src/coreclr/vm/callstubgenerator.h | 8 + 4 files changed, 497 insertions(+), 24 deletions(-) diff --git a/src/coreclr/vm/arm64/asmhelpers.S b/src/coreclr/vm/arm64/asmhelpers.S index 08100255dfabe2..28d3673d341ceb 100644 --- a/src/coreclr/vm/arm64/asmhelpers.S +++ b/src/coreclr/vm/arm64/asmhelpers.S @@ -1173,11 +1173,13 @@ LEAF_ENTRY Load_SwiftSelf EPILOG_BRANCH_REG x11 LEAF_END Load_SwiftSelf -LEAF_ENTRY Store_SwiftSelf - str x20, [x9], #8 +LEAF_ENTRY Load_SwiftSelf_ByRef + mov x20, x9 + ldr x11, [x10], #8 + add x9, x9, x11 ldr x11, [x10], #8 EPILOG_BRANCH_REG x11 -LEAF_END Store_SwiftSelf +LEAF_END Load_SwiftSelf_ByRef LEAF_ENTRY Load_SwiftError ldr x11, [x9], #8 @@ -1187,11 +1189,109 @@ LEAF_ENTRY Load_SwiftError EPILOG_BRANCH_REG x11 LEAF_END Load_SwiftError -LEAF_ENTRY Store_SwiftError - str x21, [x9], #8 +.macro SwiftLoad_AtOffset reg + ldr x12, [x10], #8 // Load offset|struct_size + and w11, w12, #0xFFFF // Extract offset (lower 16 bits) + ldr \reg, [x9, x11] // Load from [x9 + offset] + lsr x12, x12, #16 // Shift to get struct_size + cbz x12, 1f // If struct_size == 0, skip advance + add x9, x9, x12 // Advance x9 by struct_size +1: + ldr x11, [x10], #8 + EPILOG_BRANCH_REG x11 +.endm + +LEAF_ENTRY Load_X0_AtOffset + SwiftLoad_AtOffset x0 +LEAF_END Load_X0_AtOffset + +LEAF_ENTRY Load_X1_AtOffset + SwiftLoad_AtOffset x1 +LEAF_END Load_X1_AtOffset + +LEAF_ENTRY Load_X2_AtOffset + SwiftLoad_AtOffset x2 +LEAF_END Load_X2_AtOffset + +LEAF_ENTRY Load_X3_AtOffset + SwiftLoad_AtOffset x3 +LEAF_END Load_X3_AtOffset + +LEAF_ENTRY Load_X4_AtOffset + SwiftLoad_AtOffset x4 +LEAF_END Load_X4_AtOffset + +LEAF_ENTRY Load_X5_AtOffset + SwiftLoad_AtOffset x5 +LEAF_END Load_X5_AtOffset + +LEAF_ENTRY Load_X6_AtOffset + SwiftLoad_AtOffset x6 +LEAF_END Load_X6_AtOffset + +LEAF_ENTRY Load_X7_AtOffset + SwiftLoad_AtOffset x7 +LEAF_END Load_X7_AtOffset + +// Float versions for Swift lowering +.macro SwiftLoadFloat_AtOffset reg + ldr x12, [x10], #8 // Load offset|struct_size + and w11, w12, #0xFFFF // Extract offset (lower 16 bits) + ldr \reg, [x9, x11] // Load float from [x9 + offset] + lsr x12, x12, #16 // Shift to get struct_size + cbz x12, 1f // If struct_size == 0, skip advance + add x9, x9, x12 // Advance x9 by struct_size +1: + ldr x11, [x10], #8 + EPILOG_BRANCH_REG x11 +.endm + +LEAF_ENTRY Load_D0_AtOffset + SwiftLoadFloat_AtOffset d0 +LEAF_END Load_D0_AtOffset + +LEAF_ENTRY Load_D1_AtOffset + SwiftLoadFloat_AtOffset d1 +LEAF_END Load_D1_AtOffset + +LEAF_ENTRY Load_D2_AtOffset + SwiftLoadFloat_AtOffset d2 +LEAF_END Load_D2_AtOffset + +LEAF_ENTRY Load_D3_AtOffset + SwiftLoadFloat_AtOffset d3 +LEAF_END Load_D3_AtOffset + +LEAF_ENTRY Load_D4_AtOffset + SwiftLoadFloat_AtOffset d4 +LEAF_END Load_D4_AtOffset + +LEAF_ENTRY Load_D5_AtOffset + SwiftLoadFloat_AtOffset d5 +LEAF_END Load_D5_AtOffset + +LEAF_ENTRY Load_D6_AtOffset + SwiftLoadFloat_AtOffset d6 +LEAF_END Load_D6_AtOffset + +LEAF_ENTRY Load_D7_AtOffset + SwiftLoadFloat_AtOffset d7 +LEAF_END Load_D7_AtOffset + +LEAF_ENTRY Load_Stack_AtOffset + ldr x12, [x10], #8 // Load offset|structSize|stackOffset + and w11, w12, #0xFFFF // Extract offset (lower 16 bits) + ldr x13, [x9, x11] // Load 8 bytes from [x9 + offset] + lsr x14, x12, #32 // Extract stackOffset (upper 32 bits) + add x14, sp, x14 // Calculate stack destination + str x13, [x14] // Store to native stack + ubfx x12, x12, #16, #16 // Extract structSize (bits 16-31) + cbz x12, 1f // If structSize == 0, skip advance + add x9, x9, x12 // Advance x9 by structSize +1: ldr 
x11, [x10], #8 EPILOG_BRANCH_REG x11 -LEAF_END Store_SwiftError +LEAF_END Load_Stack_AtOffset LEAF_ENTRY Store_X0 str x0, [x9], #8 diff --git a/src/coreclr/vm/arm64/asmhelpers.asm b/src/coreclr/vm/arm64/asmhelpers.asm index dd03c276242538..e9b5b4c96b2ee5 100644 --- a/src/coreclr/vm/arm64/asmhelpers.asm +++ b/src/coreclr/vm/arm64/asmhelpers.asm @@ -1451,11 +1451,15 @@ RefCopyDone$argReg EPILOG_BRANCH_REG x11 LEAF_END Load_SwiftSelf - LEAF_ENTRY Store_SwiftSelf - str x20, [x9], #8 - ldr x11, [x10], #8 + ; Load address of struct on interpreter stack into x20 (SwiftSelf byReference) + ; The next entry in the routines array contains the size of the struct + LEAF_ENTRY Load_SwiftSelf_ByRef + mov x20, x9 ; x20 = address of struct on interpreter stack + ldr x11, [x10], #8 ; Load size from routines array + add x9, x9, x11 ; Advance interpreter stack pointer by struct size + ldr x11, [x10], #8 ; Load next routine address EPILOG_BRANCH_REG x11 - LEAF_END Store_SwiftSelf + LEAF_END Load_SwiftSelf_ByRef LEAF_ENTRY Load_SwiftError ldr x11, [x9], #8 @@ -1465,12 +1469,6 @@ RefCopyDone$argReg EPILOG_BRANCH_REG x11 LEAF_END Load_SwiftError - LEAF_ENTRY Store_SwiftError - str x21, [x9], #8 - ldr x11, [x10], #8 - EPILOG_BRANCH_REG x11 - LEAF_END Store_SwiftError - LEAF_ENTRY Store_X0 str x0, [x9], #8 ldr x11, [x10], #8 diff --git a/src/coreclr/vm/callstubgenerator.cpp b/src/coreclr/vm/callstubgenerator.cpp index 7b334b257fde06..2bbc5d41419ed5 100644 --- a/src/coreclr/vm/callstubgenerator.cpp +++ b/src/coreclr/vm/callstubgenerator.cpp @@ -594,10 +594,26 @@ extern "C" void Store_X6_X7(); extern "C" void Store_X7(); extern "C" void Load_SwiftSelf(); -extern "C" void Store_SwiftSelf(); - +extern "C" void Load_SwiftSelf_ByRef(); extern "C" void Load_SwiftError(); -extern "C" void Store_SwiftError(); + +extern "C" void Load_X0_AtOffset(); +extern "C" void Load_X1_AtOffset(); +extern "C" void Load_X2_AtOffset(); +extern "C" void Load_X3_AtOffset(); +extern "C" void Load_X4_AtOffset(); +extern "C" void Load_X5_AtOffset(); +extern "C" void Load_X6_AtOffset(); +extern "C" void Load_X7_AtOffset(); +extern "C" void Load_D0_AtOffset(); +extern "C" void Load_D1_AtOffset(); +extern "C" void Load_D2_AtOffset(); +extern "C" void Load_D3_AtOffset(); +extern "C" void Load_D4_AtOffset(); +extern "C" void Load_D5_AtOffset(); +extern "C" void Load_D6_AtOffset(); +extern "C" void Load_D7_AtOffset(); +extern "C" void Load_Stack_AtOffset(); extern "C" void Load_Ref_X0(); extern "C" void Load_Ref_X1(); @@ -1981,7 +1997,15 @@ PCODE CallStubGenerator::GetSwiftSelfRoutine() #if LOG_COMPUTE_CALL_STUB printf("GetSwiftSelfRoutine\n"); #endif - return m_interpreterToNative ? (PCODE)Load_SwiftSelf : (PCODE)Store_SwiftSelf; + return (PCODE)Load_SwiftSelf; +} + +PCODE CallStubGenerator::GetSwiftSelfByRefRoutine() +{ +#if LOG_COMPUTE_CALL_STUB + printf("GetSwiftSelfByRefRoutine\n"); +#endif + return (PCODE)Load_SwiftSelf_ByRef; } PCODE CallStubGenerator::GetSwiftErrorRoutine() @@ -1989,7 +2013,29 @@ PCODE CallStubGenerator::GetSwiftErrorRoutine() #if LOG_COMPUTE_CALL_STUB printf("GetSwiftErrorRoutine\n"); #endif - return m_interpreterToNative ? 
(PCODE)Load_SwiftError : (PCODE)Store_SwiftError; + return (PCODE)Load_SwiftError; +} + +// Get offset-aware load routine for Swift struct lowering (GP registers) +PCODE CallStubGenerator::GetSwiftLoadGPAtOffsetRoutine(int regIndex) +{ + static PCODE routines[] = { + (PCODE)Load_X0_AtOffset, (PCODE)Load_X1_AtOffset, (PCODE)Load_X2_AtOffset, (PCODE)Load_X3_AtOffset, + (PCODE)Load_X4_AtOffset, (PCODE)Load_X5_AtOffset, (PCODE)Load_X6_AtOffset, (PCODE)Load_X7_AtOffset + }; + _ASSERTE(regIndex >= 0 && regIndex < 8); + return routines[regIndex]; +} + +// Get offset-aware load routine for Swift struct lowering (FP registers) +PCODE CallStubGenerator::GetSwiftLoadFPAtOffsetRoutine(int regIndex) +{ + static PCODE routines[] = { + (PCODE)Load_D0_AtOffset, (PCODE)Load_D1_AtOffset, (PCODE)Load_D2_AtOffset, (PCODE)Load_D3_AtOffset, + (PCODE)Load_D4_AtOffset, (PCODE)Load_D5_AtOffset, (PCODE)Load_D6_AtOffset, (PCODE)Load_D7_AtOffset + }; + _ASSERTE(regIndex >= 0 && regIndex < 8); + return routines[regIndex]; } #endif // TARGET_ARM64 @@ -2469,6 +2515,13 @@ void CallStubGenerator::TerminateCurrentRoutineIfNotOfNewType(RoutineType type, pRoutines[m_routineIndex++] = GetSwiftSelfRoutine(); m_currentRoutineType = RoutineType::None; } + else if ((m_currentRoutineType == RoutineType::SwiftSelfByRef) && (type != RoutineType::SwiftSelfByRef)) + { + pRoutines[m_routineIndex++] = GetSwiftSelfByRefRoutine(); + pRoutines[m_routineIndex++] = (PCODE)m_swiftSelfByRefSize; + m_swiftSelfByRefSize = 0; + m_currentRoutineType = RoutineType::None; + } else if ((m_currentRoutineType == RoutineType::SwiftError) && (type != RoutineType::SwiftError)) { pRoutines[m_routineIndex++] = GetSwiftErrorRoutine(); @@ -2523,13 +2576,13 @@ bool isNativePrimitiveStructType(MethodTable* pMT) #ifdef TARGET_ARM64 //--------------------------------------------------------------------------- // isSwiftSelfType: -// Check if the given type is SwiftSelf or SwiftSelf. +// Check if the given type is SwiftSelf. // // Arguments: // pMT - the handle for the type. // // Return Value: -// true if the given type is SwiftSelf or SwiftSelf, +// true if the given type is SwiftSelf, // false otherwise. // bool isSwiftSelfType(MethodTable* pMT) @@ -2550,6 +2603,34 @@ bool isSwiftSelfType(MethodTable* pMT) return strcmp(typeName, "SwiftSelf") == 0; } +//--------------------------------------------------------------------------- +// isSwiftSelfGenericType: +// Check if the given type is SwiftSelf. +// +// Arguments: +// pMT - the handle for the type. +// +// Return Value: +// true if the given type is SwiftSelf, +// false otherwise. 
+// +bool isSwiftSelfGenericType(MethodTable* pMT) +{ + const char* namespaceName = nullptr; + const char* typeName = pMT->GetFullyQualifiedNameInfo(&namespaceName); + + if ((namespaceName == NULL) || (typeName == NULL)) + { + return false; + } + + if (strcmp(namespaceName, "System.Runtime.InteropServices.Swift") != 0) + { + return false; + } + + return strcmp(typeName, "SwiftSelf`1") == 0; +} //--------------------------------------------------------------------------- // isSwiftErrorType: @@ -2793,7 +2874,6 @@ void CallStubGenerator::ComputeCallStub(MetaSig &sig, PCODE *pRoutines, MethodDe #ifdef TARGET_ARM64 case CorInfoCallConvExtension::Swift: isSwiftCallConv = true; - ValidateSwiftCallSignature(sig); break; #endif default: @@ -2882,6 +2962,213 @@ void CallStubGenerator::ComputeCallStub(MetaSig &sig, PCODE *pRoutines, MethodDe sig = newSig; } +#ifdef TARGET_ARM64 + // Swift lowering info for expanded struct elements + // Max 4 elements per struct, max ~32 args = 128 entries should be plenty + struct SwiftLoweringElement { + uint16_t offset; // Offset within struct + uint16_t structSize; // If non-zero, this is the last element, advance x9 by this amount + bool isFloat; // True if this element goes in FP register + bool isLowered; // True if this is part of a lowered struct (not a regular arg) + }; + SwiftLoweringElement swiftLoweringInfo[128]; + int swiftLoweringCount = 0; + + if (isSwiftCallConv) + { + if (!m_interpreterToNative) + { + COMPlusThrow(kNotImplementedException); + } + + // Count how many extra arguments we need due to Swift lowering + sig.Reset(); + int newArgCount = 0; + int swiftSelfCount = 0; + int swiftErrorCount = 0; + int swiftIndirectResultCount = 0; + CorElementType argType; + while ((argType = sig.NextArg()) != ELEMENT_TYPE_END) + { + TypeHandle thArgType = sig.GetLastTypeHandleThrowing(); + bool isByRef = (argType == ELEMENT_TYPE_BYREF); + + if (isByRef) + { + sig.GetByRefType(&thArgType); + } + + if (!thArgType.IsNull() && !thArgType.IsTypeDesc()) + { + MethodTable* pArgMT = thArgType.AsMethodTable(); + + if (!pArgMT->IsValueType()) + { + COMPlusThrow(kInvalidProgramException); + } + + if (isIntrinsicSIMDType(pArgMT)) + { + COMPlusThrow(kInvalidProgramException); + } + + if (isSwiftSelfType(pArgMT)) + { + swiftSelfCount++; + if (swiftSelfCount > 1) + { + COMPlusThrow(kInvalidProgramException); + } + newArgCount++; + continue; + } + + if (isSwiftErrorType(pArgMT)) + { + if (!isByRef) + { + COMPlusThrow(kInvalidProgramException); + } + swiftErrorCount++; + if (swiftErrorCount > 1) + { + COMPlusThrow(kInvalidProgramException); + } + newArgCount++; + continue; + } + + if (isSwiftIndirectResultType(pArgMT)) + { + swiftIndirectResultCount++; + if (swiftIndirectResultCount > 1) + { + COMPlusThrow(kInvalidProgramException); + } + } + + if (argType == ELEMENT_TYPE_VALUETYPE) + { + CORINFO_SWIFT_LOWERING lowering = {}; + pArgMT->GetNativeSwiftPhysicalLowering(&lowering, false); + + if (!lowering.byReference && lowering.numLoweredElements > 0) + { + newArgCount += lowering.numLoweredElements; + continue; + } + } + } + + newArgCount++; + } + + // Build new signature with lowered structs and store lowering info + SigBuilder swiftSigBuilder; + swiftSigBuilder.AppendByte(sig.GetCallingConventionInfo()); + swiftSigBuilder.AppendData(newArgCount); + + // Copy return type + SigPointer pReturn = sig.GetReturnProps(); + pReturn.ConvertToInternalExactlyOne(sig.GetModule(), sig.GetSigTypeContext(), &swiftSigBuilder); + + // Process arguments + sig.Reset(); + while ((argType = 
sig.NextArg()) != ELEMENT_TYPE_END) + { + if (argType == ELEMENT_TYPE_VALUETYPE) + { + TypeHandle thArgType = sig.GetLastTypeHandleThrowing(); + MethodTable* pArgMT = thArgType.IsTypeDesc() ? nullptr : thArgType.AsMethodTable(); + if (pArgMT != nullptr) + { + // Don't lower SwiftSelf or SwiftError types + if (isSwiftSelfType(pArgMT) || isSwiftErrorType(pArgMT)) + { + SigPointer pArg = sig.GetArgProps(); + pArg.ConvertToInternalExactlyOne(sig.GetModule(), sig.GetSigTypeContext(), &swiftSigBuilder); + swiftLoweringInfo[swiftLoweringCount++] = { 0, 0, false, false }; + continue; + } + + CORINFO_SWIFT_LOWERING lowering = {}; + pArgMT->GetNativeSwiftPhysicalLowering(&lowering, false); + + if (!lowering.byReference && lowering.numLoweredElements > 0) + { + // Emit primitive types instead of struct + int structSize = ALIGN_UP(pArgMT->GetNumInstanceFieldBytes(), INTERP_STACK_SLOT_SIZE); + for (size_t i = 0; i < lowering.numLoweredElements; i++) + { + bool isFloat = false; + switch (lowering.loweredElements[i]) + { + case CORINFO_TYPE_BYTE: + case CORINFO_TYPE_UBYTE: + swiftSigBuilder.AppendElementType(ELEMENT_TYPE_I1); + break; + case CORINFO_TYPE_SHORT: + swiftSigBuilder.AppendElementType(ELEMENT_TYPE_I2); + break; + case CORINFO_TYPE_USHORT: + swiftSigBuilder.AppendElementType(ELEMENT_TYPE_U2); + break; + case CORINFO_TYPE_INT: + swiftSigBuilder.AppendElementType(ELEMENT_TYPE_I4); + break; + case CORINFO_TYPE_UINT: + swiftSigBuilder.AppendElementType(ELEMENT_TYPE_U4); + break; + case CORINFO_TYPE_LONG: + swiftSigBuilder.AppendElementType(ELEMENT_TYPE_I8); + break; + case CORINFO_TYPE_ULONG: + swiftSigBuilder.AppendElementType(ELEMENT_TYPE_U8); + break; + case CORINFO_TYPE_NATIVEINT: + swiftSigBuilder.AppendElementType(ELEMENT_TYPE_I); + break; + case CORINFO_TYPE_NATIVEUINT: + swiftSigBuilder.AppendElementType(ELEMENT_TYPE_U); + break; + case CORINFO_TYPE_FLOAT: + swiftSigBuilder.AppendElementType(ELEMENT_TYPE_R4); + isFloat = true; + break; + case CORINFO_TYPE_DOUBLE: + swiftSigBuilder.AppendElementType(ELEMENT_TYPE_R8); + isFloat = true; + break; + default: + swiftSigBuilder.AppendElementType(ELEMENT_TYPE_I); + break; + } + bool isLast = (i == lowering.numLoweredElements - 1); + swiftLoweringInfo[swiftLoweringCount++] = { + (uint16_t)lowering.offsets[i], + isLast ? (uint16_t)structSize : (uint16_t)0, + isFloat, + true + }; + } + continue; + } + } + } + + SigPointer pArg = sig.GetArgProps(); + pArg.ConvertToInternalExactlyOne(sig.GetModule(), sig.GetSigTypeContext(), &swiftSigBuilder); + swiftLoweringInfo[swiftLoweringCount++] = { 0, 0, false, false }; + } + + DWORD cSwiftSig; + PCCOR_SIGNATURE pSwiftSig = (PCCOR_SIGNATURE)swiftSigBuilder.GetSignature(&cSwiftSig); + MetaSig swiftSig(pSwiftSig, cSwiftSig, sig.GetModule(), NULL, MetaSig::sigMember); + sig = swiftSig; + } + int swiftArgIndex = 0; +#endif + ArgIterator argIt(&sig); int32_t interpreterStackOffset = 0; @@ -2894,6 +3181,9 @@ void CallStubGenerator::ComputeCallStub(MetaSig &sig, PCODE *pRoutines, MethodDe m_s2 = 0; m_routineIndex = 0; m_totalStackSize = argIt.SizeOfArgStack(); +#ifdef TARGET_ARM64 + m_swiftSelfByRefSize = 0; +#endif #if LOG_COMPUTE_CALL_STUB printf("ComputeCallStub\n"); #endif @@ -2963,6 +3253,30 @@ void CallStubGenerator::ComputeCallStub(MetaSig &sig, PCODE *pRoutines, MethodDe if ((argCorType == ELEMENT_TYPE_VALUETYPE || argCorType == ELEMENT_TYPE_BYREF) && !thArgTypeHandle.IsNull()) { MethodTable* pArgMT = thArgTypeHandle.IsTypeDesc() ? 
nullptr : thArgTypeHandle.AsMethodTable(); + if (pArgMT != nullptr && isSwiftSelfGenericType(pArgMT)) + { + Instantiation inst = pArgMT->GetInstantiation(); + _ASSERTE(inst.GetNumArgs() != 0); + TypeHandle innerType = inst[0]; + _ASSERTE(!innerType.IsNull() && !innerType.IsTypeDesc()); + MethodTable* pInnerMT = innerType.AsMethodTable(); +#if DEBUG + CORINFO_SWIFT_LOWERING lowering = {}; + pInnerMT->GetNativeSwiftPhysicalLowering(&lowering, false /* useNativeLayout */); + _ASSERTE(lowering.byReference); +#endif // DEBUG + +#if LOG_COMPUTE_CALL_STUB + printf("SwiftSelf argument detected\n"); +#endif + TerminateCurrentRoutineIfNotOfNewType(RoutineType::SwiftSelfByRef, pRoutines); + m_currentRoutineType = RoutineType::SwiftSelfByRef; + + int structSize = ALIGN_UP(pInnerMT->GetNumInstanceFieldBytes(), INTERP_STACK_SLOT_SIZE); + m_swiftSelfByRefSize = structSize; + interpreterStackOffset += structSize; + continue; + } if (pArgMT != nullptr && isSwiftSelfType(pArgMT)) { #if LOG_COMPUTE_CALL_STUB @@ -3018,6 +3332,59 @@ void CallStubGenerator::ComputeCallStub(MetaSig &sig, PCODE *pRoutines, MethodDe } interpreterStackOffset += interpStackSlotSize; +#ifdef TARGET_ARM64 + if (isSwiftCallConv && m_interpreterToNative && swiftArgIndex < swiftLoweringCount) + { + SwiftLoweringElement& elem = swiftLoweringInfo[swiftArgIndex]; + swiftArgIndex++; + + if (elem.isLowered) + { + TerminateCurrentRoutineIfNotOfNewType(RoutineType::None, pRoutines); + + if (elem.isFloat && argLocDesc.m_cFloatReg > 0) + { + int regIndex = argLocDesc.m_idxFloatReg; + pRoutines[m_routineIndex++] = GetSwiftLoadFPAtOffsetRoutine(regIndex); + // Pack offset (lower 16 bits) and structSize (bits 16-31) + PCODE packedData = (PCODE)elem.offset | ((PCODE)elem.structSize << 16); + pRoutines[m_routineIndex++] = packedData; +#if LOG_COMPUTE_CALL_STUB + printf("Swift lowered element to FP reg: offset=%d, structSize=%d, reg=d%d\n", + elem.offset, elem.structSize, regIndex); +#endif + } + else if (!elem.isFloat && argLocDesc.m_cGenReg > 0) + { + int regIndex = argLocDesc.m_idxGenReg; + pRoutines[m_routineIndex++] = GetSwiftLoadGPAtOffsetRoutine(regIndex); + // Pack offset (lower 16 bits) and structSize (bits 16-31) + PCODE packedData = (PCODE)elem.offset | ((PCODE)elem.structSize << 16); + pRoutines[m_routineIndex++] = packedData; +#if LOG_COMPUTE_CALL_STUB + printf("Swift lowered element to GP reg: offset=%d, structSize=%d, reg=x%d\n", + elem.offset, elem.structSize, regIndex); +#endif + } + else + { + // Spilled to stack + pRoutines[m_routineIndex++] = (PCODE)Load_Stack_AtOffset; + // Pack offset (lower 16 bits), structSize (bits 16-31), and stackOffset (bits 32-63) + PCODE packedData = (PCODE)elem.offset | + ((PCODE)elem.structSize << 16) | + ((PCODE)argLocDesc.m_byteStackIndex << 32); + pRoutines[m_routineIndex++] = packedData; +#if LOG_COMPUTE_CALL_STUB + printf("Swift lowered element to stack: offset=%d, structSize=%d, stackOffset=%d\n", + elem.offset, elem.structSize, argLocDesc.m_byteStackIndex); +#endif + } + continue; + } + } +#endif // TARGET_ARM64 + #ifdef UNIX_AMD64_ABI if (argIt.GetArgLocDescForStructInRegs() != NULL) { diff --git a/src/coreclr/vm/callstubgenerator.h b/src/coreclr/vm/callstubgenerator.h index 115c07fc8307dc..a86ce2034b09c1 100644 --- a/src/coreclr/vm/callstubgenerator.h +++ b/src/coreclr/vm/callstubgenerator.h @@ -118,6 +118,7 @@ class CallStubGenerator FPReg32, FPReg128, SwiftSelf, + SwiftSelfByRef, SwiftError, #endif Stack @@ -141,6 +142,10 @@ class CallStubGenerator int m_routineIndex = 0; // The total stack 
size used for the arguments. int m_totalStackSize = 0; +#ifdef TARGET_ARM64 + // Size of struct for SwiftSelf byReference case + int m_swiftSelfByRefSize = 0; +#endif CallStubHeader::InvokeFunctionPtr m_pInvokeFunction = NULL; bool m_interpreterToNative = false; @@ -160,7 +165,10 @@ class CallStubGenerator PCODE GetFPReg128RangeRoutine(int x1, int x2); PCODE GetFPReg32RangeRoutine(int x1, int x2); PCODE GetSwiftSelfRoutine(); + PCODE GetSwiftSelfByRefRoutine(); PCODE GetSwiftErrorRoutine(); + PCODE GetSwiftLoadGPAtOffsetRoutine(int regIndex); + PCODE GetSwiftLoadFPAtOffsetRoutine(int regIndex); #endif PCODE GetGPRegRangeRoutine(int r1, int r2); ReturnType GetReturnType(ArgIterator *pArgIt); From 61fc4b5c49380174aa5ad8fefffc77ceb9e1cfd9 Mon Sep 17 00:00:00 2001 From: Milos Kotlar Date: Thu, 15 Jan 2026 18:48:27 +0100 Subject: [PATCH 05/33] Add support for loading Swift indirect results --- src/coreclr/vm/arm64/asmhelpers.S | 8 +++++ src/coreclr/vm/arm64/asmhelpers.asm | 8 +++++ src/coreclr/vm/callstubgenerator.cpp | 49 ++++++++++++++++++++++++++-- src/coreclr/vm/callstubgenerator.h | 4 +++ 4 files changed, 67 insertions(+), 2 deletions(-) diff --git a/src/coreclr/vm/arm64/asmhelpers.S b/src/coreclr/vm/arm64/asmhelpers.S index 28d3673d341ceb..d9d9b14405d0f6 100644 --- a/src/coreclr/vm/arm64/asmhelpers.S +++ b/src/coreclr/vm/arm64/asmhelpers.S @@ -37,12 +37,14 @@ NESTED_ENTRY PInvokeImportThunk, _TEXT, NoHandler PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -224 SAVE_ARGUMENT_REGISTERS sp, 16 SAVE_FLOAT_ARGUMENT_REGISTERS sp, 96 + str x8, [sp, #208] // Save x8 (indirect result register for Swift) mov x0, x12 bl C_FUNC(PInvokeImportWorker) mov x12, x0 // pop the stack and restore original register state + ldr x8, [sp, #208] // Restore x8 (indirect result register for Swift) RESTORE_FLOAT_ARGUMENT_REGISTERS sp, 96 RESTORE_ARGUMENT_REGISTERS sp, 16 EPILOG_RESTORE_REG_PAIR_INDEXED fp, lr, 224 @@ -1189,6 +1191,12 @@ LEAF_ENTRY Load_SwiftError EPILOG_BRANCH_REG x11 LEAF_END Load_SwiftError +LEAF_ENTRY Load_SwiftIndirectResult + ldr x8, [x9], #8 + ldr x11, [x10], #8 + EPILOG_BRANCH_REG x11 +LEAF_END Load_SwiftIndirectResult + .macro SwiftLoad_AtOffset reg ldr x12, [x10], #8 // Load offset|struct_size and w11, w12, #0xFFFF // Extract offset (lower 16 bits) diff --git a/src/coreclr/vm/arm64/asmhelpers.asm b/src/coreclr/vm/arm64/asmhelpers.asm index e9b5b4c96b2ee5..4953df3ab8fd0d 100644 --- a/src/coreclr/vm/arm64/asmhelpers.asm +++ b/src/coreclr/vm/arm64/asmhelpers.asm @@ -101,12 +101,14 @@ PROLOG_SAVE_REG_PAIR fp, lr, #-224! SAVE_ARGUMENT_REGISTERS sp, 16 SAVE_FLOAT_ARGUMENT_REGISTERS sp, 96 + str x8, [sp, #208] ; Save x8 (indirect result register for Swift) mov x0, x12 bl PInvokeImportWorker mov x12, x0 ; pop the stack and restore original register state + ldr x8, [sp, #208] ; Restore x8 (indirect result register for Swift) RESTORE_FLOAT_ARGUMENT_REGISTERS sp, 96 RESTORE_ARGUMENT_REGISTERS sp, 16 EPILOG_RESTORE_REG_PAIR fp, lr, #224! 
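; x8 is the Swift indirect-result register: Load_SwiftIndirectResult (added
; below, and in the asmhelpers.S counterpart) populates it from the
; interpreter stack before the target is invoked, which is why the
; PInvokeImportThunk change above must preserve x8 across the call to
; PInvokeImportWorker.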
@@ -1469,6 +1471,12 @@ RefCopyDone$argReg
        EPILOG_BRANCH_REG x11
    LEAF_END Load_SwiftError

+    LEAF_ENTRY Load_SwiftIndirectResult
+        ldr x8, [x9], #8
+        ldr x11, [x10], #8
+        EPILOG_BRANCH_REG x11
+    LEAF_END Load_SwiftIndirectResult
+
    LEAF_ENTRY Store_X0
        str x0, [x9], #8
        ldr x11, [x10], #8
diff --git a/src/coreclr/vm/callstubgenerator.cpp b/src/coreclr/vm/callstubgenerator.cpp
index 2bbc5d41419ed5..fed53c954f5f3c 100644
--- a/src/coreclr/vm/callstubgenerator.cpp
+++ b/src/coreclr/vm/callstubgenerator.cpp
@@ -596,6 +596,7 @@ extern "C" void Store_X7();
 extern "C" void Load_SwiftSelf();
 extern "C" void Load_SwiftSelf_ByRef();
 extern "C" void Load_SwiftError();
+extern "C" void Load_SwiftIndirectResult();

 extern "C" void Load_X0_AtOffset();
 extern "C" void Load_X1_AtOffset();
@@ -2016,6 +2017,14 @@ PCODE CallStubGenerator::GetSwiftErrorRoutine()
     return (PCODE)Load_SwiftError;
 }

+PCODE CallStubGenerator::GetSwiftIndirectResultRoutine()
+{
+#if LOG_COMPUTE_CALL_STUB
+    printf("GetSwiftIndirectResultRoutine\n");
+#endif
+    return (PCODE)Load_SwiftIndirectResult;
+}
+
 // Get offset-aware load routine for Swift struct lowering (GP registers)
 PCODE CallStubGenerator::GetSwiftLoadGPAtOffsetRoutine(int regIndex)
 {
@@ -2527,6 +2536,11 @@ void CallStubGenerator::TerminateCurrentRoutineIfNotOfNewType(RoutineType type,
         pRoutines[m_routineIndex++] = GetSwiftErrorRoutine();
         m_currentRoutineType = RoutineType::None;
     }
+    else if ((m_currentRoutineType == RoutineType::SwiftIndirectResult) && (type != RoutineType::SwiftIndirectResult))
+    {
+        pRoutines[m_routineIndex++] = GetSwiftIndirectResultRoutine();
+        m_currentRoutineType = RoutineType::None;
+    }
 #endif // TARGET_ARM64
     else if ((m_currentRoutineType == RoutineType::Stack) && (type != RoutineType::Stack))
     {
@@ -2808,6 +2822,16 @@ void ValidateSwiftCallSignature(MetaSig &sig)
         }
     }

+    if (swiftIndirectResultCount > 0)
+    {
+        TypeHandle thReturnValueType;
+        CorElementType retType = sig.GetReturnTypeNormalized(&thReturnValueType);
+        if (retType != ELEMENT_TYPE_VOID)
+        {
+            COMPlusThrow(kInvalidProgramException);
+        }
+    }
+
     sig.Reset();
 }
 #endif // TARGET_ARM64
@@ -2973,6 +2997,7 @@ void CallStubGenerator::ComputeCallStub(MetaSig &sig, PCODE *pRoutines, MethodDe
     };
     SwiftLoweringElement swiftLoweringInfo[128];
     int swiftLoweringCount = 0;
+    int swiftIndirectResultCount = 0;

     if (isSwiftCallConv)
     {
@@ -2986,7 +3011,6 @@ void CallStubGenerator::ComputeCallStub(MetaSig &sig, PCODE *pRoutines, MethodDe
         int newArgCount = 0;
         int swiftSelfCount = 0;
         int swiftErrorCount = 0;
-        int swiftIndirectResultCount = 0;
         CorElementType argType;
         while ((argType = sig.NextArg()) != ELEMENT_TYPE_END)
         {
@@ -3045,6 +3069,8 @@ void CallStubGenerator::ComputeCallStub(MetaSig &sig, PCODE *pRoutines, MethodDe
             {
                 COMPlusThrow(kInvalidProgramException);
             }
+            // SwiftIndirectResult goes in x8, not in argument registers
+            continue;
         }

         if (argType == ELEMENT_TYPE_VALUETYPE)
@@ -3082,7 +3108,12 @@ void CallStubGenerator::ComputeCallStub(MetaSig &sig, PCODE *pRoutines, MethodDe
             MethodTable* pArgMT = thArgType.IsTypeDesc() ? nullptr : thArgType.AsMethodTable();
             if (pArgMT != nullptr)
             {
-                // Don't lower SwiftSelf or SwiftError types
+                if (isSwiftIndirectResultType(pArgMT))
+                {
+                    // SwiftIndirectResult goes in x8, not in argument registers
+                    continue;
+                }
+                // Don't lower SwiftSelf (including SwiftSelf<T>) or SwiftError; these are passed in dedicated registers
                 if (isSwiftSelfType(pArgMT) || isSwiftErrorType(pArgMT))
                 {
                     SigPointer pArg = sig.GetArgProps();
@@ -3183,6 +3214,7 @@ void CallStubGenerator::ComputeCallStub(MetaSig &sig, PCODE *pRoutines, MethodDe
     m_totalStackSize = argIt.SizeOfArgStack();
 #ifdef TARGET_ARM64
     m_swiftSelfByRefSize = 0;
+    m_hasSwiftIndirectResult = (isSwiftCallConv && swiftIndirectResultCount > 0);
 #endif
 #if LOG_COMPUTE_CALL_STUB
     printf("ComputeCallStub\n");
@@ -3226,6 +3258,19 @@ void CallStubGenerator::ComputeCallStub(MetaSig &sig, PCODE *pRoutines, MethodDe
         interpreterStackOffset += INTERP_STACK_SLOT_SIZE;
     }

+#ifdef TARGET_ARM64
+    if (m_hasSwiftIndirectResult)
+    {
+#if LOG_COMPUTE_CALL_STUB
+        printf("Emitting Load_SwiftIndirectResult routine\n");
+#endif
+        TerminateCurrentRoutineIfNotOfNewType(RoutineType::SwiftIndirectResult, pRoutines);
+        pRoutines[m_routineIndex++] = GetSwiftIndirectResultRoutine();
+        m_currentRoutineType = RoutineType::None;
+        interpreterStackOffset += INTERP_STACK_SLOT_SIZE;
+    }
+#endif
+
     int ofs;
     while ((ofs = argIt.GetNextOffset()) != TransitionBlock::InvalidOffset)
     {
diff --git a/src/coreclr/vm/callstubgenerator.h b/src/coreclr/vm/callstubgenerator.h
index a86ce2034b09c1..7a1b0318020c02 100644
--- a/src/coreclr/vm/callstubgenerator.h
+++ b/src/coreclr/vm/callstubgenerator.h
@@ -120,6 +120,7 @@ class CallStubGenerator
         SwiftSelf,
         SwiftSelfByRef,
         SwiftError,
+        SwiftIndirectResult,
 #endif
         Stack
     };
@@ -145,6 +146,8 @@ class CallStubGenerator
 #ifdef TARGET_ARM64
     // Size of struct for SwiftSelf byReference case
     int m_swiftSelfByRefSize = 0;
+    // Tracks whether the signature includes a SwiftIndirectResult argument
+    bool m_hasSwiftIndirectResult = false;
 #endif
     CallStubHeader::InvokeFunctionPtr m_pInvokeFunction = NULL;
@@ -167,6 +170,7 @@ class CallStubGenerator
     PCODE GetSwiftSelfRoutine();
     PCODE GetSwiftSelfByRefRoutine();
     PCODE GetSwiftErrorRoutine();
+    PCODE GetSwiftIndirectResultRoutine();
     PCODE GetSwiftLoadGPAtOffsetRoutine(int regIndex);
     PCODE GetSwiftLoadFPAtOffsetRoutine(int regIndex);
 #endif

From 41a7259f6b3cfb847833647db0cb01da5b951702 Mon Sep 17 00:00:00 2001
From: Milos Kotlar
Date: Thu, 15 Jan 2026 21:33:53 +0100
Subject: [PATCH 06/33] Fix merge error

---
 src/coreclr/vm/callstubgenerator.cpp | 7 +++----
 1 file changed, 3 insertions(+), 4 deletions(-)

diff --git a/src/coreclr/vm/callstubgenerator.cpp b/src/coreclr/vm/callstubgenerator.cpp
index 25b9393989ba36..83d179985bb77c 100644
--- a/src/coreclr/vm/callstubgenerator.cpp
+++ b/src/coreclr/vm/callstubgenerator.cpp
@@ -2890,10 +2890,6 @@ void CallStubGenerator::ComputeCallStub(MetaSig &sig, PCODE *pRoutines, MethodDe
         }
     }

-#ifdef TARGET_ARM64
-    bool isSwiftCallConv = false;
-#endif
-
     if (hasUnmanagedCallConv)
     {
         ComputeCallStubWorker(hasUnmanagedCallConv, unmanagedCallConv, sig, pRoutines, pMD);
@@ -2909,6 +2905,9 @@ void CallStubGenerator::ComputeCallStubWorker(bool hasUnmanagedCallConv, CorInfo
 {
     bool unmanagedThisCallConv = false;
     bool rewriteMetaSigFromExplicitThisToHasThis = false;
+#ifdef TARGET_ARM64
+    bool isSwiftCallConv = false;
+#endif

     if (hasUnmanagedCallConv)
     {

From 1dd70f3303f637d00fca6a2de67bff3996ea8797 Mon Sep 17 00:00:00 2001
From: Milos Kotlar
Date: Fri, 16 Jan 2026 10:17:33 +0100
Subject: [PATCH 07/33] Implement Swift interop support for lowered return values

---
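A sketch of the Routines layout this change produces, for a hypothetical Swift callee whose struct return lowers to one double at offset 0 and one 64-bit integer at offset 8 (the names below all come from this patch; the concrete register assignment depends on the emission code):

    Routines[TargetSlotIndex]     = <target method pointer>
    Routines[TargetSlotIndex + 1] = (PCODE)Store_D0_AtOffset
    Routines[TargetSlotIndex + 2] = 0    // byte offset into the return buffer
    Routines[TargetSlotIndex + 3] = (PCODE)Store_X0_AtOffset
    Routines[TargetSlotIndex + 4] = 8
    Routines[TargetSlotIndex + 5] = (PCODE)SwiftLoweredReturnTerminator

CallJittedMethodRetSwiftLowered reads TargetSlotIndex from the stub header, runs this store chain after the native call returns, and the terminator branches back to the stub epilog.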
src/coreclr/vm/arm64/asmhelpers.S | 105 ++++++++++++++++++++++ src/coreclr/vm/callstubgenerator.cpp | 128 ++++++++++++++++++++++++--- src/coreclr/vm/callstubgenerator.h | 24 +++-- 3 files changed, 242 insertions(+), 15 deletions(-) diff --git a/src/coreclr/vm/arm64/asmhelpers.S b/src/coreclr/vm/arm64/asmhelpers.S index d9d9b14405d0f6..7ddbd41867ba32 100644 --- a/src/coreclr/vm/arm64/asmhelpers.S +++ b/src/coreclr/vm/arm64/asmhelpers.S @@ -1008,6 +1008,23 @@ NESTED_ENTRY InterpreterStubRet4Vector128, _TEXT, NoHandler EPILOG_RETURN NESTED_END InterpreterStubRet4Vector128, _TEXT +// This stub is for when an interpreted method is called with Swift calling convention +// and returns a lowered struct. This stub writes return values to a buffer. +NESTED_ENTRY InterpreterStubRetSwiftLowered, _TEXT, NoHandler + PROLOG_SAVE_REG_PAIR_NO_FP_INDEXED fp, lr, -16 + add x0, sp, #__PWTB_TransitionBlock + 16 + mov x1, x19 // the IR bytecode pointer + mov x2, xzr + bl C_FUNC(ExecuteInterpretedMethod) + mov x9, x0 // Save return buffer address + ldp x0, x1, [x9] + ldp x2, x3, [x9, #16] + ldp d0, d1, [x9, #32] + ldp d2, d3, [x9, #48] + EPILOG_RESTORE_REG_PAIR_INDEXED fp, lr, 16 + EPILOG_RETURN +NESTED_END InterpreterStubRetSwiftLowered, _TEXT + // Copy arguments from the processor stack to the interpreter stack // The CPU stack slots are aligned to pointer size. @@ -2986,5 +3003,93 @@ LOCAL_LABEL(CallJittedMethodRet4Vector128_NoSwiftError): EPILOG_RETURN NESTED_END CallJittedMethodRet4Vector128, _TEXT +.macro SwiftStore_AtOffset reg + ldr x11, [x10], #8 + and w11, w11, #0xFFFF + str \reg, [x9, x11] + ldr x11, [x10], #8 + br x11 +.endm + +LEAF_ENTRY Store_X0_AtOffset + SwiftStore_AtOffset x0 +LEAF_END Store_X0_AtOffset + +LEAF_ENTRY Store_X1_AtOffset + SwiftStore_AtOffset x1 +LEAF_END Store_X1_AtOffset + +LEAF_ENTRY Store_X2_AtOffset + SwiftStore_AtOffset x2 +LEAF_END Store_X2_AtOffset + +LEAF_ENTRY Store_X3_AtOffset + SwiftStore_AtOffset x3 +LEAF_END Store_X3_AtOffset + +.macro SwiftStoreFloat_AtOffset reg + ldr x11, [x10], #8 + and w11, w11, #0xFFFF + str \reg, [x9, x11] + ldr x11, [x10], #8 + br x11 +.endm + +LEAF_ENTRY Store_D0_AtOffset + SwiftStoreFloat_AtOffset d0 +LEAF_END Store_D0_AtOffset + +LEAF_ENTRY Store_D1_AtOffset + SwiftStoreFloat_AtOffset d1 +LEAF_END Store_D1_AtOffset + +LEAF_ENTRY Store_D2_AtOffset + SwiftStoreFloat_AtOffset d2 +LEAF_END Store_D2_AtOffset + +LEAF_ENTRY Store_D3_AtOffset + SwiftStoreFloat_AtOffset d3 +LEAF_END Store_D3_AtOffset + +NESTED_ENTRY CallJittedMethodRetSwiftLowered, _TEXT, NoHandler + PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -64 + stp x2, x4, [fp, #16] + str xzr, [fp, #56] + // Store the return address for the terminator to use + adr x11, LOCAL_LABEL(CallJittedMethodRetSwiftLowered_Epilog) + str x11, [fp, #32] + str x19, [fp, #48] + ldrsw x11, [x0, #-20] // Load TargetSlotIndex + add x11, x11, #1 // TargetSlotIndex + 1 + add x11, x0, x11, lsl #3 // x0 + (TargetSlotIndex + 1) * 8 + str x11, [fp, #40] // Save store routines start pointer + sub sp, sp, x3 + mov x10, x0 + mov x9, x1 + ldr x11, [x10], #8 + blr x11 + ldr x11, [fp, #56] + cbz x11, LOCAL_LABEL(CallJittedMethodRetSwiftLowered_NoSwiftError) + str x10, [x11] +LOCAL_LABEL(CallJittedMethodRetSwiftLowered_NoSwiftError): + ldr x12, [fp, #24] + str x2, [x12] // Store continuation return value + ldr x9, [fp, #16] // Load return buffer address into x9 + ldr x10, [fp, #40] // Load store routines start pointer + ldr x11, [x10], #8 // Load first store routine + br x11 +LOCAL_LABEL(CallJittedMethodRetSwiftLowered_Epilog): + 
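// Reached via SwiftLoweredReturnTerminator after the Store_*_AtOffset chain + // has copied the lowered return registers (x0-x3 / d0-d3) into the return buffer. +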
ldr x19, [fp, #48] // Restore x19 + EPILOG_STACK_RESTORE + EPILOG_RESTORE_REG_PAIR_INDEXED fp, lr, 64 + EPILOG_RETURN +NESTED_END CallJittedMethodRetSwiftLowered, _TEXT + +// Terminator routine branches back to the epilog +LEAF_ENTRY SwiftLoweredReturnTerminator + ldr x11, [fp, #32] + br x11 +LEAF_END SwiftLoweredReturnTerminator + #endif // FEATURE_INTERPRETER diff --git a/src/coreclr/vm/callstubgenerator.cpp b/src/coreclr/vm/callstubgenerator.cpp index 83d179985bb77c..d56da8aad60976 100644 --- a/src/coreclr/vm/callstubgenerator.cpp +++ b/src/coreclr/vm/callstubgenerator.cpp @@ -616,6 +616,16 @@ extern "C" void Load_D6_AtOffset(); extern "C" void Load_D7_AtOffset(); extern "C" void Load_Stack_AtOffset(); +extern "C" void Store_X0_AtOffset(); +extern "C" void Store_X1_AtOffset(); +extern "C" void Store_X2_AtOffset(); +extern "C" void Store_X3_AtOffset(); +extern "C" void Store_D0_AtOffset(); +extern "C" void Store_D1_AtOffset(); +extern "C" void Store_D2_AtOffset(); +extern "C" void Store_D3_AtOffset(); +extern "C" void SwiftLoweredReturnTerminator(); + extern "C" void Load_Ref_X0(); extern "C" void Load_Ref_X1(); extern "C" void Load_Ref_X2(); @@ -1894,7 +1904,7 @@ PCODE GPRegsRefStoreRoutines[] = #endif // TARGET_RISCV64 -#define LOG_COMPUTE_CALL_STUB 1 +#define LOG_COMPUTE_CALL_STUB 0 PCODE CallStubGenerator::GetStackRoutine() { @@ -2025,7 +2035,6 @@ PCODE CallStubGenerator::GetSwiftIndirectResultRoutine() return (PCODE)Load_SwiftIndirectResult; } -// Get offset-aware load routine for Swift struct lowering (GP registers) PCODE CallStubGenerator::GetSwiftLoadGPAtOffsetRoutine(int regIndex) { static PCODE routines[] = { @@ -2036,7 +2045,6 @@ PCODE CallStubGenerator::GetSwiftLoadGPAtOffsetRoutine(int regIndex) return routines[regIndex]; } -// Get offset-aware load routine for Swift struct lowering (FP registers) PCODE CallStubGenerator::GetSwiftLoadFPAtOffsetRoutine(int regIndex) { static PCODE routines[] = { @@ -2046,6 +2054,24 @@ PCODE CallStubGenerator::GetSwiftLoadFPAtOffsetRoutine(int regIndex) _ASSERTE(regIndex >= 0 && regIndex < 8); return routines[regIndex]; } + +PCODE CallStubGenerator::GetSwiftStoreGPAtOffsetRoutine(int regIndex) +{ + static PCODE routines[] = { + (PCODE)Store_X0_AtOffset, (PCODE)Store_X1_AtOffset, (PCODE)Store_X2_AtOffset, (PCODE)Store_X3_AtOffset + }; + _ASSERTE(regIndex >= 0 && regIndex < 4); + return routines[regIndex]; +} + +PCODE CallStubGenerator::GetSwiftStoreFPAtOffsetRoutine(int regIndex) +{ + static PCODE routines[] = { + (PCODE)Store_D0_AtOffset, (PCODE)Store_D1_AtOffset, (PCODE)Store_D2_AtOffset, (PCODE)Store_D3_AtOffset + }; + _ASSERTE(regIndex >= 0 && regIndex < 4); + return routines[regIndex]; +} #endif // TARGET_ARM64 extern "C" void CallJittedMethodRetVoid(PCODE *routines, int8_t*pArgs, int8_t*pRet, int totalStackSize, PTR_PTR_Object pContinuation); @@ -2100,6 +2126,7 @@ extern "C" void CallJittedMethodRetVector128(PCODE *routines, int8_t *pArgs, int extern "C" void CallJittedMethodRet2Vector128(PCODE *routines, int8_t *pArgs, int8_t *pRet, int totalStackSize, PTR_PTR_Object pContinuation); extern "C" void CallJittedMethodRet3Vector128(PCODE *routines, int8_t *pArgs, int8_t *pRet, int totalStackSize, PTR_PTR_Object pContinuation); extern "C" void CallJittedMethodRet4Vector128(PCODE *routines, int8_t *pArgs, int8_t *pRet, int totalStackSize, PTR_PTR_Object pContinuation); +extern "C" void CallJittedMethodRetSwiftLowered(PCODE *routines, int8_t*pArgs, int8_t*pRet, int totalStackSize, PTR_PTR_Object pContinuation); extern "C" void 
InterpreterStubRet2I8(); extern "C" void InterpreterStubRet2Double(); extern "C" void InterpreterStubRet3Double(); @@ -2116,6 +2143,7 @@ extern "C" void InterpreterStubRetVector128(); extern "C" void InterpreterStubRet2Vector128(); extern "C" void InterpreterStubRet3Vector128(); extern "C" void InterpreterStubRet4Vector128(); +extern "C" void InterpreterStubRetSwiftLowered(); #endif // TARGET_ARM64 #if defined(TARGET_RISCV64) @@ -2206,6 +2234,8 @@ CallStubHeader::InvokeFunctionPtr CallStubGenerator::GetInvokeFunctionPtr(CallSt INVOKE_FUNCTION_PTR(CallJittedMethodRet3Vector128); case ReturnType4Vector128: INVOKE_FUNCTION_PTR(CallJittedMethodRet4Vector128); + case ReturnTypeSwiftLowered: + INVOKE_FUNCTION_PTR(CallJittedMethodRetSwiftLowered); #endif // TARGET_ARM64 #if defined(TARGET_RISCV64) case ReturnType2I8: @@ -2301,6 +2331,8 @@ PCODE CallStubGenerator::GetInterpreterReturnTypeHandler(CallStubGenerator::Retu RETURN_TYPE_HANDLER(InterpreterStubRet3Vector128); case ReturnType4Vector128: RETURN_TYPE_HANDLER(InterpreterStubRet4Vector128); + case ReturnTypeSwiftLowered: + RETURN_TYPE_HANDLER(InterpreterStubRetSwiftLowered); #endif // TARGET_ARM64 #if defined(TARGET_RISCV64) case ReturnType2I8: @@ -2353,15 +2385,16 @@ CallStubHeader *CallStubGenerator::GenerateCallStub(MethodDesc *pMD, AllocMemTra S_SIZE_T finalStubSize(sizeof(CallStubHeader) + m_routineIndex * sizeof(PCODE)); void *pHeaderStorage = pamTracker->Track(pLoaderAllocator->GetHighFrequencyHeap()->AllocMem(finalStubSize)); - CallStubHeader *pHeader = new (pHeaderStorage) CallStubHeader(m_routineIndex, pRoutines, ALIGN_UP(m_totalStackSize, STACK_ALIGN_SIZE), sig.IsAsyncCall(), m_pInvokeFunction); + int targetSlotIndex = m_interpreterToNative ? m_targetSlotIndex : (m_routineIndex - 1); + CallStubHeader *pHeader = new (pHeaderStorage) CallStubHeader(m_routineIndex, targetSlotIndex, pRoutines, ALIGN_UP(m_totalStackSize, STACK_ALIGN_SIZE), sig.IsAsyncCall(), m_pInvokeFunction); return pHeader; } struct CachedCallStubKey { - CachedCallStubKey(int32_t hashCode, int numRoutines, PCODE *pRoutines, int totalStackSize, bool hasContinuationRet, CallStubHeader::InvokeFunctionPtr pInvokeFunction) - : HashCode(hashCode), NumRoutines(numRoutines), TotalStackSize(totalStackSize), HasContinuationRet(hasContinuationRet), Invoke(pInvokeFunction), Routines(pRoutines) + CachedCallStubKey(int32_t hashCode, int numRoutines, int targetSlotIndex, PCODE *pRoutines, int totalStackSize, bool hasContinuationRet, CallStubHeader::InvokeFunctionPtr pInvokeFunction) + : HashCode(hashCode), NumRoutines(numRoutines), TargetSlotIndex(targetSlotIndex), TotalStackSize(totalStackSize), HasContinuationRet(hasContinuationRet), Invoke(pInvokeFunction), Routines(pRoutines) { } @@ -2369,7 +2402,7 @@ struct CachedCallStubKey { LIMITED_METHOD_CONTRACT; - if (HashCode != other.HashCode || NumRoutines != other.NumRoutines || TotalStackSize != other.TotalStackSize || Invoke != other.Invoke || HasContinuationRet != other.HasContinuationRet) + if (HashCode != other.HashCode || NumRoutines != other.NumRoutines || TargetSlotIndex != other.TargetSlotIndex || TotalStackSize != other.TotalStackSize || Invoke != other.Invoke || HasContinuationRet != other.HasContinuationRet) return false; for (int i = 0; i < NumRoutines; i++) @@ -2382,6 +2415,7 @@ struct CachedCallStubKey const int32_t HashCode = 0; const int NumRoutines = 0; + const int TargetSlotIndex = 0; const int TotalStackSize = 0; const bool HasContinuationRet = false; const CallStubHeader::InvokeFunctionPtr Invoke = NULL; // 
Pointer to the invoke function @@ -2390,9 +2424,9 @@ struct CachedCallStubKey struct CachedCallStub { - CachedCallStub(int32_t hashCode, int numRoutines, PCODE *pRoutines, int totalStackSize, bool hasContinuationRet, CallStubHeader::InvokeFunctionPtr pInvokeFunction) : + CachedCallStub(int32_t hashCode, int numRoutines, int targetSlotIndex, PCODE *pRoutines, int totalStackSize, bool hasContinuationRet, CallStubHeader::InvokeFunctionPtr pInvokeFunction) : HashCode(hashCode), - Header(numRoutines, pRoutines, totalStackSize, hasContinuationRet, pInvokeFunction) + Header(numRoutines, targetSlotIndex, pRoutines, totalStackSize, hasContinuationRet, pInvokeFunction) { } @@ -2404,6 +2438,7 @@ struct CachedCallStub return CachedCallStubKey( HashCode, Header.NumRoutines, + Header.TargetSlotIndex, &Header.Routines[0], Header.TotalStackSize, Header.HasContinuationRet, @@ -2454,10 +2489,12 @@ CallStubHeader *CallStubGenerator::GenerateCallStubForSig(MetaSig &sig) hashState.Add(m_totalStackSize); hashState.AddPointer((void*)m_pInvokeFunction); hashState.Add(sig.IsAsyncCall() ? 1 : 0); + hashState.Add(m_targetSlotIndex); CachedCallStubKey cachedHeaderKey( hashState.ToHashCode(), m_routineIndex, + m_targetSlotIndex, pRoutines, ALIGN_UP(m_totalStackSize, STACK_ALIGN_SIZE), sig.IsAsyncCall(), @@ -2480,7 +2517,7 @@ CallStubHeader *CallStubGenerator::GenerateCallStubForSig(MetaSig &sig) // We only need to allocate the actual pRoutines array, and then we can just use the cachedHeader we already constructed size_t finalCachedCallStubSize = sizeof(CachedCallStub) + m_routineIndex * sizeof(PCODE); void* pHeaderStorage = amTracker.Track(SystemDomain::GetGlobalLoaderAllocator()->GetHighFrequencyHeap()->AllocMem(S_SIZE_T(finalCachedCallStubSize))); - CachedCallStub *pHeader = new (pHeaderStorage) CachedCallStub(cachedHeaderKey.HashCode, m_routineIndex, pRoutines, ALIGN_UP(m_totalStackSize, STACK_ALIGN_SIZE), sig.IsAsyncCall(), m_pInvokeFunction); + CachedCallStub *pHeader = new (pHeaderStorage) CachedCallStub(cachedHeaderKey.HashCode, m_routineIndex, m_targetSlotIndex, pRoutines, ALIGN_UP(m_totalStackSize, STACK_ALIGN_SIZE), sig.IsAsyncCall(), m_pInvokeFunction); s_callStubCache->Add(pHeader); amTracker.SuppressRelease(); @@ -3023,6 +3060,9 @@ void CallStubGenerator::ComputeCallStubWorker(bool hasUnmanagedCallConv, CorInfo int swiftLoweringCount = 0; int swiftIndirectResultCount = 0; + m_hasSwiftReturnLowering = false; + m_swiftReturnLowering = {}; + if (isSwiftCallConv) { if (!m_interpreterToNative) @@ -3030,6 +3070,27 @@ void CallStubGenerator::ComputeCallStubWorker(bool hasUnmanagedCallConv, CorInfo COMPlusThrow(kNotImplementedException); } + sig.Reset(); + TypeHandle thReturnType; + CorElementType retCorType = sig.GetReturnTypeNormalized(&thReturnType); + if (retCorType == ELEMENT_TYPE_VALUETYPE && !thReturnType.IsNull() && !thReturnType.IsTypeDesc()) + { + MethodTable* pRetMT = thReturnType.AsMethodTable(); + if (pRetMT->IsValueType() && !pRetMT->IsHFA() && !isIntrinsicSIMDType(pRetMT)) + { + CORINFO_SWIFT_LOWERING lowering = {}; + pRetMT->GetNativeSwiftPhysicalLowering(&lowering, false); + if (!lowering.byReference && lowering.numLoweredElements > 0) + { + m_hasSwiftReturnLowering = true; + m_swiftReturnLowering = lowering; +#if LOG_COMPUTE_CALL_STUB + printf("Swift return lowering detected: %d elements\n", lowering.numLoweredElements); +#endif + } + } + } + // Count how many extra arguments we need due to Swift lowering sig.Reset(); int newArgCount = 0; @@ -3520,7 +3581,47 @@ void 
CallStubGenerator::ComputeCallStubWorker(bool hasUnmanagedCallConv, CorInfo if (m_interpreterToNative) { m_pInvokeFunction = GetInvokeFunctionPtr(returnType); + m_targetSlotIndex = m_routineIndex; m_routineIndex++; // Reserve one extra slot for the target method pointer + +#ifdef TARGET_ARM64 + if (m_hasSwiftReturnLowering) + { + int gpRegIndex = 0; + int fpRegIndex = 0; + + for (size_t i = 0; i < m_swiftReturnLowering.numLoweredElements; i++) + { + CorInfoType elemType = m_swiftReturnLowering.loweredElements[i]; + uint32_t offset = m_swiftReturnLowering.offsets[i]; + + bool isFloat = (elemType == CORINFO_TYPE_FLOAT || elemType == CORINFO_TYPE_DOUBLE); + + if (isFloat) + { + _ASSERTE(fpRegIndex < 4); + pRoutines[m_routineIndex++] = GetSwiftStoreFPAtOffsetRoutine(fpRegIndex); + pRoutines[m_routineIndex++] = (PCODE)offset; + fpRegIndex++; +#if LOG_COMPUTE_CALL_STUB + printf("Swift return store FP d%d at offset %d\n", fpRegIndex - 1, offset); +#endif + } + else + { + _ASSERTE(gpRegIndex < 4); + pRoutines[m_routineIndex++] = GetSwiftStoreGPAtOffsetRoutine(gpRegIndex); + pRoutines[m_routineIndex++] = (PCODE)offset; + gpRegIndex++; +#if LOG_COMPUTE_CALL_STUB + printf("Swift return store GP x%d at offset %d\n", gpRegIndex - 1, offset); +#endif + } + } + + pRoutines[m_routineIndex++] = (PCODE)SwiftLoweredReturnTerminator; + } +#endif // TARGET_ARM64 } else { @@ -3736,6 +3837,13 @@ void CallStubGenerator::ProcessArgument(ArgIteratorType *pArgIt, ArgLocDesc& arg template CallStubGenerator::ReturnType CallStubGenerator::GetReturnType(ArgIteratorType *pArgIt) { +#ifdef TARGET_ARM64 + if (m_hasSwiftReturnLowering) + { + return ReturnTypeSwiftLowered; + } +#endif // TARGET_ARM64 + if (pArgIt->HasRetBuffArg()) { #ifdef TARGET_AMD64 diff --git a/src/coreclr/vm/callstubgenerator.h b/src/coreclr/vm/callstubgenerator.h index 6b2c5ec8f3a751..584b3871ea4d5d 100644 --- a/src/coreclr/vm/callstubgenerator.h +++ b/src/coreclr/vm/callstubgenerator.h @@ -15,8 +15,12 @@ struct CallStubHeader { typedef void (*InvokeFunctionPtr)(PCODE *routines, int8_t*pArgs, int8_t*pRet, int totalStackSize, PTR_PTR_Object pContinuationRet); - // Number of routines in the Routines array. The last one is the target method to call. + // Number of routines in the Routines array. int NumRoutines; + // Index of the target method slot within the Routines array. + // For normal calls, this is NumRoutines - 1. + // For Swift lowered returns, store routines follow the target slot. + int TargetSlotIndex; // Total stack size used for the arguments. int TotalStackSize; bool HasContinuationRet; // Indicates whether the stub supports returning a continuation @@ -26,11 +30,12 @@ struct CallStubHeader // This is an array of routines that translate the arguments from the interpreter stack to the CPU registers and native stack. 
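// For a Swift lowered return the layout is: [ argument load routines | target at TargetSlotIndex | (store routine, byte offset) pairs | SwiftLoweredReturnTerminator ]; for all other calls the target pointer is simply the last entry.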
PCODE Routines[0]; - CallStubHeader(int numRoutines, PCODE *pRoutines, int totalStackSize, bool hasContinuationRet, InvokeFunctionPtr pInvokeFunction) + CallStubHeader(int numRoutines, int targetSlotIndex, PCODE *pRoutines, int totalStackSize, bool hasContinuationRet, InvokeFunctionPtr pInvokeFunction) { LIMITED_METHOD_CONTRACT; NumRoutines = numRoutines; + TargetSlotIndex = targetSlotIndex; TotalStackSize = totalStackSize; Invoke = pInvokeFunction; HasContinuationRet = hasContinuationRet; @@ -43,14 +48,14 @@ struct CallStubHeader { LIMITED_METHOD_CONTRACT; - VolatileStore(&Routines[NumRoutines - 1], target); + VolatileStore(&Routines[TargetSlotIndex], target); } PCODE GetTarget() { LIMITED_METHOD_CONTRACT; - return VolatileLoadWithoutBarrier(&Routines[NumRoutines - 1]); + return VolatileLoadWithoutBarrier(&Routines[TargetSlotIndex]); } size_t GetSize() @@ -100,6 +105,7 @@ class CallStubGenerator ReturnType2Vector128, ReturnType3Vector128, ReturnType4Vector128, + ReturnTypeSwiftLowered, #endif // TARGET_ARM64 #if defined(TARGET_RISCV64) ReturnType2I8, @@ -141,6 +147,8 @@ class CallStubGenerator int m_s2 = NoRange; // The index of the next routine to store in the Routines array. int m_routineIndex = 0; + // The index of the target method slot in the Routines array. + int m_targetSlotIndex = -1; // The total stack size used for the arguments. int m_totalStackSize = 0; #ifdef TARGET_ARM64 @@ -148,6 +156,9 @@ class CallStubGenerator int m_swiftSelfByRefSize = 0; // Track if SwiftIndirectResult was used bool m_hasSwiftIndirectResult = false; + // Swift return lowering info + CORINFO_SWIFT_LOWERING m_swiftReturnLowering = {}; + bool m_hasSwiftReturnLowering = false; #endif CallStubHeader::InvokeFunctionPtr m_pInvokeFunction = NULL; @@ -173,6 +184,8 @@ class CallStubGenerator PCODE GetSwiftIndirectResultRoutine(); PCODE GetSwiftLoadGPAtOffsetRoutine(int regIndex); PCODE GetSwiftLoadFPAtOffsetRoutine(int regIndex); + PCODE GetSwiftStoreGPAtOffsetRoutine(int regIndex); + PCODE GetSwiftStoreFPAtOffsetRoutine(int regIndex); #endif PCODE GetGPRegRangeRoutine(int r1, int r2); template @@ -195,7 +208,8 @@ class CallStubGenerator // The size of the temporary storage is the size of the CallStubHeader plus the size of the routines array. // The size of the routines array is three times the number of arguments plus one slot for the target method pointer. - return sizeof(CallStubHeader) + ((numArgs + 1) * 3 + 1) * sizeof(PCODE); + // Add extra space for Swift return lowering (up to 4 elements * 2 slots + terminator = 9 slots). 
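+ // (Up to 4 store routines + 4 offset operands + 1 terminator = 9 extra slots; together with the single target slot that gives the "+ 10" below.)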
+ return sizeof(CallStubHeader) + ((numArgs + 1) * 3 + 10) * sizeof(PCODE); } void ComputeCallStub(MetaSig &sig, PCODE *pRoutines, MethodDesc *pMD); template From dccc970f95e09c98ad7d0e8bfc6bc9762bdcfd6b Mon Sep 17 00:00:00 2001 From: Milos Kotlar Date: Fri, 16 Jan 2026 10:18:05 +0100 Subject: [PATCH 08/33] Disable UnmanagedCallersOnly tests --- .../SwiftCallbackAbiStress.cs | 100 ++++++++++++++++++ .../SwiftErrorHandling/SwiftErrorHandling.cs | 2 + .../SwiftIndirectResult.cs | 1 + 3 files changed, 103 insertions(+) diff --git a/src/tests/Interop/Swift/SwiftCallbackAbiStress/SwiftCallbackAbiStress.cs b/src/tests/Interop/Swift/SwiftCallbackAbiStress/SwiftCallbackAbiStress.cs index efa305e1cf89da..ec18ffffca7477 100644 --- a/src/tests/Interop/Swift/SwiftCallbackAbiStress/SwiftCallbackAbiStress.cs +++ b/src/tests/Interop/Swift/SwiftCallbackAbiStress/SwiftCallbackAbiStress.cs @@ -61,6 +61,7 @@ private static int SwiftCallbackFunc0Callback(short a0, int a1, ulong a2, ushort } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc0() { Console.Write("Running SwiftCallbackFunc0: "); @@ -172,6 +173,7 @@ private static byte SwiftCallbackFunc1Callback(long a0, double a1, sbyte a2, F1_ } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc1() { Console.Write("Running SwiftCallbackFunc1: "); @@ -263,6 +265,7 @@ private static sbyte SwiftCallbackFunc2Callback(F2_S0 a0, F2_S1 a1, F2_S2 a2, fl } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc2() { Console.Write("Running SwiftCallbackFunc2: "); @@ -373,6 +376,7 @@ private static F3_Ret SwiftCallbackFunc3Callback(F3_S0 a0, float a1, ushort a2, } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc3() { Console.Write("Running SwiftCallbackFunc3: "); @@ -458,6 +462,7 @@ private static F4_Ret SwiftCallbackFunc4Callback(double a0, F4_S0 a1, byte a2, i } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc4() { Console.Write("Running SwiftCallbackFunc4: "); @@ -586,6 +591,7 @@ private static F5_Ret SwiftCallbackFunc5Callback(byte a0, short a1, ulong a2, nu } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc5() { Console.Write("Running SwiftCallbackFunc5: "); @@ -710,6 +716,7 @@ private static F6_Ret SwiftCallbackFunc6Callback(float a0, F6_S0 a1, long a2, sb } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc6() { Console.Write("Running SwiftCallbackFunc6: "); @@ -783,6 +790,7 @@ private static ushort SwiftCallbackFunc7Callback(long a0, byte a1, double a2, us } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", 
typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc7() { Console.Write("Running SwiftCallbackFunc7: "); @@ -864,6 +872,7 @@ private static F8_Ret SwiftCallbackFunc8Callback(F8_S0 a0, F8_S1 a1, SwiftSelf s } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc8() { Console.Write("Running SwiftCallbackFunc8: "); @@ -1010,6 +1019,7 @@ private static ushort SwiftCallbackFunc9Callback(sbyte a0, byte a1, long a2, F9_ } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc9() { Console.Write("Running SwiftCallbackFunc9: "); @@ -1059,6 +1069,7 @@ private static F10_Ret SwiftCallbackFunc10Callback(short a0, SwiftSelf self) } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc10() { Console.Write("Running SwiftCallbackFunc10: "); @@ -1167,6 +1178,7 @@ private static F11_Ret SwiftCallbackFunc11Callback(uint a0, nuint a1, ulong a2, } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc11() { Console.Write("Running SwiftCallbackFunc11: "); @@ -1249,6 +1261,7 @@ private static F12_Ret SwiftCallbackFunc12Callback(F12_S0 a0, short a1, ulong a2 } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc12() { Console.Write("Running SwiftCallbackFunc12: "); @@ -1350,6 +1363,7 @@ private static double SwiftCallbackFunc13Callback(F13_S0 a0, int a1, nint a2, us } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc13() { Console.Write("Running SwiftCallbackFunc13: "); @@ -1405,6 +1419,7 @@ private static long SwiftCallbackFunc14Callback(long a0, F14_S0 a1, sbyte a2, ul } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc14() { Console.Write("Running SwiftCallbackFunc14: "); @@ -1487,6 +1502,7 @@ private static nint SwiftCallbackFunc15Callback(byte a0, ushort a1, ulong a2, ul } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc15() { Console.Write("Running SwiftCallbackFunc15: "); @@ -1586,6 +1602,7 @@ private static sbyte SwiftCallbackFunc16Callback(F16_S0 a0, short a1, float a2, } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc16() { Console.Write("Running SwiftCallbackFunc16: "); @@ -1655,6 +1672,7 @@ private static double SwiftCallbackFunc17Callback(uint a0, F17_S0 a1, F17_S1 a2, } [Fact] + 
[ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc17() { Console.Write("Running SwiftCallbackFunc17: "); @@ -1740,6 +1758,7 @@ private static F18_Ret SwiftCallbackFunc18Callback(F18_S0 a0, F18_S1 a1, F18_S2 } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc18() { Console.Write("Running SwiftCallbackFunc18: "); @@ -1853,6 +1872,7 @@ private static F19_Ret SwiftCallbackFunc19Callback(long a0, byte a1, F19_S0 a2, } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc19() { Console.Write("Running SwiftCallbackFunc19: "); @@ -1958,6 +1978,7 @@ private static F20_Ret SwiftCallbackFunc20Callback(F20_S0 a0, F20_S1 a1, float a } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc20() { Console.Write("Running SwiftCallbackFunc20: "); @@ -2032,6 +2053,7 @@ private static F21_Ret SwiftCallbackFunc21Callback(int a0, short a1, F21_S0 a2, } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc21() { Console.Write("Running SwiftCallbackFunc21: "); @@ -2155,6 +2177,7 @@ private static F22_Ret SwiftCallbackFunc22Callback(int a0, F22_S0 a1, F22_S1 a2, } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc22() { Console.Write("Running SwiftCallbackFunc22: "); @@ -2209,6 +2232,7 @@ private static double SwiftCallbackFunc23Callback(nuint a0, byte a1, sbyte a2, b } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc23() { Console.Write("Running SwiftCallbackFunc23: "); @@ -2312,6 +2336,7 @@ private static float SwiftCallbackFunc24Callback(int a0, nuint a1, F24_S0 a2, us } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc24() { Console.Write("Running SwiftCallbackFunc24: "); @@ -2413,6 +2438,7 @@ private static F25_Ret SwiftCallbackFunc25Callback(F25_S0 a0, ushort a1, nuint a } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc25() { Console.Write("Running SwiftCallbackFunc25: "); @@ -2509,6 +2535,7 @@ private static F26_Ret SwiftCallbackFunc26Callback(sbyte a0, byte a1, uint a2, F } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc26() { Console.Write("Running SwiftCallbackFunc26: "); @@ -2600,6 +2627,7 @@ private static float SwiftCallbackFunc27Callback(ulong a0, byte a1, F27_S0 a2, 
b } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc27() { Console.Write("Running SwiftCallbackFunc27: "); @@ -2699,6 +2727,7 @@ private static F28_Ret SwiftCallbackFunc28Callback(uint a0, ushort a1, sbyte a2, } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc28() { Console.Write("Running SwiftCallbackFunc28: "); @@ -2826,6 +2855,7 @@ private static F29_Ret SwiftCallbackFunc29Callback(F29_S0 a0, nint a1, ulong a2, } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc29() { Console.Write("Running SwiftCallbackFunc29: "); @@ -2904,6 +2934,7 @@ private static float SwiftCallbackFunc30Callback(F30_S0 a0, F30_S1 a1, F30_S2 a2 } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc30() { Console.Write("Running SwiftCallbackFunc30: "); @@ -2979,6 +3010,7 @@ private static F31_Ret SwiftCallbackFunc31Callback(F31_S0 a0, double a1, SwiftSe } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc31() { Console.Write("Running SwiftCallbackFunc31: "); @@ -3032,6 +3064,7 @@ private static F32_Ret SwiftCallbackFunc32Callback(ushort a0, short a1, SwiftSel } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc32() { Console.Write("Running SwiftCallbackFunc32: "); @@ -3137,6 +3170,7 @@ private static nuint SwiftCallbackFunc33Callback(F33_S0 a0, float a1, F33_S1 a2, } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc33() { Console.Write("Running SwiftCallbackFunc33: "); @@ -3186,6 +3220,7 @@ private static ushort SwiftCallbackFunc34Callback(uint a0, F34_S0 a1, nuint a2, } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc34() { Console.Write("Running SwiftCallbackFunc34: "); @@ -3275,6 +3310,7 @@ private static ulong SwiftCallbackFunc35Callback(byte a0, sbyte a1, float a2, lo } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc35() { Console.Write("Running SwiftCallbackFunc35: "); @@ -3325,6 +3361,7 @@ private static nint SwiftCallbackFunc36Callback(nuint a0, double a1, nuint a2, b } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc36() { Console.Write("Running SwiftCallbackFunc36: "); @@ -3411,6 +3448,7 @@ private static F37_Ret SwiftCallbackFunc37Callback(ulong a0, 
F37_S0 a1, double a } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc37() { Console.Write("Running SwiftCallbackFunc37: "); @@ -3489,6 +3527,7 @@ private static double SwiftCallbackFunc38Callback(F38_S0 a0, F38_S1 a1, double a } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc38() { Console.Write("Running SwiftCallbackFunc38: "); @@ -3585,6 +3624,7 @@ private static nint SwiftCallbackFunc39Callback(F39_S0 a0, nuint a1, uint a2, do } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc39() { Console.Write("Running SwiftCallbackFunc39: "); @@ -3673,6 +3713,7 @@ private static nuint SwiftCallbackFunc40Callback(F40_S0 a0, uint a1, byte a2, F4 } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc40() { Console.Write("Running SwiftCallbackFunc40: "); @@ -3728,6 +3769,7 @@ private static F41_Ret SwiftCallbackFunc41Callback(F41_S0 a0, SwiftSelf self) } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc41() { Console.Write("Running SwiftCallbackFunc41: "); @@ -3786,6 +3828,7 @@ private static nint SwiftCallbackFunc42Callback(int a0, uint a1, F42_S0 a2, floa } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc42() { Console.Write("Running SwiftCallbackFunc42: "); @@ -3846,6 +3889,7 @@ private static F43_Ret SwiftCallbackFunc43Callback(F43_S0 a0, F43_S1 a1, SwiftSe } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc43() { Console.Write("Running SwiftCallbackFunc43: "); @@ -3950,6 +3994,7 @@ private static F44_Ret SwiftCallbackFunc44Callback(double a0, F44_S0 a1, F44_S1 } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc44() { Console.Write("Running SwiftCallbackFunc44: "); @@ -4034,6 +4079,7 @@ private static F45_Ret SwiftCallbackFunc45Callback(F45_S0 a0, F45_S1 a1, byte a2 } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc45() { Console.Write("Running SwiftCallbackFunc45: "); @@ -4093,6 +4139,7 @@ private static F46_Ret SwiftCallbackFunc46Callback(nint a0, nuint a1, ushort a2, } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc46() { Console.Write("Running SwiftCallbackFunc46: "); @@ -4209,6 +4256,7 @@ private static F47_Ret 
SwiftCallbackFunc47Callback(nint a0, float a1, uint a2, F } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc47() { Console.Write("Running SwiftCallbackFunc47: "); @@ -4284,6 +4332,7 @@ private static long SwiftCallbackFunc48Callback(sbyte a0, short a1, short a2, ui } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc48() { Console.Write("Running SwiftCallbackFunc48: "); @@ -4352,6 +4401,7 @@ private static F49_Ret SwiftCallbackFunc49Callback(F49_S0 a0, long a1, SwiftSelf } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc49() { Console.Write("Running SwiftCallbackFunc49: "); @@ -4469,6 +4519,7 @@ private static byte SwiftCallbackFunc50Callback(F50_S0 a0, F50_S1 a1, byte a2, F } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc50() { Console.Write("Running SwiftCallbackFunc50: "); @@ -4529,6 +4580,7 @@ private static F51_Ret SwiftCallbackFunc51Callback(short a0, nuint a1, F51_S0 a2 } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc51() { Console.Write("Running SwiftCallbackFunc51: "); @@ -4602,6 +4654,7 @@ private static F52_Ret SwiftCallbackFunc52Callback(nint a0, F52_S0 a1, short a2, } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc52() { Console.Write("Running SwiftCallbackFunc52: "); @@ -4761,6 +4814,7 @@ private static F53_Ret SwiftCallbackFunc53Callback(F53_S0 a0, byte a1, long a2, } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc53() { Console.Write("Running SwiftCallbackFunc53: "); @@ -4880,6 +4934,7 @@ private static F54_Ret SwiftCallbackFunc54Callback(ushort a0, F54_S0 a1, float a } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc54() { Console.Write("Running SwiftCallbackFunc54: "); @@ -4978,6 +5033,7 @@ private static F55_Ret SwiftCallbackFunc55Callback(F55_S0 a0, long a1, F55_S1 a2 } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc55() { Console.Write("Running SwiftCallbackFunc55: "); @@ -5021,6 +5077,7 @@ private static uint SwiftCallbackFunc56Callback(F56_S0 a0, SwiftSelf self) } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc56() { Console.Write("Running SwiftCallbackFunc56: "); @@ -5112,6 +5169,7 @@ private static 
F57_Ret SwiftCallbackFunc57Callback(sbyte a0, nuint a1, uint a2, } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc57() { Console.Write("Running SwiftCallbackFunc57: "); @@ -5186,6 +5244,7 @@ private static nint SwiftCallbackFunc58Callback(ulong a0, sbyte a1, nint a2, F58 } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc58() { Console.Write("Running SwiftCallbackFunc58: "); @@ -5220,6 +5279,7 @@ private static ulong SwiftCallbackFunc59Callback(ushort a0, long a1, nint a2, Sw } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc59() { Console.Write("Running SwiftCallbackFunc59: "); @@ -5276,6 +5336,7 @@ private static ulong SwiftCallbackFunc60Callback(float a0, double a1, long a2, u } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc60() { Console.Write("Running SwiftCallbackFunc60: "); @@ -5377,6 +5438,7 @@ private static uint SwiftCallbackFunc61Callback(uint a0, uint a1, F61_S0 a2, F61 } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc61() { Console.Write("Running SwiftCallbackFunc61: "); @@ -5432,6 +5494,7 @@ private static F62_Ret SwiftCallbackFunc62Callback(F62_S0 a0, SwiftSelf self) } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc62() { Console.Write("Running SwiftCallbackFunc62: "); @@ -5474,6 +5537,7 @@ private static float SwiftCallbackFunc63Callback(F63_S0 a0, short a1, SwiftSelf } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc63() { Console.Write("Running SwiftCallbackFunc63: "); @@ -5558,6 +5622,7 @@ private static F64_Ret SwiftCallbackFunc64Callback(sbyte a0, F64_S0 a1, F64_S1 a } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc64() { Console.Write("Running SwiftCallbackFunc64: "); @@ -5657,6 +5722,7 @@ private static F65_Ret SwiftCallbackFunc65Callback(F65_S0 a0, short a1, double a } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc65() { Console.Write("Running SwiftCallbackFunc65: "); @@ -5724,6 +5790,7 @@ private static F66_Ret SwiftCallbackFunc66Callback(long a0, SwiftSelf self) } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc66() { Console.Write("Running SwiftCallbackFunc66: "); @@ -5822,6 +5889,7 @@ private 
static int SwiftCallbackFunc67Callback(double a0, F67_S0 a1, float a2, F } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc67() { Console.Write("Running SwiftCallbackFunc67: "); @@ -5931,6 +5999,7 @@ private static F68_Ret SwiftCallbackFunc68Callback(byte a0, float a1, int a2, ni } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc68() { Console.Write("Running SwiftCallbackFunc68: "); @@ -6030,6 +6099,7 @@ private static F69_Ret SwiftCallbackFunc69Callback(F69_S0 a0, nint a1, int a2, F } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc69() { Console.Write("Running SwiftCallbackFunc69: "); @@ -6146,6 +6216,7 @@ private static F70_Ret SwiftCallbackFunc70Callback(short a0, byte a1, nint a2, u } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc70() { Console.Write("Running SwiftCallbackFunc70: "); @@ -6201,6 +6272,7 @@ private static ulong SwiftCallbackFunc71Callback(F71_S0 a0, F71_S1 a1, SwiftSelf } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc71() { Console.Write("Running SwiftCallbackFunc71: "); @@ -6258,6 +6330,7 @@ private static F72_Ret SwiftCallbackFunc72Callback(F72_S0 a0, long a1, sbyte a2, } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc72() { Console.Write("Running SwiftCallbackFunc72: "); @@ -6358,6 +6431,7 @@ private static sbyte SwiftCallbackFunc73Callback(double a0, float a1, F73_S0 a2, } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc73() { Console.Write("Running SwiftCallbackFunc73: "); @@ -6416,6 +6490,7 @@ private static long SwiftCallbackFunc74Callback(F74_S0 a0, F74_S1 a1, short a2, } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc74() { Console.Write("Running SwiftCallbackFunc74: "); @@ -6509,6 +6584,7 @@ private static F75_Ret SwiftCallbackFunc75Callback(sbyte a0, sbyte a1, sbyte a2, } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc75() { Console.Write("Running SwiftCallbackFunc75: "); @@ -6616,6 +6692,7 @@ private static ulong SwiftCallbackFunc76Callback(byte a0, F76_S0 a1, sbyte a2, F } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc76() { Console.Write("Running SwiftCallbackFunc76: "); @@ -6707,6 
+6784,7 @@ private static F77_Ret SwiftCallbackFunc77Callback(double a0, F77_S0 a1, F77_S1 } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc77() { Console.Write("Running SwiftCallbackFunc77: "); @@ -6803,6 +6881,7 @@ private static double SwiftCallbackFunc78Callback(ulong a0, F78_S0 a1, ulong a2, } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc78() { Console.Write("Running SwiftCallbackFunc78: "); @@ -6865,6 +6944,7 @@ private static F79_Ret SwiftCallbackFunc79Callback(F79_S0 a0, float a1, SwiftSel } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc79() { Console.Write("Running SwiftCallbackFunc79: "); @@ -6944,6 +7024,7 @@ private static float SwiftCallbackFunc80Callback(ulong a0, nint a1, int a2, shor } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc80() { Console.Write("Running SwiftCallbackFunc80: "); @@ -7005,6 +7086,7 @@ private static F81_Ret SwiftCallbackFunc81Callback(byte a0, uint a1, byte a2, F8 } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc81() { Console.Write("Running SwiftCallbackFunc81: "); @@ -7103,6 +7185,7 @@ private static float SwiftCallbackFunc82Callback(long a0, F82_S0 a1, short a2, s } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc82() { Console.Write("Running SwiftCallbackFunc82: "); @@ -7154,6 +7237,7 @@ private static F83_Ret SwiftCallbackFunc83Callback(sbyte a0, F83_S0 a1, short a2 } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc83() { Console.Write("Running SwiftCallbackFunc83: "); @@ -7258,6 +7342,7 @@ private static nint SwiftCallbackFunc84Callback(int a0, F84_S0 a1, F84_S1 a2, do } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc84() { Console.Write("Running SwiftCallbackFunc84: "); @@ -7380,6 +7465,7 @@ private static F85_Ret SwiftCallbackFunc85Callback(F85_S0 a0, F85_S1 a1, uint a2 } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc85() { Console.Write("Running SwiftCallbackFunc85: "); @@ -7484,6 +7570,7 @@ private static F86_Ret SwiftCallbackFunc86Callback(float a0, short a1, nint a2, } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc86() { Console.Write("Running SwiftCallbackFunc86: 
"); @@ -7538,6 +7625,7 @@ private static ulong SwiftCallbackFunc87Callback(float a0, nint a1, F87_S0 a2, F } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc87() { Console.Write("Running SwiftCallbackFunc87: "); @@ -7632,6 +7720,7 @@ private static F88_Ret SwiftCallbackFunc88Callback(F88_S0 a0, F88_S1 a1, float a } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc88() { Console.Write("Running SwiftCallbackFunc88: "); @@ -7701,6 +7790,7 @@ private static F89_Ret SwiftCallbackFunc89Callback(F89_S0 a0, SwiftSelf self) } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc89() { Console.Write("Running SwiftCallbackFunc89: "); @@ -7809,6 +7899,7 @@ private static F90_Ret SwiftCallbackFunc90Callback(long a0, float a1, F90_S0 a2, } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc90() { Console.Write("Running SwiftCallbackFunc90: "); @@ -7929,6 +8020,7 @@ private static F91_Ret SwiftCallbackFunc91Callback(F91_S0 a0, short a1, uint a2, } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc91() { Console.Write("Running SwiftCallbackFunc91: "); @@ -8022,6 +8114,7 @@ private static F92_Ret SwiftCallbackFunc92Callback(uint a0, long a1, F92_S0 a2, } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc92() { Console.Write("Running SwiftCallbackFunc92: "); @@ -8085,6 +8178,7 @@ private static F93_Ret SwiftCallbackFunc93Callback(nuint a0, ushort a1, double a } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc93() { Console.Write("Running SwiftCallbackFunc93: "); @@ -8194,6 +8288,7 @@ private static F94_Ret SwiftCallbackFunc94Callback(F94_S0 a0, short a1, F94_S1 a } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc94() { Console.Write("Running SwiftCallbackFunc94: "); @@ -8283,6 +8378,7 @@ private static F95_Ret SwiftCallbackFunc95Callback(F95_S0 a0, nuint a1, F95_S1 a } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc95() { Console.Write("Running SwiftCallbackFunc95: "); @@ -8353,6 +8449,7 @@ private static ulong SwiftCallbackFunc96Callback(uint a0, F96_S0 a1, float a2, u } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc96() { Console.Write("Running 
SwiftCallbackFunc96: "); @@ -8446,6 +8543,7 @@ private static F97_Ret SwiftCallbackFunc97Callback(F97_S0 a0, F97_S1 a1, F97_S2 } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc97() { Console.Write("Running SwiftCallbackFunc97: "); @@ -8491,6 +8589,7 @@ private static nint SwiftCallbackFunc98Callback(float a0, ushort a1, F98_S0 a2, } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc98() { Console.Write("Running SwiftCallbackFunc98: "); @@ -8557,6 +8656,7 @@ private static ulong SwiftCallbackFunc99Callback(long a0, nuint a1, float a2, us } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSwiftCallbackFunc99() { Console.Write("Running SwiftCallbackFunc99: "); diff --git a/src/tests/Interop/Swift/SwiftErrorHandling/SwiftErrorHandling.cs b/src/tests/Interop/Swift/SwiftErrorHandling/SwiftErrorHandling.cs index f903c53f1dd0af..732215edc5f473 100644 --- a/src/tests/Interop/Swift/SwiftErrorHandling/SwiftErrorHandling.cs +++ b/src/tests/Interop/Swift/SwiftErrorHandling/SwiftErrorHandling.cs @@ -128,6 +128,7 @@ public unsafe static void TestSwiftErrorOnStackNotThrown() } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static unsafe void TestUnmanagedCallersOnly() { SwiftError error; @@ -143,6 +144,7 @@ public static unsafe void TestUnmanagedCallersOnly() } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static unsafe void TestUnmanagedCallersOnlyWithReturn() { SwiftError error; diff --git a/src/tests/Interop/Swift/SwiftIndirectResult/SwiftIndirectResult.cs b/src/tests/Interop/Swift/SwiftIndirectResult/SwiftIndirectResult.cs index 178c71604fa4e4..30f858ede96393 100644 --- a/src/tests/Interop/Swift/SwiftIndirectResult/SwiftIndirectResult.cs +++ b/src/tests/Interop/Swift/SwiftIndirectResult/SwiftIndirectResult.cs @@ -45,6 +45,7 @@ private static void ReversePInvokeReturnNonFrozenStruct(SwiftIndirectResult resu } [Fact] + [ActiveIssue("https://github.com/dotnet/runtime/issues/120049", typeof(TestLibrary.Utilities), nameof(TestLibrary.Utilities.IsCoreClrInterpreter))] public static void TestSumReturnedNonFrozenStruct() { int result = SumReturnedNonFrozenStruct(&ReversePInvokeReturnNonFrozenStruct, null); From 1c0cbc6838451aa1577551113826b610ba270cc0 Mon Sep 17 00:00:00 2001 From: Milos Kotlar Date: Fri, 16 Jan 2026 10:49:01 +0100 Subject: [PATCH 09/33] Remove obsolete code and mirror unix and windows stubs on arm64 --- src/coreclr/vm/arm64/asmhelpers.S | 106 +++--- src/coreclr/vm/arm64/asmhelpers.asm | 485 ++++++++++++++++++++++----- src/coreclr/vm/callstubgenerator.cpp | 95 +----- src/coreclr/vm/callstubgenerator.h | 2 +- 4 files changed, 459 insertions(+), 229 deletions(-) diff --git a/src/coreclr/vm/arm64/asmhelpers.S b/src/coreclr/vm/arm64/asmhelpers.S index 7ddbd41867ba32..df07cab1ceceb6 100644 --- a/src/coreclr/vm/arm64/asmhelpers.S +++ b/src/coreclr/vm/arm64/asmhelpers.S @@ -1008,8 +1008,8 @@ NESTED_ENTRY InterpreterStubRet4Vector128, _TEXT, 
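The hunk below rewords the comment on InterpreterStubRetSwiftLowered, the stub that materializes a Swift lowered return: ExecuteInterpretedMethod hands back a buffer address, and the stub loads x0-x3 from its first 32 bytes and d0-d3 from the next 32. A minimal C++ sketch of that layout (the struct name is an assumption for illustration only):

#include <cstdint>

// Mirrors the ldp sequence in InterpreterStubRetSwiftLowered:
// ldp x0, x1, [x9] / ldp x2, x3, [x9, #16] / ldp d0, d1, [x9, #32] / ldp d2, d3, [x9, #48]
struct SwiftLoweredReturnBuffer
{
    uint64_t gp[4]; // -> x0, x1, x2, x3
    double   fp[4]; // -> d0, d1, d2, d3
};

static_assert(sizeof(SwiftLoweredReturnBuffer) == 64, "the stub reads exactly 64 bytes");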
NoHandler EPILOG_RETURN NESTED_END InterpreterStubRet4Vector128, _TEXT -// This stub is for when an interpreted method is called with Swift calling convention -// and returns a lowered struct. This stub writes return values to a buffer. +// When interpreted method is called with Swift calling convention and returns a lowered struct, +// this stub writes return values to a buffer. NESTED_ENTRY InterpreterStubRetSwiftLowered, _TEXT, NoHandler PROLOG_SAVE_REG_PAIR_NO_FP_INDEXED fp, lr, -16 add x0, sp, #__PWTB_TransitionBlock + 16 @@ -1258,7 +1258,6 @@ LEAF_ENTRY Load_X7_AtOffset SwiftLoad_AtOffset x7 LEAF_END Load_X7_AtOffset -// Float versions for Swift lowering .macro SwiftLoadFloat_AtOffset reg ldr x12, [x10], #8 // Load offset|struct_size and w11, w12, #0xFFFF // Extract offset (lower 16 bits) @@ -1318,6 +1317,54 @@ LEAF_ENTRY Load_Stack_AtOffset EPILOG_BRANCH_REG x11 LEAF_END Load_Stack_AtOffset +.macro SwiftStore_AtOffset reg + ldr x11, [x10], #8 + and w11, w11, #0xFFFF + str \reg, [x9, x11] + ldr x11, [x10], #8 + br x11 +.endm + +LEAF_ENTRY Store_X0_AtOffset + SwiftStore_AtOffset x0 +LEAF_END Store_X0_AtOffset + +LEAF_ENTRY Store_X1_AtOffset + SwiftStore_AtOffset x1 +LEAF_END Store_X1_AtOffset + +LEAF_ENTRY Store_X2_AtOffset + SwiftStore_AtOffset x2 +LEAF_END Store_X2_AtOffset + +LEAF_ENTRY Store_X3_AtOffset + SwiftStore_AtOffset x3 +LEAF_END Store_X3_AtOffset + +.macro SwiftStoreFloat_AtOffset reg + ldr x11, [x10], #8 + and w11, w11, #0xFFFF + str \reg, [x9, x11] + ldr x11, [x10], #8 + br x11 +.endm + +LEAF_ENTRY Store_D0_AtOffset + SwiftStoreFloat_AtOffset d0 +LEAF_END Store_D0_AtOffset + +LEAF_ENTRY Store_D1_AtOffset + SwiftStoreFloat_AtOffset d1 +LEAF_END Store_D1_AtOffset + +LEAF_ENTRY Store_D2_AtOffset + SwiftStoreFloat_AtOffset d2 +LEAF_END Store_D2_AtOffset + +LEAF_ENTRY Store_D3_AtOffset + SwiftStoreFloat_AtOffset d3 +LEAF_END Store_D3_AtOffset + LEAF_ENTRY Store_X0 str x0, [x9], #8 ldr x11, [x10], #8 @@ -3003,54 +3050,11 @@ LOCAL_LABEL(CallJittedMethodRet4Vector128_NoSwiftError): EPILOG_RETURN NESTED_END CallJittedMethodRet4Vector128, _TEXT -.macro SwiftStore_AtOffset reg - ldr x11, [x10], #8 - and w11, w11, #0xFFFF - str \reg, [x9, x11] - ldr x11, [x10], #8 - br x11 -.endm - -LEAF_ENTRY Store_X0_AtOffset - SwiftStore_AtOffset x0 -LEAF_END Store_X0_AtOffset - -LEAF_ENTRY Store_X1_AtOffset - SwiftStore_AtOffset x1 -LEAF_END Store_X1_AtOffset - -LEAF_ENTRY Store_X2_AtOffset - SwiftStore_AtOffset x2 -LEAF_END Store_X2_AtOffset - -LEAF_ENTRY Store_X3_AtOffset - SwiftStore_AtOffset x3 -LEAF_END Store_X3_AtOffset - -.macro SwiftStoreFloat_AtOffset reg - ldr x11, [x10], #8 - and w11, w11, #0xFFFF - str \reg, [x9, x11] - ldr x11, [x10], #8 - br x11 -.endm - -LEAF_ENTRY Store_D0_AtOffset - SwiftStoreFloat_AtOffset d0 -LEAF_END Store_D0_AtOffset - -LEAF_ENTRY Store_D1_AtOffset - SwiftStoreFloat_AtOffset d1 -LEAF_END Store_D1_AtOffset - -LEAF_ENTRY Store_D2_AtOffset - SwiftStoreFloat_AtOffset d2 -LEAF_END Store_D2_AtOffset - -LEAF_ENTRY Store_D3_AtOffset - SwiftStoreFloat_AtOffset d3 -LEAF_END Store_D3_AtOffset - +// X0 - routines array +// X1 - interpreter stack args location +// X2 - interpreter stack return value location +// X3 - stack arguments size (properly aligned) +// X4 - address of continuation return value NESTED_ENTRY CallJittedMethodRetSwiftLowered, _TEXT, NoHandler PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -64 stp x2, x4, [fp, #16] diff --git a/src/coreclr/vm/arm64/asmhelpers.asm b/src/coreclr/vm/arm64/asmhelpers.asm index 1f4fd2bf9b8220..1e26087fda44d4 100644 --- 
a/src/coreclr/vm/arm64/asmhelpers.asm +++ b/src/coreclr/vm/arm64/asmhelpers.asm @@ -1384,6 +1384,24 @@ HaveInterpThreadContext EPILOG_RETURN NESTED_END InterpreterStubRet4Vector128 + ; When interpreted method is called with Swift calling convention and returns a lowered struct, + ; this stub writes return values to a buffer. + NESTED_ENTRY InterpreterStubRetSwiftLowered + PROLOG_SAVE_REG_PAIR fp, lr, #-16! + ; The +16 is for the fp, lr above + add x0, sp, #__PWTB_TransitionBlock + 16 + mov x1, x19 ; the IR bytecode pointer + mov x2, xzr + bl ExecuteInterpretedMethod + mov x9, x0 ; Save return buffer address + ldp x0, x1, [x9] + ldp x2, x3, [x9, #16] + ldp d0, d1, [x9, #32] + ldp d2, d3, [x9, #48] + EPILOG_RESTORE_REG_PAIR fp, lr, #16! + EPILOG_RETURN + NESTED_END InterpreterStubRetSwiftLowered + ; Routines for passing value type arguments by reference in general purpose registers X0..X7 ; from native code to the interpreter @@ -1483,13 +1501,11 @@ RefCopyDone$argReg EPILOG_BRANCH_REG x11 LEAF_END Load_SwiftSelf - ; Load address of struct on interpreter stack into x20 (SwiftSelf byReference) - ; The next entry in the routines array contains the size of the struct LEAF_ENTRY Load_SwiftSelf_ByRef - mov x20, x9 ; x20 = address of struct on interpreter stack - ldr x11, [x10], #8 ; Load size from routines array - add x9, x9, x11 ; Advance interpreter stack pointer by struct size - ldr x11, [x10], #8 ; Load next routine address + mov x20, x9 + ldr x11, [x10], #8 + add x9, x9, x11 + ldr x11, [x10], #8 EPILOG_BRANCH_REG x11 LEAF_END Load_SwiftSelf_ByRef @@ -1502,11 +1518,170 @@ RefCopyDone$argReg LEAF_END Load_SwiftError LEAF_ENTRY Load_SwiftIndirectResult - ldr x11, [x9], #8 + ldr x8, [x9], #8 ldr x11, [x10], #8 EPILOG_BRANCH_REG x11 LEAF_END Load_SwiftIndirectResult + MACRO + SwiftLoad_AtOffset $reg + + ldr x12, [x10], #8 ; Load offset|struct_size + and w11, w12, #0xFFFF ; Extract offset (lower 16 bits) + ldr $reg, [x9, x11] ; Load from [x9 + offset] + lsr x12, x12, #16 ; Shift to get struct_size + cbz x12, %F1 ; If struct_size == 0, skip advance + add x9, x9, x12 ; Advance x9 by struct_size +1 + ldr x11, [x10], #8 + EPILOG_BRANCH_REG x11 + MEND + + LEAF_ENTRY Load_X0_AtOffset + SwiftLoad_AtOffset x0 + LEAF_END Load_X0_AtOffset + + LEAF_ENTRY Load_X1_AtOffset + SwiftLoad_AtOffset x1 + LEAF_END Load_X1_AtOffset + + LEAF_ENTRY Load_X2_AtOffset + SwiftLoad_AtOffset x2 + LEAF_END Load_X2_AtOffset + + LEAF_ENTRY Load_X3_AtOffset + SwiftLoad_AtOffset x3 + LEAF_END Load_X3_AtOffset + + LEAF_ENTRY Load_X4_AtOffset + SwiftLoad_AtOffset x4 + LEAF_END Load_X4_AtOffset + + LEAF_ENTRY Load_X5_AtOffset + SwiftLoad_AtOffset x5 + LEAF_END Load_X5_AtOffset + + LEAF_ENTRY Load_X6_AtOffset + SwiftLoad_AtOffset x6 + LEAF_END Load_X6_AtOffset + + LEAF_ENTRY Load_X7_AtOffset + SwiftLoad_AtOffset x7 + LEAF_END Load_X7_AtOffset + + MACRO + SwiftLoadFloat_AtOffset $reg + + ldr x12, [x10], #8 ; Load offset|struct_size + and w11, w12, #0xFFFF ; Extract offset (lower 16 bits) + ldr $reg, [x9, x11] ; Load float from [x9 + offset] + lsr x12, x12, #16 ; Shift to get struct_size + cbz x12, %F1 ; If struct_size == 0, skip advance + add x9, x9, x12 ; Advance x9 by struct_size +1 + ldr x11, [x10], #8 + EPILOG_BRANCH_REG x11 + MEND + + LEAF_ENTRY Load_D0_AtOffset + SwiftLoadFloat_AtOffset d0 + LEAF_END Load_D0_AtOffset + + LEAF_ENTRY Load_D1_AtOffset + SwiftLoadFloat_AtOffset d1 + LEAF_END Load_D1_AtOffset + + LEAF_ENTRY Load_D2_AtOffset + SwiftLoadFloat_AtOffset d2 + LEAF_END Load_D2_AtOffset + + LEAF_ENTRY Load_D3_AtOffset + 
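The Load_Swift* routines in this hunk pin down the special registers of the Swift ABI on ARM64: the self context travels in x20, the error register is x21 (zeroed before the call by Load_SwiftError), and the indirect-result pointer belongs in x8, which is what the x11-to-x8 change in Load_SwiftIndirectResult above fixes. A small reference sketch (the enum is illustrative, not a runtime type):

// Register numbers the routines above target.
enum SwiftAbiReg : int
{
    SwiftSelfReg           = 20, // x20: SwiftSelf / SwiftSelf<T> context
    SwiftErrorReg          = 21, // x21: SwiftError, zeroed before the call
    SwiftIndirectResultReg = 8,  // x8:  SwiftIndirectResult buffer pointer
};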
SwiftLoadFloat_AtOffset d3 + LEAF_END Load_D3_AtOffset + + LEAF_ENTRY Load_D4_AtOffset + SwiftLoadFloat_AtOffset d4 + LEAF_END Load_D4_AtOffset + + LEAF_ENTRY Load_D5_AtOffset + SwiftLoadFloat_AtOffset d5 + LEAF_END Load_D5_AtOffset + + LEAF_ENTRY Load_D6_AtOffset + SwiftLoadFloat_AtOffset d6 + LEAF_END Load_D6_AtOffset + + LEAF_ENTRY Load_D7_AtOffset + SwiftLoadFloat_AtOffset d7 + LEAF_END Load_D7_AtOffset + + LEAF_ENTRY Load_Stack_AtOffset + ldr x12, [x10], #8 ; Load offset|structSize|stackOffset + and w11, w12, #0xFFFF ; Extract offset (lower 16 bits) + ldr x13, [x9, x11] ; Load 8 bytes from [x9 + offset] + lsr x14, x12, #32 ; Extract stackOffset (upper 32 bits) + add x14, sp, x14 ; Calculate stack destination + str x13, [x14] ; Store to native stack + ubfx x12, x12, #16, #16 ; Extract structSize (bits 16-31) + cbz x12, %F1 ; If structSize == 0, skip advance + add x9, x9, x12 ; Advance x9 by structSize +1 + ldr x11, [x10], #8 + EPILOG_BRANCH_REG x11 + LEAF_END Load_Stack_AtOffset + + MACRO + SwiftStore_AtOffset $reg + + ldr x11, [x10], #8 + and w11, w11, #0xFFFF + str $reg, [x9, x11] + ldr x11, [x10], #8 + br x11 + MEND + + LEAF_ENTRY Store_X0_AtOffset + SwiftStore_AtOffset x0 + LEAF_END Store_X0_AtOffset + + LEAF_ENTRY Store_X1_AtOffset + SwiftStore_AtOffset x1 + LEAF_END Store_X1_AtOffset + + LEAF_ENTRY Store_X2_AtOffset + SwiftStore_AtOffset x2 + LEAF_END Store_X2_AtOffset + + LEAF_ENTRY Store_X3_AtOffset + SwiftStore_AtOffset x3 + LEAF_END Store_X3_AtOffset + + MACRO + SwiftStoreFloat_AtOffset $reg + + ldr x11, [x10], #8 + and w11, w11, #0xFFFF + str $reg, [x9, x11] + ldr x11, [x10], #8 + br x11 + MEND + + LEAF_ENTRY Store_D0_AtOffset + SwiftStoreFloat_AtOffset d0 + LEAF_END Store_D0_AtOffset + + LEAF_ENTRY Store_D1_AtOffset + SwiftStoreFloat_AtOffset d1 + LEAF_END Store_D1_AtOffset + + LEAF_ENTRY Store_D2_AtOffset + SwiftStoreFloat_AtOffset d2 + LEAF_END Store_D2_AtOffset + + LEAF_ENTRY Store_D3_AtOffset + SwiftStoreFloat_AtOffset d3 + LEAF_END Store_D3_AtOffset + LEAF_ENTRY Store_X0 str x0, [x9], #8 ldr x11, [x10], #8 @@ -2593,17 +2768,22 @@ CopyLoop ; X3 - stack arguments size (properly aligned) ; X4 - address of continuation return value NESTED_ENTRY CallJittedMethodRetVoid - PROLOG_SAVE_REG_PAIR fp, lr, #-32! + PROLOG_SAVE_REG_PAIR fp, lr, #-48! str x4, [fp, #16] + str xzr, [fp, #40] sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 + ldr x11, [fp, #40] + cbz x11, CallJittedMethodRetVoid_NoSwiftError + str x10, [x11] +CallJittedMethodRetVoid_NoSwiftError ldr x4, [fp, #16] str x2, [x4] EPILOG_STACK_RESTORE - EPILOG_RESTORE_REG_PAIR fp, lr, #32! + EPILOG_RESTORE_REG_PAIR fp, lr, #48! ret lr NESTED_END CallJittedMethodRetVoid @@ -2613,18 +2793,23 @@ CopyLoop ; X3 - stack arguments size (properly aligned) ; X4 - address of continuation return value NESTED_ENTRY CallJittedMethodRetBuff - PROLOG_SAVE_REG_PAIR fp, lr, #-32! + PROLOG_SAVE_REG_PAIR fp, lr, #-48! str x4, [fp, #16] + str xzr, [fp, #40] sub sp, sp, x3 mov x10, x0 mov x9, x1 mov x8, x2 ldr x11, [x10], #8 blr x11 + ldr x11, [fp, #40] + cbz x11, CallJittedMethodRetBuff_NoSwiftError + str x10, [x11] +CallJittedMethodRetBuff_NoSwiftError ldr x4, [fp, #16] str x2, [x4] EPILOG_STACK_RESTORE - EPILOG_RESTORE_REG_PAIR fp, lr, #32! + EPILOG_RESTORE_REG_PAIR fp, lr, #48! EPILOG_RETURN NESTED_END CallJittedMethodRetBuff @@ -2634,19 +2819,24 @@ CopyLoop ; X3 - stack arguments size (properly aligned) ; X4 - address of continuation return value NESTED_ENTRY CallJittedMethodRetI8 - PROLOG_SAVE_REG_PAIR fp, lr, #-32! 
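The *_AtOffset routines read packed 64-bit operands from the routines stream instead of separate entries: SwiftLoad_AtOffset and SwiftLoadFloat_AtOffset decode offset | struct_size << 16, and Load_Stack_AtOffset additionally carries the native stack offset in the upper 32 bits. A sketch of how the generator side might pack these entries, assuming the helper names (they are not the actual CallStubGenerator functions):

#include <cstdint>

static uint64_t PackSwiftLoadEntry(uint16_t offset, uint16_t structSize)
{
    // offset in bits 0-15; struct size in bits 16+, non-zero only on the
    // last lowered element so the routine advances the interpreter stack (x9)
    return (uint64_t)offset | ((uint64_t)structSize << 16);
}

static uint64_t PackSwiftStackEntry(uint16_t offset, uint16_t structSize, uint32_t stackOffset)
{
    // Load_Stack_AtOffset layout: offset | structSize << 16 | stackOffset << 32
    return (uint64_t)offset | ((uint64_t)structSize << 16) | ((uint64_t)stackOffset << 32);
}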
+ PROLOG_SAVE_REG_PAIR fp, lr, #-48! stp x2, x4, [fp, #16] + str xzr, [fp, #40] sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 - ldr x9, [fp, #16] - str x0, [x9] + ldr x11, [fp, #40] + cbz x11, CallJittedMethodRetI8_NoSwiftError + str x10, [x11] +CallJittedMethodRetI8_NoSwiftError ldr x9, [fp, #24] str x2, [x9] + ldr x9, [fp, #16] + str x0, [x9] EPILOG_STACK_RESTORE - EPILOG_RESTORE_REG_PAIR fp, lr, #32! + EPILOG_RESTORE_REG_PAIR fp, lr, #48! EPILOG_RETURN NESTED_END CallJittedMethodRetI8 @@ -2656,19 +2846,24 @@ CopyLoop ; X3 - stack arguments size (properly aligned) ; X4 - address of continuation return value NESTED_ENTRY CallJittedMethodRet2I8 - PROLOG_SAVE_REG_PAIR fp, lr, #-32! + PROLOG_SAVE_REG_PAIR fp, lr, #-48! stp x2, x4, [fp, #16] + str xzr, [fp, #40] sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 - ldr x9, [fp, #16] - stp x0, x1, [x9] + ldr x11, [fp, #40] + cbz x11, CallJittedMethodRet2I8_NoSwiftError + str x10, [x11] +CallJittedMethodRet2I8_NoSwiftError ldr x9, [fp, #24] str x2, [x9] + ldr x9, [fp, #16] + stp x0, x1, [x9] EPILOG_STACK_RESTORE - EPILOG_RESTORE_REG_PAIR fp, lr, #32! + EPILOG_RESTORE_REG_PAIR fp, lr, #48! EPILOG_RETURN NESTED_END CallJittedMethodRet2I8 @@ -2678,19 +2873,24 @@ CopyLoop ; X3 - stack arguments size (properly aligned) ; X4 - address of continuation return value NESTED_ENTRY CallJittedMethodRetDouble - PROLOG_SAVE_REG_PAIR fp, lr, #-32! + PROLOG_SAVE_REG_PAIR fp, lr, #-48! stp x2, x4, [fp, #16] + str xzr, [fp, #40] sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 - ldr x9, [fp, #16] - str d0, [x9] + ldr x11, [fp, #40] + cbz x11, CallJittedMethodRetDouble_NoSwiftError + str x10, [x11] +CallJittedMethodRetDouble_NoSwiftError ldr x9, [fp, #24] str x2, [x9] + ldr x9, [fp, #16] + str d0, [x9] EPILOG_STACK_RESTORE - EPILOG_RESTORE_REG_PAIR fp, lr, #32! + EPILOG_RESTORE_REG_PAIR fp, lr, #48! EPILOG_RETURN NESTED_END CallJittedMethodRetDouble @@ -2700,19 +2900,24 @@ CopyLoop ; X3 - stack arguments size (properly aligned) ; X4 - address of continuation return value NESTED_ENTRY CallJittedMethodRet2Double - PROLOG_SAVE_REG_PAIR fp, lr, #-32! + PROLOG_SAVE_REG_PAIR fp, lr, #-48! stp x2, x4, [fp, #16] + str xzr, [fp, #40] sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 - ldr x9, [fp, #16] - stp d0, d1, [x9] + ldr x11, [fp, #40] + cbz x11, CallJittedMethodRet2Double_NoSwiftError + str x10, [x11] +CallJittedMethodRet2Double_NoSwiftError ldr x9, [fp, #24] str x2, [x9] + ldr x9, [fp, #16] + stp d0, d1, [x9] EPILOG_STACK_RESTORE - EPILOG_RESTORE_REG_PAIR fp, lr, #32! + EPILOG_RESTORE_REG_PAIR fp, lr, #48! EPILOG_RETURN NESTED_END CallJittedMethodRet2Double @@ -2722,20 +2927,25 @@ CopyLoop ; X3 - stack arguments size (properly aligned) ; X4 - address of continuation return value NESTED_ENTRY CallJittedMethodRet3Double - PROLOG_SAVE_REG_PAIR fp, lr, #-32! + PROLOG_SAVE_REG_PAIR fp, lr, #-48! stp x2, x4, [fp, #16] + str xzr, [fp, #40] sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 + ldr x11, [fp, #40] + cbz x11, CallJittedMethodRet3Double_NoSwiftError + str x10, [x11] +CallJittedMethodRet3Double_NoSwiftError + ldr x9, [fp, #24] + str x2, [x9] ldr x9, [fp, #16] stp d0, d1, [x9], #16 str d2, [x9] - ldr x9, [fp, #24] - str x2, [x9] EPILOG_STACK_RESTORE - EPILOG_RESTORE_REG_PAIR fp, lr, #32! + EPILOG_RESTORE_REG_PAIR fp, lr, #48! 
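Each CallJittedMethodRet* stub in this hunk grows its frame from 32 to 48 bytes to host a Swift error slot at [fp, #40]: the slot is zeroed before the call, Load_SwiftError records the address of the managed SwiftError location into it, and the post-call sequence conditionally writes the error value back. The stubs take that value from x10, so the sketch below assumes the call routine parks the Swift error register's contents there; names are illustrative:

#include <cstdint>

struct CallFrame
{
    void** errorSlot = nullptr; // the [fp, #40] slot; str xzr, [fp, #40] zeroes it
};

static void PropagateSwiftError(const CallFrame& frame, void* swiftErrorValue)
{
    // ldr x11, [fp, #40] / cbz x11, ..._NoSwiftError / str x10, [x11]
    if (frame.errorSlot != nullptr)
        *frame.errorSlot = swiftErrorValue;
}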
EPILOG_RETURN NESTED_END CallJittedMethodRet3Double @@ -2745,20 +2955,25 @@ CopyLoop ; X3 - stack arguments size (properly aligned) ; X4 - address of continuation return value NESTED_ENTRY CallJittedMethodRet4Double - PROLOG_SAVE_REG_PAIR fp, lr, #-32! + PROLOG_SAVE_REG_PAIR fp, lr, #-48! stp x2, x4, [fp, #16] + str xzr, [fp, #40] sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 + ldr x11, [fp, #40] + cbz x11, CallJittedMethodRet4Double_NoSwiftError + str x10, [x11] +CallJittedMethodRet4Double_NoSwiftError + ldr x9, [fp, #24] + str x2, [x9] ldr x9, [fp, #16] stp d0, d1, [x9], #16 stp d2, d3, [x9] - ldr x9, [fp, #24] - str x2, [x9] EPILOG_STACK_RESTORE - EPILOG_RESTORE_REG_PAIR fp, lr, #32! + EPILOG_RESTORE_REG_PAIR fp, lr, #48! EPILOG_RETURN NESTED_END CallJittedMethodRet4Double @@ -2768,19 +2983,24 @@ CopyLoop ; X3 - stack arguments size (properly aligned) ; X4 - address of continuation return value NESTED_ENTRY CallJittedMethodRetFloat - PROLOG_SAVE_REG_PAIR fp, lr, #-32! + PROLOG_SAVE_REG_PAIR fp, lr, #-48! stp x2, x4, [fp, #16] + str xzr, [fp, #40] sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 - ldr x9, [fp, #16] - str s0, [x9] + ldr x11, [fp, #40] + cbz x11, CallJittedMethodRetFloat_NoSwiftError + str x10, [x11] +CallJittedMethodRetFloat_NoSwiftError ldr x9, [fp, #24] str x2, [x9] + ldr x9, [fp, #16] + str s0, [x9] EPILOG_STACK_RESTORE - EPILOG_RESTORE_REG_PAIR fp, lr, #32! + EPILOG_RESTORE_REG_PAIR fp, lr, #48! EPILOG_RETURN NESTED_END CallJittedMethodRetFloat @@ -2790,19 +3010,24 @@ CopyLoop ; X3 - stack arguments size (properly aligned) ; X4 - address of continuation return value NESTED_ENTRY CallJittedMethodRet2Float - PROLOG_SAVE_REG_PAIR fp, lr, #-32! + PROLOG_SAVE_REG_PAIR fp, lr, #-48! stp x2, x4, [fp, #16] + str xzr, [fp, #40] sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 - ldr x9, [fp, #16] - stp s0, s1, [x9] + ldr x11, [fp, #40] + cbz x11, CallJittedMethodRet2Float_NoSwiftError + str x10, [x11] +CallJittedMethodRet2Float_NoSwiftError ldr x9, [fp, #24] str x2, [x9] + ldr x9, [fp, #16] + stp s0, s1, [x9] EPILOG_STACK_RESTORE - EPILOG_RESTORE_REG_PAIR fp, lr, #32! + EPILOG_RESTORE_REG_PAIR fp, lr, #48! EPILOG_RETURN NESTED_END CallJittedMethodRet2Float @@ -2812,20 +3037,25 @@ CopyLoop ; X3 - stack arguments size (properly aligned) ; X4 - address of continuation return value NESTED_ENTRY CallJittedMethodRet3Float - PROLOG_SAVE_REG_PAIR fp, lr, #-32! + PROLOG_SAVE_REG_PAIR fp, lr, #-48! stp x2, x4, [fp, #16] + str xzr, [fp, #40] sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 + ldr x11, [fp, #40] + cbz x11, CallJittedMethodRet3Float_NoSwiftError + str x10, [x11] +CallJittedMethodRet3Float_NoSwiftError + ldr x9, [fp, #24] + str x2, [x9] ldr x9, [fp, #16] stp s0, s1, [x9], #8 str s2, [x9] - ldr x9, [fp, #24] - str x2, [x9] EPILOG_STACK_RESTORE - EPILOG_RESTORE_REG_PAIR fp, lr, #32! + EPILOG_RESTORE_REG_PAIR fp, lr, #48! EPILOG_RETURN NESTED_END CallJittedMethodRet3Float @@ -2835,20 +3065,25 @@ CopyLoop ; X3 - stack arguments size (properly aligned) ; X4 - address of continuation return value NESTED_ENTRY CallJittedMethodRet4Float - PROLOG_SAVE_REG_PAIR fp, lr, #-32! + PROLOG_SAVE_REG_PAIR fp, lr, #-48! 
stp x2, x4, [fp, #16] + str xzr, [fp, #40] sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 + ldr x11, [fp, #40] + cbz x11, CallJittedMethodRet4Float_NoSwiftError + str x10, [x11] +CallJittedMethodRet4Float_NoSwiftError + ldr x9, [fp, #24] + str x2, [x9] ldr x9, [fp, #16] stp s0, s1, [x9], #8 stp s2, s3, [x9] - ldr x9, [fp, #24] - str x2, [x9] EPILOG_STACK_RESTORE - EPILOG_RESTORE_REG_PAIR fp, lr, #32! + EPILOG_RESTORE_REG_PAIR fp, lr, #48! EPILOG_RETURN NESTED_END CallJittedMethodRet4Float @@ -2858,19 +3093,24 @@ CopyLoop ; X3 - stack arguments size (properly aligned) ; X4 - address of continuation return value NESTED_ENTRY CallJittedMethodRetVector64 - PROLOG_SAVE_REG_PAIR fp, lr, #-32! + PROLOG_SAVE_REG_PAIR fp, lr, #-48! stp x2, x4, [fp, #16] + str xzr, [fp, #40] sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 - ldr x9, [fp, #16] - str d0, [x9] + ldr x11, [fp, #40] + cbz x11, CallJittedMethodRetVector64_NoSwiftError + str x10, [x11] +CallJittedMethodRetVector64_NoSwiftError ldr x9, [fp, #24] str x2, [x9] + ldr x9, [fp, #16] + str d0, [x9] EPILOG_STACK_RESTORE - EPILOG_RESTORE_REG_PAIR fp, lr, #32! + EPILOG_RESTORE_REG_PAIR fp, lr, #48! EPILOG_RETURN NESTED_END CallJittedMethodRetVector64 @@ -2880,20 +3120,25 @@ CopyLoop ; X3 - stack arguments size (properly aligned) ; X4 - address of continuation return value NESTED_ENTRY CallJittedMethodRet2Vector64 - PROLOG_SAVE_REG_PAIR fp, lr, #-32! + PROLOG_SAVE_REG_PAIR fp, lr, #-48! stp x2, x4, [fp, #16] + str xzr, [fp, #40] sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 + ldr x11, [fp, #40] + cbz x11, CallJittedMethodRet2Vector64_NoSwiftError + str x10, [x11] +CallJittedMethodRet2Vector64_NoSwiftError + ldr x9, [fp, #24] + str x2, [x9] ldr x9, [fp, #16] str d0, [x9], #8 str d1, [x9] - ldr x9, [fp, #24] - str x2, [x9] EPILOG_STACK_RESTORE - EPILOG_RESTORE_REG_PAIR fp, lr, #32! + EPILOG_RESTORE_REG_PAIR fp, lr, #48! EPILOG_RETURN NESTED_END CallJittedMethodRet2Vector64 @@ -2903,21 +3148,26 @@ CopyLoop ; X3 - stack arguments size (properly aligned) ; X4 - address of continuation return value NESTED_ENTRY CallJittedMethodRet3Vector64 - PROLOG_SAVE_REG_PAIR fp, lr, #-32! + PROLOG_SAVE_REG_PAIR fp, lr, #-48! stp x2, x4, [fp, #16] + str xzr, [fp, #40] sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 + ldr x11, [fp, #40] + cbz x11, CallJittedMethodRet3Vector64_NoSwiftError + str x10, [x11] +CallJittedMethodRet3Vector64_NoSwiftError + ldr x9, [fp, #24] + str x2, [x9] ldr x9, [fp, #16] str d0, [x9], #8 str d1, [x9], #8 str d2, [x9] - ldr x9, [fp, #24] - str x2, [x9] EPILOG_STACK_RESTORE - EPILOG_RESTORE_REG_PAIR fp, lr, #32! + EPILOG_RESTORE_REG_PAIR fp, lr, #48! EPILOG_RETURN NESTED_END CallJittedMethodRet3Vector64 @@ -2927,22 +3177,27 @@ CopyLoop ; X3 - stack arguments size (properly aligned) ; X4 - address of continuation return value NESTED_ENTRY CallJittedMethodRet4Vector64 - PROLOG_SAVE_REG_PAIR fp, lr, #-32! + PROLOG_SAVE_REG_PAIR fp, lr, #-48! stp x2, x4, [fp, #16] + str xzr, [fp, #40] sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 + ldr x11, [fp, #40] + cbz x11, CallJittedMethodRet4Vector64_NoSwiftError + str x10, [x11] +CallJittedMethodRet4Vector64_NoSwiftError + ldr x9, [fp, #24] + str x2, [x9] ldr x9, [fp, #16] str d0, [x9], #8 str d1, [x9], #8 str d2, [x9], #8 str d3, [x9] - ldr x9, [fp, #24] - str x2, [x9] EPILOG_STACK_RESTORE - EPILOG_RESTORE_REG_PAIR fp, lr, #32! + EPILOG_RESTORE_REG_PAIR fp, lr, #48! 
EPILOG_RETURN NESTED_END CallJittedMethodRet4Vector64 @@ -2952,19 +3207,24 @@ CopyLoop ; X3 - stack arguments size (properly aligned) ; X4 - address of continuation return value NESTED_ENTRY CallJittedMethodRetVector128 - PROLOG_SAVE_REG_PAIR fp, lr, #-32! + PROLOG_SAVE_REG_PAIR fp, lr, #-48! stp x2, x4, [fp, #16] + str xzr, [fp, #40] sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 - ldr x9, [fp, #16] - str q0, [x9] + ldr x11, [fp, #40] + cbz x11, CallJittedMethodRetVector128_NoSwiftError + str x10, [x11] +CallJittedMethodRetVector128_NoSwiftError ldr x9, [fp, #24] str x2, [x9] + ldr x9, [fp, #16] + str q0, [x9] EPILOG_STACK_RESTORE - EPILOG_RESTORE_REG_PAIR fp, lr, #32! + EPILOG_RESTORE_REG_PAIR fp, lr, #48! EPILOG_RETURN NESTED_END CallJittedMethodRetVector128 @@ -2974,20 +3234,25 @@ CopyLoop ; X3 - stack arguments size (properly aligned) ; X4 - address of continuation return value NESTED_ENTRY CallJittedMethodRet2Vector128 - PROLOG_SAVE_REG_PAIR fp, lr, #-32! + PROLOG_SAVE_REG_PAIR fp, lr, #-48! stp x2, x4, [fp, #16] + str xzr, [fp, #40] sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 + ldr x11, [fp, #40] + cbz x11, CallJittedMethodRet2Vector128_NoSwiftError + str x10, [x11] +CallJittedMethodRet2Vector128_NoSwiftError + ldr x9, [fp, #24] + str x2, [x9] ldr x9, [fp, #16] str q0, [x9], #16 str q1, [x9] - ldr x9, [fp, #24] - str x2, [x9] EPILOG_STACK_RESTORE - EPILOG_RESTORE_REG_PAIR fp, lr, #32! + EPILOG_RESTORE_REG_PAIR fp, lr, #48! EPILOG_RETURN NESTED_END CallJittedMethodRet2Vector128 @@ -2997,21 +3262,26 @@ CopyLoop ; X3 - stack arguments size (properly aligned) ; X4 - address of continuation return value NESTED_ENTRY CallJittedMethodRet3Vector128 - PROLOG_SAVE_REG_PAIR fp, lr, #-32! + PROLOG_SAVE_REG_PAIR fp, lr, #-48! stp x2, x4, [fp, #16] + str xzr, [fp, #40] sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 + ldr x11, [fp, #40] + cbz x11, CallJittedMethodRet3Vector128_NoSwiftError + str x10, [x11] +CallJittedMethodRet3Vector128_NoSwiftError + ldr x9, [fp, #24] + str x2, [x9] ldr x9, [fp, #16] str q0, [x9], #16 str q1, [x9], #16 str q2, [x9] - ldr x9, [fp, #24] - str x2, [x9] EPILOG_STACK_RESTORE - EPILOG_RESTORE_REG_PAIR fp, lr, #32! + EPILOG_RESTORE_REG_PAIR fp, lr, #48! EPILOG_RETURN NESTED_END CallJittedMethodRet3Vector128 @@ -3021,25 +3291,74 @@ CopyLoop ; X3 - stack arguments size (properly aligned) ; X4 - address of continuation return value NESTED_ENTRY CallJittedMethodRet4Vector128 - PROLOG_SAVE_REG_PAIR fp, lr, #-32! + PROLOG_SAVE_REG_PAIR fp, lr, #-48! stp x2, x4, [fp, #16] + str xzr, [fp, #40] sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 + ldr x11, [fp, #40] + cbz x11, CallJittedMethodRet4Vector128_NoSwiftError + str x10, [x11] +CallJittedMethodRet4Vector128_NoSwiftError + ldr x9, [fp, #24] + str x2, [x9] ldr x9, [fp, #16] str q0, [x9], #16 str q1, [x9], #16 str q2, [x9], #16 str q3, [x9] - ldr x9, [fp, #24] - str x2, [x9] EPILOG_STACK_RESTORE - EPILOG_RESTORE_REG_PAIR fp, lr, #32! + EPILOG_RESTORE_REG_PAIR fp, lr, #48! EPILOG_RETURN NESTED_END CallJittedMethodRet4Vector128 + ; X0 - routines array + ; X1 - interpreter stack args location + ; X2 - interpreter stack return value location + ; X3 - stack arguments size (properly aligned) + ; X4 - address of continuation return value + NESTED_ENTRY CallJittedMethodRetSwiftLowered + PROLOG_SAVE_REG_PAIR fp, lr, #-64! 
+ stp x2, x4, [fp, #16] + str xzr, [fp, #56] + ; Store the return address for the terminator to use + adr x11, CallJittedMethodRetSwiftLowered_Epilog + str x11, [fp, #32] + str x19, [fp, #48] + ldrsw x11, [x0, #-20] ; Load TargetSlotIndex + add x11, x11, #1 ; TargetSlotIndex + 1 + add x11, x0, x11, lsl #3 ; x0 + (TargetSlotIndex + 1) * 8 + str x11, [fp, #40] ; Save store routines start pointer + sub sp, sp, x3 + mov x10, x0 + mov x9, x1 + ldr x11, [x10], #8 + blr x11 + ldr x11, [fp, #56] + cbz x11, CallJittedMethodRetSwiftLowered_NoSwiftError + str x10, [x11] +CallJittedMethodRetSwiftLowered_NoSwiftError + ldr x12, [fp, #24] + str x2, [x12] ; Store continuation return value + ldr x9, [fp, #16] ; Load return buffer address into x9 + ldr x10, [fp, #40] ; Load store routines start pointer + ldr x11, [x10], #8 ; Load first store routine + br x11 +CallJittedMethodRetSwiftLowered_Epilog + ldr x19, [fp, #48] ; Restore x19 + EPILOG_STACK_RESTORE + EPILOG_RESTORE_REG_PAIR fp, lr, #64! + EPILOG_RETURN + NESTED_END CallJittedMethodRetSwiftLowered + + ; Terminator routine branches back to the epilog + LEAF_ENTRY SwiftLoweredReturnTerminator + ldr x11, [fp, #32] + br x11 + LEAF_END SwiftLoweredReturnTerminator #endif // FEATURE_INTERPRETER diff --git a/src/coreclr/vm/callstubgenerator.cpp b/src/coreclr/vm/callstubgenerator.cpp index d56da8aad60976..3976b2c64efe53 100644 --- a/src/coreclr/vm/callstubgenerator.cpp +++ b/src/coreclr/vm/callstubgenerator.cpp @@ -2779,98 +2779,6 @@ bool isIntrinsicSIMDType(MethodTable* pMT) return false; } - -//--------------------------------------------------------------------------- -// ValidateSwiftCallSignature: -// Validates that a Swift calling convention signature is valid. -// Throws InvalidProgramException if the signature is invalid. -// -// Arguments: -// sig - the method signature to validate. 
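CallJittedMethodRetSwiftLowered above finds the store routines by reading TargetSlotIndex from 20 bytes before the routines array and skipping one slot past it, then runs them after the call and lets SwiftLoweredReturnTerminator branch back to the saved epilog address. The pointer arithmetic, restated as a C++ sketch (PCODE_t is a stand-in for the runtime's PCODE):

#include <cstdint>

typedef uintptr_t PCODE_t;

static PCODE_t* StoreRoutinesStart(PCODE_t* routines)
{
    // ldrsw x11, [x0, #-20]          ; Load TargetSlotIndex
    int32_t targetSlotIndex = *(int32_t*)((uint8_t*)routines - 20);
    // add x11, x0, (x11 + 1), lsl #3 => routines + (TargetSlotIndex + 1) slots
    return routines + targetSlotIndex + 1;
}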
-// -// Throws: -// InvalidProgramException -// -void ValidateSwiftCallSignature(MetaSig &sig) -{ - int swiftSelfCount = 0; - int swiftErrorCount = 0; - int swiftIndirectResultCount = 0; - - sig.Reset(); - - CorElementType argCorType; - while ((argCorType = sig.NextArg()) != ELEMENT_TYPE_END) - { - TypeHandle thArgTypeHandle = sig.GetLastTypeHandleThrowing(); - bool isByRef = (argCorType == ELEMENT_TYPE_BYREF); - - if (isByRef) - { - sig.GetByRefType(&thArgTypeHandle); - } - - if (thArgTypeHandle.IsNull() || thArgTypeHandle.IsTypeDesc()) - { - continue; - } - - MethodTable* pArgMT = thArgTypeHandle.AsMethodTable(); - - if (!pArgMT->IsValueType()) - { - COMPlusThrow(kInvalidProgramException); - } - - if (isIntrinsicSIMDType(pArgMT)) - { - COMPlusThrow(kInvalidProgramException); - } - - if (isSwiftSelfType(pArgMT)) - { - swiftSelfCount++; - if (swiftSelfCount > 1) - { - COMPlusThrow(kInvalidProgramException); - } - } - - if (isSwiftErrorType(pArgMT)) - { - if (!isByRef) - { - COMPlusThrow(kInvalidProgramException); - } - swiftErrorCount++; - if (swiftErrorCount > 1) - { - COMPlusThrow(kInvalidProgramException); - } - } - - if (isSwiftIndirectResultType(pArgMT)) - { - swiftIndirectResultCount++; - if (swiftIndirectResultCount > 1) - { - COMPlusThrow(kInvalidProgramException); - } - } - } - - if (swiftIndirectResultCount > 0) - { - TypeHandle thReturnValueType; - CorElementType retType = sig.GetReturnTypeNormalized(&thReturnValueType); - if (retType != ELEMENT_TYPE_VOID) - { - COMPlusThrow(kInvalidProgramException); - } - } - - sig.Reset(); -} #endif // TARGET_ARM64 void CallStubGenerator::ComputeCallStub(MetaSig &sig, PCODE *pRoutines, MethodDesc *pMD) @@ -3049,7 +2957,6 @@ void CallStubGenerator::ComputeCallStubWorker(bool hasUnmanagedCallConv, CorInfo #ifdef TARGET_ARM64 // Swift lowering info for expanded struct elements - // Max 4 elements per struct, max ~32 args = 128 entries should be plenty struct SwiftLoweringElement { uint16_t offset; // Offset within struct uint16_t structSize; // If non-zero, this is the last element, advance x9 by this amount @@ -3392,7 +3299,7 @@ void CallStubGenerator::ComputeCallStubWorker(bool hasUnmanagedCallConv, CorInfo MethodTable* pInnerMT = innerType.AsMethodTable(); #if DEBUG CORINFO_SWIFT_LOWERING lowering = {}; - pInnerMT->GetNativeSwiftPhysicalLowering(&lowering, false /* useNativeLayout */); + pInnerMT->GetNativeSwiftPhysicalLowering(&lowering, false); _ASSERTE(lowering.byReference); #endif // DEBUG diff --git a/src/coreclr/vm/callstubgenerator.h b/src/coreclr/vm/callstubgenerator.h index 584b3871ea4d5d..eac23d30a4ce05 100644 --- a/src/coreclr/vm/callstubgenerator.h +++ b/src/coreclr/vm/callstubgenerator.h @@ -152,7 +152,7 @@ class CallStubGenerator // The total stack size used for the arguments. 
int m_totalStackSize = 0; #ifdef TARGET_ARM64 - // Size of struct for SwiftSelf byReference case + // Size of struct for SwiftSelf int m_swiftSelfByRefSize = 0; // Track if SwiftIndirectResult was used bool m_hasSwiftIndirectResult = false; From 44f93a9e96a66d424db2273d8d1a1ce8ee006717 Mon Sep 17 00:00:00 2001 From: Milos Kotlar Date: Fri, 16 Jan 2026 11:38:41 +0100 Subject: [PATCH 10/33] Fix windows build --- src/coreclr/vm/callstubgenerator.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/coreclr/vm/callstubgenerator.cpp b/src/coreclr/vm/callstubgenerator.cpp index 3976b2c64efe53..2fa918db7173b1 100644 --- a/src/coreclr/vm/callstubgenerator.cpp +++ b/src/coreclr/vm/callstubgenerator.cpp @@ -3072,7 +3072,7 @@ void CallStubGenerator::ComputeCallStubWorker(bool hasUnmanagedCallConv, CorInfo if (!lowering.byReference && lowering.numLoweredElements > 0) { - newArgCount += lowering.numLoweredElements; + newArgCount += (int)lowering.numLoweredElements; continue; } } @@ -3083,7 +3083,7 @@ void CallStubGenerator::ComputeCallStubWorker(bool hasUnmanagedCallConv, CorInfo // Build new signature with lowered structs and store lowering info SigBuilder swiftSigBuilder; - swiftSigBuilder.AppendByte(sig.GetCallingConventionInfo()); + swiftSigBuilder.AppendByte((BYTE)sig.GetCallingConventionInfo()); swiftSigBuilder.AppendData(newArgCount); // Copy return type From 20ec8e92a6c464272017ef47a8b987f760bff643 Mon Sep 17 00:00:00 2001 From: Milos Kotlar Date: Fri, 16 Jan 2026 15:51:45 +0100 Subject: [PATCH 11/33] Revert windows-related changes --- src/coreclr/vm/arm64/asmhelpers.asm | 501 ++++----------------------- src/coreclr/vm/callstubgenerator.cpp | 48 ++- src/coreclr/vm/callstubgenerator.h | 14 +- 3 files changed, 117 insertions(+), 446 deletions(-) diff --git a/src/coreclr/vm/arm64/asmhelpers.asm b/src/coreclr/vm/arm64/asmhelpers.asm index 1e26087fda44d4..c9e5cfa21ffbef 100644 --- a/src/coreclr/vm/arm64/asmhelpers.asm +++ b/src/coreclr/vm/arm64/asmhelpers.asm @@ -1384,24 +1384,6 @@ HaveInterpThreadContext EPILOG_RETURN NESTED_END InterpreterStubRet4Vector128 - ; When interpreted method is called with Swift calling convention and returns a lowered struct, - ; this stub writes return values to a buffer. - NESTED_ENTRY InterpreterStubRetSwiftLowered - PROLOG_SAVE_REG_PAIR fp, lr, #-16! - ; The +16 is for the fp, lr above - add x0, sp, #__PWTB_TransitionBlock + 16 - mov x1, x19 ; the IR bytecode pointer - mov x2, xzr - bl ExecuteInterpretedMethod - mov x9, x0 ; Save return buffer address - ldp x0, x1, [x9] - ldp x2, x3, [x9, #16] - ldp d0, d1, [x9, #32] - ldp d2, d3, [x9, #48] - EPILOG_RESTORE_REG_PAIR fp, lr, #16! 
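The "Fix windows build" patch above is purely about integer narrowing: the additions compiled on Clang but presumably tripped MSVC's warnings-as-errors for size_t-to-int and wider-integer-to-BYTE conversions, so the fix spells out the truncations. A minimal reproduction of the pattern, with illustrative names:

#include <cstddef>

typedef unsigned char BYTE;

static int NarrowingFixes(size_t numLoweredElements, unsigned callConvInfo)
{
    int newArgCount = 0;
    newArgCount += (int)numLoweredElements; // size_t -> int, now explicit
    BYTE callConvByte = (BYTE)callConvInfo; // wider integer -> BYTE, now explicit
    return newArgCount + callConvByte;
}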
- EPILOG_RETURN - NESTED_END InterpreterStubRetSwiftLowered - ; Routines for passing value type arguments by reference in general purpose registers X0..X7 ; from native code to the interpreter @@ -1495,193 +1477,6 @@ RefCopyDone$argReg Store_Ref X6 Store_Ref X7 - LEAF_ENTRY Load_SwiftSelf - ldr x20, [x9], #8 - ldr x11, [x10], #8 - EPILOG_BRANCH_REG x11 - LEAF_END Load_SwiftSelf - - LEAF_ENTRY Load_SwiftSelf_ByRef - mov x20, x9 - ldr x11, [x10], #8 - add x9, x9, x11 - ldr x11, [x10], #8 - EPILOG_BRANCH_REG x11 - LEAF_END Load_SwiftSelf_ByRef - - LEAF_ENTRY Load_SwiftError - ldr x11, [x9], #8 - str x11, [fp, #40] - mov x21, xzr - ldr x11, [x10], #8 - EPILOG_BRANCH_REG x11 - LEAF_END Load_SwiftError - - LEAF_ENTRY Load_SwiftIndirectResult - ldr x8, [x9], #8 - ldr x11, [x10], #8 - EPILOG_BRANCH_REG x11 - LEAF_END Load_SwiftIndirectResult - - MACRO - SwiftLoad_AtOffset $reg - - ldr x12, [x10], #8 ; Load offset|struct_size - and w11, w12, #0xFFFF ; Extract offset (lower 16 bits) - ldr $reg, [x9, x11] ; Load from [x9 + offset] - lsr x12, x12, #16 ; Shift to get struct_size - cbz x12, %F1 ; If struct_size == 0, skip advance - add x9, x9, x12 ; Advance x9 by struct_size -1 - ldr x11, [x10], #8 - EPILOG_BRANCH_REG x11 - MEND - - LEAF_ENTRY Load_X0_AtOffset - SwiftLoad_AtOffset x0 - LEAF_END Load_X0_AtOffset - - LEAF_ENTRY Load_X1_AtOffset - SwiftLoad_AtOffset x1 - LEAF_END Load_X1_AtOffset - - LEAF_ENTRY Load_X2_AtOffset - SwiftLoad_AtOffset x2 - LEAF_END Load_X2_AtOffset - - LEAF_ENTRY Load_X3_AtOffset - SwiftLoad_AtOffset x3 - LEAF_END Load_X3_AtOffset - - LEAF_ENTRY Load_X4_AtOffset - SwiftLoad_AtOffset x4 - LEAF_END Load_X4_AtOffset - - LEAF_ENTRY Load_X5_AtOffset - SwiftLoad_AtOffset x5 - LEAF_END Load_X5_AtOffset - - LEAF_ENTRY Load_X6_AtOffset - SwiftLoad_AtOffset x6 - LEAF_END Load_X6_AtOffset - - LEAF_ENTRY Load_X7_AtOffset - SwiftLoad_AtOffset x7 - LEAF_END Load_X7_AtOffset - - MACRO - SwiftLoadFloat_AtOffset $reg - - ldr x12, [x10], #8 ; Load offset|struct_size - and w11, w12, #0xFFFF ; Extract offset (lower 16 bits) - ldr $reg, [x9, x11] ; Load float from [x9 + offset] - lsr x12, x12, #16 ; Shift to get struct_size - cbz x12, %F1 ; If struct_size == 0, skip advance - add x9, x9, x12 ; Advance x9 by struct_size -1 - ldr x11, [x10], #8 - EPILOG_BRANCH_REG x11 - MEND - - LEAF_ENTRY Load_D0_AtOffset - SwiftLoadFloat_AtOffset d0 - LEAF_END Load_D0_AtOffset - - LEAF_ENTRY Load_D1_AtOffset - SwiftLoadFloat_AtOffset d1 - LEAF_END Load_D1_AtOffset - - LEAF_ENTRY Load_D2_AtOffset - SwiftLoadFloat_AtOffset d2 - LEAF_END Load_D2_AtOffset - - LEAF_ENTRY Load_D3_AtOffset - SwiftLoadFloat_AtOffset d3 - LEAF_END Load_D3_AtOffset - - LEAF_ENTRY Load_D4_AtOffset - SwiftLoadFloat_AtOffset d4 - LEAF_END Load_D4_AtOffset - - LEAF_ENTRY Load_D5_AtOffset - SwiftLoadFloat_AtOffset d5 - LEAF_END Load_D5_AtOffset - - LEAF_ENTRY Load_D6_AtOffset - SwiftLoadFloat_AtOffset d6 - LEAF_END Load_D6_AtOffset - - LEAF_ENTRY Load_D7_AtOffset - SwiftLoadFloat_AtOffset d7 - LEAF_END Load_D7_AtOffset - - LEAF_ENTRY Load_Stack_AtOffset - ldr x12, [x10], #8 ; Load offset|structSize|stackOffset - and w11, w12, #0xFFFF ; Extract offset (lower 16 bits) - ldr x13, [x9, x11] ; Load 8 bytes from [x9 + offset] - lsr x14, x12, #32 ; Extract stackOffset (upper 32 bits) - add x14, sp, x14 ; Calculate stack destination - str x13, [x14] ; Store to native stack - ubfx x12, x12, #16, #16 ; Extract structSize (bits 16-31) - cbz x12, %F1 ; If structSize == 0, skip advance - add x9, x9, x12 ; Advance x9 by structSize -1 - ldr x11, [x10], #8 - 
EPILOG_BRANCH_REG x11 - LEAF_END Load_Stack_AtOffset - - MACRO - SwiftStore_AtOffset $reg - - ldr x11, [x10], #8 - and w11, w11, #0xFFFF - str $reg, [x9, x11] - ldr x11, [x10], #8 - br x11 - MEND - - LEAF_ENTRY Store_X0_AtOffset - SwiftStore_AtOffset x0 - LEAF_END Store_X0_AtOffset - - LEAF_ENTRY Store_X1_AtOffset - SwiftStore_AtOffset x1 - LEAF_END Store_X1_AtOffset - - LEAF_ENTRY Store_X2_AtOffset - SwiftStore_AtOffset x2 - LEAF_END Store_X2_AtOffset - - LEAF_ENTRY Store_X3_AtOffset - SwiftStore_AtOffset x3 - LEAF_END Store_X3_AtOffset - - MACRO - SwiftStoreFloat_AtOffset $reg - - ldr x11, [x10], #8 - and w11, w11, #0xFFFF - str $reg, [x9, x11] - ldr x11, [x10], #8 - br x11 - MEND - - LEAF_ENTRY Store_D0_AtOffset - SwiftStoreFloat_AtOffset d0 - LEAF_END Store_D0_AtOffset - - LEAF_ENTRY Store_D1_AtOffset - SwiftStoreFloat_AtOffset d1 - LEAF_END Store_D1_AtOffset - - LEAF_ENTRY Store_D2_AtOffset - SwiftStoreFloat_AtOffset d2 - LEAF_END Store_D2_AtOffset - - LEAF_ENTRY Store_D3_AtOffset - SwiftStoreFloat_AtOffset d3 - LEAF_END Store_D3_AtOffset - LEAF_ENTRY Store_X0 str x0, [x9], #8 ldr x11, [x10], #8 @@ -2768,22 +2563,17 @@ CopyLoop ; X3 - stack arguments size (properly aligned) ; X4 - address of continuation return value NESTED_ENTRY CallJittedMethodRetVoid - PROLOG_SAVE_REG_PAIR fp, lr, #-48! + PROLOG_SAVE_REG_PAIR fp, lr, #-32! str x4, [fp, #16] - str xzr, [fp, #40] sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 - ldr x11, [fp, #40] - cbz x11, CallJittedMethodRetVoid_NoSwiftError - str x10, [x11] -CallJittedMethodRetVoid_NoSwiftError ldr x4, [fp, #16] str x2, [x4] EPILOG_STACK_RESTORE - EPILOG_RESTORE_REG_PAIR fp, lr, #48! + EPILOG_RESTORE_REG_PAIR fp, lr, #32! ret lr NESTED_END CallJittedMethodRetVoid @@ -2793,23 +2583,18 @@ CallJittedMethodRetVoid_NoSwiftError ; X3 - stack arguments size (properly aligned) ; X4 - address of continuation return value NESTED_ENTRY CallJittedMethodRetBuff - PROLOG_SAVE_REG_PAIR fp, lr, #-48! + PROLOG_SAVE_REG_PAIR fp, lr, #-32! str x4, [fp, #16] - str xzr, [fp, #40] sub sp, sp, x3 mov x10, x0 mov x9, x1 mov x8, x2 ldr x11, [x10], #8 blr x11 - ldr x11, [fp, #40] - cbz x11, CallJittedMethodRetBuff_NoSwiftError - str x10, [x11] -CallJittedMethodRetBuff_NoSwiftError ldr x4, [fp, #16] str x2, [x4] EPILOG_STACK_RESTORE - EPILOG_RESTORE_REG_PAIR fp, lr, #48! + EPILOG_RESTORE_REG_PAIR fp, lr, #32! EPILOG_RETURN NESTED_END CallJittedMethodRetBuff @@ -2819,24 +2604,19 @@ CallJittedMethodRetBuff_NoSwiftError ; X3 - stack arguments size (properly aligned) ; X4 - address of continuation return value NESTED_ENTRY CallJittedMethodRetI8 - PROLOG_SAVE_REG_PAIR fp, lr, #-48! + PROLOG_SAVE_REG_PAIR fp, lr, #-32! stp x2, x4, [fp, #16] - str xzr, [fp, #40] sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 - ldr x11, [fp, #40] - cbz x11, CallJittedMethodRetI8_NoSwiftError - str x10, [x11] -CallJittedMethodRetI8_NoSwiftError - ldr x9, [fp, #24] - str x2, [x9] ldr x9, [fp, #16] str x0, [x9] + ldr x9, [fp, #24] + str x2, [x9] EPILOG_STACK_RESTORE - EPILOG_RESTORE_REG_PAIR fp, lr, #48! + EPILOG_RESTORE_REG_PAIR fp, lr, #32! EPILOG_RETURN NESTED_END CallJittedMethodRetI8 @@ -2846,24 +2626,19 @@ CallJittedMethodRetI8_NoSwiftError ; X3 - stack arguments size (properly aligned) ; X4 - address of continuation return value NESTED_ENTRY CallJittedMethodRet2I8 - PROLOG_SAVE_REG_PAIR fp, lr, #-48! + PROLOG_SAVE_REG_PAIR fp, lr, #-32! 
stp x2, x4, [fp, #16] - str xzr, [fp, #40] sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 - ldr x11, [fp, #40] - cbz x11, CallJittedMethodRet2I8_NoSwiftError - str x10, [x11] -CallJittedMethodRet2I8_NoSwiftError - ldr x9, [fp, #24] - str x2, [x9] ldr x9, [fp, #16] stp x0, x1, [x9] + ldr x9, [fp, #24] + str x2, [x9] EPILOG_STACK_RESTORE - EPILOG_RESTORE_REG_PAIR fp, lr, #48! + EPILOG_RESTORE_REG_PAIR fp, lr, #32! EPILOG_RETURN NESTED_END CallJittedMethodRet2I8 @@ -2873,24 +2648,19 @@ CallJittedMethodRet2I8_NoSwiftError ; X3 - stack arguments size (properly aligned) ; X4 - address of continuation return value NESTED_ENTRY CallJittedMethodRetDouble - PROLOG_SAVE_REG_PAIR fp, lr, #-48! + PROLOG_SAVE_REG_PAIR fp, lr, #-32! stp x2, x4, [fp, #16] - str xzr, [fp, #40] sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 - ldr x11, [fp, #40] - cbz x11, CallJittedMethodRetDouble_NoSwiftError - str x10, [x11] -CallJittedMethodRetDouble_NoSwiftError - ldr x9, [fp, #24] - str x2, [x9] ldr x9, [fp, #16] str d0, [x9] + ldr x9, [fp, #24] + str x2, [x9] EPILOG_STACK_RESTORE - EPILOG_RESTORE_REG_PAIR fp, lr, #48! + EPILOG_RESTORE_REG_PAIR fp, lr, #32! EPILOG_RETURN NESTED_END CallJittedMethodRetDouble @@ -2900,24 +2670,19 @@ CallJittedMethodRetDouble_NoSwiftError ; X3 - stack arguments size (properly aligned) ; X4 - address of continuation return value NESTED_ENTRY CallJittedMethodRet2Double - PROLOG_SAVE_REG_PAIR fp, lr, #-48! + PROLOG_SAVE_REG_PAIR fp, lr, #-32! stp x2, x4, [fp, #16] - str xzr, [fp, #40] sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 - ldr x11, [fp, #40] - cbz x11, CallJittedMethodRet2Double_NoSwiftError - str x10, [x11] -CallJittedMethodRet2Double_NoSwiftError - ldr x9, [fp, #24] - str x2, [x9] ldr x9, [fp, #16] stp d0, d1, [x9] + ldr x9, [fp, #24] + str x2, [x9] EPILOG_STACK_RESTORE - EPILOG_RESTORE_REG_PAIR fp, lr, #48! + EPILOG_RESTORE_REG_PAIR fp, lr, #32! EPILOG_RETURN NESTED_END CallJittedMethodRet2Double @@ -2927,25 +2692,20 @@ CallJittedMethodRet2Double_NoSwiftError ; X3 - stack arguments size (properly aligned) ; X4 - address of continuation return value NESTED_ENTRY CallJittedMethodRet3Double - PROLOG_SAVE_REG_PAIR fp, lr, #-48! + PROLOG_SAVE_REG_PAIR fp, lr, #-32! stp x2, x4, [fp, #16] - str xzr, [fp, #40] sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 - ldr x11, [fp, #40] - cbz x11, CallJittedMethodRet3Double_NoSwiftError - str x10, [x11] -CallJittedMethodRet3Double_NoSwiftError - ldr x9, [fp, #24] - str x2, [x9] ldr x9, [fp, #16] stp d0, d1, [x9], #16 str d2, [x9] + ldr x9, [fp, #24] + str x2, [x9] EPILOG_STACK_RESTORE - EPILOG_RESTORE_REG_PAIR fp, lr, #48! + EPILOG_RESTORE_REG_PAIR fp, lr, #32! EPILOG_RETURN NESTED_END CallJittedMethodRet3Double @@ -2955,25 +2715,20 @@ CallJittedMethodRet3Double_NoSwiftError ; X3 - stack arguments size (properly aligned) ; X4 - address of continuation return value NESTED_ENTRY CallJittedMethodRet4Double - PROLOG_SAVE_REG_PAIR fp, lr, #-48! + PROLOG_SAVE_REG_PAIR fp, lr, #-32! stp x2, x4, [fp, #16] - str xzr, [fp, #40] sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 - ldr x11, [fp, #40] - cbz x11, CallJittedMethodRet4Double_NoSwiftError - str x10, [x11] -CallJittedMethodRet4Double_NoSwiftError - ldr x9, [fp, #24] - str x2, [x9] ldr x9, [fp, #16] stp d0, d1, [x9], #16 stp d2, d3, [x9] + ldr x9, [fp, #24] + str x2, [x9] EPILOG_STACK_RESTORE - EPILOG_RESTORE_REG_PAIR fp, lr, #48! + EPILOG_RESTORE_REG_PAIR fp, lr, #32! 
EPILOG_RETURN NESTED_END CallJittedMethodRet4Double @@ -2983,24 +2738,19 @@ CallJittedMethodRet4Double_NoSwiftError ; X3 - stack arguments size (properly aligned) ; X4 - address of continuation return value NESTED_ENTRY CallJittedMethodRetFloat - PROLOG_SAVE_REG_PAIR fp, lr, #-48! + PROLOG_SAVE_REG_PAIR fp, lr, #-32! stp x2, x4, [fp, #16] - str xzr, [fp, #40] sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 - ldr x11, [fp, #40] - cbz x11, CallJittedMethodRetFloat_NoSwiftError - str x10, [x11] -CallJittedMethodRetFloat_NoSwiftError - ldr x9, [fp, #24] - str x2, [x9] ldr x9, [fp, #16] str s0, [x9] + ldr x9, [fp, #24] + str x2, [x9] EPILOG_STACK_RESTORE - EPILOG_RESTORE_REG_PAIR fp, lr, #48! + EPILOG_RESTORE_REG_PAIR fp, lr, #32! EPILOG_RETURN NESTED_END CallJittedMethodRetFloat @@ -3010,24 +2760,19 @@ CallJittedMethodRetFloat_NoSwiftError ; X3 - stack arguments size (properly aligned) ; X4 - address of continuation return value NESTED_ENTRY CallJittedMethodRet2Float - PROLOG_SAVE_REG_PAIR fp, lr, #-48! + PROLOG_SAVE_REG_PAIR fp, lr, #-32! stp x2, x4, [fp, #16] - str xzr, [fp, #40] sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 - ldr x11, [fp, #40] - cbz x11, CallJittedMethodRet2Float_NoSwiftError - str x10, [x11] -CallJittedMethodRet2Float_NoSwiftError - ldr x9, [fp, #24] - str x2, [x9] ldr x9, [fp, #16] stp s0, s1, [x9] + ldr x9, [fp, #24] + str x2, [x9] EPILOG_STACK_RESTORE - EPILOG_RESTORE_REG_PAIR fp, lr, #48! + EPILOG_RESTORE_REG_PAIR fp, lr, #32! EPILOG_RETURN NESTED_END CallJittedMethodRet2Float @@ -3037,25 +2782,20 @@ CallJittedMethodRet2Float_NoSwiftError ; X3 - stack arguments size (properly aligned) ; X4 - address of continuation return value NESTED_ENTRY CallJittedMethodRet3Float - PROLOG_SAVE_REG_PAIR fp, lr, #-48! + PROLOG_SAVE_REG_PAIR fp, lr, #-32! stp x2, x4, [fp, #16] - str xzr, [fp, #40] sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 - ldr x11, [fp, #40] - cbz x11, CallJittedMethodRet3Float_NoSwiftError - str x10, [x11] -CallJittedMethodRet3Float_NoSwiftError - ldr x9, [fp, #24] - str x2, [x9] ldr x9, [fp, #16] stp s0, s1, [x9], #8 str s2, [x9] + ldr x9, [fp, #24] + str x2, [x9] EPILOG_STACK_RESTORE - EPILOG_RESTORE_REG_PAIR fp, lr, #48! + EPILOG_RESTORE_REG_PAIR fp, lr, #32! EPILOG_RETURN NESTED_END CallJittedMethodRet3Float @@ -3065,25 +2805,20 @@ CallJittedMethodRet3Float_NoSwiftError ; X3 - stack arguments size (properly aligned) ; X4 - address of continuation return value NESTED_ENTRY CallJittedMethodRet4Float - PROLOG_SAVE_REG_PAIR fp, lr, #-48! + PROLOG_SAVE_REG_PAIR fp, lr, #-32! stp x2, x4, [fp, #16] - str xzr, [fp, #40] sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 - ldr x11, [fp, #40] - cbz x11, CallJittedMethodRet4Float_NoSwiftError - str x10, [x11] -CallJittedMethodRet4Float_NoSwiftError - ldr x9, [fp, #24] - str x2, [x9] ldr x9, [fp, #16] stp s0, s1, [x9], #8 stp s2, s3, [x9] + ldr x9, [fp, #24] + str x2, [x9] EPILOG_STACK_RESTORE - EPILOG_RESTORE_REG_PAIR fp, lr, #48! + EPILOG_RESTORE_REG_PAIR fp, lr, #32! EPILOG_RETURN NESTED_END CallJittedMethodRet4Float @@ -3093,24 +2828,19 @@ CallJittedMethodRet4Float_NoSwiftError ; X3 - stack arguments size (properly aligned) ; X4 - address of continuation return value NESTED_ENTRY CallJittedMethodRetVector64 - PROLOG_SAVE_REG_PAIR fp, lr, #-48! + PROLOG_SAVE_REG_PAIR fp, lr, #-32! 
stp x2, x4, [fp, #16] - str xzr, [fp, #40] sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 - ldr x11, [fp, #40] - cbz x11, CallJittedMethodRetVector64_NoSwiftError - str x10, [x11] -CallJittedMethodRetVector64_NoSwiftError - ldr x9, [fp, #24] - str x2, [x9] ldr x9, [fp, #16] str d0, [x9] + ldr x9, [fp, #24] + str x2, [x9] EPILOG_STACK_RESTORE - EPILOG_RESTORE_REG_PAIR fp, lr, #48! + EPILOG_RESTORE_REG_PAIR fp, lr, #32! EPILOG_RETURN NESTED_END CallJittedMethodRetVector64 @@ -3120,25 +2850,20 @@ CallJittedMethodRetVector64_NoSwiftError ; X3 - stack arguments size (properly aligned) ; X4 - address of continuation return value NESTED_ENTRY CallJittedMethodRet2Vector64 - PROLOG_SAVE_REG_PAIR fp, lr, #-48! + PROLOG_SAVE_REG_PAIR fp, lr, #-32! stp x2, x4, [fp, #16] - str xzr, [fp, #40] sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 - ldr x11, [fp, #40] - cbz x11, CallJittedMethodRet2Vector64_NoSwiftError - str x10, [x11] -CallJittedMethodRet2Vector64_NoSwiftError - ldr x9, [fp, #24] - str x2, [x9] ldr x9, [fp, #16] str d0, [x9], #8 str d1, [x9] + ldr x9, [fp, #24] + str x2, [x9] EPILOG_STACK_RESTORE - EPILOG_RESTORE_REG_PAIR fp, lr, #48! + EPILOG_RESTORE_REG_PAIR fp, lr, #32! EPILOG_RETURN NESTED_END CallJittedMethodRet2Vector64 @@ -3148,26 +2873,21 @@ CallJittedMethodRet2Vector64_NoSwiftError ; X3 - stack arguments size (properly aligned) ; X4 - address of continuation return value NESTED_ENTRY CallJittedMethodRet3Vector64 - PROLOG_SAVE_REG_PAIR fp, lr, #-48! + PROLOG_SAVE_REG_PAIR fp, lr, #-32! stp x2, x4, [fp, #16] - str xzr, [fp, #40] sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 - ldr x11, [fp, #40] - cbz x11, CallJittedMethodRet3Vector64_NoSwiftError - str x10, [x11] -CallJittedMethodRet3Vector64_NoSwiftError - ldr x9, [fp, #24] - str x2, [x9] ldr x9, [fp, #16] str d0, [x9], #8 str d1, [x9], #8 str d2, [x9] + ldr x9, [fp, #24] + str x2, [x9] EPILOG_STACK_RESTORE - EPILOG_RESTORE_REG_PAIR fp, lr, #48! + EPILOG_RESTORE_REG_PAIR fp, lr, #32! EPILOG_RETURN NESTED_END CallJittedMethodRet3Vector64 @@ -3177,27 +2897,22 @@ CallJittedMethodRet3Vector64_NoSwiftError ; X3 - stack arguments size (properly aligned) ; X4 - address of continuation return value NESTED_ENTRY CallJittedMethodRet4Vector64 - PROLOG_SAVE_REG_PAIR fp, lr, #-48! + PROLOG_SAVE_REG_PAIR fp, lr, #-32! stp x2, x4, [fp, #16] - str xzr, [fp, #40] sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 - ldr x11, [fp, #40] - cbz x11, CallJittedMethodRet4Vector64_NoSwiftError - str x10, [x11] -CallJittedMethodRet4Vector64_NoSwiftError - ldr x9, [fp, #24] - str x2, [x9] ldr x9, [fp, #16] str d0, [x9], #8 str d1, [x9], #8 str d2, [x9], #8 str d3, [x9] + ldr x9, [fp, #24] + str x2, [x9] EPILOG_STACK_RESTORE - EPILOG_RESTORE_REG_PAIR fp, lr, #48! + EPILOG_RESTORE_REG_PAIR fp, lr, #32! EPILOG_RETURN NESTED_END CallJittedMethodRet4Vector64 @@ -3207,24 +2922,19 @@ CallJittedMethodRet4Vector64_NoSwiftError ; X3 - stack arguments size (properly aligned) ; X4 - address of continuation return value NESTED_ENTRY CallJittedMethodRetVector128 - PROLOG_SAVE_REG_PAIR fp, lr, #-48! + PROLOG_SAVE_REG_PAIR fp, lr, #-32! 
stp x2, x4, [fp, #16] - str xzr, [fp, #40] sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 - ldr x11, [fp, #40] - cbz x11, CallJittedMethodRetVector128_NoSwiftError - str x10, [x11] -CallJittedMethodRetVector128_NoSwiftError - ldr x9, [fp, #24] - str x2, [x9] ldr x9, [fp, #16] str q0, [x9] + ldr x9, [fp, #24] + str x2, [x9] EPILOG_STACK_RESTORE - EPILOG_RESTORE_REG_PAIR fp, lr, #48! + EPILOG_RESTORE_REG_PAIR fp, lr, #32! EPILOG_RETURN NESTED_END CallJittedMethodRetVector128 @@ -3234,25 +2944,20 @@ CallJittedMethodRetVector128_NoSwiftError ; X3 - stack arguments size (properly aligned) ; X4 - address of continuation return value NESTED_ENTRY CallJittedMethodRet2Vector128 - PROLOG_SAVE_REG_PAIR fp, lr, #-48! + PROLOG_SAVE_REG_PAIR fp, lr, #-32! stp x2, x4, [fp, #16] - str xzr, [fp, #40] sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 - ldr x11, [fp, #40] - cbz x11, CallJittedMethodRet2Vector128_NoSwiftError - str x10, [x11] -CallJittedMethodRet2Vector128_NoSwiftError - ldr x9, [fp, #24] - str x2, [x9] ldr x9, [fp, #16] str q0, [x9], #16 str q1, [x9] + ldr x9, [fp, #24] + str x2, [x9] EPILOG_STACK_RESTORE - EPILOG_RESTORE_REG_PAIR fp, lr, #48! + EPILOG_RESTORE_REG_PAIR fp, lr, #32! EPILOG_RETURN NESTED_END CallJittedMethodRet2Vector128 @@ -3262,26 +2967,21 @@ CallJittedMethodRet2Vector128_NoSwiftError ; X3 - stack arguments size (properly aligned) ; X4 - address of continuation return value NESTED_ENTRY CallJittedMethodRet3Vector128 - PROLOG_SAVE_REG_PAIR fp, lr, #-48! + PROLOG_SAVE_REG_PAIR fp, lr, #-32! stp x2, x4, [fp, #16] - str xzr, [fp, #40] sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 - ldr x11, [fp, #40] - cbz x11, CallJittedMethodRet3Vector128_NoSwiftError - str x10, [x11] -CallJittedMethodRet3Vector128_NoSwiftError - ldr x9, [fp, #24] - str x2, [x9] ldr x9, [fp, #16] str q0, [x9], #16 str q1, [x9], #16 str q2, [x9] + ldr x9, [fp, #24] + str x2, [x9] EPILOG_STACK_RESTORE - EPILOG_RESTORE_REG_PAIR fp, lr, #48! + EPILOG_RESTORE_REG_PAIR fp, lr, #32! EPILOG_RETURN NESTED_END CallJittedMethodRet3Vector128 @@ -3291,74 +2991,25 @@ CallJittedMethodRet3Vector128_NoSwiftError ; X3 - stack arguments size (properly aligned) ; X4 - address of continuation return value NESTED_ENTRY CallJittedMethodRet4Vector128 - PROLOG_SAVE_REG_PAIR fp, lr, #-48! + PROLOG_SAVE_REG_PAIR fp, lr, #-32! stp x2, x4, [fp, #16] - str xzr, [fp, #40] sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 - ldr x11, [fp, #40] - cbz x11, CallJittedMethodRet4Vector128_NoSwiftError - str x10, [x11] -CallJittedMethodRet4Vector128_NoSwiftError - ldr x9, [fp, #24] - str x2, [x9] ldr x9, [fp, #16] str q0, [x9], #16 str q1, [x9], #16 str q2, [x9], #16 str q3, [x9] + ldr x9, [fp, #24] + str x2, [x9] EPILOG_STACK_RESTORE - EPILOG_RESTORE_REG_PAIR fp, lr, #48! + EPILOG_RESTORE_REG_PAIR fp, lr, #32! EPILOG_RETURN NESTED_END CallJittedMethodRet4Vector128 - ; X0 - routines array - ; X1 - interpreter stack args location - ; X2 - interpreter stack return value location - ; X3 - stack arguments size (properly aligned) - ; X4 - address of continuation return value - NESTED_ENTRY CallJittedMethodRetSwiftLowered - PROLOG_SAVE_REG_PAIR fp, lr, #-64! 
- stp x2, x4, [fp, #16] - str xzr, [fp, #56] - ; Store the return address for the terminator to use - adr x11, CallJittedMethodRetSwiftLowered_Epilog - str x11, [fp, #32] - str x19, [fp, #48] - ldrsw x11, [x0, #-20] ; Load TargetSlotIndex - add x11, x11, #1 ; TargetSlotIndex + 1 - add x11, x0, x11, lsl #3 ; x0 + (TargetSlotIndex + 1) * 8 - str x11, [fp, #40] ; Save store routines start pointer - sub sp, sp, x3 - mov x10, x0 - mov x9, x1 - ldr x11, [x10], #8 - blr x11 - ldr x11, [fp, #56] - cbz x11, CallJittedMethodRetSwiftLowered_NoSwiftError - str x10, [x11] -CallJittedMethodRetSwiftLowered_NoSwiftError - ldr x12, [fp, #24] - str x2, [x12] ; Store continuation return value - ldr x9, [fp, #16] ; Load return buffer address into x9 - ldr x10, [fp, #40] ; Load store routines start pointer - ldr x11, [x10], #8 ; Load first store routine - br x11 -CallJittedMethodRetSwiftLowered_Epilog - ldr x19, [fp, #48] ; Restore x19 - EPILOG_STACK_RESTORE - EPILOG_RESTORE_REG_PAIR fp, lr, #64! - EPILOG_RETURN - NESTED_END CallJittedMethodRetSwiftLowered - - ; Terminator routine branches back to the epilog - LEAF_ENTRY SwiftLoweredReturnTerminator - ldr x11, [fp, #32] - br x11 - LEAF_END SwiftLoweredReturnTerminator #endif // FEATURE_INTERPRETER diff --git a/src/coreclr/vm/callstubgenerator.cpp b/src/coreclr/vm/callstubgenerator.cpp index 2fa918db7173b1..02cfb877ad748f 100644 --- a/src/coreclr/vm/callstubgenerator.cpp +++ b/src/coreclr/vm/callstubgenerator.cpp @@ -593,6 +593,7 @@ extern "C" void Store_X6(); extern "C" void Store_X6_X7(); extern "C" void Store_X7(); +#if defined(TARGET_APPLE) extern "C" void Load_SwiftSelf(); extern "C" void Load_SwiftSelf_ByRef(); extern "C" void Load_SwiftError(); @@ -625,6 +626,7 @@ extern "C" void Store_D1_AtOffset(); extern "C" void Store_D2_AtOffset(); extern "C" void Store_D3_AtOffset(); extern "C" void SwiftLoweredReturnTerminator(); +#endif // TARGET_APPLE extern "C" void Load_Ref_X0(); extern "C" void Load_Ref_X1(); @@ -2002,7 +2004,9 @@ PCODE CallStubGenerator::GetFPReg32RangeRoutine(int x1, int x2) int index = x1 * NUM_FLOAT_ARGUMENT_REGISTERS + x2; return m_interpreterToNative ? 
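// (Editor's note) The same selector pattern runs through all of these accessors:
// interpreter-to-native stubs *load* interpreter stack values into argument
// registers, while native-to-interpreter stubs *store* incoming register values
// onto the interpreter stack, so each routine exists in a Load_/Store_ pair.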
FPRegs32LoadRoutines[index] : FPRegs32StoreRoutines[index]; } +#endif // TARGET_ARM64 +#if defined(TARGET_APPLE) && defined(TARGET_ARM64) PCODE CallStubGenerator::GetSwiftSelfRoutine() { #if LOG_COMPUTE_CALL_STUB @@ -2072,7 +2076,7 @@ PCODE CallStubGenerator::GetSwiftStoreFPAtOffsetRoutine(int regIndex) _ASSERTE(regIndex >= 0 && regIndex < 4); return routines[regIndex]; } -#endif // TARGET_ARM64 +#endif // TARGET_APPLE && TARGET_ARM64 extern "C" void CallJittedMethodRetVoid(PCODE *routines, int8_t*pArgs, int8_t*pRet, int totalStackSize, PTR_PTR_Object pContinuation); extern "C" void CallJittedMethodRetDouble(PCODE *routines, int8_t*pArgs, int8_t*pRet, int totalStackSize, PTR_PTR_Object pContinuation); @@ -2126,7 +2130,9 @@ extern "C" void CallJittedMethodRetVector128(PCODE *routines, int8_t *pArgs, int extern "C" void CallJittedMethodRet2Vector128(PCODE *routines, int8_t *pArgs, int8_t *pRet, int totalStackSize, PTR_PTR_Object pContinuation); extern "C" void CallJittedMethodRet3Vector128(PCODE *routines, int8_t *pArgs, int8_t *pRet, int totalStackSize, PTR_PTR_Object pContinuation); extern "C" void CallJittedMethodRet4Vector128(PCODE *routines, int8_t *pArgs, int8_t *pRet, int totalStackSize, PTR_PTR_Object pContinuation); +#if defined(TARGET_APPLE) extern "C" void CallJittedMethodRetSwiftLowered(PCODE *routines, int8_t*pArgs, int8_t*pRet, int totalStackSize, PTR_PTR_Object pContinuation); +#endif // TARGET_APPLE extern "C" void InterpreterStubRet2I8(); extern "C" void InterpreterStubRet2Double(); extern "C" void InterpreterStubRet3Double(); @@ -2143,7 +2149,9 @@ extern "C" void InterpreterStubRetVector128(); extern "C" void InterpreterStubRet2Vector128(); extern "C" void InterpreterStubRet3Vector128(); extern "C" void InterpreterStubRet4Vector128(); +#if defined(TARGET_APPLE) extern "C" void InterpreterStubRetSwiftLowered(); +#endif // TARGET_APPLE #endif // TARGET_ARM64 #if defined(TARGET_RISCV64) @@ -2234,8 +2242,10 @@ CallStubHeader::InvokeFunctionPtr CallStubGenerator::GetInvokeFunctionPtr(CallSt INVOKE_FUNCTION_PTR(CallJittedMethodRet3Vector128); case ReturnType4Vector128: INVOKE_FUNCTION_PTR(CallJittedMethodRet4Vector128); +#if defined(TARGET_APPLE) case ReturnTypeSwiftLowered: INVOKE_FUNCTION_PTR(CallJittedMethodRetSwiftLowered); +#endif // TARGET_APPLE #endif // TARGET_ARM64 #if defined(TARGET_RISCV64) case ReturnType2I8: @@ -2331,8 +2341,10 @@ PCODE CallStubGenerator::GetInterpreterReturnTypeHandler(CallStubGenerator::Retu RETURN_TYPE_HANDLER(InterpreterStubRet3Vector128); case ReturnType4Vector128: RETURN_TYPE_HANDLER(InterpreterStubRet4Vector128); +#if defined(TARGET_APPLE) case ReturnTypeSwiftLowered: RETURN_TYPE_HANDLER(InterpreterStubRetSwiftLowered); +#endif // TARGET_APPLE #endif // TARGET_ARM64 #if defined(TARGET_RISCV64) case ReturnType2I8: @@ -2556,6 +2568,7 @@ void CallStubGenerator::TerminateCurrentRoutineIfNotOfNewType(RoutineType type, m_x1 = NoRange; m_currentRoutineType = RoutineType::None; } +#if defined(TARGET_APPLE) else if ((m_currentRoutineType == RoutineType::SwiftSelf) && (type != RoutineType::SwiftSelf)) { pRoutines[m_routineIndex++] = GetSwiftSelfRoutine(); @@ -2578,6 +2591,7 @@ void CallStubGenerator::TerminateCurrentRoutineIfNotOfNewType(RoutineType type, pRoutines[m_routineIndex++] = GetSwiftIndirectResultRoutine(); m_currentRoutineType = RoutineType::None; } +#endif // TARGET_APPLE #endif // TARGET_ARM64 else if ((m_currentRoutineType == RoutineType::Stack) && (type != RoutineType::Stack)) { @@ -2624,7 +2638,7 @@ bool 
isNativePrimitiveStructType(MethodTable* pMT) return strcmp(typeName, "CLong") == 0 || strcmp(typeName, "CULong") == 0 || strcmp(typeName, "NFloat") == 0; } -#ifdef TARGET_ARM64 +#if defined(TARGET_APPLE) && defined(TARGET_ARM64) //--------------------------------------------------------------------------- // isSwiftSelfType: // Check if the given type is SwiftSelf. @@ -2779,7 +2793,7 @@ bool isIntrinsicSIMDType(MethodTable* pMT) return false; } -#endif // TARGET_ARM64 +#endif // TARGET_APPLE && TARGET_ARM64 void CallStubGenerator::ComputeCallStub(MetaSig &sig, PCODE *pRoutines, MethodDesc *pMD) { @@ -2850,7 +2864,7 @@ void CallStubGenerator::ComputeCallStubWorker(bool hasUnmanagedCallConv, CorInfo { bool unmanagedThisCallConv = false; bool rewriteMetaSigFromExplicitThisToHasThis = false; -#ifdef TARGET_ARM64 +#if defined(TARGET_APPLE) && defined(TARGET_ARM64) bool isSwiftCallConv = false; #endif @@ -2864,7 +2878,7 @@ void CallStubGenerator::ComputeCallStubWorker(bool hasUnmanagedCallConv, CorInfo case CorInfoCallConvExtension::FastcallMemberFunction: unmanagedThisCallConv = true; break; -#ifdef TARGET_ARM64 +#if defined(TARGET_APPLE) && defined(TARGET_ARM64) case CorInfoCallConvExtension::Swift: isSwiftCallConv = true; break; @@ -2955,7 +2969,7 @@ void CallStubGenerator::ComputeCallStubWorker(bool hasUnmanagedCallConv, CorInfo sig = newSig; } -#ifdef TARGET_ARM64 +#if defined(TARGET_APPLE) && defined(TARGET_ARM64) // Swift lowering info for expanded struct elements struct SwiftLoweringElement { uint16_t offset; // Offset within struct @@ -3190,7 +3204,7 @@ void CallStubGenerator::ComputeCallStubWorker(bool hasUnmanagedCallConv, CorInfo sig = swiftSig; } int swiftArgIndex = 0; -#endif +#endif // TARGET_APPLE && TARGET_ARM64 ArgIteratorType argIt(&sig); int32_t interpreterStackOffset = 0; @@ -3204,7 +3218,7 @@ void CallStubGenerator::ComputeCallStubWorker(bool hasUnmanagedCallConv, CorInfo m_s2 = 0; m_routineIndex = 0; m_totalStackSize = argIt.SizeOfArgStack(); -#ifdef TARGET_ARM64 +#if defined(TARGET_APPLE) && defined(TARGET_ARM64) m_swiftSelfByRefSize = 0; m_hasSwiftIndirectResult = (isSwiftCallConv && swiftIndirectResultCount > 0); #endif @@ -3250,7 +3264,7 @@ void CallStubGenerator::ComputeCallStubWorker(bool hasUnmanagedCallConv, CorInfo interpreterStackOffset += INTERP_STACK_SLOT_SIZE; } -#ifdef TARGET_ARM64 +#if defined(TARGET_APPLE) && defined(TARGET_ARM64) if (m_hasSwiftIndirectResult) { #if LOG_COMPUTE_CALL_STUB @@ -3279,7 +3293,7 @@ void CallStubGenerator::ComputeCallStubWorker(bool hasUnmanagedCallConv, CorInfo TypeHandle thArgTypeHandle; CorElementType argCorType = argIt.GetArgType(&thArgTypeHandle); -#ifdef TARGET_ARM64 +#if defined(TARGET_APPLE) && defined(TARGET_ARM64) if (isSwiftCallConv) { if (argCorType == ELEMENT_TYPE_BYREF) @@ -3338,7 +3352,7 @@ void CallStubGenerator::ComputeCallStubWorker(bool hasUnmanagedCallConv, CorInfo } } } -#endif // TARGET_ARM64 +#endif // TARGET_APPLE && TARGET_ARM64 if ((argCorType == ELEMENT_TYPE_VALUETYPE) && thArgTypeHandle.GetSize() > 8) { @@ -3369,7 +3383,7 @@ void CallStubGenerator::ComputeCallStubWorker(bool hasUnmanagedCallConv, CorInfo } interpreterStackOffset += interpStackSlotSize; -#ifdef TARGET_ARM64 +#if defined(TARGET_APPLE) && defined(TARGET_ARM64) if (isSwiftCallConv && m_interpreterToNative && swiftArgIndex < swiftLoweringCount) { SwiftLoweringElement& elem = swiftLoweringInfo[swiftArgIndex]; @@ -3420,7 +3434,7 @@ void CallStubGenerator::ComputeCallStubWorker(bool hasUnmanagedCallConv, CorInfo continue; } } -#endif // 
TARGET_ARM64 +#endif // TARGET_APPLE && TARGET_ARM64 #ifdef UNIX_AMD64_ABI ArgLocDesc* argLocDescForStructInRegs = argIt.GetArgLocDescForStructInRegs(); @@ -3491,7 +3505,7 @@ void CallStubGenerator::ComputeCallStubWorker(bool hasUnmanagedCallConv, CorInfo m_targetSlotIndex = m_routineIndex; m_routineIndex++; // Reserve one extra slot for the target method pointer -#ifdef TARGET_ARM64 +#if defined(TARGET_APPLE) && defined(TARGET_ARM64) if (m_hasSwiftReturnLowering) { int gpRegIndex = 0; @@ -3528,7 +3542,7 @@ void CallStubGenerator::ComputeCallStubWorker(bool hasUnmanagedCallConv, CorInfo pRoutines[m_routineIndex++] = (PCODE)SwiftLoweredReturnTerminator; } -#endif // TARGET_ARM64 +#endif // TARGET_APPLE && TARGET_ARM64 } else { @@ -3744,12 +3758,12 @@ void CallStubGenerator::ProcessArgument(ArgIteratorType *pArgIt, ArgLocDesc& arg template CallStubGenerator::ReturnType CallStubGenerator::GetReturnType(ArgIteratorType *pArgIt) { -#ifdef TARGET_ARM64 +#if defined(TARGET_APPLE) && defined(TARGET_ARM64) if (m_hasSwiftReturnLowering) { return ReturnTypeSwiftLowered; } -#endif // TARGET_ARM64 +#endif // TARGET_APPLE && TARGET_ARM64 if (pArgIt->HasRetBuffArg()) { diff --git a/src/coreclr/vm/callstubgenerator.h b/src/coreclr/vm/callstubgenerator.h index eac23d30a4ce05..51ccd59f64c607 100644 --- a/src/coreclr/vm/callstubgenerator.h +++ b/src/coreclr/vm/callstubgenerator.h @@ -105,7 +105,9 @@ class CallStubGenerator ReturnType2Vector128, ReturnType3Vector128, ReturnType4Vector128, +#if defined(TARGET_APPLE) ReturnTypeSwiftLowered, +#endif // TARGET_APPLE #endif // TARGET_ARM64 #if defined(TARGET_RISCV64) ReturnType2I8, @@ -123,11 +125,13 @@ class CallStubGenerator #ifdef TARGET_ARM64 FPReg32, FPReg128, +#if defined(TARGET_APPLE) SwiftSelf, SwiftSelfByRef, SwiftError, SwiftIndirectResult, -#endif +#endif // TARGET_APPLE +#endif // TARGET_ARM64 Stack }; @@ -151,7 +155,7 @@ class CallStubGenerator int m_targetSlotIndex = -1; // The total stack size used for the arguments. int m_totalStackSize = 0; -#ifdef TARGET_ARM64 +#if defined(TARGET_APPLE) && defined(TARGET_ARM64) // Size of struct for SwiftSelf int m_swiftSelfByRefSize = 0; // Track if SwiftIndirectResult was used @@ -159,7 +163,7 @@ class CallStubGenerator // Swift return lowering info CORINFO_SWIFT_LOWERING m_swiftReturnLowering = {}; bool m_hasSwiftReturnLowering = false; -#endif +#endif // TARGET_APPLE && TARGET_ARM64 CallStubHeader::InvokeFunctionPtr m_pInvokeFunction = NULL; bool m_interpreterToNative = false; @@ -178,6 +182,7 @@ class CallStubGenerator #ifdef TARGET_ARM64 PCODE GetFPReg128RangeRoutine(int x1, int x2); PCODE GetFPReg32RangeRoutine(int x1, int x2); +#if defined(TARGET_APPLE) PCODE GetSwiftSelfRoutine(); PCODE GetSwiftSelfByRefRoutine(); PCODE GetSwiftErrorRoutine(); @@ -186,7 +191,8 @@ class CallStubGenerator PCODE GetSwiftLoadFPAtOffsetRoutine(int regIndex); PCODE GetSwiftStoreGPAtOffsetRoutine(int regIndex); PCODE GetSwiftStoreFPAtOffsetRoutine(int regIndex); -#endif +#endif // TARGET_APPLE +#endif // TARGET_ARM64 PCODE GetGPRegRangeRoutine(int r1, int r2); template ReturnType GetReturnType(ArgIteratorType *pArgIt); From 675c6ef6e90987abb7614733c1fa4ed2358a9c9b Mon Sep 17 00:00:00 2001 From: Jan Vorlicek Date: Fri, 16 Jan 2026 19:22:36 +0100 Subject: [PATCH 12/33] Fix SwiftError handling The SwiftError argument is a pointer (ELEMENT_TYPE_PTR), so it needs to be handled differently. Also fixes the register the asm helpers store as the swift error. The correct register is x21 while the code was using x10. 
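(Editor's note) For context on the first fix: a SwiftError parameter is declared as a pointer in the managed signature, so the argument iterator reports ELEMENT_TYPE_PTR rather than ELEMENT_TYPE_VALUETYPE or ELEMENT_TYPE_BYREF, and the SwiftError MethodTable has to be recovered through the pointer TypeDesc. A minimal sketch of the detection pattern this patch introduces (consolidated from the hunks below, not a drop-in):

    // For a T* argument, the TypeHandle is a TypeDesc whose type param is T.
    CorElementType argCorType = argIt.GetArgType(&thArgTypeHandle);
    if (argCorType == ELEMENT_TYPE_PTR)
    {
        MetaSig *pSig = argIt.GetSig();
        TypeHandle th = pSig->GetLastTypeHandleThrowing();
        _ASSERTE(th.IsTypeDesc());
        MethodTable *pArgMT = th.AsTypeDesc()->GetTypeParam().AsMethodTable();
        if (isSwiftErrorType(pArgMT))
        {
            // SwiftError is pinned to x21 by the Swift calling convention,
            // hence the x10 -> x21 change in the asm helpers below.
        }
    }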
--- src/coreclr/vm/arm64/asmhelpers.S | 42 ++++++++++++------------ src/coreclr/vm/callstubgenerator.cpp | 48 ++++++++++++++++++---------- 2 files changed, 52 insertions(+), 38 deletions(-) diff --git a/src/coreclr/vm/arm64/asmhelpers.S b/src/coreclr/vm/arm64/asmhelpers.S index df07cab1ceceb6..1cbfeccaefee65 100644 --- a/src/coreclr/vm/arm64/asmhelpers.S +++ b/src/coreclr/vm/arm64/asmhelpers.S @@ -2513,7 +2513,7 @@ NESTED_ENTRY CallJittedMethodRetVoid, _TEXT, NoHandler blr x11 ldr x11, [fp, #40] cbz x11, LOCAL_LABEL(CallJittedMethodRetVoid_NoSwiftError) - str x10, [x11] + str x21, [x11] LOCAL_LABEL(CallJittedMethodRetVoid_NoSwiftError): ldr x4, [fp, #16] str x2, [x4] @@ -2539,7 +2539,7 @@ NESTED_ENTRY CallJittedMethodRetBuff, _TEXT, NoHandler blr x11 ldr x11, [fp, #40] cbz x11, LOCAL_LABEL(CallJittedMethodRetBuff_NoSwiftError) - str x10, [x11] + str x21, [x11] LOCAL_LABEL(CallJittedMethodRetBuff_NoSwiftError): ldr x4, [fp, #16] str x2, [x4] @@ -2564,7 +2564,7 @@ NESTED_ENTRY CallJittedMethodRetI8, _TEXT, NoHandler blr x11 ldr x11, [fp, #40] cbz x11, LOCAL_LABEL(CallJittedMethodRetI8_NoSwiftError) - str x10, [x11] + str x21, [x11] LOCAL_LABEL(CallJittedMethodRetI8_NoSwiftError): ldr x9, [fp, #24] str x2, [x9] @@ -2591,7 +2591,7 @@ NESTED_ENTRY CallJittedMethodRet2I8, _TEXT, NoHandler blr x11 ldr x11, [fp, #40] cbz x11, LOCAL_LABEL(CallJittedMethodRet2I8_NoSwiftError) - str x10, [x11] + str x21, [x11] LOCAL_LABEL(CallJittedMethodRet2I8_NoSwiftError): ldr x9, [fp, #24] str x2, [x9] @@ -2618,7 +2618,7 @@ NESTED_ENTRY CallJittedMethodRetDouble, _TEXT, NoHandler blr x11 ldr x11, [fp, #40] cbz x11, LOCAL_LABEL(CallJittedMethodRetDouble_NoSwiftError) - str x10, [x11] + str x21, [x11] LOCAL_LABEL(CallJittedMethodRetDouble_NoSwiftError): ldr x9, [fp, #24] str x2, [x9] @@ -2645,7 +2645,7 @@ NESTED_ENTRY CallJittedMethodRet2Double, _TEXT, NoHandler blr x11 ldr x11, [fp, #40] cbz x11, LOCAL_LABEL(CallJittedMethodRet2Double_NoSwiftError) - str x10, [x11] + str x21, [x11] LOCAL_LABEL(CallJittedMethodRet2Double_NoSwiftError): ldr x9, [fp, #24] str x2, [x9] @@ -2672,7 +2672,7 @@ NESTED_ENTRY CallJittedMethodRet3Double, _TEXT, NoHandler blr x11 ldr x11, [fp, #40] cbz x11, LOCAL_LABEL(CallJittedMethodRet3Double_NoSwiftError) - str x10, [x11] + str x21, [x11] LOCAL_LABEL(CallJittedMethodRet3Double_NoSwiftError): ldr x9, [fp, #24] str x2, [x9] @@ -2700,7 +2700,7 @@ NESTED_ENTRY CallJittedMethodRet4Double, _TEXT, NoHandler blr x11 ldr x11, [fp, #40] cbz x11, LOCAL_LABEL(CallJittedMethodRet4Double_NoSwiftError) - str x10, [x11] + str x21, [x11] LOCAL_LABEL(CallJittedMethodRet4Double_NoSwiftError): ldr x9, [fp, #24] str x2, [x9] @@ -2728,7 +2728,7 @@ NESTED_ENTRY CallJittedMethodRetFloat, _TEXT, NoHandler blr x11 ldr x11, [fp, #40] cbz x11, LOCAL_LABEL(CallJittedMethodRetFloat_NoSwiftError) - str x10, [x11] + str x21, [x11] LOCAL_LABEL(CallJittedMethodRetFloat_NoSwiftError): ldr x9, [fp, #24] str x2, [x9] @@ -2755,7 +2755,7 @@ NESTED_ENTRY CallJittedMethodRet2Float, _TEXT, NoHandler blr x11 ldr x11, [fp, #40] cbz x11, LOCAL_LABEL(CallJittedMethodRet2Float_NoSwiftError) - str x10, [x11] + str x21, [x11] LOCAL_LABEL(CallJittedMethodRet2Float_NoSwiftError): ldr x9, [fp, #24] str x2, [x9] @@ -2782,7 +2782,7 @@ NESTED_ENTRY CallJittedMethodRet3Float, _TEXT, NoHandler blr x11 ldr x11, [fp, #40] cbz x11, LOCAL_LABEL(CallJittedMethodRet3Float_NoSwiftError) - str x10, [x11] + str x21, [x11] LOCAL_LABEL(CallJittedMethodRet3Float_NoSwiftError): ldr x9, [fp, #24] str x2, [x9] @@ -2810,7 +2810,7 @@ NESTED_ENTRY 
CallJittedMethodRet4Float, _TEXT, NoHandler blr x11 ldr x11, [fp, #40] cbz x11, LOCAL_LABEL(CallJittedMethodRet4Float_NoSwiftError) - str x10, [x11] + str x21, [x11] LOCAL_LABEL(CallJittedMethodRet4Float_NoSwiftError): ldr x9, [fp, #24] str x2, [x9] @@ -2838,7 +2838,7 @@ NESTED_ENTRY CallJittedMethodRetVector64, _TEXT, NoHandler blr x11 ldr x11, [fp, #40] cbz x11, LOCAL_LABEL(CallJittedMethodRetVector64_NoSwiftError) - str x10, [x11] + str x21, [x11] LOCAL_LABEL(CallJittedMethodRetVector64_NoSwiftError): ldr x9, [fp, #24] str x2, [x9] @@ -2865,7 +2865,7 @@ NESTED_ENTRY CallJittedMethodRet2Vector64, _TEXT, NoHandler blr x11 ldr x11, [fp, #40] cbz x11, LOCAL_LABEL(CallJittedMethodRet2Vector64_NoSwiftError) - str x10, [x11] + str x21, [x11] LOCAL_LABEL(CallJittedMethodRet2Vector64_NoSwiftError): ldr x9, [fp, #24] str x2, [x9] @@ -2893,7 +2893,7 @@ NESTED_ENTRY CallJittedMethodRet3Vector64, _TEXT, NoHandler blr x11 ldr x11, [fp, #40] cbz x11, LOCAL_LABEL(CallJittedMethodRet3Vector64_NoSwiftError) - str x10, [x11] + str x21, [x11] LOCAL_LABEL(CallJittedMethodRet3Vector64_NoSwiftError): ldr x9, [fp, #24] str x2, [x9] @@ -2922,7 +2922,7 @@ NESTED_ENTRY CallJittedMethodRet4Vector64, _TEXT, NoHandler blr x11 ldr x11, [fp, #40] cbz x11, LOCAL_LABEL(CallJittedMethodRet4Vector64_NoSwiftError) - str x10, [x11] + str x21, [x11] LOCAL_LABEL(CallJittedMethodRet4Vector64_NoSwiftError): ldr x9, [fp, #24] str x2, [x9] @@ -2952,7 +2952,7 @@ NESTED_ENTRY CallJittedMethodRetVector128, _TEXT, NoHandler blr x11 ldr x11, [fp, #40] cbz x11, LOCAL_LABEL(CallJittedMethodRetVector128_NoSwiftError) - str x10, [x11] + str x21, [x11] LOCAL_LABEL(CallJittedMethodRetVector128_NoSwiftError): ldr x9, [fp, #24] str x2, [x9] @@ -2979,7 +2979,7 @@ NESTED_ENTRY CallJittedMethodRet2Vector128, _TEXT, NoHandler blr x11 ldr x11, [fp, #40] cbz x11, LOCAL_LABEL(CallJittedMethodRet2Vector128_NoSwiftError) - str x10, [x11] + str x21, [x11] LOCAL_LABEL(CallJittedMethodRet2Vector128_NoSwiftError): ldr x9, [fp, #24] str x2, [x9] @@ -3007,7 +3007,7 @@ NESTED_ENTRY CallJittedMethodRet3Vector128, _TEXT, NoHandler blr x11 ldr x11, [fp, #40] cbz x11, LOCAL_LABEL(CallJittedMethodRet3Vector128_NoSwiftError) - str x10, [x11] + str x21, [x11] LOCAL_LABEL(CallJittedMethodRet3Vector128_NoSwiftError): ldr x9, [fp, #24] str x2, [x9] @@ -3036,7 +3036,7 @@ NESTED_ENTRY CallJittedMethodRet4Vector128, _TEXT, NoHandler blr x11 ldr x11, [fp, #40] cbz x11, LOCAL_LABEL(CallJittedMethodRet4Vector128_NoSwiftError) - str x10, [x11] + str x21, [x11] LOCAL_LABEL(CallJittedMethodRet4Vector128_NoSwiftError): ldr x9, [fp, #24] str x2, [x9] @@ -3074,7 +3074,7 @@ NESTED_ENTRY CallJittedMethodRetSwiftLowered, _TEXT, NoHandler blr x11 ldr x11, [fp, #56] cbz x11, LOCAL_LABEL(CallJittedMethodRetSwiftLowered_NoSwiftError) - str x10, [x11] + str x21, [x11] LOCAL_LABEL(CallJittedMethodRetSwiftLowered_NoSwiftError): ldr x12, [fp, #24] str x2, [x12] // Store continuation return value diff --git a/src/coreclr/vm/callstubgenerator.cpp b/src/coreclr/vm/callstubgenerator.cpp index 02cfb877ad748f..d3a4e088d40dca 100644 --- a/src/coreclr/vm/callstubgenerator.cpp +++ b/src/coreclr/vm/callstubgenerator.cpp @@ -3028,6 +3028,26 @@ void CallStubGenerator::ComputeCallStubWorker(bool hasUnmanagedCallConv, CorInfo sig.GetByRefType(&thArgType); } + if (thArgType.IsTypeDesc() && !thArgType.AsTypeDesc()->GetTypeParam().IsNull())) + { + MethodTable* pArgMT = thArgType.AsTypeDesc()->GetTypeParam().AsMethodTable(); + if (!pArgMT->IsValueType()) + { + COMPlusThrow(kInvalidProgramException); + } + + 
if (isSwiftErrorType(pArgMT)) + { + swiftErrorCount++; + if (swiftErrorCount > 1) + { + COMPlusThrow(kInvalidProgramException); + } + newArgCount++; + continue; + } + } + if (!thArgType.IsNull() && !thArgType.IsTypeDesc()) { MethodTable* pArgMT = thArgType.AsMethodTable(); @@ -3053,21 +3073,6 @@ void CallStubGenerator::ComputeCallStubWorker(bool hasUnmanagedCallConv, CorInfo continue; } - if (isSwiftErrorType(pArgMT)) - { - if (!isByRef) - { - COMPlusThrow(kInvalidProgramException); - } - swiftErrorCount++; - if (swiftErrorCount > 1) - { - COMPlusThrow(kInvalidProgramException); - } - newArgCount++; - continue; - } - if (isSwiftIndirectResultType(pArgMT)) { swiftIndirectResultCount++; @@ -3120,7 +3125,7 @@ void CallStubGenerator::ComputeCallStubWorker(bool hasUnmanagedCallConv, CorInfo continue; } // Don't lower Swift* types except SwiftSelf - if (isSwiftSelfType(pArgMT) || isSwiftErrorType(pArgMT)) + if (isSwiftSelfType(pArgMT)) { SigPointer pArg = sig.GetArgProps(); pArg.ConvertToInternalExactlyOne(sig.GetModule(), sig.GetSigTypeContext(), &swiftSigBuilder); @@ -3339,7 +3344,16 @@ void CallStubGenerator::ComputeCallStubWorker(bool hasUnmanagedCallConv, CorInfo interpreterStackOffset += interpStackSlotSize; continue; } - if (pArgMT != nullptr && isSwiftErrorType(pArgMT)) + } + + if (argCorType == ELEMENT_TYPE_PTR) + { + // Get the TypeHandle for the argument's type. + MetaSig *pSig = argIt.GetSig(); + thArgTypeHandle = pSig->GetLastTypeHandleThrowing(); + _ASSERTE(thArgTypeHandle.IsTypeDesc()); + MethodTable* pArgMT = thArgTypeHandle.AsTypeDesc()->GetTypeParam().AsMethodTable(); + if (isSwiftErrorType(pArgMT)) { #if LOG_COMPUTE_CALL_STUB printf("Swift Error argument detected\n"); From 489d41a0dcd86936b544a52bdeacc2095f2cb07f Mon Sep 17 00:00:00 2001 From: Jan Vorlicek Date: Tue, 20 Jan 2026 15:53:52 +0100 Subject: [PATCH 13/33] Apply suggestion from @janvorli Fix extra paren --- src/coreclr/vm/callstubgenerator.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/coreclr/vm/callstubgenerator.cpp b/src/coreclr/vm/callstubgenerator.cpp index d3a4e088d40dca..3e236db68f0bae 100644 --- a/src/coreclr/vm/callstubgenerator.cpp +++ b/src/coreclr/vm/callstubgenerator.cpp @@ -3028,7 +3028,7 @@ void CallStubGenerator::ComputeCallStubWorker(bool hasUnmanagedCallConv, CorInfo sig.GetByRefType(&thArgType); } - if (thArgType.IsTypeDesc() && !thArgType.AsTypeDesc()->GetTypeParam().IsNull())) + if (thArgType.IsTypeDesc() && !thArgType.AsTypeDesc()->GetTypeParam().IsNull()) { MethodTable* pArgMT = thArgType.AsTypeDesc()->GetTypeParam().AsMethodTable(); if (!pArgMT->IsValueType()) From a02c1e80660dd5b6b0c93e17ecfde9d868cd5f09 Mon Sep 17 00:00:00 2001 From: Milos Kotlar Date: Mon, 26 Jan 2026 18:07:23 +0100 Subject: [PATCH 14/33] Update OFFSETOF__CallStubHeader__Routines to match the correct offset --- src/coreclr/vm/arm/asmconstants.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/coreclr/vm/arm/asmconstants.h b/src/coreclr/vm/arm/asmconstants.h index e07132357c5cdd..8a994cd700eab7 100644 --- a/src/coreclr/vm/arm/asmconstants.h +++ b/src/coreclr/vm/arm/asmconstants.h @@ -221,7 +221,7 @@ ASMCONSTANTS_C_ASSERT(OFFSETOF__Thread__m_pInterpThreadContext == offsetof(Threa #define OFFSETOF__InterpThreadContext__pStackPointer 0x8 ASMCONSTANTS_C_ASSERT(OFFSETOF__InterpThreadContext__pStackPointer == offsetof(InterpThreadContext, pStackPointer)) -#define OFFSETOF__CallStubHeader__Routines 0x10 +#define OFFSETOF__CallStubHeader__Routines 0x14 
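// (Editor's note) These offsets are hand-maintained mirrors of C++ struct layout
// consumed by assembly; the C_ASSERT below is what catches drift. Illustrative
// pattern only, with a hypothetical layout (five 4-byte fields on 32-bit ARM):
//   struct Example { void* a; void* b; int c; int d; int e; PCODE Routines[1]; };
//   static_assert(offsetof(Example, Routines) == 0x14, "update asmconstants.h");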
ASMCONSTANTS_C_ASSERT(OFFSETOF__CallStubHeader__Routines == offsetof(CallStubHeader, Routines)) #define SIZEOF__TransitionBlock 0x34 From 0b7152e3c6f19427ae5d9e423ee64fde64187077 Mon Sep 17 00:00:00 2001 From: Milos Kotlar Date: Wed, 28 Jan 2026 14:18:17 +0100 Subject: [PATCH 15/33] Add interpreter stub epilog transition for P/Invokes with SwiftError --- src/coreclr/pal/inc/unixasmmacrosarm64.inc | 17 ++- src/coreclr/vm/arm64/asmhelpers.S | 8 +- src/coreclr/vm/callstubgenerator.cpp | 125 ++++++++++----------- 3 files changed, 84 insertions(+), 66 deletions(-) diff --git a/src/coreclr/pal/inc/unixasmmacrosarm64.inc b/src/coreclr/pal/inc/unixasmmacrosarm64.inc index 258d3e8c4c28c5..b8aaf85ae403bc 100644 --- a/src/coreclr/pal/inc/unixasmmacrosarm64.inc +++ b/src/coreclr/pal/inc/unixasmmacrosarm64.inc @@ -309,6 +309,21 @@ C_FUNC(\Name\()_End): .endm +// Same as EPILOG_WITH_TRANSITION_BLOCK_RETURN but skips restoring x21 (SwiftError) +.macro EPILOG_WITH_TRANSITION_BLOCK_RETURN_SKIP_SWIFT_ERROR + + EPILOG_STACK_FREE __PWTB_StackAlloc + + EPILOG_RESTORE_REG_PAIR x19, x20, 16 + EPILOG_RESTORE_REG x22, 40 + EPILOG_RESTORE_REG_PAIR x23, x24, 48 + EPILOG_RESTORE_REG_PAIR x25, x26, 64 + EPILOG_RESTORE_REG_PAIR x27, x28, 80 + EPILOG_RESTORE_REG_PAIR_INDEXED fp, lr, 176 + ret + +.endm + //----------------------------------------------------------------------------- // Provides a matching epilog to PROLOG_WITH_TRANSITION_BLOCK and ends by preparing for tail-calling. // Since this is a tail call argument registers are restored. @@ -410,7 +425,7 @@ C_FUNC(\Name\()_End): PROLOG_SAVE_REG_PAIR x27, x28, 80 mov \target, sp -.endm +.endm .macro POP_COOP_PINVOKE_FRAME EPILOG_RESTORE_REG_PAIR x19, x20, 16 diff --git a/src/coreclr/vm/arm64/asmhelpers.S b/src/coreclr/vm/arm64/asmhelpers.S index cf84a1db09b7c7..7b0fe26309f546 100644 --- a/src/coreclr/vm/arm64/asmhelpers.S +++ b/src/coreclr/vm/arm64/asmhelpers.S @@ -748,8 +748,12 @@ LOCAL_LABEL(HaveInterpThreadContext): // Fill in the ContinuationContext register ldr x2, [sp, #(__PWTB_ArgumentRegister_FirstArg + 16)] - // Restore SwiftError to scratch reg - mov x10, x21 + // Check if caller has SwiftError + ldr x11, [fp] + ldr x11, [x11, #40] + cbz x11, LOCAL_LABEL(InterpreterStub_NoSwiftError) + EPILOG_WITH_TRANSITION_BLOCK_RETURN_SKIP_SWIFT_ERROR +LOCAL_LABEL(InterpreterStub_NoSwiftError): EPILOG_WITH_TRANSITION_BLOCK_RETURN NESTED_END InterpreterStub, _TEXT diff --git a/src/coreclr/vm/callstubgenerator.cpp b/src/coreclr/vm/callstubgenerator.cpp index 5047b4cabc313c..b9580c27184574 100644 --- a/src/coreclr/vm/callstubgenerator.cpp +++ b/src/coreclr/vm/callstubgenerator.cpp @@ -3238,37 +3238,25 @@ void CallStubGenerator::ComputeCallStubWorker(bool hasUnmanagedCallConv, CorInfo while ((argType = sig.NextArg()) != ELEMENT_TYPE_END) { TypeHandle thArgType = sig.GetLastTypeHandleThrowing(); - bool isByRef = (argType == ELEMENT_TYPE_BYREF); + MethodTable* pArgMT = nullptr; - if (isByRef) + if (argType == ELEMENT_TYPE_BYREF) { sig.GetByRefType(&thArgType); } + // Extract the underlying MT for pointer types or unwrapped byrefs if (thArgType.IsTypeDesc() && !thArgType.AsTypeDesc()->GetTypeParam().IsNull()) { - MethodTable* pArgMT = thArgType.AsTypeDesc()->GetTypeParam().AsMethodTable(); - if (!pArgMT->IsValueType()) - { - COMPlusThrow(kInvalidProgramException); - } - - if (isSwiftErrorType(pArgMT)) - { - swiftErrorCount++; - if (swiftErrorCount > 1) - { - COMPlusThrow(kInvalidProgramException); - } - newArgCount++; - continue; - } + pArgMT = 
thArgType.AsTypeDesc()->GetTypeParam().AsMethodTable(); } - - if (!thArgType.IsNull() && !thArgType.IsTypeDesc()) + else if (!thArgType.IsTypeDesc() && !thArgType.IsNull()) { - MethodTable* pArgMT = thArgType.AsMethodTable(); + pArgMT = thArgType.AsMethodTable(); + } + if (pArgMT != nullptr) + { if (!pArgMT->IsValueType()) { COMPlusThrow(kInvalidProgramException); @@ -3290,6 +3278,17 @@ void CallStubGenerator::ComputeCallStubWorker(bool hasUnmanagedCallConv, CorInfo continue; } + if (isSwiftErrorType(pArgMT)) + { + swiftErrorCount++; + if (swiftErrorCount > 1) + { + COMPlusThrow(kInvalidProgramException); + } + newArgCount++; + continue; + } + if (isSwiftIndirectResultType(pArgMT)) { swiftIndirectResultCount++; @@ -3517,69 +3516,69 @@ void CallStubGenerator::ComputeCallStubWorker(bool hasUnmanagedCallConv, CorInfo #if defined(TARGET_APPLE) && defined(TARGET_ARM64) if (isSwiftCallConv) { + MethodTable* pArgMT = nullptr; + if (corType == ELEMENT_TYPE_BYREF) { sig.GetByRefType(&thArgTypeHandle); } - if ((corType == ELEMENT_TYPE_VALUETYPE || corType == ELEMENT_TYPE_BYREF) && !thArgTypeHandle.IsNull()) + if (thArgTypeHandle.IsTypeDesc() && !thArgTypeHandle.AsTypeDesc()->GetTypeParam().IsNull()) { - MethodTable* pArgMT = thArgTypeHandle.IsTypeDesc() ? nullptr : thArgTypeHandle.AsMethodTable(); - if (pArgMT != nullptr && isSwiftSelfGenericType(pArgMT)) - { - Instantiation inst = pArgMT->GetInstantiation(); - _ASSERTE(inst.GetNumArgs() != 0); - TypeHandle innerType = inst[0]; - _ASSERTE(!innerType.IsNull() && !innerType.IsTypeDesc()); - MethodTable* pInnerMT = innerType.AsMethodTable(); + pArgMT = thArgTypeHandle.AsTypeDesc()->GetTypeParam().AsMethodTable(); + } + else if (!thArgTypeHandle.IsTypeDesc() && !thArgTypeHandle.IsNull()) + { + pArgMT = thArgTypeHandle.AsMethodTable(); + } + + if (pArgMT != nullptr && isSwiftSelfGenericType(pArgMT)) + { + Instantiation inst = pArgMT->GetInstantiation(); + _ASSERTE(inst.GetNumArgs() != 0); + TypeHandle innerType = inst[0]; + _ASSERTE(!innerType.IsNull() && !innerType.IsTypeDesc()); + MethodTable* pInnerMT = innerType.AsMethodTable(); #if DEBUG - CORINFO_SWIFT_LOWERING lowering = {}; - pInnerMT->GetNativeSwiftPhysicalLowering(&lowering, false); - _ASSERTE(lowering.byReference); + CORINFO_SWIFT_LOWERING lowering = {}; + pInnerMT->GetNativeSwiftPhysicalLowering(&lowering, false); + _ASSERTE(lowering.byReference); #endif // DEBUG #if LOG_COMPUTE_CALL_STUB - printf("SwiftSelf argument detected\n"); + printf("SwiftSelf argument detected\n"); #endif - TerminateCurrentRoutineIfNotOfNewType(RoutineType::SwiftSelfByRef, pRoutines); - m_currentRoutineType = RoutineType::SwiftSelfByRef; + TerminateCurrentRoutineIfNotOfNewType(RoutineType::SwiftSelfByRef, pRoutines); + m_currentRoutineType = RoutineType::SwiftSelfByRef; - int structSize = ALIGN_UP(pInnerMT->GetNumInstanceFieldBytes(), INTERP_STACK_SLOT_SIZE); - m_swiftSelfByRefSize = structSize; - interpreterStackOffset += structSize; - continue; - } - if (pArgMT != nullptr && isSwiftSelfType(pArgMT)) - { + int structSize = ALIGN_UP(pInnerMT->GetNumInstanceFieldBytes(), INTERP_STACK_SLOT_SIZE); + m_swiftSelfByRefSize = structSize; + interpreterStackOffset += structSize; + continue; + } + + if (pArgMT != nullptr && isSwiftSelfType(pArgMT)) + { #if LOG_COMPUTE_CALL_STUB - printf("Swift Self argument detected\n"); + printf("Swift Self argument detected\n"); #endif - TerminateCurrentRoutineIfNotOfNewType(RoutineType::SwiftSelf, pRoutines); - m_currentRoutineType = RoutineType::SwiftSelf; - interpreterStackOffset += 
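// (Editor's note) SwiftSelf is pinned to x20 and SwiftError to x21 by the Swift
// calling convention, outside the normal x0-x7 argument registers; that is why
// these arguments get dedicated routine types rather than folding into the
// generic GP-register range routines.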
interpStackSlotSize; - continue; - } + TerminateCurrentRoutineIfNotOfNewType(RoutineType::SwiftSelf, pRoutines); + m_currentRoutineType = RoutineType::SwiftSelf; + interpreterStackOffset += interpStackSlotSize; + continue; } - if (corType == ELEMENT_TYPE_PTR) + if (pArgMT != nullptr && isSwiftErrorType(pArgMT)) { - // Get the TypeHandle for the argument's type. - MetaSig *pSig = argIt.GetSig(); - thArgTypeHandle = pSig->GetLastTypeHandleThrowing(); - _ASSERTE(thArgTypeHandle.IsTypeDesc()); - MethodTable* pArgMT = thArgTypeHandle.AsTypeDesc()->GetTypeParam().AsMethodTable(); - if (isSwiftErrorType(pArgMT)) - { #if LOG_COMPUTE_CALL_STUB - printf("Swift Error argument detected\n"); + printf("Swift Error argument detected\n"); #endif - TerminateCurrentRoutineIfNotOfNewType(RoutineType::SwiftError, pRoutines); - m_currentRoutineType = RoutineType::SwiftError; - interpreterStackOffset += interpStackSlotSize; - continue; - } + TerminateCurrentRoutineIfNotOfNewType(RoutineType::SwiftError, pRoutines); + m_currentRoutineType = RoutineType::SwiftError; + interpreterStackOffset += interpStackSlotSize; + continue; } } #endif // TARGET_APPLE && TARGET_ARM64 From de7f6d271cdb4db70ae6b1e454d44b9da75363b0 Mon Sep 17 00:00:00 2001 From: Milos Kotlar Date: Wed, 28 Jan 2026 14:23:55 +0100 Subject: [PATCH 16/33] Update src/coreclr/vm/callstubgenerator.cpp Co-authored-by: Aaron R Robinson --- src/coreclr/vm/callstubgenerator.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/coreclr/vm/callstubgenerator.cpp b/src/coreclr/vm/callstubgenerator.cpp index b9580c27184574..98f5d04380e317 100644 --- a/src/coreclr/vm/callstubgenerator.cpp +++ b/src/coreclr/vm/callstubgenerator.cpp @@ -2184,7 +2184,7 @@ PCODE CallStubGenerator::GetSwiftLoadGPAtOffsetRoutine(int regIndex) (PCODE)Load_X0_AtOffset, (PCODE)Load_X1_AtOffset, (PCODE)Load_X2_AtOffset, (PCODE)Load_X3_AtOffset, (PCODE)Load_X4_AtOffset, (PCODE)Load_X5_AtOffset, (PCODE)Load_X6_AtOffset, (PCODE)Load_X7_AtOffset }; - _ASSERTE(regIndex >= 0 && regIndex < 8); + _ASSERTE(regIndex >= 0 && regIndex < ARRAY_SIZE(routines)); return routines[regIndex]; } From fcf14ee9cb84718b3ce9c37bacf02ff2c9c775dd Mon Sep 17 00:00:00 2001 From: Milos Kotlar Date: Wed, 28 Jan 2026 14:24:04 +0100 Subject: [PATCH 17/33] Update src/coreclr/vm/callstubgenerator.cpp Co-authored-by: Aaron R Robinson --- src/coreclr/vm/callstubgenerator.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/coreclr/vm/callstubgenerator.cpp b/src/coreclr/vm/callstubgenerator.cpp index 98f5d04380e317..fe5026ad4c6987 100644 --- a/src/coreclr/vm/callstubgenerator.cpp +++ b/src/coreclr/vm/callstubgenerator.cpp @@ -2194,7 +2194,7 @@ PCODE CallStubGenerator::GetSwiftLoadFPAtOffsetRoutine(int regIndex) (PCODE)Load_D0_AtOffset, (PCODE)Load_D1_AtOffset, (PCODE)Load_D2_AtOffset, (PCODE)Load_D3_AtOffset, (PCODE)Load_D4_AtOffset, (PCODE)Load_D5_AtOffset, (PCODE)Load_D6_AtOffset, (PCODE)Load_D7_AtOffset }; - _ASSERTE(regIndex >= 0 && regIndex < 8); + _ASSERTE(regIndex >= 0 && regIndex < ARRAY_SIZE(routines)); return routines[regIndex]; } From f6fe00b977395e13d0b039aad05851e8e5a7fc55 Mon Sep 17 00:00:00 2001 From: Milos Kotlar Date: Wed, 28 Jan 2026 14:24:12 +0100 Subject: [PATCH 18/33] Update src/coreclr/vm/callstubgenerator.cpp Co-authored-by: Aaron R Robinson --- src/coreclr/vm/callstubgenerator.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/coreclr/vm/callstubgenerator.cpp b/src/coreclr/vm/callstubgenerator.cpp index fe5026ad4c6987..969e2c882a43e4 100644 
--- a/src/coreclr/vm/callstubgenerator.cpp +++ b/src/coreclr/vm/callstubgenerator.cpp @@ -2203,7 +2203,7 @@ PCODE CallStubGenerator::GetSwiftStoreGPAtOffsetRoutine(int regIndex) static PCODE routines[] = { (PCODE)Store_X0_AtOffset, (PCODE)Store_X1_AtOffset, (PCODE)Store_X2_AtOffset, (PCODE)Store_X3_AtOffset }; - _ASSERTE(regIndex >= 0 && regIndex < 4); + _ASSERTE(regIndex >= 0 && regIndex < ARRAY_SIZE(routines)); return routines[regIndex]; } From b6182a7f97c275863c3f4a25a4c549d56ac798e8 Mon Sep 17 00:00:00 2001 From: Milos Kotlar Date: Wed, 28 Jan 2026 14:24:21 +0100 Subject: [PATCH 19/33] Update src/coreclr/vm/callstubgenerator.cpp Co-authored-by: Aaron R Robinson --- src/coreclr/vm/callstubgenerator.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/coreclr/vm/callstubgenerator.cpp b/src/coreclr/vm/callstubgenerator.cpp index 969e2c882a43e4..59d1fdaa85c910 100644 --- a/src/coreclr/vm/callstubgenerator.cpp +++ b/src/coreclr/vm/callstubgenerator.cpp @@ -2212,7 +2212,7 @@ PCODE CallStubGenerator::GetSwiftStoreFPAtOffsetRoutine(int regIndex) static PCODE routines[] = { (PCODE)Store_D0_AtOffset, (PCODE)Store_D1_AtOffset, (PCODE)Store_D2_AtOffset, (PCODE)Store_D3_AtOffset }; - _ASSERTE(regIndex >= 0 && regIndex < 4); + _ASSERTE(regIndex >= 0 && regIndex < ARRAY_SIZE(routines)); return routines[regIndex]; } #endif // TARGET_APPLE && TARGET_ARM64 From 89835830dade78f8717f2da8c705995cafd4fbf6 Mon Sep 17 00:00:00 2001 From: Milos Kotlar Date: Wed, 28 Jan 2026 14:25:23 +0100 Subject: [PATCH 20/33] Update src/coreclr/vm/callstubgenerator.h Co-authored-by: Aaron R Robinson --- src/coreclr/vm/callstubgenerator.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/coreclr/vm/callstubgenerator.h b/src/coreclr/vm/callstubgenerator.h index 47cbf792422125..41eec82731f19f 100644 --- a/src/coreclr/vm/callstubgenerator.h +++ b/src/coreclr/vm/callstubgenerator.h @@ -230,7 +230,7 @@ class CallStubGenerator // The size of the temporary storage is the size of the CallStubHeader plus the size of the routines array. // The size of the routines array is three times the number of arguments plus one slot for the target method pointer. // Add extra space for Swift return lowering (up to 4 elements * 2 slots + terminator = 9 slots). - return sizeof(CallStubHeader) + ((numArgs + 1) * 3 + 10) * sizeof(PCODE); + return sizeof(CallStubHeader) + ((numArgs + 1) * 3 + 1 + 9) * sizeof(PCODE); } void ComputeCallStub(MetaSig &sig, PCODE *pRoutines, MethodDesc *pMD); template From a4c92f0691bfb3a23457c40236810ad61cd14c50 Mon Sep 17 00:00:00 2001 From: Milos Kotlar Date: Wed, 28 Jan 2026 14:26:36 +0100 Subject: [PATCH 21/33] Revert windows-related changes --- src/coreclr/vm/arm64/asmhelpers.asm | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/coreclr/vm/arm64/asmhelpers.asm b/src/coreclr/vm/arm64/asmhelpers.asm index cabc98018705d9..13a93f9340b62e 100644 --- a/src/coreclr/vm/arm64/asmhelpers.asm +++ b/src/coreclr/vm/arm64/asmhelpers.asm @@ -105,14 +105,12 @@ PROLOG_SAVE_REG_PAIR fp, lr, #-224! SAVE_ARGUMENT_REGISTERS sp, 16 SAVE_FLOAT_ARGUMENT_REGISTERS sp, 96 - str x8, [sp, #208] ; Save x8 (indirect result register for Swift) mov x0, x12 bl PInvokeImportWorker mov x12, x0 ; pop the stack and restore original register state - ldr x8, [sp, #208] ; Restore x8 (indirect result register for Swift) RESTORE_FLOAT_ARGUMENT_REGISTERS sp, 96 RESTORE_ARGUMENT_REGISTERS sp, 16 EPILOG_RESTORE_REG_PAIR fp, lr, #224! 
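(Editor's note) A quick worked example of the temporary-storage sizing adjusted in patch 20 above; the breakdown follows the comment in GetTemporaryStorageSize, and the helper below is illustrative only:

    #include <cstddef>

    // Worst case per the comment: 3 routine slots per argument (plus one extra
    // argument's worth), 1 slot for the target method pointer, and 9 slots for
    // Swift return lowering (4 elements * 2 slots + terminator).
    static size_t ExampleStorageSize(size_t headerSize, int numArgs, size_t slotSize)
    {
        return headerSize + ((numArgs + 1) * 3 + 1 + 9) * slotSize;
    }

    // For numArgs = 4 and 8-byte slots: (5 * 3 + 1 + 9) * 8 = 200 bytes of
    // routine slots on top of the header.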
From fa26caccf84e8a4b02e40f062085d95b065b606e Mon Sep 17 00:00:00 2001 From: Milos Kotlar Date: Wed, 28 Jan 2026 14:50:16 +0100 Subject: [PATCH 22/33] Refactor swiftLoweringInfo to use CQuickArray --- src/coreclr/vm/callstubgenerator.cpp | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/coreclr/vm/callstubgenerator.cpp b/src/coreclr/vm/callstubgenerator.cpp index 59d1fdaa85c910..9db58e61534919 100644 --- a/src/coreclr/vm/callstubgenerator.cpp +++ b/src/coreclr/vm/callstubgenerator.cpp @@ -3194,7 +3194,7 @@ void CallStubGenerator::ComputeCallStubWorker(bool hasUnmanagedCallConv, CorInfo bool isFloat; // True if this element goes in FP register bool isLowered; // True if this is part of a lowered struct (not a regular arg) }; - SwiftLoweringElement swiftLoweringInfo[128]; + CQuickArray swiftLoweringInfo; int swiftLoweringCount = 0; int swiftIndirectResultCount = 0; @@ -3316,6 +3316,8 @@ void CallStubGenerator::ComputeCallStubWorker(bool hasUnmanagedCallConv, CorInfo newArgCount++; } + swiftLoweringInfo.AllocThrows(newArgCount); + // Build new signature with lowered structs and store lowering info SigBuilder swiftSigBuilder; swiftSigBuilder.AppendByte((BYTE)sig.GetCallingConventionInfo()); From 3b44e03298d1754e56f33ea229e7ebbe732c8da5 Mon Sep 17 00:00:00 2001 From: Milos Kotlar Date: Wed, 28 Jan 2026 16:01:13 +0100 Subject: [PATCH 23/33] Encapsulate Swift-related code in functions --- src/coreclr/vm/callstubgenerator.cpp | 733 ++++++++++++++------------- src/coreclr/vm/callstubgenerator.h | 32 +- 2 files changed, 399 insertions(+), 366 deletions(-) diff --git a/src/coreclr/vm/callstubgenerator.cpp b/src/coreclr/vm/callstubgenerator.cpp index 9db58e61534919..793a1d86e9398e 100644 --- a/src/coreclr/vm/callstubgenerator.cpp +++ b/src/coreclr/vm/callstubgenerator.cpp @@ -3187,246 +3187,19 @@ void CallStubGenerator::ComputeCallStubWorker(bool hasUnmanagedCallConv, CorInfo } #if defined(TARGET_APPLE) && defined(TARGET_ARM64) - // Swift lowering info for expanded struct elements - struct SwiftLoweringElement { - uint16_t offset; // Offset within struct - uint16_t structSize; // If non-zero, this is the last element, advance x9 by this amount - bool isFloat; // True if this element goes in FP register - bool isLowered; // True if this is part of a lowered struct (not a regular arg) - }; CQuickArray swiftLoweringInfo; - int swiftLoweringCount = 0; + SigBuilder swiftSigBuilder; int swiftIndirectResultCount = 0; + int swiftArgIndex = 0; m_hasSwiftReturnLowering = false; m_swiftReturnLowering = {}; + m_swiftSelfByRefSize = 0; if (isSwiftCallConv) { - if (!m_interpreterToNative) - { - COMPlusThrow(kNotImplementedException); - } - - sig.Reset(); - TypeHandle thReturnType; - CorElementType retCorType = sig.GetReturnTypeNormalized(&thReturnType); - if (retCorType == ELEMENT_TYPE_VALUETYPE && !thReturnType.IsNull() && !thReturnType.IsTypeDesc()) - { - MethodTable* pRetMT = thReturnType.AsMethodTable(); - if (pRetMT->IsValueType() && !pRetMT->IsHFA() && !isIntrinsicSIMDType(pRetMT)) - { - CORINFO_SWIFT_LOWERING lowering = {}; - pRetMT->GetNativeSwiftPhysicalLowering(&lowering, false); - if (!lowering.byReference && lowering.numLoweredElements > 0) - { - m_hasSwiftReturnLowering = true; - m_swiftReturnLowering = lowering; -#if LOG_COMPUTE_CALL_STUB - printf("Swift return lowering detected: %d elements\n", lowering.numLoweredElements); -#endif - } - } - } - - // Count how many extra arguments we need due to Swift lowering - sig.Reset(); - int newArgCount = 0; - int swiftSelfCount = 0; - 
int swiftErrorCount = 0; - CorElementType argType; - while ((argType = sig.NextArg()) != ELEMENT_TYPE_END) - { - TypeHandle thArgType = sig.GetLastTypeHandleThrowing(); - MethodTable* pArgMT = nullptr; - - if (argType == ELEMENT_TYPE_BYREF) - { - sig.GetByRefType(&thArgType); - } - - // Extract the underlying MT for pointer types or unwrapped byrefs - if (thArgType.IsTypeDesc() && !thArgType.AsTypeDesc()->GetTypeParam().IsNull()) - { - pArgMT = thArgType.AsTypeDesc()->GetTypeParam().AsMethodTable(); - } - else if (!thArgType.IsTypeDesc() && !thArgType.IsNull()) - { - pArgMT = thArgType.AsMethodTable(); - } - - if (pArgMT != nullptr) - { - if (!pArgMT->IsValueType()) - { - COMPlusThrow(kInvalidProgramException); - } - - if (isIntrinsicSIMDType(pArgMT)) - { - COMPlusThrow(kInvalidProgramException); - } - - if (isSwiftSelfType(pArgMT)) - { - swiftSelfCount++; - if (swiftSelfCount > 1) - { - COMPlusThrow(kInvalidProgramException); - } - newArgCount++; - continue; - } - - if (isSwiftErrorType(pArgMT)) - { - swiftErrorCount++; - if (swiftErrorCount > 1) - { - COMPlusThrow(kInvalidProgramException); - } - newArgCount++; - continue; - } - - if (isSwiftIndirectResultType(pArgMT)) - { - swiftIndirectResultCount++; - if (swiftIndirectResultCount > 1) - { - COMPlusThrow(kInvalidProgramException); - } - // SwiftIndirectResult goes in x8, not in argument registers - continue; - } - - if (argType == ELEMENT_TYPE_VALUETYPE) - { - CORINFO_SWIFT_LOWERING lowering = {}; - pArgMT->GetNativeSwiftPhysicalLowering(&lowering, false); - - if (!lowering.byReference && lowering.numLoweredElements > 0) - { - newArgCount += (int)lowering.numLoweredElements; - continue; - } - } - } - - newArgCount++; - } - - swiftLoweringInfo.AllocThrows(newArgCount); - - // Build new signature with lowered structs and store lowering info - SigBuilder swiftSigBuilder; - swiftSigBuilder.AppendByte((BYTE)sig.GetCallingConventionInfo()); - swiftSigBuilder.AppendData(newArgCount); - - // Copy return type - SigPointer pReturn = sig.GetReturnProps(); - pReturn.ConvertToInternalExactlyOne(sig.GetModule(), sig.GetSigTypeContext(), &swiftSigBuilder); - - // Process arguments - sig.Reset(); - while ((argType = sig.NextArg()) != ELEMENT_TYPE_END) - { - if (argType == ELEMENT_TYPE_VALUETYPE) - { - TypeHandle thArgType = sig.GetLastTypeHandleThrowing(); - MethodTable* pArgMT = thArgType.IsTypeDesc() ? 
nullptr : thArgType.AsMethodTable(); - if (pArgMT != nullptr) - { - if (isSwiftIndirectResultType(pArgMT)) - { - // SwiftIndirectResult goes in x8, not in argument registers - continue; - } - // Don't lower Swift* types except SwiftSelf - if (isSwiftSelfType(pArgMT)) - { - SigPointer pArg = sig.GetArgProps(); - pArg.ConvertToInternalExactlyOne(sig.GetModule(), sig.GetSigTypeContext(), &swiftSigBuilder); - swiftLoweringInfo[swiftLoweringCount++] = { 0, 0, false, false }; - continue; - } - - CORINFO_SWIFT_LOWERING lowering = {}; - pArgMT->GetNativeSwiftPhysicalLowering(&lowering, false); - - if (!lowering.byReference && lowering.numLoweredElements > 0) - { - // Emit primitive types instead of struct - int structSize = ALIGN_UP(pArgMT->GetNumInstanceFieldBytes(), INTERP_STACK_SLOT_SIZE); - for (size_t i = 0; i < lowering.numLoweredElements; i++) - { - bool isFloat = false; - switch (lowering.loweredElements[i]) - { - case CORINFO_TYPE_BYTE: - case CORINFO_TYPE_UBYTE: - swiftSigBuilder.AppendElementType(ELEMENT_TYPE_I1); - break; - case CORINFO_TYPE_SHORT: - swiftSigBuilder.AppendElementType(ELEMENT_TYPE_I2); - break; - case CORINFO_TYPE_USHORT: - swiftSigBuilder.AppendElementType(ELEMENT_TYPE_U2); - break; - case CORINFO_TYPE_INT: - swiftSigBuilder.AppendElementType(ELEMENT_TYPE_I4); - break; - case CORINFO_TYPE_UINT: - swiftSigBuilder.AppendElementType(ELEMENT_TYPE_U4); - break; - case CORINFO_TYPE_LONG: - swiftSigBuilder.AppendElementType(ELEMENT_TYPE_I8); - break; - case CORINFO_TYPE_ULONG: - swiftSigBuilder.AppendElementType(ELEMENT_TYPE_U8); - break; - case CORINFO_TYPE_NATIVEINT: - swiftSigBuilder.AppendElementType(ELEMENT_TYPE_I); - break; - case CORINFO_TYPE_NATIVEUINT: - swiftSigBuilder.AppendElementType(ELEMENT_TYPE_U); - break; - case CORINFO_TYPE_FLOAT: - swiftSigBuilder.AppendElementType(ELEMENT_TYPE_R4); - isFloat = true; - break; - case CORINFO_TYPE_DOUBLE: - swiftSigBuilder.AppendElementType(ELEMENT_TYPE_R8); - isFloat = true; - break; - default: - swiftSigBuilder.AppendElementType(ELEMENT_TYPE_I); - break; - } - bool isLast = (i == lowering.numLoweredElements - 1); - swiftLoweringInfo[swiftLoweringCount++] = { - (uint16_t)lowering.offsets[i], - isLast ? 
(uint16_t)structSize : (uint16_t)0, - isFloat, - true - }; - } - continue; - } - } - } - - SigPointer pArg = sig.GetArgProps(); - pArg.ConvertToInternalExactlyOne(sig.GetModule(), sig.GetSigTypeContext(), &swiftSigBuilder); - swiftLoweringInfo[swiftLoweringCount++] = { 0, 0, false, false }; - } - - DWORD cSwiftSig; - PCCOR_SIGNATURE pSwiftSig = (PCCOR_SIGNATURE)swiftSigBuilder.GetSignature(&cSwiftSig); - MetaSig swiftSig(pSwiftSig, cSwiftSig, sig.GetModule(), NULL, MetaSig::sigMember); - sig = swiftSig; + RewriteSignatureForSwiftLowering(sig, swiftSigBuilder, swiftLoweringInfo, swiftIndirectResultCount); } - int swiftArgIndex = 0; #endif // TARGET_APPLE && TARGET_ARM64 ArgIteratorType argIt(&sig); @@ -3441,10 +3214,6 @@ void CallStubGenerator::ComputeCallStubWorker(bool hasUnmanagedCallConv, CorInfo m_s2 = 0; m_routineIndex = 0; m_totalStackSize = argIt.SizeOfArgStack(); -#if defined(TARGET_APPLE) && defined(TARGET_ARM64) - m_swiftSelfByRefSize = 0; - m_hasSwiftIndirectResult = (isSwiftCallConv && swiftIndirectResultCount > 0); -#endif #if LOG_COMPUTE_CALL_STUB printf("ComputeCallStub\n"); #endif @@ -3488,7 +3257,7 @@ void CallStubGenerator::ComputeCallStubWorker(bool hasUnmanagedCallConv, CorInfo } #if defined(TARGET_APPLE) && defined(TARGET_ARM64) - if (m_hasSwiftIndirectResult) + if (swiftIndirectResultCount > 0) { #if LOG_COMPUTE_CALL_STUB printf("Emitting Load_SwiftIndirectResult routine\n"); @@ -3534,52 +3303,8 @@ void CallStubGenerator::ComputeCallStubWorker(bool hasUnmanagedCallConv, CorInfo pArgMT = thArgTypeHandle.AsMethodTable(); } - if (pArgMT != nullptr && isSwiftSelfGenericType(pArgMT)) - { - Instantiation inst = pArgMT->GetInstantiation(); - _ASSERTE(inst.GetNumArgs() != 0); - TypeHandle innerType = inst[0]; - _ASSERTE(!innerType.IsNull() && !innerType.IsTypeDesc()); - MethodTable* pInnerMT = innerType.AsMethodTable(); -#if DEBUG - CORINFO_SWIFT_LOWERING lowering = {}; - pInnerMT->GetNativeSwiftPhysicalLowering(&lowering, false); - _ASSERTE(lowering.byReference); -#endif // DEBUG - -#if LOG_COMPUTE_CALL_STUB - printf("SwiftSelf argument detected\n"); -#endif - TerminateCurrentRoutineIfNotOfNewType(RoutineType::SwiftSelfByRef, pRoutines); - m_currentRoutineType = RoutineType::SwiftSelfByRef; - - int structSize = ALIGN_UP(pInnerMT->GetNumInstanceFieldBytes(), INTERP_STACK_SLOT_SIZE); - m_swiftSelfByRefSize = structSize; - interpreterStackOffset += structSize; - continue; - } - - if (pArgMT != nullptr && isSwiftSelfType(pArgMT)) + if (ProcessSwiftSpecialArgument(pArgMT, interpStackSlotSize, interpreterStackOffset, pRoutines)) { -#if LOG_COMPUTE_CALL_STUB - printf("Swift Self argument detected\n"); -#endif - - TerminateCurrentRoutineIfNotOfNewType(RoutineType::SwiftSelf, pRoutines); - m_currentRoutineType = RoutineType::SwiftSelf; - interpreterStackOffset += interpStackSlotSize; - continue; - } - - if (pArgMT != nullptr && isSwiftErrorType(pArgMT)) - { -#if LOG_COMPUTE_CALL_STUB - printf("Swift Error argument detected\n"); -#endif - - TerminateCurrentRoutineIfNotOfNewType(RoutineType::SwiftError, pRoutines); - m_currentRoutineType = RoutineType::SwiftError; - interpreterStackOffset += interpStackSlotSize; continue; } } @@ -3615,53 +3340,14 @@ void CallStubGenerator::ComputeCallStubWorker(bool hasUnmanagedCallConv, CorInfo interpreterStackOffset += interpStackSlotSize; #if defined(TARGET_APPLE) && defined(TARGET_ARM64) - if (isSwiftCallConv && m_interpreterToNative && swiftArgIndex < swiftLoweringCount) + if (isSwiftCallConv && m_interpreterToNative && swiftArgIndex < 
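// (Editor's note) swiftArgIndex walks the lowering table built by the signature
// rewrite; entries with isLowered set describe one register-sized piece of an
// exploded struct rather than a whole managed argument.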
(int)swiftLoweringInfo.Size()) { SwiftLoweringElement& elem = swiftLoweringInfo[swiftArgIndex]; swiftArgIndex++; if (elem.isLowered) { - TerminateCurrentRoutineIfNotOfNewType(RoutineType::None, pRoutines); - - if (elem.isFloat && argLocDesc.m_cFloatReg > 0) - { - int regIndex = argLocDesc.m_idxFloatReg; - pRoutines[m_routineIndex++] = GetSwiftLoadFPAtOffsetRoutine(regIndex); - // Pack offset (lower 16 bits) and structSize (bits 16-31) - PCODE packedData = (PCODE)elem.offset | ((PCODE)elem.structSize << 16); - pRoutines[m_routineIndex++] = packedData; -#if LOG_COMPUTE_CALL_STUB - printf("Swift lowered element to FP reg: offset=%d, structSize=%d, reg=d%d\n", - elem.offset, elem.structSize, regIndex); -#endif - } - else if (!elem.isFloat && argLocDesc.m_cGenReg > 0) - { - int regIndex = argLocDesc.m_idxGenReg; - pRoutines[m_routineIndex++] = GetSwiftLoadGPAtOffsetRoutine(regIndex); - // Pack offset (lower 16 bits) and structSize (bits 16-31) - PCODE packedData = (PCODE)elem.offset | ((PCODE)elem.structSize << 16); - pRoutines[m_routineIndex++] = packedData; -#if LOG_COMPUTE_CALL_STUB - printf("Swift lowered element to GP reg: offset=%d, structSize=%d, reg=x%d\n", - elem.offset, elem.structSize, regIndex); -#endif - } - else - { - // Spilled to stack - pRoutines[m_routineIndex++] = (PCODE)Load_Stack_AtOffset; - // Pack offset (lower 16 bits), structSize (bits 16-31), and stackOffset (bits 32-63) - PCODE packedData = (PCODE)elem.offset | - ((PCODE)elem.structSize << 16) | - ((PCODE)argLocDesc.m_byteStackIndex << 32); - pRoutines[m_routineIndex++] = packedData; -#if LOG_COMPUTE_CALL_STUB - printf("Swift lowered element to stack: offset=%d, structSize=%d, stackOffset=%d\n", - elem.offset, elem.structSize, argLocDesc.m_byteStackIndex); -#endif - } + EmitSwiftLoweredElementRoutine(elem, argLocDesc, pRoutines); continue; } } @@ -3753,39 +3439,7 @@ void CallStubGenerator::ComputeCallStubWorker(bool hasUnmanagedCallConv, CorInfo #if defined(TARGET_APPLE) && defined(TARGET_ARM64) if (m_hasSwiftReturnLowering) { - int gpRegIndex = 0; - int fpRegIndex = 0; - - for (size_t i = 0; i < m_swiftReturnLowering.numLoweredElements; i++) - { - CorInfoType elemType = m_swiftReturnLowering.loweredElements[i]; - uint32_t offset = m_swiftReturnLowering.offsets[i]; - - bool isFloat = (elemType == CORINFO_TYPE_FLOAT || elemType == CORINFO_TYPE_DOUBLE); - - if (isFloat) - { - _ASSERTE(fpRegIndex < 4); - pRoutines[m_routineIndex++] = GetSwiftStoreFPAtOffsetRoutine(fpRegIndex); - pRoutines[m_routineIndex++] = (PCODE)offset; - fpRegIndex++; -#if LOG_COMPUTE_CALL_STUB - printf("Swift return store FP d%d at offset %d\n", fpRegIndex - 1, offset); -#endif - } - else - { - _ASSERTE(gpRegIndex < 4); - pRoutines[m_routineIndex++] = GetSwiftStoreGPAtOffsetRoutine(gpRegIndex); - pRoutines[m_routineIndex++] = (PCODE)offset; - gpRegIndex++; -#if LOG_COMPUTE_CALL_STUB - printf("Swift return store GP x%d at offset %d\n", gpRegIndex - 1, offset); -#endif - } - } - - pRoutines[m_routineIndex++] = (PCODE)SwiftLoweredReturnTerminator; + EmitSwiftReturnLoweringRoutines(pRoutines); } #endif // TARGET_APPLE && TARGET_ARM64 } @@ -4322,4 +3976,373 @@ CallStubGenerator::ReturnType CallStubGenerator::GetReturnType(ArgIteratorType * return ReturnTypeVoid; } +#if defined(TARGET_APPLE) && defined(TARGET_ARM64) +void CallStubGenerator::RewriteSignatureForSwiftLowering(MetaSig &sig, SigBuilder &swiftSigBuilder, CQuickArray &swiftLoweringInfo, int &swiftIndirectResultCount) +{ + if (!m_interpreterToNative) + { + COMPlusThrow(kNotImplementedException); 
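// (Editor's note) Only the interpreter-to-native direction is implemented for
// the Swift convention at this point; the reverse direction (a native caller
// entering an interpreted CallConvSwift method) is rejected above.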
+ } + + sig.Reset(); + TypeHandle thReturnType; + CorElementType retCorType = sig.GetReturnTypeNormalized(&thReturnType); + if (retCorType == ELEMENT_TYPE_VALUETYPE && !thReturnType.IsNull() && !thReturnType.IsTypeDesc()) + { + MethodTable* pRetMT = thReturnType.AsMethodTable(); + if (pRetMT->IsValueType() && !pRetMT->IsHFA() && !isIntrinsicSIMDType(pRetMT)) + { + CORINFO_SWIFT_LOWERING lowering = {}; + pRetMT->GetNativeSwiftPhysicalLowering(&lowering, false); + if (!lowering.byReference && lowering.numLoweredElements > 0) + { + m_hasSwiftReturnLowering = true; + m_swiftReturnLowering = lowering; +#if LOG_COMPUTE_CALL_STUB + printf("Swift return lowering detected: %d elements\n", lowering.numLoweredElements); +#endif + } + } + } + + // Count how many extra arguments we need due to Swift lowering + sig.Reset(); + int newArgCount = 0; + int swiftSelfCount = 0; + int swiftErrorCount = 0; + swiftIndirectResultCount = 0; + CorElementType argType; + while ((argType = sig.NextArg()) != ELEMENT_TYPE_END) + { + TypeHandle thArgType = sig.GetLastTypeHandleThrowing(); + MethodTable* pArgMT = nullptr; + + if (argType == ELEMENT_TYPE_BYREF) + { + sig.GetByRefType(&thArgType); + } + + // Extract the underlying MT for pointer types or unwrapped byrefs + if (thArgType.IsTypeDesc() && !thArgType.AsTypeDesc()->GetTypeParam().IsNull()) + { + pArgMT = thArgType.AsTypeDesc()->GetTypeParam().AsMethodTable(); + } + else if (!thArgType.IsTypeDesc() && !thArgType.IsNull()) + { + pArgMT = thArgType.AsMethodTable(); + } + + if (pArgMT != nullptr) + { + if (!pArgMT->IsValueType()) + { + COMPlusThrow(kInvalidProgramException); + } + + if (isIntrinsicSIMDType(pArgMT)) + { + COMPlusThrow(kInvalidProgramException); + } + + if (isSwiftSelfType(pArgMT)) + { + swiftSelfCount++; + if (swiftSelfCount > 1) + { + COMPlusThrow(kInvalidProgramException); + } + newArgCount++; + continue; + } + + if (isSwiftErrorType(pArgMT)) + { + swiftErrorCount++; + if (swiftErrorCount > 1) + { + COMPlusThrow(kInvalidProgramException); + } + newArgCount++; + continue; + } + + if (isSwiftIndirectResultType(pArgMT)) + { + swiftIndirectResultCount++; + if (swiftIndirectResultCount > 1) + { + COMPlusThrow(kInvalidProgramException); + } + // SwiftIndirectResult goes in x8, not in argument registers + continue; + } + + if (argType == ELEMENT_TYPE_VALUETYPE) + { + CORINFO_SWIFT_LOWERING lowering = {}; + pArgMT->GetNativeSwiftPhysicalLowering(&lowering, false); + + if (!lowering.byReference && lowering.numLoweredElements > 0) + { + newArgCount += (int)lowering.numLoweredElements; + continue; + } + } + } + + newArgCount++; + } + + swiftLoweringInfo.ReSizeThrows(newArgCount); + int loweringIndex = 0; + + // Build new signature with lowered structs and store lowering info + swiftSigBuilder.AppendByte((BYTE)sig.GetCallingConventionInfo()); + swiftSigBuilder.AppendData(newArgCount); + + // Copy return type + SigPointer pReturn = sig.GetReturnProps(); + pReturn.ConvertToInternalExactlyOne(sig.GetModule(), sig.GetSigTypeContext(), &swiftSigBuilder); + + // Process arguments + sig.Reset(); + while ((argType = sig.NextArg()) != ELEMENT_TYPE_END) + { + if (argType == ELEMENT_TYPE_VALUETYPE) + { + TypeHandle thArgType = sig.GetLastTypeHandleThrowing(); + MethodTable* pArgMT = thArgType.IsTypeDesc() ? 
nullptr : thArgType.AsMethodTable(); + if (pArgMT != nullptr) + { + if (isSwiftIndirectResultType(pArgMT)) + { + // SwiftIndirectResult goes in x8, not in argument registers + continue; + } + // Don't lower Swift* types except SwiftSelf + if (isSwiftSelfType(pArgMT)) + { + SigPointer pArg = sig.GetArgProps(); + pArg.ConvertToInternalExactlyOne(sig.GetModule(), sig.GetSigTypeContext(), &swiftSigBuilder); + swiftLoweringInfo[loweringIndex++] = { 0, 0, false, false }; + continue; + } + + CORINFO_SWIFT_LOWERING lowering = {}; + pArgMT->GetNativeSwiftPhysicalLowering(&lowering, false); + + if (!lowering.byReference && lowering.numLoweredElements > 0) + { + // Emit primitive types instead of struct + int structSize = ALIGN_UP(pArgMT->GetNumInstanceFieldBytes(), INTERP_STACK_SLOT_SIZE); + for (size_t i = 0; i < lowering.numLoweredElements; i++) + { + bool isFloat = false; + switch (lowering.loweredElements[i]) + { + case CORINFO_TYPE_BYTE: + case CORINFO_TYPE_UBYTE: + swiftSigBuilder.AppendElementType(ELEMENT_TYPE_I1); + break; + case CORINFO_TYPE_SHORT: + swiftSigBuilder.AppendElementType(ELEMENT_TYPE_I2); + break; + case CORINFO_TYPE_USHORT: + swiftSigBuilder.AppendElementType(ELEMENT_TYPE_U2); + break; + case CORINFO_TYPE_INT: + swiftSigBuilder.AppendElementType(ELEMENT_TYPE_I4); + break; + case CORINFO_TYPE_UINT: + swiftSigBuilder.AppendElementType(ELEMENT_TYPE_U4); + break; + case CORINFO_TYPE_LONG: + swiftSigBuilder.AppendElementType(ELEMENT_TYPE_I8); + break; + case CORINFO_TYPE_ULONG: + swiftSigBuilder.AppendElementType(ELEMENT_TYPE_U8); + break; + case CORINFO_TYPE_NATIVEINT: + swiftSigBuilder.AppendElementType(ELEMENT_TYPE_I); + break; + case CORINFO_TYPE_NATIVEUINT: + swiftSigBuilder.AppendElementType(ELEMENT_TYPE_U); + break; + case CORINFO_TYPE_FLOAT: + swiftSigBuilder.AppendElementType(ELEMENT_TYPE_R4); + isFloat = true; + break; + case CORINFO_TYPE_DOUBLE: + swiftSigBuilder.AppendElementType(ELEMENT_TYPE_R8); + isFloat = true; + break; + default: + swiftSigBuilder.AppendElementType(ELEMENT_TYPE_I); + break; + } + bool isLast = (i == lowering.numLoweredElements - 1); + swiftLoweringInfo[loweringIndex++] = { + (uint16_t)lowering.offsets[i], + isLast ? 
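// (Editor's note) structSize is recorded only on the final piece of a lowered
// struct; per the SwiftLoweringElement comment, the argument-loading code uses
// that non-zero size to advance the interpreter stack pointer past the whole
// struct in one step.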
(uint16_t)structSize : (uint16_t)0, + isFloat, + true + }; + } + continue; + } + } + } + + SigPointer pArg = sig.GetArgProps(); + pArg.ConvertToInternalExactlyOne(sig.GetModule(), sig.GetSigTypeContext(), &swiftSigBuilder); + swiftLoweringInfo[loweringIndex++] = { 0, 0, false, false }; + } + + DWORD cSwiftSig; + PCCOR_SIGNATURE pSwiftSig = (PCCOR_SIGNATURE)swiftSigBuilder.GetSignature(&cSwiftSig); + MetaSig swiftSig(pSwiftSig, cSwiftSig, sig.GetModule(), NULL, MetaSig::sigMember); + sig = swiftSig; +} + +bool CallStubGenerator::ProcessSwiftSpecialArgument(MethodTable* pArgMT, int interpStackSlotSize, int32_t &interpreterStackOffset, PCODE *pRoutines) +{ + if (pArgMT == nullptr) + { + return false; + } + + if (isSwiftSelfGenericType(pArgMT)) + { + Instantiation inst = pArgMT->GetInstantiation(); + _ASSERTE(inst.GetNumArgs() != 0); + TypeHandle innerType = inst[0]; + _ASSERTE(!innerType.IsNull() && !innerType.IsTypeDesc()); + MethodTable* pInnerMT = innerType.AsMethodTable(); +#if DEBUG + CORINFO_SWIFT_LOWERING lowering = {}; + pInnerMT->GetNativeSwiftPhysicalLowering(&lowering, false); + _ASSERTE(lowering.byReference); +#endif // DEBUG + +#if LOG_COMPUTE_CALL_STUB + printf("SwiftSelf argument detected\n"); +#endif + TerminateCurrentRoutineIfNotOfNewType(RoutineType::SwiftSelfByRef, pRoutines); + m_currentRoutineType = RoutineType::SwiftSelfByRef; + + int structSize = ALIGN_UP(pInnerMT->GetNumInstanceFieldBytes(), INTERP_STACK_SLOT_SIZE); + m_swiftSelfByRefSize = structSize; + interpreterStackOffset += structSize; + return true; + } + + if (isSwiftSelfType(pArgMT)) + { +#if LOG_COMPUTE_CALL_STUB + printf("Swift Self argument detected\n"); +#endif + + TerminateCurrentRoutineIfNotOfNewType(RoutineType::SwiftSelf, pRoutines); + m_currentRoutineType = RoutineType::SwiftSelf; + interpreterStackOffset += interpStackSlotSize; + return true; + } + + if (isSwiftErrorType(pArgMT)) + { +#if LOG_COMPUTE_CALL_STUB + printf("Swift Error argument detected\n"); +#endif + + TerminateCurrentRoutineIfNotOfNewType(RoutineType::SwiftError, pRoutines); + m_currentRoutineType = RoutineType::SwiftError; + interpreterStackOffset += interpStackSlotSize; + return true; + } + + return false; +} + +void CallStubGenerator::EmitSwiftLoweredElementRoutine(SwiftLoweringElement &elem, ArgLocDesc &argLocDesc, PCODE *pRoutines) +{ + TerminateCurrentRoutineIfNotOfNewType(RoutineType::None, pRoutines); + + if (elem.isFloat && argLocDesc.m_cFloatReg > 0) + { + int regIndex = argLocDesc.m_idxFloatReg; + pRoutines[m_routineIndex++] = GetSwiftLoadFPAtOffsetRoutine(regIndex); + // Pack offset (lower 16 bits) and structSize (bits 16-31) + PCODE packedData = (PCODE)elem.offset | ((PCODE)elem.structSize << 16); + pRoutines[m_routineIndex++] = packedData; +#if LOG_COMPUTE_CALL_STUB + printf("Swift lowered element to FP reg: offset=%d, structSize=%d, reg=d%d\n", + elem.offset, elem.structSize, regIndex); +#endif + } + else if (!elem.isFloat && argLocDesc.m_cGenReg > 0) + { + int regIndex = argLocDesc.m_idxGenReg; + pRoutines[m_routineIndex++] = GetSwiftLoadGPAtOffsetRoutine(regIndex); + // Pack offset (lower 16 bits) and structSize (bits 16-31) + PCODE packedData = (PCODE)elem.offset | ((PCODE)elem.structSize << 16); + pRoutines[m_routineIndex++] = packedData; +#if LOG_COMPUTE_CALL_STUB + printf("Swift lowered element to GP reg: offset=%d, structSize=%d, reg=x%d\n", + elem.offset, elem.structSize, regIndex); +#endif + } + else + { + // Spilled to stack + pRoutines[m_routineIndex++] = (PCODE)Load_Stack_AtOffset; + // Pack offset (lower 16 
+ // bits), structSize (bits 16-31), and stackOffset (bits 32-63)
+ PCODE packedData = (PCODE)elem.offset |
+ ((PCODE)elem.structSize << 16) |
+ ((PCODE)argLocDesc.m_byteStackIndex << 32);
+ pRoutines[m_routineIndex++] = packedData;
+#if LOG_COMPUTE_CALL_STUB
+ printf("Swift lowered element to stack: offset=%d, structSize=%d, stackOffset=%d\n",
+ elem.offset, elem.structSize, argLocDesc.m_byteStackIndex);
+#endif
+ }
+}
+
+void CallStubGenerator::EmitSwiftReturnLoweringRoutines(PCODE *pRoutines)
+{
+ int gpRegIndex = 0;
+ int fpRegIndex = 0;
+
+ for (size_t i = 0; i < m_swiftReturnLowering.numLoweredElements; i++)
+ {
+ CorInfoType elemType = m_swiftReturnLowering.loweredElements[i];
+ uint32_t offset = m_swiftReturnLowering.offsets[i];
+
+ bool isFloat = (elemType == CORINFO_TYPE_FLOAT || elemType == CORINFO_TYPE_DOUBLE);
+
+ if (isFloat)
+ {
+ _ASSERTE(fpRegIndex < 4);
+ pRoutines[m_routineIndex++] = GetSwiftStoreFPAtOffsetRoutine(fpRegIndex);
+ pRoutines[m_routineIndex++] = (PCODE)offset;
+ fpRegIndex++;
+#if LOG_COMPUTE_CALL_STUB
+ printf("Swift return store FP d%d at offset %d\n", fpRegIndex - 1, offset);
+#endif
+ }
+ else
+ {
+ _ASSERTE(gpRegIndex < 4);
+ pRoutines[m_routineIndex++] = GetSwiftStoreGPAtOffsetRoutine(gpRegIndex);
+ pRoutines[m_routineIndex++] = (PCODE)offset;
+ gpRegIndex++;
+#if LOG_COMPUTE_CALL_STUB
+ printf("Swift return store GP x%d at offset %d\n", gpRegIndex - 1, offset);
+#endif
+ }
+ }
+
+ pRoutines[m_routineIndex++] = (PCODE)SwiftLoweredReturnTerminator;
+}
+#endif
+
 #endif // FEATURE_INTERPRETER && !TARGET_WASM
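For reference, the routines above communicate with the *_AtOffset assembly helpers through packed slots in the routine stream: EmitSwiftLoweredElementRoutine appends the routine pointer followed by a pointer-sized immediate that encodes the field offset in bits 0-15, the aligned struct size in bits 16-31 (non-zero only on the last element, so the interpreter stack pointer advances exactly once per struct), and, for stack spills only, the native stack offset in bits 32-63. A minimal decoding sketch follows, assuming PCODE is pointer-sized; the struct and function names here are hypothetical, not runtime code:

#include <stdint.h>

typedef uintptr_t PCODE; // pointer-sized routine-stream slot, as in the runtime

// Decoded form of one packed slot (hypothetical helper for illustration).
struct DecodedSwiftSlot
{
    uint16_t fieldOffset; // bits 0-15: element offset within the struct
    uint16_t structSize;  // bits 16-31: aligned struct size, non-zero only for the last element
    uint32_t stackOffset; // bits 32-63: native stack offset, used by Load_Stack_AtOffset only
};

static DecodedSwiftSlot DecodeSwiftSlot(PCODE packed)
{
    DecodedSwiftSlot slot;
    slot.fieldOffset = (uint16_t)(packed & 0xFFFF);         // asm: and w11, w12, #0xFFFF
    slot.structSize  = (uint16_t)((packed >> 16) & 0xFFFF); // asm: ubfx x12, x12, #16, #16 (or lsr when bits 32-63 are zero)
    slot.stackOffset = (uint32_t)(packed >> 32);            // upper word of the slot
    return slot;
}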
diff --git a/src/coreclr/vm/callstubgenerator.h b/src/coreclr/vm/callstubgenerator.h
index 41eec82731f19f..a64458738a97f2 100644
--- a/src/coreclr/vm/callstubgenerator.h
+++ b/src/coreclr/vm/callstubgenerator.h
@@ -166,15 +166,6 @@ class CallStubGenerator
 int m_targetSlotIndex = -1;
 // The total stack size used for the arguments.
 int m_totalStackSize = 0;
-#if defined(TARGET_APPLE) && defined(TARGET_ARM64)
- // Size of struct for SwiftSelf
- int m_swiftSelfByRefSize = 0;
- // Track if SwiftIndirectResult was used
- bool m_hasSwiftIndirectResult = false;
- // Swift return lowering info
- CORINFO_SWIFT_LOWERING m_swiftReturnLowering = {};
- bool m_hasSwiftReturnLowering = false;
-#endif // TARGET_APPLE && TARGET_ARM64
 CallStubHeader::InvokeFunctionPtr m_pInvokeFunction = NULL;
 bool m_interpreterToNative = false;
@@ -194,7 +185,13 @@ class CallStubGenerator
 PCODE GetFPReg128RangeRoutine(int x1, int x2);
 PCODE GetFPReg32RangeRoutine(int x1, int x2);
 #endif // TARGET_ARM64
-#if defined(TARGET_APPLE)
+#if defined(TARGET_APPLE) && defined(TARGET_ARM64)
+ // Swift calling convention state
+ int m_swiftSelfByRefSize = 0;
+ CORINFO_SWIFT_LOWERING m_swiftReturnLowering = {};
+ bool m_hasSwiftReturnLowering = false;
+
+ // Swift routine helpers
 PCODE GetSwiftSelfRoutine();
 PCODE GetSwiftSelfByRefRoutine();
 PCODE GetSwiftErrorRoutine();
@@ -203,7 +200,20 @@ class CallStubGenerator
 PCODE GetSwiftLoadFPAtOffsetRoutine(int regIndex);
 PCODE GetSwiftStoreGPAtOffsetRoutine(int regIndex);
 PCODE GetSwiftStoreFPAtOffsetRoutine(int regIndex);
-#endif // TARGET_APPLE
+
+ // Swift lowering info for expanded struct elements
+ struct SwiftLoweringElement {
+ uint16_t offset; // Offset within struct
+ uint16_t structSize; // If non-zero, this is the last element, advance x9 by this amount
+ bool isFloat; // True if this element goes in FP register
+ bool isLowered; // True if this is part of a lowered struct
+ };
+
+ void RewriteSignatureForSwiftLowering(MetaSig &sig, SigBuilder &swiftSigBuilder, CQuickArray<SwiftLoweringElement> &swiftLoweringInfo, int &swiftIndirectResultCount);
+ bool ProcessSwiftSpecialArgument(MethodTable* pArgMT, int interpStackSlotSize, int32_t &interpreterStackOffset, PCODE *pRoutines);
+ void EmitSwiftLoweredElementRoutine(SwiftLoweringElement &elem, ArgLocDesc &argLocDesc, PCODE *pRoutines);
+ void EmitSwiftReturnLoweringRoutines(PCODE *pRoutines);
+#endif // TARGET_APPLE && TARGET_ARM64
 PCODE GetGPRegRangeRoutine(int r1, int r2);
 template <typename ArgIteratorType> ReturnType GetReturnType(ArgIteratorType *pArgIt);

From 6926d33658f59248f7de8290cda74e3e22dd55f5 Mon Sep 17 00:00:00 2001
From: Milos Kotlar
Date: Wed, 28 Jan 2026 16:37:44 +0100
Subject: [PATCH 24/33] Simplify type checks

---
 src/coreclr/vm/arm64/asmhelpers.S | 12 +--
 src/coreclr/vm/callstubgenerator.cpp | 132 ++-------------------------
 src/coreclr/vm/corelib.h | 5 +
 src/coreclr/vm/namespace.h | 1 +
 4 files changed, 17 insertions(+), 133 deletions(-)

diff --git a/src/coreclr/vm/arm64/asmhelpers.S b/src/coreclr/vm/arm64/asmhelpers.S
index 7b0fe26309f546..6cd03b86fe9db6 100644
--- a/src/coreclr/vm/arm64/asmhelpers.S
+++ b/src/coreclr/vm/arm64/asmhelpers.S
@@ -1225,9 +1225,7 @@ LEAF_END Load_SwiftIndirectResult
 and w11, w12, #0xFFFF // Extract offset (lower 16 bits)
 ldr \reg, [x9, x11] // Load from [x9 + offset]
 lsr x12, x12, #16 // Shift to get struct_size
- cbz x12, 1f // If struct_size == 0, skip advance
- add x9, x9, x12 // Advance x9 by struct_size
-1:
+ add x9, x9, x12 // Advance x9 by struct_size (no-op if zero)
 ldr x11, [x10], #8
 EPILOG_BRANCH_REG x11
 .endm
@@ -1269,9 +1267,7 @@ LEAF_END Load_X7_AtOffset
 and w11, w12, #0xFFFF // Extract offset (lower 16 bits)
 ldr \reg, [x9, x11] // Load float from [x9 + offset]
 lsr x12, x12, #16 // Shift to get struct_size
- cbz x12, 1f // If struct_size == 0, skip advance
- add x9, x9, x12 // Advance x9 by 
struct_size (no-op if zero) ldr x11, [x10], #8 EPILOG_BRANCH_REG x11 .endm @@ -1316,9 +1312,7 @@ LEAF_ENTRY Load_Stack_AtOffset add x14, sp, x14 // Calculate stack destination str x13, [x14] // Store to native stack ubfx x12, x12, #16, #16 // Extract structSize (bits 16-31) - cbz x12, 1f // If structSize == 0, skip advance - add x9, x9, x12 // Advance x9 by structSize -1: + add x9, x9, x12 // Advance x9 by structSize (no-op if zero) ldr x11, [x10], #8 EPILOG_BRANCH_REG x11 LEAF_END Load_Stack_AtOffset diff --git a/src/coreclr/vm/callstubgenerator.cpp b/src/coreclr/vm/callstubgenerator.cpp index 793a1d86e9398e..a5492922fb8a1c 100644 --- a/src/coreclr/vm/callstubgenerator.cpp +++ b/src/coreclr/vm/callstubgenerator.cpp @@ -2856,122 +2856,6 @@ bool isNativePrimitiveStructType(MethodTable* pMT) } #if defined(TARGET_APPLE) && defined(TARGET_ARM64) -//--------------------------------------------------------------------------- -// isSwiftSelfType: -// Check if the given type is SwiftSelf. -// -// Arguments: -// pMT - the handle for the type. -// -// Return Value: -// true if the given type is SwiftSelf, -// false otherwise. -// -bool isSwiftSelfType(MethodTable* pMT) -{ - const char* namespaceName = nullptr; - const char* typeName = pMT->GetFullyQualifiedNameInfo(&namespaceName); - - if ((namespaceName == NULL) || (typeName == NULL)) - { - return false; - } - - if (strcmp(namespaceName, "System.Runtime.InteropServices.Swift") != 0) - { - return false; - } - - return strcmp(typeName, "SwiftSelf") == 0; -} - -//--------------------------------------------------------------------------- -// isSwiftSelfGenericType: -// Check if the given type is SwiftSelf. -// -// Arguments: -// pMT - the handle for the type. -// -// Return Value: -// true if the given type is SwiftSelf, -// false otherwise. -// -bool isSwiftSelfGenericType(MethodTable* pMT) -{ - const char* namespaceName = nullptr; - const char* typeName = pMT->GetFullyQualifiedNameInfo(&namespaceName); - - if ((namespaceName == NULL) || (typeName == NULL)) - { - return false; - } - - if (strcmp(namespaceName, "System.Runtime.InteropServices.Swift") != 0) - { - return false; - } - - return strcmp(typeName, "SwiftSelf`1") == 0; -} - -//--------------------------------------------------------------------------- -// isSwiftErrorType: -// Check if the given type is SwiftError. -// -// Arguments: -// pMT - the handle for the type. -// -// Return Value: -// true if the given type is SwiftError -// false otherwise. -// -bool isSwiftErrorType(MethodTable* pMT) -{ - const char* namespaceName = nullptr; - const char* typeName = pMT->GetFullyQualifiedNameInfo(&namespaceName); - - if ((namespaceName == NULL) || (typeName == NULL)) - { - return false; - } - - if (strcmp(namespaceName, "System.Runtime.InteropServices.Swift") != 0) - { - return false; - } - - return strcmp(typeName, "SwiftError") == 0; -} - -//--------------------------------------------------------------------------- -// isSwiftIndirectResultType: -// Check if the given type is SwiftIndirectResult. -// -// Arguments: -// pMT - the handle for the type. -// -// Return Value: -// true if the given type is SwiftIndirectResult, -// false otherwise. 
-// -bool isSwiftIndirectResultType(MethodTable* pMT) -{ - const char* namespaceName = nullptr; - const char* typeName = pMT->GetFullyQualifiedNameInfo(&namespaceName); - - if ((namespaceName == NULL) || (typeName == NULL)) - { - return false; - } - - if (strcmp(namespaceName, "System.Runtime.InteropServices.Swift") != 0) - { - return false; - } - - return strcmp(typeName, "SwiftIndirectResult") == 0; -} - //--------------------------------------------------------------------------- // isIntrinsicSIMDType: // Check if the given type is a SIMD type (Vector, Vector64, Vector128, etc.). @@ -4044,7 +3928,7 @@ void CallStubGenerator::RewriteSignatureForSwiftLowering(MetaSig &sig, SigBuilde COMPlusThrow(kInvalidProgramException); } - if (isSwiftSelfType(pArgMT)) + if (pArgMT == CoreLibBinder::GetClass(CLASS__SWIFT_SELF)) { swiftSelfCount++; if (swiftSelfCount > 1) @@ -4055,7 +3939,7 @@ void CallStubGenerator::RewriteSignatureForSwiftLowering(MetaSig &sig, SigBuilde continue; } - if (isSwiftErrorType(pArgMT)) + if (pArgMT == CoreLibBinder::GetClass(CLASS__SWIFT_ERROR)) { swiftErrorCount++; if (swiftErrorCount > 1) @@ -4066,7 +3950,7 @@ void CallStubGenerator::RewriteSignatureForSwiftLowering(MetaSig &sig, SigBuilde continue; } - if (isSwiftIndirectResultType(pArgMT)) + if (pArgMT == CoreLibBinder::GetClass(CLASS__SWIFT_INDIRECT_RESULT)) { swiftIndirectResultCount++; if (swiftIndirectResultCount > 1) @@ -4114,13 +3998,13 @@ void CallStubGenerator::RewriteSignatureForSwiftLowering(MetaSig &sig, SigBuilde MethodTable* pArgMT = thArgType.IsTypeDesc() ? nullptr : thArgType.AsMethodTable(); if (pArgMT != nullptr) { - if (isSwiftIndirectResultType(pArgMT)) + if (pArgMT == CoreLibBinder::GetClass(CLASS__SWIFT_INDIRECT_RESULT)) { // SwiftIndirectResult goes in x8, not in argument registers continue; } // Don't lower Swift* types except SwiftSelf - if (isSwiftSelfType(pArgMT)) + if (pArgMT == CoreLibBinder::GetClass(CLASS__SWIFT_SELF)) { SigPointer pArg = sig.GetArgProps(); pArg.ConvertToInternalExactlyOne(sig.GetModule(), sig.GetSigTypeContext(), &swiftSigBuilder); @@ -4211,7 +4095,7 @@ bool CallStubGenerator::ProcessSwiftSpecialArgument(MethodTable* pArgMT, int int return false; } - if (isSwiftSelfGenericType(pArgMT)) + if (pArgMT->HasSameTypeDefAs(CoreLibBinder::GetClass(CLASS__SWIFT_SELF_T))) { Instantiation inst = pArgMT->GetInstantiation(); _ASSERTE(inst.GetNumArgs() != 0); @@ -4236,7 +4120,7 @@ bool CallStubGenerator::ProcessSwiftSpecialArgument(MethodTable* pArgMT, int int return true; } - if (isSwiftSelfType(pArgMT)) + if (pArgMT == CoreLibBinder::GetClass(CLASS__SWIFT_SELF)) { #if LOG_COMPUTE_CALL_STUB printf("Swift Self argument detected\n"); @@ -4248,7 +4132,7 @@ bool CallStubGenerator::ProcessSwiftSpecialArgument(MethodTable* pArgMT, int int return true; } - if (isSwiftErrorType(pArgMT)) + if (pArgMT == CoreLibBinder::GetClass(CLASS__SWIFT_ERROR)) { #if LOG_COMPUTE_CALL_STUB printf("Swift Error argument detected\n"); diff --git a/src/coreclr/vm/corelib.h b/src/coreclr/vm/corelib.h index 93e80be8c2bec9..b14747f1eba102 100644 --- a/src/coreclr/vm/corelib.h +++ b/src/coreclr/vm/corelib.h @@ -889,6 +889,11 @@ DEFINE_CLASS(CALLCONV_SUPPRESSGCTRANSITION, CompilerServices, CallConvSup DEFINE_CLASS(CALLCONV_MEMBERFUNCTION, CompilerServices, CallConvMemberFunction) DEFINE_CLASS(CALLCONV_SWIFT, CompilerServices, CallConvSwift) +DEFINE_CLASS(SWIFT_SELF, Swift, SwiftSelf) +DEFINE_CLASS(SWIFT_SELF_T, Swift, SwiftSelf`1) +DEFINE_CLASS(SWIFT_ERROR, Swift, SwiftError) +DEFINE_CLASS(SWIFT_INDIRECT_RESULT, 
Swift, SwiftIndirectResult)
+
 DEFINE_CLASS(SAFE_HANDLE, Interop, SafeHandle)
 DEFINE_FIELD(SAFE_HANDLE, HANDLE, handle)
diff --git a/src/coreclr/vm/namespace.h b/src/coreclr/vm/namespace.h
index f3ba695d07dc2d..acc36a9ba11fe4 100644
--- a/src/coreclr/vm/namespace.h
+++ b/src/coreclr/vm/namespace.h
@@ -36,6 +36,7 @@
 #define g_CustomMarshalersNS g_InteropNS ".CustomMarshalers"
 #define g_ObjectiveCNS g_InteropNS ".ObjectiveC"
 #define g_MarshallingNS g_InteropNS ".Marshalling"
+#define g_SwiftNS g_InteropNS ".Swift"
 #define g_IntrinsicsNS g_RuntimeNS ".Intrinsics"
 #define g_NumericsNS g_SystemNS ".Numerics"

From 35fb7ac735a2eefbdfe8b23922a0287c92a7b44c Mon Sep 17 00:00:00 2001
From: Milos Kotlar
Date: Fri, 30 Jan 2026 12:01:25 +0100
Subject: [PATCH 25/33] Add magic word

---
 src/coreclr/vm/arm64/asmhelpers.S | 18 ++++++++++--------
 1 file changed, 10 insertions(+), 8 deletions(-)

diff --git a/src/coreclr/vm/arm64/asmhelpers.S b/src/coreclr/vm/arm64/asmhelpers.S
index 6cd03b86fe9db6..47f52bad7f64d3 100644
--- a/src/coreclr/vm/arm64/asmhelpers.S
+++ b/src/coreclr/vm/arm64/asmhelpers.S
@@ -748,10 +748,11 @@ LOCAL_LABEL(HaveInterpThreadContext):
 // Fill in the ContinuationContext register
 ldr x2, [sp, #(__PWTB_ArgumentRegister_FirstArg + 16)]
- // Check if caller has SwiftError
- ldr x11, [fp]
- ldr x11, [x11, #40]
- cbz x11, LOCAL_LABEL(InterpreterStub_NoSwiftError)
+ // Check if caller has Swift error
+ ldr x11, [fp, #48]
+ mov x12, #0x5E5E
+ cmp x11, x12
+ b.ne LOCAL_LABEL(InterpreterStub_NoSwiftError)
 EPILOG_WITH_TRANSITION_BLOCK_RETURN_SKIP_SWIFT_ERROR
LOCAL_LABEL(InterpreterStub_NoSwiftError):
 EPILOG_WITH_TRANSITION_BLOCK_RETURN
@@ -1209,6 +1210,7 @@ LEAF_END Load_SwiftSelf_ByRef
 LEAF_ENTRY Load_SwiftError
 ldr x11, [x9], #8
 str x11, [fp, #40]
+ mov x23, #0x5E5E // Signal to InterpreterStub that Swift error is used
 mov x21, xzr
 ldr x11, [x10], #8
 EPILOG_BRANCH_REG x11
@@ -3150,7 +3152,7 @@ NESTED_END CallJittedMethodRet4Vector128, _TEXT
 NESTED_ENTRY CallJittedMethodRetSwiftLowered, _TEXT, NoHandler
 PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -64
 stp x2, x4, [fp, #16]
- str xzr, [fp, #56]
+ str xzr, [fp, #40]
 // Store the return address for the terminator to use
 adr x11, LOCAL_LABEL(CallJittedMethodRetSwiftLowered_Epilog)
 str x11, [fp, #32]
@@ -3158,20 +3160,20 @@ NESTED_ENTRY CallJittedMethodRetSwiftLowered, _TEXT, NoHandler
 ldrsw x11, [x0, #-20] // Load TargetSlotIndex
 add x11, x11, #1 // TargetSlotIndex + 1
 add x11, x0, x11, lsl #3 // x0 + (TargetSlotIndex + 1) * 8
- str x11, [fp, #40] // Save store routines start pointer
+ str x11, [fp, #56] // Save store routines start pointer
 sub sp, sp, x3
 mov x10, x0
 mov x9, x1
 ldr x11, [x10], #8
 blr x11
- ldr x11, [fp, #56]
+ ldr x11, [fp, #40]
 cbz x11, LOCAL_LABEL(CallJittedMethodRetSwiftLowered_NoSwiftError)
 str x21, [x11]
LOCAL_LABEL(CallJittedMethodRetSwiftLowered_NoSwiftError):
 ldr x12, [fp, #24]
 str x2, [x12] // Store continuation return value
 ldr x9, [fp, #16] // Load return buffer address into x9
- ldr x10, [fp, #40] // Load store routines start pointer
+ ldr x10, [fp, #56] // Load store routines start pointer
 ldr x11, [x10], #8 // Load first store routine
 br x11
LOCAL_LABEL(CallJittedMethodRetSwiftLowered_Epilog):
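The magic word above is a sentinel: Load_SwiftError plants 0x5E5E so that InterpreterStub can tell, after the call, that a Swift error slot was populated. The next patch replaces this with an explicit HasSwiftError byte on CallStubHeader, presumably because a sentinel probed out of a frame slot can collide with real data, while a flag on the stub metadata cannot. A minimal sketch contrasting the two checks; the type and function names here are abbreviations for illustration, not the runtime's:

#include <stdint.h>

const uint64_t SWIFT_ERROR_MAGIC = 0x5E5E; // sentinel planted by Load_SwiftError

struct StubHeaderSketch
{
    bool HasSwiftError; // explicit flag, added to CallStubHeader in the next patch
};

// Sentinel scheme: misfires whenever the probed frame slot happens to hold 0x5E5E.
static bool HasSwiftErrorSentinel(uint64_t probedSlot)
{
    return probedSlot == SWIFT_ERROR_MAGIC;
}

// Flag scheme: the decision is keyed off stub metadata instead of frame contents.
static bool HasSwiftErrorFlag(const StubHeaderSketch* header)
{
    return header->HasSwiftError;
}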
From c57c6a539fe040149e1b5283a619293e56298b01 Mon Sep 17 00:00:00 2001
From: Milos Kotlar
Date: Fri, 30 Jan 2026 13:58:45 +0100
Subject: [PATCH 26/33] Add Swift error handling support in call stubs

---
 src/coreclr/vm/arm64/asmconstants.h | 3 ++
 src/coreclr/vm/arm64/asmhelpers.S | 11 +++----
 src/coreclr/vm/callstubgenerator.cpp | 47 ++++++++++++++++++++++----------
 src/coreclr/vm/callstubgenerator.h | 6 +++-
 4 files changed, 44 insertions(+), 23 deletions(-)

diff --git a/src/coreclr/vm/arm64/asmconstants.h b/src/coreclr/vm/arm64/asmconstants.h
index 1575d54928342d..27427901ddbbff 100644
--- a/src/coreclr/vm/arm64/asmconstants.h
+++ b/src/coreclr/vm/arm64/asmconstants.h
@@ -344,6 +344,9 @@ ASMCONSTANTS_C_ASSERT(OFFSETOF__Thread__m_pInterpThreadContext == offsetof(Threa
 #define OFFSETOF__InterpThreadContext__pStackPointer 0x10
 ASMCONSTANTS_C_ASSERT(OFFSETOF__InterpThreadContext__pStackPointer == offsetof(InterpThreadContext, pStackPointer))
+#define OFFSETOF__CallStubHeader__HasSwiftError 0x0D
+ASMCONSTANTS_C_ASSERT(OFFSETOF__CallStubHeader__HasSwiftError == offsetof(CallStubHeader, HasSwiftError))
+
 #define OFFSETOF__CallStubHeader__Routines 0x18
 ASMCONSTANTS_C_ASSERT(OFFSETOF__CallStubHeader__Routines == offsetof(CallStubHeader, Routines))
diff --git a/src/coreclr/vm/arm64/asmhelpers.S b/src/coreclr/vm/arm64/asmhelpers.S
index 47f52bad7f64d3..1f6e6c86372c41 100644
--- a/src/coreclr/vm/arm64/asmhelpers.S
+++ b/src/coreclr/vm/arm64/asmhelpers.S
@@ -748,11 +748,11 @@ LOCAL_LABEL(HaveInterpThreadContext):
 // Fill in the ContinuationContext register
 ldr x2, [sp, #(__PWTB_ArgumentRegister_FirstArg + 16)]
- // Check if caller has Swift error
- ldr x11, [fp, #48]
- mov x12, #0x5E5E
- cmp x11, x12
- b.ne LOCAL_LABEL(InterpreterStub_NoSwiftError)
+ // Check if this stub has Swift error handling
+ ldr x11, [x19] // InterpMethod*
+ ldr x11, [x11, #OFFSETOF__InterpMethod__pCallStub] // CallStubHeader*
+ ldrb w11, [x11, #OFFSETOF__CallStubHeader__HasSwiftError]
+ cbz x11, LOCAL_LABEL(InterpreterStub_NoSwiftError)
 EPILOG_WITH_TRANSITION_BLOCK_RETURN_SKIP_SWIFT_ERROR
LOCAL_LABEL(InterpreterStub_NoSwiftError):
 EPILOG_WITH_TRANSITION_BLOCK_RETURN
@@ -1210,7 +1210,6 @@ LEAF_END Load_SwiftSelf_ByRef
 LEAF_ENTRY Load_SwiftError
 ldr x11, [x9], #8
 str x11, [fp, #40]
- mov x23, #0x5E5E // Signal to InterpreterStub that Swift error is used
 mov x21, xzr
 ldr x11, [x10], #8
 EPILOG_BRANCH_REG x11
diff --git a/src/coreclr/vm/callstubgenerator.cpp b/src/coreclr/vm/callstubgenerator.cpp
index a5492922fb8a1c..3a57ae4b88ccd8 100644
--- a/src/coreclr/vm/callstubgenerator.cpp
+++ b/src/coreclr/vm/callstubgenerator.cpp
@@ -2608,17 +2608,18 @@ CallStubHeader *CallStubGenerator::GenerateCallStub(MethodDesc *pMD, AllocMemTra
 LoaderAllocator *pLoaderAllocator = pMD->GetLoaderAllocator();
 S_SIZE_T finalStubSize(sizeof(CallStubHeader) + m_routineIndex * sizeof(PCODE));
 void *pHeaderStorage = pamTracker->Track(pLoaderAllocator->GetHighFrequencyHeap()->AllocMem(finalStubSize));
+ bool hasSwiftError = m_isSwiftCallConv && m_hasSwiftError && pMD->IsILStub();
 int targetSlotIndex = m_interpreterToNative ? 
m_targetSlotIndex : (m_routineIndex - 1); - CallStubHeader *pHeader = new (pHeaderStorage) CallStubHeader(m_routineIndex, targetSlotIndex, pRoutines, ALIGN_UP(m_totalStackSize, STACK_ALIGN_SIZE), sig.IsAsyncCall(), m_pInvokeFunction); + CallStubHeader *pHeader = new (pHeaderStorage) CallStubHeader(m_routineIndex, targetSlotIndex, pRoutines, ALIGN_UP(m_totalStackSize, STACK_ALIGN_SIZE), sig.IsAsyncCall(), hasSwiftError, m_pInvokeFunction); return pHeader; } struct CachedCallStubKey { - CachedCallStubKey(int32_t hashCode, int numRoutines, int targetSlotIndex, PCODE *pRoutines, int totalStackSize, bool hasContinuationRet, CallStubHeader::InvokeFunctionPtr pInvokeFunction) - : HashCode(hashCode), NumRoutines(numRoutines), TargetSlotIndex(targetSlotIndex), TotalStackSize(totalStackSize), HasContinuationRet(hasContinuationRet), Invoke(pInvokeFunction), Routines(pRoutines) + CachedCallStubKey(int32_t hashCode, int numRoutines, int targetSlotIndex, PCODE *pRoutines, int totalStackSize, bool hasContinuationRet, bool hasSwiftError, CallStubHeader::InvokeFunctionPtr pInvokeFunction) + : HashCode(hashCode), NumRoutines(numRoutines), TargetSlotIndex(targetSlotIndex), TotalStackSize(totalStackSize), HasContinuationRet(hasContinuationRet), HasSwiftError(hasSwiftError), Invoke(pInvokeFunction), Routines(pRoutines) { } @@ -2626,7 +2627,7 @@ struct CachedCallStubKey { LIMITED_METHOD_CONTRACT; - if (HashCode != other.HashCode || NumRoutines != other.NumRoutines || TargetSlotIndex != other.TargetSlotIndex || TotalStackSize != other.TotalStackSize || Invoke != other.Invoke || HasContinuationRet != other.HasContinuationRet) + if (HashCode != other.HashCode || NumRoutines != other.NumRoutines || TargetSlotIndex != other.TargetSlotIndex || TotalStackSize != other.TotalStackSize || Invoke != other.Invoke || HasContinuationRet != other.HasContinuationRet || HasSwiftError != other.HasSwiftError) return false; for (int i = 0; i < NumRoutines; i++) @@ -2642,15 +2643,16 @@ struct CachedCallStubKey const int TargetSlotIndex = 0; const int TotalStackSize = 0; const bool HasContinuationRet = false; + const bool HasSwiftError = false; const CallStubHeader::InvokeFunctionPtr Invoke = NULL; // Pointer to the invoke function const PCODE *Routines; }; struct CachedCallStub { - CachedCallStub(int32_t hashCode, int numRoutines, int targetSlotIndex, PCODE *pRoutines, int totalStackSize, bool hasContinuationRet, CallStubHeader::InvokeFunctionPtr pInvokeFunction) : + CachedCallStub(int32_t hashCode, int numRoutines, int targetSlotIndex, PCODE *pRoutines, int totalStackSize, bool hasContinuationRet, bool hasSwiftError, CallStubHeader::InvokeFunctionPtr pInvokeFunction) : HashCode(hashCode), - Header(numRoutines, targetSlotIndex, pRoutines, totalStackSize, hasContinuationRet, pInvokeFunction) + Header(numRoutines, targetSlotIndex, pRoutines, totalStackSize, hasContinuationRet, hasSwiftError, pInvokeFunction) { } @@ -2666,6 +2668,7 @@ struct CachedCallStub &Header.Routines[0], Header.TotalStackSize, Header.HasContinuationRet, + Header.HasSwiftError, Header.Invoke); } @@ -2714,6 +2717,7 @@ CallStubHeader *CallStubGenerator::GenerateCallStubForSig(MetaSig &sig) hashState.AddPointer((void*)m_pInvokeFunction); hashState.Add(sig.IsAsyncCall() ? 1 : 0); hashState.Add(m_targetSlotIndex); + hashState.Add(m_hasSwiftError ? 
1 : 0); CachedCallStubKey cachedHeaderKey( hashState.ToHashCode(), @@ -2722,6 +2726,7 @@ CallStubHeader *CallStubGenerator::GenerateCallStubForSig(MetaSig &sig) pRoutines, ALIGN_UP(m_totalStackSize, STACK_ALIGN_SIZE), sig.IsAsyncCall(), + m_hasSwiftError, m_pInvokeFunction); CrstHolder lockHolder(&s_callStubCrst); @@ -2741,7 +2746,7 @@ CallStubHeader *CallStubGenerator::GenerateCallStubForSig(MetaSig &sig) // We only need to allocate the actual pRoutines array, and then we can just use the cachedHeader we already constructed size_t finalCachedCallStubSize = sizeof(CachedCallStub) + m_routineIndex * sizeof(PCODE); void* pHeaderStorage = amTracker.Track(SystemDomain::GetGlobalLoaderAllocator()->GetHighFrequencyHeap()->AllocMem(S_SIZE_T(finalCachedCallStubSize))); - CachedCallStub *pHeader = new (pHeaderStorage) CachedCallStub(cachedHeaderKey.HashCode, m_routineIndex, m_targetSlotIndex, pRoutines, ALIGN_UP(m_totalStackSize, STACK_ALIGN_SIZE), sig.IsAsyncCall(), m_pInvokeFunction); + CachedCallStub *pHeader = new (pHeaderStorage) CachedCallStub(cachedHeaderKey.HashCode, m_routineIndex, m_targetSlotIndex, pRoutines, ALIGN_UP(m_totalStackSize, STACK_ALIGN_SIZE), sig.IsAsyncCall(), m_hasSwiftError, m_pInvokeFunction); s_callStubCache->Add(pHeader); amTracker.SuppressRelease(); @@ -2906,9 +2911,16 @@ void CallStubGenerator::ComputeCallStub(MetaSig &sig, PCODE *pRoutines, MethodDe PInvoke::GetCallingConvention_IgnoreErrors(pMD, &unmanagedCallConv, NULL); hasUnmanagedCallConv = true; } - // NOTE: IL stubs don't actually have an UnmanagedCallersOnly attribute, - // even though the HasUnmanagedCallersOnlyAttribute method may return true for them. - else if (pMD != NULL && pMD->HasUnmanagedCallersOnlyAttribute() && !pMD->IsILStub()) + else if (pMD != NULL && pMD->IsILStub()) + { + MethodDesc* pTargetMD = pMD->AsDynamicMethodDesc()->GetILStubResolver()->GetStubTargetMethodDesc(); + if (pTargetMD != NULL && pTargetMD->IsPInvoke()) + { + PInvoke::GetCallingConvention_IgnoreErrors(pTargetMD, &unmanagedCallConv, NULL); + hasUnmanagedCallConv = true; + } + } + else if (pMD != NULL && pMD->HasUnmanagedCallersOnlyAttribute()) { if (CallConv::TryGetCallingConventionFromUnmanagedCallersOnly(pMD, &unmanagedCallConv)) { @@ -2982,6 +2994,7 @@ void CallStubGenerator::ComputeCallStubWorker(bool hasUnmanagedCallConv, CorInfo { #if defined(TARGET_APPLE) && defined(TARGET_ARM64) isSwiftCallConv = (unmanagedCallConv == CorInfoCallConvExtension::Swift); + m_isSwiftCallConv = isSwiftCallConv; if (!isSwiftCallConv) #endif { @@ -3169,7 +3182,7 @@ void CallStubGenerator::ComputeCallStubWorker(bool hasUnmanagedCallConv, CorInfo TypeHandle thArgTypeHandle; CorElementType corType = argIt.GetArgType(&thArgTypeHandle); #if defined(TARGET_APPLE) && defined(TARGET_ARM64) - if (isSwiftCallConv) + if (isSwiftCallConv && m_interpreterToNative) { MethodTable* pArgMT = nullptr; @@ -3863,11 +3876,6 @@ CallStubGenerator::ReturnType CallStubGenerator::GetReturnType(ArgIteratorType * #if defined(TARGET_APPLE) && defined(TARGET_ARM64) void CallStubGenerator::RewriteSignatureForSwiftLowering(MetaSig &sig, SigBuilder &swiftSigBuilder, CQuickArray &swiftLoweringInfo, int &swiftIndirectResultCount) { - if (!m_interpreterToNative) - { - COMPlusThrow(kNotImplementedException); - } - sig.Reset(); TypeHandle thReturnType; CorElementType retCorType = sig.GetReturnTypeNormalized(&thReturnType); @@ -3942,6 +3950,7 @@ void CallStubGenerator::RewriteSignatureForSwiftLowering(MetaSig &sig, SigBuilde if (pArgMT == 
CoreLibBinder::GetClass(CLASS__SWIFT_ERROR)) { swiftErrorCount++; + m_hasSwiftError = true; if (swiftErrorCount > 1) { COMPlusThrow(kInvalidProgramException); @@ -3977,6 +3986,12 @@ void CallStubGenerator::RewriteSignatureForSwiftLowering(MetaSig &sig, SigBuilde newArgCount++; } + if (!m_interpreterToNative) + { + sig.Reset(); + return; + } + swiftLoweringInfo.ReSizeThrows(newArgCount); int loweringIndex = 0; diff --git a/src/coreclr/vm/callstubgenerator.h b/src/coreclr/vm/callstubgenerator.h index a64458738a97f2..a499090bc24109 100644 --- a/src/coreclr/vm/callstubgenerator.h +++ b/src/coreclr/vm/callstubgenerator.h @@ -24,13 +24,14 @@ struct CallStubHeader // Total stack size used for the arguments. int TotalStackSize; bool HasContinuationRet; // Indicates whether the stub supports returning a continuation + bool HasSwiftError; // Indicates whether the stub has a Swift error parameter // This is a pointer to a helper function that invokes the target method. There are several // versions of this function, depending on the return type of the target method. InvokeFunctionPtr Invoke; // This is an array of routines that translate the arguments from the interpreter stack to the CPU registers and native stack. PCODE Routines[0]; - CallStubHeader(int numRoutines, int targetSlotIndex, PCODE *pRoutines, int totalStackSize, bool hasContinuationRet, InvokeFunctionPtr pInvokeFunction) + CallStubHeader(int numRoutines, int targetSlotIndex, PCODE *pRoutines, int totalStackSize, bool hasContinuationRet, bool hasSwiftError, InvokeFunctionPtr pInvokeFunction) { LIMITED_METHOD_CONTRACT; @@ -39,6 +40,7 @@ struct CallStubHeader TotalStackSize = totalStackSize; Invoke = pInvokeFunction; HasContinuationRet = hasContinuationRet; + HasSwiftError = hasSwiftError; memcpy(Routines, pRoutines, NumRoutines * sizeof(PCODE)); } @@ -169,6 +171,8 @@ class CallStubGenerator CallStubHeader::InvokeFunctionPtr m_pInvokeFunction = NULL; bool m_interpreterToNative = false; + bool m_hasSwiftError = false; + bool m_isSwiftCallConv = false; #if !defined(UNIX_AMD64_ABI) && defined(ENREGISTERED_PARAMTYPE_MAXSIZE) PCODE GetGPRegRefRoutine(int r); From 525b8d6a25b961db94ace5509b81e06dd68fd38b Mon Sep 17 00:00:00 2001 From: Milos Kotlar Date: Fri, 30 Jan 2026 16:43:57 +0100 Subject: [PATCH 27/33] Remove unused Swift lowered interpreter stub and related references --- src/coreclr/vm/arm64/asmhelpers.S | 19 ------------------- src/coreclr/vm/callstubgenerator.cpp | 7 ------- 2 files changed, 26 deletions(-) diff --git a/src/coreclr/vm/arm64/asmhelpers.S b/src/coreclr/vm/arm64/asmhelpers.S index 1f6e6c86372c41..c4028f2e7e282c 100644 --- a/src/coreclr/vm/arm64/asmhelpers.S +++ b/src/coreclr/vm/arm64/asmhelpers.S @@ -37,14 +37,12 @@ NESTED_ENTRY PInvokeImportThunk, _TEXT, NoHandler PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -224 SAVE_ARGUMENT_REGISTERS sp, 16 SAVE_FLOAT_ARGUMENT_REGISTERS sp, 96 - str x8, [sp, #208] // Save x8 (indirect result register for Swift) mov x0, x12 bl C_FUNC(PInvokeImportWorker) mov x12, x0 // pop the stack and restore original register state - ldr x8, [sp, #208] // Restore x8 (indirect result register for Swift) RESTORE_FLOAT_ARGUMENT_REGISTERS sp, 96 RESTORE_ARGUMENT_REGISTERS sp, 16 EPILOG_RESTORE_REG_PAIR_INDEXED fp, lr, 224 @@ -1015,23 +1013,6 @@ NESTED_ENTRY InterpreterStubRet4Vector128, _TEXT, NoHandler EPILOG_RETURN NESTED_END InterpreterStubRet4Vector128, _TEXT -// When interpreted method is called with Swift calling convention and returns a lowered struct, -// this stub writes return values to a buffer. 
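// For reference (illustrative sketch, not part of this patch): after this removal,
// a Swift lowered return no longer flows through a dedicated interpreter stub.
// Instead, CallJittedMethodRetSwiftLowered walks the store routines recorded after
// TargetSlotIndex, with x9 holding the return buffer and x10 the routine stream.
// Each store routine emitted by EmitSwiftReturnLoweringRoutines is assumed to have
// the following shape (the routine name here is hypothetical):
//
// LEAF_ENTRY Store_X0_AtOffset
//     ldr x12, [x10], #8   // next routine-stream slot holds the field offset
//     str x0, [x9, x12]    // store the GP result element into the return buffer
//     ldr x11, [x10], #8   // chain to the next store routine
//     EPILOG_BRANCH_REG x11
// LEAF_END Store_X0_AtOffset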
-NESTED_ENTRY InterpreterStubRetSwiftLowered, _TEXT, NoHandler - PROLOG_SAVE_REG_PAIR_NO_FP_INDEXED fp, lr, -16 - add x0, sp, #__PWTB_TransitionBlock + 16 - mov x1, x19 // the IR bytecode pointer - mov x2, xzr - bl C_FUNC(ExecuteInterpretedMethod) - mov x9, x0 // Save return buffer address - ldp x0, x1, [x9] - ldp x2, x3, [x9, #16] - ldp d0, d1, [x9, #32] - ldp d2, d3, [x9, #48] - EPILOG_RESTORE_REG_PAIR_INDEXED fp, lr, 16 - EPILOG_RETURN -NESTED_END InterpreterStubRetSwiftLowered, _TEXT - // Copy arguments from the processor stack to the interpreter stack // The CPU stack slots are aligned to pointer size. diff --git a/src/coreclr/vm/callstubgenerator.cpp b/src/coreclr/vm/callstubgenerator.cpp index 3a57ae4b88ccd8..e02c67b2a9a8af 100644 --- a/src/coreclr/vm/callstubgenerator.cpp +++ b/src/coreclr/vm/callstubgenerator.cpp @@ -2309,9 +2309,6 @@ extern "C" void InterpreterStubRetVector128(); extern "C" void InterpreterStubRet2Vector128(); extern "C" void InterpreterStubRet3Vector128(); extern "C" void InterpreterStubRet4Vector128(); -#if defined(TARGET_APPLE) -extern "C" void InterpreterStubRetSwiftLowered(); -#endif // TARGET_APPLE #endif // TARGET_ARM64 #if defined(TARGET_RISCV64) @@ -2553,10 +2550,6 @@ PCODE CallStubGenerator::GetInterpreterReturnTypeHandler(CallStubGenerator::Retu RETURN_TYPE_HANDLER(InterpreterStubRet3Vector128); case ReturnType4Vector128: RETURN_TYPE_HANDLER(InterpreterStubRet4Vector128); -#if defined(TARGET_APPLE) - case ReturnTypeSwiftLowered: - RETURN_TYPE_HANDLER(InterpreterStubRetSwiftLowered); -#endif // TARGET_APPLE #endif // TARGET_ARM64 #if defined(TARGET_RISCV64) case ReturnType2I8: From 306e656473fd96739b3ec0e30400c3dcdc48939d Mon Sep 17 00:00:00 2001 From: Milos Kotlar Date: Fri, 30 Jan 2026 17:18:56 +0100 Subject: [PATCH 28/33] Improve code checks and remove unused stubs --- src/coreclr/vm/arm64/asmhelpers.S | 7 +-- src/coreclr/vm/callstubgenerator.cpp | 65 ++++++++++------------------ 2 files changed, 24 insertions(+), 48 deletions(-) diff --git a/src/coreclr/vm/arm64/asmhelpers.S b/src/coreclr/vm/arm64/asmhelpers.S index c4028f2e7e282c..acb818ef81816c 100644 --- a/src/coreclr/vm/arm64/asmhelpers.S +++ b/src/coreclr/vm/arm64/asmhelpers.S @@ -737,6 +737,7 @@ LOCAL_LABEL(HaveInterpThreadContext): ldr x9, [x19] // InterpMethod* ldr x9, [x9, #OFFSETOF__InterpMethod__pCallStub] cbz x9, LOCAL_LABEL(NoManagedThreadOrCallStub) + mov x23, x9 // Save CallStubHeader* for later use add x10, x9, #OFFSETOF__CallStubHeader__Routines ldr x9, [x11, #OFFSETOF__InterpThreadContext__pStackPointer] // x19 contains IR bytecode address @@ -747,9 +748,7 @@ LOCAL_LABEL(HaveInterpThreadContext): ldr x2, [sp, #(__PWTB_ArgumentRegister_FirstArg + 16)] // Check if this stub has Swift error handling - ldr x11, [x19] // InterpMethod* - ldr x11, [x11, #OFFSETOF__InterpMethod__pCallStub] // CallStubHeader* - ldrb w11, [x11, #OFFSETOF__CallStubHeader__HasSwiftError] + ldrb w11, [x23, #OFFSETOF__CallStubHeader__HasSwiftError] cbz x11, LOCAL_LABEL(InterpreterStub_NoSwiftError) EPILOG_WITH_TRANSITION_BLOCK_RETURN_SKIP_SWIFT_ERROR LOCAL_LABEL(InterpreterStub_NoSwiftError): @@ -3136,7 +3135,6 @@ NESTED_ENTRY CallJittedMethodRetSwiftLowered, _TEXT, NoHandler // Store the return address for the terminator to use adr x11, LOCAL_LABEL(CallJittedMethodRetSwiftLowered_Epilog) str x11, [fp, #32] - str x19, [fp, #48] ldrsw x11, [x0, #-20] // Load TargetSlotIndex add x11, x11, #1 // TargetSlotIndex + 1 add x11, x0, x11, lsl #3 // x0 + (TargetSlotIndex + 1) * 8 @@ -3157,7 +3155,6 @@ 
LOCAL_LABEL(CallJittedMethodRetSwiftLowered_NoSwiftError): ldr x11, [x10], #8 // Load first store routine br x11 LOCAL_LABEL(CallJittedMethodRetSwiftLowered_Epilog): - ldr x19, [fp, #48] // Restore x19 EPILOG_STACK_RESTORE EPILOG_RESTORE_REG_PAIR_INDEXED fp, lr, 64 EPILOG_RETURN diff --git a/src/coreclr/vm/callstubgenerator.cpp b/src/coreclr/vm/callstubgenerator.cpp index e02c67b2a9a8af..83857ab2c34d1c 100644 --- a/src/coreclr/vm/callstubgenerator.cpp +++ b/src/coreclr/vm/callstubgenerator.cpp @@ -2853,47 +2853,6 @@ bool isNativePrimitiveStructType(MethodTable* pMT) return strcmp(typeName, "CLong") == 0 || strcmp(typeName, "CULong") == 0 || strcmp(typeName, "NFloat") == 0; } -#if defined(TARGET_APPLE) && defined(TARGET_ARM64) -//--------------------------------------------------------------------------- -// isIntrinsicSIMDType: -// Check if the given type is a SIMD type (Vector, Vector64, Vector128, etc.). -// -// Arguments: -// pMT - the handle for the type. -// -// Return Value: -// true if the given type is a SIMD type, -// false otherwise. -// -bool isIntrinsicSIMDType(MethodTable* pMT) -{ - if (!pMT->IsIntrinsicType()) - { - return false; - } - - const char* namespaceName = nullptr; - const char* typeName = pMT->GetFullyQualifiedNameInfo(&namespaceName); - - if ((namespaceName == NULL) || (typeName == NULL)) - { - return false; - } - - if (strcmp(namespaceName, "System.Runtime.Intrinsics") == 0) - { - return true; - } - - if (strcmp(namespaceName, "System.Numerics") == 0) - { - return true; - } - - return false; -} -#endif // TARGET_APPLE && TARGET_ARM64 - void CallStubGenerator::ComputeCallStub(MetaSig &sig, PCODE *pRoutines, MethodDesc *pMD) { bool hasUnmanagedCallConv = false; @@ -3875,7 +3834,12 @@ void CallStubGenerator::RewriteSignatureForSwiftLowering(MetaSig &sig, SigBuilde if (retCorType == ELEMENT_TYPE_VALUETYPE && !thReturnType.IsNull() && !thReturnType.IsTypeDesc()) { MethodTable* pRetMT = thReturnType.AsMethodTable(); - if (pRetMT->IsValueType() && !pRetMT->IsHFA() && !isIntrinsicSIMDType(pRetMT)) + if (pRetMT->IsValueType() && !pRetMT->IsHFA() && + !pRetMT->HasSameTypeDefAs(CoreLibBinder::GetClass(CLASS__VECTOR64T)) && + !pRetMT->HasSameTypeDefAs(CoreLibBinder::GetClass(CLASS__VECTOR128T)) && + !pRetMT->HasSameTypeDefAs(CoreLibBinder::GetClass(CLASS__VECTOR256T)) && + !pRetMT->HasSameTypeDefAs(CoreLibBinder::GetClass(CLASS__VECTOR512T)) && + !pRetMT->HasSameTypeDefAs(CoreLibBinder::GetClass(CLASS__VECTORT))) { CORINFO_SWIFT_LOWERING lowering = {}; pRetMT->GetNativeSwiftPhysicalLowering(&lowering, false); @@ -3924,7 +3888,11 @@ void CallStubGenerator::RewriteSignatureForSwiftLowering(MetaSig &sig, SigBuilde COMPlusThrow(kInvalidProgramException); } - if (isIntrinsicSIMDType(pArgMT)) + if (pArgMT->HasSameTypeDefAs(CoreLibBinder::GetClass(CLASS__VECTOR64T)) || + pArgMT->HasSameTypeDefAs(CoreLibBinder::GetClass(CLASS__VECTOR128T)) || + pArgMT->HasSameTypeDefAs(CoreLibBinder::GetClass(CLASS__VECTOR256T)) || + pArgMT->HasSameTypeDefAs(CoreLibBinder::GetClass(CLASS__VECTOR512T)) || + pArgMT->HasSameTypeDefAs(CoreLibBinder::GetClass(CLASS__VECTORT))) { COMPlusThrow(kInvalidProgramException); } @@ -3940,6 +3908,17 @@ void CallStubGenerator::RewriteSignatureForSwiftLowering(MetaSig &sig, SigBuilde continue; } + if (pArgMT->HasSameTypeDefAs(CoreLibBinder::GetClass(CLASS__SWIFT_SELF_T))) + { + swiftSelfCount++; + if (swiftSelfCount > 1) + { + COMPlusThrow(kInvalidProgramException); + } + + // Fall through for struct lowering + } + if (pArgMT == 
CoreLibBinder::GetClass(CLASS__SWIFT_ERROR)) { swiftErrorCount++; From 79527ecc4a3fe6d30ed0296b3eb5b994c089239e Mon Sep 17 00:00:00 2001 From: Milos Kotlar Date: Fri, 30 Jan 2026 17:50:24 +0100 Subject: [PATCH 29/33] Add Swift error handling to missing stubs --- src/coreclr/vm/arm64/asmhelpers.S | 36 ++++++++++++++++++++++++------- 1 file changed, 28 insertions(+), 8 deletions(-) diff --git a/src/coreclr/vm/arm64/asmhelpers.S b/src/coreclr/vm/arm64/asmhelpers.S index acb818ef81816c..d5dffd31dec298 100644 --- a/src/coreclr/vm/arm64/asmhelpers.S +++ b/src/coreclr/vm/arm64/asmhelpers.S @@ -2535,20 +2535,25 @@ NESTED_END CallJittedMethodRetBuff, _TEXT // X3 - stack arguments size (properly aligned) // X4 - address of continuation return value NESTED_ENTRY CallJittedMethodRetI1, _TEXT, NoHandler - PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -32 + PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -48 stp x2, x4, [fp, #16] + str xzr, [fp, #40] sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 + ldr x11, [fp, #40] + cbz x11, LOCAL_LABEL(CallJittedMethodRetI1_NoSwiftError) + str x21, [x11] +LOCAL_LABEL(CallJittedMethodRetI1_NoSwiftError): sxtb x0, w0 ldr x9, [fp, #24] str x2, [x9] ldr x9, [fp, #16] str x0, [x9] EPILOG_STACK_RESTORE - EPILOG_RESTORE_REG_PAIR_INDEXED fp, lr, 32 + EPILOG_RESTORE_REG_PAIR_INDEXED fp, lr, 48 EPILOG_RETURN NESTED_END CallJittedMethodRetI1, _TEXT @@ -2558,20 +2563,25 @@ NESTED_END CallJittedMethodRetI1, _TEXT // X3 - stack arguments size (properly aligned) // X4 - address of continuation return value NESTED_ENTRY CallJittedMethodRetI2, _TEXT, NoHandler - PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -32 + PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -48 stp x2, x4, [fp, #16] + str xzr, [fp, #40] sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 + ldr x11, [fp, #40] + cbz x11, LOCAL_LABEL(CallJittedMethodRetI2_NoSwiftError) + str x21, [x11] +LOCAL_LABEL(CallJittedMethodRetI2_NoSwiftError): sxth x0, w0 ldr x9, [fp, #24] str x2, [x9] ldr x9, [fp, #16] str x0, [x9] EPILOG_STACK_RESTORE - EPILOG_RESTORE_REG_PAIR_INDEXED fp, lr, 32 + EPILOG_RESTORE_REG_PAIR_INDEXED fp, lr, 48 EPILOG_RETURN NESTED_END CallJittedMethodRetI2, _TEXT @@ -2608,20 +2618,25 @@ NESTED_END CallJittedMethodRetI8, _TEXT // X3 - stack arguments size (properly aligned) // X4 - address of continuation return value NESTED_ENTRY CallJittedMethodRetU1, _TEXT, NoHandler - PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -32 + PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -48 stp x2, x4, [fp, #16] + str xzr, [fp, #40] sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 + ldr x11, [fp, #40] + cbz x11, LOCAL_LABEL(CallJittedMethodRetU1_NoSwiftError) + str x21, [x11] +LOCAL_LABEL(CallJittedMethodRetU1_NoSwiftError): uxtb x0, w0 ldr x9, [fp, #24] str x2, [x9] ldr x9, [fp, #16] str x0, [x9] EPILOG_STACK_RESTORE - EPILOG_RESTORE_REG_PAIR_INDEXED fp, lr, 32 + EPILOG_RESTORE_REG_PAIR_INDEXED fp, lr, 48 EPILOG_RETURN NESTED_END CallJittedMethodRetU1, _TEXT @@ -2631,20 +2646,25 @@ NESTED_END CallJittedMethodRetU1, _TEXT // X3 - stack arguments size (properly aligned) // X4 - address of continuation return value NESTED_ENTRY CallJittedMethodRetU2, _TEXT, NoHandler - PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -32 + PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -48 stp x2, x4, [fp, #16] + str xzr, [fp, #40] sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 + ldr x11, [fp, #40] + cbz x11, LOCAL_LABEL(CallJittedMethodRetU2_NoSwiftError) + str x21, [x11] +LOCAL_LABEL(CallJittedMethodRetU2_NoSwiftError): uxth x0, w0 ldr x9, [fp, #24] str x2, 
[x9] ldr x9, [fp, #16] str x0, [x9] EPILOG_STACK_RESTORE - EPILOG_RESTORE_REG_PAIR_INDEXED fp, lr, 32 + EPILOG_RESTORE_REG_PAIR_INDEXED fp, lr, 48 EPILOG_RETURN NESTED_END CallJittedMethodRetU2, _TEXT From 04e4deb33bb180fd76b57d81bf4f01ac1675dc94 Mon Sep 17 00:00:00 2001 From: Milos Kotlar Date: Fri, 30 Jan 2026 17:50:50 +0100 Subject: [PATCH 30/33] Moved stub arrays to getters --- src/coreclr/vm/callstubgenerator.cpp | 1511 +++++--------------------- 1 file changed, 252 insertions(+), 1259 deletions(-) diff --git a/src/coreclr/vm/callstubgenerator.cpp b/src/coreclr/vm/callstubgenerator.cpp index 83857ab2c34d1c..c00986c2d24e1a 100644 --- a/src/coreclr/vm/callstubgenerator.cpp +++ b/src/coreclr/vm/callstubgenerator.cpp @@ -81,102 +81,6 @@ extern "C" void Store_Ref_RDX(); extern "C" void Store_Ref_R8(); extern "C" void Store_Ref_R9(); -static const PCODE GPRegsRoutines[] = -{ - (PCODE)Load_RCX, // 00 - (PCODE)Load_RCX_RDX, // 01 - (PCODE)Load_RCX_RDX_R8, // 02 - (PCODE)Load_RCX_RDX_R8_R9, // 03 - (PCODE)0, // 10 - (PCODE)Load_RDX, // 11 - (PCODE)Load_RDX_R8, // 12 - (PCODE)Load_RDX_R8_R9, // 13 - (PCODE)0, // 20 - (PCODE)0, // 21 - (PCODE)Load_R8, // 22 - (PCODE)Load_R8_R9, // 23 - (PCODE)0, // 30 - (PCODE)0, // 31 - (PCODE)0, // 32 - (PCODE)Load_R9 // 33 -}; - -static const PCODE GPRegsStoreRoutines[] = -{ - (PCODE)Store_RCX, // 00 - (PCODE)Store_RCX_RDX, // 01 - (PCODE)Store_RCX_RDX_R8, // 02 - (PCODE)Store_RCX_RDX_R8_R9, // 03 - (PCODE)0, // 10 - (PCODE)Store_RDX, // 11 - (PCODE)Store_RDX_R8, // 12 - (PCODE)Store_RDX_R8_R9, // 13 - (PCODE)0, // 20 - (PCODE)0, // 21 - (PCODE)Store_R8, // 22 - (PCODE)Store_R8_R9, // 23 - (PCODE)0, // 30 - (PCODE)0, // 31 - (PCODE)0, // 32 - (PCODE)Store_R9 // 33 -}; - -static const PCODE GPRegsRefRoutines[] = -{ - (PCODE)Load_Ref_RCX, // 0 - (PCODE)Load_Ref_RDX, // 1 - (PCODE)Load_Ref_R8, // 2 - (PCODE)Load_Ref_R9, // 3 -}; - -static const PCODE GPRegsRefStoreRoutines[] = -{ - (PCODE)Store_Ref_RCX, // 0 - (PCODE)Store_Ref_RDX, // 1 - (PCODE)Store_Ref_R8, // 2 - (PCODE)Store_Ref_R9, // 3 -}; - -static const PCODE FPRegsRoutines[] = -{ - (PCODE)Load_XMM0, // 00 - (PCODE)Load_XMM0_XMM1, // 01 - (PCODE)Load_XMM0_XMM1_XMM2, // 02 - (PCODE)Load_XMM0_XMM1_XMM2_XMM3, // 03 - (PCODE)0, // 10 - (PCODE)Load_XMM1, // 11 - (PCODE)Load_XMM1_XMM2, // 12 - (PCODE)Load_XMM1_XMM2_XMM3, // 13 - (PCODE)0, // 20 - (PCODE)0, // 21 - (PCODE)Load_XMM2, // 22 - (PCODE)Load_XMM2_XMM3, // 23 - (PCODE)0, // 30 - (PCODE)0, // 31 - (PCODE)0, // 32 - (PCODE)Load_XMM3 // 33 -}; - -static const PCODE FPRegsStoreRoutines[] = -{ - (PCODE)Store_XMM0, // 00 - (PCODE)Store_XMM0_XMM1, // 01 - (PCODE)Store_XMM0_XMM1_XMM2, // 02 - (PCODE)Store_XMM0_XMM1_XMM2_XMM3, // 03 - (PCODE)0, // 10 - (PCODE)Store_XMM1, // 11 - (PCODE)Store_XMM1_XMM2, // 12 - (PCODE)Store_XMM1_XMM2_XMM3, // 13 - (PCODE)0, // 20 - (PCODE)0, // 21 - (PCODE)Store_XMM2, // 22 - (PCODE)Store_XMM2_XMM3, // 23 - (PCODE)0, // 30 - (PCODE)0, // 31 - (PCODE)0, // 32 - (PCODE)Store_XMM3 // 33 -}; - #else // TARGET_WINDOWS extern "C" void Load_RDI(); @@ -223,86 +127,6 @@ extern "C" void Store_R8(); extern "C" void Store_R8_R9(); extern "C" void Store_R9(); -static const PCODE GPRegsRoutines[] = -{ - (PCODE)Load_RDI, // 00 - (PCODE)Load_RDI_RSI, // 01 - (PCODE)Load_RDI_RSI_RDX, // 02 - (PCODE)Load_RDI_RSI_RDX_RCX, // 03 - (PCODE)Load_RDI_RSI_RDX_RCX_R8, // 04 - (PCODE)Load_RDI_RSI_RDX_RCX_R8_R9, // 05 - (PCODE)0, // 10 - (PCODE)Load_RSI, // 11 - (PCODE)Load_RSI_RDX, // 12 - (PCODE)Load_RSI_RDX_RCX, // 13 - (PCODE)Load_RSI_RDX_RCX_R8, // 
14 - (PCODE)Load_RSI_RDX_RCX_R8_R9, // 15 - (PCODE)0, // 20 - (PCODE)0, // 21 - (PCODE)Load_RDX, // 22 - (PCODE)Load_RDX_RCX, // 23 - (PCODE)Load_RDX_RCX_R8, // 24 - (PCODE)Load_RDX_RCX_R8_R9, // 25 - (PCODE)0, // 30 - (PCODE)0, // 31 - (PCODE)0, // 32 - (PCODE)Load_RCX, // 33 - (PCODE)Load_RCX_R8, // 34 - (PCODE)Load_RCX_R8_R9, // 35 - (PCODE)0, // 40 - (PCODE)0, // 41 - (PCODE)0, // 42 - (PCODE)0, // 43 - (PCODE)Load_R8, // 44 - (PCODE)Load_R8_R9, // 45 - (PCODE)0, // 50 - (PCODE)0, // 51 - (PCODE)0, // 52 - (PCODE)0, // 53 - (PCODE)0, // 54 - (PCODE)Load_R9 // 55 -}; - -static const PCODE GPRegsStoreRoutines[] = -{ - (PCODE)Store_RDI, // 00 - (PCODE)Store_RDI_RSI, // 01 - (PCODE)Store_RDI_RSI_RDX, // 02 - (PCODE)Store_RDI_RSI_RDX_RCX, // 03 - (PCODE)Store_RDI_RSI_RDX_RCX_R8, // 04 - (PCODE)Store_RDI_RSI_RDX_RCX_R8_R9, // 05 - (PCODE)0, // 10 - (PCODE)Store_RSI, // 11 - (PCODE)Store_RSI_RDX, // 12 - (PCODE)Store_RSI_RDX_RCX, // 13 - (PCODE)Store_RSI_RDX_RCX_R8, // 14 - (PCODE)Store_RSI_RDX_RCX_R8_R9, // 15 - (PCODE)0, // 20 - (PCODE)0, // 21 - (PCODE)Store_RDX, // 22 - (PCODE)Store_RDX_RCX, // 23 - (PCODE)Store_RDX_RCX_R8, // 24 - (PCODE)Store_RDX_RCX_R8_R9, // 25 - (PCODE)0, // 30 - (PCODE)0, // 31 - (PCODE)0, // 32 - (PCODE)Store_RCX, // 33 - (PCODE)Store_RCX_R8, // 34 - (PCODE)Store_RCX_R8_R9, // 35 - (PCODE)0, // 40 - (PCODE)0, // 41 - (PCODE)0, // 42 - (PCODE)0, // 43 - (PCODE)Store_R8, // 44 - (PCODE)Store_R8_R9, // 45 - (PCODE)0, // 50 - (PCODE)0, // 51 - (PCODE)0, // 52 - (PCODE)0, // 53 - (PCODE)0, // 54 - (PCODE)Store_R9 // 55 -}; - extern "C" void Load_XMM0(); extern "C" void Load_XMM0_XMM1(); extern "C" void Load_XMM0_XMM1_XMM2(); @@ -377,142 +201,6 @@ extern "C" void Store_XMM6(); extern "C" void Store_XMM6_XMM7(); extern "C" void Store_XMM7(); -static const PCODE FPRegsRoutines[] = -{ - (PCODE)Load_XMM0, // 00 - (PCODE)Load_XMM0_XMM1, // 01 - (PCODE)Load_XMM0_XMM1_XMM2, // 02 - (PCODE)Load_XMM0_XMM1_XMM2_XMM3, // 03 - (PCODE)Load_XMM0_XMM1_XMM2_XMM3_XMM4, // 04 - (PCODE)Load_XMM0_XMM1_XMM2_XMM3_XMM4_XMM5, // 05 - (PCODE)Load_XMM0_XMM1_XMM2_XMM3_XMM4_XMM5_XMM6, // 06 - (PCODE)Load_XMM0_XMM1_XMM2_XMM3_XMM4_XMM5_XMM6_XMM7,// 07 - (PCODE)0, // 10 - (PCODE)Load_XMM1, // 11 - (PCODE)Load_XMM1_XMM2, // 12 - (PCODE)Load_XMM1_XMM2_XMM3, // 13 - (PCODE)Load_XMM1_XMM2_XMM3_XMM4, // 14 - (PCODE)Load_XMM1_XMM2_XMM3_XMM4_XMM5, // 15 - (PCODE)Load_XMM1_XMM2_XMM3_XMM4_XMM5_XMM6, // 16 - (PCODE)Load_XMM1_XMM2_XMM3_XMM4_XMM5_XMM6_XMM7, // 17 - (PCODE)0, // 20 - (PCODE)0, // 21 - (PCODE)Load_XMM2, // 22 - (PCODE)Load_XMM2_XMM3, // 23 - (PCODE)Load_XMM2_XMM3_XMM4, // 24 - (PCODE)Load_XMM2_XMM3_XMM4_XMM5, // 25 - (PCODE)Load_XMM2_XMM3_XMM4_XMM5_XMM6, // 26 - (PCODE)Load_XMM2_XMM3_XMM4_XMM5_XMM6_XMM7, // 27 - (PCODE)0, // 30 - (PCODE)0, // 31 - (PCODE)0, // 32 - (PCODE)Load_XMM3, // 33 - (PCODE)Load_XMM3_XMM4, // 34 - (PCODE)Load_XMM3_XMM4_XMM5, // 35 - (PCODE)Load_XMM3_XMM4_XMM5_XMM6, // 36 - (PCODE)Load_XMM3_XMM4_XMM5_XMM6_XMM7, // 37 - (PCODE)0, // 40 - (PCODE)0, // 41 - (PCODE)0, // 42 - (PCODE)0, // 43 - (PCODE)Load_XMM4, // 44 - (PCODE)Load_XMM4_XMM5, // 45 - (PCODE)Load_XMM4_XMM5_XMM6, // 46 - (PCODE)Load_XMM4_XMM5_XMM6_XMM7, // 47 - (PCODE)0, // 50 - (PCODE)0, // 51 - (PCODE)0, // 52 - (PCODE)0, // 53 - (PCODE)0, // 54 - (PCODE)Load_XMM5, // 55 - (PCODE)Load_XMM5_XMM6, // 56 - (PCODE)Load_XMM5_XMM6_XMM7, // 57 - (PCODE)0, // 60 - (PCODE)0, // 61 - (PCODE)0, // 62 - (PCODE)0, // 63 - (PCODE)0, // 64 - (PCODE)0, // 65 - (PCODE)Load_XMM6, // 66 - (PCODE)Load_XMM6_XMM7, // 67 - (PCODE)0, // 
70 - (PCODE)0, // 71 - (PCODE)0, // 72 - (PCODE)0, // 73 - (PCODE)0, // 74 - (PCODE)0, // 75 - (PCODE)0, // 76 - (PCODE)Load_XMM7 // 77 -}; - -static const PCODE FPRegsStoreRoutines[] = -{ - (PCODE)Store_XMM0, // 00 - (PCODE)Store_XMM0_XMM1, // 01 - (PCODE)Store_XMM0_XMM1_XMM2, // 02 - (PCODE)Store_XMM0_XMM1_XMM2_XMM3, // 03 - (PCODE)Store_XMM0_XMM1_XMM2_XMM3_XMM4, // 04 - (PCODE)Store_XMM0_XMM1_XMM2_XMM3_XMM4_XMM5, // 05 - (PCODE)Store_XMM0_XMM1_XMM2_XMM3_XMM4_XMM5_XMM6, // 06 - (PCODE)Store_XMM0_XMM1_XMM2_XMM3_XMM4_XMM5_XMM6_XMM7,// 07 - (PCODE)0, // 10 - (PCODE)Store_XMM1, // 11 - (PCODE)Store_XMM1_XMM2, // 12 - (PCODE)Store_XMM1_XMM2_XMM3, // 13 - (PCODE)Store_XMM1_XMM2_XMM3_XMM4, // 14 - (PCODE)Store_XMM1_XMM2_XMM3_XMM4_XMM5, // 15 - (PCODE)Store_XMM1_XMM2_XMM3_XMM4_XMM5_XMM6, // 16 - (PCODE)Store_XMM1_XMM2_XMM3_XMM4_XMM5_XMM6_XMM7, // 17 - (PCODE)0, // 20 - (PCODE)0, // 21 - (PCODE)Store_XMM2, // 22 - (PCODE)Store_XMM2_XMM3, // 23 - (PCODE)Store_XMM2_XMM3_XMM4, // 24 - (PCODE)Store_XMM2_XMM3_XMM4_XMM5, // 25 - (PCODE)Store_XMM2_XMM3_XMM4_XMM5_XMM6, // 26 - (PCODE)Store_XMM2_XMM3_XMM4_XMM5_XMM6_XMM7, // 27 - (PCODE)0, // 30 - (PCODE)0, // 31 - (PCODE)0, // 32 - (PCODE)Store_XMM3, // 33 - (PCODE)Store_XMM3_XMM4, // 34 - (PCODE)Store_XMM3_XMM4_XMM5, // 35 - (PCODE)Store_XMM3_XMM4_XMM5_XMM6, // 36 - (PCODE)Store_XMM3_XMM4_XMM5_XMM6_XMM7, // 37 - (PCODE)0, // 40 - (PCODE)0, // 41 - (PCODE)0, // 42 - (PCODE)0, // 43 - (PCODE)Store_XMM4, // 44 - (PCODE)Store_XMM4_XMM5, // 45 - (PCODE)Store_XMM4_XMM5_XMM6, // 46 - (PCODE)Store_XMM4_XMM5_XMM6_XMM7, // 47 - (PCODE)0, // 50 - (PCODE)0, // 51 - (PCODE)0, // 52 - (PCODE)0, // 53 - (PCODE)0, // 54 - (PCODE)Store_XMM5, // 55 - (PCODE)Store_XMM5_XMM6, // 56 - (PCODE)Store_XMM5_XMM6_XMM7, // 57 - (PCODE)0, // 60 - (PCODE)0, // 61 - (PCODE)0, // 62 - (PCODE)0, // 63 - (PCODE)0, // 64 - (PCODE)0, // 65 - (PCODE)Store_XMM6, // 66 - (PCODE)Store_XMM6_XMM7, // 67 - (PCODE)0, // 70 - (PCODE)0, // 71 - (PCODE)0, // 72 - (PCODE)0, // 73 - (PCODE)0, // 74 - (PCODE)0, // 75 - (PCODE)0, // 76 - (PCODE)Store_XMM7 // 77 -}; - #endif // TARGET_WINDOWS #endif // TARGET_AMD64 @@ -646,166 +334,6 @@ extern "C" void Store_Ref_X5(); extern "C" void Store_Ref_X6(); extern "C" void Store_Ref_X7(); -static const PCODE GPRegsRoutines[] = -{ - (PCODE)Load_X0, // 00 - (PCODE)Load_X0_X1, // 01 - (PCODE)Load_X0_X1_X2, // 02 - (PCODE)Load_X0_X1_X2_X3, // 03 - (PCODE)Load_X0_X1_X2_X3_X4, // 04 - (PCODE)Load_X0_X1_X2_X3_X4_X5, // 05 - (PCODE)Load_X0_X1_X2_X3_X4_X5_X6, // 06 - (PCODE)Load_X0_X1_X2_X3_X4_X5_X6_X7, // 07 - (PCODE)0, // 10 - (PCODE)Load_X1, // 11 - (PCODE)Load_X1_X2, // 12 - (PCODE)Load_X1_X2_X3, // 13 - (PCODE)Load_X1_X2_X3_X4, // 14 - (PCODE)Load_X1_X2_X3_X4_X5, // 15 - (PCODE)Load_X1_X2_X3_X4_X5_X6, // 16 - (PCODE)Load_X1_X2_X3_X4_X5_X6_X7, // 17 - (PCODE)0, // 20 - (PCODE)0, // 21 - (PCODE)Load_X2, // 22 - (PCODE)Load_X2_X3, // 23 - (PCODE)Load_X2_X3_X4, // 24 - (PCODE)Load_X2_X3_X4_X5, // 25 - (PCODE)Load_X2_X3_X4_X5_X6, // 26 - (PCODE)Load_X2_X3_X4_X5_X6_X7, // 27 - (PCODE)0, // 30 - (PCODE)0, // 31 - (PCODE)0, // 32 - (PCODE)Load_X3, // 33 - (PCODE)Load_X3_X4, // 34 - (PCODE)Load_X3_X4_X5, // 35 - (PCODE)Load_X3_X4_X5_X6, // 36 - (PCODE)Load_X3_X4_X5_X6_X7, // 37 - (PCODE)0, // 40 - (PCODE)0, // 41 - (PCODE)0, // 42 - (PCODE)0, // 43 - (PCODE)Load_X4, // 44 - (PCODE)Load_X4_X5, // 45 - (PCODE)Load_X4_X5_X6, // 46 - (PCODE)Load_X4_X5_X6_X7, // 47 - (PCODE)0, // 50 - (PCODE)0, // 51 - (PCODE)0, // 52 - (PCODE)0, // 53 - (PCODE)0, // 54 - (PCODE)Load_X5, // 55 - 
(PCODE)Load_X5_X6, // 56 - (PCODE)Load_X5_X6_X7, // 57 - (PCODE)0, // 60 - (PCODE)0, // 61 - (PCODE)0, // 62 - (PCODE)0, // 63 - (PCODE)0, // 64 - (PCODE)0, // 65 - (PCODE)Load_X6, // 66 - (PCODE)Load_X6_X7, // 67 - (PCODE)0, // 70 - (PCODE)0, // 71 - (PCODE)0, // 72 - (PCODE)0, // 73 - (PCODE)0, // 74 - (PCODE)0, // 75 - (PCODE)0, // 76 - (PCODE)Load_X7 // 77 -}; - -static const PCODE GPRegsStoreRoutines[] = -{ - (PCODE)Store_X0, // 00 - (PCODE)Store_X0_X1, // 01 - (PCODE)Store_X0_X1_X2, // 02 - (PCODE)Store_X0_X1_X2_X3, // 03 - (PCODE)Store_X0_X1_X2_X3_X4, // 04 - (PCODE)Store_X0_X1_X2_X3_X4_X5, // 05 - (PCODE)Store_X0_X1_X2_X3_X4_X5_X6, // 06 - (PCODE)Store_X0_X1_X2_X3_X4_X5_X6_X7, // 07 - (PCODE)0, // 10 - (PCODE)Store_X1, // 11 - (PCODE)Store_X1_X2, // 12 - (PCODE)Store_X1_X2_X3, // 13 - (PCODE)Store_X1_X2_X3_X4, // 14 - (PCODE)Store_X1_X2_X3_X4_X5, // 15 - (PCODE)Store_X1_X2_X3_X4_X5_X6, // 16 - (PCODE)Store_X1_X2_X3_X4_X5_X6_X7, // 17 - (PCODE)0, // 20 - (PCODE)0, // 21 - (PCODE)Store_X2, // 22 - (PCODE)Store_X2_X3, // 23 - (PCODE)Store_X2_X3_X4, // 24 - (PCODE)Store_X2_X3_X4_X5, // 25 - (PCODE)Store_X2_X3_X4_X5_X6, // 26 - (PCODE)Store_X2_X3_X4_X5_X6_X7, // 27 - (PCODE)0, // 30 - (PCODE)0, // 31 - (PCODE)0, // 32 - (PCODE)Store_X3, // 33 - (PCODE)Store_X3_X4, // 34 - (PCODE)Store_X3_X4_X5, // 35 - (PCODE)Store_X3_X4_X5_X6, // 36 - (PCODE)Store_X3_X4_X5_X6_X7, // 37 - (PCODE)0, // 40 - (PCODE)0, // 41 - (PCODE)0, // 42 - (PCODE)0, // 43 - (PCODE)Store_X4, // 44 - (PCODE)Store_X4_X5, // 45 - (PCODE)Store_X4_X5_X6, // 46 - (PCODE)Store_X4_X5_X6_X7, // 47 - (PCODE)0, // 50 - (PCODE)0, // 51 - (PCODE)0, // 52 - (PCODE)0, // 53 - (PCODE)0, // 54 - (PCODE)Store_X5, // 55 - (PCODE)Store_X5_X6, // 56 - (PCODE)Store_X5_X6_X7, // 57 - (PCODE)0, // 60 - (PCODE)0, // 61 - (PCODE)0, // 62 - (PCODE)0, // 63 - (PCODE)0, // 64 - (PCODE)0, // 65 - (PCODE)Store_X6, // 66 - (PCODE)Store_X6_X7, // 67 - (PCODE)0, // 70 - (PCODE)0, // 71 - (PCODE)0, // 72 - (PCODE)0, // 73 - (PCODE)0, // 74 - (PCODE)0, // 75 - (PCODE)0, // 76 - (PCODE)Store_X7 // 77 -}; - -static const PCODE GPRegsRefRoutines[] = -{ - (PCODE)Load_Ref_X0, // 0 - (PCODE)Load_Ref_X1, // 1 - (PCODE)Load_Ref_X2, // 2 - (PCODE)Load_Ref_X3, // 3 - (PCODE)Load_Ref_X4, // 4 - (PCODE)Load_Ref_X5, // 5 - (PCODE)Load_Ref_X6, // 6 - (PCODE)Load_Ref_X7 // 7 -}; - -static const PCODE GPRegsRefStoreRoutines[] = -{ - (PCODE)Store_Ref_X0, // 0 - (PCODE)Store_Ref_X1, // 1 - (PCODE)Store_Ref_X2, // 2 - (PCODE)Store_Ref_X3, // 3 - (PCODE)Store_Ref_X4, // 4 - (PCODE)Store_Ref_X5, // 5 - (PCODE)Store_Ref_X6, // 6 - (PCODE)Store_Ref_X7 // 7 -}; - extern "C" void Load_D0(); extern "C" void Load_D0_D1(); extern "C" void Load_D0_D1_D2(); @@ -1030,414 +558,6 @@ extern "C" void Store_S6(); extern "C" void Store_S6_S7(); extern "C" void Store_S7(); -static const PCODE FPRegsStoreRoutines[] = -{ - (PCODE)Store_D0, // 00 - (PCODE)Store_D0_D1, // 01 - (PCODE)Store_D0_D1_D2, // 02 - (PCODE)Store_D0_D1_D2_D3, // 03 - (PCODE)Store_D0_D1_D2_D3_D4, // 04 - (PCODE)Store_D0_D1_D2_D3_D4_D5, // 05 - (PCODE)Store_D0_D1_D2_D3_D4_D5_D6, // 06 - (PCODE)Store_D0_D1_D2_D3_D4_D5_D6_D7, // 07 - (PCODE)0, // 10 - (PCODE)Store_D1, // 11 - (PCODE)Store_D1_D2, // 12 - (PCODE)Store_D1_D2_D3, // 13 - (PCODE)Store_D1_D2_D3_D4, // 14 - (PCODE)Store_D1_D2_D3_D4_D5, // 15 - (PCODE)Store_D1_D2_D3_D4_D5_D6, // 16 - (PCODE)Store_D1_D2_D3_D4_D5_D6_D7, // 17 - (PCODE)0, // 20 - (PCODE)0, // 21 - (PCODE)Store_D2, // 22 - (PCODE)Store_D2_D3, // 23 - (PCODE)Store_D2_D3_D4, // 24 - 
(PCODE)Store_D2_D3_D4_D5, // 25 - (PCODE)Store_D2_D3_D4_D5_D6, // 26 - (PCODE)Store_D2_D3_D4_D5_D6_D7, // 27 - (PCODE)0, // 30 - (PCODE)0, // 31 - (PCODE)0, // 32 - (PCODE)Store_D3, // 33 - (PCODE)Store_D3_D4, // 34 - (PCODE)Store_D3_D4_D5, // 35 - (PCODE)Store_D3_D4_D5_D6, // 36 - (PCODE)Store_D3_D4_D5_D6_D7, // 37 - (PCODE)0, // 40 - (PCODE)0, // 41 - (PCODE)0, // 42 - (PCODE)0, // 43 - (PCODE)Store_D4, // 44 - (PCODE)Store_D4_D5, // 45 - (PCODE)Store_D4_D5_D6, // 46 - (PCODE)Store_D4_D5_D6_D7, // 47 - (PCODE)0, // 50 - (PCODE)0, // 51 - (PCODE)0, // 52 - (PCODE)0, // 53 - (PCODE)0, // 54 - (PCODE)Store_D5, // 55 - (PCODE)Store_D5_D6, // 56 - (PCODE)Store_D5_D6_D7, // 57 - (PCODE)0, // 60 - (PCODE)0, // 61 - (PCODE)0, // 62 - (PCODE)0, // 63 - (PCODE)0, // 64 - (PCODE)0, // 65 - (PCODE)Store_D6, // 66 - (PCODE)Store_D6_D7, // 67 - (PCODE)0, // 70 - (PCODE)0, // 71 - (PCODE)0, // 72 - (PCODE)0, // 73 - (PCODE)0, // 74 - (PCODE)0, // 75 - (PCODE)0, // 76 - (PCODE)Store_D7 // 77 -}; - -static const PCODE FPRegsRoutines[] = -{ - (PCODE)Load_D0, // 00 - (PCODE)Load_D0_D1, // 01 - (PCODE)Load_D0_D1_D2, // 02 - (PCODE)Load_D0_D1_D2_D3, // 03 - (PCODE)Load_D0_D1_D2_D3_D4, // 04 - (PCODE)Load_D0_D1_D2_D3_D4_D5, // 05 - (PCODE)Load_D0_D1_D2_D3_D4_D5_D6, // 06 - (PCODE)Load_D0_D1_D2_D3_D4_D5_D6_D7, // 07 - (PCODE)0, // 10 - (PCODE)Load_D1, // 11 - (PCODE)Load_D1_D2, // 12 - (PCODE)Load_D1_D2_D3, // 13 - (PCODE)Load_D1_D2_D3_D4, // 14 - (PCODE)Load_D1_D2_D3_D4_D5, // 15 - (PCODE)Load_D1_D2_D3_D4_D5_D6, // 16 - (PCODE)Load_D1_D2_D3_D4_D5_D6_D7, // 17 - (PCODE)0, // 20 - (PCODE)0, // 21 - (PCODE)Load_D2, // 22 - (PCODE)Load_D2_D3, // 23 - (PCODE)Load_D2_D3_D4, // 24 - (PCODE)Load_D2_D3_D4_D5, // 25 - (PCODE)Load_D2_D3_D4_D5_D6, // 26 - (PCODE)Load_D2_D3_D4_D5_D6_D7, // 27 - (PCODE)0, // 30 - (PCODE)0, // 31 - (PCODE)0, // 32 - (PCODE)Load_D3, // 33 - (PCODE)Load_D3_D4, // 34 - (PCODE)Load_D3_D4_D5, // 35 - (PCODE)Load_D3_D4_D5_D6, // 36 - (PCODE)Load_D3_D4_D5_D6_D7, // 37 - (PCODE)0, // 40 - (PCODE)0, // 41 - (PCODE)0, // 42 - (PCODE)0, // 43 - (PCODE)Load_D4, // 44 - (PCODE)Load_D4_D5, // 45 - (PCODE)Load_D4_D5_D6, // 46 - (PCODE)Load_D4_D5_D6_D7, // 47 - (PCODE)0, // 50 - (PCODE)0, // 51 - (PCODE)0, // 52 - (PCODE)0, // 53 - (PCODE)0, // 54 - (PCODE)Load_D5, // 55 - (PCODE)Load_D5_D6, // 56 - (PCODE)Load_D5_D6_D7, // 57 - (PCODE)0, // 60 - (PCODE)0, // 61 - (PCODE)0, // 62 - (PCODE)0, // 63 - (PCODE)0, // 64 - (PCODE)0, // 65 - (PCODE)Load_D6, // 66 - (PCODE)Load_D6_D7, // 67 - (PCODE)0, // 70 - (PCODE)0, // 71 - (PCODE)0, // 72 - (PCODE)0, // 73 - (PCODE)0, // 74 - (PCODE)0, // 75 - (PCODE)0, // 76 - (PCODE)Load_D7 // 77 -}; - -PCODE FPRegs128StoreRoutines[] = -{ - (PCODE)Store_Q0, // 00 - (PCODE)Store_Q0_Q1, // 01 - (PCODE)Store_Q0_Q1_Q2, // 02 - (PCODE)Store_Q0_Q1_Q2_Q3, // 03 - (PCODE)Store_Q0_Q1_Q2_Q3_Q4, // 04 - (PCODE)Store_Q0_Q1_Q2_Q3_Q4_Q5, // 05 - (PCODE)Store_Q0_Q1_Q2_Q3_Q4_Q5_Q6, // 06 - (PCODE)Store_Q0_Q1_Q2_Q3_Q4_Q5_Q6_Q7, // 07 - (PCODE)0, // 10 - (PCODE)Store_Q1, // 11 - (PCODE)Store_Q1_Q2, // 12 - (PCODE)Store_Q1_Q2_Q3, // 13 - (PCODE)Store_Q1_Q2_Q3_Q4, // 14 - (PCODE)Store_Q1_Q2_Q3_Q4_Q5, // 15 - (PCODE)Store_Q1_Q2_Q3_Q4_Q5_Q6, // 16 - (PCODE)Store_Q1_Q2_Q3_Q4_Q5_Q6_Q7, // 17 - (PCODE)0, // 20 - (PCODE)0, // 21 - (PCODE)Store_Q2, // 22 - (PCODE)Store_Q2_Q3, // 23 - (PCODE)Store_Q2_Q3_Q4, // 24 - (PCODE)Store_Q2_Q3_Q4_Q5, // 25 - (PCODE)Store_Q2_Q3_Q4_Q5_Q6, // 26 - (PCODE)Store_Q2_Q3_Q4_Q5_Q6_Q7, // 27 - (PCODE)0, // 30 - (PCODE)0, // 31 - (PCODE)0, // 32 - (PCODE)Store_Q3, // 33 - 
(PCODE)Store_Q3_Q4, // 34 - (PCODE)Store_Q3_Q4_Q5, // 35 - (PCODE)Store_Q3_Q4_Q5_Q6, // 36 - (PCODE)Store_Q3_Q4_Q5_Q6_Q7, // 37 - (PCODE)0, // 40 - (PCODE)0, // 41 - (PCODE)0, // 42 - (PCODE)0, // 43 - (PCODE)Store_Q4, // 44 - (PCODE)Store_Q4_Q5, // 45 - (PCODE)Store_Q4_Q5_Q6, // 46 - (PCODE)Store_Q4_Q5_Q6_Q7, // 47 - (PCODE)0, // 50 - (PCODE)0, // 51 - (PCODE)0, // 52 - (PCODE)0, // 53 - (PCODE)0, // 54 - (PCODE)Store_Q5, // 55 - (PCODE)Store_Q5_Q6, // 56 - (PCODE)Store_Q5_Q6_Q7, // 57 - (PCODE)0, // 60 - (PCODE)0, // 61 - (PCODE)0, // 62 - (PCODE)0, // 63 - (PCODE)0, // 64 - (PCODE)0, // 65 - (PCODE)Store_Q6, // 66 - (PCODE)Store_Q6_Q7, // 67 - (PCODE)0, // 70 - (PCODE)0, // 71 - (PCODE)0, // 72 - (PCODE)0, // 73 - (PCODE)0, // 74 - (PCODE)0, // 75 - (PCODE)0, // 76 - (PCODE)Store_Q7 // 77 -}; - -static const PCODE FPRegs128LoadRoutines[] = -{ - (PCODE)Load_Q0, // 00 - (PCODE)Load_Q0_Q1, // 01 - (PCODE)Load_Q0_Q1_Q2, // 02 - (PCODE)Load_Q0_Q1_Q2_Q3, // 03 - (PCODE)Load_Q0_Q1_Q2_Q3_Q4, // 04 - (PCODE)Load_Q0_Q1_Q2_Q3_Q4_Q5, // 05 - (PCODE)Load_Q0_Q1_Q2_Q3_Q4_Q5_Q6, // 06 - (PCODE)Load_Q0_Q1_Q2_Q3_Q4_Q5_Q6_Q7, // 07 - (PCODE)0, // 10 - (PCODE)Load_Q1, // 11 - (PCODE)Load_Q1_Q2, // 12 - (PCODE)Load_Q1_Q2_Q3, // 13 - (PCODE)Load_Q1_Q2_Q3_Q4, // 14 - (PCODE)Load_Q1_Q2_Q3_Q4_Q5, // 15 - (PCODE)Load_Q1_Q2_Q3_Q4_Q5_Q6, // 16 - (PCODE)Load_Q1_Q2_Q3_Q4_Q5_Q6_Q7, // 17 - (PCODE)0, // 20 - (PCODE)0, // 21 - (PCODE)Load_Q2, // 22 - (PCODE)Load_Q2_Q3, // 23 - (PCODE)Load_Q2_Q3_Q4, // 24 - (PCODE)Load_Q2_Q3_Q4_Q5, // 25 - (PCODE)Load_Q2_Q3_Q4_Q5_Q6, // 26 - (PCODE)Load_Q2_Q3_Q4_Q5_Q6_Q7, // 27 - (PCODE)0, // 30 - (PCODE)0, // 31 - (PCODE)0, // 32 - (PCODE)Load_Q3, // 33 - (PCODE)Load_Q3_Q4, // 34 - (PCODE)Load_Q3_Q4_Q5, // 35 - (PCODE)Load_Q3_Q4_Q5_Q6, // 36 - (PCODE)Load_Q3_Q4_Q5_Q6_Q7, // 37 - (PCODE)0, // 40 - (PCODE)0, // 41 - (PCODE)0, // 42 - (PCODE)0, // 43 - (PCODE)Load_Q4, // 44 - (PCODE)Load_Q4_Q5, // 45 - (PCODE)Load_Q4_Q5_Q6, // 46 - (PCODE)Load_Q4_Q5_Q6_Q7, // 47 - (PCODE)0, // 50 - (PCODE)0, // 51 - (PCODE)0, // 52 - (PCODE)0, // 53 - (PCODE)0, // 54 - (PCODE)Load_Q5, // 55 - (PCODE)Load_Q5_Q6, // 56 - (PCODE)Load_Q5_Q6_Q7, // 57 - (PCODE)0, // 60 - (PCODE)0, // 61 - (PCODE)0, // 62 - (PCODE)0, // 63 - (PCODE)0, // 64 - (PCODE)0, // 65 - (PCODE)Load_Q6, // 66 - (PCODE)Load_Q6_Q7, // 67 - (PCODE)0, // 70 - (PCODE)0, // 71 - (PCODE)0, // 72 - (PCODE)0, // 73 - (PCODE)0, // 74 - (PCODE)0, // 75 - (PCODE)0, // 76 - (PCODE)Load_Q7 // 77 -}; - -static const PCODE FPRegs32StoreRoutines[] = -{ - (PCODE)Store_S0, // 00 - (PCODE)Store_S0_S1, // 01 - (PCODE)Store_S0_S1_S2, // 02 - (PCODE)Store_S0_S1_S2_S3, // 03 - (PCODE)Store_S0_S1_S2_S3_S4, // 04 - (PCODE)Store_S0_S1_S2_S3_S4_S5, // 05 - (PCODE)Store_S0_S1_S2_S3_S4_S5_S6, // 06 - (PCODE)Store_S0_S1_S2_S3_S4_S5_S6_S7, // 07 - (PCODE)0, // 10 - (PCODE)Store_S1, // 11 - (PCODE)Store_S1_S2, // 12 - (PCODE)Store_S1_S2_S3, // 13 - (PCODE)Store_S1_S2_S3_S4, // 14 - (PCODE)Store_S1_S2_S3_S4_S5, // 15 - (PCODE)Store_S1_S2_S3_S4_S5_S6, // 16 - (PCODE)Store_S1_S2_S3_S4_S5_S6_S7, // 17 - (PCODE)0, // 20 - (PCODE)0, // 21 - (PCODE)Store_S2, // 22 - (PCODE)Store_S2_S3, // 23 - (PCODE)Store_S2_S3_S4, // 24 - (PCODE)Store_S2_S3_S4_S5, // 25 - (PCODE)Store_S2_S3_S4_S5_S6, // 26 - (PCODE)Store_S2_S3_S4_S5_S6_S7, // 27 - (PCODE)0, // 30 - (PCODE)0, // 31 - (PCODE)0, // 32 - (PCODE)Store_S3, // 33 - (PCODE)Store_S3_S4, // 34 - (PCODE)Store_S3_S4_S5, // 35 - (PCODE)Store_S3_S4_S5_S6, // 36 - (PCODE)Store_S3_S4_S5_S6_S7, // 37 - (PCODE)0, // 40 - (PCODE)0, // 41 - 
(PCODE)0, // 42 - (PCODE)0, // 43 - (PCODE)Store_S4, // 44 - (PCODE)Store_S4_S5, // 45 - (PCODE)Store_S4_S5_S6, // 46 - (PCODE)Store_S4_S5_S6_S7, // 47 - (PCODE)0, // 50 - (PCODE)0, // 51 - (PCODE)0, // 52 - (PCODE)0, // 53 - (PCODE)0, // 54 - (PCODE)Store_S5, // 55 - (PCODE)Store_S5_S6, // 56 - (PCODE)Store_S5_S6_S7, // 57 - (PCODE)0, // 60 - (PCODE)0, // 61 - (PCODE)0, // 62 - (PCODE)0, // 63 - (PCODE)0, // 64 - (PCODE)0, // 65 - (PCODE)Store_S6, // 66 - (PCODE)Store_S6_S7, // 67 - (PCODE)0, // 70 - (PCODE)0, // 71 - (PCODE)0, // 72 - (PCODE)0, // 73 - (PCODE)0, // 74 - (PCODE)0, // 75 - (PCODE)0, // 76 - (PCODE)Store_S7 // 77 -}; - -static const PCODE FPRegs32LoadRoutines[] = -{ - (PCODE)Load_S0, // 00 - (PCODE)Load_S0_S1, // 01 - (PCODE)Load_S0_S1_S2, // 02 - (PCODE)Load_S0_S1_S2_S3, // 03 - (PCODE)Load_S0_S1_S2_S3_S4, // 04 - (PCODE)Load_S0_S1_S2_S3_S4_S5, // 05 - (PCODE)Load_S0_S1_S2_S3_S4_S5_S6, // 06 - (PCODE)Load_S0_S1_S2_S3_S4_S5_S6_S7, // 07 - (PCODE)0, // 10 - (PCODE)Load_S1, // 11 - (PCODE)Load_S1_S2, // 12 - (PCODE)Load_S1_S2_S3, // 13 - (PCODE)Load_S1_S2_S3_S4, // 14 - (PCODE)Load_S1_S2_S3_S4_S5, // 15 - (PCODE)Load_S1_S2_S3_S4_S5_S6, // 16 - (PCODE)Load_S1_S2_S3_S4_S5_S6_S7, // 17 - (PCODE)0, // 20 - (PCODE)0, // 21 - (PCODE)Load_S2, // 22 - (PCODE)Load_S2_S3, // 23 - (PCODE)Load_S2_S3_S4, // 24 - (PCODE)Load_S2_S3_S4_S5, // 25 - (PCODE)Load_S2_S3_S4_S5_S6, // 26 - (PCODE)Load_S2_S3_S4_S5_S6_S7, // 27 - (PCODE)0, // 30 - (PCODE)0, // 31 - (PCODE)0, // 32 - (PCODE)Load_S3, // 33 - (PCODE)Load_S3_S4, // 34 - (PCODE)Load_S3_S4_S5, // 35 - (PCODE)Load_S3_S4_S5_S6, // 36 - (PCODE)Load_S3_S4_S5_S6_S7, // 37 - (PCODE)0, // 40 - (PCODE)0, // 41 - (PCODE)0, // 42 - (PCODE)0, // 43 - (PCODE)Load_S4, // 44 - (PCODE)Load_S4_S5, // 45 - (PCODE)Load_S4_S5_S6, // 46 - (PCODE)Load_S4_S5_S6_S7, // 47 - (PCODE)0, // 50 - (PCODE)0, // 51 - (PCODE)0, // 52 - (PCODE)0, // 53 - (PCODE)0, // 54 - (PCODE)Load_S5, // 55 - (PCODE)Load_S5_S6, // 56 - (PCODE)Load_S5_S6_S7, // 57 - (PCODE)0, // 60 - (PCODE)0, // 61 - (PCODE)0, // 62 - (PCODE)0, // 63 - (PCODE)0, // 64 - (PCODE)0, // 65 - (PCODE)Load_S6, // 66 - (PCODE)Load_S6_S7, // 67 - (PCODE)0, // 70 - (PCODE)0, // 71 - (PCODE)0, // 72 - (PCODE)0, // 73 - (PCODE)0, // 74 - (PCODE)0, // 75 - (PCODE)0, // 76 - (PCODE)Load_S7 // 77 -}; - #endif // TARGET_ARM64 #ifdef TARGET_ARM @@ -1473,86 +593,6 @@ extern "C" void Store_R0_R1_R2_R3_4B(); extern "C" void Store_R2_R3_4B(); extern "C" void Store_Stack_4B(); -PCODE GPRegsRoutines[] = -{ - (PCODE)Load_R0, // 00 - (PCODE)Load_R0_R1, // 01 - (PCODE)Load_R0_R1_R2, // 02 - (PCODE)Load_R0_R1_R2_R3, // 03 - (PCODE)0, // 04 - (PCODE)Load_R1, // 05 - (PCODE)Load_R1_R2, // 06 - (PCODE)Load_R1_R2_R3, // 07 - (PCODE)0, // 08 - (PCODE)0, // 09 - (PCODE)Load_R2, // 10 - (PCODE)Load_R2_R3, // 11 - (PCODE)0, // 12 - (PCODE)0, // 13 - (PCODE)0, // 14 - (PCODE)Load_R3, // 15 -}; - -PCODE GPRegsStoreRoutines[] = -{ - (PCODE)Store_R0, // 00 - (PCODE)Store_R0_R1, // 01 - (PCODE)Store_R0_R1_R2, // 02 - (PCODE)Store_R0_R1_R2_R3, // 03 - (PCODE)0, // 04 - (PCODE)Store_R1, // 05 - (PCODE)Store_R1_R2, // 06 - (PCODE)Store_R1_R2_R3, // 07 - (PCODE)0, // 08 - (PCODE)0, // 09 - (PCODE)Store_R2, // 10 - (PCODE)Store_R2_R3, // 11 - (PCODE)0, // 12 - (PCODE)0, // 13 - (PCODE)0, // 14 - (PCODE)Store_R3, // 15 -}; - -PCODE GPRegLoadRoutines_4B[] = -{ - (PCODE)0, // 00 - (PCODE)Load_R0_R1_4B, // 01 - (PCODE)0, // 02 - (PCODE)Load_R0_R1_R2_R3_4B, // 03 - (PCODE)0, // 04 - (PCODE)0, // 05 - (PCODE)0, // 06 - (PCODE)0, // 07 - (PCODE)0, // 08 
- (PCODE)0, // 09 - (PCODE)0, // 10 - (PCODE)Load_R2_R3_4B, // 11 - (PCODE)0, // 12 - (PCODE)0, // 13 - (PCODE)0, // 14 - (PCODE)0, // 15 -}; - -PCODE GPRegStoreRoutines_4B[] = -{ - (PCODE)0, // 00 - (PCODE)Store_R0_R1_4B, // 01 - (PCODE)0, // 02 - (PCODE)Store_R0_R1_R2_R3_4B, // 03 - (PCODE)0, // 04 - (PCODE)0, // 05 - (PCODE)0, // 06 - (PCODE)0, // 07 - (PCODE)0, // 08 - (PCODE)0, // 09 - (PCODE)0, // 10 - (PCODE)Store_R2_R3_4B, // 11 - (PCODE)0, // 12 - (PCODE)0, // 13 - (PCODE)0, // 14 - (PCODE)0, // 15 -}; - #endif // TARGET_ARM #ifdef TARGET_RISCV64 @@ -1649,142 +689,6 @@ extern "C" void Store_Ref_A5(); extern "C" void Store_Ref_A6(); extern "C" void Store_Ref_A7(); -PCODE GPRegsRoutines[] = -{ - (PCODE)Load_A0, // 00 - (PCODE)Load_A0_A1, // 01 - (PCODE)Load_A0_A1_A2, // 02 - (PCODE)Load_A0_A1_A2_A3, // 03 - (PCODE)Load_A0_A1_A2_A3_A4, // 04 - (PCODE)Load_A0_A1_A2_A3_A4_A5, // 05 - (PCODE)Load_A0_A1_A2_A3_A4_A5_A6, // 06 - (PCODE)Load_A0_A1_A2_A3_A4_A5_A6_A7, // 07 - (PCODE)0, // 10 - (PCODE)Load_A1, // 11 - (PCODE)Load_A1_A2, // 12 - (PCODE)Load_A1_A2_A3, // 13 - (PCODE)Load_A1_A2_A3_A4, // 14 - (PCODE)Load_A1_A2_A3_A4_A5, // 15 - (PCODE)Load_A1_A2_A3_A4_A5_A6, // 16 - (PCODE)Load_A1_A2_A3_A4_A5_A6_A7, // 17 - (PCODE)0, // 20 - (PCODE)0, // 21 - (PCODE)Load_A2, // 22 - (PCODE)Load_A2_A3, // 23 - (PCODE)Load_A2_A3_A4, // 24 - (PCODE)Load_A2_A3_A4_A5, // 25 - (PCODE)Load_A2_A3_A4_A5_A6, // 26 - (PCODE)Load_A2_A3_A4_A5_A6_A7, // 27 - (PCODE)0, // 30 - (PCODE)0, // 31 - (PCODE)0, // 32 - (PCODE)Load_A3, // 33 - (PCODE)Load_A3_A4, // 34 - (PCODE)Load_A3_A4_A5, // 35 - (PCODE)Load_A3_A4_A5_A6, // 36 - (PCODE)Load_A3_A4_A5_A6_A7, // 37 - (PCODE)0, // 40 - (PCODE)0, // 41 - (PCODE)0, // 42 - (PCODE)0, // 43 - (PCODE)Load_A4, // 44 - (PCODE)Load_A4_A5, // 45 - (PCODE)Load_A4_A5_A6, // 46 - (PCODE)Load_A4_A5_A6_A7, // 47 - (PCODE)0, // 50 - (PCODE)0, // 51 - (PCODE)0, // 52 - (PCODE)0, // 53 - (PCODE)0, // 54 - (PCODE)Load_A5, // 55 - (PCODE)Load_A5_A6, // 56 - (PCODE)Load_A5_A6_A7, // 57 - (PCODE)0, // 60 - (PCODE)0, // 61 - (PCODE)0, // 62 - (PCODE)0, // 63 - (PCODE)0, // 64 - (PCODE)0, // 65 - (PCODE)Load_A6, // 66 - (PCODE)Load_A6_A7, // 67 - (PCODE)0, // 70 - (PCODE)0, // 71 - (PCODE)0, // 72 - (PCODE)0, // 73 - (PCODE)0, // 74 - (PCODE)0, // 75 - (PCODE)0, // 76 - (PCODE)Load_A7 // 77 -}; - -PCODE GPRegsStoreRoutines[] = -{ - (PCODE)Store_A0, // 00 - (PCODE)Store_A0_A1, // 01 - (PCODE)Store_A0_A1_A2, // 02 - (PCODE)Store_A0_A1_A2_A3, // 03 - (PCODE)Store_A0_A1_A2_A3_A4, // 04 - (PCODE)Store_A0_A1_A2_A3_A4_A5, // 05 - (PCODE)Store_A0_A1_A2_A3_A4_A5_A6, // 06 - (PCODE)Store_A0_A1_A2_A3_A4_A5_A6_A7, // 07 - (PCODE)0, // 10 - (PCODE)Store_A1, // 11 - (PCODE)Store_A1_A2, // 12 - (PCODE)Store_A1_A2_A3, // 13 - (PCODE)Store_A1_A2_A3_A4, // 14 - (PCODE)Store_A1_A2_A3_A4_A5, // 15 - (PCODE)Store_A1_A2_A3_A4_A5_A6, // 16 - (PCODE)Store_A1_A2_A3_A4_A5_A6_A7, // 17 - (PCODE)0, // 20 - (PCODE)0, // 21 - (PCODE)Store_A2, // 22 - (PCODE)Store_A2_A3, // 23 - (PCODE)Store_A2_A3_A4, // 24 - (PCODE)Store_A2_A3_A4_A5, // 25 - (PCODE)Store_A2_A3_A4_A5_A6, // 26 - (PCODE)Store_A2_A3_A4_A5_A6_A7, // 27 - (PCODE)0, // 30 - (PCODE)0, // 31 - (PCODE)0, // 32 - (PCODE)Store_A3, // 33 - (PCODE)Store_A3_A4, // 34 - (PCODE)Store_A3_A4_A5, // 35 - (PCODE)Store_A3_A4_A5_A6, // 36 - (PCODE)Store_A3_A4_A5_A6_A7, // 37 - (PCODE)0, // 40 - (PCODE)0, // 41 - (PCODE)0, // 42 - (PCODE)0, // 43 - (PCODE)Store_A4, // 44 - (PCODE)Store_A4_A5, // 45 - (PCODE)Store_A4_A5_A6, // 46 - (PCODE)Store_A4_A5_A6_A7, // 47 - (PCODE)0, // 
50 - (PCODE)0, // 51 - (PCODE)0, // 52 - (PCODE)0, // 53 - (PCODE)0, // 54 - (PCODE)Store_A5, // 55 - (PCODE)Store_A5_A6, // 56 - (PCODE)Store_A5_A6_A7, // 57 - (PCODE)0, // 60 - (PCODE)0, // 61 - (PCODE)0, // 62 - (PCODE)0, // 63 - (PCODE)0, // 64 - (PCODE)0, // 65 - (PCODE)Store_A6, // 66 - (PCODE)Store_A6_A7, // 67 - (PCODE)0, // 70 - (PCODE)0, // 71 - (PCODE)0, // 72 - (PCODE)0, // 73 - (PCODE)0, // 74 - (PCODE)0, // 75 - (PCODE)0, // 76 - (PCODE)Store_A7 // 77 -}; - extern "C" void Load_FA0(); extern "C" void Load_FA0_FA1(); extern "C" void Load_FA0_FA1_FA2(); @@ -1859,166 +763,6 @@ extern "C" void Store_FA6(); extern "C" void Store_FA6_FA7(); extern "C" void Store_FA7(); -PCODE FPRegsRoutines[] = -{ - (PCODE)Load_FA0, // 00 - (PCODE)Load_FA0_FA1, // 01 - (PCODE)Load_FA0_FA1_FA2, // 02 - (PCODE)Load_FA0_FA1_FA2_FA3, // 03 - (PCODE)Load_FA0_FA1_FA2_FA3_FA4, // 04 - (PCODE)Load_FA0_FA1_FA2_FA3_FA4_FA5, // 05 - (PCODE)Load_FA0_FA1_FA2_FA3_FA4_FA5_FA6, // 06 - (PCODE)Load_FA0_FA1_FA2_FA3_FA4_FA5_FA6_FA7, // 07 - (PCODE)0, // 10 - (PCODE)Load_FA1, // 11 - (PCODE)Load_FA1_FA2, // 12 - (PCODE)Load_FA1_FA2_FA3, // 13 - (PCODE)Load_FA1_FA2_FA3_FA4, // 14 - (PCODE)Load_FA1_FA2_FA3_FA4_FA5, // 15 - (PCODE)Load_FA1_FA2_FA3_FA4_FA5_FA6, // 16 - (PCODE)Load_FA1_FA2_FA3_FA4_FA5_FA6_FA7, // 17 - (PCODE)0, // 20 - (PCODE)0, // 21 - (PCODE)Load_FA2, // 22 - (PCODE)Load_FA2_FA3, // 23 - (PCODE)Load_FA2_FA3_FA4, // 24 - (PCODE)Load_FA2_FA3_FA4_FA5, // 25 - (PCODE)Load_FA2_FA3_FA4_FA5_FA6, // 26 - (PCODE)Load_FA2_FA3_FA4_FA5_FA6_FA7, // 27 - (PCODE)0, // 30 - (PCODE)0, // 31 - (PCODE)0, // 32 - (PCODE)Load_FA3, // 33 - (PCODE)Load_FA3_FA4, // 34 - (PCODE)Load_FA3_FA4_FA5, // 35 - (PCODE)Load_FA3_FA4_FA5_FA6, // 36 - (PCODE)Load_FA3_FA4_FA5_FA6_FA7, // 37 - (PCODE)0, // 40 - (PCODE)0, // 41 - (PCODE)0, // 42 - (PCODE)0, // 43 - (PCODE)Load_FA4, // 44 - (PCODE)Load_FA4_FA5, // 45 - (PCODE)Load_FA4_FA5_FA6, // 46 - (PCODE)Load_FA4_FA5_FA6_FA7, // 47 - (PCODE)0, // 50 - (PCODE)0, // 51 - (PCODE)0, // 52 - (PCODE)0, // 53 - (PCODE)0, // 54 - (PCODE)Load_FA5, // 55 - (PCODE)Load_FA5_FA6, // 56 - (PCODE)Load_FA5_FA6_FA7, // 57 - (PCODE)0, // 60 - (PCODE)0, // 61 - (PCODE)0, // 62 - (PCODE)0, // 63 - (PCODE)0, // 64 - (PCODE)0, // 65 - (PCODE)Load_FA6, // 66 - (PCODE)Load_FA6_FA7, // 67 - (PCODE)0, // 70 - (PCODE)0, // 71 - (PCODE)0, // 72 - (PCODE)0, // 73 - (PCODE)0, // 74 - (PCODE)0, // 75 - (PCODE)0, // 76 - (PCODE)Load_FA7 // 77 -}; - -PCODE FPRegsStoreRoutines[] = -{ - (PCODE)Store_FA0, // 00 - (PCODE)Store_FA0_FA1, // 01 - (PCODE)Store_FA0_FA1_FA2, // 02 - (PCODE)Store_FA0_FA1_FA2_FA3, // 03 - (PCODE)Store_FA0_FA1_FA2_FA3_FA4, // 04 - (PCODE)Store_FA0_FA1_FA2_FA3_FA4_FA5, // 05 - (PCODE)Store_FA0_FA1_FA2_FA3_FA4_FA5_FA6, // 06 - (PCODE)Store_FA0_FA1_FA2_FA3_FA4_FA5_FA6_FA7, // 07 - (PCODE)0, // 10 - (PCODE)Store_FA1, // 11 - (PCODE)Store_FA1_FA2, // 12 - (PCODE)Store_FA1_FA2_FA3, // 13 - (PCODE)Store_FA1_FA2_FA3_FA4, // 14 - (PCODE)Store_FA1_FA2_FA3_FA4_FA5, // 15 - (PCODE)Store_FA1_FA2_FA3_FA4_FA5_FA6, // 16 - (PCODE)Store_FA1_FA2_FA3_FA4_FA5_FA6_FA7, // 17 - (PCODE)0, // 20 - (PCODE)0, // 21 - (PCODE)Store_FA2, // 22 - (PCODE)Store_FA2_FA3, // 23 - (PCODE)Store_FA2_FA3_FA4, // 24 - (PCODE)Store_FA2_FA3_FA4_FA5, // 25 - (PCODE)Store_FA2_FA3_FA4_FA5_FA6, // 26 - (PCODE)Store_FA2_FA3_FA4_FA5_FA6_FA7, // 27 - (PCODE)0, // 30 - (PCODE)0, // 31 - (PCODE)0, // 32 - (PCODE)Store_FA3, // 33 - (PCODE)Store_FA3_FA4, // 34 - (PCODE)Store_FA3_FA4_FA5, // 35 - (PCODE)Store_FA3_FA4_FA5_FA6, // 36 - 
(PCODE)Store_FA3_FA4_FA5_FA6_FA7, // 37 - (PCODE)0, // 40 - (PCODE)0, // 41 - (PCODE)0, // 42 - (PCODE)0, // 43 - (PCODE)Store_FA4, // 44 - (PCODE)Store_FA4_FA5, // 45 - (PCODE)Store_FA4_FA5_FA6, // 46 - (PCODE)Store_FA4_FA5_FA6_FA7, // 47 - (PCODE)0, // 50 - (PCODE)0, // 51 - (PCODE)0, // 52 - (PCODE)0, // 53 - (PCODE)0, // 54 - (PCODE)Store_FA5, // 55 - (PCODE)Store_FA5_FA6, // 56 - (PCODE)Store_FA5_FA6_FA7, // 57 - (PCODE)0, // 60 - (PCODE)0, // 61 - (PCODE)0, // 62 - (PCODE)0, // 63 - (PCODE)0, // 64 - (PCODE)0, // 65 - (PCODE)Store_FA6, // 66 - (PCODE)Store_FA6_FA7, // 67 - (PCODE)0, // 70 - (PCODE)0, // 71 - (PCODE)0, // 72 - (PCODE)0, // 73 - (PCODE)0, // 74 - (PCODE)0, // 75 - (PCODE)0, // 76 - (PCODE)Store_FA7 // 77 -}; - -PCODE GPRegsRefRoutines[] = -{ - (PCODE)Load_Ref_A0, // 0 - a0 - (PCODE)Load_Ref_A1, // 1 - a1 - (PCODE)Load_Ref_A2, // 2 - a2 - (PCODE)Load_Ref_A3, // 3 - a3 - (PCODE)Load_Ref_A4, // 4 - a4 - (PCODE)Load_Ref_A5, // 5 - a5 - (PCODE)Load_Ref_A6, // 6 - a6 - (PCODE)Load_Ref_A7 // 7 - a7 -}; - -PCODE GPRegsRefStoreRoutines[] = -{ - (PCODE)Store_Ref_A0, // 0 - a0 - (PCODE)Store_Ref_A1, // 1 - a1 - (PCODE)Store_Ref_A2, // 2 - a2 - (PCODE)Store_Ref_A3, // 3 - a3 - (PCODE)Store_Ref_A4, // 4 - a4 - (PCODE)Store_Ref_A5, // 5 - a5 - (PCODE)Store_Ref_A6, // 6 - a6 - (PCODE)Store_Ref_A7 // 7 - a7 -}; - #endif // TARGET_RISCV64 #define LOG_COMPUTE_CALL_STUB 0 @@ -2063,8 +807,95 @@ PCODE CallStubGenerator::GetGPRegRangeRoutine(int r1, int r2) printf("GetGPRegRangeRoutine %d %d\n", r1, r2); #endif +#if defined(TARGET_AMD64) && defined(TARGET_WINDOWS) + static const PCODE GPRegsLoadRoutines[] = { + (PCODE)Load_RCX, (PCODE)Load_RCX_RDX, (PCODE)Load_RCX_RDX_R8, (PCODE)Load_RCX_RDX_R8_R9, + (PCODE)0, (PCODE)Load_RDX, (PCODE)Load_RDX_R8, (PCODE)Load_RDX_R8_R9, + (PCODE)0, (PCODE)0, (PCODE)Load_R8, (PCODE)Load_R8_R9, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Load_R9 + }; + static const PCODE GPRegsStoreRoutines[] = { + (PCODE)Store_RCX, (PCODE)Store_RCX_RDX, (PCODE)Store_RCX_RDX_R8, (PCODE)Store_RCX_RDX_R8_R9, + (PCODE)0, (PCODE)Store_RDX, (PCODE)Store_RDX_R8, (PCODE)Store_RDX_R8_R9, + (PCODE)0, (PCODE)0, (PCODE)Store_R8, (PCODE)Store_R8_R9, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Store_R9 + }; +#elif defined(TARGET_AMD64) // Unix AMD64 + static const PCODE GPRegsLoadRoutines[] = { + (PCODE)Load_RDI, (PCODE)Load_RDI_RSI, (PCODE)Load_RDI_RSI_RDX, (PCODE)Load_RDI_RSI_RDX_RCX, (PCODE)Load_RDI_RSI_RDX_RCX_R8, (PCODE)Load_RDI_RSI_RDX_RCX_R8_R9, + (PCODE)0, (PCODE)Load_RSI, (PCODE)Load_RSI_RDX, (PCODE)Load_RSI_RDX_RCX, (PCODE)Load_RSI_RDX_RCX_R8, (PCODE)Load_RSI_RDX_RCX_R8_R9, + (PCODE)0, (PCODE)0, (PCODE)Load_RDX, (PCODE)Load_RDX_RCX, (PCODE)Load_RDX_RCX_R8, (PCODE)Load_RDX_RCX_R8_R9, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Load_RCX, (PCODE)Load_RCX_R8, (PCODE)Load_RCX_R8_R9, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Load_R8, (PCODE)Load_R8_R9, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Load_R9 + }; + static const PCODE GPRegsStoreRoutines[] = { + (PCODE)Store_RDI, (PCODE)Store_RDI_RSI, (PCODE)Store_RDI_RSI_RDX, (PCODE)Store_RDI_RSI_RDX_RCX, (PCODE)Store_RDI_RSI_RDX_RCX_R8, (PCODE)Store_RDI_RSI_RDX_RCX_R8_R9, + (PCODE)0, (PCODE)Store_RSI, (PCODE)Store_RSI_RDX, (PCODE)Store_RSI_RDX_RCX, (PCODE)Store_RSI_RDX_RCX_R8, (PCODE)Store_RSI_RDX_RCX_R8_R9, + (PCODE)0, (PCODE)0, (PCODE)Store_RDX, (PCODE)Store_RDX_RCX, (PCODE)Store_RDX_RCX_R8, (PCODE)Store_RDX_RCX_R8_R9, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Store_RCX, (PCODE)Store_RCX_R8, (PCODE)Store_RCX_R8_R9, + (PCODE)0, 
(PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Store_R8, (PCODE)Store_R8_R9, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Store_R9 + }; +#elif defined(TARGET_ARM64) + static const PCODE GPRegsLoadRoutines[] = { + (PCODE)Load_X0, (PCODE)Load_X0_X1, (PCODE)Load_X0_X1_X2, (PCODE)Load_X0_X1_X2_X3, (PCODE)Load_X0_X1_X2_X3_X4, (PCODE)Load_X0_X1_X2_X3_X4_X5, (PCODE)Load_X0_X1_X2_X3_X4_X5_X6, (PCODE)Load_X0_X1_X2_X3_X4_X5_X6_X7, + (PCODE)0, (PCODE)Load_X1, (PCODE)Load_X1_X2, (PCODE)Load_X1_X2_X3, (PCODE)Load_X1_X2_X3_X4, (PCODE)Load_X1_X2_X3_X4_X5, (PCODE)Load_X1_X2_X3_X4_X5_X6, (PCODE)Load_X1_X2_X3_X4_X5_X6_X7, + (PCODE)0, (PCODE)0, (PCODE)Load_X2, (PCODE)Load_X2_X3, (PCODE)Load_X2_X3_X4, (PCODE)Load_X2_X3_X4_X5, (PCODE)Load_X2_X3_X4_X5_X6, (PCODE)Load_X2_X3_X4_X5_X6_X7, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Load_X3, (PCODE)Load_X3_X4, (PCODE)Load_X3_X4_X5, (PCODE)Load_X3_X4_X5_X6, (PCODE)Load_X3_X4_X5_X6_X7, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Load_X4, (PCODE)Load_X4_X5, (PCODE)Load_X4_X5_X6, (PCODE)Load_X4_X5_X6_X7, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Load_X5, (PCODE)Load_X5_X6, (PCODE)Load_X5_X6_X7, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Load_X6, (PCODE)Load_X6_X7, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Load_X7 + }; + static const PCODE GPRegsStoreRoutines[] = { + (PCODE)Store_X0, (PCODE)Store_X0_X1, (PCODE)Store_X0_X1_X2, (PCODE)Store_X0_X1_X2_X3, (PCODE)Store_X0_X1_X2_X3_X4, (PCODE)Store_X0_X1_X2_X3_X4_X5, (PCODE)Store_X0_X1_X2_X3_X4_X5_X6, (PCODE)Store_X0_X1_X2_X3_X4_X5_X6_X7, + (PCODE)0, (PCODE)Store_X1, (PCODE)Store_X1_X2, (PCODE)Store_X1_X2_X3, (PCODE)Store_X1_X2_X3_X4, (PCODE)Store_X1_X2_X3_X4_X5, (PCODE)Store_X1_X2_X3_X4_X5_X6, (PCODE)Store_X1_X2_X3_X4_X5_X6_X7, + (PCODE)0, (PCODE)0, (PCODE)Store_X2, (PCODE)Store_X2_X3, (PCODE)Store_X2_X3_X4, (PCODE)Store_X2_X3_X4_X5, (PCODE)Store_X2_X3_X4_X5_X6, (PCODE)Store_X2_X3_X4_X5_X6_X7, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Store_X3, (PCODE)Store_X3_X4, (PCODE)Store_X3_X4_X5, (PCODE)Store_X3_X4_X5_X6, (PCODE)Store_X3_X4_X5_X6_X7, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Store_X4, (PCODE)Store_X4_X5, (PCODE)Store_X4_X5_X6, (PCODE)Store_X4_X5_X6_X7, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Store_X5, (PCODE)Store_X5_X6, (PCODE)Store_X5_X6_X7, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Store_X6, (PCODE)Store_X6_X7, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Store_X7 + }; +#elif defined(TARGET_ARM) + static const PCODE GPRegsLoadRoutines[] = { + (PCODE)Load_R0, (PCODE)Load_R0_R1, (PCODE)Load_R0_R1_R2, (PCODE)Load_R0_R1_R2_R3, + (PCODE)0, (PCODE)Load_R1, (PCODE)Load_R1_R2, (PCODE)Load_R1_R2_R3, + (PCODE)0, (PCODE)0, (PCODE)Load_R2, (PCODE)Load_R2_R3, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Load_R3 + }; + static const PCODE GPRegsStoreRoutines[] = { + (PCODE)Store_R0, (PCODE)Store_R0_R1, (PCODE)Store_R0_R1_R2, (PCODE)Store_R0_R1_R2_R3, + (PCODE)0, (PCODE)Store_R1, (PCODE)Store_R1_R2, (PCODE)Store_R1_R2_R3, + (PCODE)0, (PCODE)0, (PCODE)Store_R2, (PCODE)Store_R2_R3, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Store_R3 + }; +#elif defined(TARGET_RISCV64) + static const PCODE GPRegsLoadRoutines[] = { + (PCODE)Load_A0, (PCODE)Load_A0_A1, (PCODE)Load_A0_A1_A2, (PCODE)Load_A0_A1_A2_A3, (PCODE)Load_A0_A1_A2_A3_A4, (PCODE)Load_A0_A1_A2_A3_A4_A5, (PCODE)Load_A0_A1_A2_A3_A4_A5_A6, (PCODE)Load_A0_A1_A2_A3_A4_A5_A6_A7, + (PCODE)0, (PCODE)Load_A1, (PCODE)Load_A1_A2, 
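// Note on the per-target tables in this function: each range table is square
// in the target's argument-register count, so the Windows-x64 tables above
// are 4x4 (RCX, RDX, R8, R9), the SysV AMD64 ones 6x6 (RDI..R9), the ARM64
// and RISCV64 ones 8x8 (x0..x7 / a0..a7), and 32-bit ARM is back to 4x4
// (r0..r3). NUM_ARGUMENT_REGISTERS presumably matches those widths on each
// target, since it strides the row index in the lookup that follows.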
(PCODE)Load_A1_A2_A3, (PCODE)Load_A1_A2_A3_A4, (PCODE)Load_A1_A2_A3_A4_A5, (PCODE)Load_A1_A2_A3_A4_A5_A6, (PCODE)Load_A1_A2_A3_A4_A5_A6_A7, + (PCODE)0, (PCODE)0, (PCODE)Load_A2, (PCODE)Load_A2_A3, (PCODE)Load_A2_A3_A4, (PCODE)Load_A2_A3_A4_A5, (PCODE)Load_A2_A3_A4_A5_A6, (PCODE)Load_A2_A3_A4_A5_A6_A7, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Load_A3, (PCODE)Load_A3_A4, (PCODE)Load_A3_A4_A5, (PCODE)Load_A3_A4_A5_A6, (PCODE)Load_A3_A4_A5_A6_A7, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Load_A4, (PCODE)Load_A4_A5, (PCODE)Load_A4_A5_A6, (PCODE)Load_A4_A5_A6_A7, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Load_A5, (PCODE)Load_A5_A6, (PCODE)Load_A5_A6_A7, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Load_A6, (PCODE)Load_A6_A7, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Load_A7 + }; + static const PCODE GPRegsStoreRoutines[] = { + (PCODE)Store_A0, (PCODE)Store_A0_A1, (PCODE)Store_A0_A1_A2, (PCODE)Store_A0_A1_A2_A3, (PCODE)Store_A0_A1_A2_A3_A4, (PCODE)Store_A0_A1_A2_A3_A4_A5, (PCODE)Store_A0_A1_A2_A3_A4_A5_A6, (PCODE)Store_A0_A1_A2_A3_A4_A5_A6_A7, + (PCODE)0, (PCODE)Store_A1, (PCODE)Store_A1_A2, (PCODE)Store_A1_A2_A3, (PCODE)Store_A1_A2_A3_A4, (PCODE)Store_A1_A2_A3_A4_A5, (PCODE)Store_A1_A2_A3_A4_A5_A6, (PCODE)Store_A1_A2_A3_A4_A5_A6_A7, + (PCODE)0, (PCODE)0, (PCODE)Store_A2, (PCODE)Store_A2_A3, (PCODE)Store_A2_A3_A4, (PCODE)Store_A2_A3_A4_A5, (PCODE)Store_A2_A3_A4_A5_A6, (PCODE)Store_A2_A3_A4_A5_A6_A7, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Store_A3, (PCODE)Store_A3_A4, (PCODE)Store_A3_A4_A5, (PCODE)Store_A3_A4_A5_A6, (PCODE)Store_A3_A4_A5_A6_A7, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Store_A4, (PCODE)Store_A4_A5, (PCODE)Store_A4_A5_A6, (PCODE)Store_A4_A5_A6_A7, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Store_A5, (PCODE)Store_A5_A6, (PCODE)Store_A5_A6_A7, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Store_A6, (PCODE)Store_A6_A7, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Store_A7 + }; +#endif + int index = r1 * NUM_ARGUMENT_REGISTERS + r2; - PCODE routine = m_interpreterToNative ? GPRegsRoutines[index] : GPRegsStoreRoutines[index]; + PCODE routine = m_interpreterToNative ? GPRegsLoadRoutines[index] : GPRegsStoreRoutines[index]; _ASSERTE(routine != 0); return routine; } @@ -2075,7 +906,35 @@ PCODE CallStubGenerator::GetGPRegRefRoutine(int r) #if LOG_COMPUTE_CALL_STUB printf("GetGPRegRefRoutine %d\n", r); #endif - return m_interpreterToNative ? 
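// The flattened layout means entry (r1, r2) of a table handles the
// contiguous register range r1..r2, and only the upper triangle (r1 <= r2)
// is populated; a zero entry would trip the _ASSERTE(routine != 0) check in
// GetGPRegRangeRoutine. A small illustration against the ARM64 table above,
// assuming NUM_ARGUMENT_REGISTERS == 8 on that target:
//
//   int index = 2 * NUM_ARGUMENT_REGISTERS + 5;      // range x2..x5
//   // GPRegsLoadRoutines[index] == (PCODE)Load_X2_X3_X4_X5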
GPRegsRefRoutines[r] : GPRegsRefStoreRoutines[r]; + +#if defined(TARGET_AMD64) && defined(TARGET_WINDOWS) + static const PCODE GPRegsRefLoadRoutines[] = { + (PCODE)Load_Ref_RCX, (PCODE)Load_Ref_RDX, (PCODE)Load_Ref_R8, (PCODE)Load_Ref_R9 + }; + static const PCODE GPRegsRefStoreRoutines[] = { + (PCODE)Store_Ref_RCX, (PCODE)Store_Ref_RDX, (PCODE)Store_Ref_R8, (PCODE)Store_Ref_R9 + }; +#elif defined(TARGET_ARM64) + static const PCODE GPRegsRefLoadRoutines[] = { + (PCODE)Load_Ref_X0, (PCODE)Load_Ref_X1, (PCODE)Load_Ref_X2, (PCODE)Load_Ref_X3, + (PCODE)Load_Ref_X4, (PCODE)Load_Ref_X5, (PCODE)Load_Ref_X6, (PCODE)Load_Ref_X7 + }; + static const PCODE GPRegsRefStoreRoutines[] = { + (PCODE)Store_Ref_X0, (PCODE)Store_Ref_X1, (PCODE)Store_Ref_X2, (PCODE)Store_Ref_X3, + (PCODE)Store_Ref_X4, (PCODE)Store_Ref_X5, (PCODE)Store_Ref_X6, (PCODE)Store_Ref_X7 + }; +#elif defined(TARGET_RISCV64) + static const PCODE GPRegsRefLoadRoutines[] = { + (PCODE)Load_Ref_A0, (PCODE)Load_Ref_A1, (PCODE)Load_Ref_A2, (PCODE)Load_Ref_A3, + (PCODE)Load_Ref_A4, (PCODE)Load_Ref_A5, (PCODE)Load_Ref_A6, (PCODE)Load_Ref_A7 + }; + static const PCODE GPRegsRefStoreRoutines[] = { + (PCODE)Store_Ref_A0, (PCODE)Store_Ref_A1, (PCODE)Store_Ref_A2, (PCODE)Store_Ref_A3, + (PCODE)Store_Ref_A4, (PCODE)Store_Ref_A5, (PCODE)Store_Ref_A6, (PCODE)Store_Ref_A7 + }; +#endif + + return m_interpreterToNative ? GPRegsRefLoadRoutines[r] : GPRegsRefStoreRoutines[r]; } PCODE CallStubGenerator::GetStackRefRoutine() @@ -2098,8 +957,87 @@ PCODE CallStubGenerator::GetFPRegRangeRoutine(int x1, int x2) _ASSERTE(!"Not support FP reg yet"); return 0; #else + +#if defined(TARGET_AMD64) && defined(TARGET_WINDOWS) + static const PCODE FPRegsLoadRoutines[] = { + (PCODE)Load_XMM0, (PCODE)Load_XMM0_XMM1, (PCODE)Load_XMM0_XMM1_XMM2, (PCODE)Load_XMM0_XMM1_XMM2_XMM3, + (PCODE)0, (PCODE)Load_XMM1, (PCODE)Load_XMM1_XMM2, (PCODE)Load_XMM1_XMM2_XMM3, + (PCODE)0, (PCODE)0, (PCODE)Load_XMM2, (PCODE)Load_XMM2_XMM3, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Load_XMM3 + }; + static const PCODE FPRegsStoreRoutines[] = { + (PCODE)Store_XMM0, (PCODE)Store_XMM0_XMM1, (PCODE)Store_XMM0_XMM1_XMM2, (PCODE)Store_XMM0_XMM1_XMM2_XMM3, + (PCODE)0, (PCODE)Store_XMM1, (PCODE)Store_XMM1_XMM2, (PCODE)Store_XMM1_XMM2_XMM3, + (PCODE)0, (PCODE)0, (PCODE)Store_XMM2, (PCODE)Store_XMM2_XMM3, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Store_XMM3 + }; +#elif defined(TARGET_AMD64) // Unix AMD64 + static const PCODE FPRegsLoadRoutines[] = { + (PCODE)Load_XMM0, (PCODE)Load_XMM0_XMM1, (PCODE)Load_XMM0_XMM1_XMM2, (PCODE)Load_XMM0_XMM1_XMM2_XMM3, (PCODE)Load_XMM0_XMM1_XMM2_XMM3_XMM4, (PCODE)Load_XMM0_XMM1_XMM2_XMM3_XMM4_XMM5, (PCODE)Load_XMM0_XMM1_XMM2_XMM3_XMM4_XMM5_XMM6, (PCODE)Load_XMM0_XMM1_XMM2_XMM3_XMM4_XMM5_XMM6_XMM7, + (PCODE)0, (PCODE)Load_XMM1, (PCODE)Load_XMM1_XMM2, (PCODE)Load_XMM1_XMM2_XMM3, (PCODE)Load_XMM1_XMM2_XMM3_XMM4, (PCODE)Load_XMM1_XMM2_XMM3_XMM4_XMM5, (PCODE)Load_XMM1_XMM2_XMM3_XMM4_XMM5_XMM6, (PCODE)Load_XMM1_XMM2_XMM3_XMM4_XMM5_XMM6_XMM7, + (PCODE)0, (PCODE)0, (PCODE)Load_XMM2, (PCODE)Load_XMM2_XMM3, (PCODE)Load_XMM2_XMM3_XMM4, (PCODE)Load_XMM2_XMM3_XMM4_XMM5, (PCODE)Load_XMM2_XMM3_XMM4_XMM5_XMM6, (PCODE)Load_XMM2_XMM3_XMM4_XMM5_XMM6_XMM7, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Load_XMM3, (PCODE)Load_XMM3_XMM4, (PCODE)Load_XMM3_XMM4_XMM5, (PCODE)Load_XMM3_XMM4_XMM5_XMM6, (PCODE)Load_XMM3_XMM4_XMM5_XMM6_XMM7, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Load_XMM4, (PCODE)Load_XMM4_XMM5, (PCODE)Load_XMM4_XMM5_XMM6, (PCODE)Load_XMM4_XMM5_XMM6_XMM7, + (PCODE)0, (PCODE)0, (PCODE)0, 
(PCODE)0, (PCODE)0, (PCODE)Load_XMM5, (PCODE)Load_XMM5_XMM6, (PCODE)Load_XMM5_XMM6_XMM7, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Load_XMM6, (PCODE)Load_XMM6_XMM7, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Load_XMM7 + }; + static const PCODE FPRegsStoreRoutines[] = { + (PCODE)Store_XMM0, (PCODE)Store_XMM0_XMM1, (PCODE)Store_XMM0_XMM1_XMM2, (PCODE)Store_XMM0_XMM1_XMM2_XMM3, (PCODE)Store_XMM0_XMM1_XMM2_XMM3_XMM4, (PCODE)Store_XMM0_XMM1_XMM2_XMM3_XMM4_XMM5, (PCODE)Store_XMM0_XMM1_XMM2_XMM3_XMM4_XMM5_XMM6, (PCODE)Store_XMM0_XMM1_XMM2_XMM3_XMM4_XMM5_XMM6_XMM7, + (PCODE)0, (PCODE)Store_XMM1, (PCODE)Store_XMM1_XMM2, (PCODE)Store_XMM1_XMM2_XMM3, (PCODE)Store_XMM1_XMM2_XMM3_XMM4, (PCODE)Store_XMM1_XMM2_XMM3_XMM4_XMM5, (PCODE)Store_XMM1_XMM2_XMM3_XMM4_XMM5_XMM6, (PCODE)Store_XMM1_XMM2_XMM3_XMM4_XMM5_XMM6_XMM7, + (PCODE)0, (PCODE)0, (PCODE)Store_XMM2, (PCODE)Store_XMM2_XMM3, (PCODE)Store_XMM2_XMM3_XMM4, (PCODE)Store_XMM2_XMM3_XMM4_XMM5, (PCODE)Store_XMM2_XMM3_XMM4_XMM5_XMM6, (PCODE)Store_XMM2_XMM3_XMM4_XMM5_XMM6_XMM7, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Store_XMM3, (PCODE)Store_XMM3_XMM4, (PCODE)Store_XMM3_XMM4_XMM5, (PCODE)Store_XMM3_XMM4_XMM5_XMM6, (PCODE)Store_XMM3_XMM4_XMM5_XMM6_XMM7, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Store_XMM4, (PCODE)Store_XMM4_XMM5, (PCODE)Store_XMM4_XMM5_XMM6, (PCODE)Store_XMM4_XMM5_XMM6_XMM7, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Store_XMM5, (PCODE)Store_XMM5_XMM6, (PCODE)Store_XMM5_XMM6_XMM7, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Store_XMM6, (PCODE)Store_XMM6_XMM7, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Store_XMM7 + }; +#elif defined(TARGET_ARM64) + static const PCODE FPRegsLoadRoutines[] = { + (PCODE)Load_D0, (PCODE)Load_D0_D1, (PCODE)Load_D0_D1_D2, (PCODE)Load_D0_D1_D2_D3, (PCODE)Load_D0_D1_D2_D3_D4, (PCODE)Load_D0_D1_D2_D3_D4_D5, (PCODE)Load_D0_D1_D2_D3_D4_D5_D6, (PCODE)Load_D0_D1_D2_D3_D4_D5_D6_D7, + (PCODE)0, (PCODE)Load_D1, (PCODE)Load_D1_D2, (PCODE)Load_D1_D2_D3, (PCODE)Load_D1_D2_D3_D4, (PCODE)Load_D1_D2_D3_D4_D5, (PCODE)Load_D1_D2_D3_D4_D5_D6, (PCODE)Load_D1_D2_D3_D4_D5_D6_D7, + (PCODE)0, (PCODE)0, (PCODE)Load_D2, (PCODE)Load_D2_D3, (PCODE)Load_D2_D3_D4, (PCODE)Load_D2_D3_D4_D5, (PCODE)Load_D2_D3_D4_D5_D6, (PCODE)Load_D2_D3_D4_D5_D6_D7, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Load_D3, (PCODE)Load_D3_D4, (PCODE)Load_D3_D4_D5, (PCODE)Load_D3_D4_D5_D6, (PCODE)Load_D3_D4_D5_D6_D7, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Load_D4, (PCODE)Load_D4_D5, (PCODE)Load_D4_D5_D6, (PCODE)Load_D4_D5_D6_D7, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Load_D5, (PCODE)Load_D5_D6, (PCODE)Load_D5_D6_D7, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Load_D6, (PCODE)Load_D6_D7, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Load_D7 + }; + static const PCODE FPRegsStoreRoutines[] = { + (PCODE)Store_D0, (PCODE)Store_D0_D1, (PCODE)Store_D0_D1_D2, (PCODE)Store_D0_D1_D2_D3, (PCODE)Store_D0_D1_D2_D3_D4, (PCODE)Store_D0_D1_D2_D3_D4_D5, (PCODE)Store_D0_D1_D2_D3_D4_D5_D6, (PCODE)Store_D0_D1_D2_D3_D4_D5_D6_D7, + (PCODE)0, (PCODE)Store_D1, (PCODE)Store_D1_D2, (PCODE)Store_D1_D2_D3, (PCODE)Store_D1_D2_D3_D4, (PCODE)Store_D1_D2_D3_D4_D5, (PCODE)Store_D1_D2_D3_D4_D5_D6, (PCODE)Store_D1_D2_D3_D4_D5_D6_D7, + (PCODE)0, (PCODE)0, (PCODE)Store_D2, (PCODE)Store_D2_D3, (PCODE)Store_D2_D3_D4, (PCODE)Store_D2_D3_D4_D5, (PCODE)Store_D2_D3_D4_D5_D6, 
(PCODE)Store_D2_D3_D4_D5_D6_D7, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Store_D3, (PCODE)Store_D3_D4, (PCODE)Store_D3_D4_D5, (PCODE)Store_D3_D4_D5_D6, (PCODE)Store_D3_D4_D5_D6_D7, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Store_D4, (PCODE)Store_D4_D5, (PCODE)Store_D4_D5_D6, (PCODE)Store_D4_D5_D6_D7, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Store_D5, (PCODE)Store_D5_D6, (PCODE)Store_D5_D6_D7, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Store_D6, (PCODE)Store_D6_D7, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Store_D7 + }; +#elif defined(TARGET_RISCV64) + static const PCODE FPRegsLoadRoutines[] = { + (PCODE)Load_FA0, (PCODE)Load_FA0_FA1, (PCODE)Load_FA0_FA1_FA2, (PCODE)Load_FA0_FA1_FA2_FA3, (PCODE)Load_FA0_FA1_FA2_FA3_FA4, (PCODE)Load_FA0_FA1_FA2_FA3_FA4_FA5, (PCODE)Load_FA0_FA1_FA2_FA3_FA4_FA5_FA6, (PCODE)Load_FA0_FA1_FA2_FA3_FA4_FA5_FA6_FA7, + (PCODE)0, (PCODE)Load_FA1, (PCODE)Load_FA1_FA2, (PCODE)Load_FA1_FA2_FA3, (PCODE)Load_FA1_FA2_FA3_FA4, (PCODE)Load_FA1_FA2_FA3_FA4_FA5, (PCODE)Load_FA1_FA2_FA3_FA4_FA5_FA6, (PCODE)Load_FA1_FA2_FA3_FA4_FA5_FA6_FA7, + (PCODE)0, (PCODE)0, (PCODE)Load_FA2, (PCODE)Load_FA2_FA3, (PCODE)Load_FA2_FA3_FA4, (PCODE)Load_FA2_FA3_FA4_FA5, (PCODE)Load_FA2_FA3_FA4_FA5_FA6, (PCODE)Load_FA2_FA3_FA4_FA5_FA6_FA7, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Load_FA3, (PCODE)Load_FA3_FA4, (PCODE)Load_FA3_FA4_FA5, (PCODE)Load_FA3_FA4_FA5_FA6, (PCODE)Load_FA3_FA4_FA5_FA6_FA7, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Load_FA4, (PCODE)Load_FA4_FA5, (PCODE)Load_FA4_FA5_FA6, (PCODE)Load_FA4_FA5_FA6_FA7, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Load_FA5, (PCODE)Load_FA5_FA6, (PCODE)Load_FA5_FA6_FA7, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Load_FA6, (PCODE)Load_FA6_FA7, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Load_FA7 + }; + static const PCODE FPRegsStoreRoutines[] = { + (PCODE)Store_FA0, (PCODE)Store_FA0_FA1, (PCODE)Store_FA0_FA1_FA2, (PCODE)Store_FA0_FA1_FA2_FA3, (PCODE)Store_FA0_FA1_FA2_FA3_FA4, (PCODE)Store_FA0_FA1_FA2_FA3_FA4_FA5, (PCODE)Store_FA0_FA1_FA2_FA3_FA4_FA5_FA6, (PCODE)Store_FA0_FA1_FA2_FA3_FA4_FA5_FA6_FA7, + (PCODE)0, (PCODE)Store_FA1, (PCODE)Store_FA1_FA2, (PCODE)Store_FA1_FA2_FA3, (PCODE)Store_FA1_FA2_FA3_FA4, (PCODE)Store_FA1_FA2_FA3_FA4_FA5, (PCODE)Store_FA1_FA2_FA3_FA4_FA5_FA6, (PCODE)Store_FA1_FA2_FA3_FA4_FA5_FA6_FA7, + (PCODE)0, (PCODE)0, (PCODE)Store_FA2, (PCODE)Store_FA2_FA3, (PCODE)Store_FA2_FA3_FA4, (PCODE)Store_FA2_FA3_FA4_FA5, (PCODE)Store_FA2_FA3_FA4_FA5_FA6, (PCODE)Store_FA2_FA3_FA4_FA5_FA6_FA7, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Store_FA3, (PCODE)Store_FA3_FA4, (PCODE)Store_FA3_FA4_FA5, (PCODE)Store_FA3_FA4_FA5_FA6, (PCODE)Store_FA3_FA4_FA5_FA6_FA7, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Store_FA4, (PCODE)Store_FA4_FA5, (PCODE)Store_FA4_FA5_FA6, (PCODE)Store_FA4_FA5_FA6_FA7, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Store_FA5, (PCODE)Store_FA5_FA6, (PCODE)Store_FA5_FA6_FA7, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Store_FA6, (PCODE)Store_FA6_FA7, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Store_FA7 + }; +#endif + int index = x1 * NUM_FLOAT_ARGUMENT_REGISTERS + x2; - PCODE routine = m_interpreterToNative ? FPRegsRoutines[index] : FPRegsStoreRoutines[index]; + PCODE routine = m_interpreterToNative ? 
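// The same triangular scheme repeats for each floating-point width on
// ARM64: the D-register tables here cover 8-byte doubles, the Q-register
// tables in GetFPReg128RangeRoutine cover 16-byte Vector128 values, and the
// S-register tables in GetFPReg32RangeRoutine cover 4-byte floats, all
// indexed with x1 * NUM_FLOAT_ARGUMENT_REGISTERS + x2. RISCV64 has a single
// FA-register flavor, as the tables just above suggest.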
FPRegsLoadRoutines[index] : FPRegsStoreRoutines[index]; _ASSERTE(routine != 0); return routine; #endif @@ -2111,6 +1049,27 @@ PCODE CallStubGenerator::GetFPReg128RangeRoutine(int x1, int x2) #if LOG_COMPUTE_CALL_STUB printf("GetFPReg128RangeRoutine %d %d\n", x1, x2); #endif + static const PCODE FPRegs128LoadRoutines[] = { + (PCODE)Load_Q0, (PCODE)Load_Q0_Q1, (PCODE)Load_Q0_Q1_Q2, (PCODE)Load_Q0_Q1_Q2_Q3, (PCODE)Load_Q0_Q1_Q2_Q3_Q4, (PCODE)Load_Q0_Q1_Q2_Q3_Q4_Q5, (PCODE)Load_Q0_Q1_Q2_Q3_Q4_Q5_Q6, (PCODE)Load_Q0_Q1_Q2_Q3_Q4_Q5_Q6_Q7, + (PCODE)0, (PCODE)Load_Q1, (PCODE)Load_Q1_Q2, (PCODE)Load_Q1_Q2_Q3, (PCODE)Load_Q1_Q2_Q3_Q4, (PCODE)Load_Q1_Q2_Q3_Q4_Q5, (PCODE)Load_Q1_Q2_Q3_Q4_Q5_Q6, (PCODE)Load_Q1_Q2_Q3_Q4_Q5_Q6_Q7, + (PCODE)0, (PCODE)0, (PCODE)Load_Q2, (PCODE)Load_Q2_Q3, (PCODE)Load_Q2_Q3_Q4, (PCODE)Load_Q2_Q3_Q4_Q5, (PCODE)Load_Q2_Q3_Q4_Q5_Q6, (PCODE)Load_Q2_Q3_Q4_Q5_Q6_Q7, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Load_Q3, (PCODE)Load_Q3_Q4, (PCODE)Load_Q3_Q4_Q5, (PCODE)Load_Q3_Q4_Q5_Q6, (PCODE)Load_Q3_Q4_Q5_Q6_Q7, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Load_Q4, (PCODE)Load_Q4_Q5, (PCODE)Load_Q4_Q5_Q6, (PCODE)Load_Q4_Q5_Q6_Q7, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Load_Q5, (PCODE)Load_Q5_Q6, (PCODE)Load_Q5_Q6_Q7, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Load_Q6, (PCODE)Load_Q6_Q7, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Load_Q7 + }; + static const PCODE FPRegs128StoreRoutines[] = { + (PCODE)Store_Q0, (PCODE)Store_Q0_Q1, (PCODE)Store_Q0_Q1_Q2, (PCODE)Store_Q0_Q1_Q2_Q3, (PCODE)Store_Q0_Q1_Q2_Q3_Q4, (PCODE)Store_Q0_Q1_Q2_Q3_Q4_Q5, (PCODE)Store_Q0_Q1_Q2_Q3_Q4_Q5_Q6, (PCODE)Store_Q0_Q1_Q2_Q3_Q4_Q5_Q6_Q7, + (PCODE)0, (PCODE)Store_Q1, (PCODE)Store_Q1_Q2, (PCODE)Store_Q1_Q2_Q3, (PCODE)Store_Q1_Q2_Q3_Q4, (PCODE)Store_Q1_Q2_Q3_Q4_Q5, (PCODE)Store_Q1_Q2_Q3_Q4_Q5_Q6, (PCODE)Store_Q1_Q2_Q3_Q4_Q5_Q6_Q7, + (PCODE)0, (PCODE)0, (PCODE)Store_Q2, (PCODE)Store_Q2_Q3, (PCODE)Store_Q2_Q3_Q4, (PCODE)Store_Q2_Q3_Q4_Q5, (PCODE)Store_Q2_Q3_Q4_Q5_Q6, (PCODE)Store_Q2_Q3_Q4_Q5_Q6_Q7, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Store_Q3, (PCODE)Store_Q3_Q4, (PCODE)Store_Q3_Q4_Q5, (PCODE)Store_Q3_Q4_Q5_Q6, (PCODE)Store_Q3_Q4_Q5_Q6_Q7, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Store_Q4, (PCODE)Store_Q4_Q5, (PCODE)Store_Q4_Q5_Q6, (PCODE)Store_Q4_Q5_Q6_Q7, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Store_Q5, (PCODE)Store_Q5_Q6, (PCODE)Store_Q5_Q6_Q7, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Store_Q6, (PCODE)Store_Q6_Q7, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Store_Q7 + }; + int index = x1 * NUM_FLOAT_ARGUMENT_REGISTERS + x2; PCODE routine = m_interpreterToNative ? 
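// Keeping each table as a function-local `static const` array (rather than
// the file-scope globals this change removes) appears intended to put the
// per-target #if/#elif selection next to the one indexing expression that
// reads it; the table data itself is unchanged and still gets constant,
// one-time storage.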
FPRegs128LoadRoutines[index] : FPRegs128StoreRoutines[index]; _ASSERTE(routine != 0); @@ -2122,6 +1081,27 @@ PCODE CallStubGenerator::GetFPReg32RangeRoutine(int x1, int x2) #if LOG_COMPUTE_CALL_STUB printf("GetFPReg32RangeRoutine %d %d\n", x1, x2); #endif + static const PCODE FPRegs32LoadRoutines[] = { + (PCODE)Load_S0, (PCODE)Load_S0_S1, (PCODE)Load_S0_S1_S2, (PCODE)Load_S0_S1_S2_S3, (PCODE)Load_S0_S1_S2_S3_S4, (PCODE)Load_S0_S1_S2_S3_S4_S5, (PCODE)Load_S0_S1_S2_S3_S4_S5_S6, (PCODE)Load_S0_S1_S2_S3_S4_S5_S6_S7, + (PCODE)0, (PCODE)Load_S1, (PCODE)Load_S1_S2, (PCODE)Load_S1_S2_S3, (PCODE)Load_S1_S2_S3_S4, (PCODE)Load_S1_S2_S3_S4_S5, (PCODE)Load_S1_S2_S3_S4_S5_S6, (PCODE)Load_S1_S2_S3_S4_S5_S6_S7, + (PCODE)0, (PCODE)0, (PCODE)Load_S2, (PCODE)Load_S2_S3, (PCODE)Load_S2_S3_S4, (PCODE)Load_S2_S3_S4_S5, (PCODE)Load_S2_S3_S4_S5_S6, (PCODE)Load_S2_S3_S4_S5_S6_S7, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Load_S3, (PCODE)Load_S3_S4, (PCODE)Load_S3_S4_S5, (PCODE)Load_S3_S4_S5_S6, (PCODE)Load_S3_S4_S5_S6_S7, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Load_S4, (PCODE)Load_S4_S5, (PCODE)Load_S4_S5_S6, (PCODE)Load_S4_S5_S6_S7, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Load_S5, (PCODE)Load_S5_S6, (PCODE)Load_S5_S6_S7, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Load_S6, (PCODE)Load_S6_S7, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Load_S7 + }; + static const PCODE FPRegs32StoreRoutines[] = { + (PCODE)Store_S0, (PCODE)Store_S0_S1, (PCODE)Store_S0_S1_S2, (PCODE)Store_S0_S1_S2_S3, (PCODE)Store_S0_S1_S2_S3_S4, (PCODE)Store_S0_S1_S2_S3_S4_S5, (PCODE)Store_S0_S1_S2_S3_S4_S5_S6, (PCODE)Store_S0_S1_S2_S3_S4_S5_S6_S7, + (PCODE)0, (PCODE)Store_S1, (PCODE)Store_S1_S2, (PCODE)Store_S1_S2_S3, (PCODE)Store_S1_S2_S3_S4, (PCODE)Store_S1_S2_S3_S4_S5, (PCODE)Store_S1_S2_S3_S4_S5_S6, (PCODE)Store_S1_S2_S3_S4_S5_S6_S7, + (PCODE)0, (PCODE)0, (PCODE)Store_S2, (PCODE)Store_S2_S3, (PCODE)Store_S2_S3_S4, (PCODE)Store_S2_S3_S4_S5, (PCODE)Store_S2_S3_S4_S5_S6, (PCODE)Store_S2_S3_S4_S5_S6_S7, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Store_S3, (PCODE)Store_S3_S4, (PCODE)Store_S3_S4_S5, (PCODE)Store_S3_S4_S5_S6, (PCODE)Store_S3_S4_S5_S6_S7, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Store_S4, (PCODE)Store_S4_S5, (PCODE)Store_S4_S5_S6, (PCODE)Store_S4_S5_S6_S7, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Store_S5, (PCODE)Store_S5_S6, (PCODE)Store_S5_S6_S7, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Store_S6, (PCODE)Store_S6_S7, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Store_S7 + }; + int index = x1 * NUM_FLOAT_ARGUMENT_REGISTERS + x2; return m_interpreterToNative ? FPRegs32LoadRoutines[index] : FPRegs32StoreRoutines[index]; } @@ -2133,6 +1113,19 @@ PCODE CallStubGenerator::GetRegRoutine_4B(int r1, int r2) #if LOG_COMPUTE_CALL_STUB printf("GetRegRoutine_4B\n"); #endif + static const PCODE GPRegLoadRoutines_4B[] = { + (PCODE)0, (PCODE)Load_R0_R1_4B, (PCODE)0, (PCODE)Load_R0_R1_R2_R3_4B, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Load_R2_R3_4B, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0 + }; + static const PCODE GPRegStoreRoutines_4B[] = { + (PCODE)0, (PCODE)Store_R0_R1_4B, (PCODE)0, (PCODE)Store_R0_R1_R2_R3_4B, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)Store_R2_R3_4B, + (PCODE)0, (PCODE)0, (PCODE)0, (PCODE)0 + }; + int index = r1 * NUM_ARGUMENT_REGISTERS + r2; return m_interpreterToNative ? 
GPRegLoadRoutines_4B[index] : GPRegStoreRoutines_4B[index]; } From 27e570025bb5d5290c7ed35b787118dca398dea7 Mon Sep 17 00:00:00 2001 From: Milos Kotlar Date: Fri, 30 Jan 2026 19:32:37 +0100 Subject: [PATCH 31/33] Wrap code in TARGET_APPLE --- src/coreclr/vm/arm64/asmhelpers.S | 64 ++++++++++++++++++++++++++++++ src/coreclr/vm/callstubgenerator.h | 6 ++- 2 files changed, 69 insertions(+), 1 deletion(-) diff --git a/src/coreclr/vm/arm64/asmhelpers.S b/src/coreclr/vm/arm64/asmhelpers.S index d5dffd31dec298..f397af17cabf30 100644 --- a/src/coreclr/vm/arm64/asmhelpers.S +++ b/src/coreclr/vm/arm64/asmhelpers.S @@ -2486,16 +2486,20 @@ LEAF_END Load_Q1_Q2_Q3_Q4_Q5_Q6_Q7 NESTED_ENTRY CallJittedMethodRetVoid, _TEXT, NoHandler PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -48 str x4, [fp, #16] +#ifdef TARGET_APPLE str xzr, [fp, #40] +#endif sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 +#ifdef TARGET_APPLE ldr x11, [fp, #40] cbz x11, LOCAL_LABEL(CallJittedMethodRetVoid_NoSwiftError) str x21, [x11] LOCAL_LABEL(CallJittedMethodRetVoid_NoSwiftError): +#endif ldr x4, [fp, #16] str x2, [x4] EPILOG_STACK_RESTORE @@ -2511,17 +2515,21 @@ NESTED_END CallJittedMethodRetVoid, _TEXT NESTED_ENTRY CallJittedMethodRetBuff, _TEXT, NoHandler PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -48 str x4, [fp, #16] +#ifdef TARGET_APPLE str xzr, [fp, #40] +#endif sub sp, sp, x3 mov x10, x0 mov x9, x1 mov x8, x2 ldr x11, [x10], #8 blr x11 +#ifdef TARGET_APPLE ldr x11, [fp, #40] cbz x11, LOCAL_LABEL(CallJittedMethodRetBuff_NoSwiftError) str x21, [x11] LOCAL_LABEL(CallJittedMethodRetBuff_NoSwiftError): +#endif ldr x4, [fp, #16] str x2, [x4] EPILOG_STACK_RESTORE @@ -2537,16 +2545,20 @@ NESTED_END CallJittedMethodRetBuff, _TEXT NESTED_ENTRY CallJittedMethodRetI1, _TEXT, NoHandler PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -48 stp x2, x4, [fp, #16] +#ifdef TARGET_APPLE str xzr, [fp, #40] +#endif sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 +#ifdef TARGET_APPLE ldr x11, [fp, #40] cbz x11, LOCAL_LABEL(CallJittedMethodRetI1_NoSwiftError) str x21, [x11] LOCAL_LABEL(CallJittedMethodRetI1_NoSwiftError): +#endif sxtb x0, w0 ldr x9, [fp, #24] str x2, [x9] @@ -2565,16 +2577,20 @@ NESTED_END CallJittedMethodRetI1, _TEXT NESTED_ENTRY CallJittedMethodRetI2, _TEXT, NoHandler PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -48 stp x2, x4, [fp, #16] +#ifdef TARGET_APPLE str xzr, [fp, #40] +#endif sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 +#ifdef TARGET_APPLE ldr x11, [fp, #40] cbz x11, LOCAL_LABEL(CallJittedMethodRetI2_NoSwiftError) str x21, [x11] LOCAL_LABEL(CallJittedMethodRetI2_NoSwiftError): +#endif sxth x0, w0 ldr x9, [fp, #24] str x2, [x9] @@ -2593,16 +2609,20 @@ NESTED_END CallJittedMethodRetI2, _TEXT NESTED_ENTRY CallJittedMethodRetI8, _TEXT, NoHandler PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -48 stp x2, x4, [fp, #16] +#ifdef TARGET_APPLE str xzr, [fp, #40] +#endif sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 +#ifdef TARGET_APPLE ldr x11, [fp, #40] cbz x11, LOCAL_LABEL(CallJittedMethodRetI8_NoSwiftError) str x21, [x11] LOCAL_LABEL(CallJittedMethodRetI8_NoSwiftError): +#endif ldr x9, [fp, #24] str x2, [x9] ldr x9, [fp, #16] @@ -2620,16 +2640,20 @@ NESTED_END CallJittedMethodRetI8, _TEXT NESTED_ENTRY CallJittedMethodRetU1, _TEXT, NoHandler PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -48 stp x2, x4, [fp, #16] +#ifdef TARGET_APPLE str xzr, [fp, #40] +#endif sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 +#ifdef TARGET_APPLE ldr x11, [fp, #40] cbz x11, 
LOCAL_LABEL(CallJittedMethodRetU1_NoSwiftError) str x21, [x11] LOCAL_LABEL(CallJittedMethodRetU1_NoSwiftError): +#endif uxtb x0, w0 ldr x9, [fp, #24] str x2, [x9] @@ -2648,16 +2672,20 @@ NESTED_END CallJittedMethodRetU1, _TEXT NESTED_ENTRY CallJittedMethodRetU2, _TEXT, NoHandler PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -48 stp x2, x4, [fp, #16] +#ifdef TARGET_APPLE str xzr, [fp, #40] +#endif sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 +#ifdef TARGET_APPLE ldr x11, [fp, #40] cbz x11, LOCAL_LABEL(CallJittedMethodRetU2_NoSwiftError) str x21, [x11] LOCAL_LABEL(CallJittedMethodRetU2_NoSwiftError): +#endif uxth x0, w0 ldr x9, [fp, #24] str x2, [x9] @@ -2676,16 +2704,20 @@ NESTED_END CallJittedMethodRetU2, _TEXT NESTED_ENTRY CallJittedMethodRet2I8, _TEXT, NoHandler PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -48 stp x2, x4, [fp, #16] +#ifdef TARGET_APPLE str xzr, [fp, #40] +#endif sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 +#ifdef TARGET_APPLE ldr x11, [fp, #40] cbz x11, LOCAL_LABEL(CallJittedMethodRet2I8_NoSwiftError) str x21, [x11] LOCAL_LABEL(CallJittedMethodRet2I8_NoSwiftError): +#endif ldr x9, [fp, #24] str x2, [x9] ldr x9, [fp, #16] @@ -2703,16 +2735,20 @@ NESTED_END CallJittedMethodRet2I8, _TEXT NESTED_ENTRY CallJittedMethodRetDouble, _TEXT, NoHandler PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -48 stp x2, x4, [fp, #16] +#ifdef TARGET_APPLE str xzr, [fp, #40] +#endif sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 +#ifdef TARGET_APPLE ldr x11, [fp, #40] cbz x11, LOCAL_LABEL(CallJittedMethodRetDouble_NoSwiftError) str x21, [x11] LOCAL_LABEL(CallJittedMethodRetDouble_NoSwiftError): +#endif ldr x9, [fp, #24] str x2, [x9] ldr x9, [fp, #16] @@ -2730,16 +2766,20 @@ NESTED_END CallJittedMethodRetDouble, _TEXT NESTED_ENTRY CallJittedMethodRet2Double, _TEXT, NoHandler PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -48 stp x2, x4, [fp, #16] +#ifdef TARGET_APPLE str xzr, [fp, #40] +#endif sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 +#ifdef TARGET_APPLE ldr x11, [fp, #40] cbz x11, LOCAL_LABEL(CallJittedMethodRet2Double_NoSwiftError) str x21, [x11] LOCAL_LABEL(CallJittedMethodRet2Double_NoSwiftError): +#endif ldr x9, [fp, #24] str x2, [x9] ldr x9, [fp, #16] @@ -2757,16 +2797,20 @@ NESTED_END CallJittedMethodRet2Double, _TEXT NESTED_ENTRY CallJittedMethodRet3Double, _TEXT, NoHandler PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -48 stp x2, x4, [fp, #16] +#ifdef TARGET_APPLE str xzr, [fp, #40] +#endif sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 +#ifdef TARGET_APPLE ldr x11, [fp, #40] cbz x11, LOCAL_LABEL(CallJittedMethodRet3Double_NoSwiftError) str x21, [x11] LOCAL_LABEL(CallJittedMethodRet3Double_NoSwiftError): +#endif ldr x9, [fp, #24] str x2, [x9] ldr x9, [fp, #16] @@ -2785,16 +2829,20 @@ NESTED_END CallJittedMethodRet3Double, _TEXT NESTED_ENTRY CallJittedMethodRet4Double, _TEXT, NoHandler PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -48 stp x2, x4, [fp, #16] +#ifdef TARGET_APPLE str xzr, [fp, #40] +#endif sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 +#ifdef TARGET_APPLE ldr x11, [fp, #40] cbz x11, LOCAL_LABEL(CallJittedMethodRet4Double_NoSwiftError) str x21, [x11] LOCAL_LABEL(CallJittedMethodRet4Double_NoSwiftError): +#endif ldr x9, [fp, #24] str x2, [x9] ldr x9, [fp, #16] @@ -2813,16 +2861,20 @@ NESTED_END CallJittedMethodRet4Double, _TEXT NESTED_ENTRY CallJittedMethodRetFloat, _TEXT, NoHandler PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -48 stp x2, x4, [fp, #16] +#ifdef TARGET_APPLE str xzr, [fp, #40] +#endif sub sp, sp, x3 mov 
x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 +#ifdef TARGET_APPLE ldr x11, [fp, #40] cbz x11, LOCAL_LABEL(CallJittedMethodRetFloat_NoSwiftError) str x21, [x11] LOCAL_LABEL(CallJittedMethodRetFloat_NoSwiftError): +#endif ldr x9, [fp, #24] str x2, [x9] ldr x9, [fp, #16] @@ -2840,16 +2892,20 @@ NESTED_END CallJittedMethodRetFloat, _TEXT NESTED_ENTRY CallJittedMethodRet2Float, _TEXT, NoHandler PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -48 stp x2, x4, [fp, #16] +#ifdef TARGET_APPLE str xzr, [fp, #40] +#endif sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 +#ifdef TARGET_APPLE ldr x11, [fp, #40] cbz x11, LOCAL_LABEL(CallJittedMethodRet2Float_NoSwiftError) str x21, [x11] LOCAL_LABEL(CallJittedMethodRet2Float_NoSwiftError): +#endif ldr x9, [fp, #24] str x2, [x9] ldr x9, [fp, #16] @@ -2867,16 +2923,20 @@ NESTED_END CallJittedMethodRet2Float, _TEXT NESTED_ENTRY CallJittedMethodRet3Float, _TEXT, NoHandler PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -48 stp x2, x4, [fp, #16] +#ifdef TARGET_APPLE str xzr, [fp, #40] +#endif sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 +#ifdef TARGET_APPLE ldr x11, [fp, #40] cbz x11, LOCAL_LABEL(CallJittedMethodRet3Float_NoSwiftError) str x21, [x11] LOCAL_LABEL(CallJittedMethodRet3Float_NoSwiftError): +#endif ldr x9, [fp, #24] str x2, [x9] ldr x9, [fp, #16] @@ -2895,16 +2955,20 @@ NESTED_END CallJittedMethodRet3Float, _TEXT NESTED_ENTRY CallJittedMethodRet4Float, _TEXT, NoHandler PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -48 stp x2, x4, [fp, #16] +#ifdef TARGET_APPLE str xzr, [fp, #40] +#endif sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 +#ifdef TARGET_APPLE ldr x11, [fp, #40] cbz x11, LOCAL_LABEL(CallJittedMethodRet4Float_NoSwiftError) str x21, [x11] LOCAL_LABEL(CallJittedMethodRet4Float_NoSwiftError): +#endif ldr x9, [fp, #24] str x2, [x9] ldr x9, [fp, #16] diff --git a/src/coreclr/vm/callstubgenerator.h b/src/coreclr/vm/callstubgenerator.h index a499090bc24109..9621b634a8f13b 100644 --- a/src/coreclr/vm/callstubgenerator.h +++ b/src/coreclr/vm/callstubgenerator.h @@ -243,8 +243,12 @@ class CallStubGenerator // The size of the temporary storage is the size of the CallStubHeader plus the size of the routines array. // The size of the routines array is three times the number of arguments plus one slot for the target method pointer. + size_t baseSize = sizeof(CallStubHeader) + ((numArgs + 1) * 3 + 1) * sizeof(PCODE); +#if defined(TARGET_APPLE) && defined(TARGET_ARM64) // Add extra space for Swift return lowering (up to 4 elements * 2 slots + terminator = 9 slots). 
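// Worst-case arithmetic for the extra slots, as the comment above counts
// them: up to 4 lowered return elements, each taking two PCODE slots (a
// routine pointer plus the packed offset/size word emitted by
// EmitSwiftLoweredElementRoutine later in this series), plus one terminator
// slot, gives 4 * 2 + 1 = 9. Folding that into baseSize under
// TARGET_APPLE && TARGET_ARM64 means other targets stop reserving the nine
// slots unconditionally, which the old return statement below did.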
- return sizeof(CallStubHeader) + ((numArgs + 1) * 3 + 1 + 9) * sizeof(PCODE); + baseSize += 9 * sizeof(PCODE); +#endif + return baseSize; } void ComputeCallStub(MetaSig &sig, PCODE *pRoutines, MethodDesc *pMD); template From debfe01f78d918d050a57bb4b4288f666c438d9d Mon Sep 17 00:00:00 2001 From: Milos Kotlar Date: Wed, 4 Feb 2026 10:41:20 +0100 Subject: [PATCH 32/33] Update logging for Swift routines and guard stubs with TARGET_APPLE --- src/coreclr/vm/arm64/asmhelpers.S | 39 ++++++++++++++++++++++++++++ src/coreclr/vm/callstubgenerator.cpp | 38 +++++++++++++-------------- 2 files changed, 58 insertions(+), 19 deletions(-) diff --git a/src/coreclr/vm/arm64/asmhelpers.S b/src/coreclr/vm/arm64/asmhelpers.S index c2f0c72f7b6660..905614cbf60251 100644 --- a/src/coreclr/vm/arm64/asmhelpers.S +++ b/src/coreclr/vm/arm64/asmhelpers.S @@ -1171,6 +1171,8 @@ Store_Ref X5 Store_Ref X6 Store_Ref X7 +#ifdef TARGET_APPLE + LEAF_ENTRY Load_SwiftSelf ldr x20, [x9], #8 ldr x11, [x10], #8 @@ -1344,6 +1346,8 @@ LEAF_ENTRY Store_D3_AtOffset SwiftStoreFloat_AtOffset d3 LEAF_END Store_D3_AtOffset +#endif // TARGET_APPLE + LEAF_ENTRY Store_X0 str x0, [x9], #8 ldr x11, [x10], #8 @@ -2985,16 +2989,20 @@ NESTED_END CallJittedMethodRet4Float, _TEXT NESTED_ENTRY CallJittedMethodRetVector64, _TEXT, NoHandler PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -48 stp x2, x4, [fp, #16] +#ifdef TARGET_APPLE str xzr, [fp, #40] +#endif sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 +#ifdef TARGET_APPLE ldr x11, [fp, #40] cbz x11, LOCAL_LABEL(CallJittedMethodRetVector64_NoSwiftError) str x21, [x11] LOCAL_LABEL(CallJittedMethodRetVector64_NoSwiftError): +#endif ldr x9, [fp, #24] str x2, [x9] ldr x9, [fp, #16] @@ -3012,16 +3020,20 @@ NESTED_END CallJittedMethodRetVector64, _TEXT NESTED_ENTRY CallJittedMethodRet2Vector64, _TEXT, NoHandler PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -48 stp x2, x4, [fp, #16] +#ifdef TARGET_APPLE str xzr, [fp, #40] +#endif sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 +#ifdef TARGET_APPLE ldr x11, [fp, #40] cbz x11, LOCAL_LABEL(CallJittedMethodRet2Vector64_NoSwiftError) str x21, [x11] LOCAL_LABEL(CallJittedMethodRet2Vector64_NoSwiftError): +#endif ldr x9, [fp, #24] str x2, [x9] ldr x9, [fp, #16] @@ -3040,16 +3052,20 @@ NESTED_END CallJittedMethodRet2Vector64, _TEXT NESTED_ENTRY CallJittedMethodRet3Vector64, _TEXT, NoHandler PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -48 stp x2, x4, [fp, #16] +#ifdef TARGET_APPLE str xzr, [fp, #40] +#endif sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 +#ifdef TARGET_APPLE ldr x11, [fp, #40] cbz x11, LOCAL_LABEL(CallJittedMethodRet3Vector64_NoSwiftError) str x21, [x11] LOCAL_LABEL(CallJittedMethodRet3Vector64_NoSwiftError): +#endif ldr x9, [fp, #24] str x2, [x9] ldr x9, [fp, #16] @@ -3069,16 +3085,20 @@ NESTED_END CallJittedMethodRet3Vector64, _TEXT NESTED_ENTRY CallJittedMethodRet4Vector64, _TEXT, NoHandler PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -48 stp x2, x4, [fp, #16] +#ifdef TARGET_APPLE str xzr, [fp, #40] +#endif sub sp, sp, x3 mov x10, x0 mov x9, x1 ldr x11, [x10], #8 blr x11 +#ifdef TARGET_APPLE ldr x11, [fp, #40] cbz x11, LOCAL_LABEL(CallJittedMethodRet4Vector64_NoSwiftError) str x21, [x11] LOCAL_LABEL(CallJittedMethodRet4Vector64_NoSwiftError): +#endif ldr x9, [fp, #24] str x2, [x9] ldr x9, [fp, #16] @@ -3099,16 +3119,20 @@ NESTED_END CallJittedMethodRet4Vector64, _TEXT NESTED_ENTRY CallJittedMethodRetVector128, _TEXT, NoHandler PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -48 stp x2, x4, [fp, #16] +#ifdef TARGET_APPLE str xzr, [fp, 
From debfe01f78d918d050a57bb4b4288f666c438d9d Mon Sep 17 00:00:00 2001
From: Milos Kotlar
Date: Wed, 4 Feb 2026 10:41:20 +0100
Subject: [PATCH 32/33] Update logging for Swift routines and guard stubs with TARGET_APPLE

---
 src/coreclr/vm/arm64/asmhelpers.S    | 39 ++++++++++++++++++++++++++++
 src/coreclr/vm/callstubgenerator.cpp | 38 +++++++++++++--------------
 2 files changed, 58 insertions(+), 19 deletions(-)

diff --git a/src/coreclr/vm/arm64/asmhelpers.S b/src/coreclr/vm/arm64/asmhelpers.S
index c2f0c72f7b6660..905614cbf60251 100644
--- a/src/coreclr/vm/arm64/asmhelpers.S
+++ b/src/coreclr/vm/arm64/asmhelpers.S
@@ -1171,6 +1171,8 @@ Store_Ref X5
 Store_Ref X6
 Store_Ref X7
 
+#ifdef TARGET_APPLE
+
 LEAF_ENTRY Load_SwiftSelf
     ldr x20, [x9], #8
     ldr x11, [x10], #8
@@ -1344,6 +1346,8 @@ LEAF_ENTRY Store_D3_AtOffset
     SwiftStoreFloat_AtOffset d3
 LEAF_END Store_D3_AtOffset
 
+#endif // TARGET_APPLE
+
 LEAF_ENTRY Store_X0
     str x0, [x9], #8
     ldr x11, [x10], #8
@@ -2985,16 +2989,20 @@ NESTED_END CallJittedMethodRet4Float, _TEXT
 NESTED_ENTRY CallJittedMethodRetVector64, _TEXT, NoHandler
     PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -48
     stp x2, x4, [fp, #16]
+#ifdef TARGET_APPLE
     str xzr, [fp, #40]
+#endif
     sub sp, sp, x3
     mov x10, x0
     mov x9, x1
     ldr x11, [x10], #8
     blr x11
+#ifdef TARGET_APPLE
     ldr x11, [fp, #40]
     cbz x11, LOCAL_LABEL(CallJittedMethodRetVector64_NoSwiftError)
     str x21, [x11]
 LOCAL_LABEL(CallJittedMethodRetVector64_NoSwiftError):
+#endif
     ldr x9, [fp, #24]
     str x2, [x9]
     ldr x9, [fp, #16]
@@ -3012,16 +3020,20 @@ NESTED_END CallJittedMethodRetVector64, _TEXT
 NESTED_ENTRY CallJittedMethodRet2Vector64, _TEXT, NoHandler
     PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -48
     stp x2, x4, [fp, #16]
+#ifdef TARGET_APPLE
     str xzr, [fp, #40]
+#endif
     sub sp, sp, x3
     mov x10, x0
     mov x9, x1
     ldr x11, [x10], #8
     blr x11
+#ifdef TARGET_APPLE
     ldr x11, [fp, #40]
     cbz x11, LOCAL_LABEL(CallJittedMethodRet2Vector64_NoSwiftError)
     str x21, [x11]
 LOCAL_LABEL(CallJittedMethodRet2Vector64_NoSwiftError):
+#endif
     ldr x9, [fp, #24]
     str x2, [x9]
     ldr x9, [fp, #16]
@@ -3040,16 +3052,20 @@ NESTED_END CallJittedMethodRet2Vector64, _TEXT
 NESTED_ENTRY CallJittedMethodRet3Vector64, _TEXT, NoHandler
     PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -48
     stp x2, x4, [fp, #16]
+#ifdef TARGET_APPLE
     str xzr, [fp, #40]
+#endif
     sub sp, sp, x3
     mov x10, x0
     mov x9, x1
     ldr x11, [x10], #8
     blr x11
+#ifdef TARGET_APPLE
     ldr x11, [fp, #40]
     cbz x11, LOCAL_LABEL(CallJittedMethodRet3Vector64_NoSwiftError)
     str x21, [x11]
 LOCAL_LABEL(CallJittedMethodRet3Vector64_NoSwiftError):
+#endif
     ldr x9, [fp, #24]
     str x2, [x9]
     ldr x9, [fp, #16]
@@ -3069,16 +3085,20 @@ NESTED_END CallJittedMethodRet3Vector64, _TEXT
 NESTED_ENTRY CallJittedMethodRet4Vector64, _TEXT, NoHandler
     PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -48
     stp x2, x4, [fp, #16]
+#ifdef TARGET_APPLE
     str xzr, [fp, #40]
+#endif
     sub sp, sp, x3
     mov x10, x0
     mov x9, x1
     ldr x11, [x10], #8
     blr x11
+#ifdef TARGET_APPLE
     ldr x11, [fp, #40]
     cbz x11, LOCAL_LABEL(CallJittedMethodRet4Vector64_NoSwiftError)
     str x21, [x11]
 LOCAL_LABEL(CallJittedMethodRet4Vector64_NoSwiftError):
+#endif
     ldr x9, [fp, #24]
     str x2, [x9]
     ldr x9, [fp, #16]
@@ -3099,16 +3119,20 @@ NESTED_END CallJittedMethodRet4Vector64, _TEXT
 NESTED_ENTRY CallJittedMethodRetVector128, _TEXT, NoHandler
     PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -48
     stp x2, x4, [fp, #16]
+#ifdef TARGET_APPLE
     str xzr, [fp, #40]
+#endif
     sub sp, sp, x3
     mov x10, x0
     mov x9, x1
     ldr x11, [x10], #8
     blr x11
+#ifdef TARGET_APPLE
     ldr x11, [fp, #40]
     cbz x11, LOCAL_LABEL(CallJittedMethodRetVector128_NoSwiftError)
     str x21, [x11]
 LOCAL_LABEL(CallJittedMethodRetVector128_NoSwiftError):
+#endif
     ldr x9, [fp, #24]
     str x2, [x9]
     ldr x9, [fp, #16]
@@ -3126,16 +3150,20 @@ NESTED_END CallJittedMethodRetVector128, _TEXT
 NESTED_ENTRY CallJittedMethodRet2Vector128, _TEXT, NoHandler
     PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -48
     stp x2, x4, [fp, #16]
+#ifdef TARGET_APPLE
     str xzr, [fp, #40]
+#endif
     sub sp, sp, x3
     mov x10, x0
     mov x9, x1
     ldr x11, [x10], #8
     blr x11
+#ifdef TARGET_APPLE
     ldr x11, [fp, #40]
     cbz x11, LOCAL_LABEL(CallJittedMethodRet2Vector128_NoSwiftError)
     str x21, [x11]
 LOCAL_LABEL(CallJittedMethodRet2Vector128_NoSwiftError):
+#endif
     ldr x9, [fp, #24]
     str x2, [x9]
     ldr x9, [fp, #16]
@@ -3154,16 +3182,20 @@ NESTED_END CallJittedMethodRet2Vector128, _TEXT
 NESTED_ENTRY CallJittedMethodRet3Vector128, _TEXT, NoHandler
     PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -48
     stp x2, x4, [fp, #16]
+#ifdef TARGET_APPLE
     str xzr, [fp, #40]
+#endif
     sub sp, sp, x3
     mov x10, x0
     mov x9, x1
     ldr x11, [x10], #8
     blr x11
+#ifdef TARGET_APPLE
     ldr x11, [fp, #40]
     cbz x11, LOCAL_LABEL(CallJittedMethodRet3Vector128_NoSwiftError)
     str x21, [x11]
 LOCAL_LABEL(CallJittedMethodRet3Vector128_NoSwiftError):
+#endif
     ldr x9, [fp, #24]
     str x2, [x9]
     ldr x9, [fp, #16]
@@ -3183,16 +3215,20 @@ NESTED_END CallJittedMethodRet3Vector128, _TEXT
 NESTED_ENTRY CallJittedMethodRet4Vector128, _TEXT, NoHandler
     PROLOG_SAVE_REG_PAIR_INDEXED fp, lr, -48
     stp x2, x4, [fp, #16]
+#ifdef TARGET_APPLE
     str xzr, [fp, #40]
+#endif
     sub sp, sp, x3
     mov x10, x0
     mov x9, x1
     ldr x11, [x10], #8
     blr x11
+#ifdef TARGET_APPLE
     ldr x11, [fp, #40]
     cbz x11, LOCAL_LABEL(CallJittedMethodRet4Vector128_NoSwiftError)
     str x21, [x11]
 LOCAL_LABEL(CallJittedMethodRet4Vector128_NoSwiftError):
+#endif
     ldr x9, [fp, #24]
     str x2, [x9]
     ldr x9, [fp, #16]
@@ -3205,6 +3241,8 @@ LOCAL_LABEL(CallJittedMethodRet4Vector128_NoSwiftError):
     EPILOG_RETURN
 NESTED_END CallJittedMethodRet4Vector128, _TEXT
 
+#ifdef TARGET_APPLE
+
 // X0 - routines array
 // X1 - interpreter stack args location
 // X2 - interpreter stack return value location
@@ -3248,6 +3286,7 @@ LEAF_ENTRY SwiftLoweredReturnTerminator
     br x11
 LEAF_END SwiftLoweredReturnTerminator
 
+#endif // TARGET_APPLE
 
 #endif // FEATURE_INTERPRETER
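The SwiftLoweredReturnTerminator guarded above ends the routine chain: every helper in this file finishes with ldr x11, [x10], #8 / br x11, fetching the next entry of the routines array and tail-branching to it, so the array behaves like threaded code. A rough C++ analogue of that dispatch (Entry, Next, and the sample routines are illustrative names, not runtime code):

    #include <cstdio>

    // A routines array as threaded code: each routine does its work, then
    // fetches the next entry and tail-calls it, mirroring the assembly's
    // ldr x11, [x10], #8 / br x11 sequence.
    struct Entry;
    using Routine = void (*)(const Entry*& pc);
    struct Entry { Routine fn; };

    static void Next(const Entry*& pc)
    {
        Routine f = pc->fn;
        ++pc;
        f(pc);
    }

    static void LoadArg(const Entry*& pc)  { printf("load argument\n");      Next(pc); }
    static void StoreRet(const Entry*& pc) { printf("store return value\n"); Next(pc); }
    static void Terminate(const Entry*&)   { printf("chain finished\n"); }

    int main()
    {
        const Entry routines[] = { { LoadArg }, { StoreRet }, { Terminate } };
        const Entry* pc = routines;
        Next(pc);
        return 0;
    }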
diff --git a/src/coreclr/vm/callstubgenerator.cpp b/src/coreclr/vm/callstubgenerator.cpp
index fa9579941cf0f5..c2bfd910ebfa21 100644
--- a/src/coreclr/vm/callstubgenerator.cpp
+++ b/src/coreclr/vm/callstubgenerator.cpp
@@ -1096,7 +1096,7 @@ PCODE CallStubGenerator::GetFPReg32RangeRoutine(int x1, int x2)
 PCODE CallStubGenerator::GetRegRoutine_4B(int r1, int r2)
 {
 #if LOG_COMPUTE_CALL_STUB
-    printf("GetRegRoutine_4B\n");
+    LOG2((LF2_INTERPRETER, LL_INFO10000, "GetRegRoutine_4B\n"));
 #endif
     static const PCODE GPRegLoadRoutines_4B[] = {
         (PCODE)0, (PCODE)Load_R0_R1_4B, (PCODE)0, (PCODE)Load_R0_R1_R2_R3_4B,
@@ -1118,7 +1118,7 @@ PCODE CallStubGenerator::GetRegRoutine_4B(int r1, int r2)
 PCODE CallStubGenerator::GetStackRoutine_4B()
 {
 #if LOG_COMPUTE_CALL_STUB
-    printf("GetStackRoutine_4B\n");
+    LOG2((LF2_INTERPRETER, LL_INFO10000, "GetStackRoutine_4B\n"));
 #endif
     return m_interpreterToNative ? (PCODE)Load_Stack_4B : (PCODE)Store_Stack_4B;
 }
@@ -1127,7 +1127,7 @@ PCODE CallStubGenerator::GetStackRoutine_4B()
 PCODE CallStubGenerator::GetSwiftSelfRoutine()
 {
 #if LOG_COMPUTE_CALL_STUB
-    printf("GetSwiftSelfRoutine\n");
+    LOG2((LF2_INTERPRETER, LL_INFO10000, "GetSwiftSelfRoutine\n"));
 #endif
     return (PCODE)Load_SwiftSelf;
 }
@@ -1135,7 +1135,7 @@ PCODE CallStubGenerator::GetSwiftSelfRoutine()
 PCODE CallStubGenerator::GetSwiftSelfByRefRoutine()
 {
 #if LOG_COMPUTE_CALL_STUB
-    printf("GetSwiftSelfByRefRoutine\n");
+    LOG2((LF2_INTERPRETER, LL_INFO10000, "GetSwiftSelfByRefRoutine\n"));
 #endif
     return (PCODE)Load_SwiftSelf_ByRef;
 }
@@ -1143,7 +1143,7 @@ PCODE CallStubGenerator::GetSwiftSelfByRefRoutine()
 PCODE CallStubGenerator::GetSwiftErrorRoutine()
 {
 #if LOG_COMPUTE_CALL_STUB
-    printf("GetSwiftErrorRoutine\n");
+    LOG2((LF2_INTERPRETER, LL_INFO10000, "GetSwiftErrorRoutine\n"));
 #endif
     return (PCODE)Load_SwiftError;
 }
@@ -1151,7 +1151,7 @@ PCODE CallStubGenerator::GetSwiftErrorRoutine()
 PCODE CallStubGenerator::GetSwiftIndirectResultRoutine()
 {
 #if LOG_COMPUTE_CALL_STUB
-    printf("GetSwiftIndirectResultRoutine\n");
+    LOG2((LF2_INTERPRETER, LL_INFO10000, "GetSwiftIndirectResultRoutine\n"));
 #endif
     return (PCODE)Load_SwiftIndirectResult;
 }
@@ -2061,7 +2061,7 @@ void CallStubGenerator::ComputeCallStubWorker(bool hasUnmanagedCallConv, CorInfo
     if (swiftIndirectResultCount > 0)
     {
 #if LOG_COMPUTE_CALL_STUB
-        printf("Emitting Load_SwiftIndirectResult routine\n");
+        LOG2((LF2_INTERPRETER, LL_INFO10000, "Emitting Load_SwiftIndirectResult routine\n"));
 #endif
         TerminateCurrentRoutineIfNotOfNewType(RoutineType::SwiftIndirectResult, pRoutines);
         pRoutines[m_routineIndex++] = GetSwiftIndirectResultRoutine();
@@ -2791,7 +2791,7 @@ void CallStubGenerator::RewriteSignatureForSwiftLowering(MetaSig &sig, SigBuilde
         m_hasSwiftReturnLowering = true;
         m_swiftReturnLowering = lowering;
 #if LOG_COMPUTE_CALL_STUB
-        printf("Swift return lowering detected: %d elements\n", lowering.numLoweredElements);
+        LOG2((LF2_INTERPRETER, LL_INFO10000, "Swift return lowering detected: %d elements\n", lowering.numLoweredElements));
 #endif
     }
 }
@@ -3039,7 +3039,7 @@ bool CallStubGenerator::ProcessSwiftSpecialArgument(MethodTable* pArgMT, int int
 #endif // DEBUG
 
 #if LOG_COMPUTE_CALL_STUB
-        printf("SwiftSelf<T> argument detected\n");
+        LOG2((LF2_INTERPRETER, LL_INFO10000, "SwiftSelf<T> argument detected\n"));
 #endif
         TerminateCurrentRoutineIfNotOfNewType(RoutineType::SwiftSelfByRef, pRoutines);
         m_currentRoutineType = RoutineType::SwiftSelfByRef;
@@ -3053,7 +3053,7 @@ bool CallStubGenerator::ProcessSwiftSpecialArgument(MethodTable* pArgMT, int int
     if (pArgMT == CoreLibBinder::GetClass(CLASS__SWIFT_SELF))
     {
 #if LOG_COMPUTE_CALL_STUB
-        printf("Swift Self argument detected\n");
+        LOG2((LF2_INTERPRETER, LL_INFO10000, "Swift Self argument detected\n"));
 #endif
 
         TerminateCurrentRoutineIfNotOfNewType(RoutineType::SwiftSelf, pRoutines);
@@ -3065,7 +3065,7 @@ bool CallStubGenerator::ProcessSwiftSpecialArgument(MethodTable* pArgMT, int int
     if (pArgMT == CoreLibBinder::GetClass(CLASS__SWIFT_ERROR))
    {
 #if LOG_COMPUTE_CALL_STUB
-        printf("Swift Error argument detected\n");
+        LOG2((LF2_INTERPRETER, LL_INFO10000, "Swift Error argument detected\n"));
 #endif
 
         TerminateCurrentRoutineIfNotOfNewType(RoutineType::SwiftError, pRoutines);
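A note on the LOG2 form these hunks switch to: the double parentheses pass the whole facility/level/format list through a single macro parameter, so the call can expand to a real function invocation when logging is compiled in, or to nothing at all when it is not. A self-contained sketch of the idiom (DEMO_LOG and demo_log are stand-ins; the runtime's actual logging macro performs richer facility/level filtering):

    #include <cstdarg>
    #include <cstdio>

    #define DEMO_LOGGING 1

    // DEMO_LOG((f, l, "fmt", ...)) expands to demo_log(f, l, "fmt", ...) when
    // enabled, and to nothing when disabled; the extra parentheses are what let
    // a variable-length argument list travel through one macro parameter.
    #if DEMO_LOGGING
    #define DEMO_LOG(args) demo_log args
    #else
    #define DEMO_LOG(args) do { } while (0)
    #endif

    static void demo_log(int facility, int level, const char* fmt, ...)
    {
        va_list ap;
        va_start(ap, fmt);
        printf("[facility=%d level=%d] ", facility, level);
        vprintf(fmt, ap);
        va_end(ap);
    }

    int main()
    {
        DEMO_LOG((2, 10000, "GetRegRoutine_4B\n"));
        DEMO_LOG((2, 10000, "lowered element: offset=%d\n", 8));
        return 0;
    }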
@@ -3089,8 +3089,8 @@ void CallStubGenerator::EmitSwiftLoweredElementRoutine(SwiftLoweringElement &ele
         PCODE packedData = (PCODE)elem.offset | ((PCODE)elem.structSize << 16);
         pRoutines[m_routineIndex++] = packedData;
 #if LOG_COMPUTE_CALL_STUB
-        printf("Swift lowered element to FP reg: offset=%d, structSize=%d, reg=d%d\n",
-            elem.offset, elem.structSize, regIndex);
+        LOG2((LF2_INTERPRETER, LL_INFO10000, "Swift lowered element to FP reg: offset=%d, structSize=%d, reg=d%d\n",
+            elem.offset, elem.structSize, regIndex));
 #endif
     }
     else if (!elem.isFloat && argLocDesc.m_cGenReg > 0)
@@ -3101,8 +3101,8 @@ void CallStubGenerator::EmitSwiftLoweredElementRoutine(SwiftLoweringElement &ele
         PCODE packedData = (PCODE)elem.offset | ((PCODE)elem.structSize << 16);
         pRoutines[m_routineIndex++] = packedData;
 #if LOG_COMPUTE_CALL_STUB
-        printf("Swift lowered element to GP reg: offset=%d, structSize=%d, reg=x%d\n",
-            elem.offset, elem.structSize, regIndex);
+        LOG2((LF2_INTERPRETER, LL_INFO10000, "Swift lowered element to GP reg: offset=%d, structSize=%d, reg=x%d\n",
+            elem.offset, elem.structSize, regIndex));
 #endif
     }
     else
@@ -3115,8 +3115,8 @@ void CallStubGenerator::EmitSwiftLoweredElementRoutine(SwiftLoweringElement &ele
             ((PCODE)argLocDesc.m_byteStackIndex << 32);
         pRoutines[m_routineIndex++] = packedData;
 #if LOG_COMPUTE_CALL_STUB
-        printf("Swift lowered element to stack: offset=%d, structSize=%d, stackOffset=%d\n",
-            elem.offset, elem.structSize, argLocDesc.m_byteStackIndex);
+        LOG2((LF2_INTERPRETER, LL_INFO10000, "Swift lowered element to stack: offset=%d, structSize=%d, stackOffset=%d\n",
+            elem.offset, elem.structSize, argLocDesc.m_byteStackIndex));
 #endif
     }
 }
@@ -3140,7 +3140,7 @@ void CallStubGenerator::EmitSwiftReturnLoweringRoutines(PCODE *pRoutines)
             pRoutines[m_routineIndex++] = (PCODE)offset;
             fpRegIndex++;
 #if LOG_COMPUTE_CALL_STUB
-            printf("Swift return store FP d%d at offset %d\n", fpRegIndex - 1, offset);
+            LOG2((LF2_INTERPRETER, LL_INFO10000, "Swift return store FP d%d at offset %d\n", fpRegIndex - 1, offset));
 #endif
         }
         else
@@ -3150,7 +3150,7 @@ void CallStubGenerator::EmitSwiftReturnLoweringRoutines(PCODE *pRoutines)
             pRoutines[m_routineIndex++] = (PCODE)offset;
             gpRegIndex++;
 #if LOG_COMPUTE_CALL_STUB
-            printf("Swift return store GP x%d at offset %d\n", gpRegIndex - 1, offset);
+            LOG2((LF2_INTERPRETER, LL_INFO10000, "Swift return store GP x%d at offset %d\n", gpRegIndex - 1, offset));
 #endif
         }
     }
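For reference, the packed immediates visible in the context lines above encode a lowered element as one PCODE-sized word: the field offset in bits 0-15, the struct size in bits 16-31, and, in the stack case, the stack byte index starting at bit 32. A standalone pack/unpack check, assuming a 64-bit PCODE and made-up element values:

    #include <cstdint>
    #include <cstdio>

    int main()
    {
        // Hypothetical lowered element: field at offset 8 of a 24-byte struct,
        // placed at stack byte index 16.
        uint64_t offset = 8, structSize = 24, byteStackIndex = 16;

        // Mirrors: (PCODE)elem.offset | ((PCODE)elem.structSize << 16)
        //          | ((PCODE)argLocDesc.m_byteStackIndex << 32)
        uint64_t packed = offset | (structSize << 16) | (byteStackIndex << 32);

        printf("offset=%llu size=%llu stackIndex=%llu\n",
               (unsigned long long)(packed & 0xFFFF),
               (unsigned long long)((packed >> 16) & 0xFFFF),
               (unsigned long long)(packed >> 32));
        return 0;
    }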
From d0710f518082a98210ddcb05d18700ae94117210 Mon Sep 17 00:00:00 2001
From: Milos Kotlar
Date: Wed, 4 Feb 2026 10:59:20 +0100
Subject: [PATCH 33/33] Fix arg index and interpreter stack offset for lowered structs

---
 src/coreclr/vm/callstubgenerator.cpp | 12 ++++++++++--
 1 file changed, 10 insertions(+), 2 deletions(-)

diff --git a/src/coreclr/vm/callstubgenerator.cpp b/src/coreclr/vm/callstubgenerator.cpp
index c2bfd910ebfa21..9390fc04555f2d 100644
--- a/src/coreclr/vm/callstubgenerator.cpp
+++ b/src/coreclr/vm/callstubgenerator.cpp
@@ -2104,6 +2104,7 @@ void CallStubGenerator::ComputeCallStubWorker(bool hasUnmanagedCallConv, CorInfo
                 if (ProcessSwiftSpecialArgument(pArgMT, interpStackSlotSize, interpreterStackOffset, pRoutines))
                 {
+                    swiftArgIndex++;
                     continue;
                 }
             }
 
@@ -2134,7 +2135,6 @@ void CallStubGenerator::ComputeCallStubWorker(bool hasUnmanagedCallConv, CorInfo
                 interpStackSlotSize = ALIGN_UP(thArgTypeHandle.GetSize(), align);
             }
 
-            interpreterStackOffset += interpStackSlotSize;
 
 #if defined(TARGET_APPLE) && defined(TARGET_ARM64)
             if (isSwiftCallConv && m_interpreterToNative && swiftArgIndex < (int)swiftLoweringInfo.Size())
@@ -2144,12 +2144,18 @@ void CallStubGenerator::ComputeCallStubWorker(bool hasUnmanagedCallConv, CorInfo
 
                 if (elem.isLowered)
                 {
+                    if (elem.structSize != 0)
+                    {
+                        interpreterStackOffset += elem.structSize;
+                    }
                     EmitSwiftLoweredElementRoutine(elem, argLocDesc, pRoutines);
                     continue;
                 }
             }
 #endif // TARGET_APPLE && TARGET_ARM64
 
+            interpreterStackOffset += interpStackSlotSize;
+
 #ifdef UNIX_AMD64_ABI
             ArgLocDesc* argLocDescForStructInRegs = argIt.GetArgLocDescForStructInRegs();
             if (argLocDescForStructInRegs != NULL)
@@ -2955,9 +2961,11 @@ void CallStubGenerator::RewriteSignatureForSwiftLowering(MetaSig &sig, SigBuilde
         switch (lowering.loweredElements[i])
         {
             case CORINFO_TYPE_BYTE:
-            case CORINFO_TYPE_UBYTE:
                 swiftSigBuilder.AppendElementType(ELEMENT_TYPE_I1);
                 break;
+            case CORINFO_TYPE_UBYTE:
+                swiftSigBuilder.AppendElementType(ELEMENT_TYPE_U1);
+                break;
             case CORINFO_TYPE_SHORT:
                 swiftSigBuilder.AppendElementType(ELEMENT_TYPE_I2);
                 break;
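The I1/U1 split in the final hunk matters because the element type controls how a lowered byte widens into a register: ELEMENT_TYPE_I1 sign-extends, while ELEMENT_TYPE_U1 zero-extends, so mapping CORINFO_TYPE_UBYTE to I1 could corrupt the high bits of a lowered value. A standalone illustration of the difference the fix avoids:

    #include <cstdint>
    #include <cstdio>

    int main()
    {
        uint8_t raw = 0xFF; // a lowered Swift byte with the high bit set

        int64_t widenedSigned   = (int8_t)raw;  // I1 behavior: sign-extends to -1
        int64_t widenedUnsigned = (uint8_t)raw; // U1 behavior: zero-extends to 255

        printf("as I1: %lld, as U1: %lld\n",
               (long long)widenedSigned, (long long)widenedUnsigned);
        return 0;
    }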