diff --git a/src/coreclr/jit/codegencommon.cpp b/src/coreclr/jit/codegencommon.cpp
index 892933a516b183..01787693973dbb 100644
--- a/src/coreclr/jit/codegencommon.cpp
+++ b/src/coreclr/jit/codegencommon.cpp
@@ -1555,7 +1555,6 @@ bool CodeGen::genCreateAddrMode(GenTree* addr,
     return true;
 }
 
-#ifndef TARGET_WASM
 //------------------------------------------------------------------------
 // genEmitCallWithCurrentGC:
 //   Emit a call with GC information captured from current GC information.
@@ -1570,7 +1569,6 @@ void CodeGen::genEmitCallWithCurrentGC(EmitCallParams& params)
     params.byrefRegs = gcInfo.gcRegByrefSetCur;
     GetEmitter()->emitIns_Call(params);
 }
-#endif // !TARGET_WASM
 
 /*****************************************************************************
  *
@@ -5737,6 +5735,8 @@ CORINFO_FIELD_HANDLE CodeGen::genEmitAsyncResumeInfo(unsigned stateNum)
     return compiler->eeFindJitDataOffs(baseOffs + stateNum * sizeof(CORINFO_AsyncResumeInfo));
 }
 
+#endif // !TARGET_WASM
+
 //------------------------------------------------------------------------
 // getCallTarget - Get the node that evaluates to the call target
 //
@@ -5814,6 +5814,8 @@ regNumber CodeGen::getCallIndirectionCellReg(GenTreeCall* call)
     return result;
 }
 
+#if !defined(TARGET_WASM)
+
 //------------------------------------------------------------------------
 // genDefinePendingLabel - If necessary, define the pending call label after a
 // call instruction was emitted.
diff --git a/src/coreclr/jit/codegenwasm.cpp b/src/coreclr/jit/codegenwasm.cpp
index 3f9520e66cf165..c9f884ff4f1afa 100644
--- a/src/coreclr/jit/codegenwasm.cpp
+++ b/src/coreclr/jit/codegenwasm.cpp
@@ -511,6 +511,10 @@ void CodeGen::genCodeForTreeNode(GenTree* treeNode)
             genCodeForStoreInd(treeNode->AsStoreInd());
             break;
 
+        case GT_CALL:
+            genCall(treeNode->AsCall());
+            break;
+
         default:
 #ifdef DEBUG
             NYIRAW(GenTree::OpName(treeNode->OperGet()));
@@ -1016,45 +1020,61 @@ void CodeGen::genCodeForDivMod(GenTreeOp* treeNode)
 //
 void CodeGen::genCodeForConstant(GenTree* treeNode)
 {
-    instruction    ins;
-    cnsval_ssize_t bits;
+    instruction    ins  = INS_none;
+    cnsval_ssize_t bits = 0;
 
     var_types type = treeNode->TypeIs(TYP_REF, TYP_BYREF) ? TYP_I_IMPL : treeNode->TypeGet();
     static_assert(sizeof(cnsval_ssize_t) >= sizeof(double));
-    switch (type)
+    GenTreeIntConCommon* icon = nullptr;
+    if ((type == TYP_INT) || (type == TYP_LONG))
     {
-        case TYP_INT:
-        {
-            ins                      = INS_i32_const;
-            GenTreeIntConCommon* con = treeNode->AsIntConCommon();
-            bits                     = con->IntegralValue();
-            break;
-        }
-        case TYP_LONG:
+        icon = treeNode->AsIntConCommon();
+        if (icon->ImmedValNeedsReloc(compiler))
         {
-            ins                      = INS_i64_const;
-            GenTreeIntConCommon* con = treeNode->AsIntConCommon();
-            bits                     = con->IntegralValue();
-            break;
+            // WASM-TODO: Generate reloc for this handle
+            ins  = INS_I_const;
+            bits = 0;
         }
-        case TYP_FLOAT:
+        else
         {
-            ins                = INS_f32_const;
-            GenTreeDblCon* con = treeNode->AsDblCon();
-            double value       = con->DconValue();
-            memcpy(&bits, &value, sizeof(double));
-            break;
+            bits = icon->IntegralValue();
         }
-        case TYP_DOUBLE:
+    }
+
+    if (ins == INS_none)
+    {
+        switch (type)
         {
-            ins                = INS_f64_const;
-            GenTreeDblCon* con = treeNode->AsDblCon();
-            double value       = con->DconValue();
-            memcpy(&bits, &value, sizeof(double));
-            break;
+            case TYP_INT:
+            {
+                ins = INS_i32_const;
+                assert(((INT64)(INT32)bits) == bits);
+                break;
+            }
+            case TYP_LONG:
+            {
+                ins = INS_i64_const;
+                break;
+            }
+            case TYP_FLOAT:
+            {
+                ins                = INS_f32_const;
+                GenTreeDblCon* con = treeNode->AsDblCon();
+                double value       = con->DconValue();
+                memcpy(&bits, &value, sizeof(double));
+                break;
+            }
+            case TYP_DOUBLE:
+            {
+                ins                = INS_f64_const;
+                GenTreeDblCon* con = treeNode->AsDblCon();
+                double value       = con->DconValue();
+                memcpy(&bits, &value, sizeof(double));
+                break;
+            }
+            default:
+                unreached();
         }
-        default:
-            unreached();
     }
 
     // The IF_ for the selected instruction, i.e. IF_F64, determines how these bits are emitted
@@ -1373,6 +1393,155 @@ void CodeGen::genCodeForStoreInd(GenTreeStoreInd* tree)
     genUpdateLife(tree);
 }
 
+//------------------------------------------------------------------------
+// genCall: Produce code for a GT_CALL node
+//
+void CodeGen::genCall(GenTreeCall* call)
+{
+    if (call->NeedsNullCheck())
+    {
+        NYI_WASM("Insert nullchecks for calls that need it in lowering");
+    }
+
+    assert(!call->IsTailCall());
+
+    genCallInstruction(call);
+    genProduceReg(call);
+}
+
+//------------------------------------------------------------------------
+// genCallInstruction - Generate instructions necessary to transfer control to the call.
+//
+// Arguments:
+//    call - the GT_CALL node
+//
+void CodeGen::genCallInstruction(GenTreeCall* call)
+{
+    EmitCallParams params;
+    params.isJump      = call->IsFastTailCall();
+    params.hasAsyncRet = call->IsAsync();
+
+    // We need to propagate the debug information to the call instruction, so we can emit
+    // an IL to native mapping record for the call, to support managed return value debugging.
+    // We don't want tail call helper calls that were converted from normal calls to get a record,
+    // so we skip this hash table lookup logic in that case.
+    if (compiler->opts.compDbgInfo && compiler->genCallSite2DebugInfoMap != nullptr && !call->IsTailCall())
+    {
+        DebugInfo di;
+        (void)compiler->genCallSite2DebugInfoMap->Lookup(call, &di);
+        params.debugInfo = di;
+    }
+
+#ifdef DEBUG
+    // Pass the call signature information down into the emitter so the emitter can associate
+    // native call sites with the signatures they were generated from.
+    if (!call->IsHelperCall())
+    {
+        params.sigInfo = call->callSig;
+    }
+#endif // DEBUG
+
+    GenTree* target = getCallTarget(call, &params.methHnd);
+
+    if (target != nullptr)
+    {
+        // Codegen should have already evaluated our target node (last) and pushed it onto the stack,
+        // ready for call_indirect. Consume it.
+        genConsumeReg(target);
+
+        params.callType = EC_INDIR_R;
+        genEmitCallWithCurrentGC(params);
+    }
+    else
+    {
+        // If we have no target and this is a call with indirection cell then
+        // we do an optimization where we load the call address directly from
+        // the indirection cell instead of duplicating the tree. In BuildCall
+        // we ensure that we get an extra register for the purpose. Note that for
+        // CFG the call might have changed to
+        // CORINFO_HELP_DISPATCH_INDIRECT_CALL in which case we still have the
+        // indirection cell but we should not try to optimize.
+        WellKnownArg indirectionCellArgKind = WellKnownArg::None;
+        if (!call->IsHelperCall(compiler, CORINFO_HELP_DISPATCH_INDIRECT_CALL))
+        {
+            indirectionCellArgKind = call->GetIndirectionCellArgKind();
+        }
+
+        if (indirectionCellArgKind != WellKnownArg::None)
+        {
+            assert(call->IsR2ROrVirtualStubRelativeIndir());
+
+            params.callType = EC_INDIR_R;
+            // params.ireg  = targetAddrReg;
+            genEmitCallWithCurrentGC(params);
+        }
+        else
+        {
+            // Generate a direct call to a non-virtual user defined or helper method
+            assert(call->IsHelperCall() || (call->gtCallType == CT_USER_FUNC));
+
+            if (call->gtEntryPoint.addr != NULL)
+            {
+                NYI_WASM("Call with statically known address");
+            }
+            else
+            {
+                if (call->IsHelperCall())
+                {
+                    NYI_WASM("Call helper statically without indirection cell");
+                    CorInfoHelpFunc helperNum = compiler->eeGetHelperNum(params.methHnd);
+                    noway_assert(helperNum != CORINFO_HELP_UNDEF);
+
+                    CORINFO_CONST_LOOKUP helperLookup = compiler->compGetHelperFtn(helperNum);
+                    params.addr                       = helperLookup.addr;
+                    assert(helperLookup.accessType == IAT_VALUE);
+                }
+                else
+                {
+                    // Direct call to a non-virtual user function.
+                    params.addr = call->gtDirectCallAddress;
+                }
+            }
+
+            params.callType = EC_FUNC_TOKEN;
+            genEmitCallWithCurrentGC(params);
+        }
+    }
+}
+
+/*****************************************************************************
+ * Emit a call to a helper function.
+ */
+void CodeGen::genEmitHelperCall(unsigned helper, int argSize, emitAttr retSize, regNumber callTargetReg /*= REG_NA */)
+{
+    EmitCallParams params;
+
+    CORINFO_CONST_LOOKUP helperFunction = compiler->compGetHelperFtn((CorInfoHelpFunc)helper);
+    params.ireg                         = callTargetReg;
+
+    if (helperFunction.accessType == IAT_VALUE)
+    {
+        params.callType = EC_FUNC_TOKEN;
+        params.addr     = helperFunction.addr;
+    }
+    else
+    {
+        params.addr = nullptr;
+        assert(helperFunction.accessType == IAT_PVALUE);
+        void* pAddr = helperFunction.addr;
+
+        // Push indirection cell address onto stack for genEmitCall to dereference
+        GetEmitter()->emitIns_I(INS_i32_const, emitActualTypeSize(TYP_I_IMPL), (cnsval_ssize_t)pAddr);
+
+        params.callType = EC_INDIR_R;
+    }
+
+    params.methHnd = compiler->eeFindHelper(helper);
+    params.argSize = argSize;
+    params.retSize = retSize;
+
+    genEmitCallWithCurrentGC(params);
+}
+
 //------------------------------------------------------------------------
 // genCodeForCompare: Produce code for a GT_EQ/GT_NE/GT_LT/GT_LE/GT_GE/GT_GT node.
 //
diff --git a/src/coreclr/jit/emit.cpp b/src/coreclr/jit/emit.cpp
index 90c9613b517b47..44f0d26545cfc8 100644
--- a/src/coreclr/jit/emit.cpp
+++ b/src/coreclr/jit/emit.cpp
@@ -3566,6 +3566,8 @@ void emitter::emitSetSecondRetRegGCType(instrDescCGCA* id, emitAttr secondRetSiz
 }
 #endif // MULTIREG_HAS_SECOND_GC_RET
 
+#ifndef TARGET_WASM
+
 /*****************************************************************************
  *
  *  Allocate an instruction descriptor for an indirect call.
@@ -3731,6 +3733,8 @@ emitter::instrDesc* emitter::emitNewInstrCallDir(int argCnt,
     }
 }
 
+#endif // TARGET_WASM
+
 /*****************************************************************************
  *
  *  Be very careful, some instruction descriptors are allocated as "tiny" and
diff --git a/src/coreclr/jit/emitfmtswasm.h b/src/coreclr/jit/emitfmtswasm.h
index 55b852649192f5..226895e975c14a 100644
--- a/src/coreclr/jit/emitfmtswasm.h
+++ b/src/coreclr/jit/emitfmtswasm.h
@@ -26,16 +26,17 @@ enum ID_OPS // (unused)
 //////////////////////////////////////////////////////////////////////////////
 
-IF_DEF(NONE, IS_NONE, NONE)
-IF_DEF(OPCODE, IS_NONE, NONE)      // <opcode>
-IF_DEF(BLOCK, IS_NONE, NONE)       // <opcode> <0x40>
-IF_DEF(RAW_ULEB128, IS_NONE, NONE) // <uleb128>
-IF_DEF(ULEB128, IS_NONE, NONE)     // <opcode> <uleb128>
-IF_DEF(SLEB128, IS_NONE, NONE)     // <opcode> <sleb128>
-IF_DEF(F32, IS_NONE, NONE)         // <opcode> <float32>
-IF_DEF(F64, IS_NONE, NONE)         // <opcode> <float64>
-IF_DEF(MEMARG, IS_NONE, NONE)      // <opcode> (<align> <offset>)
-IF_DEF(LOCAL_DECL, IS_NONE, NONE)  // <count> <valtype>
+IF_DEF(NONE, IS_NONE, NONE)
+IF_DEF(OPCODE, IS_NONE, NONE)        // <opcode>
+IF_DEF(BLOCK, IS_NONE, NONE)         // <opcode> <0x40>
+IF_DEF(RAW_ULEB128, IS_NONE, NONE)   // <uleb128>
+IF_DEF(ULEB128, IS_NONE, NONE)       // <opcode> <uleb128>
+IF_DEF(SLEB128, IS_NONE, NONE)       // <opcode> <sleb128>
+IF_DEF(F32, IS_NONE, NONE)           // <opcode> <float32>
+IF_DEF(F64, IS_NONE, NONE)           // <opcode> <float64>
+IF_DEF(MEMARG, IS_NONE, NONE)        // <opcode> (<align> <offset>)
+IF_DEF(LOCAL_DECL, IS_NONE, NONE)    // <count> <valtype>
+IF_DEF(CALL_INDIRECT, IS_NONE, NONE) // <opcode> <typeidx> <tableidx>
 
 #undef IF_DEF
 
 #endif // !DEFINE_ID_OPS
diff --git a/src/coreclr/jit/emitwasm.cpp b/src/coreclr/jit/emitwasm.cpp
index 8d08ab6a9bd826..1dd9a71334d2d3 100644
--- a/src/coreclr/jit/emitwasm.cpp
+++ b/src/coreclr/jit/emitwasm.cpp
@@ -117,6 +117,71 @@ bool emitter::emitInsIsStore(instruction ins)
     return false;
 }
 
+/*****************************************************************************
+ *
+ *  Add a call instruction (direct or indirect).
+ *
+ *  Unless callType is EC_FUNC_TOKEN, addr needs to be null.
+ *
+ */
+
+void emitter::emitIns_Call(const EmitCallParams& params)
+{
+    /* Sanity check the arguments depending on callType */
+
+    assert(params.callType < EC_COUNT);
+    assert((params.callType == EC_FUNC_TOKEN) || (params.addr == nullptr));
+
+    /* Managed RetVal: emit sequence point for the call */
+    if (emitComp->opts.compDbgInfo && params.debugInfo.GetLocation().IsValid())
+    {
+        codeGen->genIPmappingAdd(IPmappingDscKind::Normal, params.debugInfo, false);
+    }
+
+    /*
+        We need to allocate the appropriate instruction descriptor based
+        on whether this is a direct/indirect call, and whether we need to
+        record an updated set of live GC variables.
+     */
+    instrDesc* id = nullptr;
+
+    instruction ins;
+
+    // FIXME-WASM: Currently while we're loading SP onto the stack we're not loading PEP, so generate one here.
+    emitIns_I(INS_i32_const, EA_4BYTE, 0);
+
+    switch (params.callType)
+    {
+        case EC_FUNC_TOKEN:
+            ins = params.isJump ? INS_return_call : INS_call;
+            id  = emitNewInstrSC(EA_8BYTE, 0 /* FIXME-WASM: function index reloc */);
+            id->idIns(ins);
+            id->idInsFmt(IF_ULEB128);
+            break;
+        case EC_INDIR_R:
+            // Indirect load of actual ftn ptr from indirection cell (on the stack)
+            // TODO-WASM: temporary, move this into higher layers (lowering).
+            emitIns_I(INS_i32_load, EA_PTRSIZE, 0);
+            ins = params.isJump ? INS_return_call_indirect : INS_call_indirect;
+            id  = emitNewInstrSC(EA_8BYTE, 0 /* FIXME-WASM: type index reloc */);
+            id->idIns(ins);
+            id->idInsFmt(IF_CALL_INDIRECT);
+            break;
+        default:
+            unreached();
+    }
+
+    if (m_debugInfoSize > 0)
+    {
+        INDEBUG(id->idDebugOnlyInfo()->idCallSig = params.sigInfo);
+        id->idDebugOnlyInfo()->idMemCookie       = (size_t)params.methHnd; // method token
+    }
+
+    dispIns(id);
+    appendToCurIG(id);
+    // emitLastMemBarrier = nullptr; // Cannot optimize away future memory barriers
+}
+
 //-----------------------------------------------------------------------------
 // emitNewInstrLclVarDecl: Construct an instrDesc corresponding to a wasm local
 //    declaration.
@@ -305,6 +370,12 @@ unsigned emitter::instrDesc::idCodeSize() const
         case IF_SLEB128:
             size += idIsCnsReloc() ? PADDED_RELOC_SIZE : SizeOfSLEB128(emitGetInsSC(this));
             break;
+        case IF_CALL_INDIRECT:
+        {
+            size += SizeOfULEB128(emitGetInsSC(this));
+            size += SizeOfULEB128(0);
+            break;
+        }
         case IF_F32:
             size += 4;
             break;
@@ -433,6 +504,13 @@ size_t emitter::emitOutputInstr(insGroup* ig, instrDesc* id, BYTE** dp)
             dst += emitOutputSLEB128(dst, (int64_t)constant);
             break;
         }
+        case IF_CALL_INDIRECT:
+        {
+            dst += emitOutputByte(dst, opcode);
+            dst += emitOutputULEB128(dst, (uint64_t)emitGetInsSC(id));
+            dst += emitOutputULEB128(dst, 0);
+            break;
+        }
         case IF_F32:
         {
            dst += emitOutputOpcode(dst, ins);
@@ -606,6 +684,13 @@ void emitter::emitDispIns(
             }
             break;
 
+        case IF_CALL_INDIRECT:
+        {
+            cnsval_ssize_t imm = emitGetInsSC(id);
+            printf(" %llu 0", (uint64_t)imm);
+        }
+        break;
+
         case IF_LOCAL_DECL:
         {
             unsigned int count = emitGetLclVarDeclCount(id);
diff --git a/src/coreclr/jit/emitwasm.h b/src/coreclr/jit/emitwasm.h
index f6cb7f831496cc..3028635894fd89 100644
--- a/src/coreclr/jit/emitwasm.h
+++ b/src/coreclr/jit/emitwasm.h
@@ -42,18 +42,6 @@ static unsigned int emitGetLclVarDeclCount(const instrDesc* id);
 /*  Private members that deal with target-dependent instr. descriptors  */
 /************************************************************************/
 
-private:
-instrDesc* emitNewInstrCallDir(
-    int argCnt, VARSET_VALARG_TP GCvars, regMaskTP gcrefRegs, regMaskTP byrefRegs, emitAttr retSize, bool hasAsyncRet);
-
-instrDesc* emitNewInstrCallInd(int argCnt,
-                               ssize_t          disp,
-                               VARSET_VALARG_TP GCvars,
-                               regMaskTP        gcrefRegs,
-                               regMaskTP        byrefRegs,
-                               emitAttr         retSize,
-                               bool             hasAsyncRet);
-
 /************************************************************************/
 /*  Private helpers for instruction output                              */
 /************************************************************************/
diff --git a/src/coreclr/jit/instrswasm.h b/src/coreclr/jit/instrswasm.h
index 7230e20a64bf7e..6cf96023a85e9f 100644
--- a/src/coreclr/jit/instrswasm.h
+++ b/src/coreclr/jit/instrswasm.h
@@ -24,21 +24,26 @@
 // control flow
 //
 
-INST(invalid, "INVALID", 0, IF_NONE, BAD_CODE)
-INST(unreachable, "unreachable", 0, IF_OPCODE, 0x00)
-INST(label, "label", 0, IF_RAW_ULEB128, 0x00)
-INST(local_cnt, "local.cnt", 0, IF_RAW_ULEB128, 0x00)
-INST(local_decl, "local", 0, IF_LOCAL_DECL, 0x00)
-INST(nop, "nop", 0, IF_OPCODE, 0x01)
-INST(block, "block", 0, IF_BLOCK, 0x02)
-INST(loop, "loop", 0, IF_BLOCK, 0x03)
-INST(if, "if", 0, IF_BLOCK, 0x04)
-INST(else, "else", 0, IF_OPCODE, 0x05)
-INST(end, "end", 0, IF_OPCODE, 0x0B)
-INST(br, "br", 0, IF_ULEB128, 0x0C)
-INST(br_if, "br_if", 0, IF_ULEB128, 0x0D)
-INST(br_table, "br_table", 0, IF_ULEB128, 0x0E)
-INST(return, "return", 0, IF_OPCODE, 0x0F)
+INST(invalid,              "INVALID",              0, IF_NONE,          BAD_CODE)
+INST(unreachable,          "unreachable",          0, IF_OPCODE,        0x00)
+INST(label,                "label",                0, IF_RAW_ULEB128,   0x00)
+INST(local_cnt,            "local.cnt",            0, IF_RAW_ULEB128,   0x00)
+INST(local_decl,           "local",                0, IF_LOCAL_DECL,    0x00)
+INST(nop,                  "nop",                  0, IF_OPCODE,        0x01)
+INST(block,                "block",                0, IF_BLOCK,         0x02)
+INST(loop,                 "loop",                 0, IF_BLOCK,         0x03)
+INST(if,                   "if",                   0, IF_BLOCK,         0x04)
+INST(else,                 "else",                 0, IF_OPCODE,        0x05)
+INST(end,                  "end",                  0, IF_OPCODE,        0x0B)
+INST(br,                   "br",                   0, IF_ULEB128,       0x0C)
+INST(br_if,                "br_if",                0, IF_ULEB128,       0x0D)
+INST(br_table,             "br_table",             0, IF_ULEB128,       0x0E)
+INST(return,               "return",               0, IF_OPCODE,        0x0F)
+INST(call,                 "call",                 0, IF_ULEB128,       0x10)
+INST(call_indirect,        "call_indirect",        0, IF_CALL_INDIRECT, 0x11)
+INST(return_call,          "return_call",          0, IF_ULEB128,       0x12)
+INST(return_call_indirect, "return_call_indirect", 0, IF_CALL_INDIRECT, 0x13)
+
 INST(drop, "drop", 0, IF_OPCODE, 0x1A)
 
 INST(local_get, "local.get", 0, IF_ULEB128, 0x20)
diff --git a/src/coreclr/jit/lower.cpp b/src/coreclr/jit/lower.cpp
index de0064ca3a1b4d..2e43febbe7e9ba 100644
--- a/src/coreclr/jit/lower.cpp
+++ b/src/coreclr/jit/lower.cpp
@@ -1975,7 +1975,7 @@ void Lowering::InsertPutArgReg(GenTree** argNode, const ABIPassingSegment& regis
 
     InsertBitCastIfNecessary(argNode, registerSegment);
 
-#ifdef HAS_FIXED_REGISTER_SET
+#if HAS_FIXED_REGISTER_SET
     GenTree* putArg = comp->gtNewPutArgReg(genActualType(*argNode), *argNode, registerSegment.GetRegister());
     BlockRange().InsertAfter(*argNode, putArg);
     *argNode        = putArg;
@@ -3155,6 +3155,11 @@ size_t Lowering::MarkCallPutArgAndFieldListNodes(GenTreeCall* call)
 //
 size_t Lowering::MarkPutArgAndFieldListNodes(GenTree* node)
 {
+#if !HAS_FIXED_REGISTER_SET
+    if (!node->OperIsPutArg() && !node->OperIsFieldList())
+        return 0;
+#endif
+
     assert(node->OperIsPutArg() || node->OperIsFieldList());
 
     assert((node->gtLIRFlags & LIR::Flags::Mark) == 0);
@@ -9243,13 +9248,13 @@ void Lowering::CheckCallArg(GenTree* arg)
 
             for (GenTreeFieldList::Use& use : list->Uses())
             {
-                assert(use.GetNode()->OperIsPutArg());
+                assert(!HAS_FIXED_REGISTER_SET || use.GetNode()->OperIsPutArg());
             }
         }
         break;
 
         default:
-            assert(arg->OperIsPutArg());
+            assert(!HAS_FIXED_REGISTER_SET || arg->OperIsPutArg());
            break;
    }
}
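
Note (reviewer sketch, not part of the patch): the new IF_CALL_INDIRECT handling sizes and emits two ULEB128 immediates after the opcode, a type index followed by a table index of 0, which is what SizeOfULEB128/emitOutputULEB128 are being asked to do above. The standalone C++ below illustrates that byte layout. The helper names (EncodeULEB128, SizeOfULEB128Sketch) and the example type index are hypothetical and are not the JIT's APIs; only the 0x11 opcode value and the typeidx-then-tableidx immediate order come from the WebAssembly spec.

// Standalone sketch of the call_indirect immediate encoding, assuming the
// reader just wants to see the bytes; not part of the patch above.
#include <cstdint>
#include <cstdio>
#include <vector>

// Encode an unsigned value as ULEB128: 7 payload bits per byte, with the high
// bit set on every byte except the last.
static size_t EncodeULEB128(uint64_t value, std::vector<uint8_t>& out)
{
    size_t count = 0;
    do
    {
        uint8_t byte = value & 0x7F;
        value >>= 7;
        if (value != 0)
        {
            byte |= 0x80; // continuation bit
        }
        out.push_back(byte);
        count++;
    } while (value != 0);
    return count;
}

// Size of the ULEB128 encoding without emitting it (the same quantity an
// idCodeSize-style computation needs for the call_indirect immediates).
static size_t SizeOfULEB128Sketch(uint64_t value)
{
    size_t size = 0;
    do
    {
        size++;
        value >>= 7;
    } while (value != 0);
    return size;
}

int main()
{
    // A call_indirect instruction is the 0x11 opcode followed by two ULEB128
    // immediates: the type index of the callee signature and the table index
    // (0 for the default indirect-call table).
    std::vector<uint8_t> code;
    uint64_t typeIndex  = 300; // hypothetical type index; takes two bytes to encode
    uint64_t tableIndex = 0;

    code.push_back(0x11); // call_indirect opcode
    size_t immSize = EncodeULEB128(typeIndex, code);
    immSize += EncodeULEB128(tableIndex, code);

    printf("encoded %zu immediate byte(s), predicted %zu\n", immSize,
           SizeOfULEB128Sketch(typeIndex) + SizeOfULEB128Sketch(tableIndex));
    for (uint8_t b : code)
    {
        printf("%02X ", b);
    }
    printf("\n"); // expected output: 11 AC 02 00
    return 0;
}

Running the sketch prints 11 AC 02 00, the same opcode-typeidx-tableidx layout the patched emitOutputInstr produces for an IF_CALL_INDIRECT descriptor whose constant is 300.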