diff --git a/lib/Backend/BackwardPass.cpp b/lib/Backend/BackwardPass.cpp index 6b4c9f9a0b3..2b334b079aa 100644 --- a/lib/Backend/BackwardPass.cpp +++ b/lib/Backend/BackwardPass.cpp @@ -2215,15 +2215,20 @@ BackwardPass::IsLazyBailOutCurrentlyNeeeded(IR::Instr * instr) const "liveFixedField is null, MergeSuccBlocksInfo might have not initialized it?" ); - if (instr->IsStFldVariant()) + // StFld LazyBailOut tag removal optimization. Given that this instr is a StFld variant and + // that there already is a BailOutOnImplicitCall tag on this instr, we can remove the + // LazyBailOut tag on this instr if the StFld is writing to a live fixed field. We cannot + // perform this optimization if a BailOutOnImplicitCall tag is absent because writing to + // a property can result in an implicit call that then can result in a lazy bailout. + if (instr->IsStFldVariant() && BailOutInfo::IsBailOutOnImplicitCalls(instr->GetBailOutKind())) { Assert(instr->GetDst()); - Js::PropertyId id = instr->GetDst()->GetSym()->AsPropertySym()->m_propertyId; - - // We only need to protect against SetFld if it is setting to one of the live fixed fields - return this->currentBlock->liveFixedFields->Test(id); + // We only need to protect against StFld if it is setting to one of the live fixed fields. + return currentBlock->liveFixedFields->Test(instr->GetDst()->GetSym()->AsPropertySym()->m_propertyId); } + // If no more fixed fields exist at this point in the block it is safe to assume that any field marked as + // a fixed field has been verified to have not been modified and thus a LazyBailOut tag is not necessary. return !this->currentBlock->liveFixedFields->IsEmpty(); } @@ -2333,7 +2338,7 @@ BackwardPass::DeadStoreTypeCheckBailOut(IR::Instr * instr) return; } - // If bailOutKind is equivTypeCheck then leave alone the bailout + // If bailOutKind is equivTypeCheck then leave the bailout alone. 
if (bailOutKind == IR::BailOutFailedEquivalentTypeCheck || bailOutKind == IR::BailOutFailedEquivalentFixedFieldTypeCheck) { @@ -2357,9 +2362,9 @@ BackwardPass::DeadStoreTypeCheckBailOut(IR::Instr * instr) } void -BackwardPass::DeadStoreLazyBailOut(IR::Instr * instr, bool needsLazyBailOut) +BackwardPass::DeadStoreLazyBailOut(IR::Instr * instr) { - if (!this->IsPrePass() && !needsLazyBailOut && instr->HasLazyBailOut()) + if (!this->IsPrePass() && instr->HasLazyBailOut()) { instr->ClearLazyBailOut(); if (!instr->HasBailOutInfo()) @@ -2441,12 +2446,13 @@ BackwardPass::DeadStoreImplicitCallBailOut(IR::Instr * instr, bool hasLiveFields // We have an implicit call bailout in the code, and we want to make sure that it's required. // Do this now, because only in the dead store pass do we have complete forward and backward liveness info. bool needsBailOutOnImplicitCall = this->IsImplicitCallBailOutCurrentlyNeeded(instr, mayNeedBailOnImplicitCall, needsLazyBailOut, hasLiveFields); + if(!UpdateImplicitCallBailOutKind(instr, needsBailOutOnImplicitCall, needsLazyBailOut)) { instr->ClearBailOutInfo(); - if (preOpBailOutInstrToProcess == instr) + if (this->preOpBailOutInstrToProcess == instr) { - preOpBailOutInstrToProcess = nullptr; + this->preOpBailOutInstrToProcess = nullptr; } #if DBG if (this->DoMarkTempObjectVerify()) @@ -2476,9 +2482,9 @@ BackwardPass::UpdateImplicitCallBailOutKind(IR::Instr *const instr, bool needsBa const bool hasMarkTempObject = bailOutKindWithBits & IR::BailOutMarkTempObject; - // Firstly, we remove the mark temp object bit, as it is not needed after the dead store pass. - // We will later skip removing BailOutOnImplicitCalls when there is a mark temp object bit regardless - // of `needsBailOutOnImplicitCall`. + // First we remove the mark temp object bit as it is not needed after the dead + // store pass. We will later skip removing BailOutOnImplicitCalls when there + // is a mark temp object bit regardless of needsBailOutOnImplicitCall. 
if (hasMarkTempObject) { instr->SetBailOutKind(bailOutKindWithBits & ~IR::BailOutMarkTempObject); @@ -2486,9 +2492,11 @@ BackwardPass::UpdateImplicitCallBailOutKind(IR::Instr *const instr, bool needsBa if (needsBailOutOnImplicitCall) { - // We decided that BailOutOnImplicitCall is needed. So lazy bailout is unnecessary - // because we are already protected from potential side effects unless the operation - // itself can change fields' values (StFld/StElem). + // We decided that BailOutOnImplicitCall is needed; LazyBailOut is unnecessary because + // the modification of a property would trigger an implicit call bailout before a LazyBailOut + // would trigger. An edge case is when the act of checking the type of the object with the + // property, which occurs before the implicit call check, results in a property guard invalidation. + // In this case a LazyBailOut is necessary. if (needsLazyBailOut && !instr->CanChangeFieldValueWithoutImplicitCall()) { instr->ClearLazyBailOut(); @@ -2496,16 +2504,19 @@ BackwardPass::UpdateImplicitCallBailOutKind(IR::Instr *const instr, bool needsBa return true; } - else + + // needsBailOutOnImplicitCall also captures our intention to keep BailOutOnImplicitCalls + // because we want to do fixed field lazy bailout optimization. So if we don't need them, + // just remove our lazy bailout unless this instr can cause a PropertyGuard invalidation + // during the type check. + if (!instr->CanChangeFieldValueWithoutImplicitCall()) { - // `needsBailOutOnImplicitCall` also captures our intention to keep BailOutOnImplicitCalls - // because we want to do fixed field lazy bailout optimization. So if we don't need them, - // just remove our lazy bailout. 
instr->ClearLazyBailOut(); - if (!instr->HasBailOutInfo()) - { - return true; - } + } + + if (!instr->HasBailOutInfo()) + { + return true; } const IR::BailOutKind bailOutKindWithoutBits = instr->GetBailOutKindNoBits(); @@ -2517,8 +2528,8 @@ BackwardPass::UpdateImplicitCallBailOutKind(IR::Instr *const instr, bool needsBa return true; } - // At this point, we don't need the bail on implicit calls. - // Simply use the bailout kind bits as our new bailout kind. + // At this point we don't need the bail on implicit calls, + // use the bailout kind bits as our new bailout kind. IR::BailOutKind newBailOutKind = bailOutKindWithBits - bailOutKindWithoutBits; if (newBailOutKind == IR::BailOutInvalid) @@ -3721,7 +3732,10 @@ BackwardPass::ProcessBlock(BasicBlock * block) ); DeadStoreTypeCheckBailOut(instr); - DeadStoreLazyBailOut(instr, needsLazyBailOut); + if (!needsLazyBailOut) + { + DeadStoreLazyBailOut(instr); + } DeadStoreImplicitCallBailOut(instr, hasLiveFields, needsLazyBailOut); AssertMsg( @@ -5714,8 +5728,13 @@ BackwardPass::TrackAddPropertyTypes(IR::PropertySymOpnd *opnd, BasicBlock *block typeWithProperty == typeWithoutProperty || (opnd->IsTypeChecked() && !opnd->IsInitialTypeChecked())) { - if (!this->IsPrePass() && block->stackSymToFinalType != nullptr && !this->currentInstr->HasBailOutInfo()) + if ( + !this->IsPrePass() && + block->stackSymToFinalType != nullptr && + (!this->currentInstr->HasBailOutInfo() || currentInstr->OnlyHasLazyBailOut()) + ) { + PropertySym *propertySym = opnd->m_sym->AsPropertySym(); AddPropertyCacheBucket *pBucket = block->stackSymToFinalType->Get(propertySym->m_stackSym->m_id); @@ -6009,12 +6028,15 @@ BackwardPass::InsertTypeTransitionsAtPotentialKills() // Final types can't be pushed up past certain instructions. IR::Instr *instr = this->currentInstr; - if (instr->HasBailOutInfo() || instr->m_opcode == Js::OpCode::UpdateNewScObjectCache) + // Final types can't be pushed up past a BailOut point. 
Insert any transitions called + // for by the current state of add-property buckets. Also do this for ctor cache updates + // to avoid putting a type in the ctor cache that extends past the end of the ctor that + // the cache covers. + // TODO: explain why LBO gets exempted from this rule. + if (instr->m_opcode == Js::OpCode::UpdateNewScObjectCache || + (instr->HasBailOutInfo() && !instr->OnlyHasLazyBailOut()) + ) { - // Final types can't be pushed up past a bailout point. - // Insert any transitions called for by the current state of add-property buckets. - // Also do this for ctor cache updates, to avoid putting a type in the ctor cache that extends past - // the end of the ctor that the cache covers. this->ForEachAddPropertyCacheBucket([&](int symId, AddPropertyCacheBucket *data)->bool { this->InsertTypeTransitionAfterInstr(instr, symId, data, this->currentBlock->upwardExposedUses); return false; diff --git a/lib/Backend/BackwardPass.h b/lib/Backend/BackwardPass.h index 4f7658b00a1..83bade432de 100644 --- a/lib/Backend/BackwardPass.h +++ b/lib/Backend/BackwardPass.h @@ -74,7 +74,7 @@ class BackwardPass void DumpMarkTemp(); #endif - static bool UpdateImplicitCallBailOutKind(IR::Instr *const instr, bool needsBailOutOnImplicitCall, bool needsLazyBailOut); + bool UpdateImplicitCallBailOutKind(IR::Instr *const instr, bool needsBailOutOnImplicitCall, bool needsLazyBailOut); bool ProcessNoImplicitCallUses(IR::Instr *const instr); void ProcessNoImplicitCallDef(IR::Instr *const instr); @@ -106,7 +106,7 @@ class BackwardPass bool IsLazyBailOutCurrentlyNeeeded(IR::Instr * instr) const; void DeadStoreImplicitCallBailOut(IR::Instr * instr, bool hasLiveFields, bool needsLazyBailOut); void DeadStoreTypeCheckBailOut(IR::Instr * instr); - void DeadStoreLazyBailOut(IR::Instr * instr, bool needsLazyBailOut); + void DeadStoreLazyBailOut(IR::Instr * instr); bool IsImplicitCallBailOutCurrentlyNeeded(IR::Instr * instr, bool mayNeedImplicitCallBailOut, bool needLazyBailOut, bool 
hasLiveFields); bool NeedBailOutOnImplicitCallsForTypedArrayStore(IR::Instr* instr); bool TrackNoImplicitCallInlinees(IR::Instr *instr); diff --git a/lib/Backend/BailOut.cpp b/lib/Backend/BailOut.cpp index 11361de735f..df41d8cc294 100644 --- a/lib/Backend/BailOut.cpp +++ b/lib/Backend/BailOut.cpp @@ -2974,14 +2974,6 @@ SharedBailOutRecord::SharedBailOutRecord(uint32 bailOutOffset, uint bailOutCache this->type = BailoutRecordType::Shared; } -#if DBG -void LazyBailOutRecord::Dump(Js::FunctionBody* functionBody) const -{ - OUTPUT_PRINT(functionBody); - Output::Print(_u("Bytecode Offset: #%04x opcode: %s"), this->bailOutRecord->GetBailOutOffset(), Js::OpCodeUtil::GetOpCodeName(this->bailOutRecord->GetBailOutOpCode())); -} -#endif - void GlobalBailOutRecordDataTable::Finalize(NativeCodeData::Allocator *allocator, JitArenaAllocator *tempAlloc) { GlobalBailOutRecordDataRow *newRows = NativeCodeDataNewArrayZNoFixup(allocator, GlobalBailOutRecordDataRow, length); diff --git a/lib/Backend/BailOut.h b/lib/Backend/BailOut.h index 92473256bfb..51f2b40f7f3 100644 --- a/lib/Backend/BailOut.h +++ b/lib/Backend/BailOut.h @@ -50,13 +50,10 @@ class BailOutInfo void PartialDeepCopyTo(BailOutInfo *const bailOutInfo) const; void Clear(JitArenaAllocator * allocator); - // Lazy bailout - // - // Workaround for dealing with use of destination register of `call` instructions with postop lazy bailout. - // As an example, in globopt, we have s1 = Call and s1 is in byteCodeUpwardExposedUse, - // but after lowering, the instructions are: s3 = Call, s1 = s3. - // If we add a postop lazy bailout to s3 = call, we will create a use of s1 right at that instructions. - // However, s1 at that point is not initialized yet. + // Related to Lazy bailout. Workaround for dealing with use of destination register of `call` instructions + // with postop lazy bailout. 
As an example, in globopt, we have s1 = Call and s1 is in byteCodeUpwardExposedUse, + but after lowering, the instructions are: s3 = Call, s1 = s3. If we add a postop lazy bailout to s3 = call, + we will create a use of s1 right at that instruction. However, s1 at that point is not initialized yet. // As a workaround, we will clear the use of s1 and restore it if we determine that lazy bailout is not needed. void ClearUseOfDst(SymID id); void RestoreUseOfDst(); diff --git a/lib/Backend/DbCheckPostLower.cpp b/lib/Backend/DbCheckPostLower.cpp index 1a77f7c70d4..d94b3bce4a0 100644 --- a/lib/Backend/DbCheckPostLower.cpp +++ b/lib/Backend/DbCheckPostLower.cpp @@ -319,6 +319,7 @@ DbCheckPostLower::IsAssign(IR::Instr *instr) return LowererMD::IsAssign(instr) #ifdef _M_X64 || instr->m_opcode == Js::OpCode::MOVQ + || instr->m_opcode == Js::OpCode::MOV_TRUNC #endif ; } @@ -364,7 +365,9 @@ DbCheckPostLower::EnsureOnlyMovesToRegisterOpnd(IR::Instr *instr) if (this->IsCallToHelper(instr, IR::HelperOp_Equal) || this->IsCallToHelper(instr, IR::HelperOp_StrictEqual) || this->IsCallToHelper(instr, IR::HelperOP_CmEq_A) || - this->IsCallToHelper(instr, IR::HelperOP_CmNeq_A) + this->IsCallToHelper(instr, IR::HelperOP_CmNeq_A) || + this->IsCallToHelper(instr, IR::HelperOP_CmSrNeq_A) || + this->IsCallToHelper(instr, IR::HelperOP_CmSrEq_A) ) { // Pattern matched diff --git a/lib/Backend/Encoder.cpp b/lib/Backend/Encoder.cpp index c15d60cb58d..a80260ff7a2 100644 --- a/lib/Backend/Encoder.cpp +++ b/lib/Backend/Encoder.cpp @@ -77,7 +77,7 @@ Encoder::Encode() m_pc = m_encodeBuffer; m_inlineeFrameMap = Anew(m_tempAlloc, ArenaInlineeFrameMap, m_tempAlloc); - m_sortedLazyBailoutRecordList = Anew(m_tempAlloc, ArenaLazyBailoutRecordList, m_tempAlloc); + m_sortedLazyBailOutRecordList = Anew(m_tempAlloc, ArenaLazyBailoutRecordList, m_tempAlloc); IR::PragmaInstr* pragmaInstr = nullptr; uint32 pragmaOffsetInBuffer = 0; @@ -607,29 +607,29 @@ Encoder::Encode() if 
(this->m_inlineeFrameMap->Count() > 0 && !(this->m_inlineeFrameMap->Count() == 1 && this->m_inlineeFrameMap->Item(0).record == nullptr)) { - if (!m_func->IsOOPJIT()) // in-proc JIT - { - m_func->GetInProcJITEntryPointInfo()->GetInProcNativeEntryPointData()->RecordInlineeFrameMap(m_inlineeFrameMap); - } - else // OOP JIT + if (m_func->IsOOPJIT()) { - NativeOffsetInlineeFrameRecordOffset* pairs = NativeCodeDataNewArrayZNoFixup(m_func->GetNativeCodeDataAllocator(), NativeOffsetInlineeFrameRecordOffset, this->m_inlineeFrameMap->Count()); + NativeOffsetToRecordOffset* pairs = NativeCodeDataNewArrayZNoFixup(m_func->GetNativeCodeDataAllocator(), NativeOffsetToRecordOffset, this->m_inlineeFrameMap->Count()); - this->m_inlineeFrameMap->Map([&pairs](int i, NativeOffsetInlineeFramePair& p) + this->m_inlineeFrameMap->Map([&pairs](int i, NativeOffsetRecordPair& p) { - pairs[i].offset = p.offset; + pairs[i].nativeAddressOffset = p.nativeAddressOffset; if (p.record) { pairs[i].recordOffset = NativeCodeData::GetDataChunk(p.record)->offset; } else { - pairs[i].recordOffset = NativeOffsetInlineeFrameRecordOffset::InvalidRecordOffset; - } + pairs[i].recordOffset = NativeOffsetToRecordOffset::InvalidRecordOffset; + } }); m_func->GetJITOutput()->RecordInlineeFrameOffsetsInfo(NativeCodeData::GetDataChunk(pairs)->offset, this->m_inlineeFrameMap->Count()); } + else + { + m_func->GetInProcJITEntryPointInfo()->GetInProcNativeEntryPointData()->RecordInlineeFrameMap(m_inlineeFrameMap); + } } this->SaveLazyBailOutJitTransferData(); @@ -1019,15 +1019,15 @@ void Encoder::RecordInlineeFrame(Func* inlinee, uint32 currentOffset) if (m_inlineeFrameMap->Count() > 0) { // update existing record if the entry is the same. 
- NativeOffsetInlineeFramePair& lastPair = m_inlineeFrameMap->Item(m_inlineeFrameMap->Count() - 1); + NativeOffsetRecordPair& lastPair = m_inlineeFrameMap->Item(m_inlineeFrameMap->Count() - 1); if (lastPair.record == record) { - lastPair.offset = currentOffset; + lastPair.nativeAddressOffset = currentOffset; return; } } - NativeOffsetInlineeFramePair pair = { currentOffset, record }; + NativeOffsetRecordPair pair = { currentOffset, record }; m_inlineeFrameMap->Add(pair); } } @@ -1207,6 +1207,7 @@ Encoder::ShortenBranchesAndLabelAlign(BYTE **codeStart, ptrdiff_t *codeSize, uin FixUpMapIndex mapIndices; int32 totalBytesSaved = 0; + int32 bytesSavedAfterLBOThunk = 0; // loop over all BRs, find the ones we can convert to short form for (int32 j = 0; j < relocList->Count(); j++) @@ -1294,6 +1295,16 @@ Encoder::ShortenBranchesAndLabelAlign(BYTE **codeStart, ptrdiff_t *codeSize, uin codeChange = true; totalBytesSaved += bytesSaved; + // If the offset of the current br instr is further from the beginning of the function than + // the lazyBailOutThunk entry point (which is subtracted by totalBytesSaved as at this point + // the LBOThunk entry point has been moved due to shortening branches), then keep track of + // the bytes being saved as these bytes do not count towards the adjustment of + // m_lazyBailOutThunkOffset made at the end of this function. 
+ if ((unsigned char*)reloc.m_ptr - *codeStart > (int32)m_lazyBailOutThunkOffset - totalBytesSaved) + { + bytesSavedAfterLBOThunk += bytesSaved; + } + // mark br reloc entry as shortened #ifdef _M_IX86 reloc.setAsShortBr(targetLabel); @@ -1309,7 +1320,7 @@ Encoder::ShortenBranchesAndLabelAlign(BYTE **codeStart, ptrdiff_t *codeSize, uin m_encoderMD.FixMaps((uint32)-1, totalBytesSaved, &mapIndices); codeChange = true; newCodeSize -= totalBytesSaved; - this->FixLazyBailOutThunkOffset(totalBytesSaved); + this->FixLazyBailOutThunkOffset(totalBytesSaved - bytesSavedAfterLBOThunk); } // no BR shortening or Label alignment happened, no need to copy code @@ -1579,11 +1590,11 @@ void Encoder::CopyMaps(OffsetList **m_origInlineeFrameRecords { if (!restore) { - origMapList->Add(mapList->Item(i).offset); + origMapList->Add(mapList->Item(i).nativeAddressOffset); } else { - mapList->Item(i).offset = origMapList->Item(i); + mapList->Item(i).nativeAddressOffset = origMapList->Item(i); } } @@ -1637,8 +1648,8 @@ void Encoder::DumpInlineeFrameMap(size_t baseAddress) { Output::Print(_u("Inlinee frame info mapping\n")); Output::Print(_u("---------------------------------------\n")); - m_inlineeFrameMap->Map([=](uint index, NativeOffsetInlineeFramePair& pair) { - Output::Print(_u("%Ix"), baseAddress + pair.offset); + m_inlineeFrameMap->Map([=](uint index, NativeOffsetRecordPair& pair) { + Output::Print(_u("%Ix"), baseAddress + pair.nativeAddressOffset); Output::SkipToColumn(20); if (pair.record) { @@ -1671,7 +1682,7 @@ Encoder::SaveToLazyBailOutRecordList(IR::Instr* instr, uint32 currentOffset) #endif LazyBailOutRecord record(currentOffset, bailOutInfo->bailOutRecord); - this->m_sortedLazyBailoutRecordList->Add(record); + this->m_sortedLazyBailOutRecordList->Add(record); } void @@ -1687,29 +1698,121 @@ Encoder::SaveLazyBailOutThunkOffset(uint32 currentOffset) void Encoder::SaveLazyBailOutJitTransferData() { - if (this->m_func->HasLazyBailOut()) + // Things to save: LBORecords, 
LBOProperties, LBOThunkOffset, RecordSlotOffset, HasLazyBailOut. + + const bool isOOPJIT = m_func->IsOOPJIT(); + + if (m_func->HasLazyBailOut()) + { + const int sortedLazyBailOutRecordListCount = m_sortedLazyBailOutRecordList->Count(); + + Assert(sortedLazyBailOutRecordListCount > 0); + Assert(m_lazyBailOutThunkOffset != 0); + Assert(m_func->GetLazyBailOutRecordSlot() != nullptr); + + // This function has the potential to be bailed out of due to a LazyBailOut, + // store the LBORecords, LBOThunkOffset, RecordSlotOffset. + if (isOOPJIT) + { + // For OOPJIT, m_sortedLazyBailOutRecordList will not be stored nor will any LazyBailoutRecord. All + // BailOutRecords from m_sortedLazyBailOutRecordList's BailOutRecords have already been stored into + // the JitOutput. To keep track of all the BailOutRecords, create and store LazyBailOutRecordOffsets. + NativeOffsetToRecordOffset* lazyBailOutRecordOffsets = NativeCodeDataNewArrayZNoFixup( + m_func->GetNativeCodeDataAllocator(), NativeOffsetToRecordOffset, sortedLazyBailOutRecordListCount); + + // Transfer data from m_sortedLazyBailOutRecordList to lazyBailOutRecordOffsets. + m_sortedLazyBailOutRecordList->Map([&lazyBailOutRecordOffsets](int i, LazyBailOutRecord& lazyBailOutRecord) + { + lazyBailOutRecordOffsets[i].nativeAddressOffset = lazyBailOutRecord.nativeAddressOffset; + if (lazyBailOutRecord.bailOutRecord) + { + // Allocate lazyBailOutRecord.bailOutRecord's JitOutput offset as the recordOffset. + lazyBailOutRecordOffsets[i].recordOffset = NativeCodeData::GetDataChunk(lazyBailOutRecord.bailOutRecord)->offset; + } + else + { + lazyBailOutRecordOffsets[i].recordOffset = NativeOffsetToRecordOffset::InvalidRecordOffset; + } + }); + + // Store the LBORecords. lazyBailOutRecordOffsets has been stored into JitOutput because + // lazyBailOutRecordOffsets was allocated using NativeCodeDataNewArrayZNoFixup. 
Store the + // offset into JitOutput of NativeCodeDataNewArrayZNoFixup along with the amount of entrys + // in lazyBailOutRecordOffsets. + m_func->GetJITOutput()->RecordLazyBailOutRecordOffsetsInfo( + NativeCodeData::GetDataChunk(lazyBailOutRecordOffsets)->offset, sortedLazyBailOutRecordListCount); + + // Store the LBOThunkOffset, RecordSlotOffset, HasLazyBailOut. + m_func->GetJITOutput()->RecordLazyBailOutRecordSlotOffset(m_func->GetLazyBailOutRecordSlot()->m_offset); + m_func->GetJITOutput()->RecordLazyBailOutThunkOffset(m_lazyBailOutThunkOffset); + m_func->GetJITOutput()->RecordHasLazyBailOut(true); + } + else + { + // Store the LBORecords. + InProcNativeEntryPointData* inProcNativeEntryPointData = m_func->GetInProcJITEntryPointInfo()->GetInProcNativeEntryPointData(); + inProcNativeEntryPointData->SetSortedLazyBailOutRecordList(m_sortedLazyBailOutRecordList); + + // Store the LBOThunkOffset, RecordSlotOffset, + inProcNativeEntryPointData->SetLazyBailOutRecordSlotOffset(m_func->GetLazyBailOutRecordSlot()->m_offset); + inProcNativeEntryPointData->SetLazyBailOutThunkOffset(m_lazyBailOutThunkOffset); + inProcNativeEntryPointData->SetHasLazyBailOut(true); + } + } + + // The function does not have a LazyBailOut. 
+ else { - Assert(this->m_sortedLazyBailoutRecordList->Count() > 0); - Assert(this->m_lazyBailOutThunkOffset != 0); - Assert(this->m_func->GetLazyBailOutRecordSlot() != nullptr); - - auto nativeEntryPointData = this->m_func->GetInProcJITEntryPointInfo()->GetInProcNativeEntryPointData(); - nativeEntryPointData->SetSortedLazyBailOutRecordList(this->m_sortedLazyBailoutRecordList); - nativeEntryPointData->SetLazyBailOutRecordSlotOffset(this->m_func->GetLazyBailOutRecordSlot()->m_offset); - nativeEntryPointData->SetLazyBailOutThunkOffset(this->m_lazyBailOutThunkOffset); + if (isOOPJIT) + { + m_func->GetJITOutput()->RecordHasLazyBailOut(false); + } + else + { + m_func->GetInProcJITEntryPointInfo()->GetInProcNativeEntryPointData()->SetHasLazyBailOut(false); + } } - if (this->m_func->lazyBailoutProperties.Count() > 0) + const int lazyBailOutPropertiesCount = m_func->lazyBailOutProperties.Count(); + + // Even if the function does not have a LazyBailOut, a property whose scope lives outside the function + // can still change, thus the property's property guard will be invalidated. In this case the function's + // native code is invalid and must be invalidated by invalidating the function's entry point. + if (lazyBailOutPropertiesCount > 0) { - const int count = this->m_func->lazyBailoutProperties.Count(); - Js::PropertyId* lazyBailoutProperties = HeapNewArrayZ(Js::PropertyId, count); - Js::PropertyId* dstProperties = lazyBailoutProperties; - this->m_func->lazyBailoutProperties.Map([&](Js::PropertyId propertyId) + // Store the LBOProperties + Js::PropertyId* lazyBailoutPropertiesArray; + if (isOOPJIT) { - *dstProperties++ = propertyId; + lazyBailoutPropertiesArray = NativeCodeDataNewArrayZNoFixup( + m_func->GetNativeCodeDataAllocator(), Js::PropertyId, lazyBailOutPropertiesCount); + } + else + { + lazyBailoutPropertiesArray = HeapNewArrayZ(Js::PropertyId, lazyBailOutPropertiesCount); + } + + // Populate the lazyBailoutPropertiesArray using data from lazyBailOutProperties. 
+ Js::PropertyId* currLazyBailoutPropertiesArrayIndex = lazyBailoutPropertiesArray; + m_func->lazyBailOutProperties.Map([&](Js::PropertyId propertyId) + { + *currLazyBailoutPropertiesArrayIndex++ = propertyId; }); - this->m_func->GetInProcJITEntryPointInfo()->GetJitTransferData()->SetLazyBailoutProperties(lazyBailoutProperties, count); + + if (isOOPJIT) + { + // Instead of saving LBO properties to this process, we + // must transfer the properties to the root process. + m_func->GetJITOutput()->RecordLazyBailOutPropertiesInfo( + NativeCodeData::GetDataChunk(lazyBailoutPropertiesArray)->offset, lazyBailOutPropertiesCount); + } + else + { + m_func->GetInProcJITEntryPointInfo()->GetJitTransferData()->SetLazyBailoutProperties( + lazyBailoutPropertiesArray, lazyBailOutPropertiesCount); + } } + } void diff --git a/lib/Backend/Encoder.h b/lib/Backend/Encoder.h index 8c76cb60d09..3f3cc287a9a 100644 --- a/lib/Backend/Encoder.h +++ b/lib/Backend/Encoder.h @@ -11,7 +11,7 @@ /// ///--------------------------------------------------------------------------- -typedef JsUtil::List ArenaInlineeFrameMap; +typedef JsUtil::List, ArenaAllocator> ArenaInlineeFrameMap; typedef JsUtil::List PragmaInstrList; typedef JsUtil::List OffsetList; typedef JsUtil::List JmpTableList; @@ -32,7 +32,7 @@ class Encoder public: Encoder(Func * func) : m_func(func), m_encoderMD(func), m_inlineeFrameMap(nullptr), - m_lazyBailOutThunkOffset(0), m_sortedLazyBailoutRecordList(nullptr) + m_lazyBailOutThunkOffset(0), m_sortedLazyBailOutRecordList(nullptr) {} void Encode(); @@ -52,7 +52,7 @@ class Encoder uint32 m_inlineeFrameMapRecordCount; uint32 m_lazyBailOutThunkOffset; - ArenaLazyBailoutRecordList* m_sortedLazyBailoutRecordList; + ArenaLazyBailoutRecordList* m_sortedLazyBailOutRecordList; #if DBG_DUMP void DumpInlineeFrameMap(size_t baseAddress); uint32 * m_offsetBuffer; diff --git a/lib/Backend/Func.cpp b/lib/Backend/Func.cpp index 61a3516b3f6..be16383beec 100644 --- a/lib/Backend/Func.cpp +++ 
b/lib/Backend/Func.cpp @@ -42,7 +42,7 @@ Func::Func(JitArenaAllocator *alloc, JITTimeWorkItem * workItem, callSiteToArgumentsOffsetFixupMap(nullptr), indexedPropertyGuardCount(0), propertiesWrittenTo(nullptr), - lazyBailoutProperties(alloc), + lazyBailOutProperties(alloc), anyPropertyMayBeWrittenTo(false), #ifdef PROFILE_EXEC m_codeGenProfiler(codeGenProfiler), @@ -2088,7 +2088,7 @@ bool Func::ShouldDoLazyBailOut() const { #if defined(_M_X64) - if (!PHASE_ON1(Js::LazyBailoutPhase) || + if (PHASE_OFF1(Js::LazyBailoutPhase) || this->GetJITFunctionBody()->IsAsmJsMode() || // don't have bailouts in asm.js this->HasTry() || // lazy bailout in function with try/catch not supported for now // `EHBailoutPatchUp` set a `hasBailedOut` bit to rethrow the exception in the interpreter diff --git a/lib/Backend/Func.h b/lib/Backend/Func.h index 0f2eefec8d5..02f525c41d8 100644 --- a/lib/Backend/Func.h +++ b/lib/Backend/Func.h @@ -662,7 +662,7 @@ static const unsigned __int64 c_debugFillPattern8 = 0xcececececececece; typedef JsUtil::BaseHashSet PropertyIdSet; PropertyIdSet* propertiesWrittenTo; - PropertyIdSet lazyBailoutProperties; + PropertyIdSet lazyBailOutProperties; bool anyPropertyMayBeWrittenTo; FrameDisplayCheckTable *frameDisplayCheckTable; diff --git a/lib/Backend/GlobOptBailOut.cpp b/lib/Backend/GlobOptBailOut.cpp index fe586ea1a39..59ba5205e67 100644 --- a/lib/Backend/GlobOptBailOut.cpp +++ b/lib/Backend/GlobOptBailOut.cpp @@ -1233,51 +1233,43 @@ GlobOpt::IsLazyBailOutCurrentlyNeeded(IR::Instr * instr, Value const * src1Val, Assert(!instr->HasAnyImplicitCalls() || this->currentBlock->GetNext()->loop->endDisableImplicitCall != nullptr); return false; } - - // These opcodes can change the value of a field regardless whether the - // instruction has any implicit call + // These opcodes can change the value of a field regardless + // of whether the instruction has any implicit calls. 
if (OpCodeAttr::CallInstr(instr->m_opcode) || instr->IsStElemVariant() || instr->IsStFldVariant()) { return true; } - - // Now onto those that might change values of fixed fields through implicit calls. + // Now onto instrs that might change values of fixed fields through implicit calls. // There are certain bailouts that are already attached to this instruction that // prevent implicit calls from happening, so we won't need lazy bailout for those. - - // If a type check fails, we will bail out and therefore no need for lazy bailout + // If a type check fails we will bailout and therefore there is no need for a LazyBailout. if (instr->HasTypeCheckBailOut()) { return false; } - // We decided to do StackArgs optimization, which means that this instruction - // could only either be LdElemI_A or TypeofElem, and that it does not have - // an implicit call. So no need for lazy bailout. + // We decided to do StackArgs optimization, which means that this instruction could only either be + // LdElemI_A or TypeofElem, and that it does not have an implicit call. So no need for lazy bailout. if (instr->HasBailOutInfo() && instr->GetBailOutKind() == IR::BailOnStackArgsOutOfActualsRange) { Assert(instr->m_opcode == Js::OpCode::LdElemI_A || instr->m_opcode == Js::OpCode::TypeofElem); return false; } - // If all operands are type specialized, we won't generate helper path; - // therefore no need for lazy bailout + // If all operands are type specialized, we won't generate + // helper path; therefore no need for lazy bailout. if (instr->AreAllOpndsTypeSpecialized()) { return false; } - // The instruction might have other bailouts that prevent - // implicit calls from happening. That is captured in - // GlobOpt::MayNeedBailOnImplicitCall. So we only - // need lazy bailout of we think there might be implicit calls - // or if there aren't any bailouts that prevent them from happening. + // The instruction might have other bailouts that prevent implicit calls from happening. 
That + // is captured in GlobOpt::MayNeedBailOnImplicitCall. So we only need lazy bailout if we think + // there might be implicit calls or if there aren't any bailouts that prevent them from happening. return this->MayNeedBailOnImplicitCall(instr, src1Val, src2Val); #else // _M_X64 - return false; - #endif } diff --git a/lib/Backend/IR.cpp b/lib/Backend/IR.cpp index c18597ad467..b6022b0ad67 100644 --- a/lib/Backend/IR.cpp +++ b/lib/Backend/IR.cpp @@ -1032,18 +1032,18 @@ bool Instr::CanAggregateByteCodeUsesAcrossInstr(Instr * instr) bool IR::Instr::IsStFldVariant() const { return this->m_opcode == Js::OpCode::StFld || - this->m_opcode == Js::OpCode::StFldStrict || - this->m_opcode == Js::OpCode::StLocalFld || - this->m_opcode == Js::OpCode::StRootFld || - this->m_opcode == Js::OpCode::StRootFldStrict || - this->m_opcode == Js::OpCode::StSuperFld; + this->m_opcode == Js::OpCode::StFldStrict || + this->m_opcode == Js::OpCode::StLocalFld || + this->m_opcode == Js::OpCode::StRootFld || + this->m_opcode == Js::OpCode::StRootFldStrict || + this->m_opcode == Js::OpCode::StSuperFld; } bool IR::Instr::IsStElemVariant() const { return this->m_opcode == Js::OpCode::StElemI_A || - this->m_opcode == Js::OpCode::StElemI_A_Strict || - this->m_opcode == Js::OpCode::StElemC; + this->m_opcode == Js::OpCode::StElemI_A_Strict || + this->m_opcode == Js::OpCode::StElemC; } bool IR::Instr::CanChangeFieldValueWithoutImplicitCall() const @@ -1053,10 +1053,10 @@ bool IR::Instr::CanChangeFieldValueWithoutImplicitCall() const } // If LazyBailOut is the only BailOutKind on the instruction, the BailOutInfo is cleared. -// Otherwise, we remove the LazyBailOut kind from the instruction and still keep the BailOutInfo. +// Otherwise, we remove the LazyBailOut kind from the instruction but still keep the BailOutInfo. 
void IR::Instr::ClearLazyBailOut() { - if (!this->HasBailOutInfo()) + if (!this->HasBailOutInfo() || !this->HasLazyBailOut()) { return; } diff --git a/lib/Backend/InlineeFrameInfo.cpp b/lib/Backend/InlineeFrameInfo.cpp index 706f5cae942..218e37fc001 100644 --- a/lib/Backend/InlineeFrameInfo.cpp +++ b/lib/Backend/InlineeFrameInfo.cpp @@ -24,7 +24,7 @@ #endif -unsigned int NativeOffsetInlineeFrameRecordOffset::InvalidRecordOffset = (unsigned int)(-1); +unsigned int NativeOffsetToRecordOffset::InvalidRecordOffset = (unsigned int)(-1); void BailoutConstantValue::InitVarConstValue(Js::Var value) { diff --git a/lib/Backend/InlineeFrameInfo.h b/lib/Backend/InlineeFrameInfo.h index fdfb23c485f..b269e145fcd 100644 --- a/lib/Backend/InlineeFrameInfo.h +++ b/lib/Backend/InlineeFrameInfo.h @@ -130,17 +130,19 @@ struct InlineeFrameRecord InlineeFrameRecord* Reverse(); }; -struct NativeOffsetInlineeFramePair +template +struct NativeOffsetRecordPair { - uint32 offset; - InlineeFrameRecord* record; + uint32 nativeAddressOffset; // An address offset into native code. + T* record; // The record that corresponds to the nativeAddressOffset. }; -struct NativeOffsetInlineeFrameRecordOffset +struct NativeOffsetToRecordOffset { - uint32 offset; - uint32 recordOffset; - static uint32 InvalidRecordOffset; + uint32 nativeAddressOffset; // An address offset into native code. + uint32 recordOffset; // The offset that the record that corresponds to + // nativeAddressOffset can be found at in the JitOutput. + static uint32 InvalidRecordOffset; // No record exists for this nativeAddressOffset. 
}; struct InlineeFrameInfo diff --git a/lib/Backend/JITOutput.cpp b/lib/Backend/JITOutput.cpp index dee344d47f9..67b87244127 100644 --- a/lib/Backend/JITOutput.cpp +++ b/lib/Backend/JITOutput.cpp @@ -220,6 +220,38 @@ JITOutput::RecordInlineeFrameOffsetsInfo(unsigned int offsetsArrayOffset, unsign m_outputData->inlineeFrameOffsetArrayCount = offsetsArrayCount; } +void +JITOutput::RecordLazyBailOutRecordOffsetsInfo(unsigned int offsetsArrayOffset, unsigned int offsetsArrayCount) +{ + m_outputData->lazyBailOutRecordOffsetArrayOffset = offsetsArrayOffset; + m_outputData->lazyBailOutRecordOffsetArrayCount = offsetsArrayCount; +} + +void +JITOutput::RecordLazyBailOutPropertiesInfo(unsigned int offsetsArrayOffset, unsigned int offsetsArrayCount) +{ + m_outputData->lazyBailOutPropertiesArrayOffset = offsetsArrayOffset; + m_outputData->lazyBailOutPropertiesArrayCount = offsetsArrayCount; +} + +void +JITOutput::RecordLazyBailOutRecordSlotOffset(int lazyBailOutRecordSlotOffset) +{ + m_outputData->lazyBailOutRecordSlotOffset = lazyBailOutRecordSlotOffset; +} + +void +JITOutput::RecordLazyBailOutThunkOffset(unsigned int lazyBailOutThunkOffset) +{ + m_outputData->lazyBailOutThunkOffset = lazyBailOutThunkOffset; +} + +void +JITOutput::RecordHasLazyBailOut(bool hasLazyBailOut) +{ + m_outputData->hasLazyBailOut = hasLazyBailOut; +} + #if TARGET_64 void JITOutput::RecordUnwindInfo(BYTE *unwindInfo, size_t size, BYTE * xdataAddr, BYTE* localXdataAddr) diff --git a/lib/Backend/JITOutput.h b/lib/Backend/JITOutput.h index 85e9b3c1fa2..945fa465c20 100644 --- a/lib/Backend/JITOutput.h +++ b/lib/Backend/JITOutput.h @@ -41,6 +41,11 @@ class JITOutput #endif void RecordNativeCode(const BYTE* sourceBuffer, BYTE* localCodeAddress); void RecordInlineeFrameOffsetsInfo(unsigned int offsetsArrayOffset, unsigned int offsetsArrayCount); + void RecordLazyBailOutRecordOffsetsInfo(unsigned int offsetsArrayOffset, unsigned int offsetsArrayCount); + void RecordLazyBailOutPropertiesInfo(unsigned int 
offsetsArrayOffset, unsigned int offsetsArrayCount); + void RecordLazyBailOutRecordSlotOffset(int lazyBailOutRecordSlot); + void RecordLazyBailOutThunkOffset(unsigned int lazyBailOutThunkOffset); + void RecordHasLazyBailOut(bool hasLazyBailOut); #if TARGET_64 void RecordUnwindInfo(BYTE *unwindInfo, size_t size, BYTE * xdataAddr, BYTE* localXdataAddr); diff --git a/lib/Backend/JitTransferData.cpp b/lib/Backend/JitTransferData.cpp index bb50778ae59..b6a44cf188b 100644 --- a/lib/Backend/JitTransferData.cpp +++ b/lib/Backend/JitTransferData.cpp @@ -43,7 +43,10 @@ void JitTransferData::Cleanup() if (this->lazyBailoutProperties != nullptr) { - HeapDeleteArray(this->lazyBailoutPropertyCount, this->lazyBailoutProperties); + if (!JITManager::GetJITManager()->IsOOPJITEnabled()) + { + HeapDeleteArray(this->lazyBailOutPropertyCount, this->lazyBailoutProperties); + } this->lazyBailoutProperties = nullptr; } diff --git a/lib/Backend/JitTransferData.h b/lib/Backend/JitTransferData.h index 2cc8179b73f..e6479776132 100644 --- a/lib/Backend/JitTransferData.h +++ b/lib/Backend/JitTransferData.h @@ -46,7 +46,7 @@ class JitTransferData Field(size_t) ctorCacheGuardsByPropertyIdPlusSize; Field(int) equivalentTypeGuardCount; - Field(int) lazyBailoutPropertyCount; + Field(int) lazyBailOutPropertyCount; // This is a dynamically sized array of JitEquivalentTypeGuards. It's heap allocated by the JIT thread and lives // until entry point is installed, at which point it is explicitly freed. We need it during installation so as to // swap the cache associated with each guard from the heap to the recycler (so the types in the cache are kept alive). 
@@ -66,7 +66,7 @@ class JitTransferData propertyGuardCount(0), propertyGuardsByPropertyId(nullptr), propertyGuardsByPropertyIdPlusSize(0), ctorCacheGuardsByPropertyId(nullptr), ctorCacheGuardsByPropertyIdPlusSize(0), equivalentTypeGuardCount(0), equivalentTypeGuards(nullptr), jitTransferRawData(nullptr), - falseReferencePreventionBit(true), isReady(false), lazyBailoutProperties(nullptr), lazyBailoutPropertyCount(0) {} + falseReferencePreventionBit(true), isReady(false), lazyBailoutProperties(nullptr), lazyBailOutPropertyCount(0) {} void SetRawData(NativeCodeData* rawData) { jitTransferRawData = rawData; } @@ -85,7 +85,7 @@ class JitTransferData void SetLazyBailoutProperties(Js::PropertyId* properties, int count) { this->lazyBailoutProperties = properties; - this->lazyBailoutPropertyCount = count; + this->lazyBailOutPropertyCount = count; } void SetEquivalentTypeGuardOffsets(EquivalentTypeGuardOffsets* offsets) { diff --git a/lib/Backend/Lower.cpp b/lib/Backend/Lower.cpp index 5d309c5446c..4ca21b71fb3 100644 --- a/lib/Backend/Lower.cpp +++ b/lib/Backend/Lower.cpp @@ -7816,7 +7816,7 @@ Lowerer::CreateTypePropertyGuardForGuardedProperties(JITTypeHolder type, IR::Pro { if (ShouldDoLazyFixedTypeBailout(this->m_func)) { - this->m_func->lazyBailoutProperties.Item(propertyId); + this->m_func->lazyBailOutProperties.Item(propertyId); } else { @@ -8090,7 +8090,7 @@ Lowerer::GeneratePropertyGuardCheck(IR::Instr *insertPointInstr, IR::PropertySym if (ShouldDoLazyFixedDataBailout(this->m_func)) { - this->m_func->lazyBailoutProperties.Item(propertySymOpnd->GetPropertyId()); + this->m_func->lazyBailOutProperties.Item(propertySymOpnd->GetPropertyId()); return false; } else diff --git a/lib/Backend/Lower.h b/lib/Backend/Lower.h index be9218c69c4..4a898e6849e 100644 --- a/lib/Backend/Lower.h +++ b/lib/Backend/Lower.h @@ -822,7 +822,7 @@ class Lowerer static bool IsSpreadCall(IR::Instr *instr); static IR::Instr* GetLdSpreadIndicesInstr(IR::Instr *instr); - static bool 
ShouldDoLazyFixedTypeBailout(Func* func) { return func->ShouldDoLazyBailOut() && PHASE_ON1(Js::LazyFixedTypeBailoutPhase); } + static bool ShouldDoLazyFixedTypeBailout(Func* func) { return func->ShouldDoLazyBailOut() && !PHASE_OFF1(Js::LazyFixedTypeBailoutPhase); } static bool ShouldDoLazyFixedDataBailout(Func* func) { return func->ShouldDoLazyBailOut() && !PHASE_OFF1(Js::LazyFixedDataBailoutPhase); } LowererMD * GetLowererMD() { return &m_lowererMD; } private: diff --git a/lib/Backend/NativeCodeGenerator.cpp b/lib/Backend/NativeCodeGenerator.cpp index 43b1e2ebc45..eaeace8aaa1 100644 --- a/lib/Backend/NativeCodeGenerator.cpp +++ b/lib/Backend/NativeCodeGenerator.cpp @@ -1112,7 +1112,20 @@ NativeCodeGenerator::CodeGen(PageAllocator * pageAllocator, CodeGenWorkItem* wor } } + typedef JsUtil::BaseHashSet PropertyIdSet; + epInfo->GetOOPNativeEntryPointData()->RecordInlineeFrameOffsetsInfo(jitWriteData.inlineeFrameOffsetArrayOffset, jitWriteData.inlineeFrameOffsetArrayCount); + epInfo->GetOOPNativeEntryPointData()->RecordLazyBailOutRecordOffsetsInfo(jitWriteData.lazyBailOutRecordOffsetArrayOffset, jitWriteData.lazyBailOutRecordOffsetArrayCount); + epInfo->GetOOPNativeEntryPointData()->RecordLazyBailOutPropertiesInfo(jitWriteData.lazyBailOutPropertiesArrayOffset, jitWriteData.lazyBailOutPropertiesArrayCount); + epInfo->GetOOPNativeEntryPointData()->RecordLazyBailOutRecordSlotOffset(jitWriteData.lazyBailOutRecordSlotOffset); + epInfo->GetOOPNativeEntryPointData()->RecordLazyBailOutThunkOffset(jitWriteData.lazyBailOutThunkOffset); + epInfo->GetOOPNativeEntryPointData()->SetHasLazyBailOut(jitWriteData.hasLazyBailOut); + + epInfo->GetOOPNativeEntryPointData()->GetJitTransferData()->SetLazyBailoutProperties( + // Offset into jitWriteData to get address of lazyBailOutPropertiesArray. 
+ (Js::PropertyId*)(jitWriteData.buffer->data + jitWriteData.lazyBailOutPropertiesArrayOffset), + jitWriteData.lazyBailOutPropertiesArrayCount + ); } #endif diff --git a/lib/Backend/NativeEntryPointData.cpp b/lib/Backend/NativeEntryPointData.cpp index fa46e2def4e..2eb98293363 100644 --- a/lib/Backend/NativeEntryPointData.cpp +++ b/lib/Backend/NativeEntryPointData.cpp @@ -418,7 +418,7 @@ InProcNativeEntryPointData::GetInlineeFrameMap() } void -InProcNativeEntryPointData::RecordInlineeFrameMap(JsUtil::List* tempInlineeFrameMap) +InProcNativeEntryPointData::RecordInlineeFrameMap(JsUtil::List, ArenaAllocator>* tempInlineeFrameMap) { Assert(!JITManager::GetJITManager()->IsOOPJITEnabled()); Assert(this->inlineeFrameMap == nullptr); @@ -453,7 +453,7 @@ InProcNativeEntryPointData::SetSortedLazyBailOutRecordList(JsUtil::ListItem(index - 1); AssertMsg( - currentRecord.offset > previousRecord.offset, + currentRecord.nativeAddressOffset > previousRecord.nativeAddressOffset, "Lazy bailout record list isn't sorted by offset?" 
); }); @@ -466,7 +466,7 @@ InProcNativeEntryPointData::SetSortedLazyBailOutRecordList(JsUtil::ListlazyBailOutRecordSlotOffset != 0); return this->lazyBailOutRecordSlotOffset; @@ -480,9 +480,10 @@ InProcNativeEntryPointData::SetLazyBailOutRecordSlotOffset(int32 argSlotOffset) } uint32 -InProcNativeEntryPointData::GetLazyBailOutThunkOffset() const +InProcNativeEntryPointData::GetLazyBailOutThunkOffset() { Assert(this->lazyBailOutThunkOffset != 0); + Assert(!JITManager::GetJITManager()->IsOOPJITEnabled()); return this->lazyBailOutThunkOffset; } @@ -493,6 +494,18 @@ InProcNativeEntryPointData::SetLazyBailOutThunkOffset(uint32 thunkOffset) this->lazyBailOutThunkOffset = thunkOffset; } +void +InProcNativeEntryPointData::SetHasLazyBailOut(bool hasLazyBailOut) +{ + this->hasLazyBailOut = hasLazyBailOut; +} + +bool +InProcNativeEntryPointData::GetHasLazyBailOut() +{ + return this->hasLazyBailOut; +} + void InProcNativeEntryPointData::OnCleanup() { @@ -572,6 +585,34 @@ OOPNativeEntryPointData::GetInlineeFrameOffsetArrayCount() return this->inlineeFrameOffsetArrayCount; } +uint +OOPNativeEntryPointData::GetLazyBailOutRecordOffsetArrayOffset() +{ + Assert(JITManager::GetJITManager()->IsOOPJITEnabled()); + return this->lazyBailOutRecordOffsetArrayOffset; +} + +uint +OOPNativeEntryPointData::GetLazyBailOutRecordOffsetArrayCount() +{ + Assert(JITManager::GetJITManager()->IsOOPJITEnabled()); + return this->lazyBailOutRecordOffsetArrayCount; +} + +int32 +OOPNativeEntryPointData::GetLazyBailOutRecordSlotOffset() +{ + Assert(JITManager::GetJITManager()->IsOOPJITEnabled()); + return this->lazyBailOutRecordSlotOffset; +} + +uint +OOPNativeEntryPointData::GetLazyBailOutThunkOffset() +{ + Assert(JITManager::GetJITManager()->IsOOPJITEnabled()); + return this->lazyBailOutThunkOffset; +} + void OOPNativeEntryPointData::RecordInlineeFrameOffsetsInfo(unsigned int offsetsArrayOffset, unsigned int offsetsArrayCount) { @@ -580,6 +621,48 @@ 
OOPNativeEntryPointData::RecordInlineeFrameOffsetsInfo(unsigned int offsetsArray this->inlineeFrameOffsetArrayCount = offsetsArrayCount; } +void +OOPNativeEntryPointData::RecordLazyBailOutRecordOffsetsInfo(unsigned int offsetsArrayOffset, unsigned int offsetsArrayCount) +{ + Assert(JITManager::GetJITManager()->IsOOPJITEnabled()); + this->lazyBailOutRecordOffsetArrayOffset = offsetsArrayOffset; + this->lazyBailOutRecordOffsetArrayCount = offsetsArrayCount; +} + +void +OOPNativeEntryPointData::RecordLazyBailOutPropertiesInfo(unsigned int arrayOffset, unsigned int arrayCount) +{ + Assert(JITManager::GetJITManager()->IsOOPJITEnabled()); + this->lazyBailOutPropertiesArrayOffset = arrayOffset; + this->lazyBailOutPropertiesArrayCount = arrayCount; +} + +void +OOPNativeEntryPointData::RecordLazyBailOutRecordSlotOffset(int lazyBailOutRecordSlotOffset) +{ + Assert(JITManager::GetJITManager()->IsOOPJITEnabled()); + this->lazyBailOutRecordSlotOffset = lazyBailOutRecordSlotOffset; +} + +void +OOPNativeEntryPointData::RecordLazyBailOutThunkOffset(uint lazyBailOutThunkOffset) +{ + Assert(JITManager::GetJITManager()->IsOOPJITEnabled()); + this->lazyBailOutThunkOffset = lazyBailOutThunkOffset; +} + +void +OOPNativeEntryPointData::SetHasLazyBailOut(bool hasLazyBailOut) +{ + this->hasLazyBailOut = hasLazyBailOut; +} + +bool +OOPNativeEntryPointData::GetHasLazyBailOut() +{ + return this->hasLazyBailOut; +} + #if !FLOATVAR void OOPNativeEntryPointData::ProcessNumberPageSegments(ScriptContext * scriptContext) diff --git a/lib/Backend/NativeEntryPointData.h b/lib/Backend/NativeEntryPointData.h index ef48be5e64a..22a61659b63 100644 --- a/lib/Backend/NativeEntryPointData.h +++ b/lib/Backend/NativeEntryPointData.h @@ -12,7 +12,7 @@ namespace Js class FunctionBody; }; -typedef JsUtil::List InlineeFrameMap; +typedef JsUtil::List, HeapAllocator> InlineeFrameMap; typedef JsUtil::List NativeLazyBailOutRecordList; class JitTransferData; @@ -69,6 +69,11 @@ class NativeEntryPointData void 
Cleanup(Js::ScriptContext * scriptContext, bool isShutdown, bool reset); void ClearTypeRefsAndGuards(Js::ScriptContext * scriptContext); + virtual uint32 GetLazyBailOutThunkOffset() = 0; + virtual int32 GetLazyBailOutRecordSlotOffset() = 0; + virtual void SetHasLazyBailOut(bool hasLazyBailOut) = 0; + virtual bool GetHasLazyBailOut() = 0; + #if PDATA_ENABLED XDataAllocation* GetXDataInfo() { return this->xdataInfo; } void CleanupXDataInfo(); @@ -154,16 +159,19 @@ class InProcNativeEntryPointData : public NativeEntryPointData void SetNativeCodeData(NativeCodeData * nativeCodeData); InlineeFrameMap * GetInlineeFrameMap(); - void RecordInlineeFrameMap(JsUtil::List* tempInlineeFrameMap); + void RecordInlineeFrameMap(JsUtil::List, ArenaAllocator>* tempInlineeFrameMap); NativeLazyBailOutRecordList * GetSortedLazyBailOutRecordList() const; void SetSortedLazyBailOutRecordList(JsUtil::List* sortedLazyBailOutRecordList); void SetLazyBailOutRecordSlotOffset(int32 argSlotOffset); - int32 GetLazyBailOutRecordSlotOffset() const; + int32 GetLazyBailOutRecordSlotOffset(); void SetLazyBailOutThunkOffset(uint32 thunkOffset); - uint32 GetLazyBailOutThunkOffset() const; + uint32 GetLazyBailOutThunkOffset(); + + void SetHasLazyBailOut(bool hasLazyBailOut); + bool GetHasLazyBailOut(); #if !FLOATVAR void SetNumberChunks(CodeGenNumberChunk* chunks) @@ -179,6 +187,7 @@ class InProcNativeEntryPointData : public NativeEntryPointData FieldNoBarrier(NativeLazyBailOutRecordList *) sortedLazyBailoutRecordList; FieldNoBarrier(int32) lazyBailOutRecordSlotOffset; FieldNoBarrier(uint32) lazyBailOutThunkOffset; + FieldNoBarrier(bool) hasLazyBailOut; #if !FLOATVAR Field(CodeGenNumberChunk*) numberChunks; #endif @@ -198,8 +207,22 @@ class OOPNativeEntryPointData : public NativeEntryPointData uint GetInlineeFrameOffsetArrayOffset(); uint GetInlineeFrameOffsetArrayCount(); + uint GetLazyBailOutRecordOffsetArrayOffset(); + uint GetLazyBailOutRecordOffsetArrayCount(); + uint32 GetLazyBailOutThunkOffset(); + 
int32 GetLazyBailOutRecordSlotOffset(); void RecordInlineeFrameOffsetsInfo(unsigned int offsetsArrayOffset, unsigned int offsetsArrayCount); + // TODO: parent class can virtualize these methods and share them with + InProcNativeEntryPointData's similarly named functions. + void RecordLazyBailOutRecordOffsetsInfo(unsigned int offsetsArrayOffset, unsigned int offsetsArrayCount); + void RecordLazyBailOutPropertiesInfo(unsigned int arrayOffset, unsigned int arrayCount); + void RecordLazyBailOutRecordSlotOffset(int lazyBailOutRecordSlotOffset); + void RecordLazyBailOutThunkOffset(uint lazyBailOutThunkOffset); + + void SetHasLazyBailOut(bool hasLazyBailOut); + bool GetHasLazyBailOut(); + #if !FLOATVAR void ProcessNumberPageSegments(Js::ScriptContext * scriptContext); void SetNumberPageSegment(XProcNumberPageSegment * segments) @@ -213,6 +236,13 @@ class OOPNativeEntryPointData : public NativeEntryPointData private: Field(uint) inlineeFrameOffsetArrayOffset; Field(uint) inlineeFrameOffsetArrayCount; + Field(uint) lazyBailOutRecordOffsetArrayOffset; + Field(uint) lazyBailOutRecordOffsetArrayCount; + Field(uint) lazyBailOutPropertiesArrayOffset; + Field(uint) lazyBailOutPropertiesArrayCount; + Field(uint) lazyBailOutThunkOffset; + Field(int) lazyBailOutRecordSlotOffset; + Field(bool) hasLazyBailOut; FieldNoBarrier(char *) nativeDataBuffer; #if !FLOATVAR diff --git a/lib/Backend/amd64/EncoderMD.cpp b/lib/Backend/amd64/EncoderMD.cpp index ac614a265cc..0a8ebc21ec3 100644 --- a/lib/Backend/amd64/EncoderMD.cpp +++ b/lib/Backend/amd64/EncoderMD.cpp @@ -1560,9 +1560,9 @@ EncoderMD::FixMaps(uint32 brOffset, uint32 bytesSaved, FixUpMapIndex *mapIndices { ArenaInlineeFrameMap *mapList = m_encoder->m_inlineeFrameMap; - for (i = mapIndices->inlineeFrameMapIndex; i < mapList->Count() && mapList->Item(i).offset <= brOffset; i++) + for (i = mapIndices->inlineeFrameMapIndex; i < mapList->Count() && mapList->Item(i).nativeAddressOffset <= brOffset; i++) { - mapList->Item(i).offset -=
bytesSaved; + mapList->Item(i).nativeAddressOffset -= bytesSaved; } mapIndices->inlineeFrameMapIndex = i; @@ -1579,10 +1579,10 @@ EncoderMD::FixMaps(uint32 brOffset, uint32 bytesSaved, FixUpMapIndex *mapIndices } { - ArenaLazyBailoutRecordList *lazyBailOutRecordList = m_encoder->m_sortedLazyBailoutRecordList; - for (i = mapIndices->lazyBailOutRecordListIndex; i < lazyBailOutRecordList->Count() && lazyBailOutRecordList->Item(i).offset <= brOffset; i++) + ArenaLazyBailoutRecordList *lazyBailOutRecordList = m_encoder->m_sortedLazyBailOutRecordList; + for (i = mapIndices->lazyBailOutRecordListIndex; i < lazyBailOutRecordList->Count() && lazyBailOutRecordList->Item(i).nativeAddressOffset <= brOffset; i++) { - lazyBailOutRecordList->Item(i).offset -= bytesSaved; + lazyBailOutRecordList->Item(i).nativeAddressOffset -= bytesSaved; } mapIndices->lazyBailOutRecordListIndex = i; diff --git a/lib/Backend/i386/EncoderMD.cpp b/lib/Backend/i386/EncoderMD.cpp index 8b5d3a4474d..738fcd01984 100644 --- a/lib/Backend/i386/EncoderMD.cpp +++ b/lib/Backend/i386/EncoderMD.cpp @@ -1391,9 +1391,9 @@ EncoderMD::FixMaps(uint32 brOffset, int32 bytesSaved, FixUpMapIndex *mapIndices) { ArenaInlineeFrameMap *mapList = m_encoder->m_inlineeFrameMap; - for (i = mapIndices->inlineeFrameMapIndex; i < mapList->Count() && mapList->Item(i).offset <= brOffset; i++) + for (i = mapIndices->inlineeFrameMapIndex; i < mapList->Count() && mapList->Item(i).nativeAddressOffset <= brOffset; i++) { - mapList->Item(i).offset -= bytesSaved; + mapList->Item(i).nativeAddressOffset -= bytesSaved; } mapIndices->inlineeFrameMapIndex = i; @@ -1411,10 +1411,10 @@ EncoderMD::FixMaps(uint32 brOffset, int32 bytesSaved, FixUpMapIndex *mapIndices) { - ArenaLazyBailoutRecordList *lazyBailOutRecordList = m_encoder->m_sortedLazyBailoutRecordList; - for (i = mapIndices->lazyBailOutRecordListIndex; i < lazyBailOutRecordList->Count() && lazyBailOutRecordList->Item(i).offset <= brOffset; i++) + ArenaLazyBailoutRecordList 
*lazyBailOutRecordList = m_encoder->m_sortedLazyBailOutRecordList; + for (i = mapIndices->lazyBailOutRecordListIndex; i < lazyBailOutRecordList->Count() && lazyBailOutRecordList->Item(i).nativeAddressOffset <= brOffset; i++) { - lazyBailOutRecordList->Item(i).offset -= bytesSaved; + lazyBailOutRecordList->Item(i).nativeAddressOffset -= bytesSaved; } mapIndices->lazyBailOutRecordListIndex = i; diff --git a/lib/Common/BackendApi.h b/lib/Common/BackendApi.h index 11b3b4abcd0..f2d1982d3a1 100644 --- a/lib/Common/BackendApi.h +++ b/lib/Common/BackendApi.h @@ -141,18 +141,16 @@ class BailOutRecord; struct LazyBailOutRecord { - uint32 offset; + // The offset from nativeAddress where the bailOutRecord exists. + uint32 nativeAddressOffset; BailOutRecord* bailOutRecord; - LazyBailOutRecord() : offset(0), bailOutRecord(nullptr) {} + LazyBailOutRecord() : nativeAddressOffset(0), bailOutRecord(nullptr) {} - LazyBailOutRecord(uint32 offset, BailOutRecord* record) : - offset(offset), bailOutRecord(record) + LazyBailOutRecord(uint32 nativeAddressOffset, BailOutRecord* bailOutRecord) : + nativeAddressOffset(nativeAddressOffset), + bailOutRecord(bailOutRecord) {} - -#if DBG - void Dump(Js::FunctionBody* functionBody) const; -#endif }; struct StackFrameConstants diff --git a/lib/JITIDL/JITTypes.h b/lib/JITIDL/JITTypes.h index ae2f63b12bf..227b803b893 100644 --- a/lib/JITIDL/JITTypes.h +++ b/lib/JITIDL/JITTypes.h @@ -875,6 +875,13 @@ typedef struct JITOutputIDL unsigned int throwMapCount; unsigned int inlineeFrameOffsetArrayOffset; unsigned int inlineeFrameOffsetArrayCount; + unsigned int lazyBailOutRecordOffsetArrayOffset; + unsigned int lazyBailOutRecordOffsetArrayCount; + unsigned int lazyBailOutPropertiesArrayOffset; + unsigned int lazyBailOutPropertiesArrayCount; + unsigned int lazyBailOutThunkOffset; + int lazyBailOutRecordSlotOffset; + boolean hasLazyBailOut; unsigned int propertyGuardCount; unsigned int ctorCachesCount; diff --git a/lib/Runtime/Base/FunctionBody.cpp 
b/lib/Runtime/Base/FunctionBody.cpp index 0d2f99efdf1..b78671be5be 100644 --- a/lib/Runtime/Base/FunctionBody.cpp +++ b/lib/Runtime/Base/FunctionBody.cpp @@ -8233,6 +8233,17 @@ namespace Js } #endif + NativeEntryPointData * EntryPointInfo::GetProcSpecificNativeEntryPoint() + { +#if ENABLE_OOP_NATIVE_CODEGEN + if (JITManager::GetJITManager()->IsOOPJITEnabled()) + { + return GetOOPNativeEntryPointData(); + } +#endif + return GetInProcNativeEntryPointData(); + } + void EntryPointInfo::EnsureIsReadyToCall() { ProcessJitTransferData(); @@ -8359,7 +8370,7 @@ namespace Js JitTransferData * jitTransferData = this->GetNativeEntryPointData()->GetJitTransferData(); Assert(jitTransferData != nullptr && jitTransferData->GetIsReady()); - for (int i = 0; i < jitTransferData->lazyBailoutPropertyCount; i++) + for (int i = 0; i < jitTransferData->lazyBailOutPropertyCount; i++) { Assert(jitTransferData->lazyBailoutProperties != nullptr); @@ -8380,7 +8391,6 @@ namespace Js } } - // in-proc JIT if (jitTransferData->equivalentTypeGuardCount > 0) { @@ -8647,7 +8657,7 @@ namespace Js { OOPNativeEntryPointData * oopNativeEntryPointData = this->GetOOPNativeEntryPointData(); char * nativeDataBuffer = oopNativeEntryPointData->GetNativeDataBuffer(); - NativeOffsetInlineeFrameRecordOffset* offsets = (NativeOffsetInlineeFrameRecordOffset*)(nativeDataBuffer + oopNativeEntryPointData->GetInlineeFrameOffsetArrayOffset()); + NativeOffsetToRecordOffset* offsets = (NativeOffsetToRecordOffset*)(nativeDataBuffer + oopNativeEntryPointData->GetInlineeFrameOffsetArrayOffset()); size_t offset = (size_t)((BYTE*)returnAddress - (BYTE*)this->GetNativeAddress()); uint inlineeFrameOffsetArrayCount = oopNativeEntryPointData->GetInlineeFrameOffsetArrayCount(); @@ -8663,11 +8673,11 @@ namespace Js uint midIndex = fromIndex + (toIndex - fromIndex) / 2; auto item = offsets[midIndex]; - if (item.offset >= offset) + if (item.nativeAddressOffset >= offset) { - if (midIndex == 0 || (midIndex > 0 && offsets[midIndex - 
1].offset < offset)) + if (midIndex == 0 || (midIndex > 0 && offsets[midIndex - 1].nativeAddressOffset < offset)) { - if (offsets[midIndex].recordOffset == NativeOffsetInlineeFrameRecordOffset::InvalidRecordOffset) + if (offsets[midIndex].recordOffset == NativeOffsetToRecordOffset::InvalidRecordOffset) { return nullptr; } @@ -8699,10 +8709,10 @@ namespace Js } size_t offset = (size_t)((BYTE*)returnAddress - (BYTE*)this->GetNativeAddress()); - int index = inlineeFrameMap->BinarySearch([=](const NativeOffsetInlineeFramePair& pair, int index) { - if (pair.offset >= offset) + int index = inlineeFrameMap->BinarySearch([=](const NativeOffsetRecordPair& pair, int index) { + if (pair.nativeAddressOffset >= offset) { - if (index == 0 || (index > 0 && inlineeFrameMap->Item(index - 1).offset < offset)) + if (index == 0 || (index > 0 && inlineeFrameMap->Item(index - 1).nativeAddressOffset < offset)) { return 0; } @@ -8723,69 +8733,126 @@ namespace Js } - void EntryPointInfo::DoLazyBailout( - BYTE **addressOfInstructionPointer, - BYTE *framePointer -#if DBG - , Js::FunctionBody *functionBody - , const PropertyRecord *propertyRecord -#endif - ) + // Does not return a LazyBailOutRecord but instead returns a BailOutRecord + // with bailOutRecord->bailOutKind containing IR::LazyBailOut. 
+ BailOutRecord* EntryPointInfo::FindLazyBailOutRecord(size_t instructionPointerOffset) { - BYTE* instructionPointer = *addressOfInstructionPointer; - NativeEntryPointData * nativeEntryPointData = this->GetNativeEntryPointData(); - Js::JavascriptMethod nativeAddress = nativeEntryPointData->GetNativeAddress(); - ptrdiff_t codeSize = nativeEntryPointData->GetCodeSize(); - Assert(instructionPointer > (BYTE*)nativeAddress && instructionPointer < ((BYTE*)nativeAddress + codeSize)); - size_t offset = instructionPointer - (BYTE*)nativeAddress; - NativeLazyBailOutRecordList * bailOutRecordList = this->GetInProcNativeEntryPointData()->GetSortedLazyBailOutRecordList(); +#if ENABLE_OOP_NATIVE_CODEGEN + if (JITManager::GetJITManager()->IsOOPJITEnabled()) + { + // OOPJIT obtains a bailOutRecord that has been stored in the nativeDataBuffer. + + OOPNativeEntryPointData* oopNativeEntryPointData = GetOOPNativeEntryPointData(); + char * nativeDataBuffer = oopNativeEntryPointData->GetNativeDataBuffer(); - AssertMsg(bailOutRecordList != nullptr, "Lazy Bailout: bailOutRecordList is missing"); + // LazyBailOutRecordOffsetArray is stored at the offset LazyBailOutRecordOffsetArrayOffset in nativeDataBuffer. + NativeOffsetToRecordOffset* lazyBailOutRecordOffsetArray = + (NativeOffsetToRecordOffset*)(nativeDataBuffer + oopNativeEntryPointData->GetLazyBailOutRecordOffsetArrayOffset()); - int found = bailOutRecordList->BinarySearch([=](const LazyBailOutRecord& record, int index) - { - if (record.offset == offset) + uint lazyBailOutRecordOffsetArrayCount = oopNativeEntryPointData->GetLazyBailOutRecordOffsetArrayCount(); + if (lazyBailOutRecordOffsetArrayCount > 0) { - return 0; + // Binary search for the bailOutRecordOffset that corresponds to the instructionPointerOffset. This process + // is similar to finding the bailOutRecord seen in the inProcJIT version of FindLazyBailOutRecord. 
+ uint fromIndex = 0; + uint toIndex = lazyBailOutRecordOffsetArrayCount - 1; + while (fromIndex <= toIndex) + { + uint midIndex = fromIndex + (toIndex - fromIndex) / 2; + NativeOffsetToRecordOffset lazyBailOutRecordOffset = lazyBailOutRecordOffsetArray[midIndex]; + + // lazyBailOutRecordOffset.offset is the offset from the nativeAddress. + if (lazyBailOutRecordOffset.nativeAddressOffset == instructionPointerOffset) + { + uint32 bailOutRecordOffset = lazyBailOutRecordOffsetArray[midIndex].recordOffset; + if (bailOutRecordOffset == NativeOffsetToRecordOffset::InvalidRecordOffset) + { + break; + } + else + { + return (BailOutRecord*)(nativeDataBuffer + bailOutRecordOffset); + } + } + + lazyBailOutRecordOffset.nativeAddressOffset > instructionPointerOffset ? toIndex = midIndex - 1 : fromIndex = midIndex + 1; + } } - else if (record.offset > offset) + } + else +#endif + // inProcJIT + { + // lazyBailOutRecordList holds LazyBailOutRecords. LazyBailOutRecords are + // not BailOutRecords, instead LazyBailOutRecords hold BailOutRecord. + NativeLazyBailOutRecordList* sortedLazyBailOutRecordList = GetInProcNativeEntryPointData()->GetSortedLazyBailOutRecordList(); + Assert(sortedLazyBailOutRecordList); + + // Find the LazyBailOutRecord whose address matches the instr pointer. + int foundLazyBailOutRecordIndex = sortedLazyBailOutRecordList->BinarySearch([=](const LazyBailOutRecord& lazyBailOutRecord, int index) { - return 1; - } - else + { + if (lazyBailOutRecord.nativeAddressOffset == instructionPointerOffset) + { + // LazyBailOutRecord found, populate foundLazyBailOutRecordIndex with index. + return 0; + } + + // Search left or right depending on whether the instr pointer + // is less or greater than the current lazyBailOutRecord. + return lazyBailOutRecord.nativeAddressOffset > instructionPointerOffset ?
1 : -1; + }); + + if (foundLazyBailOutRecordIndex != -1) { - return -1; + return sortedLazyBailOutRecordList->Item(foundLazyBailOutRecordIndex).bailOutRecord; } - }); + } - if (found != -1) - { - auto inProcNativeEntryPointData = this->GetInProcNativeEntryPointData(); - const LazyBailOutRecord& record = bailOutRecordList->Item(found); - const uint32 lazyBailOutThunkOffset = inProcNativeEntryPointData->GetLazyBailOutThunkOffset(); - BYTE * const lazyBailOutThunkAddress = (BYTE *) nativeAddress + lazyBailOutThunkOffset; + // TODO: save info here for later check + return nullptr; + } - // Change the instruction pointer of the frame to our thunk so that - // when execution returns back to this frame, we will execute the thunk instead - *addressOfInstructionPointer = lazyBailOutThunkAddress; + // Convert this function's return address to point to this function's LazyBailOutThunk. Also put + // a BailOutRecord on the stack for the thunk to use. Returns true if successful return address conversion. + bool EntryPointInfo::ConvertFuncRetAddrToLazyBailOutThunk(BYTE **addressOfInstructionPointer, BYTE *stackFramePointer) + { + NativeEntryPointData* nativeEntryPointData = GetProcSpecificNativeEntryPoint(); - // Put the BailOutRecord corresponding to our LazyBailOut point on the pre-allocated slot on the stack - BYTE *addressOfLazyBailOutRecordSlot = framePointer + inProcNativeEntryPointData->GetLazyBailOutRecordSlotOffset(); - *(reinterpret_cast(addressOfLazyBailOutRecordSlot)) = reinterpret_cast(record.bailOutRecord); - - if (PHASE_TRACE1(Js::LazyBailoutPhase)) - { -#if DBG - Output::Print(_u("On stack lazy bailout. Property: %s Old IP: 0x%x New IP: 0x%x "), propertyRecord->GetBuffer(), instructionPointer, lazyBailOutThunkAddress); - record.Dump(functionBody); - Output::Print(_u("\n")); -#endif - } + // This frame does not have a LazyBailOut; no need to jmp to the thunk, this frame can return normally.
+ if (!nativeEntryPointData->GetHasLazyBailOut()) + { + return false; } - else + + BYTE* instructionPointer = *addressOfInstructionPointer; + Js::JavascriptMethod nativeAddress = nativeEntryPointData->GetNativeAddress(); + + // instructionPointer should be pointing to an address in this function's native code. + Assert((BYTE*)nativeAddress < instructionPointer && instructionPointer < ((BYTE*)nativeAddress + nativeEntryPointData->GetCodeSize())); + + size_t instructionPointerOffset = instructionPointer - (BYTE*)nativeAddress; + + BailOutRecord* bailOutRecord = FindLazyBailOutRecord(instructionPointerOffset); + + if (bailOutRecord == nullptr) { - AssertMsg(false, "Lazy Bailout: Address mapping missing"); + return false; } + + // Change the instruction pointer of the frame to our thunk so that when + // execution returns back to this frame, we will execute the thunk instead. + *addressOfInstructionPointer = (BYTE*)nativeAddress + nativeEntryPointData->GetLazyBailOutThunkOffset(); + +#if DBG + instructionPointer = *addressOfInstructionPointer; + Assert((BYTE*)nativeAddress < instructionPointer && instructionPointer < ((BYTE*)nativeAddress + nativeEntryPointData->GetCodeSize())); +#endif + + // Put the BailOutRecord corresponding to our LazyBailOut point on the pre-allocated slot on the stack. 
+ BYTE *addressOfLazyBailOutRecordSlot = stackFramePointer + nativeEntryPointData->GetLazyBailOutRecordSlotOffset(); + *(reinterpret_cast<intptr_t *>(addressOfLazyBailOutRecordSlot)) = reinterpret_cast<intptr_t>(bailOutRecord); + + return true; } void EntryPointInfo::FreeJitTransferData() @@ -9063,7 +9130,8 @@ namespace Js callsCount(0), jitMode(ExecutionMode::Interpreter), functionProxy(functionProxy), - nextEntryPoint(nullptr) + nextEntryPoint(nullptr), + retAddrNotModified(false) { } diff --git a/lib/Runtime/Base/FunctionBody.h b/lib/Runtime/Base/FunctionBody.h index 3955086fefb..a6f142c2c99 100644 --- a/lib/Runtime/Base/FunctionBody.h +++ b/lib/Runtime/Base/FunctionBody.h @@ -305,6 +305,8 @@ namespace Js #if ENABLE_OOP_NATIVE_CODEGEN OOPNativeEntryPointData * GetOOPNativeEntryPointData(); #endif + NativeEntryPointData * GetProcSpecificNativeEntryPoint(); + #endif protected: @@ -577,13 +579,11 @@ namespace Js virtual void Invalidate(bool prolongEntryPoint) { Assert(false); } InlineeFrameRecord* FindInlineeFrame(void* returnAddress); + BailOutRecord* FindLazyBailOutRecord(size_t codeOffset); bool HasInlinees(); -#if DBG - void DoLazyBailout(BYTE **addressOfInstructionPointer, BYTE *framePointer, Js::FunctionBody *functionBody, const PropertyRecord *propertyRecord); -#else - void DoLazyBailout(BYTE **addressOfInstructionPointer, BYTE *framePointer); -#endif + // Unabbreviated: Convert this function's return address to the function's LazyBailOut thunk. 
+ bool ConvertFuncRetAddrToLazyBailOutThunk(BYTE **addressOfInstructionPointer, BYTE *framePointer); void CleanupNativeCode(ScriptContext * scriptContext); #if DBG_DUMP @@ -622,6 +622,7 @@ namespace Js Field(uint32) callsCount; Field(uint32) lastCallsCount; + Field(bool) retAddrNotModified; private: Field(ExecutionMode) jitMode; diff --git a/lib/Runtime/Base/ScriptContext.cpp b/lib/Runtime/Base/ScriptContext.cpp index 95e2c8cefa3..4cbdc3f4aa3 100644 --- a/lib/Runtime/Base/ScriptContext.cpp +++ b/lib/Runtime/Base/ScriptContext.cpp @@ -221,6 +221,7 @@ namespace Js , bailOutOffsetBytes(0) #endif , emptyStringPropertyId(Js::PropertyIds::_none) + , isDeferredTypeInit(false) { #ifdef ENABLE_SCRIPT_DEBUGGING // This may allocate memory and cause exception, but it is ok, as we all we have done so far @@ -629,6 +630,9 @@ namespace Js isScriptContextActuallyClosed = true; this->GetThreadContext()->closedScriptContextCount++; + // Should only be enabled during certain sections of Runtime's execution. + Assert(!this->IsDeferredTypeInit()); + PERF_COUNTER_DEC(Basic, ScriptContextActive); #if DBG_DUMP @@ -5004,9 +5008,14 @@ namespace Js void ScriptContext::InvalidateProtoCaches(const PropertyId propertyId) { + if (this->IsDeferredTypeInit()) + { + return; + } + threadContext->InvalidateProtoInlineCaches(propertyId); - // Because setter inline caches get registered in the store field chain, we must invalidate that - // chain whenever we invalidate the proto chain. + // Because setter inline caches get registered in the store field chain, + // we must invalidate that chain whenever we invalidate the proto chain. 
threadContext->InvalidateStoreFieldInlineCaches(propertyId); #if ENABLE_NATIVE_CODEGEN threadContext->InvalidatePropertyGuards(propertyId); diff --git a/lib/Runtime/Base/ScriptContext.h b/lib/Runtime/Base/ScriptContext.h index 7c7534e1612..89694d0282f 100644 --- a/lib/Runtime/Base/ScriptContext.h +++ b/lib/Runtime/Base/ScriptContext.h @@ -584,6 +584,10 @@ namespace Js NoSpecialPropertyScriptRegistry* GetNoSpecialPropertyRegistry() { return &this->noSpecialPropertyRegistry; } OnlyWritablePropertyScriptRegistry* GetOnlyWritablePropertyRegistry() { return &this->onlyWritablePropertyRegistry; } + + bool IsDeferredTypeInit() const { return this->isDeferredTypeInit; } + void SetIsDeferredTypeInit(bool isDeferredTypeInit) { this->isDeferredTypeInit = isDeferredTypeInit; } + private: JavascriptFunction* GenerateRootFunction(ParseNodeProg * parseTree, uint sourceIndex, Parser* parser, uint32 grfscr, CompileScriptException * pse, const char16 *rootDisplayName); @@ -592,6 +596,9 @@ namespace Js NoSpecialPropertyScriptRegistry noSpecialPropertyRegistry; OnlyWritablePropertyScriptRegistry onlyWritablePropertyRegistry; + // Unabbreviated: is this ScriptContext currently performing a deferred type initialization? 
+ bool isDeferredTypeInit; + ArenaAllocator generalAllocator; #ifdef ENABLE_BASIC_TELEMETRY ArenaAllocator telemetryAllocator; diff --git a/lib/Runtime/Base/ThreadContext.cpp b/lib/Runtime/Base/ThreadContext.cpp index e4e9a663bb4..d74649193a2 100644 --- a/lib/Runtime/Base/ThreadContext.cpp +++ b/lib/Runtime/Base/ThreadContext.cpp @@ -3570,15 +3570,20 @@ ThreadContext::RegisterSharedPropertyGuard(Js::PropertyId propertyId) void ThreadContext::RegisterLazyBailout(Js::PropertyId propertyId, Js::EntryPointInfo* entryPoint) { + Assert(entryPoint); + const Js::PropertyRecord * propertyRecord = GetPropertyName(propertyId); bool foundExistingGuard; - PropertyGuardEntry* entry = EnsurePropertyGuardEntry(propertyRecord, foundExistingGuard); - if (!entry->entryPoints) + PropertyGuardEntry* propertyGuardEntry = EnsurePropertyGuardEntry(propertyRecord, foundExistingGuard); + if (!propertyGuardEntry->lazyBailOutEntryPoints) { - entry->entryPoints = RecyclerNew(recycler, PropertyGuardEntry::EntryPointDictionary, recycler, /*capacity*/ 3); + propertyGuardEntry->lazyBailOutEntryPoints = RecyclerNew(recycler, PropertyGuardEntry::EntryPointDictionary, recycler, /*capacity*/ 3); } - entry->entryPoints->UncheckedAdd(entryPoint, NULL); + + // We use lazyBailOutEntryPoints' EntryPointDictionary functionality not as a value lookup but + // rather as a means to verify that an entryPoint for the given propertyGuardEntry exists. 
+ propertyGuardEntry->lazyBailOutEntryPoints->UncheckedAdd(entryPoint, NULL); } void @@ -3633,13 +3638,17 @@ ThreadContext::RegisterConstructorCache(Js::PropertyId propertyId, Js::Construct } void -ThreadContext::InvalidatePropertyGuardEntry(const Js::PropertyRecord* propertyRecord, PropertyGuardEntry* entry, bool isAllPropertyGuardsInvalidation) +ThreadContext::InvalidatePropertyGuardEntry( + const Js::PropertyRecord* propertyRecord, + PropertyGuardEntry* propertyGuardEntry, + bool isAllPropertyGuardsInvalidation +) { - Assert(entry != nullptr); + Assert(propertyGuardEntry != nullptr); - if (entry->sharedGuard != nullptr) + if (propertyGuardEntry->sharedGuard != nullptr) { - Js::PropertyGuard* guard = entry->sharedGuard; + Js::PropertyGuard* guard = propertyGuardEntry->sharedGuard; if (PHASE_TRACE1(Js::TracePropertyGuardsPhase) || PHASE_VERBOSE_TRACE1(Js::FixedMethodsPhase)) { @@ -3658,7 +3667,7 @@ ThreadContext::InvalidatePropertyGuardEntry(const Js::PropertyRecord* propertyRe } uint count = 0; - entry->uniqueGuards.Map([&count, propertyRecord](RecyclerWeakReference* guardWeakRef) + propertyGuardEntry->uniqueGuards.Map([&count, propertyRecord](RecyclerWeakReference* guardWeakRef) { Js::PropertyGuard* guard = guardWeakRef->Get(); if (guard != nullptr) @@ -3682,56 +3691,92 @@ ThreadContext::InvalidatePropertyGuardEntry(const Js::PropertyRecord* propertyRe } }); - entry->uniqueGuards.Clear(); + propertyGuardEntry->uniqueGuards.Clear(); - - // Count no. of invalidations done so far. Exclude if this is all property guards invalidation in which case - // the unique Guards will be cleared anyway. + // Count no. of invalidations done so far. Exclude if this is all property + // guards invalidation in which case the unique Guards will be cleared anyway. 
if (!isAllPropertyGuardsInvalidation) { this->recyclableData->constructorCacheInvalidationCount += count; if (this->recyclableData->constructorCacheInvalidationCount > (uint)CONFIG_FLAG(ConstructorCacheInvalidationThreshold)) { - // TODO: In future, we should compact the uniqueGuards dictionary so this function can be called from PreCollectionCallback - // instead + // TODO: In future, we should compact the uniqueGuards dictionary so this + // function can be called from PreCollectionCallback instead this->ClearInvalidatedUniqueGuards(); this->recyclableData->constructorCacheInvalidationCount = 0; } } - if (entry->entryPoints && entry->entryPoints->Count() > 0) + if (propertyGuardEntry->lazyBailOutEntryPoints && propertyGuardEntry->lazyBailOutEntryPoints->Count() > 0) { + Assert(propertyGuardEntry->lazyBailOutEntryPoints); + Assert(propertyGuardEntry->lazyBailOutEntryPoints->Count() > 0); + Js::JavascriptStackWalker stackWalker(this->GetScriptContextList()); Js::JavascriptFunction* caller = nullptr; - while (stackWalker.GetCaller(&caller, /*includeInlineFrames*/ false)) + while (stackWalker.GetCaller(&caller, false)) { // If the current frame is already from a bailout - we do not need to do on stack invalidation if (caller != nullptr && Js::ScriptFunction::Test(caller) && !stackWalker.GetCurrentFrameFromBailout()) { - BYTE dummy; + // Given the current stackframe (currentFunc), test if this stackframe was one of the possibly many stackframes + // that had generated the LazyBailOut property corresponding to propertyGuardEntry during codegen. Js::FunctionEntryPointInfo* functionEntryPoint = caller->GetFunctionBody()->GetDefaultFunctionEntryPointInfo(); + Assert(functionEntryPoint); + if (functionEntryPoint->IsInNativeAddressRange((DWORD_PTR)stackWalker.GetInstructionPointer())) { - if (entry->entryPoints->TryGetValue(functionEntryPoint, &dummy)) + // Each entryPoint stored into entryPoints has to be stored as an EntryPointDictionary. 
But we only + need to verify that an entryPoint exists in entryPoints; each pair in entryPoints is stored as + key: functionEntryPoint, value: doesnt_matter. To verify an entryPoint exists in entryPoints, we + use the return code of TryGetValue and ignore the captured value corresponding to the key. + BYTE unusedCapturedValue; + if (propertyGuardEntry->lazyBailOutEntryPoints->TryGetValue(functionEntryPoint, &unusedCapturedValue)) { - functionEntryPoint->DoLazyBailout( + // TODO: ConvertFuncRetAddrToLazyBailOutThunk returns false when a LazyBailOutRecord cannot be found at + // the current instr pointer's native address. This is expected in certain scenarios such as when + // a LazyBailOut tag is removed (and thus a corresponding LazyBailOutRecord is never created) due + // to an optimization that assumes another BailOutTag on the same instr will execute before the + // LazyBailOut ever would. There could be more scenarios where a LazyBailOutRecord is allowed to not + // exist on this instr, but there could be scenarios where a LazyBailOutRecord should have existed + // on this instr but for some reason was never put on this instr. To catch these bad scenarios, we + // should set a flag on functionEntryPoint->scriptContext when ConvertFuncRetAddrToLazyBailOutThunk + // returns false. Then, when an expected scenario completes, we should unset that flag. Once + // ScriptContext closes, we should assert that the flag is off. 
+ bool didConvertRetAddrToThunk = functionEntryPoint->ConvertFuncRetAddrToLazyBailOutThunk( stackWalker.GetCurrentAddressOfInstructionPointer(), static_cast<BYTE *>(stackWalker.GetFramePointer()) -#if DBG - , caller->GetFunctionBody() - , propertyRecord -#endif ); + + if (!didConvertRetAddrToThunk) + { + functionEntryPoint->retAddrNotModified = true; + } } } } } - entry->entryPoints->Map([=](Js::EntryPointInfo* info, BYTE& dummy, const RecyclerWeakReference<Js::EntryPointInfo>* infoWeakRef) + } + + if (propertyGuardEntry->lazyBailOutEntryPoints && propertyGuardEntry->lazyBailOutEntryPoints->Count() > 0) + { + propertyGuardEntry->lazyBailOutEntryPoints->Map([=](Js::EntryPointInfo* lazyBailOutEntryPoint, BYTE& dummy, const RecyclerWeakReference<Js::EntryPointInfo>* infoWeakRef) { - OUTPUT_TRACE2(Js::LazyBailoutPhase, info->GetFunctionBody(), _u("Lazy bailout - Invalidation due to property: %s \n"), propertyRecord->GetBuffer()); - info->Invalidate(true); + if (lazyBailOutEntryPoint->IsCleanedUp()) + { + return; + } + OUTPUT_TRACE2(Js::LazyBailoutPhase, lazyBailOutEntryPoint->GetFunctionBody(), _u("Lazy bailout - Invalidation due to property: %s \n"), propertyRecord->GetBuffer()); + + if (!((Js::FunctionEntryPointInfo*)lazyBailOutEntryPoint)->retAddrNotModified) + { + lazyBailOutEntryPoint->Invalidate(true); + ((Js::FunctionEntryPointInfo*)lazyBailOutEntryPoint)->retAddrNotModified = false; + } + }); - entry->entryPoints->Clear(); + + propertyGuardEntry->lazyBailOutEntryPoints->Clear(); } } diff --git a/lib/Runtime/Base/ThreadContext.h b/lib/Runtime/Base/ThreadContext.h index b11e9e0fe10..0d076eea7f7 100644 --- a/lib/Runtime/Base/ThreadContext.h +++ b/lib/Runtime/Base/ThreadContext.h @@ -478,9 +478,9 @@ class ThreadContext sealed : // invalidated. The entry of a unique guard is removed from the table once the corresponding cache is invalidated. 
Field(Js::PropertyGuard*) sharedGuard; Field(PropertyGuardHashSet) uniqueGuards; - Field(EntryPointDictionary*) entryPoints; + Field(EntryPointDictionary*) lazyBailOutEntryPoints; - PropertyGuardEntry(Recycler* recycler) : sharedGuard(nullptr), uniqueGuards(recycler), entryPoints(nullptr) {} + PropertyGuardEntry(Recycler* recycler) : sharedGuard(nullptr), uniqueGuards(recycler), lazyBailOutEntryPoints(nullptr) {} }; public: diff --git a/lib/Runtime/Types/DeferredTypeHandler.h b/lib/Runtime/Types/DeferredTypeHandler.h index 445841de86f..e9a0a155ea4 100644 --- a/lib/Runtime/Types/DeferredTypeHandler.h +++ b/lib/Runtime/Types/DeferredTypeHandler.h @@ -259,6 +259,35 @@ namespace Js bool DeferredTypeHandler::EnsureObjectReady(DynamicObject* instance, DeferredInitializeMode mode) { Assert(initializer == m_initializer); + ScriptContext* scriptContext = instance->GetScriptContext(); + + // Set the ScriptContext's IsDeferredTypeInitialization to true as DeferredTypeInitialization is about + // to occur. This must be done using an instantiated struct on the stack so that once this stack frame + // is killed, IsDeferredTypeInit is restored to its previous value (using AutoRestoreIsDeferredTypeInit's + // destructor). This is necessary as this function may not be returned to due to a potential exception. + struct AutoRestoreIsDeferredTypeInit + { + bool oldIsDeferredTypeInit; + ScriptContext* scriptContext; + AutoRestoreIsDeferredTypeInit(bool oldIsDeferredTypeInit, ScriptContext* scriptContext) : + oldIsDeferredTypeInit(oldIsDeferredTypeInit), + scriptContext(scriptContext) + { + scriptContext->SetIsDeferredTypeInit(true); + } + ~AutoRestoreIsDeferredTypeInit() + { + // We keep track of the ScriptContext's previous IsDeferredTypeInit (as opposed to setting + // IsDeferredTypeInit to false) in order to protect against nested deferred type initializations. 
+ scriptContext->SetIsDeferredTypeInit(oldIsDeferredTypeInit); + } + }; + + AutoRestoreIsDeferredTypeInit autoRestoreIsDeferredTypeInit( + scriptContext->IsDeferredTypeInit(), + scriptContext + ); + return m_initializer(instance, this, mode); } diff --git a/lib/Runtime/Types/SimpleDictionaryTypeHandler.cpp b/lib/Runtime/Types/SimpleDictionaryTypeHandler.cpp index 4130b7da5dc..e8f0f9ca04b 100644 --- a/lib/Runtime/Types/SimpleDictionaryTypeHandler.cpp +++ b/lib/Runtime/Types/SimpleDictionaryTypeHandler.cpp @@ -2780,7 +2780,7 @@ namespace Js // We don't evolve dictionary types when adding a field, so we need to invalidate prototype caches. // We only have to do this though if the current type is used as a prototype, or the current property // is found on the prototype chain. - scriptContext->InvalidateProtoCaches(propertyId); + scriptContext->InvalidateProtoCaches(propertyId/*, possibleSideEffects*/); } SetPropertyUpdateSideEffect(instance, propertyId, value, possibleSideEffects); } diff --git a/lib/Runtime/Types/TypeHandler.h b/lib/Runtime/Types/TypeHandler.h index 0feb677f075..9bc32eb349e 100644 --- a/lib/Runtime/Types/TypeHandler.h +++ b/lib/Runtime/Types/TypeHandler.h @@ -314,7 +314,7 @@ namespace Js /* Returns a value indicating whether the current type handler is locked. - Given below is the list of all the actions where different type handlers explicitly check for the type handler being locked. In most cases, when a type is evolving, + Given below is the list of all the actions where different type handlers explicitly check for the type handler being locked. In most cases, when a type is evolving, if the type handler is locked a new type handler is created and all existing properties are copied over before evolving the type. 1. 
SimpleTypeHandler (i) SetPropertyWithAttributes diff --git a/test/FixedFields/rlexe.xml b/test/FixedFields/rlexe.xml index cb3ea379129..8b682d84ce6 100644 --- a/test/FixedFields/rlexe.xml +++ b/test/FixedFields/rlexe.xml @@ -1,5 +1,12 @@ + + + fixedDataWithSubsequentUses.js + fixedDataWithSubsequentUses.baseline + -maxinterpretcount:1 -MaxSimpleJitRunCount:0 -bgjit- + + NonFixedFieldHoist.js @@ -78,13 +85,6 @@ -maxinterpretcount:1 -MaxSimpleJitRunCount:0 -bgjit- - - - fixedDataWithSubsequentUses.js - fixedDataWithSubsequentUses.baseline - -maxinterpretcount:1 -MaxSimpleJitRunCount:0 -bgjit- - - fixedDataWithCacheSharing.js diff --git a/test/bailout/LazyBailout_0.js b/test/bailout/LazyBailout_0.js new file mode 100644 index 00000000000..c965d914ff7 --- /dev/null +++ b/test/bailout/LazyBailout_0.js @@ -0,0 +1,29 @@ +//------------------------------------------------------------------------------------------------------- +// Copyright (C) Microsoft. All rights reserved. +// Licensed under the MIT license. See LICENSE.txt file in the project root for full license information. 
+//------------------------------------------------------------------------------------------------------- + +var a = []; +a.reduce(function () {}, 0); +a.reduce(function () {}, 0); + +// Same error but does not use built ins: +var h = function(a){ + return a; +} + +var g = function (a) { + if (h(a)) { + return { prop0: a, prop1: a.length }; + } +} + +var f = function (a) { + let {prop0, prop1} = g(a); +} + +var b = []; +f(b); +f(b); + +print("Pass") \ No newline at end of file diff --git a/test/bailout/LazyBailout_1.baseline b/test/bailout/LazyBailout_1.baseline new file mode 100644 index 00000000000..49e902479ad --- /dev/null +++ b/test/bailout/LazyBailout_1.baseline @@ -0,0 +1,4 @@ +1 +1 +2 +2 diff --git a/test/bailout/LazyBailout_1.js b/test/bailout/LazyBailout_1.js new file mode 100644 index 00000000000..e6e9596e6d1 --- /dev/null +++ b/test/bailout/LazyBailout_1.js @@ -0,0 +1,28 @@ +//------------------------------------------------------------------------------------------------------- +// Copyright (C) Microsoft. All rights reserved. +// Licensed under the MIT license. See LICENSE.txt file in the project root for full license information. 
+//------------------------------------------------------------------------------------------------------- + +var o = { x: 1 }; +var run = false; + +function invalidate() { + // guard invalidation starts, stack walk happens + o.x = 2; +} + +function test(o) +{ + if (run) + { + invalidate(); + } + + var a = o.x; + print(a); + return o.x; +} + +print(test(o)); +run = true; +print(test(o)); diff --git a/test/bailout/rlexe.xml b/test/bailout/rlexe.xml index ef2e21423f7..0bbe2b82538 100644 --- a/test/bailout/rlexe.xml +++ b/test/bailout/rlexe.xml @@ -275,4 +275,36 @@ bug17449647.js + + + LazyBailout_0.js + -maxinterpretcount:1 -off:simplejit -off:bailonnoprofile -force:fixdataprops -oopjit- + + + + + LazyBailout_0.js + -maxinterpretcount:1 -off:simplejit -off:bailonnoprofile -force:fixdataprops + + + + + LazyBailout_0.js + -maxinterpretcount:1 -off:simplejit -off:bailonnoprofile -force:fixdataprops -off:bailonnoprofile -on:LazyFixedTypeBailout + + + + + LazyBailout_1.js + LazyBailout_1.baseline + -maxinterpretcount:1 -off:simplejit -off:bailonnoprofile -force:fixdataprops -oopjit- -off:bailonnoprofile -on:LazyFixedTypeBailout + + + + + LazyBailout_1.js + LazyBailout_1.baseline + -mic:1 -bgjit- -off:simplejit -off:bailonnoprofile -force:fixdataprops -oopjit- -on:lazybailout -on:LazyFixedTypeBailout + +