diff --git a/Build/NuGet/.pack-version b/Build/NuGet/.pack-version index 4dae2985b58..5ad2491cf88 100644 --- a/Build/NuGet/.pack-version +++ b/Build/NuGet/.pack-version @@ -1 +1 @@ -1.10.1 +1.10.2 diff --git a/lib/Backend/BackwardPass.cpp b/lib/Backend/BackwardPass.cpp index bd0767069e5..8ae021bd517 100644 --- a/lib/Backend/BackwardPass.cpp +++ b/lib/Backend/BackwardPass.cpp @@ -2245,6 +2245,11 @@ BackwardPass::DeadStoreTypeCheckBailOut(IR::Instr * instr) IR::PropertySymOpnd *propertySymOpnd = (instr->GetDst() && instr->GetDst()->IsSymOpnd()) ? instr->GetDst()->AsPropertySymOpnd() : instr->GetSrc1()->AsPropertySymOpnd(); + if (propertySymOpnd->TypeCheckRequired()) + { + return; + } + bool isTypeCheckProtected = false; IR::BailOutKind bailOutKind; if (GlobOpt::NeedsTypeCheckBailOut(instr, propertySymOpnd, propertySymOpnd == instr->GetDst(), &isTypeCheckProtected, &bailOutKind)) @@ -4902,8 +4907,8 @@ BackwardPass::UpdateArrayBailOutKind(IR::Instr *const instr) } IR::BailOutKind includeBailOutKinds = IR::BailOutInvalid; - if(!baseValueType.IsNotNativeArray() && - (!baseValueType.IsLikelyNativeArray() || !instr->GetSrc1()->IsInt32()) && + if (!baseValueType.IsNotNativeArray() && + (!baseValueType.IsLikelyNativeArray() || instr->GetSrc1()->IsVar()) && !currentBlock->noImplicitCallNativeArrayUses->IsEmpty() && !(instr->GetBailOutKind() & IR::BailOutOnArrayAccessHelperCall)) { diff --git a/lib/Backend/BailOut.cpp b/lib/Backend/BailOut.cpp index 3c8278a3579..28475844ec5 100644 --- a/lib/Backend/BailOut.cpp +++ b/lib/Backend/BailOut.cpp @@ -506,6 +506,13 @@ uint32 BailOutRecord::GetArgumentsObjectOffset() return argumentsObjectOffset; } +Js::FunctionEntryPointInfo *BailOutRecord::GetFunctionEntryPointInfo() const +{ + Js::EntryPointInfo* result = this->globalBailOutRecordTable->entryPointInfo; + AssertOrFailFast(result->IsFunctionEntryPointInfo()); + return (Js::FunctionEntryPointInfo*)result; +} + Js::Var BailOutRecord::EnsureArguments(Js::InterpreterStackFrame * 
newInstance, Js::JavascriptCallStackLayout * layout, Js::ScriptContext* scriptContext, Js::Var* pArgumentsObject) const { Assert(globalBailOutRecordTable->hasStackArgOpt); @@ -1706,7 +1713,27 @@ void BailOutRecord::ScheduleFunctionCodeGen(Js::ScriptFunction * function, Js::S BailOutRecord * bailOutRecordNotConst = (BailOutRecord *)(void *)bailOutRecord; bailOutRecordNotConst->bailOutCount++; - Js::FunctionEntryPointInfo *entryPointInfo = function->GetFunctionEntryPointInfo(); + Js::FunctionEntryPointInfo *entryPointInfo = bailOutRecord->GetFunctionEntryPointInfo(); + +#if DBG + // BailOutRecord is not recycler-allocated, so make sure something the recycler can see was keeping the entry point info alive. + // We expect the entry point to be kept alive as follows: + // 1. The function's current type might still have the same entry point info as when we entered the function (easy case) + // 2. The function might have moved to a successor path type, which still keeps the previous type and its entry point info alive + // 3. The entry point info might be held by the ThreadContext (QueueFreeOldEntryPointInfoIfInScript): + // a. If the entry point info was replaced on the type that used to hold it (ScriptFunction::ChangeEntryPoint) + // b. If the function's last-added property was deleted and it moved to a previous type (ScriptFunction::ReplaceTypeWithPredecessorType) + // c. 
If the function's path type got replaced with a dictionary, then all previous entry point infos in that path are queued on the ThreadContext (ScriptFunction::PrepareForConversionToNonPathType) + bool foundEntryPoint = false; + executeFunction->MapEntryPointsUntil([&](int index, Js::FunctionEntryPointInfo* info) + { + foundEntryPoint = info == entryPointInfo; + return foundEntryPoint; + }); + foundEntryPoint = foundEntryPoint || function->GetScriptContext()->GetThreadContext()->IsOldEntryPointInfo(entryPointInfo); + Assert(foundEntryPoint); +#endif + uint8 callsCount = entryPointInfo->callsCount > 255 ? 255 : static_cast<uint8>(entryPointInfo->callsCount); RejitReason rejitReason = RejitReason::None; bool reThunk = false; @@ -2235,7 +2262,7 @@ void BailOutRecord::ScheduleFunctionCodeGen(Js::ScriptFunction * function, Js::S if (bailOutRecord->IsForLoopTop() && IR::IsTypeCheckBailOutKind(bailOutRecord->bailOutKind)) { // Disable FieldPRE if we're triggering a type check rejit due to a bailout at the loop top. - // Most likely this was caused by a CheckFixedFld that was hoisted from a branch block where + // Most likely this was caused by a CheckFixedFld that was hoisted from a branch block where // only certain types flowed, to the loop top, where more types (different or non-equivalent) // were flowing in. 
profileInfo->DisableFieldPRE(); diff --git a/lib/Backend/BailOut.h b/lib/Backend/BailOut.h index d19665b1a1c..55af32893b1 100644 --- a/lib/Backend/BailOut.h +++ b/lib/Backend/BailOut.h @@ -209,6 +209,8 @@ class BailOutRecord void SetType(BailoutRecordType type) { this->type = type; } bool IsShared() const { return type == Shared || type == SharedForLoopTop; } bool IsForLoopTop() const { return type == SharedForLoopTop; } + + Js::FunctionEntryPointInfo *GetFunctionEntryPointInfo() const; protected: struct BailOutReturnValue { @@ -237,7 +239,7 @@ class BailOutRecord static void UpdatePolymorphicFieldAccess(Js::JavascriptFunction * function, BailOutRecord const * bailOutRecord); - static void ScheduleFunctionCodeGen(Js::ScriptFunction * function, Js::ScriptFunction * innerMostInlinee, BailOutRecord const * bailOutRecord, IR::BailOutKind bailOutKind, + static void ScheduleFunctionCodeGen(Js::ScriptFunction * function, Js::ScriptFunction * innerMostInlinee, BailOutRecord const * bailOutRecord, IR::BailOutKind bailOutKind, uint32 actualBailOutOffset, Js::ImplicitCallFlags savedImplicitCallFlags, void * returnAddress); static void ScheduleLoopBodyCodeGen(Js::ScriptFunction * function, Js::ScriptFunction * innerMostInlinee, BailOutRecord const * bailOutRecord, IR::BailOutKind bailOutKind); static void CheckPreemptiveRejit(Js::FunctionBody* executeFunction, IR::BailOutKind bailOutKind, BailOutRecord* bailoutRecord, uint8& callsOrIterationsCount, int loopNumber); @@ -416,6 +418,7 @@ struct GlobalBailOutRecordDataTable // The offset to 'registerSaveSpace' is hard-coded in LinearScanMD::SaveAllRegisters, so let this be the first member variable Js::Var *registerSaveSpace; GlobalBailOutRecordDataRow *globalBailOutRecordDataRows; + Js::EntryPointInfo *entryPointInfo; uint32 length; uint32 size; int32 firstActualStackOffset; diff --git a/lib/Backend/FunctionJITTimeInfo.cpp b/lib/Backend/FunctionJITTimeInfo.cpp index bd32326c640..a6b3b409b11 100644 --- 
a/lib/Backend/FunctionJITTimeInfo.cpp +++ b/lib/Backend/FunctionJITTimeInfo.cpp @@ -27,6 +27,7 @@ FunctionJITTimeInfo::BuildJITTimeData( jitData->isInlined = codeGenData->GetIsInlined(); jitData->weakFuncRef = (intptr_t)codeGenData->GetWeakFuncRef(); jitData->inlineesBv = (BVFixedIDL*)(const BVFixed*)codeGenData->inlineesBv; + jitData->entryPointInfoAddr = (intptr_t)codeGenData->GetEntryPointInfo(); if (!codeGenData->GetFunctionBody() || !codeGenData->GetFunctionBody()->GetByteCode()) { @@ -62,7 +63,7 @@ FunctionJITTimeInfo::BuildJITTimeData( Assert(defaultEntryPointInfo->IsFunctionEntryPointInfo()); Js::FunctionEntryPointInfo *functionEntryPointInfo = static_cast<Js::FunctionEntryPointInfo*>(defaultEntryPointInfo); jitData->callsCountAddress = (intptr_t)&functionEntryPointInfo->callsCount; - + jitData->sharedPropertyGuards = codeGenData->sharedPropertyGuards; jitData->sharedPropGuardCount = codeGenData->sharedPropertyGuardCount; } @@ -203,6 +204,12 @@ FunctionJITTimeInfo::GetFunctionInfoAddr() const return m_data.functionInfoAddr; } +intptr_t +FunctionJITTimeInfo::GetEntryPointInfoAddr() const +{ + return m_data.entryPointInfoAddr; +} + intptr_t FunctionJITTimeInfo::GetWeakFuncRef() const { diff --git a/lib/Backend/FunctionJITTimeInfo.h b/lib/Backend/FunctionJITTimeInfo.h index 4ca48ebb802..f52a3dea421 100644 --- a/lib/Backend/FunctionJITTimeInfo.h +++ b/lib/Backend/FunctionJITTimeInfo.h @@ -27,6 +27,7 @@ class FunctionJITTimeInfo JITTimeFunctionBody * GetBody() const; bool IsPolymorphicCallSite(Js::ProfileId profiledCallSiteId) const; intptr_t GetFunctionInfoAddr() const; + intptr_t GetEntryPointInfoAddr() const; intptr_t GetWeakFuncRef() const; uint GetLocalFunctionId() const; uint GetSourceContextId() const; diff --git a/lib/Backend/GlobOpt.h b/lib/Backend/GlobOpt.h index 256523b33ef..1aa4333aba4 100644 --- a/lib/Backend/GlobOpt.h +++ b/lib/Backend/GlobOpt.h @@ -938,6 +938,9 @@ class GlobOpt template bool ProcessPropOpInTypeCheckSeq(IR::Instr* instr, IR::PropertySymOpnd *opnd, BasicBlock* 
block, bool updateExistingValue, bool* emitsTypeCheckOut = nullptr, bool* changesTypeValueOut = nullptr, bool *isObjTypeChecked = nullptr); void KillObjectHeaderInlinedTypeSyms(BasicBlock *block, bool isObjTypeSpecialized, SymID symId = SymID_Invalid); + bool HasLiveObjectHeaderInlinedTypeSym(BasicBlock *block, bool isObjTypeSpecialized, SymID symId = SymID_Invalid); + template <typename Fn> + bool MapObjectHeaderInlinedTypeSymsUntil(BasicBlock *block, bool isObjTypeSpecialized, SymID opndId, Fn fn); void ValueNumberObjectType(IR::Opnd *dstOpnd, IR::Instr *instr); void SetSingleTypeOnObjectTypeValue(Value* value, const JITTypeHolder type); void SetTypeSetOnObjectTypeValue(Value* value, Js::EquivalentTypeSet* typeSet); diff --git a/lib/Backend/GlobOptArrays.cpp b/lib/Backend/GlobOptArrays.cpp index d2357493cfc..78b60d9c113 100644 --- a/lib/Backend/GlobOptArrays.cpp +++ b/lib/Backend/GlobOptArrays.cpp @@ -897,19 +897,8 @@ void GlobOpt::ArraySrcOpt::DoLowerBoundCheck() Assert(!indexIntSym || indexIntSym->GetType() == TyInt32 || indexIntSym->GetType() == TyUint32); } - // The info in the landing pad may be better than the info in the current block due to changes made to - // the index sym inside the loop. Check if the bound check we intend to hoist is unnecessary in the - // landing pad. - if (!ValueInfo::IsLessThanOrEqualTo( - nullptr, - 0, - 0, - hoistInfo.IndexValue(), - hoistInfo.IndexConstantBounds().LowerBound(), - hoistInfo.IndexConstantBounds().UpperBound(), - hoistInfo.Offset())) + if (hoistInfo.IndexSym()) { - Assert(hoistInfo.IndexSym()); Assert(hoistInfo.Loop()->bailOutInfo); globOpt->EnsureBailTarget(hoistInfo.Loop()); @@ -1156,106 +1145,94 @@ void GlobOpt::ArraySrcOpt::DoUpperBoundCheck() Assert(!indexIntSym || indexIntSym->GetType() == TyInt32 || indexIntSym->GetType() == TyUint32); } - // The info in the landing pad may be better than the info in the current block due to changes made to the - // index sym inside the loop. 
Check if the bound check we intend to hoist is unnecessary in the landing pad. - if (!ValueInfo::IsLessThanOrEqualTo( - hoistInfo.IndexValue(), - hoistInfo.IndexConstantBounds().LowerBound(), - hoistInfo.IndexConstantBounds().UpperBound(), - hoistInfo.HeadSegmentLengthValue(), - hoistInfo.HeadSegmentLengthConstantBounds().LowerBound(), - hoistInfo.HeadSegmentLengthConstantBounds().UpperBound(), - hoistInfo.Offset())) - { - Assert(hoistInfo.Loop()->bailOutInfo); - globOpt->EnsureBailTarget(hoistInfo.Loop()); + Assert(hoistInfo.Loop()->bailOutInfo); + globOpt->EnsureBailTarget(hoistInfo.Loop()); - if (hoistInfo.LoopCount()) + if (hoistInfo.LoopCount()) + { + // Generate the loop count and loop count based bound that will be used for the bound check + if (!hoistInfo.LoopCount()->HasBeenGenerated()) { - // Generate the loop count and loop count based bound that will be used for the bound check - if (!hoistInfo.LoopCount()->HasBeenGenerated()) - { - globOpt->GenerateLoopCount(hoistInfo.Loop(), hoistInfo.LoopCount()); - } - globOpt->GenerateSecondaryInductionVariableBound( - hoistInfo.Loop(), - indexVarSym->GetInt32EquivSym(nullptr), - hoistInfo.LoopCount(), - hoistInfo.MaxMagnitudeChange(), - hoistInfo.IndexSym()); + globOpt->GenerateLoopCount(hoistInfo.Loop(), hoistInfo.LoopCount()); } + globOpt->GenerateSecondaryInductionVariableBound( + hoistInfo.Loop(), + indexVarSym->GetInt32EquivSym(nullptr), + hoistInfo.LoopCount(), + hoistInfo.MaxMagnitudeChange(), + hoistInfo.IndexSym()); + } - IR::Opnd* lowerBound = indexIntSym - ? static_cast(IR::RegOpnd::New(indexIntSym, TyInt32, instr->m_func)) - : IR::IntConstOpnd::New( - hoistInfo.IndexConstantBounds().LowerBound(), - TyInt32, - instr->m_func); - - lowerBound->SetIsJITOptimizedReg(true); - IR::Opnd* upperBound = IR::RegOpnd::New(headSegmentLengthSym, headSegmentLengthSym->GetType(), instr->m_func); - upperBound->SetIsJITOptimizedReg(true); + IR::Opnd* lowerBound = indexIntSym + ? 
static_cast(IR::RegOpnd::New(indexIntSym, TyInt32, instr->m_func)) + : IR::IntConstOpnd::New( + hoistInfo.IndexConstantBounds().LowerBound(), + TyInt32, + instr->m_func); - // indexSym <= headSegmentLength + offset (src1 <= src2 + dst) - IR::Instr *const boundCheck = globOpt->CreateBoundsCheckInstr( - lowerBound, - upperBound, - hoistInfo.Offset(), - hoistInfo.IsLoopCountBasedBound() - ? IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck - : IR::BailOutOnFailedHoistedBoundCheck, - hoistInfo.Loop()->bailOutInfo, - hoistInfo.Loop()->bailOutInfo->bailOutFunc); + lowerBound->SetIsJITOptimizedReg(true); + IR::Opnd* upperBound = IR::RegOpnd::New(headSegmentLengthSym, headSegmentLengthSym->GetType(), instr->m_func); + upperBound->SetIsJITOptimizedReg(true); - InsertInstrInLandingPad(boundCheck, hoistInfo.Loop()); + // indexSym <= headSegmentLength + offset (src1 <= src2 + dst) + IR::Instr *const boundCheck = globOpt->CreateBoundsCheckInstr( + lowerBound, + upperBound, + hoistInfo.Offset(), + hoistInfo.IsLoopCountBasedBound() + ? 
IR::BailOutOnFailedHoistedLoopCountBasedBoundCheck + : IR::BailOutOnFailedHoistedBoundCheck, + hoistInfo.Loop()->bailOutInfo, + hoistInfo.Loop()->bailOutInfo->bailOutFunc); - if (indexIntSym) - { - TRACE_PHASE_INSTR( - Js::Phase::BoundCheckHoistPhase, - instr, - _u("Hoisting array upper bound check out of loop %u to landing pad block %u, as (s%u <= s%u + %d)\n"), - hoistInfo.Loop()->GetLoopNumber(), - landingPad->GetBlockNum(), - hoistInfo.IndexSym()->m_id, - headSegmentLengthSym->m_id, - hoistInfo.Offset()); - } - else - { - TRACE_PHASE_INSTR( - Js::Phase::BoundCheckHoistPhase, - instr, - _u("Hoisting array upper bound check out of loop %u to landing pad block %u, as (%d <= s%u + %d)\n"), - hoistInfo.Loop()->GetLoopNumber(), - landingPad->GetBlockNum(), - hoistInfo.IndexConstantBounds().LowerBound(), - headSegmentLengthSym->m_id, - hoistInfo.Offset()); - } + InsertInstrInLandingPad(boundCheck, hoistInfo.Loop()); - TESTTRACE_PHASE_INSTR( + if (indexIntSym) + { + TRACE_PHASE_INSTR( + Js::Phase::BoundCheckHoistPhase, + instr, + _u("Hoisting array upper bound check out of loop %u to landing pad block %u, as (s%u <= s%u + %d)\n"), + hoistInfo.Loop()->GetLoopNumber(), + landingPad->GetBlockNum(), + hoistInfo.IndexSym()->m_id, + headSegmentLengthSym->m_id, + hoistInfo.Offset()); + } + else + { + TRACE_PHASE_INSTR( Js::Phase::BoundCheckHoistPhase, instr, - _u("Hoisting array upper bound check out of loop\n")); + _u("Hoisting array upper bound check out of loop %u to landing pad block %u, as (%d <= s%u + %d)\n"), + hoistInfo.Loop()->GetLoopNumber(), + landingPad->GetBlockNum(), + hoistInfo.IndexConstantBounds().LowerBound(), + headSegmentLengthSym->m_id, + hoistInfo.Offset()); + } - // Record the bound check instruction as available - const IntBoundCheck boundCheckInfo( - hoistInfo.IndexValue() ? 
hoistInfo.IndexValueNumber() : ZeroValueNumber, - hoistInfo.HeadSegmentLengthValue()->GetValueNumber(), - boundCheck, - landingPad); - { - const bool added = globOpt->CurrentBlockData()->availableIntBoundChecks->AddNew(boundCheckInfo) >= 0; - Assert(added || failedToUpdateCompatibleUpperBoundCheck); - } - for (InvariantBlockBackwardIterator it(globOpt, globOpt->currentBlock, landingPad, nullptr); - it.IsValid(); - it.MoveNext()) - { - const bool added = it.Block()->globOptData.availableIntBoundChecks->AddNew(boundCheckInfo) >= 0; - Assert(added || failedToUpdateCompatibleUpperBoundCheck); - } + TESTTRACE_PHASE_INSTR( + Js::Phase::BoundCheckHoistPhase, + instr, + _u("Hoisting array upper bound check out of loop\n")); + + // Record the bound check instruction as available + const IntBoundCheck boundCheckInfo( + hoistInfo.IndexValue() ? hoistInfo.IndexValueNumber() : ZeroValueNumber, + hoistInfo.HeadSegmentLengthValue()->GetValueNumber(), + boundCheck, + landingPad); + { + const bool added = globOpt->CurrentBlockData()->availableIntBoundChecks->AddNew(boundCheckInfo) >= 0; + Assert(added || failedToUpdateCompatibleUpperBoundCheck); + } + for (InvariantBlockBackwardIterator it(globOpt, globOpt->currentBlock, landingPad, nullptr); + it.IsValid(); + it.MoveNext()) + { + const bool added = it.Block()->globOptData.availableIntBoundChecks->AddNew(boundCheckInfo) >= 0; + Assert(added || failedToUpdateCompatibleUpperBoundCheck); } } diff --git a/lib/Backend/GlobOptFields.cpp b/lib/Backend/GlobOptFields.cpp index fd29d9c0c3f..e5bf7a545fe 100644 --- a/lib/Backend/GlobOptFields.cpp +++ b/lib/Backend/GlobOptFields.cpp @@ -788,6 +788,10 @@ GlobOpt::FinishOptPropOp(IR::Instr *instr, IR::PropertySymOpnd *opnd, BasicBlock { this->KillObjectHeaderInlinedTypeSyms(block, isObjTypeSpecialized, opndId); } + else if (!isObjTypeChecked && this->HasLiveObjectHeaderInlinedTypeSym(block, true, opndId)) + { + opnd->SetTypeCheckRequired(true); + } } return isObjTypeSpecialized; @@ -795,10 
+799,23 @@ GlobOpt::FinishOptPropOp(IR::Instr *instr, IR::PropertySymOpnd *opnd, BasicBlock void GlobOpt::KillObjectHeaderInlinedTypeSyms(BasicBlock *block, bool isObjTypeSpecialized, SymID opndId) +{ + this->MapObjectHeaderInlinedTypeSymsUntil(block, isObjTypeSpecialized, opndId, [&](SymID symId)->bool { this->currentBlock->globOptData.liveFields->Clear(symId); return false; }); +} + +bool +GlobOpt::HasLiveObjectHeaderInlinedTypeSym(BasicBlock *block, bool isObjTypeSpecialized, SymID opndId) +{ + return this->MapObjectHeaderInlinedTypeSymsUntil(block, true, opndId, [&](SymID symId)->bool { return this->currentBlock->globOptData.liveFields->Test(symId); }); +} + +template <typename Fn> +bool +GlobOpt::MapObjectHeaderInlinedTypeSymsUntil(BasicBlock *block, bool isObjTypeSpecialized, SymID opndId, Fn fn) { if (this->objectTypeSyms == nullptr) { - return; + return false; } FOREACH_BITSET_IN_SPARSEBV(symId, this->objectTypeSyms) @@ -821,7 +838,10 @@ GlobOpt::KillObjectHeaderInlinedTypeSyms(BasicBlock *block, bool isObjTypeSpecia { if (type->GetTypeHandler()->IsObjectHeaderInlinedTypeHandler()) { - this->currentBlock->globOptData.liveFields->Clear(symId); + if (fn(symId)) + { + return true; + } } } } @@ -835,7 +855,10 @@ GlobOpt::KillObjectHeaderInlinedTypeSyms(BasicBlock *block, bool isObjTypeSpecia { if (type->GetTypeHandler()->IsObjectHeaderInlinedTypeHandler()) { - this->currentBlock->globOptData.liveFields->Clear(symId); + if (fn(symId)) + { + return true; + } break; } } @@ -844,6 +867,8 @@ GlobOpt::KillObjectHeaderInlinedTypeSyms(BasicBlock *block, bool isObjTypeSpecia } } NEXT_BITSET_IN_SPARSEBV; + + return false; } bool diff --git a/lib/Backend/Inline.cpp b/lib/Backend/Inline.cpp index 7693abaf7da..7d09c52109a 100644 --- a/lib/Backend/Inline.cpp +++ b/lib/Backend/Inline.cpp @@ -3418,10 +3418,6 @@ Inline::SetupInlineInstrForCallDirect(Js::BuiltinFunction builtInId, IR::Instr* callInstr->SetSrc1(IR::HelperCallOpnd::New(IR::JnHelperMethod::HelperString_Link, callInstr->m_func)); 
break; - case Js::BuiltinFunction::JavascriptString_LocaleCompare: - callInstr->SetSrc1(IR::HelperCallOpnd::New(IR::JnHelperMethod::HelperString_LocaleCompare, callInstr->m_func)); - break; - case Js::BuiltinFunction::JavascriptString_Match: callInstr->SetSrc1(IR::HelperCallOpnd::New(IR::JnHelperMethod::HelperString_Match, callInstr->m_func)); break; diff --git a/lib/Backend/InliningDecider.cpp b/lib/Backend/InliningDecider.cpp index abafe245962..2f21acd5cfa 100644 --- a/lib/Backend/InliningDecider.cpp +++ b/lib/Backend/InliningDecider.cpp @@ -491,7 +491,6 @@ bool InliningDecider::GetBuiltInInfoCommon( case Js::JavascriptBuiltInFunction::JavascriptArray_Splice: case Js::JavascriptBuiltInFunction::JavascriptString_Link: - case Js::JavascriptBuiltInFunction::JavascriptString_LocaleCompare: goto CallDirectCommon; case Js::JavascriptBuiltInFunction::JavascriptArray_Join: diff --git a/lib/Backend/LinearScan.cpp b/lib/Backend/LinearScan.cpp index e155e153625..0221ea1d410 100644 --- a/lib/Backend/LinearScan.cpp +++ b/lib/Backend/LinearScan.cpp @@ -1288,6 +1288,7 @@ LinearScan::EnsureGlobalBailOutRecordTable(Func *func) if (globalBailOutRecordDataTable == nullptr) { globalBailOutRecordDataTable = globalBailOutRecordTables[inlineeID] = NativeCodeDataNew(allocator, GlobalBailOutRecordDataTable); + globalBailOutRecordDataTable->entryPointInfo = (Js::EntryPointInfo*)func->GetWorkItem()->GetJITTimeInfo()->GetEntryPointInfoAddr(); globalBailOutRecordDataTable->length = globalBailOutRecordDataTable->size = 0; globalBailOutRecordDataTable->isInlinedFunction = !isTopFunc; globalBailOutRecordDataTable->hasNonSimpleParams = func->GetHasNonSimpleParams(); @@ -2603,14 +2604,14 @@ LinearScan::FindReg(Lifetime *newLifetime, IR::RegOpnd *regOpnd, bool force) // Avoid the temp reg that we have loaded in this basic block regsBvNoTemps.Minus(this->tempRegs); } - + BitVector regsBvNoTempsNoCallee = regsBvNoTemps; // Try to find a non-callee saved reg so that we don't have to save it in prolog 
regsBvNoTempsNoCallee.Minus(this->calleeSavedRegs); // Allocate a non-callee saved reg from the other end of the bit vector so that it can keep live for longer regIndex = regsBvNoTempsNoCallee.GetPrevBit(); - + if (regIndex == BVInvalidIndex) { // If we don't have any non-callee saved reg then get the first available callee saved reg so that prolog can store adjacent registers @@ -3730,7 +3731,7 @@ LinearScan::ProcessSecondChanceBoundaryHelper(IR::BranchInstr *branchInstr, IR:: } else { - // Dead code after the unconditional branch causes the currentBlock data to be freed later on... + // Dead code after the unconditional branch causes the currentBlock data to be freed later on... // Deep copy in this case. branchLabel->m_loweredBasicBlock = this->currentBlock->Clone(this->tempAlloc); } @@ -4730,7 +4731,7 @@ IR::Instr * LinearScan::GetIncInsertionPoint(IR::Instr *instr) } void LinearScan::DynamicStatsInstrument() -{ +{ { IR::Instr *firstInstr = this->func->m_headInstr; IR::MemRefOpnd *memRefOpnd = IR::MemRefOpnd::New(this->func->GetJITFunctionBody()->GetCallCountStatsAddr(), TyUint32, this->func); diff --git a/lib/Backend/Lower.cpp b/lib/Backend/Lower.cpp index 5b3e9911727..45f8d21714c 100644 --- a/lib/Backend/Lower.cpp +++ b/lib/Backend/Lower.cpp @@ -4207,7 +4207,7 @@ Lowerer::GenerateProfiledNewScObjArrayFastPath(IR::Instr *instr, Js::ArrayCallSi { // Ensure we don't write missingItems past allocation size Assert(offsetStart + missingItemIndex * sizeOfElement <= maxAllocationSize); - GenerateMemInit(headOpnd, offsetStart + missingItemIndex * sizeOfElement, GetMissingItemOpnd(missingItemType, func), instr, true /*isZeroed*/); + GenerateMemInit(headOpnd, offsetStart + missingItemIndex * sizeOfElement, GetMissingItemOpndForAssignment(missingItemType, func), instr, true /*isZeroed*/); missingItemIndex++; } @@ -7380,7 +7380,7 @@ Lowerer::GenerateStFldWithCachedType(IR::Instr *instrStFld, bool* continueAsHelp if (hasTypeCheckBailout) { - 
AssertMsg(PHASE_ON1(Js::ObjTypeSpecIsolatedFldOpsWithBailOutPhase) || !propertySymOpnd->IsTypeDead(), + AssertMsg(PHASE_ON1(Js::ObjTypeSpecIsolatedFldOpsWithBailOutPhase) || !propertySymOpnd->IsTypeDead() || propertySymOpnd->TypeCheckRequired(), "Why does a field store have a type check bailout, if its type is dead?"); if (instrStFld->GetBailOutInfo()->bailOutInstr != instrStFld) @@ -7442,10 +7442,11 @@ Lowerer::GenerateCachedTypeCheck(IR::Instr *instrChk, IR::PropertySymOpnd *prope // cache and no type check bailout. In the latter case, we can wind up doing expensive failed equivalence checks // repeatedly and never rejit. bool doEquivTypeCheck = - propertySymOpnd->HasEquivalentTypeSet() && - !(propertySymOpnd->HasFinalType() && propertySymOpnd->HasInitialType()) && - !propertySymOpnd->MustDoMonoCheck() && - (propertySymOpnd->IsPoly() || instrChk->HasTypeCheckBailOut()); + (instrChk->HasEquivalentTypeCheckBailOut() && propertySymOpnd->TypeCheckRequired()) || + (propertySymOpnd->HasEquivalentTypeSet() && + !(propertySymOpnd->HasFinalType() && propertySymOpnd->HasInitialType()) && + !propertySymOpnd->MustDoMonoCheck() && + (propertySymOpnd->IsPoly() || instrChk->HasTypeCheckBailOut())); Assert(doEquivTypeCheck || !instrChk->HasEquivalentTypeCheckBailOut()); // Create and initialize the property guard if required. Note that for non-shared monomorphic checks we can refer @@ -10801,7 +10802,7 @@ Lowerer::LowerStElemC(IR::Instr * stElem) IR::Opnd* missingElementOpnd = GetMissingItemOpnd(stElem->GetSrc1()->GetType(), m_func); if (!stElem->GetSrc1()->IsEqual(missingElementOpnd)) { - InsertCompareBranch(stElem->GetSrc1(), missingElementOpnd , Js::OpCode::BrEq_A, labelBailOut, stElem, true); + InsertMissingItemCompareBranch(stElem->GetSrc1(), Js::OpCode::BrEq_A, labelBailOut, stElem); } else { @@ -11857,7 +11858,7 @@ Lowerer::GenerateHelperToArrayPopFastPath(IR::Instr * instr, IR::LabelInstr * do if(retInstr->GetDst()) { //Do this check only for native arrays with Dst. 
For Var arrays, this is taken care in the Runtime helper itself. - InsertCompareBranch(GetMissingItemOpnd(retInstr->GetDst()->GetType(), m_func), retInstr->GetDst(), Js::OpCode::BrNeq_A, doneLabel, bailOutLabelHelper); + InsertMissingItemCompareBranch(retInstr->GetDst(), Js::OpCode::BrNeq_A, doneLabel, bailOutLabelHelper); } else { @@ -16458,13 +16459,11 @@ Lowerer::GenerateFastElemIIntIndexCommon( Assert(instr->m_opcode != Js::OpCode::InlineArrayPush || bailOutLabelInstr); // Check for a write of the MissingItem value. - InsertCompareBranch( + InsertMissingItemCompareBranch( element, - GetMissingItemOpnd(elementType, m_func), Js::OpCode::BrEq_A, instr->m_opcode == Js::OpCode::InlineArrayPush ? bailOutLabelInstr : labelCantUseArray, - instr, - true); + instr); } if(!headSegmentOpnd) @@ -16984,17 +16983,15 @@ Lowerer::GenerateFastElemIIntIndexCommon( //If the array has missing values, check for one if (!baseValueType.HasNoMissingValues()) { - InsertCompareBranch( + InsertMissingItemCompareBranch( dst, - GetMissingItemOpnd(indirType, m_func), Js::OpCode::BrEq_A, bailOutLabelInstr, - instr, - true); + instr); } } // MOV [head + offset], missing - InsertMove(indirOpnd, GetMissingItemOpnd(indirType, m_func), instr); + InsertMove(indirOpnd, GetMissingItemOpndForAssignment(indirType, m_func), instr); IR::Opnd *newLengthOpnd; IR::AutoReuseOpnd autoReuseNewLengthOpnd; @@ -17275,6 +17272,22 @@ Lowerer::GenerateFastElemIIntIndexCommon( return indirOpnd; } +IR::BranchInstr* +Lowerer::InsertMissingItemCompareBranch(IR::Opnd* compareSrc, Js::OpCode opcode, IR::LabelInstr* target, IR::Instr* insertBeforeInstr) +{ + IR::Opnd* missingItemOpnd = GetMissingItemOpndForCompare(compareSrc->GetType(), m_func); + if (compareSrc->IsFloat64()) + { + Assert(compareSrc->IsRegOpnd() || compareSrc->IsIndirOpnd()); + return m_lowererMD.InsertMissingItemCompareBranch(compareSrc, missingItemOpnd, opcode, target, insertBeforeInstr); + } + else + { + Assert(compareSrc->IsInt32() || 
compareSrc->IsVar()); + return InsertCompareBranch(missingItemOpnd, compareSrc, opcode, target, insertBeforeInstr, true); + } +} + IR::RegOpnd * Lowerer::GenerateUntagVar(IR::RegOpnd * opnd, IR::LabelInstr * labelFail, IR::Instr * insertBeforeInstr, bool generateTagCheck) { @@ -17835,13 +17848,11 @@ Lowerer::GenerateFastLdElemI(IR::Instr *& ldElem, bool *instrIsInHelperBlockRef) { // TEST dst, dst // JEQ $helper | JNE $fallthrough - InsertCompareBranch( + InsertMissingItemCompareBranch( dst, - GetMissingItemOpnd(dst->GetType(), m_func), needObjectTest ? Js::OpCode::BrEq_A : Js::OpCode::BrNeq_A, needObjectTest ? labelHelper : labelFallThru, - ldElem, - true); + ldElem); if (isNativeArrayLoad) { @@ -17927,7 +17938,7 @@ Lowerer::GenerateFastLdElemI(IR::Instr *& ldElem, bool *instrIsInHelperBlockRef) labelMissingNative = IR::LabelInstr::New(Js::OpCode::Label, m_func, true); } - InsertCompareBranch(GetMissingItemOpnd(ldElem->GetDst()->GetType(), m_func), ldElem->GetDst(), Js::OpCode::BrEq_A, labelMissingNative, insertBeforeInstr, true); + InsertMissingItemCompareBranch(ldElem->GetDst(), Js::OpCode::BrEq_A, labelMissingNative, insertBeforeInstr); } InsertBranch(Js::OpCode::Br, labelFallThru, insertBeforeInstr); if(labelMissingNative) @@ -17959,7 +17970,7 @@ Lowerer::GenerateFastLdElemI(IR::Instr *& ldElem, bool *instrIsInHelperBlockRef) { if(!emitBailout) { - InsertCompareBranch(GetMissingItemOpnd(ldElem->GetDst()->GetType(), m_func), ldElem->GetDst(), Js::OpCode::BrEq_A, labelBailOut, insertBeforeInstr, true); + InsertMissingItemCompareBranch(ldElem->GetDst(), Js::OpCode::BrEq_A, labelBailOut, insertBeforeInstr); } InsertBranch(Js::OpCode::Br, labelFallThru, insertBeforeInstr); @@ -17989,8 +18000,48 @@ Lowerer::GetMissingItemOpnd(IRType type, Func *func) { return IR::IntConstOpnd::New(Js::JavascriptNativeIntArray::MissingItem, TyInt32, func, true); } - Assert(type == TyFloat64); - return 
IR::MemRefOpnd::New(func->GetThreadContextInfo()->GetNativeFloatArrayMissingItemAddr(), TyFloat64, func); + AssertMsg(false, "Only expecting TyVar and TyInt32 in Lowerer::GetMissingItemOpnd"); + __assume(false); +} + +IR::Opnd* +Lowerer::GetMissingItemOpndForAssignment(IRType type, Func *func) +{ + switch (type) + { + case TyVar: + case TyInt32: + return GetMissingItemOpnd(type, func); + + case TyFloat64: + return IR::MemRefOpnd::New(func->GetThreadContextInfo()->GetNativeFloatArrayMissingItemAddr(), TyFloat64, func); + + default: + AnalysisAssertMsg(false, "Unexpected type in Lowerer::GetMissingItemOpndForAssignment"); + __assume(false); + } +} + +IR::Opnd * +Lowerer::GetMissingItemOpndForCompare(IRType type, Func *func) +{ + switch (type) + { + case TyVar: + case TyInt32: + return GetMissingItemOpnd(type, func); + + case TyFloat64: +#if TARGET_64 + return IR::MemRefOpnd::New(func->GetThreadContextInfo()->GetNativeFloatArrayMissingItemAddr(), TyUint64, func); +#else + return IR::MemRefOpnd::New(func->GetThreadContextInfo()->GetNativeFloatArrayMissingItemAddr(), TyUint32, func); +#endif + + default: + AnalysisAssertMsg(false, "Unexpected type in Lowerer::GetMissingItemOpndForCompare"); + __assume(false); + } } bool @@ -18630,13 +18681,11 @@ Lowerer::GenerateFastStElemI(IR::Instr *& stElem, bool *instrIsInHelperBlockRef) // // cmp [segment + index], Js::SparseArraySegment::MissingValue // je $helper - InsertCompareBranch( + InsertMissingItemCompareBranch( indirOpnd, - GetMissingItemOpnd(src->GetType(), m_func), Js::OpCode::BrEq_A, labelHelper, - stElem, - true); + stElem); } else { @@ -28022,6 +28071,13 @@ Lowerer::AddBailoutToHelperCallInstr(IR::Instr * helperCallInstr, BailOutInfo * return helperCallInstr; } +void +Lowerer::InsertAndLegalize(IR::Instr * instr, IR::Instr* insertBeforeInstr) +{ + insertBeforeInstr->InsertBefore(instr); + LowererMD::Legalize(instr); +} + #if DBG void Lowerer::LegalizeVerifyRange(IR::Instr * instrStart, IR::Instr * instrLast) diff 
--git a/lib/Backend/Lower.h b/lib/Backend/Lower.h index 3fca23f5174..fbe26266252 100644 --- a/lib/Backend/Lower.h +++ b/lib/Backend/Lower.h @@ -417,6 +417,8 @@ class Lowerer public: static IR::HelperCallOpnd* CreateHelperCallOpnd(IR::JnHelperMethod helperMethod, int helperArgCount, Func* func); static IR::Opnd * GetMissingItemOpnd(IRType type, Func *func); + static IR::Opnd * GetMissingItemOpndForAssignment(IRType type, Func *func); + static IR::Opnd * GetMissingItemOpndForCompare(IRType type, Func *func); static IR::Opnd * GetImplicitCallFlagsOpnd(Func * func); inline static IR::IntConstOpnd* MakeCallInfoConst(ushort flags, int32 argCount, Func* func) { argCount = Js::CallInfo::GetArgCountWithoutExtraArgs((Js::CallFlags)flags, (uint16)argCount); @@ -430,6 +432,7 @@ class Lowerer return IR::IntConstOpnd::New(argCount | (flags << 24), TyMachReg, func, true); #endif } + static void InsertAndLegalize(IR::Instr * instr, IR::Instr* insertBeforeInstr); private: IR::IndirOpnd* GenerateFastElemICommon( _In_ IR::Instr* elemInstr, @@ -516,6 +519,7 @@ class Lowerer _Inout_ IR::RegOpnd** taggedTypeOpnd); void GenerateFastIsInSymbolOrStringIndex(IR::Instr * instrInsert, IR::RegOpnd *indexOpnd, IR::RegOpnd *baseOpnd, IR::Opnd *dest, uint32 inlineCacheOffset, const uint32 hitRateOffset, IR::LabelInstr * labelHelper, IR::LabelInstr * labelDone); + IR::BranchInstr* InsertMissingItemCompareBranch(IR::Opnd* compareSrc, Js::OpCode opcode, IR::LabelInstr* target, IR::Instr* insertBeforeInstr); bool GenerateFastLdElemI(IR::Instr *& ldElem, bool *instrIsInHelperBlockRef); bool GenerateFastStElemI(IR::Instr *& StElem, bool *instrIsInHelperBlockRef); bool GenerateFastLdLen(IR::Instr *ldLen, bool *instrIsInHelperBlockRef); diff --git a/lib/Backend/LowerMDShared.cpp b/lib/Backend/LowerMDShared.cpp index 91f8ce06007..37dca7d3a36 100644 --- a/lib/Backend/LowerMDShared.cpp +++ b/lib/Backend/LowerMDShared.cpp @@ -8624,4 +8624,10 @@ LowererMD::InsertCmovCC(const Js::OpCode opCode, IR::Opnd * dst, 
IR::Opnd* src1, LowererMD::Legalize(instr); return instr; +} + +IR::BranchInstr* +LowererMD::InsertMissingItemCompareBranch(IR::Opnd* compareSrc, IR::Opnd* missingItemOpnd, Js::OpCode opcode, IR::LabelInstr* target, IR::Instr* insertBeforeInstr) +{ + return this->lowererMDArch.InsertMissingItemCompareBranch(compareSrc, missingItemOpnd, opcode, target, insertBeforeInstr); } \ No newline at end of file diff --git a/lib/Backend/LowerMDShared.h b/lib/Backend/LowerMDShared.h index e84e5b44948..e37cabd5d69 100644 --- a/lib/Backend/LowerMDShared.h +++ b/lib/Backend/LowerMDShared.h @@ -223,6 +223,7 @@ class LowererMD void EmitLoadFloatFromNumber(IR::Opnd *dst, IR::Opnd *src, IR::Instr *insertInstr); void EmitLoadFloat(IR::Opnd *dst, IR::Opnd *src, IR::Instr *insertInstr, IR::Instr * instrBailOut = nullptr, IR::LabelInstr * labelBailOut = nullptr); static void EmitNon32BitOvfCheck(IR::Instr *instr, IR::Instr *insertInstr, IR::LabelInstr* bailOutLabel); + IR::BranchInstr* InsertMissingItemCompareBranch(IR::Opnd* compareSrc, IR::Opnd* missingItemOpnd, Js::OpCode opcode, IR::LabelInstr* target, IR::Instr* insertBeforeInstr); static void LowerInt4NegWithBailOut(IR::Instr *const instr, const IR::BailOutKind bailOutKind, IR::LabelInstr *const bailOutLabel, IR::LabelInstr *const skipBailOutLabel); static void LowerInt4AddWithBailOut(IR::Instr *const instr, const IR::BailOutKind bailOutKind, IR::LabelInstr *const bailOutLabel, IR::LabelInstr *const skipBailOutLabel); diff --git a/lib/Backend/NativeCodeGenerator.cpp b/lib/Backend/NativeCodeGenerator.cpp index 071cf77d297..53b3e746501 100644 --- a/lib/Backend/NativeCodeGenerator.cpp +++ b/lib/Backend/NativeCodeGenerator.cpp @@ -2848,7 +2848,7 @@ NativeCodeGenerator::GatherCodeGenData( inlineCache->TryGetFixedMethodFromCache(functionBody, ldFldInlineCacheIndex, &fixedFunctionObject); } - if (fixedFunctionObject && !fixedFunctionObject->GetFunctionInfo()->IsDeferred() && fixedFunctionObject->GetFunctionBody() != inlineeFunctionBody) + 
if (fixedFunctionObject && fixedFunctionObject->GetFunctionInfo() != inlineeFunctionBody->GetFunctionInfo()) { fixedFunctionObject = nullptr; } diff --git a/lib/Backend/Opnd.h b/lib/Backend/Opnd.h index cf3570a0daa..2f63cd1d28c 100644 --- a/lib/Backend/Opnd.h +++ b/lib/Backend/Opnd.h @@ -226,6 +226,7 @@ class Opnd bool IsUnsigned() const { return IRType_IsUnsignedInt(this->m_type); } int GetSize() const { return TySize[this->m_type]; } bool IsInt64() const { return IRType_IsInt64(this->m_type); } + bool IsUint64() const { return this->m_type == TyUint64; } bool IsInt32() const { return this->m_type == TyInt32; } bool IsUInt32() const { return this->m_type == TyUint32; } bool IsIntegral32() const { return IsInt32() || IsUInt32(); } @@ -647,6 +648,7 @@ class PropertySymOpnd sealed : public SymOpnd bool initialTypeChecked: 1; bool typeMismatch: 1; bool writeGuardChecked: 1; + bool typeCheckRequired: 1; }; uint8 typeCheckSeqFlags; }; @@ -1014,6 +1016,17 @@ class PropertySymOpnd sealed : public SymOpnd this->writeGuardChecked = value; } + bool TypeCheckRequired() const + { + return this->typeCheckRequired; + } + + void SetTypeCheckRequired(bool value) + { + Assert(IsTypeCheckSeqCandidate()); + this->typeCheckRequired = value; + } + uint16 GetObjTypeSpecFlags() const { return this->objTypeSpecFlags; diff --git a/lib/Backend/TempTracker.cpp b/lib/Backend/TempTracker.cpp index 89e12b14914..00b0b53c570 100644 --- a/lib/Backend/TempTracker.cpp +++ b/lib/Backend/TempTracker.cpp @@ -79,9 +79,19 @@ TempTrackerBase::~TempTrackerBase() void TempTrackerBase::MergeData(TempTrackerBase * fromData, bool deleteData) { - nonTempSyms.Or(&fromData->nonTempSyms); - tempTransferredSyms.Or(&fromData->tempTransferredSyms); - MergeDependencies(tempTransferDependencies, fromData->tempTransferDependencies, deleteData); + this->nonTempSyms.Or(&fromData->nonTempSyms); + this->tempTransferredSyms.Or(&fromData->tempTransferredSyms); + this->MergeDependencies(this->tempTransferDependencies, 
fromData->tempTransferDependencies, deleteData); + if (this->tempTransferDependencies) + { + FOREACH_HASHTABLE_ENTRY(BVSparse *, bucket, this->tempTransferDependencies) + { + if (bucket.element->Test(&this->nonTempSyms)) + { + this->nonTempSyms.Set(bucket.value); + } + } NEXT_HASHTABLE_ENTRY; + } } void diff --git a/lib/Backend/amd64/LowererMDArch.cpp b/lib/Backend/amd64/LowererMDArch.cpp index 060b74ddd26..b0579debb35 100644 --- a/lib/Backend/amd64/LowererMDArch.cpp +++ b/lib/Backend/amd64/LowererMDArch.cpp @@ -3448,3 +3448,22 @@ LowererMDArch::LowerEHRegionReturn(IR::Instr * insertBeforeInstr, IR::Opnd * tar // return the last instruction inserted return retInstr; } + +IR::BranchInstr* +LowererMDArch::InsertMissingItemCompareBranch(IR::Opnd* compareSrc, IR::Opnd* missingItemOpnd, Js::OpCode opcode, IR::LabelInstr* target, IR::Instr* insertBeforeInstr) +{ + Assert(compareSrc->IsFloat64() && missingItemOpnd->IsUint64()); + + IR::Opnd * compareSrcUint64Opnd = IR::RegOpnd::New(TyUint64, m_func); + + if (compareSrc->IsRegOpnd()) + { + this->lowererMD->EmitReinterpretPrimitive(compareSrcUint64Opnd, compareSrc, insertBeforeInstr); + } + else if (compareSrc->IsIndirOpnd()) + { + compareSrcUint64Opnd = compareSrc->UseWithNewType(TyUint64, m_func); + } + + return this->lowererMD->m_lowerer->InsertCompareBranch(missingItemOpnd, compareSrcUint64Opnd, opcode, target, insertBeforeInstr); +} \ No newline at end of file diff --git a/lib/Backend/amd64/LowererMDArch.h b/lib/Backend/amd64/LowererMDArch.h index 506121171c9..24cdda24018 100644 --- a/lib/Backend/amd64/LowererMDArch.h +++ b/lib/Backend/amd64/LowererMDArch.h @@ -151,7 +151,7 @@ class LowererMDArch void LowerInlineSpreadArgOutLoop(IR::Instr *callInstr, IR::RegOpnd *indexOpnd, IR::RegOpnd *arrayElementsStartOpnd); IR::Instr * LowerEHRegionReturn(IR::Instr * insertBeforeInstr, IR::Opnd * targetOpnd); - + IR::BranchInstr* InsertMissingItemCompareBranch(IR::Opnd* compareSrc, IR::Opnd* missingItemOpnd, Js::OpCode opcode, 
IR::LabelInstr* target, IR::Instr* insertBeforeInstr); private: void MovArgFromReg2Stack(IR::Instr * instr, RegNum reg, Js::ArgSlot slotNumber, IRType type = TyMachReg); void GenerateStackAllocation(IR::Instr *instr, uint32 size); diff --git a/lib/Backend/arm/ARMEncode.h b/lib/Backend/arm/ARMEncode.h index 0c1e0c57b95..b04249a2d64 100644 --- a/lib/Backend/arm/ARMEncode.h +++ b/lib/Backend/arm/ARMEncode.h @@ -1203,6 +1203,20 @@ static const FormTable Forms_VMOVARMVFP[] = FT (NOMORE, 0x0, 0), }; +static const FormTable Forms_VMOVF64R32L[] = +{ + FT(2dr______, 0x0b10ee00, Steps_FLT_FMSR_d0r), + FT(2rd______, 0x0b10ee10, Steps_FLT_FMRS_rd0), + FT(NOMORE, 0x0, 0), +}; + +static const FormTable Forms_VMOVF64R32U[] = +{ + FT(2dr______, 0x0b10ee20, Steps_FLT_FMSR_d1r), + FT(2rd______, 0x0b10ee30, Steps_FLT_FMRS_rd1), + FT(NOMORE, 0x0, 0), +}; + static const FormTable Forms_VCVTF64F32 [] = { FT (2dd______, 0x0ac0eeb7, Steps_FCVTDS_ds), diff --git a/lib/Backend/arm/AssemblyStep.h b/lib/Backend/arm/AssemblyStep.h index ec798f5c637..7027eb29a3a 100644 --- a/lib/Backend/arm/AssemblyStep.h +++ b/lib/Backend/arm/AssemblyStep.h @@ -789,6 +789,20 @@ static const AssemblyStep Steps_FLT_FMSR_sr [] = STEP_OPCODE, STEP_DONE }; +static const AssemblyStep Steps_FLT_FMSR_d0r[] = +{ + STEP_DREG, 0, 23, STEP_NEXTOPN, + STEP_REG, 28, + STEP_OPCODE, STEP_DONE +}; + +static const AssemblyStep Steps_FLT_FMSR_d1r[] = +{ + STEP_DREG, 0, 23, STEP_NEXTOPN, + STEP_REG, 28, + STEP_OPCODE, STEP_DONE +}; + static const AssemblyStep Steps_FLT_FMRS_rs [] = { STEP_REG, 28, STEP_NEXTOPN, @@ -796,6 +810,20 @@ static const AssemblyStep Steps_FLT_FMRS_rs [] = STEP_OPCODE, STEP_DONE }; +static const AssemblyStep Steps_FLT_FMRS_rd0[] = +{ + STEP_REG, 28, STEP_NEXTOPN, + STEP_DREG, 0, 23, + STEP_OPCODE, STEP_DONE +}; + +static const AssemblyStep Steps_FLT_FMRS_rd1[] = +{ + STEP_REG, 28, STEP_NEXTOPN, + STEP_DREG, 0, 23, + STEP_OPCODE, STEP_DONE +}; + static const AssemblyStep Steps_T2_PLD_offset [] = { 
STEP_BASED, STEP_BASEREG, 0, diff --git a/lib/Backend/arm/EncoderMD.cpp b/lib/Backend/arm/EncoderMD.cpp index 42cb2167213..fc77dbfe964 100644 --- a/lib/Backend/arm/EncoderMD.cpp +++ b/lib/Backend/arm/EncoderMD.cpp @@ -194,6 +194,8 @@ InstructionType EncoderMD::CanonicalizeInstr(IR::Instr* instr) case Js::OpCode::VSQRT: case Js::OpCode::VMOV: case Js::OpCode::VMOVARMVFP: + case Js::OpCode::VMOVF64R32L: + case Js::OpCode::VMOVF64R32U: case Js::OpCode::VCVTF64F32: case Js::OpCode::VCVTF32F64: case Js::OpCode::VCVTF64S32: diff --git a/lib/Backend/arm/LowerMD.cpp b/lib/Backend/arm/LowerMD.cpp index 193f63131a8..8826ff3de35 100644 --- a/lib/Backend/arm/LowerMD.cpp +++ b/lib/Backend/arm/LowerMD.cpp @@ -7695,6 +7695,30 @@ LowererMD::LowerTypeof(IR::Instr* typeOfInstr) m_lowerer->LowerUnaryHelperMem(typeOfInstr, IR::HelperOp_Typeof); } +IR::BranchInstr* +LowererMD::InsertMissingItemCompareBranch(IR::Opnd* compareSrc, IR::Opnd* missingItemOpnd, Js::OpCode opcode, IR::LabelInstr* target, IR::Instr* insertBeforeInstr) +{ + Assert(compareSrc->IsFloat64() && missingItemOpnd->IsUInt32()); + + IR::Opnd * compareSrcUint32Opnd = IR::RegOpnd::New(TyUint32, m_func); + IR::RegOpnd* tmpDoubleRegOpnd = IR::RegOpnd::New(TyFloat64, m_func); + + if (compareSrc->IsIndirOpnd()) + { + Lowerer::InsertMove(tmpDoubleRegOpnd, compareSrc, insertBeforeInstr); + } + else + { + tmpDoubleRegOpnd = compareSrc->AsRegOpnd(); + } + + IR::Instr * movInstr = IR::Instr::New(Js::OpCode::VMOVF64R32U, compareSrcUint32Opnd, tmpDoubleRegOpnd, m_func); + insertBeforeInstr->InsertBefore(movInstr); + Legalize(movInstr); + + return m_lowerer->InsertCompareBranch(missingItemOpnd, compareSrcUint32Opnd, opcode, target, insertBeforeInstr); +} + #if DBG // // Helps in debugging of fast paths. 
diff --git a/lib/Backend/arm/LowerMD.h b/lib/Backend/arm/LowerMD.h index 47cd0ea2a31..28ee28d0a2d 100644 --- a/lib/Backend/arm/LowerMD.h +++ b/lib/Backend/arm/LowerMD.h @@ -247,6 +247,7 @@ class LowererMD void LowerInlineSpreadArgOutLoop(IR::Instr *callInstr, IR::RegOpnd *indexOpnd, IR::RegOpnd *arrayElementsStartOpnd); void LowerTypeof(IR::Instr * typeOfInstr); void GenerateMemInit(IR::RegOpnd * opnd, int32 offset, size_t value, IR::Instr * insertBeforeInstr, bool isZeroed = false); + IR::BranchInstr* InsertMissingItemCompareBranch(IR::Opnd* compareSrc, IR::Opnd* missingItemOpnd, Js::OpCode opcode, IR::LabelInstr* target, IR::Instr* insertBeforeInstr); private: IR::Opnd* IsOpndNegZero(IR::Opnd* opnd, IR::Instr* instr); diff --git a/lib/Backend/arm/MdOpCodes.h b/lib/Backend/arm/MdOpCodes.h index 56df1601e38..a4449ff52d5 100644 --- a/lib/Backend/arm/MdOpCodes.h +++ b/lib/Backend/arm/MdOpCodes.h @@ -159,6 +159,8 @@ MACRO(VLDR, Reg2, 0, 0, LEGAL_VLOAD, INSTR_TYPE(Forms MACRO(VLDR32, Reg2, 0, 0, LEGAL_VLOAD, INSTR_TYPE(Forms_VLDR32), DL__C) //single precision float load MACRO(VMOV, Reg2, 0, 0, LEGAL_REG2, INSTR_TYPE(Forms_VMOV), DM__C) MACRO(VMOVARMVFP, Reg2, 0, 0, LEGAL_REG2, INSTR_TYPE(Forms_VMOVARMVFP), DM__C) +MACRO(VMOVF64R32L, Reg2, 0, 0, LEGAL_REG2, INSTR_TYPE(Forms_VMOVF64R32L), DM__C) // transfer bits between integral register and lower 4 bytes of double register +MACRO(VMOVF64R32U, Reg2, 0, 0, LEGAL_REG2, INSTR_TYPE(Forms_VMOVF64R32U), DM__C) // transfer bits between integral register and upper 4 bytes of double register MACRO(VMRS, Empty, OpSideEffect, 0, LEGAL_NONE, INSTR_TYPE(Forms_VMRS), D___C) MACRO(VMRSR, Reg1, OpSideEffect, 0, LEGAL_NONE, INSTR_TYPE(Forms_VMRSR), D___C) MACRO(VMSR, Reg1, OpSideEffect, 0, LEGAL_NONE, INSTR_TYPE(Forms_VMSR), D___C) diff --git a/lib/Backend/arm64/LowerMD.cpp b/lib/Backend/arm64/LowerMD.cpp index 0c614762efa..f1ab5a07772 100644 --- a/lib/Backend/arm64/LowerMD.cpp +++ b/lib/Backend/arm64/LowerMD.cpp @@ -7051,6 +7051,25 @@ 
LowererMD::InsertObjectPoison(IR::Opnd* poisonedOpnd, IR::BranchInstr* branchIns } } +IR::BranchInstr* +LowererMD::InsertMissingItemCompareBranch(IR::Opnd* compareSrc, IR::Opnd* missingItemOpnd, Js::OpCode opcode, IR::LabelInstr* target, IR::Instr* insertBeforeInstr) +{ + Assert(compareSrc->IsFloat64() && missingItemOpnd->IsUint64()); + + IR::Opnd * compareSrcUint64Opnd = IR::RegOpnd::New(TyUint64, m_func); + if (compareSrc->IsRegOpnd()) + { + IR::Instr * movDoubleToUint64Instr = IR::Instr::New(Js::OpCode::FMOV_GEN, compareSrcUint64Opnd, compareSrc, insertBeforeInstr->m_func); + insertBeforeInstr->InsertBefore(movDoubleToUint64Instr); + } + else if (compareSrc->IsIndirOpnd()) + { + compareSrcUint64Opnd = compareSrc->UseWithNewType(TyUint64, m_func); + } + + return m_lowerer->InsertCompareBranch(compareSrcUint64Opnd, missingItemOpnd, opcode, target, insertBeforeInstr); +} + #if DBG // // Helps in debugging of fast paths. diff --git a/lib/Backend/arm64/LowerMD.h b/lib/Backend/arm64/LowerMD.h index efc04a576c4..c795d310c02 100644 --- a/lib/Backend/arm64/LowerMD.h +++ b/lib/Backend/arm64/LowerMD.h @@ -251,6 +251,7 @@ class LowererMD void GenerateMemInit(IR::RegOpnd * opnd, int32 offset, size_t value, IR::Instr * insertBeforeInstr, bool isZeroed = false); static void InsertObjectPoison(IR::Opnd* poisonedOpnd, IR::BranchInstr* branchInstr, IR::Instr* insertInstr, bool isForStore); + IR::BranchInstr* InsertMissingItemCompareBranch(IR::Opnd* compareSrc, IR::Opnd* missingItemOpnd, Js::OpCode opcode, IR::LabelInstr* target, IR::Instr* insertBeforeInstr); private: static IR::Instr * ChangeToAssign(IR::Instr * instr, IRType destType); diff --git a/lib/Backend/i386/LowererMDArch.cpp b/lib/Backend/i386/LowererMDArch.cpp index 59c34b34249..2dd78457efa 100644 --- a/lib/Backend/i386/LowererMDArch.cpp +++ b/lib/Backend/i386/LowererMDArch.cpp @@ -4178,3 +4178,50 @@ LowererMDArch::LowerEHRegionReturn(IR::Instr * insertBeforeInstr, IR::Opnd * tar // return the last instruction inserted 
return retInstr; } + +IR::BranchInstr* +LowererMDArch::InsertMissingItemCompareBranch(IR::Opnd* compareSrc, IR::Opnd* missingItemOpnd, Js::OpCode opcode, IR::LabelInstr* target, IR::Instr* insertBeforeInstr) +{ + Assert(compareSrc->IsFloat64() && missingItemOpnd->IsUInt32()); + + IR::Opnd * compareSrcUint32Opnd = IR::RegOpnd::New(TyUint32, m_func); + + // Missing item NaN has a different bit pattern from k_Nan, but is a NaN nonetheless. Given that, it is sufficient + // to compare just the top 32 bits + // + // IF sse4.1 available + // mov xmm0, compareSrc + // pextrd ecx, xmm0, 1 <-- ecx will contain xmm0[63:32] after this + // cmp missingItemOpnd, ecx + // jcc target + // + // ELSE + // mov xmm0, compareSrc + // shufps xmm0, xmm0, (3 << 6 | 2 << 4 | 1 << 2 | 1) <-- xmm0[31:0] will contain compareSrc[63:32] after this + // movd ecx, xmm0 + // cmp missingItemOpnd, ecx + // jcc target + + IR::RegOpnd* tmpDoubleRegOpnd = IR::RegOpnd::New(TyFloat64, m_func); + + if (AutoSystemInfo::Data.SSE4_1Available()) + { + if (compareSrc->IsIndirOpnd()) + { + Lowerer::InsertMove(tmpDoubleRegOpnd, compareSrc, insertBeforeInstr); + } + else + { + tmpDoubleRegOpnd = compareSrc->AsRegOpnd(); + } + Lowerer::InsertAndLegalize(IR::Instr::New(Js::OpCode::PEXTRD, compareSrcUint32Opnd, tmpDoubleRegOpnd, IR::IntConstOpnd::New(1, TyInt8, m_func, true), m_func), insertBeforeInstr); + } + else + { + Lowerer::InsertMove(tmpDoubleRegOpnd, compareSrc, insertBeforeInstr); + Lowerer::InsertAndLegalize(IR::Instr::New(Js::OpCode::SHUFPS, tmpDoubleRegOpnd, tmpDoubleRegOpnd, IR::IntConstOpnd::New(3 << 6 | 2 << 4 | 1 << 2 | 1, TyInt8, m_func, true), m_func), insertBeforeInstr); + Lowerer::InsertAndLegalize(IR::Instr::New(Js::OpCode::MOVD, compareSrcUint32Opnd, tmpDoubleRegOpnd, m_func), insertBeforeInstr); + } + + return this->lowererMD->m_lowerer->InsertCompareBranch(missingItemOpnd, compareSrcUint32Opnd, opcode, target, insertBeforeInstr); +} \ No newline at end of file diff --git
a/lib/Backend/i386/LowererMDArch.h b/lib/Backend/i386/LowererMDArch.h index 4306f366d9f..1da1fa80615 100644 --- a/lib/Backend/i386/LowererMDArch.h +++ b/lib/Backend/i386/LowererMDArch.h @@ -121,7 +121,7 @@ class LowererMDArch void LowerInlineSpreadArgOutLoop(IR::Instr *callInstr, IR::RegOpnd *indexOpnd, IR::RegOpnd *arrayElementsStartOpnd); IR::Instr * LowerEHRegionReturn(IR::Instr * insertBeforeInstr, IR::Opnd * targetOpnd); - + IR::BranchInstr* InsertMissingItemCompareBranch(IR::Opnd* compareSrc, IR::Opnd* missingItemOpnd, Js::OpCode opcode, IR::LabelInstr* target, IR::Instr* insertBeforeInstr); private: void GeneratePreCall(IR::Instr * callInstr, IR::Opnd *functionObjOpnd); }; diff --git a/lib/Common/ChakraCoreVersion.h b/lib/Common/ChakraCoreVersion.h index 3a6e4b78661..3daf687d23b 100644 --- a/lib/Common/ChakraCoreVersion.h +++ b/lib/Common/ChakraCoreVersion.h @@ -17,7 +17,7 @@ // ChakraCore version number definitions (used in ChakraCore binary metadata) #define CHAKRA_CORE_MAJOR_VERSION 1 #define CHAKRA_CORE_MINOR_VERSION 10 -#define CHAKRA_CORE_PATCH_VERSION 1 +#define CHAKRA_CORE_PATCH_VERSION 2 #define CHAKRA_CORE_VERSION_RELEASE_QFE 0 // Redundant with PATCH_VERSION. Keep this value set to 0. 
// ------------- diff --git a/lib/JITIDL/JITTypes.h b/lib/JITIDL/JITTypes.h index 52bfbb4a445..600a38bae7e 100644 --- a/lib/JITIDL/JITTypes.h +++ b/lib/JITIDL/JITTypes.h @@ -680,6 +680,7 @@ typedef struct FunctionJITTimeDataIDL CHAKRA_PTR functionInfoAddr; CHAKRA_PTR callsCountAddress; CHAKRA_PTR weakFuncRef; + CHAKRA_PTR entryPointInfoAddr; } FunctionJITTimeDataIDL; #if !FLOATVAR diff --git a/lib/Runtime/Base/CrossSite.cpp b/lib/Runtime/Base/CrossSite.cpp index 697971281f7..2792a7877c2 100644 --- a/lib/Runtime/Base/CrossSite.cpp +++ b/lib/Runtime/Base/CrossSite.cpp @@ -99,6 +99,11 @@ namespace Js { MarshalDynamicObject(scriptContext, prototypeObject); } + if (JavascriptProxy::Is(prototypeObject)) + { + // Fetching prototype of proxy can invoke trap - which we don't want during the marshalling time. + break; + } prototype = prototypeObject->GetPrototype(); } } diff --git a/lib/Runtime/Base/FunctionBody.cpp b/lib/Runtime/Base/FunctionBody.cpp index 9364742eacd..db98c682f10 100644 --- a/lib/Runtime/Base/FunctionBody.cpp +++ b/lib/Runtime/Base/FunctionBody.cpp @@ -2088,33 +2088,6 @@ namespace Js this->SetAuxPtr(list); } - template - void FunctionProxy::MapFunctionObjectTypes(Fn func) - { - FunctionTypeWeakRefList* functionObjectTypeList = this->GetFunctionObjectTypeList(); - if (functionObjectTypeList != nullptr) - { - functionObjectTypeList->Map([&](int, FunctionTypeWeakRef* typeWeakRef) - { - if (typeWeakRef) - { - ScriptFunctionType* type = typeWeakRef->Get(); - if (type) - { - func(type); - } - } - }); - } - - if (this->deferredPrototypeType) - { - func(this->deferredPrototypeType); - } - // NOTE: We deliberately do not map the undeferredFunctionType here, since it's in the list - // of registered function object types we processed above. 
- } - FunctionProxy::FunctionTypeWeakRefList* FunctionProxy::EnsureFunctionObjectTypeList() { FunctionTypeWeakRefList* functionObjectTypeList = this->GetFunctionObjectTypeList(); diff --git a/lib/Runtime/Base/FunctionBody.h b/lib/Runtime/Base/FunctionBody.h index 6407a8bc208..e3d8b61d23c 100644 --- a/lib/Runtime/Base/FunctionBody.h +++ b/lib/Runtime/Base/FunctionBody.h @@ -222,7 +222,7 @@ namespace Js // main and JIT threads. class EntryPointInfo : public ProxyEntryPointInfo { - + private: enum State : BYTE { @@ -296,7 +296,6 @@ namespace Js public: virtual void Finalize(bool isShutdown) override; - virtual bool IsFunctionEntryPointInfo() const override { return true; } #if ENABLE_NATIVE_CODEGEN NativeEntryPointData * EnsureNativeEntryPointData(); @@ -572,9 +571,9 @@ namespace Js void ResetOnLazyBailoutFailure(); void OnNativeCodeInstallFailure(); virtual void ResetOnNativeCodeInstallFailure() = 0; - - void FreeJitTransferData(); - bool ClearEquivalentTypeCaches(); + + void FreeJitTransferData(); + bool ClearEquivalentTypeCaches(); virtual void Invalidate(bool prolongEntryPoint) { Assert(false); } InlineeFrameRecord* FindInlineeFrame(void* returnAddress); @@ -624,6 +623,8 @@ namespace Js public: FunctionEntryPointInfo(FunctionProxy * functionInfo, Js::JavascriptMethod method, ThreadContext* context); + virtual bool IsFunctionEntryPointInfo() const override { return true; } + bool ExecutedSinceCallCountCollection() const; void CollectCallCounts(); @@ -1063,8 +1064,33 @@ namespace Js FunctionTypeWeakRefList* GetFunctionObjectTypeList() const; void SetFunctionObjectTypeList(FunctionTypeWeakRefList* list); void RegisterFunctionObjectType(ScriptFunctionType* functionType); + template - void MapFunctionObjectTypes(Fn func); + void MapFunctionObjectTypes(Fn func) + { + FunctionTypeWeakRefList* functionObjectTypeList = this->GetFunctionObjectTypeList(); + if (functionObjectTypeList != nullptr) + { + functionObjectTypeList->Map([&](int, FunctionTypeWeakRef* typeWeakRef) + { 
+ if (typeWeakRef) + { + ScriptFunctionType* type = typeWeakRef->Get(); + if (type) + { + func(type); + } + } + }); + } + + if (this->deferredPrototypeType) + { + func(this->deferredPrototypeType); + } + // NOTE: We deliberately do not map the undeferredFunctionType here, since it's in the list + // of registered function object types we processed above. + } static uint GetOffsetOfDeferredPrototypeType() { return static_cast(offsetof(Js::FunctionProxy, deferredPrototypeType)); } static Js::ScriptFunctionType * EnsureFunctionProxyDeferredPrototypeType(FunctionProxy * proxy) @@ -2196,7 +2222,7 @@ namespace Js #if DYNAMIC_INTERPRETER_THUNK void GenerateDynamicInterpreterThunk(); #endif - + Js::JavascriptMethod GetEntryPoint(ProxyEntryPointInfo* entryPoint) const { return entryPoint->jsMethod; } void CaptureDynamicProfileState(FunctionEntryPointInfo* entryPointInfo); #if ENABLE_DEBUG_CONFIG_OPTIONS diff --git a/lib/Runtime/Base/ThreadContext.cpp b/lib/Runtime/Base/ThreadContext.cpp index d34fa3ea8a6..0496e0625ae 100644 --- a/lib/Runtime/Base/ThreadContext.cpp +++ b/lib/Runtime/Base/ThreadContext.cpp @@ -124,7 +124,7 @@ ThreadContext::ThreadContext(AllocationPolicyManager * allocationPolicyManager, entryExitRecord(nullptr), leafInterpreterFrame(nullptr), threadServiceWrapper(nullptr), - tryCatchFrameAddr(nullptr), + tryHandlerAddrOfReturnAddr(nullptr), temporaryArenaAllocatorCount(0), temporaryGuestArenaAllocatorCount(0), crefSContextForDiag(0), @@ -721,7 +721,7 @@ bool ThreadContext::ThreadContextRecyclerTelemetryHostInterface::TransmitTelemet bool ThreadContext::ThreadContextRecyclerTelemetryHostInterface::IsThreadBound() const { - return this->tc->IsThreadBound(); + return this->tc->IsThreadBound(); } @@ -2641,11 +2641,16 @@ ThreadContext::DoExpirableCollectModeStackWalk() if (javascriptFunction != nullptr && Js::ScriptFunction::Test(javascriptFunction)) { Js::ScriptFunction* scriptFunction = (Js::ScriptFunction*) javascriptFunction; - Js::FunctionEntryPointInfo* 
entryPointInfo = scriptFunction->GetFunctionEntryPointInfo(); - entryPointInfo->SetIsObjectUsed(); + scriptFunction->GetFunctionBody()->MapEntryPoints([](int index, Js::FunctionEntryPointInfo* entryPoint){ entryPoint->SetIsObjectUsed(); }); + + // Make sure we marked the current one when iterating all entry points + Js::ProxyEntryPointInfo* entryPointInfo = scriptFunction->GetEntryPointInfo(); + Assert(entryPointInfo == nullptr + || !entryPointInfo->IsFunctionEntryPointInfo() + || ((Js::FunctionEntryPointInfo*)entryPointInfo)->IsObjectUsed()); } } } diff --git a/lib/Runtime/Base/ThreadContext.h b/lib/Runtime/Base/ThreadContext.h index 133613a5b6e..4bec829d4d7 100644 --- a/lib/Runtime/Base/ThreadContext.h +++ b/lib/Runtime/Base/ThreadContext.h @@ -667,7 +667,7 @@ class ThreadContext sealed : ThreadServiceWrapper* threadServiceWrapper; uint functionCount; uint sourceInfoCount; - void * tryCatchFrameAddr; + void * tryHandlerAddrOfReturnAddr; enum RedeferralState { InitialRedeferralState, @@ -1268,8 +1268,8 @@ class ThreadContext sealed : uint EnterScriptStart(Js::ScriptEntryExitRecord *, bool doCleanup); void EnterScriptEnd(Js::ScriptEntryExitRecord *, bool doCleanup); - void * GetTryCatchFrameAddr() { return this->tryCatchFrameAddr; } - void SetTryCatchFrameAddr(void * frameAddr) { this->tryCatchFrameAddr = frameAddr; } + void * GetTryHandlerAddrOfReturnAddr() { return this->tryHandlerAddrOfReturnAddr; } + void SetTryHandlerAddrOfReturnAddr(void * addrOfReturnAddr) { this->tryHandlerAddrOfReturnAddr = addrOfReturnAddr; } template void LeaveScriptStart(void *); @@ -1466,6 +1466,18 @@ class ThreadContext sealed : } } + bool IsOldEntryPointInfo(Js::ProxyEntryPointInfo* entryPointInfo) + { + Js::FunctionEntryPointInfo* current = this->recyclableData->oldEntryPointInfo; + while (current != nullptr) + { + if (current == entryPointInfo) + return true; + current = current->nextEntryPoint; + } + return false; + } + static bool IsOnStack(void const *ptr); _NOINLINE bool 
IsStackAvailable(size_t size, bool* isInterrupt = nullptr); _NOINLINE bool IsStackAvailableNoThrow(size_t size = Js::Constants::MinStackDefault); diff --git a/lib/Runtime/Language/InterpreterStackFrame.cpp b/lib/Runtime/Language/InterpreterStackFrame.cpp index 489c004c192..3a0233c4e1e 100644 --- a/lib/Runtime/Language/InterpreterStackFrame.cpp +++ b/lib/Runtime/Language/InterpreterStackFrame.cpp @@ -6492,6 +6492,8 @@ namespace Js this->OrFlags(InterpreterStackFrameFlags_WithinTryBlock); Js::JavascriptExceptionOperators::AutoCatchHandlerExists autoCatchHandlerExists(scriptContext); + void * addrOfReturnAddr = _AddressOfReturnAddress(); + Js::JavascriptExceptionOperators::TryHandlerAddrOfReturnAddrStack tryHandlerAddrOfReturnAddrStack(scriptContext, addrOfReturnAddr); #ifdef ENABLE_SCRIPT_DEBUGGING if (this->IsInDebugMode()) diff --git a/lib/Runtime/Language/JavascriptExceptionOperators.cpp b/lib/Runtime/Language/JavascriptExceptionOperators.cpp index e5e3a9c6195..e8d09238320 100644 --- a/lib/Runtime/Language/JavascriptExceptionOperators.cpp +++ b/lib/Runtime/Language/JavascriptExceptionOperators.cpp @@ -69,16 +69,16 @@ namespace Js m_threadContext->SetIsUserCode(m_previousCatchHandlerToUserCodeStatus); } - JavascriptExceptionOperators::TryCatchFrameAddrStack::TryCatchFrameAddrStack(ScriptContext* scriptContext, void *frameAddr) + JavascriptExceptionOperators::TryHandlerAddrOfReturnAddrStack::TryHandlerAddrOfReturnAddrStack(ScriptContext* scriptContext, void *addrOfReturnAddr) { m_threadContext = scriptContext->GetThreadContext(); - m_prevTryCatchFrameAddr = m_threadContext->GetTryCatchFrameAddr(); - scriptContext->GetThreadContext()->SetTryCatchFrameAddr(frameAddr); + m_prevTryHandlerAddrOfReturnAddr = m_threadContext->GetTryHandlerAddrOfReturnAddr(); + scriptContext->GetThreadContext()->SetTryHandlerAddrOfReturnAddr(addrOfReturnAddr); } - JavascriptExceptionOperators::TryCatchFrameAddrStack::~TryCatchFrameAddrStack() + 
JavascriptExceptionOperators::TryHandlerAddrOfReturnAddrStack::~TryHandlerAddrOfReturnAddrStack() { - m_threadContext->SetTryCatchFrameAddr(m_prevTryCatchFrameAddr); + m_threadContext->SetTryHandlerAddrOfReturnAddr(m_prevTryHandlerAddrOfReturnAddr); } JavascriptExceptionOperators::HasBailedOutPtrStack::HasBailedOutPtrStack(ScriptContext* scriptContext, bool *hasBailedOutPtr) @@ -125,13 +125,14 @@ namespace Js { void *continuation = nullptr; JavascriptExceptionObject *exception = nullptr; - void *tryCatchFrameAddr = nullptr; + void *tryHandlerAddrOfReturnAddr = nullptr; Js::JavascriptExceptionOperators::HasBailedOutPtrStack hasBailedOutPtrStack(scriptContext, (bool*)((char*)frame + hasBailedOutOffset)); PROBE_STACK(scriptContext, Constants::MinStackJitEHBailout + spillSize + argsSize); { - Js::JavascriptExceptionOperators::TryCatchFrameAddrStack tryCatchFrameAddrStack(scriptContext, frame); + void * addrOfReturnAddr = (void*)((char*)frame + sizeof(char*)); + Js::JavascriptExceptionOperators::TryHandlerAddrOfReturnAddrStack tryHandlerAddrOfReturnAddrStack(scriptContext, addrOfReturnAddr); try { Js::JavascriptExceptionOperators::AutoCatchHandlerExists autoCatchHandlerExists(scriptContext); @@ -140,8 +141,7 @@ namespace Js catch (const Js::JavascriptException& err) { exception = err.GetAndClear(); - tryCatchFrameAddr = scriptContext->GetThreadContext()->GetTryCatchFrameAddr(); - Assert(frame == tryCatchFrameAddr); + tryHandlerAddrOfReturnAddr = scriptContext->GetThreadContext()->GetTryHandlerAddrOfReturnAddr(); } } if (exception) @@ -157,7 +157,7 @@ namespace Js #if ENABLE_NATIVE_CODEGEN if (exception->GetExceptionContext() && exception->GetExceptionContext()->ThrowingFunction()) { - WalkStackForCleaningUpInlineeInfo(scriptContext, nullptr /* start stackwalk from the current frame */, tryCatchFrameAddr); + WalkStackForCleaningUpInlineeInfo(scriptContext, nullptr /* start stackwalk from the current frame */, tryHandlerAddrOfReturnAddr); } #endif @@ -212,11 +212,11 @@ 
namespace Js if (exception) { #if ENABLE_NATIVE_CODEGEN - if (scriptContext->GetThreadContext()->GetTryCatchFrameAddr() != nullptr) + if (scriptContext->GetThreadContext()->GetTryHandlerAddrOfReturnAddr() != nullptr) { if (exception->GetExceptionContext() && exception->GetExceptionContext()->ThrowingFunction()) { - WalkStackForCleaningUpInlineeInfo(scriptContext, nullptr /* start stackwalk from the current frame */, scriptContext->GetThreadContext()->GetTryCatchFrameAddr()); + WalkStackForCleaningUpInlineeInfo(scriptContext, nullptr /* start stackwalk from the current frame */, scriptContext->GetThreadContext()->GetTryHandlerAddrOfReturnAddr()); } } else @@ -295,13 +295,13 @@ namespace Js { void *continuation = nullptr; JavascriptExceptionObject *exception = nullptr; - void * tryCatchFrameAddr = nullptr; + void * tryHandlerAddrOfReturnAddr = nullptr; Js::JavascriptExceptionOperators::HasBailedOutPtrStack hasBailedOutPtrStack(scriptContext, (bool*)((char*)localsPtr + hasBailedOutOffset)); PROBE_STACK(scriptContext, Constants::MinStackJitEHBailout + argsSize); { - Js::JavascriptExceptionOperators::TryCatchFrameAddrStack tryCatchFrameAddrStack(scriptContext, framePtr); - + void * addrOfReturnAddr = (void*)((char*)framePtr + sizeof(char*)); + Js::JavascriptExceptionOperators::TryHandlerAddrOfReturnAddrStack tryHandlerAddrOfReturnAddrStack(scriptContext, addrOfReturnAddr); try { Js::JavascriptExceptionOperators::AutoCatchHandlerExists autoCatchHandlerExists(scriptContext); @@ -314,8 +314,7 @@ namespace Js catch (const Js::JavascriptException& err) { exception = err.GetAndClear(); - tryCatchFrameAddr = scriptContext->GetThreadContext()->GetTryCatchFrameAddr(); - Assert(framePtr == tryCatchFrameAddr); + tryHandlerAddrOfReturnAddr = scriptContext->GetThreadContext()->GetTryHandlerAddrOfReturnAddr(); } } @@ -332,7 +331,7 @@ namespace Js #if ENABLE_NATIVE_CODEGEN if (exception->GetExceptionContext() && exception->GetExceptionContext()->ThrowingFunction()) { - 
WalkStackForCleaningUpInlineeInfo(scriptContext, nullptr /* start stackwalk from the current frame */, tryCatchFrameAddr); + WalkStackForCleaningUpInlineeInfo(scriptContext, nullptr /* start stackwalk from the current frame */, tryHandlerAddrOfReturnAddr); } #endif exception = exception->CloneIfStaticExceptionObject(scriptContext); @@ -387,11 +386,11 @@ namespace Js if (exception) { #if ENABLE_NATIVE_CODEGEN - if (scriptContext->GetThreadContext()->GetTryCatchFrameAddr() != nullptr) + if (scriptContext->GetThreadContext()->GetTryHandlerAddrOfReturnAddr() != nullptr) { if (exception->GetExceptionContext() && exception->GetExceptionContext()->ThrowingFunction()) { - WalkStackForCleaningUpInlineeInfo(scriptContext, nullptr /* start stackwalk from the current frame */, scriptContext->GetThreadContext()->GetTryCatchFrameAddr()); + WalkStackForCleaningUpInlineeInfo(scriptContext, nullptr /* start stackwalk from the current frame */, scriptContext->GetThreadContext()->GetTryHandlerAddrOfReturnAddr()); } } else @@ -486,14 +485,14 @@ namespace Js { void* continuationAddr = NULL; Js::JavascriptExceptionObject* pExceptionObject = NULL; - void *tryCatchFrameAddr = nullptr; + void *tryHandlerAddrOfReturnAddr = nullptr; Js::JavascriptExceptionOperators::HasBailedOutPtrStack hasBailedOutPtrStack(scriptContext, (bool*)((char*)framePtr + hasBailedOutOffset)); PROBE_STACK(scriptContext, Constants::MinStackJitEHBailout); { - Js::JavascriptExceptionOperators::TryCatchFrameAddrStack tryCatchFrameAddrStack(scriptContext, framePtr); - + void * addrOfReturnAddr = (void*)((char*)framePtr + sizeof(char*)); + Js::JavascriptExceptionOperators::TryHandlerAddrOfReturnAddrStack tryHandlerAddrOfReturnAddrStack(scriptContext, addrOfReturnAddr); try { Js::JavascriptExceptionOperators::AutoCatchHandlerExists autoCatchHandlerExists(scriptContext); @@ -557,8 +556,7 @@ namespace Js catch (const Js::JavascriptException& err) { pExceptionObject = err.GetAndClear(); - tryCatchFrameAddr = 
scriptContext->GetThreadContext()->GetTryCatchFrameAddr(); - Assert(framePtr == tryCatchFrameAddr); + tryHandlerAddrOfReturnAddr = scriptContext->GetThreadContext()->GetTryHandlerAddrOfReturnAddr(); } } @@ -576,7 +574,7 @@ namespace Js #if ENABLE_NATIVE_CODEGEN if (pExceptionObject->GetExceptionContext() && pExceptionObject->GetExceptionContext()->ThrowingFunction()) { - WalkStackForCleaningUpInlineeInfo(scriptContext, nullptr /* start stackwalk from the current frame */, tryCatchFrameAddr); + WalkStackForCleaningUpInlineeInfo(scriptContext, nullptr /* start stackwalk from the current frame */, tryHandlerAddrOfReturnAddr); } #endif pExceptionObject = pExceptionObject->CloneIfStaticExceptionObject(scriptContext); @@ -722,11 +720,11 @@ namespace Js if (pExceptionObject) { #if ENABLE_NATIVE_CODEGEN - if (scriptContext->GetThreadContext()->GetTryCatchFrameAddr() != nullptr) + if (scriptContext->GetThreadContext()->GetTryHandlerAddrOfReturnAddr() != nullptr) { if (pExceptionObject->GetExceptionContext() && pExceptionObject->GetExceptionContext()->ThrowingFunction()) { - WalkStackForCleaningUpInlineeInfo(scriptContext, nullptr /* start stackwalk from the current frame */, scriptContext->GetThreadContext()->GetTryCatchFrameAddr()); + WalkStackForCleaningUpInlineeInfo(scriptContext, nullptr /* start stackwalk from the current frame */, scriptContext->GetThreadContext()->GetTryHandlerAddrOfReturnAddr()); } } else @@ -1053,14 +1051,14 @@ namespace Js } #if ENABLE_NATIVE_CODEGEN // TODO: Add code address of throwing function on exception context, and use that for returnAddress instead of passing nullptr which starts stackwalk from the top - void JavascriptExceptionOperators::WalkStackForCleaningUpInlineeInfo(ScriptContext *scriptContext, PVOID returnAddress, PVOID tryCatchFrameAddr) + void JavascriptExceptionOperators::WalkStackForCleaningUpInlineeInfo(ScriptContext *scriptContext, PVOID returnAddress, PVOID tryHandlerAddrOfReturnAddr) { - Assert(tryCatchFrameAddr != 
nullptr); + Assert(tryHandlerAddrOfReturnAddr != nullptr); JavascriptStackWalker walker(scriptContext, /*useEERContext*/ true, returnAddress); // We have to walk the inlinee frames and clear callinfo count on them on an exception // At this point inlinedFrameWalker is closed, so we should build it again by calling InlinedFrameWalker::FromPhysicalFrame - walker.WalkAndClearInlineeFrameCallInfoOnException(tryCatchFrameAddr); + walker.WalkAndClearInlineeFrameCallInfoOnException(tryHandlerAddrOfReturnAddr); } #endif void diff --git a/lib/Runtime/Language/JavascriptExceptionOperators.h b/lib/Runtime/Language/JavascriptExceptionOperators.h index baf3a135ed2..0ca74dfa76d 100644 --- a/lib/Runtime/Language/JavascriptExceptionOperators.h +++ b/lib/Runtime/Language/JavascriptExceptionOperators.h @@ -43,15 +43,15 @@ namespace Js ~AutoCatchHandlerExists(); }; - class TryCatchFrameAddrStack + class TryHandlerAddrOfReturnAddrStack { private: - void * m_prevTryCatchFrameAddr; + void * m_prevTryHandlerAddrOfReturnAddr; ThreadContext* m_threadContext; public: - TryCatchFrameAddrStack(ScriptContext* scriptContext, void *frameAddr); - ~TryCatchFrameAddrStack(); + TryHandlerAddrOfReturnAddrStack(ScriptContext* scriptContext, void *addrOfReturnAddr); + ~TryHandlerAddrOfReturnAddrStack(); }; class HasBailedOutPtrStack diff --git a/lib/Runtime/Language/JavascriptStackWalker.cpp b/lib/Runtime/Language/JavascriptStackWalker.cpp index 6c29f965de8..75b0ff06062 100644 --- a/lib/Runtime/Language/JavascriptStackWalker.cpp +++ b/lib/Runtime/Language/JavascriptStackWalker.cpp @@ -632,7 +632,7 @@ namespace Js return nullptr; } #if ENABLE_NATIVE_CODEGEN - void JavascriptStackWalker::WalkAndClearInlineeFrameCallInfoOnException(void *tryCatchFrameAddr) + void JavascriptStackWalker::WalkAndClearInlineeFrameCallInfoOnException(void *tryHandlerAddrOfReturnAddr) { // Walk the stack and when we find the first native frame, we clear the inlinee's callinfo for this frame // It is sufficient we stop at the 
first native frame which had the enclosing try-catch @@ -649,10 +649,10 @@ namespace Js inlinedFrame->callInfo.Clear(); } } - if (this->currentFrame.GetFrame() == tryCatchFrameAddr) - { - break; - } + } + if (this->currentFrame.GetAddressOfReturnAddress() == tryHandlerAddrOfReturnAddr) + { + break; } } } diff --git a/lib/Runtime/Language/JavascriptStackWalker.h b/lib/Runtime/Language/JavascriptStackWalker.h index 008868f6bac..359442427e6 100644 --- a/lib/Runtime/Language/JavascriptStackWalker.h +++ b/lib/Runtime/Language/JavascriptStackWalker.h @@ -237,7 +237,7 @@ namespace Js void ClearCachedInternalFrameInfo(); void SetCachedInternalFrameInfo(InternalFrameType frameType, JavascriptFunction* function, bool hasInlinedFramesOnStack, bool prevIntFrameIsFromBailout); InternalFrameInfo GetCachedInternalFrameInfo() const { return this->lastInternalFrameInfo; } - void WalkAndClearInlineeFrameCallInfoOnException(void *tryCatchFrameAddr); + void WalkAndClearInlineeFrameCallInfoOnException(void *tryHandlerAddrOfReturnAddr); #endif bool IsCurrentPhysicalFrameForLoopBody() const; diff --git a/lib/Runtime/Library/JavascriptArray.cpp b/lib/Runtime/Library/JavascriptArray.cpp index d874e87db4e..9df666c14a9 100644 --- a/lib/Runtime/Library/JavascriptArray.cpp +++ b/lib/Runtime/Library/JavascriptArray.cpp @@ -27,8 +27,10 @@ using namespace Js; { 5, 0, 0 }, // allocate space for 5 elements for array of length 4,5 { 8, 0, 0 }, // allocate space for 8 elements for array of length 6,7,8 }; + + const Var JavascriptArray::MissingItem = (Var)FloatMissingItemPattern; #if defined(TARGET_64) - const Var JavascriptArray::MissingItem = (Var)0x8000000280000002; + const Var JavascriptArray::IntMissingItemVar = (Var)(((uint64)IntMissingItemPattern << 32) | (uint32)IntMissingItemPattern); uint JavascriptNativeIntArray::allocationBuckets[][AllocationBucketsInfoSize] = { // See comments above on how to read this @@ -44,7 +46,7 @@ using namespace Js; {8, 0, 0}, }; #else - const Var 
JavascriptArray::MissingItem = (Var)0x80000002; + const Var JavascriptArray::IntMissingItemVar = (Var)IntMissingItemPattern; uint JavascriptNativeIntArray::allocationBuckets[][AllocationBucketsInfoSize] = { // See comments above on how to read this @@ -60,8 +62,7 @@ using namespace Js; }; #endif - const int32 JavascriptNativeIntArray::MissingItem = 0x80000002; - static const uint64 FloatMissingItemPattern = 0x8000000280000002ull; + const int32 JavascriptNativeIntArray::MissingItem = IntMissingItemPattern; const double JavascriptNativeFloatArray::MissingItem = *(double*)&FloatMissingItemPattern; // Allocate enough space for 4 inline property slots and 16 inline element slots @@ -12769,7 +12770,7 @@ using namespace Js; return TypeIds_NativeIntArray; } } - if (JavascriptNumber::Is_NoTaggedIntCheck(value)) + else if (JavascriptNumber::Is_NoTaggedIntCheck(value)) { bool isInt32; int32 i; diff --git a/lib/Runtime/Library/JavascriptArray.h b/lib/Runtime/Library/JavascriptArray.h index 2971ce21d57..5476f804014 100644 --- a/lib/Runtime/Library/JavascriptArray.h +++ b/lib/Runtime/Library/JavascriptArray.h @@ -140,6 +140,7 @@ namespace Js #endif static uint allocationBuckets[AllocationBucketsCount][AllocationBucketsInfoSize]; static const Var MissingItem; + static const Var IntMissingItemVar; template static T GetMissingItem(); SparseArraySegmentBase * GetHead() const { return head; } diff --git a/lib/Runtime/Library/JavascriptLibrary.cpp b/lib/Runtime/Library/JavascriptLibrary.cpp index 03727a0b5fa..dfdeaef62ec 100644 --- a/lib/Runtime/Library/JavascriptLibrary.cpp +++ b/lib/Runtime/Library/JavascriptLibrary.cpp @@ -3277,9 +3277,6 @@ namespace Js case PropertyIds::link: return BuiltinFunction::JavascriptString_Link; - case PropertyIds::localeCompare: - return BuiltinFunction::JavascriptString_LocaleCompare; - case PropertyIds::match: return BuiltinFunction::JavascriptString_Match; @@ -3842,7 +3839,8 @@ namespace Js builtinFuncs[BuiltinFunction::JavascriptString_CharAt] = 
library->AddFunctionToLibraryObject(stringPrototype, PropertyIds::charAt, &JavascriptString::EntryInfo::CharAt, 1); builtinFuncs[BuiltinFunction::JavascriptString_CharCodeAt] = library->AddFunctionToLibraryObject(stringPrototype, PropertyIds::charCodeAt, &JavascriptString::EntryInfo::CharCodeAt, 1); builtinFuncs[BuiltinFunction::JavascriptString_Concat] = library->AddFunctionToLibraryObject(stringPrototype, PropertyIds::concat, &JavascriptString::EntryInfo::Concat, 1); - builtinFuncs[BuiltinFunction::JavascriptString_LocaleCompare] = library->AddFunctionToLibraryObject(stringPrototype, PropertyIds::localeCompare, &JavascriptString::EntryInfo::LocaleCompare, 1); + // OS#17824730: Don't inline String.prototype.localeCompare because it immediately calls back into Intl.js, which can break implicitCallFlags + /* No inlining String_LocaleCompare */ library->AddFunctionToLibraryObject(stringPrototype, PropertyIds::localeCompare, &JavascriptString::EntryInfo::LocaleCompare, 1); builtinFuncs[BuiltinFunction::JavascriptString_Match] = library->AddFunctionToLibraryObject(stringPrototype, PropertyIds::match, &JavascriptString::EntryInfo::Match, 1); builtinFuncs[BuiltinFunction::JavascriptString_Split] = library->AddFunctionToLibraryObject(stringPrototype, PropertyIds::split, &JavascriptString::EntryInfo::Split, 2); builtinFuncs[BuiltinFunction::JavascriptString_Substring] = library->AddFunctionToLibraryObject(stringPrototype, PropertyIds::substring, &JavascriptString::EntryInfo::Substring, 2); diff --git a/lib/Runtime/Library/ScriptFunction.cpp b/lib/Runtime/Library/ScriptFunction.cpp index 19d272e2d06..6d24e7aeba3 100644 --- a/lib/Runtime/Library/ScriptFunction.cpp +++ b/lib/Runtime/Library/ScriptFunction.cpp @@ -202,6 +202,29 @@ using namespace Js; return type; } + void ScriptFunction::PrepareForConversionToNonPathType() + { + // We have a path type handler that is currently responsible for holding some number of entry point infos alive. 
+ // The last one will be copied on to the new dictionary type handler, but if any previous instances in the path + // are holding different entry point infos, those need to be copied to somewhere safe. + // The number of entry points is likely low compared to length of path, so iterate those instead. + + ProxyEntryPointInfo* entryPointInfo = this->GetScriptFunctionType()->GetEntryPointInfo(); + + this->GetFunctionProxy()->MapFunctionObjectTypes([&](ScriptFunctionType* functionType) + { + CopyEntryPointInfoToThreadContextIfNecessary(functionType->GetEntryPointInfo(), entryPointInfo); + }); + } + + void ScriptFunction::ReplaceTypeWithPredecessorType(DynamicType * previousType) + { + ProxyEntryPointInfo* oldEntryPointInfo = this->GetScriptFunctionType()->GetEntryPointInfo(); + __super::ReplaceTypeWithPredecessorType(previousType); + ProxyEntryPointInfo* newEntryPointInfo = this->GetScriptFunctionType()->GetEntryPointInfo(); + CopyEntryPointInfoToThreadContextIfNecessary(oldEntryPointInfo, newEntryPointInfo); + } + bool ScriptFunction::HasFunctionBody() { // for asmjs we want to first check if the FunctionObject has a function body. Check that the function is not deferred @@ -221,6 +244,20 @@ using namespace Js; this->GetScriptFunctionType()->ChangeEntryPoint(entryPointInfo, entryPoint, isAsmJS); } + void ScriptFunction::CopyEntryPointInfoToThreadContextIfNecessary(ProxyEntryPointInfo* oldEntryPointInfo, ProxyEntryPointInfo* newEntryPointInfo) + { + if (oldEntryPointInfo + && oldEntryPointInfo != newEntryPointInfo + && oldEntryPointInfo->SupportsExpiration()) + { + // The old entry point could be executing so we need root it to make sure + // it isn't prematurely collected. 
The rooting is done by queuing it up on the threadContext + ThreadContext* threadContext = ThreadContext::GetContextForCurrentThread(); + + threadContext->QueueFreeOldEntryPointInfoIfInScript((FunctionEntryPointInfo*)oldEntryPointInfo); + } + } + FunctionProxy * ScriptFunction::GetFunctionProxy() const { Assert(this->functionInfo->HasBody()); @@ -586,7 +623,7 @@ using namespace Js; TTD::NSSnapObjects::StdExtractSetKindSpecificInfo(objData, ssfi); } - + // TODO: Fixup function definition - something funky w/ header file includes - cycles? void ScriptFunction::ExtractSnapObjectDataIntoSnapScriptFunctionInfo(/*TTD::NSSnapObjects::SnapScriptFunctionInfo* */ void* snapScriptFunctionInfo, TTD::SlabAllocator& alloc) { diff --git a/lib/Runtime/Library/ScriptFunction.h b/lib/Runtime/Library/ScriptFunction.h index 7c265f92da2..005c020fb39 100644 --- a/lib/Runtime/Library/ScriptFunction.h +++ b/lib/Runtime/Library/ScriptFunction.h @@ -86,11 +86,15 @@ namespace Js static ScriptFunction * OP_NewScFunc(FrameDisplay *environment, FunctionInfoPtrPtr infoRef); static ScriptFunction * OP_NewScFuncHomeObj(FrameDisplay *environment, FunctionInfoPtrPtr infoRef, Var homeObj); + static void CopyEntryPointInfoToThreadContextIfNecessary(ProxyEntryPointInfo* oldEntryPointInfo, ProxyEntryPointInfo* newEntryPointInfo); + ProxyEntryPointInfo* GetEntryPointInfo() const; FunctionEntryPointInfo* GetFunctionEntryPointInfo() const { Assert(this->GetFunctionProxy()->IsDeferred() == FALSE); - return (FunctionEntryPointInfo*) this->GetEntryPointInfo(); + ProxyEntryPointInfo* result = this->GetEntryPointInfo(); + Assert(result->IsFunctionEntryPointInfo()); + return (FunctionEntryPointInfo*)result; } FunctionProxy * GetFunctionProxy() const; @@ -112,6 +116,8 @@ namespace Js JavascriptMethod UpdateUndeferredBody(FunctionBody* newFunctionInfo); virtual ScriptFunctionType * DuplicateType() override; + virtual void PrepareForConversionToNonPathType() override; + virtual void 
ReplaceTypeWithPredecessorType(DynamicType * previousType) override; virtual Var GetSourceString() const; virtual JavascriptString * EnsureSourceString(); @@ -199,7 +205,7 @@ namespace Js WebAssemblyMemory* GetWebAssemblyMemory() const; virtual bool IsWasmFunction() const override { return true; } - protected: + protected: DEFINE_VTABLE_CTOR(WasmScriptFunction, AsmJsScriptFunction); DEFINE_MARSHAL_OBJECT_TO_SCRIPT_CONTEXT(WasmScriptFunction); private: diff --git a/lib/Runtime/Library/SparseArraySegment.h b/lib/Runtime/Library/SparseArraySegment.h index e9a60351013..a318347ddad 100644 --- a/lib/Runtime/Library/SparseArraySegment.h +++ b/lib/Runtime/Library/SparseArraySegment.h @@ -89,6 +89,7 @@ namespace Js static SparseArraySegment* CopySegment(Recycler *recycler, SparseArraySegment* dst, uint32 dstIndex, SparseArraySegment* src, uint32 srcIndex, uint32 inputLen); static T GetMissingItem(); + static Var GetMissingItemVar(); static bool IsMissingItem(const T* value); template @@ -131,20 +132,27 @@ namespace Js template<> inline int32 SparseArraySegment::GetMissingItem() { - return 0x80000002; + return IntMissingItemPattern; } template<> inline double SparseArraySegment::GetMissingItem() { - uint64 u = 0x8000000280000002; - return *(double*)&u; + return *(double*)&FloatMissingItemPattern; } + template + Var SparseArraySegment::GetMissingItemVar() + { + return JavascriptArray::MissingItem; + } + template<> + Var SparseArraySegment::GetMissingItemVar(); + template<> inline bool SparseArraySegment::IsMissingItem(const double* value) { - return *(uint64*)value == 0x8000000280000002ull; + return *(uint64*)value == FloatMissingItemPattern; } template diff --git a/lib/Runtime/Library/SparseArraySegment.inl b/lib/Runtime/Library/SparseArraySegment.inl index 8a9ba339ed9..ecbf363b268 100644 --- a/lib/Runtime/Library/SparseArraySegment.inl +++ b/lib/Runtime/Library/SparseArraySegment.inl @@ -223,12 +223,18 @@ namespace Js return SparseArraySegment::Allocate(recycler, left, 
length, size); } + template<> + inline Var SparseArraySegment::GetMissingItemVar() + { + return JavascriptArray::IntMissingItemVar; + } + template void SparseArraySegment::FillSegmentBuffer(uint32 start, uint32 size) { // Fill the segment buffer using gp-register-sized stores. Avoid using the FPU for the sake // of perf (especially x86). - Var fill = JavascriptArray::MissingItem; + Var fill = (Var)SparseArraySegment::GetMissingItemVar(); if (sizeof(Var) > sizeof(T)) { // Pointer size is greater than the element (int32 buffer on x64). diff --git a/lib/Runtime/LibraryFunction.h b/lib/Runtime/LibraryFunction.h index b1e670345ea..7f256067365 100644 --- a/lib/Runtime/LibraryFunction.h +++ b/lib/Runtime/LibraryFunction.h @@ -28,7 +28,6 @@ LIBRARY_FUNCTION(JavascriptString, FromCodePoint, 1, BIF_None LIBRARY_FUNCTION(JavascriptString, IndexOf, 3, BIF_UseSrc0 | BIF_VariableArgsNumber , JavascriptString::EntryInfo::IndexOf) LIBRARY_FUNCTION(JavascriptString, LastIndexOf, 3, BIF_UseSrc0 | BIF_VariableArgsNumber , JavascriptString::EntryInfo::LastIndexOf) LIBRARY_FUNCTION(JavascriptString, Link, 2, BIF_UseSrc0 , JavascriptString::EntryInfo::Link) -LIBRARY_FUNCTION(JavascriptString, LocaleCompare, 2, BIF_UseSrc0 , JavascriptString::EntryInfo::LocaleCompare) LIBRARY_FUNCTION(JavascriptString, Match, 2, BIF_UseSrc0 | BIF_IgnoreDst , JavascriptString::EntryInfo::Match) LIBRARY_FUNCTION(JavascriptString, Replace, 3, BIF_UseSrc0 | BIF_IgnoreDst , JavascriptString::EntryInfo::Replace) LIBRARY_FUNCTION(JavascriptString, Search, 2, BIF_UseSrc0 , JavascriptString::EntryInfo::Search) diff --git a/lib/Runtime/RuntimeCommon.h b/lib/Runtime/RuntimeCommon.h index cee8ccb1833..484953244a8 100644 --- a/lib/Runtime/RuntimeCommon.h +++ b/lib/Runtime/RuntimeCommon.h @@ -180,6 +180,8 @@ namespace Js #if FLOATVAR const uint64 FloatTag_Value = 0xFFFCull << 48; #endif + const uint64 FloatMissingItemPattern = 0xFFF80002FFF80002; + const int32 IntMissingItemPattern = 0xFFF80002; template class 
NullTypeHandler; template class SimpleDictionaryTypeHandlerBase; diff --git a/lib/Runtime/Types/DynamicObject.cpp b/lib/Runtime/Types/DynamicObject.cpp index 2012ca11460..e7a7e12ccc2 100644 --- a/lib/Runtime/Types/DynamicObject.cpp +++ b/lib/Runtime/Types/DynamicObject.cpp @@ -128,7 +128,7 @@ namespace Js } else { - // Otherwise, assert that there is either + // Otherwise, assert that there is either // - no object array to deep copy // - an object array, but no deep copy needed // - data in the objectArray member, but it is inline slot data @@ -526,6 +526,11 @@ namespace Js return RecyclerNew(GetRecycler(), DynamicType, this->GetDynamicType()); } + void DynamicObject::PrepareForConversionToNonPathType() + { + // Nothing to do in base class + } + /* * DynamicObject::IsTypeHandlerCompatibleForObjectHeaderInlining * - Checks if the TypeHandlers are compatible for transition from oldTypeHandler to newTypeHandler diff --git a/lib/Runtime/Types/DynamicObject.h b/lib/Runtime/Types/DynamicObject.h index ee1d0cf97b1..bd1e0e202ef 100644 --- a/lib/Runtime/Types/DynamicObject.h +++ b/lib/Runtime/Types/DynamicObject.h @@ -69,7 +69,7 @@ namespace Js friend class JavascriptNativeArray; // for xplat offsetof field access friend class JavascriptOperators; friend class JavascriptLibrary; - friend class ModuleNamespace; // for slot setting. + friend class ModuleNamespace; // for slot setting. 
#if ENABLE_OBJECT_SOURCE_TRACKING public: @@ -152,7 +152,7 @@ namespace Js void EnsureSlots(int oldCount, int newCount, ScriptContext * scriptContext, DynamicTypeHandler * newTypeHandler = nullptr); void EnsureSlots(int newCount, ScriptContext *scriptContext); void ReplaceType(DynamicType * type); - void ReplaceTypeWithPredecessorType(DynamicType * previousType); + virtual void ReplaceTypeWithPredecessorType(DynamicType * previousType); DynamicTypeHandler * GetTypeHandler() const; @@ -304,6 +304,7 @@ namespace Js virtual BOOL IsCrossSiteObject() const { return FALSE; } virtual DynamicType* DuplicateType(); + virtual void PrepareForConversionToNonPathType(); static bool IsTypeHandlerCompatibleForObjectHeaderInlining(DynamicTypeHandler * oldTypeHandler, DynamicTypeHandler * newTypeHandler); void ChangeType(); @@ -338,7 +339,7 @@ namespace Js void SetArrayCallSiteIndex(ProfileId profileId); static DynamicObject * BoxStackInstance(DynamicObject * instance, bool deepCopy); - + private: ArrayObject* EnsureObjectArray(); ArrayObject* GetObjectArrayOrFlagsAsArray() const { return objectArray; } @@ -375,8 +376,8 @@ namespace Js public: virtual VTableValue DummyVirtualFunctionToHinderLinkerICF() { - // This virtual function hinders linker to do ICF vtable of this class with other classes. - // ICF vtable causes unexpected behavior in type check code. Objects uses vtable as identify should + // This virtual function hinders linker to do ICF vtable of this class with other classes. + // ICF vtable causes unexpected behavior in type check code. Objects uses vtable as identify should // override this function and return a unique value. 
return VTableValue::VtableDynamicObject; } diff --git a/lib/Runtime/Types/PathTypeHandler.cpp b/lib/Runtime/Types/PathTypeHandler.cpp index ddd4a696238..74e81f88f15 100644 --- a/lib/Runtime/Types/PathTypeHandler.cpp +++ b/lib/Runtime/Types/PathTypeHandler.cpp @@ -145,7 +145,7 @@ namespace Js } } - PathTypeMultiSuccessorInfo::PathTypeMultiSuccessorInfo(Recycler * recycler, const PathTypeSuccessorKey key, RecyclerWeakReference * typeWeakRef) + PathTypeMultiSuccessorInfo::PathTypeMultiSuccessorInfo(Recycler * recycler, const PathTypeSuccessorKey key, RecyclerWeakReference * typeWeakRef) { this->propertySuccessors = RecyclerNew(recycler, PropertySuccessorsMap, recycler, 3); this->propertySuccessors->Item(key, typeWeakRef); @@ -250,7 +250,7 @@ namespace Js { #if DBG DynamicType * successorType = typeWeakRef->Get(); - AssertMsg(!successorType || !successorType->GetTypeHandler()->IsPathTypeHandler() || + AssertMsg(!successorType || !successorType->GetTypeHandler()->IsPathTypeHandler() || PathTypeHandlerBase::FromTypeHandler(successorType->GetTypeHandler())->GetPredecessorType() == type, "We're using a successor that has a different predecessor?"); #endif if (this->successorInfo == nullptr) @@ -388,7 +388,17 @@ namespace Js #endif // This can happen if object header inlining is deoptimized, and we haven't built a full path from the root. // For now, just punt this case. - return TryConvertToSimpleDictionaryType(instance, GetPathLength())->SetAttributes(instance, propertyId, ObjectSlotAttributesToPropertyAttributes(propertyAttributes)); + + if (setAllAttributes) + { + // We could be trying to convert an accessor to a data property, or something similar, so do the type handler conversion here and let the caller handle setting the attributes. 
+ TryConvertToSimpleDictionaryType(instance, GetPathLength()); + return false; + } + else + { + return TryConvertToSimpleDictionaryType(instance, GetPathLength())->SetAttributes(instance, propertyId, ObjectSlotAttributesToPropertyAttributes(propertyAttributes)); + } } predTypeHandler = PathTypeHandlerBase::FromTypeHandler(currentType->GetTypeHandler()); } @@ -456,7 +466,7 @@ namespace Js if(typePath->GetIsUsedFixedFieldAt(slotIndex, objectSlotCount)) { - // We are adding a new value where some other instance already has an existing value. If this is a fixed + // We are adding a new value where some other instance already has an existing value. If this is a fixed // field we must clear the bit. If the value was hard coded in the JIT-ed code, we must invalidate the guards. // Invalidate any JIT-ed code that hard coded this method. No need to invalidate store field @@ -472,16 +482,16 @@ namespace Js Assert(HasSingletonInstanceOnlyIfNeeded(/*typePath*/)); if(objectSlotCount == typePath->GetMaxInitializedLength()) { - // We have now reached the most advanced instance along this path. If this instance is not the singleton instance, - // then the former singleton instance (if any) is no longer a singleton. This instance could be the singleton + // We have now reached the most advanced instance along this path. If this instance is not the singleton instance, + // then the former singleton instance (if any) is no longer a singleton. This instance could be the singleton // instance, if we just happen to set (overwrite) its last property. // This is perhaps the most fragile point of fixed fields on path types. If we cleared the singleton instance // while some fields remained fixed, the instance would be collectible, and yet some code would expect to see - // values and call methods on it. Clearly, a recipe for disaster. We rely on the fact that we always add - // properties to (pre-initialized) type handlers in the order they appear on the type path. 
By the time - // we reach the singleton instance, all fixed fields will have been invalidated. Otherwise, some fields - // could remain fixed (or even uninitialized) and we would have to spin off a loop here to invalidate any + // values and call methods on it. Clearly, a recipe for disaster. We rely on the fact that we always add + // properties to (pre-initialized) type handlers in the order they appear on the type path. By the time + // we reach the singleton instance, all fixed fields will have been invalidated. Otherwise, some fields + // could remain fixed (or even uninitialized) and we would have to spin off a loop here to invalidate any // remaining fixed fields - a rather unfortunate overhead. typePath->ClearSingletonInstance(); } @@ -752,16 +762,25 @@ namespace Js // In CacheOperators::CachePropertyWrite we ensure that we never cache property adds for types that aren't shared. Assert(!instance->GetDynamicType()->GetIsShared() || GetIsShared()); + bool setAttrDone; if (setAttributes) { - this->SetAttributesHelper(instance, propertyId, index, GetAttributeArray(), attr, true); + setAttrDone = this->SetAttributesHelper(instance, propertyId, index, GetAttributeArray(), attr, true); + if (!setAttrDone) + { + return instance->GetTypeHandler()->SetPropertyWithAttributes(instance, propertyId, value, attr, info, flags, possibleSideEffects); + } } else if (isInit) { ObjectSlotAttributes * attributes = this->GetAttributeArray(); if (attributes && (attributes[index] & ObjectSlotAttr_Accessor)) { - this->SetAttributesHelper(instance, propertyId, index, attributes, (ObjectSlotAttributes)(attributes[index] & ~ObjectSlotAttr_Accessor), true); + setAttrDone = this->SetAttributesHelper(instance, propertyId, index, attributes, (ObjectSlotAttributes)(attributes[index] & ~ObjectSlotAttr_Accessor), true); + if (!setAttrDone) + { + return instance->GetTypeHandler()->InitProperty(instance, propertyId, value, flags, info); + } // We're changing an accessor into a data property at 
object init time. Don't cache this transition from setter to non-setter, // as it behaves differently from a normal set property. PropertyValueInfo::SetNoCache(info, instance); @@ -1073,7 +1092,7 @@ namespace Js BOOL PathTypeHandlerBase::SetAccessorsHelper(DynamicObject* instance, PropertyId propertyId, ObjectSlotAttributes * attributes, PathTypeSetterSlotIndex * setters, Var getter, Var setter, PropertyOperationFlags flags) { if (instance->GetType()->IsExternal() || instance->GetScriptContext()->IsScriptContextInDebugMode() || PHASE_OFF1(ShareAccessorTypesPhase)) - { + { #ifdef PROFILE_TYPES instance->GetScriptContext()->convertPathToDictionaryAccessorsCount++; #endif @@ -1120,7 +1139,7 @@ namespace Js { getter = CanonicalizeAccessor(getter, library); setter = CanonicalizeAccessor(setter, library); - + if (!setters || setters[propertyIndex] == NoSetterSlot) { // We'll add 1 property to the type, so check the limit. @@ -1307,7 +1326,7 @@ namespace Js Assert(CanConvertToSimpleDictionaryType()); // Convert to new shared type with shared simple dictionary type handler and call operation on it. 
- SimpleDictionaryTypeHandlerWithNonExtensibleSupport * newTypeHandler = + SimpleDictionaryTypeHandlerWithNonExtensibleSupport * newTypeHandler = ConvertToSimpleDictionaryType(instance, this->GetPathLength(), true); Assert(newTypeHandler->GetMayBecomeShared() && !newTypeHandler->GetIsShared()); @@ -1368,6 +1387,8 @@ namespace Js ScriptContext* scriptContext = instance->GetScriptContext(); Recycler* recycler = scriptContext->GetRecycler(); + instance->PrepareForConversionToNonPathType(); + PathTypeHandlerBase * oldTypeHandler; // Ideally 'this' and oldTypeHandler->GetTypeHandler() should be same @@ -1548,6 +1569,8 @@ namespace Js ScriptContext* scriptContext = instance->GetScriptContext(); Recycler* recycler = scriptContext->GetRecycler(); + instance->PrepareForConversionToNonPathType(); + // Ideally 'this' and oldTypeHandler->GetTypeHandler() should be same // But we can have calls from external DOM objects, which requests us to replace the type of the // object with a new type. And in such cases, this API gets called with oldTypeHandler and the @@ -2036,13 +2059,13 @@ namespace Js } } } - else + else { if (key.GetAttributes() != ObjectSlotAttr_Default && oldAttributes == nullptr) { newAttributes = this->UpdateAttributes(recycler, nullptr, oldPathSize, newTypePath->GetPathSize()); } - + if ((key.GetAttributes() == ObjectSlotAttr_Setter) && oldSetters == nullptr) { newSetters = this->UpdateSetterSlots(recycler, nullptr, oldPathSize, newTypePath->GetPathSize()); @@ -2590,7 +2613,7 @@ namespace Js } clonedTypeHandler->SetMayBecomeShared(); clonedTypeHandler->CopyPropertyTypes(PropertyTypesWritableDataOnly | PropertyTypesWritableDataOnlyDetection | PropertyTypesHasSpecialProperties, this->GetPropertyTypes()); - + return clonedTypeHandler; } @@ -2690,7 +2713,7 @@ namespace Js Js::PropertyId propertyId = GetPropertyId(scriptContext, propertyIndex); ObjectSlotAttributes attr = attributes ? 
attributes[propertyIndex] : ObjectSlotAttr_Default; cachedDynamicType = newTypeHandler->PromoteType(cachedDynamicType, PathTypeSuccessorKey(propertyId, attr), true, scriptContext, instance, &propertyIndex); - newTypeHandler = PathTypeHandlerBase::FromTypeHandler(cachedDynamicType->GetTypeHandler()); + newTypeHandler = PathTypeHandlerBase::FromTypeHandler(cachedDynamicType->GetTypeHandler()); if (attr == ObjectSlotAttr_Setter) { newTypeHandler->SetSetterSlot(newTypeHandler->GetTypePath()->LookupInline(propertyId, newTypeHandler->GetPathLength()), (PathTypeSetterSlotIndex)(newTypeHandler->GetPathLength() - 1)); @@ -3354,7 +3377,7 @@ namespace Js { uint32 plength = this->GetPathLength(); ObjectSlotAttributes * attributes = this->GetAttributeArray(); - + for(uint32 index = 0; index < plength; ++index) { ObjectSlotAttributes attr = attributes ? attributes[index] : ObjectSlotAttr_Default; @@ -3493,7 +3516,7 @@ namespace Js return true; } - ObjectSlotAttributes attr = + ObjectSlotAttributes attr = (ObjectSlotAttributes)(value ? (attributes[propertyIndex] | ObjectSlotAttr_Configurable) : (attributes[propertyIndex] & ~ObjectSlotAttr_Configurable)); return SetAttributesHelper(instance, propertyId, propertyIndex, attributes, attr); } @@ -3520,7 +3543,7 @@ namespace Js return true; } - ObjectSlotAttributes attr = + ObjectSlotAttributes attr = (ObjectSlotAttributes)(value ? (attributes[propertyIndex] | ObjectSlotAttr_Enumerable) : (attributes[propertyIndex] & ~ObjectSlotAttr_Enumerable)); return SetAttributesHelper(instance, propertyId, propertyIndex, attributes, attr); } @@ -3547,7 +3570,7 @@ namespace Js return true; } - ObjectSlotAttributes attr = + ObjectSlotAttributes attr = (ObjectSlotAttributes)(value ? 
(attributes[propertyIndex] | ObjectSlotAttr_Writable) : (attributes[propertyIndex] & ~ObjectSlotAttr_Writable)); return SetAttributesHelper(instance, propertyId, propertyIndex, attributes, attr); } @@ -3652,7 +3675,7 @@ namespace Js { // PropertyAttributes is only one byte so it can't carry out data about whether this is an accessor. // Accessors must be cached differently than normal properties, so if we want to cache this we must - // do so here rather than in the caller. However, caching here would require passing originalInstance and + // do so here rather than in the caller. However, caching here would require passing originalInstance and // requestContext through a wide variety of call paths to this point (like we do for GetProperty), for // very little improvement. For now, just block caching this case. PropertyValueInfo::SetNoCache(info, instance); diff --git a/lib/Runtime/Types/ScriptFunctionType.cpp b/lib/Runtime/Types/ScriptFunctionType.cpp index 32f98278700..4e0344233d0 100644 --- a/lib/Runtime/Types/ScriptFunctionType.cpp +++ b/lib/Runtime/Types/ScriptFunctionType.cpp @@ -60,16 +60,7 @@ namespace Js } ProxyEntryPointInfo* oldEntryPointInfo = this->GetEntryPointInfo(); - if (oldEntryPointInfo - && oldEntryPointInfo != entryPointInfo - && oldEntryPointInfo->SupportsExpiration()) - { - // The old entry point could be executing so we need root it to make sure - // it isn't prematurely collected. 
The rooting is done by queuing it up on the threadContext - ThreadContext* threadContext = ThreadContext::GetContextForCurrentThread(); - - threadContext->QueueFreeOldEntryPointInfoIfInScript((FunctionEntryPointInfo*)oldEntryPointInfo); - } + ScriptFunction::CopyEntryPointInfoToThreadContextIfNecessary(oldEntryPointInfo, entryPointInfo); this->SetEntryPointInfo(entryPointInfo); } diff --git a/test/Optimizer/bcebug.js b/test/Optimizer/bcebug.js new file mode 100644 index 00000000000..4f52207b0ea --- /dev/null +++ b/test/Optimizer/bcebug.js @@ -0,0 +1,15 @@ +//------------------------------------------------------------------------------------------------------- +// Copyright (C) Microsoft. All rights reserved. +// Licensed under the MIT license. See LICENSE.txt file in the project root for full license information. +//------------------------------------------------------------------------------------------------------- + +let arr = new Uint32Array(10); +for (let i = 0; i < 11; i++) { + for (let j = 0; j < 1; j++) { + i--; + i++; + } + arr[i] = 0x1234; +} + +print("Pass") \ No newline at end of file diff --git a/test/Optimizer/rlexe.xml b/test/Optimizer/rlexe.xml index 76e3a6e7797..6b76860b44e 100644 --- a/test/Optimizer/rlexe.xml +++ b/test/Optimizer/rlexe.xml @@ -1556,6 +1556,12 @@ exclude_dynapogo,exclude_nonative + + + bcebug.js + -mic:1 -off:simplejit -bgjit- -lic:1 + + rembug.js