From d604e0b1db69bf780c9b920bfd9a59c06bcde23c Mon Sep 17 00:00:00 2001 From: "Aman Khalid (from Dev Box)" Date: Thu, 18 Jan 2024 13:12:29 -0500 Subject: [PATCH 01/12] Remove fallthrough fixup from loop phases --- src/coreclr/jit/block.cpp | 6 +- src/coreclr/jit/fgbasic.cpp | 3 +- src/coreclr/jit/fgopt.cpp | 49 ++----------- src/coreclr/jit/flowgraph.cpp | 5 +- src/coreclr/jit/loopcloning.cpp | 52 +------------- src/coreclr/jit/optimizebools.cpp | 4 +- src/coreclr/jit/optimizer.cpp | 91 ++++++++++--------------- src/coreclr/jit/redundantbranchopts.cpp | 3 +- 8 files changed, 48 insertions(+), 165 deletions(-) diff --git a/src/coreclr/jit/block.cpp b/src/coreclr/jit/block.cpp index 24d37e691c9ff..455195674b642 100644 --- a/src/coreclr/jit/block.cpp +++ b/src/coreclr/jit/block.cpp @@ -844,8 +844,7 @@ void BasicBlock::CopyTarget(Compiler* compiler, const BasicBlock* from) SetEhf(new (compiler, CMK_BasicBlock) BBehfDesc(compiler, from->GetEhfTargets())); break; case BBJ_COND: - // TODO-NoFallThrough: Copy false target, too? - SetCond(from->GetTrueTarget(), Next()); + SetCond(from->GetTrueTarget(), from->GetFalseTarget()); break; case BBJ_ALWAYS: SetKindAndTarget(from->GetKind(), from->GetTarget()); @@ -886,8 +885,7 @@ void BasicBlock::TransferTarget(BasicBlock* from) from->bbEhfTargets = nullptr; // Make sure nobody uses the descriptor after this. break; case BBJ_COND: - // TODO-NoFallThrough: Copy false target, too? - SetCond(from->GetTrueTarget(), Next()); + SetCond(from->GetTrueTarget(), from->GetFalseTarget()); break; case BBJ_ALWAYS: SetKindAndTarget(from->GetKind(), from->GetTarget()); diff --git a/src/coreclr/jit/fgbasic.cpp b/src/coreclr/jit/fgbasic.cpp index d88ed59a8f196..8fcf8eb0583ec 100644 --- a/src/coreclr/jit/fgbasic.cpp +++ b/src/coreclr/jit/fgbasic.cpp @@ -4820,8 +4820,7 @@ BasicBlock* Compiler::fgSplitBlockAtEnd(BasicBlock* curr) curr->RemoveFlags(BBF_HAS_JMP | BBF_RETLESS_CALL); // Transfer the kind and target. Do this after the code above, to avoid null-ing out the old targets used by the - // above code (and so newBlock->bbNext is valid, so SetCond() can initialize bbFalseTarget if newBlock is a - // BBJ_COND). + // above code. newBlock->TransferTarget(curr); // Default to fallthrough, and add the arc for that. 
diff --git a/src/coreclr/jit/fgopt.cpp b/src/coreclr/jit/fgopt.cpp index 996ed5dd16f0e..527428e6cbe41 100644 --- a/src/coreclr/jit/fgopt.cpp +++ b/src/coreclr/jit/fgopt.cpp @@ -2556,25 +2556,14 @@ bool Compiler::fgOptimizeUncondBranchToSimpleCond(BasicBlock* block, BasicBlock* fgInsertStmtAtEnd(block, cloneStmt); } - // add an unconditional block after this block to jump to the target block's fallthrough block - // - assert(!target->IsLast()); - BasicBlock* next = fgNewBBafter(BBJ_ALWAYS, block, true, target->GetFalseTarget()); - // Fix up block's flow // - block->SetCond(target->GetTrueTarget(), next); + block->SetCond(target->GetTrueTarget(), target->GetFalseTarget()); fgAddRefPred(block->GetTrueTarget(), block); + fgAddRefPred(block->GetFalseTarget(), block); fgRemoveRefPred(target, block); - // The new block 'next' will inherit its weight from 'block' - // - next->inheritWeight(block); - fgAddRefPred(next, block); - fgAddRefPred(next->GetTarget(), next); - - JITDUMP("fgOptimizeUncondBranchToSimpleCond(from " FMT_BB " to cond " FMT_BB "), created new uncond " FMT_BB "\n", - block->bbNum, target->bbNum, next->bbNum); + JITDUMP("fgOptimizeUncondBranchToSimpleCond(from " FMT_BB " to cond " FMT_BB ")\n", block->bbNum, target->bbNum); JITDUMP(" expecting opts to key off V%02u in " FMT_BB "\n", lclNum, block->bbNum); return true; @@ -3651,7 +3640,6 @@ bool Compiler::fgReorderBlocks(bool useProfile) } else if (bPrev->KindIs(BBJ_COND)) { - assert(bPrev->FalseTargetIs(block) || useProfile); bDest = bPrev->GetTrueTarget(); bFalseDest = bPrev->GetFalseTarget(); // TODO: Cheaper to call fgRenumberBlocks first and compare bbNums? @@ -4910,9 +4898,8 @@ bool Compiler::fgUpdateFlowGraph(bool doTailDuplication, bool isPhase) change = true; modified = true; bDest = block->GetTrueTarget(); - bNext = block->GetFalseTarget(); - // TODO-NoFallThrough: Adjust the above logic once bbFalseTarget can diverge from bbNext + // Optimization should not have inserted a new block assert(block->NextIs(bNext)); } } @@ -5055,12 +5042,6 @@ bool Compiler::fgUpdateFlowGraph(bool doTailDuplication, bool isPhase) } } - // TODO-NoFallThrough: Remove this requirement - if (bDest->KindIs(BBJ_COND) && !bDest->NextIs(bDest->GetFalseTarget())) - { - optimizeJump = false; - } - if (optimizeJump && isJumpToJoinFree) { // In the join free case, we also need to move bDest right after bNext @@ -5079,15 +5060,6 @@ bool Compiler::fgUpdateFlowGraph(bool doTailDuplication, bool isPhase) JITDUMP("\nMoving " FMT_BB " after " FMT_BB " to enable reversal\n", bDest->bbNum, bNext->bbNum); - // If bDest can fall through we'll need to create a jump - // block after it too. Remember where to jump to. - // - BasicBlock* const bDestNext = bDest->Next(); - - // Once bbFalseTarget and bbNext can diverge, this assert will hit - // TODO-NoFallThrough: Remove fall-through for BBJ_COND below - assert(!bDest->KindIs(BBJ_COND) || bDest->FalseTargetIs(bDestNext)); - // Move bDest // if (ehIsBlockEHLast(bDest)) @@ -5102,19 +5074,6 @@ bool Compiler::fgUpdateFlowGraph(bool doTailDuplication, bool isPhase) { ehUpdateLastBlocks(bNext, bDest); } - - // Add fall through fixup block, if needed. 
- // - if (bDest->KindIs(BBJ_COND)) - { - BasicBlock* const bFixup = fgNewBBafter(BBJ_ALWAYS, bDest, true, bDestNext); - bDest->SetFalseTarget(bFixup); - bFixup->inheritWeight(bDestNext); - - fgRemoveRefPred(bDestNext, bDest); - fgAddRefPred(bFixup, bDest); - fgAddRefPred(bDestNext, bFixup); - } } } diff --git a/src/coreclr/jit/flowgraph.cpp b/src/coreclr/jit/flowgraph.cpp index 7a12698b8481a..ba7bc8844326c 100644 --- a/src/coreclr/jit/flowgraph.cpp +++ b/src/coreclr/jit/flowgraph.cpp @@ -4404,10 +4404,9 @@ FlowGraphNaturalLoops* FlowGraphNaturalLoops::Find(const FlowGraphDfsTree* dfsTr for (FlowEdge* const predEdge : loop->m_header->PredEdges()) { BasicBlock* predBlock = predEdge->getSourceBlock(); - if (dfsTree->Contains(predBlock) && !dfsTree->IsAncestor(header, predEdge->getSourceBlock())) + if (dfsTree->Contains(predBlock) && !dfsTree->IsAncestor(header, predBlock)) { - JITDUMP(FMT_BB " -> " FMT_BB " is an entry edge\n", predEdge->getSourceBlock()->bbNum, - loop->m_header->bbNum); + JITDUMP(FMT_BB " -> " FMT_BB " is an entry edge\n", predBlock->bbNum, loop->m_header->bbNum); loop->m_entryEdges.push_back(predEdge); } } diff --git a/src/coreclr/jit/loopcloning.cpp b/src/coreclr/jit/loopcloning.cpp index 108bdf4e5181a..16e351d25027a 100644 --- a/src/coreclr/jit/loopcloning.cpp +++ b/src/coreclr/jit/loopcloning.cpp @@ -2040,25 +2040,6 @@ void Compiler::optCloneLoop(FlowGraphNaturalLoop* loop, LoopCloneContext* contex // bottomNext BasicBlock* bottom = loop->GetLexicallyBottomMostBlock(); BasicBlock* newPred = bottom; - if (bottom->KindIs(BBJ_COND)) - { - // TODO-NoFallThrough: Shouldn't need new BBJ_ALWAYS block once bbFalseTarget can diverge from bbNext - BasicBlock* bottomNext = bottom->Next(); - assert(bottom->FalseTargetIs(bottomNext)); - JITDUMP("Create branch around cloned loop\n"); - BasicBlock* bottomRedirBlk = fgNewBBafter(BBJ_ALWAYS, bottom, /*extendRegion*/ true, bottomNext); - JITDUMP("Adding " FMT_BB " after " FMT_BB "\n", bottomRedirBlk->bbNum, bottom->bbNum); - bottomRedirBlk->bbWeight = bottomRedirBlk->isRunRarely() ? BB_ZERO_WEIGHT : ambientWeight; - - bottom->SetFalseTarget(bottomRedirBlk); - fgAddRefPred(bottomRedirBlk, bottom); - JITDUMP("Adding " FMT_BB " -> " FMT_BB "\n", bottom->bbNum, bottomRedirBlk->bbNum); - fgReplacePred(bottomNext, bottom, bottomRedirBlk); - JITDUMP("Replace " FMT_BB " -> " FMT_BB " with " FMT_BB " -> " FMT_BB "\n", bottom->bbNum, bottomNext->bbNum, - bottomRedirBlk->bbNum, bottomNext->bbNum); - - newPred = bottomRedirBlk; - } // Create a new preheader for the slow loop immediately before the slow // loop itself. All failed conditions will branch to the slow preheader. @@ -2107,37 +2088,6 @@ void Compiler::optCloneLoop(FlowGraphNaturalLoop* loop, LoopCloneContext* contex newPred = newBlk; blockMap->Set(blk, newBlk); - // If the block falls through to a block outside the loop then we may - // need to insert a new block to redirect. - // Skip this for the bottom block; we duplicate the slow loop such that - // the bottom block will fall through to the bottom's original next. - if ((blk != bottom) && blk->bbFallsThrough() && !loop->ContainsBlock(blk->Next())) - { - if (blk->KindIs(BBJ_COND)) - { - BasicBlock* targetBlk = blk->GetFalseTarget(); - assert(blk->NextIs(targetBlk)); - - // Need to insert a block. 
- BasicBlock* newRedirBlk = fgNewBBafter(BBJ_ALWAYS, newPred, /* extendRegion */ true, targetBlk); - newRedirBlk->copyEHRegion(newPred); - newRedirBlk->bbWeight = blk->Next()->bbWeight; - newRedirBlk->CopyFlags(blk->Next(), (BBF_RUN_RARELY | BBF_PROF_WEIGHT)); - newRedirBlk->scaleBBWeight(slowPathWeightScaleFactor); - - JITDUMP(FMT_BB " falls through to " FMT_BB "; inserted redirection block " FMT_BB "\n", blk->bbNum, - blk->Next()->bbNum, newRedirBlk->bbNum); - // This block isn't part of the loop, so below loop won't add - // refs for it. - fgAddRefPred(targetBlk, newRedirBlk); - newPred = newRedirBlk; - } - else - { - assert(!"Cannot handle fallthrough"); - } - } - return BasicBlockVisit::Continue; }); @@ -2160,7 +2110,7 @@ void Compiler::optCloneLoop(FlowGraphNaturalLoop* loop, LoopCloneContext* contex // Now redirect the new block according to "blockMap". optRedirectBlock(newblk, blockMap); - // Add predecessor edges for the new successors, as well as the fall-through paths. + // Add predecessor edges for the new successors. switch (newblk->GetKind()) { case BBJ_ALWAYS: diff --git a/src/coreclr/jit/optimizebools.cpp b/src/coreclr/jit/optimizebools.cpp index b96c4ae024e40..27f918ea8456e 100644 --- a/src/coreclr/jit/optimizebools.cpp +++ b/src/coreclr/jit/optimizebools.cpp @@ -1897,10 +1897,10 @@ PhaseStatus Compiler::optOptimizeBools() continue; } - // If there is no next block, we're done + // If the false target is not the next block, we're done BasicBlock* b2 = b1->GetFalseTarget(); - if (b2 == nullptr) + if (!b1->NextIs(b2)) { break; } diff --git a/src/coreclr/jit/optimizer.cpp b/src/coreclr/jit/optimizer.cpp index e26af00a4a504..5ac0f0cce4a96 100644 --- a/src/coreclr/jit/optimizer.cpp +++ b/src/coreclr/jit/optimizer.cpp @@ -2089,6 +2089,12 @@ class LoopSearch BasicBlock* moveAfter = bottom; while (moveAfter->bbFallsThrough()) { + if (moveAfter->KindIs(BBJ_COND) && !moveAfter->NextIs(moveAfter->GetFalseTarget())) + { + // Found a BBJ_COND that won't fall into its false target. + return moveAfter; + } + // Keep looking for a better insertion point if we can. BasicBlock* newMoveAfter = TryAdvanceInsertionPoint(moveAfter); @@ -2295,14 +2301,14 @@ class LoopSearch comp->gtReverseCond(test); } - // Redirect the Conditional JUMP to go to `oldNext` - block->SetTrueTarget(oldNext); + // Redirect the Conditional JUMP + block->SetTrueTarget(block->GetFalseTarget()); block->SetFalseTarget(newNext); } else { // Insert an unconditional jump to `oldNext` just after `block`. - newBlock = comp->fgConnectFallThrough(block, oldNext); + newBlock = comp->fgConnectFallThrough(block, oldNext, true /* noFallThroughQuirk */); noway_assert((newBlock == nullptr) || loopBlocks.CanRepresent(newBlock->bbNum)); } } @@ -2637,7 +2643,6 @@ void Compiler::optFindNaturalLoops() // predOption - specifies how to update the pred lists // // Notes: -// Fall-through successors are assumed correct and are not modified. // Pred lists for successors of `blk` may be changed, depending on `predOption`. 
// void Compiler::optRedirectBlock(BasicBlock* blk, BlockToBlockMap* redirectMap, RedirectBlockOption predOption) @@ -2645,11 +2650,6 @@ void Compiler::optRedirectBlock(BasicBlock* blk, BlockToBlockMap* redirectMap, R const bool updatePreds = (predOption == RedirectBlockOption::UpdatePredLists); const bool addPreds = (predOption == RedirectBlockOption::AddToPredLists); - if (addPreds && blk->bbFallsThrough()) - { - fgAddRefPred(blk->Next(), blk); - } - BasicBlock* newJumpDest = nullptr; switch (blk->GetKind()) @@ -2662,9 +2662,15 @@ void Compiler::optRedirectBlock(BasicBlock* blk, BlockToBlockMap* redirectMap, R // These have no jump destination to update. break; + case BBJ_CALLFINALLY: + if (addPreds && blk->bbFallsThrough()) + { + fgAddRefPred(blk->Next(), blk); + } + + FALLTHROUGH; case BBJ_ALWAYS: case BBJ_LEAVE: - case BBJ_CALLFINALLY: case BBJ_CALLFINALLYRET: // All of these have a single jump destination to update. if (redirectMap->Lookup(blk->GetTarget(), &newJumpDest)) @@ -2703,6 +2709,24 @@ void Compiler::optRedirectBlock(BasicBlock* blk, BlockToBlockMap* redirectMap, R { fgAddRefPred(blk->GetTrueTarget(), blk); } + + // Update jump taken when condition is false + if (redirectMap->Lookup(blk->GetFalseTarget(), &newJumpDest)) + { + if (updatePreds) + { + fgRemoveRefPred(blk->GetFalseTarget(), blk); + } + blk->SetFalseTarget(newJumpDest); + if (updatePreds || addPreds) + { + fgAddRefPred(newJumpDest, blk); + } + } + else if (addPreds) + { + fgAddRefPred(blk->GetFalseTarget(), blk); + } break; case BBJ_EHFINALLYRET: @@ -3775,28 +3799,6 @@ bool Compiler::optTryUnrollLoop(FlowGraphNaturalLoop* loop, bool* changedIR) BasicBlock* exit = loop->ContainsBlock(exiting->GetTrueTarget()) ? exiting->GetFalseTarget() : exiting->GetTrueTarget(); - // If the bottom block falls out of the loop, then insert an - // explicit block to branch around the unrolled iterations we are - // going to create. - if (bottom->KindIs(BBJ_COND)) - { - // TODO-NoFallThrough: Shouldn't need new BBJ_ALWAYS block once bbFalseTarget can diverge from bbNext - BasicBlock* bottomNext = bottom->Next(); - assert(bottom->FalseTargetIs(bottomNext)); - JITDUMP("Create branch around unrolled loop\n"); - BasicBlock* bottomRedirBlk = fgNewBBafter(BBJ_ALWAYS, bottom, /*extendRegion*/ true, bottomNext); - JITDUMP("Adding " FMT_BB " after " FMT_BB "\n", bottomRedirBlk->bbNum, bottom->bbNum); - - bottom->SetFalseTarget(bottomRedirBlk); - fgAddRefPred(bottomRedirBlk, bottom); - JITDUMP("Adding " FMT_BB " -> " FMT_BB "\n", bottom->bbNum, bottomRedirBlk->bbNum); - fgReplacePred(bottomNext, bottom, bottomRedirBlk); - JITDUMP("Replace " FMT_BB " -> " FMT_BB " with " FMT_BB " -> " FMT_BB "\n", bottom->bbNum, bottomNext->bbNum, - bottomRedirBlk->bbNum, bottomNext->bbNum); - - insertAfter = bottomRedirBlk; - } - for (int lval = lbeg; iterToUnroll > 0; iterToUnroll--) { BasicBlock* testBlock = nullptr; @@ -4996,7 +4998,7 @@ bool Compiler::optCreatePreheader(FlowGraphNaturalLoop* loop) BasicBlock* preheaderCandidate = loop->EntryEdges()[0]->getSourceBlock(); unsigned candidateEHRegion = preheaderCandidate->hasTryIndex() ? 
preheaderCandidate->getTryIndex() : EHblkDsc::NO_ENCLOSING_INDEX; - if (preheaderCandidate->KindIs(BBJ_ALWAYS) && (preheaderCandidate->GetUniqueSucc() == loop->GetHeader()) && + if (preheaderCandidate->KindIs(BBJ_ALWAYS) && preheaderCandidate->TargetIs(loop->GetHeader()) && (candidateEHRegion == preheaderEHRegion)) { JITDUMP("Natural loop " FMT_LP " already has preheader " FMT_BB "\n", loop->GetIndex(), @@ -5036,29 +5038,6 @@ bool Compiler::optCreatePreheader(FlowGraphNaturalLoop* loop) fgReplaceJumpTarget(enterBlock, preheader, header); } - // For BBJ_COND blocks preceding header, fgReplaceJumpTarget set their false targets to preheader. - // Direct fallthrough to preheader by inserting a jump after the block. - // TODO-NoFallThrough: Remove this, and just rely on fgReplaceJumpTarget to do the fixup - BasicBlock* fallthroughSource = header->Prev(); - if (fallthroughSource->KindIs(BBJ_COND) && fallthroughSource->FalseTargetIs(preheader)) - { - FlowEdge* edge = fgRemoveRefPred(preheader, fallthroughSource); - BasicBlock* newAlways = fgNewBBafter(BBJ_ALWAYS, fallthroughSource, true, preheader); - fgAddRefPred(preheader, newAlways, edge); - fgAddRefPred(newAlways, fallthroughSource, edge); - - JITDUMP("Created new BBJ_ALWAYS " FMT_BB " to redirect " FMT_BB " to preheader\n", newAlways->bbNum, - fallthroughSource->bbNum); - fallthroughSource->SetFalseTarget(newAlways); - } - - // Make sure false target is set correctly for fallthrough into preheader. - if (!preheader->IsFirst()) - { - fallthroughSource = preheader->Prev(); - assert(!fallthroughSource->KindIs(BBJ_COND) || fallthroughSource->FalseTargetIs(preheader)); - } - optSetPreheaderWeight(loop, preheader); return true; diff --git a/src/coreclr/jit/redundantbranchopts.cpp b/src/coreclr/jit/redundantbranchopts.cpp index 235a5fc1d5f40..5881124a8f6da 100644 --- a/src/coreclr/jit/redundantbranchopts.cpp +++ b/src/coreclr/jit/redundantbranchopts.cpp @@ -1763,12 +1763,11 @@ bool Compiler::optJumpThreadCore(JumpThreadInfo& jti) { Statement* const lastStmt = jti.m_block->lastStmt(); fgRemoveStmt(jti.m_block, lastStmt); - JITDUMP(" repurposing " FMT_BB " to always fall through to " FMT_BB "\n", jti.m_block->bbNum, + JITDUMP(" repurposing " FMT_BB " to always jump to " FMT_BB "\n", jti.m_block->bbNum, jti.m_falseTarget->bbNum); fgRemoveRefPred(jti.m_trueTarget, jti.m_block); jti.m_block->SetKindAndTarget(BBJ_ALWAYS, jti.m_falseTarget); jti.m_block->SetFlags(BBF_NONE_QUIRK); - assert(jti.m_block->JumpsToNext()); } // Now reroute the flow from the predecessors. 
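
The recurring change in this first patch is that a BBJ_COND block's false edge is now recorded explicitly instead of being implied by the next block in layout, so code that redirects flow has to update both successors (and both pred-list entries) rather than assuming fallthrough. What follows is a minimal, self-contained C++ sketch of that redirection pattern; the Block struct and Redirect helper here are simplified stand-ins for illustration only, not the JIT's real BasicBlock or optRedirectBlock.

#include <cstdio>
#include <map>

enum BlockKind { BBJ_ALWAYS, BBJ_COND };

struct Block
{
    int       num;
    BlockKind kind;
    Block*    trueTarget  = nullptr; // for BBJ_ALWAYS, treated as the sole target
    Block*    falseTarget = nullptr; // taken when the condition is false; no longer implied by layout
};

// Redirect every successor edge of a block through a map. The point of the sketch is
// the BBJ_COND case: the false edge must be remapped explicitly, mirroring the new
// false-target handling added to optRedirectBlock (pred-list upkeep is omitted here).
void Redirect(Block* blk, const std::map<Block*, Block*>& redirectMap)
{
    auto remap = [&](Block*& target) {
        auto it = redirectMap.find(target);
        if (it != redirectMap.end())
        {
            target = it->second;
        }
    };

    remap(blk->trueTarget);
    if (blk->kind == BBJ_COND)
    {
        remap(blk->falseTarget); // previously this edge was assumed to fall into the next block
    }
}

int main()
{
    Block oldExit{10, BBJ_ALWAYS};
    Block newExit{20, BBJ_ALWAYS};
    Block cond{1, BBJ_COND, &oldExit, &oldExit};

    std::map<Block*, Block*> redirectMap{{&oldExit, &newExit}};
    Redirect(&cond, redirectMap);

    std::printf("true -> BB%d, false -> BB%d\n", cond.trueTarget->num, cond.falseTarget->num);
    return 0;
}
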
From 6878ef9d7d1066ca55e0f199360157bd50085c93 Mon Sep 17 00:00:00 2001 From: "Aman Khalid (from Dev Box)" Date: Thu, 18 Jan 2024 16:24:27 -0500 Subject: [PATCH 02/12] Remove fgConnectFallThrough --- src/coreclr/jit/compiler.h | 2 - src/coreclr/jit/fgbasic.cpp | 124 +++--------------------------- src/coreclr/jit/fgopt.cpp | 31 +++----- src/coreclr/jit/optimizebools.cpp | 11 ++- src/coreclr/jit/optimizer.cpp | 52 +------------ 5 files changed, 32 insertions(+), 188 deletions(-) diff --git a/src/coreclr/jit/compiler.h b/src/coreclr/jit/compiler.h index b0c2f05b6c03f..83a4bf5007e22 100644 --- a/src/coreclr/jit/compiler.h +++ b/src/coreclr/jit/compiler.h @@ -5956,8 +5956,6 @@ class Compiler void fgUpdateLoopsAfterCompacting(BasicBlock* block, BasicBlock* bNext); - BasicBlock* fgConnectFallThrough(BasicBlock* bSrc, BasicBlock* bDst, bool noFallThroughQuirk = false); - bool fgRenumberBlocks(); bool fgExpandRarelyRunBlocks(); diff --git a/src/coreclr/jit/fgbasic.cpp b/src/coreclr/jit/fgbasic.cpp index 8fcf8eb0583ec..991bc6ffc8b74 100644 --- a/src/coreclr/jit/fgbasic.cpp +++ b/src/coreclr/jit/fgbasic.cpp @@ -5232,7 +5232,7 @@ void Compiler::fgUnlinkRange(BasicBlock* bBeg, BasicBlock* bEnd) // // Arguments: // block - the block to remove -// unreachable - the block to remove +// unreachable - indicates whether removal is because block is unreachable or empty // // Return Value: // The block after the block, or blocks, removed. @@ -5282,7 +5282,7 @@ BasicBlock* Compiler::fgRemoveBlock(BasicBlock* block, bool unreachable) // If we delete such a BBJ_CALLFINALLY we also delete the BBJ_CALLFINALLYRET. if (block->isBBCallFinallyPair()) { - BasicBlock* const leaveBlock = block->Next(); + BasicBlock* const leaveBlock = bNext; bNext = leaveBlock->Next(); fgPrepareCallFinallyRetForRemoval(leaveBlock); fgRemoveBlock(leaveBlock, /* unreachable */ true); @@ -5302,12 +5302,6 @@ BasicBlock* Compiler::fgRemoveBlock(BasicBlock* block, bool unreachable) /* At this point the bbPreds and bbRefs had better be zero */ noway_assert((block->bbRefs == 0) && (block->bbPreds == nullptr)); - - if (bPrev->KindIs(BBJ_COND) && bPrev->FalseTargetIs(block)) - { - assert(bNext != nullptr); - bPrev->SetFalseTarget(bNext); - } } else // block is empty { @@ -5519,105 +5513,6 @@ void Compiler::fgPrepareCallFinallyRetForRemoval(BasicBlock* block) block->SetKind(BBJ_ALWAYS); } -//------------------------------------------------------------------------ -// fgConnectFallThrough: fix flow from a block that previously had a fall through -// -// Arguments: -// bSrc - source of fall through -// bDst - target of fall through -// -// Returns: -// Newly inserted block after bSrc that jumps to bDst, -// or nullptr if bSrc already falls through to bDst -// -BasicBlock* Compiler::fgConnectFallThrough(BasicBlock* bSrc, BasicBlock* bDst, bool noFallThroughQuirk /* = false */) -{ - assert(bSrc != nullptr); - assert(fgPredsComputed); - BasicBlock* jmpBlk = nullptr; - - /* If bSrc falls through to a block that is not bDst, we will insert a jump to bDst */ - - if (bSrc->KindIs(BBJ_COND) && bSrc->FalseTargetIs(bDst) && !bSrc->NextIs(bDst)) - { - assert(fgGetPredForBlock(bDst, bSrc) != nullptr); - - // Allow the caller to decide whether to use the old logic of maintaining implicit fallthrough behavior, - // or to allow BBJ_COND blocks' false targets to diverge from bbNext. - // TODO-NoFallThrough: Remove this quirk once callers' dependencies on implicit fallthrough are gone. 
- if (noFallThroughQuirk) - { - return jmpBlk; - } - - // Add a new block after bSrc which jumps to 'bDst' - jmpBlk = fgNewBBafter(BBJ_ALWAYS, bSrc, true, bDst); - bSrc->SetFalseTarget(jmpBlk); - fgAddRefPred(jmpBlk, bSrc, fgGetPredForBlock(bDst, bSrc)); - - // Record the loop number in the new block - jmpBlk->bbNatLoopNum = bSrc->bbNatLoopNum; - - // When adding a new jmpBlk we will set the bbWeight and bbFlags - // - if (fgHaveValidEdgeWeights && fgHaveProfileWeights()) - { - FlowEdge* const newEdge = fgGetPredForBlock(jmpBlk, bSrc); - - jmpBlk->bbWeight = (newEdge->edgeWeightMin() + newEdge->edgeWeightMax()) / 2; - if (bSrc->bbWeight == BB_ZERO_WEIGHT) - { - jmpBlk->bbWeight = BB_ZERO_WEIGHT; - } - - if (jmpBlk->bbWeight == BB_ZERO_WEIGHT) - { - jmpBlk->SetFlags(BBF_RUN_RARELY); - } - - weight_t weightDiff = (newEdge->edgeWeightMax() - newEdge->edgeWeightMin()); - weight_t slop = BasicBlock::GetSlopFraction(bSrc, bDst); - // - // If the [min/max] values for our edge weight is within the slop factor - // then we will set the BBF_PROF_WEIGHT flag for the block - // - if (weightDiff <= slop) - { - jmpBlk->SetFlags(BBF_PROF_WEIGHT); - } - } - else - { - // We set the bbWeight to the smaller of bSrc->bbWeight or bDst->bbWeight - if (bSrc->bbWeight < bDst->bbWeight) - { - jmpBlk->bbWeight = bSrc->bbWeight; - jmpBlk->CopyFlags(bSrc, BBF_RUN_RARELY); - } - else - { - jmpBlk->bbWeight = bDst->bbWeight; - jmpBlk->CopyFlags(bDst, BBF_RUN_RARELY); - } - } - - fgReplacePred(bDst, bSrc, jmpBlk); - - JITDUMP("Added an unconditional jump to " FMT_BB " after block " FMT_BB "\n", jmpBlk->GetTarget()->bbNum, - bSrc->bbNum); - } - else if (bSrc->KindIs(BBJ_ALWAYS) && bSrc->HasInitializedTarget() && bSrc->JumpsToNext()) - { - bSrc->SetFlags(BBF_NONE_QUIRK); - } - else if (bSrc->KindIs(BBJ_COND) && bSrc->NextIs(bDst)) - { - bSrc->SetFalseTarget(bDst); - } - - return jmpBlk; -} - //------------------------------------------------------------------------ // fgRenumberBlocks: update block bbNums to reflect bbNext order // @@ -6133,11 +6028,15 @@ BasicBlock* Compiler::fgRelocateEHRange(unsigned regionIndex, FG_RELOCATE_TYPE r // We have decided to insert the block(s) after fgLastBlock fgMoveBlocksAfter(bStart, bLast, insertAfterBlk); - // If bPrev falls through, we will insert a jump to block - fgConnectFallThrough(bPrev, bStart); + if (bPrev->KindIs(BBJ_ALWAYS) && bPrev->JumpsToNext()) + { + bPrev->SetFlags(BBF_NONE_QUIRK); + } - // If bLast falls through, we will insert a jump to bNext - fgConnectFallThrough(bLast, bNext); + if (bLast->KindIs(BBJ_ALWAYS) && bLast->JumpsToNext()) + { + bLast->SetFlags(BBF_NONE_QUIRK); + } #endif // !FEATURE_EH_FUNCLETS @@ -7162,9 +7061,6 @@ BasicBlock* Compiler::fgNewBBinRegionWorker(BBKinds jumpKind, } } - /* If afterBlk falls through, we insert a jump around newBlk */ - fgConnectFallThrough(afterBlk, newBlk->Next()); - // If the loop table is valid, add this block to the appropriate loop. // Note we don't verify (via flow) that this block actually belongs // to the loop, just that it is lexically within the span of blocks diff --git a/src/coreclr/jit/fgopt.cpp b/src/coreclr/jit/fgopt.cpp index 527428e6cbe41..788a2dac1e840 100644 --- a/src/coreclr/jit/fgopt.cpp +++ b/src/coreclr/jit/fgopt.cpp @@ -4707,35 +4707,26 @@ bool Compiler::fgReorderBlocks(bool useProfile) /* We have decided to insert the block(s) after 'insertAfterBlk' */ fgMoveBlocksAfter(bStart, bEnd, insertAfterBlk); - // useProfile should be true only when finalizing the block layout in Compiler::optOptimizeLayout. 
- // In this final pass, allow BBJ_COND blocks' false targets to diverge from bbNext. - // TODO-NoFallThrough: Always allow the false targets to diverge. - if (bDest) + if (bPrev->KindIs(BBJ_ALWAYS) && bPrev->JumpsToNext()) { - /* We may need to insert an unconditional branch after bPrev to bDest */ - fgConnectFallThrough(bPrev, bDest, /* noFallThroughQuirk */ useProfile); + bPrev->SetFlags(BBF_NONE_QUIRK); } - else + + if (bEnd->KindIs(BBJ_ALWAYS) && bEnd->JumpsToNext()) { - /* If bPrev falls through, we must insert a jump to block */ - fgConnectFallThrough(bPrev, block, /* noFallThroughQuirk */ useProfile); + bEnd->SetFlags(BBF_NONE_QUIRK); } - BasicBlock* bSkip = bEnd->Next(); - - /* If bEnd falls through, we must insert a jump to bNext */ - fgConnectFallThrough(bEnd, bNext, /* noFallThroughQuirk */ useProfile); - if (bStart2 == nullptr) { - /* If insertAfterBlk falls through, we are forced to */ - /* add a jump around the block(s) we just inserted */ - fgConnectFallThrough(insertAfterBlk, bSkip, /* noFallThroughQuirk */ useProfile); + if (insertAfterBlk->KindIs(BBJ_ALWAYS) && insertAfterBlk->JumpsToNext()) + { + insertAfterBlk->SetFlags(BBF_NONE_QUIRK); + } } - else + else if (bPrev2->KindIs(BBJ_ALWAYS) && bPrev2->JumpsToNext()) { - /* We may need to insert an unconditional branch after bPrev2 to bStart */ - fgConnectFallThrough(bPrev2, bStart, /* noFallThroughQuirk */ useProfile); + bPrev2->SetFlags(BBF_NONE_QUIRK); } #if DEBUG diff --git a/src/coreclr/jit/optimizebools.cpp b/src/coreclr/jit/optimizebools.cpp index 27f918ea8456e..d71f8ad3a3cbd 100644 --- a/src/coreclr/jit/optimizebools.cpp +++ b/src/coreclr/jit/optimizebools.cpp @@ -751,7 +751,7 @@ bool OptBoolsDsc::optOptimizeRangeTests() // BasicBlock* notInRangeBb = m_b1->GetTrueTarget(); BasicBlock* inRangeBb; - if (notInRangeBb == m_b2->GetTrueTarget()) + if (m_b2->TrueTargetIs(notInRangeBb)) { // Shape 1: both conditions jump to NotInRange // @@ -765,7 +765,7 @@ bool OptBoolsDsc::optOptimizeRangeTests() // ... inRangeBb = m_b2->GetFalseTarget(); } - else if (notInRangeBb == m_b2->GetFalseTarget()) + else if (m_b2->FalseTargetIs(notInRangeBb)) { // Shape 2: 2nd block jumps to InRange // @@ -807,11 +807,16 @@ bool OptBoolsDsc::optOptimizeRangeTests() return false; } + // Re-direct firstBlock to jump to inRangeBb m_comp->fgAddRefPred(inRangeBb, m_b1); if (!cmp2IsReversed) { - // Re-direct firstBlock to jump to inRangeBb m_b1->SetTrueTarget(inRangeBb); + m_b1->SetFalseTarget(notInRangeBb); + } + else + { + m_b1->SetFalseTarget(inRangeBb); } // Remove the 2nd condition block as we no longer need it diff --git a/src/coreclr/jit/optimizer.cpp b/src/coreclr/jit/optimizer.cpp index 5ac0f0cce4a96..42e746d4a29b6 100644 --- a/src/coreclr/jit/optimizer.cpp +++ b/src/coreclr/jit/optimizer.cpp @@ -2041,14 +2041,7 @@ class LoopSearch FixupFallThrough(moveAfter, moveBefore, block); FixupFallThrough(lastNonLoopBlock, nextLoopBlock, moveBefore); // Also apply any adjustments needed where the blocks were snipped out of the loop. - BasicBlock* newBlock = FixupFallThrough(previous, block, nextLoopBlock); - if (newBlock != nullptr) - { - // This new block is in the loop and is a loop exit. - loopBlocks.Insert(newBlock->bbNum); - lastExit = newBlock; - ++exitCount; - } + FixupFallThrough(previous, block, nextLoopBlock); // Update moveAfter for the next insertion. moveAfter = lastNonLoopBlock; @@ -2271,48 +2264,11 @@ class LoopSearch // oldNext - Previous value of `block->bbNext`. // newNext - New value of `block->bbNext`. 
// - // Return Value: - // If a new block is created to reconnect flow, the new block is - // returned; otherwise, nullptr. - // - BasicBlock* FixupFallThrough(BasicBlock* block, BasicBlock* oldNext, BasicBlock* newNext) + void FixupFallThrough(BasicBlock* block, BasicBlock* oldNext, BasicBlock* newNext) { // If we create a new block, that will be our return value. - BasicBlock* newBlock = nullptr; - - if (block->bbFallsThrough()) - { - // Need to reconnect the flow from `block` to `oldNext`. - - if (block->KindIs(BBJ_COND) && block->TrueTargetIs(newNext)) - { - // Reverse the jump condition - GenTree* test = block->lastNode(); - noway_assert(test->OperIsConditionalJump()); - - if (test->OperGet() == GT_JTRUE) - { - GenTree* cond = comp->gtReverseCond(test->AsOp()->gtOp1); - assert(cond == test->AsOp()->gtOp1); // Ensure `gtReverseCond` did not create a new node. - test->AsOp()->gtOp1 = cond; - } - else - { - comp->gtReverseCond(test); - } - // Redirect the Conditional JUMP - block->SetTrueTarget(block->GetFalseTarget()); - block->SetFalseTarget(newNext); - } - else - { - // Insert an unconditional jump to `oldNext` just after `block`. - newBlock = comp->fgConnectFallThrough(block, oldNext, true /* noFallThroughQuirk */); - noway_assert((newBlock == nullptr) || loopBlocks.CanRepresent(newBlock->bbNum)); - } - } - else if (block->KindIs(BBJ_ALWAYS) && block->TargetIs(newNext)) + if (block->KindIs(BBJ_ALWAYS) && block->TargetIs(newNext)) { // If block is newNext's only predecessor, move the IR from block to newNext, // but keep the now-empty block around. @@ -2359,8 +2315,6 @@ class LoopSearch } } } - - return newBlock; } //------------------------------------------------------------------------ From 3a8e94322daeb5f81a89339472cfb94d03163bbb Mon Sep 17 00:00:00 2001 From: "Aman Khalid (from Dev Box)" Date: Thu, 18 Jan 2024 19:00:42 -0500 Subject: [PATCH 03/12] Unblock bool opt --- src/coreclr/jit/optimizebools.cpp | 6 ------ 1 file changed, 6 deletions(-) diff --git a/src/coreclr/jit/optimizebools.cpp b/src/coreclr/jit/optimizebools.cpp index d71f8ad3a3cbd..57989ad72de50 100644 --- a/src/coreclr/jit/optimizebools.cpp +++ b/src/coreclr/jit/optimizebools.cpp @@ -1902,13 +1902,7 @@ PhaseStatus Compiler::optOptimizeBools() continue; } - // If the false target is not the next block, we're done - BasicBlock* b2 = b1->GetFalseTarget(); - if (!b1->NextIs(b2)) - { - break; - } // The next block must not be marked as BBF_DONT_REMOVE if (b2->HasFlag(BBF_DONT_REMOVE)) From 804357689b7bfc5346a67389fa7e8ab96b3f7f63 Mon Sep 17 00:00:00 2001 From: "Aman Khalid (from Dev Box)" Date: Thu, 18 Jan 2024 21:54:27 -0500 Subject: [PATCH 04/12] Improve asmdiffs --- src/coreclr/jit/fgopt.cpp | 115 +++++++++++++++++++--------------- src/coreclr/jit/optimizer.cpp | 32 +++++++--- 2 files changed, 91 insertions(+), 56 deletions(-) diff --git a/src/coreclr/jit/fgopt.cpp b/src/coreclr/jit/fgopt.cpp index 788a2dac1e840..0e387cefb731d 100644 --- a/src/coreclr/jit/fgopt.cpp +++ b/src/coreclr/jit/fgopt.cpp @@ -1653,7 +1653,16 @@ bool Compiler::fgOptimizeBranchToEmptyUnconditional(BasicBlock* block, BasicBloc break; case BBJ_COND: - block->SetTrueTarget(bDest->GetTarget()); + if (block->TrueTargetIs(bDest)) + { + assert(!block->FalseTargetIs(bDest)); + block->SetTrueTarget(bDest->GetTarget()); + } + else + { + assert(block->FalseTargetIs(bDest)); + block->SetFalseTarget(bDest->GetTarget()); + } break; default: @@ -4916,12 +4925,22 @@ bool Compiler::fgUpdateFlowGraph(bool doTailDuplication, bool isPhase) BasicBlock* bFalseDest = 
block->GetFalseTarget(); assert(bFalseDest != nullptr); - if (bFalseDest->KindIs(BBJ_ALWAYS) && bFalseDest->TargetIs(bDest) && bFalseDest->isEmpty()) + if (bFalseDest->KindIs(BBJ_ALWAYS) && bFalseDest->isEmpty()) { - // Optimize bFalseDest -> BBJ_ALWAYS -> bDest - block->SetFalseTarget(bDest); - fgRemoveRefPred(bFalseDest, block); - fgAddRefPred(bDest, block); + if (bFalseDest->TargetIs(bDest)) + { + // Optimize bFalseDest -> BBJ_ALWAYS -> bDest + block->SetFalseTarget(bDest); + fgRemoveRefPred(bFalseDest, block); + fgAddRefPred(bDest, block); + } + else if (bFalseDest->TargetIs(bNext)) + { + // Enable fallthrough into false target + block->SetFalseTarget(bNext); + fgRemoveRefPred(bFalseDest, block); + fgAddRefPred(bNext, block); + } } else if (bDest->KindIs(BBJ_ALWAYS) && bDest->TargetIs(bFalseDest) && bDest->isEmpty()) { @@ -4958,35 +4977,33 @@ bool Compiler::fgUpdateFlowGraph(bool doTailDuplication, bool isPhase) } } + BasicBlock* bFalseDest = block->KindIs(BBJ_COND) ? block->GetFalseTarget() : nullptr; + // Check for cases where reversing the branch condition may enable // other flow opts. // - // Current block falls through to an empty bNext BBJ_ALWAYS, and - // (a) block jump target is bNext's bbNext. + // Current block jumps to an empty bFalseDest BBJ_ALWAYS, and + // (a) block jump target is bFalseDest's bbNext. // (b) block jump target is elsewhere but join free, and - // bNext's jump target has a join. + // bFalseDest's jump target has a join. // - if (block->KindIs(BBJ_COND) && // block is a BBJ_COND block - (bNext != nullptr) && // block isn't the last block - block->FalseTargetIs(bNext) && // block falls into its false target - (bNext->bbRefs == 1) && // No other block jumps to bNext - bNext->KindIs(BBJ_ALWAYS) && // The next block is a BBJ_ALWAYS block - !bNext->JumpsToNext() && // and it doesn't jump to the next block (we might compact them) - bNext->isEmpty() && // and it is an empty block - !bNext->TargetIs(bNext) && // special case for self jumps + if ((bFalseDest != nullptr) && // block is a BBJ_COND block + (bFalseDest->bbRefs == 1) && // No other block jumps to bFalseDest + bFalseDest->KindIs(BBJ_ALWAYS) && // The next block is a BBJ_ALWAYS block + !bFalseDest->JumpsToNext() && // and it doesn't jump to the next block (we might compact them) + bFalseDest->isEmpty() && // and it is an empty block + !bFalseDest->TargetIs(bFalseDest) && // special case for self jumps !bDest->IsFirstColdBlock(this) && !fgInDifferentRegions(block, bDest)) // do not cross hot/cold sections { - assert(!block->IsLast()); - // case (a) // - const bool isJumpAroundEmpty = bNext->NextIs(bDest); + const bool isJumpAroundEmpty = bFalseDest->NextIs(bDest); // case (b) // // Note the asymmetric checks for refs == 1 and refs > 1 ensures that we - // differentiate the roles played by bDest and bNextJumpDest. We need some + // differentiate the roles played by bDest and bFalseDestTarget. We need some // sense of which arrangement is preferable to avoid getting stuck in a loop // reversing and re-reversing. 
// @@ -4997,9 +5014,9 @@ bool Compiler::fgUpdateFlowGraph(bool doTailDuplication, bool isPhase) // * don't consider lexical predecessors, or we may confuse loop recognition // * don't consider blocks of different rarities // - BasicBlock* const bNextJumpDest = bNext->GetTarget(); + BasicBlock* const bFalseDestTarget = bFalseDest->GetTarget(); const bool isJumpToJoinFree = !isJumpAroundEmpty && (bDest->bbRefs == 1) && - (bNextJumpDest->bbRefs > 1) && (bDest->bbNum > block->bbNum) && + (bFalseDestTarget->bbRefs > 1) && (bDest->bbNum > block->bbNum) && (block->isRunRarely() == bDest->isRunRarely()); bool optimizeJump = isJumpAroundEmpty || isJumpToJoinFree; @@ -5012,9 +5029,9 @@ bool Compiler::fgUpdateFlowGraph(bool doTailDuplication, bool isPhase) optimizeJump = false; } - // Also consider bNext's try region + // Also consider bFalseDest's try region // - if (bNext->hasTryIndex() && !BasicBlock::sameTryRegion(block, bNext)) + if (bFalseDest->hasTryIndex() && !BasicBlock::sameTryRegion(block, bFalseDest)) { optimizeJump = false; } @@ -5035,21 +5052,21 @@ bool Compiler::fgUpdateFlowGraph(bool doTailDuplication, bool isPhase) if (optimizeJump && isJumpToJoinFree) { - // In the join free case, we also need to move bDest right after bNext + // In the join free case, we also need to move bDest right after bFalseDest // to create same flow as in the isJumpAroundEmpty case. // - if (!fgEhAllowsMoveBlock(bNext, bDest) || bDest->isBBCallFinallyPair()) + if (!fgEhAllowsMoveBlock(bFalseDest, bDest) || bDest->isBBCallFinallyPair()) { optimizeJump = false; } else { - // We don't expect bDest to already be right after bNext. + // We don't expect bDest to already be right after bFalseDest. // - assert(!bNext->NextIs(bDest)); + assert(!bFalseDest->NextIs(bDest)); JITDUMP("\nMoving " FMT_BB " after " FMT_BB " to enable reversal\n", bDest->bbNum, - bNext->bbNum); + bFalseDest->bbNum); // Move bDest // @@ -5059,11 +5076,11 @@ bool Compiler::fgUpdateFlowGraph(bool doTailDuplication, bool isPhase) } fgUnlinkBlock(bDest); - fgInsertBBafter(bNext, bDest); + fgInsertBBafter(bFalseDest, bDest); - if (ehIsBlockEHLast(bNext)) + if (ehIsBlockEHLast(bFalseDest)) { - ehUpdateLastBlocks(bNext, bDest); + ehUpdateLastBlocks(bFalseDest, bDest); } } } @@ -5072,7 +5089,7 @@ bool Compiler::fgUpdateFlowGraph(bool doTailDuplication, bool isPhase) { JITDUMP("\nReversing a conditional jump around an unconditional jump (" FMT_BB " -> " FMT_BB ", " FMT_BB " -> " FMT_BB ")\n", - block->bbNum, bDest->bbNum, bNext->bbNum, bNextJumpDest->bbNum); + block->bbNum, bDest->bbNum, bFalseDest->bbNum, bFalseDestTarget->bbNum); // Reverse the jump condition // @@ -5091,38 +5108,38 @@ bool Compiler::fgUpdateFlowGraph(bool doTailDuplication, bool isPhase) } // Optimize the Conditional JUMP to go to the new target - assert(block->FalseTargetIs(bNext)); - block->SetTrueTarget(bNext->GetTarget()); - block->SetFalseTarget(bNext->Next()); + assert(block->FalseTargetIs(bFalseDest)); + block->SetTrueTarget(bFalseDest->GetTarget()); + block->SetFalseTarget(bFalseDest->Next()); - fgAddRefPred(bNext->GetTarget(), block, fgRemoveRefPred(bNext->GetTarget(), bNext)); + fgAddRefPred(bFalseDest->GetTarget(), block, fgRemoveRefPred(bFalseDest->GetTarget(), bFalseDest)); /* - Unlink bNext from the BasicBlock list; note that we can + Unlink bFalseDest from the BasicBlock list; note that we can do this even though other blocks could jump to it - the reason is that elsewhere in this function we always redirect jumps to jumps to jump to the final label, - so even if 
another block jumps to bNext it won't matter + so even if another block jumps to bFalseDest it won't matter once we're done since any such jump will be redirected to the final target by the time we're done here. */ - fgRemoveRefPred(bNext, block); - fgUnlinkBlockForRemoval(bNext); + fgRemoveRefPred(bFalseDest, block); + fgUnlinkBlockForRemoval(bFalseDest); /* Mark the block as removed */ - bNext->SetFlags(BBF_REMOVED); + bFalseDest->SetFlags(BBF_REMOVED); if (optLoopTableValid) { // Update the loop table if we removed the bottom of a loop, for example. - fgUpdateLoopsAfterCompacting(block, bNext); + fgUpdateLoopsAfterCompacting(block, bFalseDest); } // If this is the first Cold basic block update fgFirstColdBlock - if (bNext->IsFirstColdBlock(this)) + if (bFalseDest->IsFirstColdBlock(this)) { - fgFirstColdBlock = bNext->Next(); + fgFirstColdBlock = bFalseDest->Next(); } // @@ -5132,7 +5149,7 @@ bool Compiler::fgUpdateFlowGraph(bool doTailDuplication, bool isPhase) for (EHblkDsc* const HBtab : EHClauses(this)) { - if ((HBtab->ebdTryLast == bNext) || (HBtab->ebdHndLast == bNext)) + if ((HBtab->ebdTryLast == bFalseDest) || (HBtab->ebdHndLast == bFalseDest)) { fgSkipRmvdBlocks(HBtab); } @@ -5155,11 +5172,11 @@ bool Compiler::fgUpdateFlowGraph(bool doTailDuplication, bool isPhase) as jumping to REPEAT will cause us to delete 'block' because it currently appears to be unreachable. As it is a self loop that only has a single bbRef (itself) - However since the unlinked bNext has additional bbRefs + However since the unlinked bFalseDest has additional bbRefs (that we will later connect to 'block'), it is not really unreachable. */ - if ((bNext->bbRefs > 0) && bNext->TargetIs(block) && (block->bbRefs == 1)) + if ((bFalseDest->bbRefs > 0) && bFalseDest->TargetIs(block) && (block->bbRefs == 1)) { continue; } diff --git a/src/coreclr/jit/optimizer.cpp b/src/coreclr/jit/optimizer.cpp index 42e746d4a29b6..a94ba0a9f45bb 100644 --- a/src/coreclr/jit/optimizer.cpp +++ b/src/coreclr/jit/optimizer.cpp @@ -2038,10 +2038,10 @@ class LoopSearch comp->ehUpdateLastBlocks(moveAfter, lastNonLoopBlock); // Apply any adjustments needed for fallthrough at the boundaries of the moved region. - FixupFallThrough(moveAfter, moveBefore, block); - FixupFallThrough(lastNonLoopBlock, nextLoopBlock, moveBefore); + FixupFallThrough(moveAfter, block); + FixupFallThrough(lastNonLoopBlock, moveBefore); // Also apply any adjustments needed where the blocks were snipped out of the loop. - FixupFallThrough(previous, block, nextLoopBlock); + FixupFallThrough(previous, nextLoopBlock); // Update moveAfter for the next insertion. moveAfter = lastNonLoopBlock; @@ -2261,14 +2261,32 @@ class LoopSearch // // Arguments: // block - Block whose `bbNext` has changed. - // oldNext - Previous value of `block->bbNext`. // newNext - New value of `block->bbNext`. // - void FixupFallThrough(BasicBlock* block, BasicBlock* oldNext, BasicBlock* newNext) + void FixupFallThrough(BasicBlock* block, BasicBlock* newNext) { - // If we create a new block, that will be our return value. + if (block->KindIs(BBJ_COND) && block->TrueTargetIs(newNext)) + { + // Reverse the jump condition + GenTree* test = block->lastNode(); + noway_assert(test->OperIsConditionalJump()); + + if (test->OperGet() == GT_JTRUE) + { + GenTree* cond = comp->gtReverseCond(test->AsOp()->gtOp1); + assert(cond == test->AsOp()->gtOp1); // Ensure `gtReverseCond` did not create a new node. 
+ test->AsOp()->gtOp1 = cond; + } + else + { + comp->gtReverseCond(test); + } - if (block->KindIs(BBJ_ALWAYS) && block->TargetIs(newNext)) + // Redirect the Conditional JUMP + block->SetTrueTarget(block->GetFalseTarget()); + block->SetFalseTarget(newNext); + } + else if (block->KindIs(BBJ_ALWAYS) && block->TargetIs(newNext)) { // If block is newNext's only predecessor, move the IR from block to newNext, // but keep the now-empty block around. From 25003414f26f551bea544890cf15fc08d00bd69b Mon Sep 17 00:00:00 2001 From: "Aman Khalid (from Dev Box)" Date: Thu, 18 Jan 2024 22:05:04 -0500 Subject: [PATCH 05/12] Branch reversal pass --- src/coreclr/jit/compiler.cpp | 20 ++++++++++++++++++++ src/coreclr/jit/fgopt.cpp | 15 ++++++++------- src/coreclr/jit/stacklevelsetter.cpp | 23 ----------------------- 3 files changed, 28 insertions(+), 30 deletions(-) diff --git a/src/coreclr/jit/compiler.cpp b/src/coreclr/jit/compiler.cpp index e09aac3ac123c..60665c974f99b 100644 --- a/src/coreclr/jit/compiler.cpp +++ b/src/coreclr/jit/compiler.cpp @@ -5127,6 +5127,26 @@ void Compiler::compCompile(void** methodCodePtr, uint32_t* methodCodeSize, JitFl StackLevelSetter stackLevelSetter(this); stackLevelSetter.Run(); + // Block layout should not change at this point. + // Do one last pass to reverse any conditions that might yield more fall-through branches. + for (BasicBlock* const block : comp->Blocks()) + { + if (block->KindIs(BBJ_COND) && block->CanRemoveJumpToTarget(block->GetTrueTarget(), comp)) + { + // Reverse the jump condition + GenTree* test = block->lastNode(); + assert(test->OperIsConditionalJump()); + GenTree* cond = comp->gtReverseCond(test); + assert(cond == test); // Ensure `gtReverseCond` did not create a new node. + + BasicBlock* newFalseTarget = block->GetTrueTarget(); + BasicBlock* newTrueTarget = block->GetFalseTarget(); + block->SetTrueTarget(newTrueTarget); + block->SetFalseTarget(newFalseTarget); + assert(block->CanRemoveJumpToTarget(newFalseTarget, comp)); + } + } + // We can not add any new tracked variables after this point. lvaTrackedFixed = true; diff --git a/src/coreclr/jit/fgopt.cpp b/src/coreclr/jit/fgopt.cpp index 0e387cefb731d..7683ac323d3d3 100644 --- a/src/coreclr/jit/fgopt.cpp +++ b/src/coreclr/jit/fgopt.cpp @@ -4987,11 +4987,11 @@ bool Compiler::fgUpdateFlowGraph(bool doTailDuplication, bool isPhase) // (b) block jump target is elsewhere but join free, and // bFalseDest's jump target has a join. 
// - if ((bFalseDest != nullptr) && // block is a BBJ_COND block - (bFalseDest->bbRefs == 1) && // No other block jumps to bFalseDest - bFalseDest->KindIs(BBJ_ALWAYS) && // The next block is a BBJ_ALWAYS block - !bFalseDest->JumpsToNext() && // and it doesn't jump to the next block (we might compact them) - bFalseDest->isEmpty() && // and it is an empty block + if ((bFalseDest != nullptr) && // block is a BBJ_COND block + (bFalseDest->bbRefs == 1) && // No other block jumps to bFalseDest + bFalseDest->KindIs(BBJ_ALWAYS) && // The next block is a BBJ_ALWAYS block + !bFalseDest->JumpsToNext() && // and it doesn't jump to the next block (we might compact them) + bFalseDest->isEmpty() && // and it is an empty block !bFalseDest->TargetIs(bFalseDest) && // special case for self jumps !bDest->IsFirstColdBlock(this) && !fgInDifferentRegions(block, bDest)) // do not cross hot/cold sections @@ -5014,7 +5014,7 @@ bool Compiler::fgUpdateFlowGraph(bool doTailDuplication, bool isPhase) // * don't consider lexical predecessors, or we may confuse loop recognition // * don't consider blocks of different rarities // - BasicBlock* const bFalseDestTarget = bFalseDest->GetTarget(); + BasicBlock* const bFalseDestTarget = bFalseDest->GetTarget(); const bool isJumpToJoinFree = !isJumpAroundEmpty && (bDest->bbRefs == 1) && (bFalseDestTarget->bbRefs > 1) && (bDest->bbNum > block->bbNum) && (block->isRunRarely() == bDest->isRunRarely()); @@ -5112,7 +5112,8 @@ bool Compiler::fgUpdateFlowGraph(bool doTailDuplication, bool isPhase) block->SetTrueTarget(bFalseDest->GetTarget()); block->SetFalseTarget(bFalseDest->Next()); - fgAddRefPred(bFalseDest->GetTarget(), block, fgRemoveRefPred(bFalseDest->GetTarget(), bFalseDest)); + fgAddRefPred(bFalseDest->GetTarget(), block, + fgRemoveRefPred(bFalseDest->GetTarget(), bFalseDest)); /* Unlink bFalseDest from the BasicBlock list; note that we can diff --git a/src/coreclr/jit/stacklevelsetter.cpp b/src/coreclr/jit/stacklevelsetter.cpp index 4f37e1621e6fd..b6aabca83e32d 100644 --- a/src/coreclr/jit/stacklevelsetter.cpp +++ b/src/coreclr/jit/stacklevelsetter.cpp @@ -81,29 +81,6 @@ PhaseStatus StackLevelSetter::DoPhase() } } - // The above loop might have moved a BBJ_COND's true target to its next block. - // In such cases, reverse the condition so we can remove a branch. - if (madeChanges) - { - for (BasicBlock* const block : comp->Blocks()) - { - if (block->KindIs(BBJ_COND) && block->CanRemoveJumpToTarget(block->GetTrueTarget(), comp)) - { - // Reverse the jump condition - GenTree* test = block->lastNode(); - assert(test->OperIsConditionalJump()); - GenTree* cond = comp->gtReverseCond(test); - assert(cond == test); // Ensure `gtReverseCond` did not create a new node. - - BasicBlock* newFalseTarget = block->GetTrueTarget(); - BasicBlock* newTrueTarget = block->GetFalseTarget(); - block->SetTrueTarget(newTrueTarget); - block->SetFalseTarget(newFalseTarget); - assert(block->CanRemoveJumpToTarget(newFalseTarget, comp)); - } - } - } - return madeChanges ? 
PhaseStatus::MODIFIED_EVERYTHING : PhaseStatus::MODIFIED_NOTHING; } From 78051ffed5d1d84a19570f49bc311ebbc03d1b39 Mon Sep 17 00:00:00 2001 From: "Aman Khalid (from Dev Box)" Date: Thu, 18 Jan 2024 22:50:25 -0500 Subject: [PATCH 06/12] Fix loop exit --- src/coreclr/jit/optimizer.cpp | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/src/coreclr/jit/optimizer.cpp b/src/coreclr/jit/optimizer.cpp index 444b62ca4a649..160034f44c709 100644 --- a/src/coreclr/jit/optimizer.cpp +++ b/src/coreclr/jit/optimizer.cpp @@ -4512,6 +4512,12 @@ BasicBlock* Compiler::optFindLoopCompactionInsertionPoint(FlowGraphNaturalLoop* BasicBlock* insertionPoint = bottom; while (insertionPoint->bbFallsThrough()) { + if (insertionPoint->KindIs(BBJ_COND) && !insertionPoint->NextIs(insertionPoint->GetFalseTarget())) + { + // Found a BBJ_COND block that does not fall through, so insert after it. + break; + } + // Keep looking for a better insertion point if we can. BasicBlock* newInsertionPoint = optTryAdvanceLoopCompactionInsertionPoint(loop, insertionPoint, top, bottom); if (newInsertionPoint == nullptr) From 358d2407c471d0141c1ef79fbf02eb37bb35fb7c Mon Sep 17 00:00:00 2001 From: "Aman Khalid (from Dev Box)" Date: Fri, 19 Jan 2024 10:14:49 -0500 Subject: [PATCH 07/12] Do branch reversal pass when optimizing only --- src/coreclr/jit/compiler.cpp | 29 ++++++++++++++++------------- 1 file changed, 16 insertions(+), 13 deletions(-) diff --git a/src/coreclr/jit/compiler.cpp b/src/coreclr/jit/compiler.cpp index 014e334c697c8..1f7480ad7fc7a 100644 --- a/src/coreclr/jit/compiler.cpp +++ b/src/coreclr/jit/compiler.cpp @@ -5129,21 +5129,24 @@ void Compiler::compCompile(void** methodCodePtr, uint32_t* methodCodeSize, JitFl // Block layout should not change at this point. // Do one last pass to reverse any conditions that might yield more fall-through branches. - for (BasicBlock* const block : Blocks()) + if (opts.OptimizationEnabled()) { - if (block->KindIs(BBJ_COND) && block->CanRemoveJumpToTarget(block->GetTrueTarget(), this)) + for (BasicBlock* const block : Blocks()) { - // Reverse the jump condition - GenTree* test = block->lastNode(); - assert(test->OperIsConditionalJump()); - GenTree* cond = gtReverseCond(test); - assert(cond == test); // Ensure `gtReverseCond` did not create a new node. - - BasicBlock* newFalseTarget = block->GetTrueTarget(); - BasicBlock* newTrueTarget = block->GetFalseTarget(); - block->SetTrueTarget(newTrueTarget); - block->SetFalseTarget(newFalseTarget); - assert(block->CanRemoveJumpToTarget(newFalseTarget, this)); + if (block->KindIs(BBJ_COND) && block->CanRemoveJumpToTarget(block->GetTrueTarget(), this)) + { + // Reverse the jump condition + GenTree* test = block->lastNode(); + assert(test->OperIsConditionalJump()); + GenTree* cond = gtReverseCond(test); + assert(cond == test); // Ensure `gtReverseCond` did not create a new node. 
+ + BasicBlock* newFalseTarget = block->GetTrueTarget(); + BasicBlock* newTrueTarget = block->GetFalseTarget(); + block->SetTrueTarget(newTrueTarget); + block->SetFalseTarget(newFalseTarget); + assert(block->CanRemoveJumpToTarget(newFalseTarget, this)); + } } } From ab1ee76c9ec7045f418b316772458ade141d9105 Mon Sep 17 00:00:00 2001 From: "Aman Khalid (from Dev Box)" Date: Fri, 19 Jan 2024 10:41:35 -0500 Subject: [PATCH 08/12] Fix assert during bool opts --- src/coreclr/jit/fgopt.cpp | 8 ++------ src/coreclr/jit/optimizebools.cpp | 2 +- 2 files changed, 3 insertions(+), 7 deletions(-) diff --git a/src/coreclr/jit/fgopt.cpp b/src/coreclr/jit/fgopt.cpp index 7683ac323d3d3..dc7f186d4d13e 100644 --- a/src/coreclr/jit/fgopt.cpp +++ b/src/coreclr/jit/fgopt.cpp @@ -895,7 +895,7 @@ PhaseStatus Compiler::fgPostImportationCleanup() // fgCanCompactBlocks: Determine if a block and its bbNext successor can be compacted. // // Arguments: -// block - block to check. If nullptr, return false. +// block - block to check. Cannot be nullptr. // bNext - bbNext of `block`. If nullptr, return false. // // Returns: @@ -903,11 +903,7 @@ PhaseStatus Compiler::fgPostImportationCleanup() // bool Compiler::fgCanCompactBlocks(BasicBlock* block, BasicBlock* bNext) { - if ((block == nullptr) || (bNext == nullptr)) - { - return false; - } - + assert(block != nullptr); assert(block->NextIs(bNext)); if (!block->KindIs(BBJ_ALWAYS) || !block->TargetIs(bNext) || block->HasFlag(BBF_KEEP_BBJ_ALWAYS)) diff --git a/src/coreclr/jit/optimizebools.cpp b/src/coreclr/jit/optimizebools.cpp index 57989ad72de50..139e459c30a30 100644 --- a/src/coreclr/jit/optimizebools.cpp +++ b/src/coreclr/jit/optimizebools.cpp @@ -1020,7 +1020,7 @@ bool OptBoolsDsc::optOptimizeCompareChainCondBlock() m_b2->CopyFlags(m_b1, BBF_COPY_PROPAGATE); // Join the two blocks. This is done now to ensure that additional conditions can be chained. - if (m_comp->fgCanCompactBlocks(m_b1, m_b2)) + if (m_b1->NextIs(m_b2) && m_comp->fgCanCompactBlocks(m_b1, m_b2)) { m_comp->fgCompactBlocks(m_b1, m_b2); } From 275c58f369bd3322e1f79d87b80be8cac6e503f3 Mon Sep 17 00:00:00 2001 From: "Aman Khalid (from Dev Box)" Date: Fri, 19 Jan 2024 13:04:44 -0500 Subject: [PATCH 09/12] Overzealous assert? 
--- src/coreclr/jit/flowgraph.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/coreclr/jit/flowgraph.cpp b/src/coreclr/jit/flowgraph.cpp index 44df218870ffe..6a3ed8c9b42e5 100644 --- a/src/coreclr/jit/flowgraph.cpp +++ b/src/coreclr/jit/flowgraph.cpp @@ -4987,7 +4987,7 @@ bool FlowGraphNaturalLoop::AnalyzeIteration(NaturalLoopIterInfo* info) return false; } - assert(ContainsBlock(cond->GetTrueTarget()) != ContainsBlock(cond->GetFalseTarget())); + // assert(ContainsBlock(cond->GetTrueTarget()) != ContainsBlock(cond->GetFalseTarget())); info->TestBlock = cond; info->IterVar = comp->optIsLoopIncrTree(info->IterTree); From aad717e3d035d26a08e1353717afb7333f8a8189 Mon Sep 17 00:00:00 2001 From: "Aman Khalid (from Dev Box)" Date: Mon, 22 Jan 2024 12:05:34 -0500 Subject: [PATCH 10/12] Remove debug --- src/coreclr/jit/fgopt.cpp | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/coreclr/jit/fgopt.cpp b/src/coreclr/jit/fgopt.cpp index d8c7f9463cb26..d622602cc13e5 100644 --- a/src/coreclr/jit/fgopt.cpp +++ b/src/coreclr/jit/fgopt.cpp @@ -5251,8 +5251,6 @@ bool Compiler::fgUpdateFlowGraph(bool doTailDuplication, bool isPhase) fgDebugCheckBBlist(); fgDebugCheckUpdate(); } - - fgDispBasicBlocks(); #endif // DEBUG return modified; From cb0d354cb4469c25eb1e5952436c3357f7d91e53 Mon Sep 17 00:00:00 2001 From: "Aman Khalid (from Dev Box)" Date: Mon, 22 Jan 2024 14:06:26 -0500 Subject: [PATCH 11/12] Fix JitStress failures --- src/coreclr/jit/fgopt.cpp | 32 +++++++++++++++++++++++++++++--- 1 file changed, 29 insertions(+), 3 deletions(-) diff --git a/src/coreclr/jit/fgopt.cpp b/src/coreclr/jit/fgopt.cpp index d622602cc13e5..f3474feceda11 100644 --- a/src/coreclr/jit/fgopt.cpp +++ b/src/coreclr/jit/fgopt.cpp @@ -4851,13 +4851,23 @@ bool Compiler::fgUpdateFlowGraph(bool doTailDuplication, bool isPhase) block->SetFalseTarget(bDest); fgRemoveRefPred(bFalseDest, block); fgAddRefPred(bDest, block); + change = true; + modified = true; } - else if (bFalseDest->TargetIs(bNext)) + else if (bFalseDest->TargetIs(bNext) && (bFalseDest != bNext)) { - // Enable fallthrough into false target + // Enable fallthrough into false target (avoid loop edge case) block->SetFalseTarget(bNext); fgRemoveRefPred(bFalseDest, block); fgAddRefPred(bNext, block); + change = true; + modified = true; + } + + if ((bFalseDest->countOfInEdges() == 0) && !bFalseDest->HasFlag(BBF_DONT_REMOVE)) + { + assert(change && modified); + fgRemoveBlock(bFalseDest, /* unreachable */ true); } } else if (bDest->KindIs(BBJ_ALWAYS) && bDest->TargetIs(bFalseDest) && bDest->isEmpty()) @@ -4866,10 +4876,19 @@ bool Compiler::fgUpdateFlowGraph(bool doTailDuplication, bool isPhase) block->SetTrueTarget(bFalseDest); fgRemoveRefPred(bDest, block); fgAddRefPred(bFalseDest, block); + change = true; + modified = true; + + if ((bDest->countOfInEdges() == 0) && !bDest->HasFlag(BBF_DONT_REMOVE)) + { + fgRemoveBlock(bDest, /* unreachable */ true); + } + bDest = bFalseDest; } - if (block->FalseTargetIs(bDest)) + // Above transformations may convert block into a BBJ_ALWAYS + if (block->KindIs(BBJ_COND) && block->FalseTargetIs(bDest)) { fgRemoveConditionalJump(block); change = true; @@ -4877,6 +4896,13 @@ bool Compiler::fgUpdateFlowGraph(bool doTailDuplication, bool isPhase) assert(block->KindIs(BBJ_ALWAYS)); assert(block->TargetIs(bDest)); } + else if (block->KindIs(BBJ_ALWAYS)) + { + assert(change && modified); + assert(block->TargetIs(bDest)); + } + + bNext = block->Next(); } if (bDest != nullptr) From 335a74149ea6adee8952d5759f0f76de02698cad Mon 
Sep 17 00:00:00 2001 From: "Aman Khalid (from Dev Box)" Date: Mon, 22 Jan 2024 15:35:50 -0500 Subject: [PATCH 12/12] Update bPrev pointer --- src/coreclr/jit/fgopt.cpp | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/src/coreclr/jit/fgopt.cpp b/src/coreclr/jit/fgopt.cpp index f3474feceda11..89b8ad010aebd 100644 --- a/src/coreclr/jit/fgopt.cpp +++ b/src/coreclr/jit/fgopt.cpp @@ -4854,9 +4854,9 @@ bool Compiler::fgUpdateFlowGraph(bool doTailDuplication, bool isPhase) change = true; modified = true; } - else if (bFalseDest->TargetIs(bNext) && (bFalseDest != bNext)) + else if (bFalseDest->TargetIs(bNext) && (bFalseDest != bNext)) // special case for self jumps { - // Enable fallthrough into false target (avoid loop edge case) + // Enable fallthrough into false target block->SetFalseTarget(bNext); fgRemoveRefPred(bFalseDest, block); fgAddRefPred(bNext, block); @@ -4902,6 +4902,8 @@ bool Compiler::fgUpdateFlowGraph(bool doTailDuplication, bool isPhase) assert(block->TargetIs(bDest)); } + // We might have removed previous/next block, so update those pointers here + bPrev = block->Prev(); bNext = block->Next(); }
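
Patches 05 and 07 add a late pass that, when a conditional block's true target ends up as the block laid out immediately after it, reverses the condition and swaps the two targets so the fall-through edge no longer needs an emitted jump. Below is a self-contained toy model of that layout decision; CondBlock and ReverseForFallThrough are illustrative names only and stand in for the real gtReverseCond/SetTrueTarget/SetFalseTarget machinery.

#include <cstdio>
#include <utility>
#include <vector>

struct CondBlock
{
    int  id;
    int  trueTarget;  // block id jumped to when the condition holds
    int  falseTarget; // block id jumped to otherwise
    bool reversed = false;
};

// If the true target is the lexically next block, flip the condition and swap the
// targets so that the false edge becomes a plain fall-through (no branch needed).
void ReverseForFallThrough(CondBlock& b, int nextBlockId)
{
    if (b.trueTarget == nextBlockId)
    {
        std::swap(b.trueTarget, b.falseTarget);
        b.reversed = !b.reversed; // a real compiler would also rewrite the compare/jump node
    }
}

int main()
{
    // Block 1 branches to block 2 when true, but block 2 is next in layout,
    // so reversing the condition lets the false edge fall through instead.
    std::vector<CondBlock> blocks = {{1, 2, 5}};
    ReverseForFallThrough(blocks[0], 2);
    std::printf("BB1: true -> BB%d, false (fall through) -> BB%d, reversed=%d\n",
                blocks[0].trueTarget, blocks[0].falseTarget, (int)blocks[0].reversed);
    return 0;
}
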