Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

JIT: Remove Compiler::fgConnectFallThrough and BBJ_COND fix-ups in loop phases #97191

Closed
wants to merge 15 commits into the base branch from the author's branch (branch names not captured in this page extract)
Closed
6 changes: 2 additions & 4 deletions src/coreclr/jit/block.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -844,8 +844,7 @@ void BasicBlock::CopyTarget(Compiler* compiler, const BasicBlock* from)
SetEhf(new (compiler, CMK_BasicBlock) BBehfDesc(compiler, from->GetEhfTargets()));
break;
case BBJ_COND:
// TODO-NoFallThrough: Copy false target, too?
SetCond(from->GetTrueTarget(), Next());
SetCond(from->GetTrueTarget(), from->GetFalseTarget());
break;
case BBJ_ALWAYS:
SetKindAndTarget(from->GetKind(), from->GetTarget());
Expand Down Expand Up @@ -886,8 +885,7 @@ void BasicBlock::TransferTarget(BasicBlock* from)
from->bbEhfTargets = nullptr; // Make sure nobody uses the descriptor after this.
break;
case BBJ_COND:
// TODO-NoFallThrough: Copy false target, too?
SetCond(from->GetTrueTarget(), Next());
SetCond(from->GetTrueTarget(), from->GetFalseTarget());
break;
case BBJ_ALWAYS:
SetKindAndTarget(from->GetKind(), from->GetTarget());
Expand Down
23 changes: 23 additions & 0 deletions src/coreclr/jit/compiler.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -5127,6 +5127,29 @@ void Compiler::compCompile(void** methodCodePtr, uint32_t* methodCodeSize, JitFl
StackLevelSetter stackLevelSetter(this);
stackLevelSetter.Run();

// Block layout should not change at this point.
// Do one last pass to reverse any conditions that might yield more fall-through branches.
if (opts.OptimizationEnabled())
{
for (BasicBlock* const block : Blocks())
{
if (block->KindIs(BBJ_COND) && block->CanRemoveJumpToTarget(block->GetTrueTarget(), this))
{
// Reverse the jump condition
GenTree* test = block->lastNode();
assert(test->OperIsConditionalJump());
GenTree* cond = gtReverseCond(test);
assert(cond == test); // Ensure `gtReverseCond` did not create a new node.

BasicBlock* newFalseTarget = block->GetTrueTarget();
BasicBlock* newTrueTarget = block->GetFalseTarget();
block->SetTrueTarget(newTrueTarget);
block->SetFalseTarget(newFalseTarget);
assert(block->CanRemoveJumpToTarget(newFalseTarget, this));
}
}
}
Comment on lines +5137 to +5158
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This should be a proper phase ensuring post-phase checks run, and ensuring the changed IR nodes are dumped.

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I was planning on removing this change from this PR, and possibly introducing it if/when we remove the condition reversals done in earlier phases. I'll add it as a new phase then.

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Sounds good... sorry about the draft review.

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

No worries! I appreciate the feedback -- this should be ready soon.

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

this should be ready soon.

Famous last words... closing this in favor of #97488.


// We can not add any new tracked variables after this point.
lvaTrackedFixed = true;

Expand Down
4 changes: 1 addition & 3 deletions src/coreclr/jit/compiler.h
Original file line number Diff line number Diff line change
Expand Up @@ -5973,8 +5973,6 @@ class Compiler

void fgUpdateLoopsAfterCompacting(BasicBlock* block, BasicBlock* bNext);

BasicBlock* fgConnectFallThrough(BasicBlock* bSrc, BasicBlock* bDst, bool noFallThroughQuirk = false);

bool fgRenumberBlocks();

bool fgExpandRarelyRunBlocks();
Expand Down Expand Up @@ -6831,7 +6829,7 @@ class Compiler
bool optCompactLoop(FlowGraphNaturalLoop* loop);
BasicBlock* optFindLoopCompactionInsertionPoint(FlowGraphNaturalLoop* loop, BasicBlock* top);
BasicBlock* optTryAdvanceLoopCompactionInsertionPoint(FlowGraphNaturalLoop* loop, BasicBlock* insertionPoint, BasicBlock* top, BasicBlock* bottom);
bool optLoopCompactionFixupFallThrough(BasicBlock* block, BasicBlock* oldNext, BasicBlock* newNext);
bool optLoopCompactionFixupFallThrough(BasicBlock* block, BasicBlock* newNext);
bool optCreatePreheader(FlowGraphNaturalLoop* loop);
void optSetPreheaderWeight(FlowGraphNaturalLoop* loop, BasicBlock* preheader);

Expand Down
127 changes: 11 additions & 116 deletions src/coreclr/jit/fgbasic.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -4838,8 +4838,7 @@ BasicBlock* Compiler::fgSplitBlockAtEnd(BasicBlock* curr)
curr->RemoveFlags(BBF_HAS_JMP | BBF_RETLESS_CALL);

// Transfer the kind and target. Do this after the code above, to avoid null-ing out the old targets used by the
// above code (and so newBlock->bbNext is valid, so SetCond() can initialize bbFalseTarget if newBlock is a
// BBJ_COND).
// above code.
newBlock->TransferTarget(curr);

// Default to fallthrough, and add the arc for that.
Expand Down Expand Up @@ -5253,7 +5252,7 @@ void Compiler::fgUnlinkRange(BasicBlock* bBeg, BasicBlock* bEnd)
//
// Arguments:
// block - the block to remove
// unreachable - the block to remove
// unreachable - indicates whether removal is because block is unreachable or empty
//
// Return Value:
// The block after the block, or blocks, removed.
Expand Down Expand Up @@ -5303,7 +5302,7 @@ BasicBlock* Compiler::fgRemoveBlock(BasicBlock* block, bool unreachable)
// If we delete such a BBJ_CALLFINALLY we also delete the BBJ_CALLFINALLYRET.
if (block->isBBCallFinallyPair())
{
BasicBlock* const leaveBlock = block->Next();
BasicBlock* const leaveBlock = bNext;
bNext = leaveBlock->Next();
fgPrepareCallFinallyRetForRemoval(leaveBlock);
fgRemoveBlock(leaveBlock, /* unreachable */ true);
Expand All @@ -5323,12 +5322,6 @@ BasicBlock* Compiler::fgRemoveBlock(BasicBlock* block, bool unreachable)

/* At this point the bbPreds and bbRefs had better be zero */
noway_assert((block->bbRefs == 0) && (block->bbPreds == nullptr));

if (bPrev->KindIs(BBJ_COND) && bPrev->FalseTargetIs(block))
{
assert(bNext != nullptr);
bPrev->SetFalseTarget(bNext);
}
}
else // block is empty
{
Expand Down Expand Up @@ -5540,105 +5533,6 @@ void Compiler::fgPrepareCallFinallyRetForRemoval(BasicBlock* block)
block->SetKind(BBJ_ALWAYS);
}

//------------------------------------------------------------------------
// fgConnectFallThrough: fix flow from a block that previously had a fall through
//
// Arguments:
//    bSrc - source of fall through
//    bDst - target of fall through
//    noFallThroughQuirk - if true, skip inserting a jump block for a BBJ_COND whose
//                         false target is bDst but is not the lexical next block; the
//                         false target is left diverged from bbNext (no quirk applied)
//
// Returns:
//    Newly inserted block after bSrc that jumps to bDst,
//    or nullptr if bSrc already falls through to bDst
//
// Notes:
//    Requires pred lists to be computed (asserted below). Only three shapes of bSrc
//    are acted on; any other (bSrc, bDst) combination is a no-op returning nullptr.
//
BasicBlock* Compiler::fgConnectFallThrough(BasicBlock* bSrc, BasicBlock* bDst, bool noFallThroughQuirk /* = false */)
{
assert(bSrc != nullptr);
assert(fgPredsComputed);
BasicBlock* jmpBlk = nullptr;

// Case 1: bSrc is a BBJ_COND whose false (fall-through) target is bDst, but bDst is not
// the lexical next block, so the implicit fall through is broken. Insert an explicit
// BBJ_ALWAYS jump block to bDst right after bSrc (unless the caller opted out).

if (bSrc->KindIs(BBJ_COND) && bSrc->FalseTargetIs(bDst) && !bSrc->NextIs(bDst))
{
// bSrc must already be a pred of bDst; we reuse that edge's data below.
assert(fgGetPredForBlock(bDst, bSrc) != nullptr);

// Allow the caller to decide whether to use the old logic of maintaining implicit fallthrough behavior,
// or to allow BBJ_COND blocks' false targets to diverge from bbNext.
// TODO-NoFallThrough: Remove this quirk once callers' dependencies on implicit fallthrough are gone.
if (noFallThroughQuirk)
{
return jmpBlk;
}

// Add a new block after bSrc which jumps to 'bDst'
jmpBlk = fgNewBBafter(BBJ_ALWAYS, bSrc, true, bDst);
// bSrc's false edge now goes through the new jump block instead of directly to bDst.
bSrc->SetFalseTarget(jmpBlk);
fgAddRefPred(jmpBlk, bSrc, fgGetPredForBlock(bDst, bSrc));

// Record the loop number in the new block
jmpBlk->bbNatLoopNum = bSrc->bbNatLoopNum;

// When adding a new jmpBlk we will set the bbWeight and bbFlags
//
if (fgHaveValidEdgeWeights && fgHaveProfileWeights())
{
// Use the (bSrc -> jmpBlk) edge weights to derive the new block's weight.
FlowEdge* const newEdge = fgGetPredForBlock(jmpBlk, bSrc);

// Midpoint of the edge's [min, max] weight range.
jmpBlk->bbWeight = (newEdge->edgeWeightMin() + newEdge->edgeWeightMax()) / 2;
if (bSrc->bbWeight == BB_ZERO_WEIGHT)
{
jmpBlk->bbWeight = BB_ZERO_WEIGHT;
}

if (jmpBlk->bbWeight == BB_ZERO_WEIGHT)
{
jmpBlk->SetFlags(BBF_RUN_RARELY);
}

weight_t weightDiff = (newEdge->edgeWeightMax() - newEdge->edgeWeightMin());
weight_t slop       = BasicBlock::GetSlopFraction(bSrc, bDst);
//
// If the [min/max] values for our edge weight is within the slop factor
//    then we will set the BBF_PROF_WEIGHT flag for the block
//
if (weightDiff <= slop)
{
jmpBlk->SetFlags(BBF_PROF_WEIGHT);
}
}
else
{
// No trustworthy edge weights: we set the bbWeight to the smaller of
// bSrc->bbWeight or bDst->bbWeight, and inherit rarely-run status from
// whichever block supplied the weight.
if (bSrc->bbWeight < bDst->bbWeight)
{
jmpBlk->bbWeight = bSrc->bbWeight;
jmpBlk->CopyFlags(bSrc, BBF_RUN_RARELY);
}
else
{
jmpBlk->bbWeight = bDst->bbWeight;
jmpBlk->CopyFlags(bDst, BBF_RUN_RARELY);
}
}

// bDst's pred is now the jump block, not bSrc.
fgReplacePred(bDst, bSrc, jmpBlk);

JITDUMP("Added an unconditional jump to " FMT_BB " after block " FMT_BB "\n", jmpBlk->GetTarget()->bbNum,
bSrc->bbNum);
}
// Case 2: bSrc is a BBJ_ALWAYS that jumps to its lexical next block; mark it so later
// phases know the jump is redundant (quirk flag, no new block needed).
else if (bSrc->KindIs(BBJ_ALWAYS) && bSrc->HasInitializedTarget() && bSrc->JumpsToNext())
{
bSrc->SetFlags(BBF_NONE_QUIRK);
}
// Case 3: bSrc is a BBJ_COND whose lexical next block is bDst; just (re)point the
// false target at bDst to restore the implicit fall through.
else if (bSrc->KindIs(BBJ_COND) && bSrc->NextIs(bDst))
{
bSrc->SetFalseTarget(bDst);
}

return jmpBlk;
}

//------------------------------------------------------------------------
// fgRenumberBlocks: update block bbNums to reflect bbNext order
//
Expand Down Expand Up @@ -6154,11 +6048,15 @@ BasicBlock* Compiler::fgRelocateEHRange(unsigned regionIndex, FG_RELOCATE_TYPE r
// We have decided to insert the block(s) after fgLastBlock
fgMoveBlocksAfter(bStart, bLast, insertAfterBlk);

// If bPrev falls through, we will insert a jump to block
fgConnectFallThrough(bPrev, bStart);
if (bPrev->KindIs(BBJ_ALWAYS) && bPrev->JumpsToNext())
{
bPrev->SetFlags(BBF_NONE_QUIRK);
}

// If bLast falls through, we will insert a jump to bNext
fgConnectFallThrough(bLast, bNext);
if (bLast->KindIs(BBJ_ALWAYS) && bLast->JumpsToNext())
{
bLast->SetFlags(BBF_NONE_QUIRK);
}

#endif // !FEATURE_EH_FUNCLETS

Expand Down Expand Up @@ -7183,9 +7081,6 @@ BasicBlock* Compiler::fgNewBBinRegionWorker(BBKinds jumpKind,
}
}

/* If afterBlk falls through, we insert a jump around newBlk */
fgConnectFallThrough(afterBlk, newBlk->Next());

// If the loop table is valid, add this block to the appropriate loop.
// Note we don't verify (via flow) that this block actually belongs
// to the loop, just that it is lexically within the span of blocks
Expand Down
Loading
Loading