Skip to content

Commit

Permalink
JIT: Straighten out flow during early jump threading
Browse files Browse the repository at this point in the history
After early jump threading has kicked in, we can frequently prove that a
branch will go in one direction. This was previously left up to RBO;
instead, try to fold it immediately and straighten out the flow before
we build SSA, so that we can refine the phis.

Fix #101176
  • Loading branch information
jakobbotsch committed Jul 9, 2024
1 parent e336326 commit 965da21
Show file tree
Hide file tree
Showing 4 changed files with 144 additions and 16 deletions.
1 change: 1 addition & 0 deletions src/coreclr/jit/compiler.h
Original file line number Diff line number Diff line change
Expand Up @@ -6128,6 +6128,7 @@ class Compiler
#endif // FEATURE_EH_WINDOWS_X86

bool fgOptimizeUncondBranchToSimpleCond(BasicBlock* block, BasicBlock* target);
bool fgFoldSimpleCondByForwardSub(BasicBlock* block);

bool fgBlockEndFavorsTailDuplication(BasicBlock* block, unsigned lclNum);

Expand Down
141 changes: 137 additions & 4 deletions src/coreclr/jit/fgopt.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -1966,6 +1966,12 @@ bool Compiler::fgBlockIsGoodTailDuplicationCandidate(BasicBlock* target, unsigne
return false;
}

// No point duplicating this block if it would not remove (part of) the join.
if ((target->GetTrueTarget() == target) || (target->GetFalseTarget() == target))
{
return false;
}

Statement* const lastStmt = target->lastStmt();
Statement* const firstStmt = target->FirstNonPhiDef();

Expand Down Expand Up @@ -2265,6 +2271,108 @@ bool Compiler::fgOptimizeUncondBranchToSimpleCond(BasicBlock* block, BasicBlock*
return true;
}

//-------------------------------------------------------------
// fgFoldSimpleCondByForwardSub:
//   Try to refine the flow of a block that may have just been tail duplicated
//   or compacted. If the block ends in JTRUE(relop(lcl, cns)) and the
//   statement immediately before the JTRUE is a constant store to that same
//   local, forward substitute the constant into the relop and attempt to fold
//   the conditional away entirely.
//
// Arguments:
//   block - block that was tail duplicated or compacted
//
// Return Value:
//   true if control flow was changed
//
bool Compiler::fgFoldSimpleCondByForwardSub(BasicBlock* block)
{
    assert(block->KindIs(BBJ_COND));
    GenTree* jtrue = block->lastStmt()->GetRootNode();
    assert(jtrue->OperIs(GT_JTRUE));

    // Only handle a plain comparison under the JTRUE.
    GenTree* relop = jtrue->gtGetOp1();
    if (!relop->OperIsCompare())
    {
        return false;
    }

    GenTree* op1 = relop->gtGetOp1();
    GenTree* op2 = relop->gtGetOp2();

    // Find the local-var operand compared against an integral constant.
    // Remember the use edge (lclUse) so the local can be replaced in place.
    GenTree**            lclUse;
    GenTreeLclVarCommon* lcl;

    if (op1->OperIs(GT_LCL_VAR) && op2->IsIntegralConst())
    {
        lclUse = &relop->AsOp()->gtOp1;
        lcl    = op1->AsLclVarCommon();
    }
    else if (op2->OperIs(GT_LCL_VAR) && op1->IsIntegralConst())
    {
        lclUse = &relop->AsOp()->gtOp2;
        lcl    = op2->AsLclVarCommon();
    }
    else
    {
        return false;
    }

    // There must be a statement before the JTRUE; note the second check
    // guards against the JTRUE being the only statement in the block (the
    // statement list links can wrap back to the last statement).
    Statement* secondLastStmt = block->lastStmt()->GetPrevStmt();
    if ((secondLastStmt == nullptr) || (secondLastStmt == block->lastStmt()))
    {
        return false;
    }

    // That statement must be a store to a local...
    GenTree* prevTree = secondLastStmt->GetRootNode();
    if (!prevTree->OperIs(GT_STORE_LCL_VAR))
    {
        return false;
    }

    // ... to the same local that the relop compares...
    GenTreeLclVarCommon* store = prevTree->AsLclVarCommon();
    if (store->GetLclNum() != lcl->GetLclNum())
    {
        return false;
    }

    // ... of an integral constant value.
    if (!store->Data()->IsIntegralConst())
    {
        return false;
    }

    // Bail if the store, its data, and the use disagree on actual type;
    // substituting across a type change would not be value-preserving.
    if (genActualType(store) != genActualType(store->Data()) || (genActualType(store) != genActualType(lcl)))
    {
        return false;
    }

    JITDUMP("Forward substituting local after jump threading. Before:\n");
    DISPSTMT(block->lastStmt());

    JITDUMP("\nAfter:\n");

    // Clone the stored constant; if the local is a small type the store would
    // have normalized the value, so apply (and fold) the equivalent cast to
    // keep the substituted constant's value identical to what a load of the
    // local would produce.
    LclVarDsc* varDsc  = lvaGetDesc(lcl);
    GenTree*   newData = gtCloneExpr(store->Data());
    if (varTypeIsSmall(varDsc) && fgCastNeeded(store->Data(), varDsc->TypeGet()))
    {
        newData = gtNewCastNode(TYP_INT, newData, false, varDsc->TypeGet());
        newData = gtFoldExpr(newData);
    }

    // Replace the local's use in the relop with the constant.
    *lclUse = newData;
    DISPSTMT(block->lastStmt());

    // Now the relop compares two constants; try to fold it to true/false.
    JITDUMP("\nNow trying to fold...\n");
    jtrue->AsUnOp()->gtOp1 = gtFoldExpr(relop);
    DISPSTMT(block->lastStmt());

    // If the conditional folded, the BBJ_COND should have been converted to
    // an unconditional BBJ_ALWAYS, i.e. flow was straightened out.
    Compiler::FoldResult result = fgFoldConditional(block);
    if (result != Compiler::FoldResult::FOLD_DID_NOTHING)
    {
        assert(block->KindIs(BBJ_ALWAYS));
        return true;
    }

    return false;
}

//-------------------------------------------------------------
// fgRemoveConditionalJump:
// Optimize a BBJ_COND block that unconditionally jumps to the same target
Expand Down Expand Up @@ -5176,10 +5284,35 @@ bool Compiler::fgUpdateFlowGraph(bool doTailDuplication /* = false */,
{
assert(block->KindIs(BBJ_COND));
assert(bNext == block->Next());
change = true;
modified = true;
bDest = block->GetTrueTarget();
bFalseDest = block->GetFalseTarget();
change = true;
modified = true;

if (fgFoldSimpleCondByForwardSub(block))
{
// It is likely another pred of the target now can
// similarly have its control flow straightened out.
// Try to compact it and repeat the optimization for
// it.
if (bDest->bbRefs == 1)
{
BasicBlock* otherPred = bDest->bbPreds->getSourceBlock();
JITDUMP("Trying to compact last pred " FMT_BB " of " FMT_BB " that we now bypass\n",
otherPred->bbNum, bDest->bbNum);
if (fgCanCompactBlock(otherPred))
{
fgCompactBlock(otherPred);
fgFoldSimpleCondByForwardSub(otherPred);
}
}

assert(block->KindIs(BBJ_ALWAYS));
bDest = block->GetTarget();
}
else
{
bDest = block->GetTrueTarget();
bFalseDest = block->GetFalseTarget();
}
}
}

Expand Down
1 change: 0 additions & 1 deletion src/coreclr/jit/promotion.h
Original file line number Diff line number Diff line change
Expand Up @@ -200,7 +200,6 @@ class PromotionLiveness
unsigned* m_structLclToTrackedIndex = nullptr;
unsigned m_numVars = 0;
BasicBlockLiveness* m_bbInfo = nullptr;
bool m_hasPossibleBackEdge = false;
BitVec m_liveIn;
BitVec m_ehLiveVars;
JitHashTable<GenTree*, JitPtrKeyFuncs<GenTree>, BitVec> m_aggDeaths;
Expand Down
17 changes: 6 additions & 11 deletions src/coreclr/jit/promotionliveness.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -294,22 +294,19 @@ void PromotionLiveness::MarkIndex(unsigned index, bool isUse, bool isDef, BitVec
//
void PromotionLiveness::InterBlockLiveness()
{
FlowGraphDfsTree* dfsTree = m_compiler->m_dfsTree;
assert(dfsTree != nullptr);

bool changed;
do
{
changed = false;

for (BasicBlock* block = m_compiler->fgLastBB; block != nullptr; block = block->Prev())
{
m_hasPossibleBackEdge |= !block->IsLast() && (block->Next()->bbNum <= block->bbNum);
changed |= PerBlockLiveness(block);
}

if (!m_hasPossibleBackEdge)
for (unsigned i = 0; i < dfsTree->GetPostOrderCount(); i++)
{
break;
changed |= PerBlockLiveness(dfsTree->GetPostOrder(i));
}
} while (changed);
} while (changed && dfsTree->HasCycle());

#ifdef DEBUG
if (m_compiler->verbose)
Expand Down Expand Up @@ -345,7 +342,6 @@ bool PromotionLiveness::PerBlockLiveness(BasicBlock* block)
BitVecOps::ClearD(m_bvTraits, bbInfo.LiveOut);
block->VisitRegularSuccs(m_compiler, [=, &bbInfo](BasicBlock* succ) {
BitVecOps::UnionD(m_bvTraits, bbInfo.LiveOut, m_bbInfo[succ->bbNum].LiveIn);
m_hasPossibleBackEdge |= succ->bbNum <= block->bbNum;
return BasicBlockVisit::Continue;
});

Expand All @@ -357,7 +353,6 @@ bool PromotionLiveness::PerBlockLiveness(BasicBlock* block)
AddHandlerLiveVars(block, m_ehLiveVars);
BitVecOps::UnionD(m_bvTraits, m_liveIn, m_ehLiveVars);
BitVecOps::UnionD(m_bvTraits, bbInfo.LiveOut, m_ehLiveVars);
m_hasPossibleBackEdge = true;
}

bool liveInChanged = !BitVecOps::Equal(m_bvTraits, bbInfo.LiveIn, m_liveIn);
Expand Down

0 comments on commit 965da21

Please sign in to comment.