[SCEV] Require that addrec operands dominate the loop
SCEVExpander currently has special handling, not exercised by any
lit test, for the case where the start or the step of an addrec
does not dominate the loop header.

Initially I thought that this was entirely dead code, because
addrec operands are required to be loop invariant. However,
SCEV currently allows creating an addrec with operands that are
loop invariant but defined *after* the loop.

This doesn't seem like a useful case to allow, and we don't
appear to rely on it outside of a single, easy-to-adjust unit test.
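
To illustrate, a minimal sketch of the pattern that is no longer
accepted (a hypothetical fragment, assuming a ScalarEvolution &SE and
a Loop *L, with V an integer Value defined in a block that executes
only after the loop, much like the unit test adjusted below):

    // V never changes while L runs, so it is loop-invariant for L,
    // but it is defined *after* L and does not dominate L's header.
    const SCEV *Start = SE.getSCEV(V);
    const SCEV *Step = SE.getConstant(Start->getType(), 1);
    // Before this commit: accepted, with fallback handling in
    // SCEVExpander. After this commit: trips the strengthened assert
    // in getAddRecExpr:
    //   "SCEVAddRecExpr operand is not available at loop entry!"
    const SCEV *AR = SE.getAddRecExpr(Start, Step, L, SCEV::FlagAnyWrap);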
nikic committed Sep 22, 2023
1 parent 0c7d0ad commit 2d8d622
Showing 3 changed files with 10 additions and 64 deletions.
4 changes: 2 additions & 2 deletions llvm/lib/Analysis/ScalarEvolution.cpp
@@ -3671,8 +3671,8 @@ ScalarEvolution::getAddRecExpr(SmallVectorImpl<const SCEV *> &Operands,
     assert(!Operands[i]->getType()->isPointerTy() && "Step must be integer");
   }
   for (unsigned i = 0, e = Operands.size(); i != e; ++i)
-    assert(isLoopInvariant(Operands[i], L) &&
-           "SCEVAddRecExpr operand is not loop-invariant!");
+    assert(isAvailableAtLoopEntry(Operands[i], L) &&
+           "SCEVAddRecExpr operand is not available at loop entry!");
 #endif
 
   if (Operands.back()->isZero()) {
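
For reference, the helper the new assert uses is a strict
strengthening of loop invariance. A paraphrase of
ScalarEvolution::isAvailableAtLoopEntry (believed accurate to the
implementation, shown here for context only):

    // An operand is usable at loop entry iff it is invariant in L
    // *and* its definition dominates L's header, i.e. the value
    // already exists before L begins executing.
    bool ScalarEvolution::isAvailableAtLoopEntry(const SCEV *S, const Loop *L) {
      return isLoopInvariant(S, L) && properlyDominates(S, L->getHeader());
    }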
64 changes: 5 additions & 59 deletions llvm/lib/Transforms/Utils/ScalarEvolutionExpander.cpp
@@ -1074,40 +1074,14 @@ Value *SCEVExpander::expandAddRecExprLiterally(const SCEVAddRecExpr *S) {
         normalizeForPostIncUse(S, Loops, SE, /*CheckInvertible=*/false));
   }
 
-  // Strip off any non-loop-dominating component from the addrec start.
   const SCEV *Start = Normalized->getStart();
-  const SCEV *PostLoopOffset = nullptr;
-  if (!SE.properlyDominates(Start, L->getHeader())) {
-    PostLoopOffset = Start;
-    Start = SE.getConstant(Normalized->getType(), 0);
-    Normalized = cast<SCEVAddRecExpr>(
-        SE.getAddRecExpr(Start, Normalized->getStepRecurrence(SE),
-                         Normalized->getLoop(),
-                         Normalized->getNoWrapFlags(SCEV::FlagNW)));
-  }
-
-  // Strip off any non-loop-dominating component from the addrec step.
   const SCEV *Step = Normalized->getStepRecurrence(SE);
-  const SCEV *PostLoopScale = nullptr;
-  if (!SE.dominates(Step, L->getHeader())) {
-    PostLoopScale = Step;
-    Step = SE.getConstant(Normalized->getType(), 1);
-    if (!Start->isZero()) {
-      // The normalization below assumes that Start is constant zero, so if
-      // it isn't re-associate Start to PostLoopOffset.
-      assert(!PostLoopOffset && "Start not-null but PostLoopOffset set?");
-      PostLoopOffset = Start;
-      Start = SE.getConstant(Normalized->getType(), 0);
-    }
-    Normalized =
-      cast<SCEVAddRecExpr>(SE.getAddRecExpr(
-        Start, Step, Normalized->getLoop(),
-        Normalized->getNoWrapFlags(SCEV::FlagNW)));
-  }
+  assert(SE.properlyDominates(Start, L->getHeader()) &&
+         "Start does not properly dominate loop header");
+  assert(SE.dominates(Step, L->getHeader()) && "Step not dominate loop header");
 
-  // Expand the core addrec. If we need post-loop scaling, force it to
-  // expand to an integer type to avoid the need for additional casting.
-  Type *ExpandTy = PostLoopScale ? IntTy : STy;
+  // Expand the core addrec.
+  Type *ExpandTy = STy;
   // We can't use a pointer type for the addrec if the pointer type is
   // non-integral.
   Type *AddRecPHIExpandTy =
@@ -1188,28 +1162,6 @@ Value *SCEVExpander::expandAddRecExprLiterally(const SCEVAddRecExpr *S) {
                        Result);
   }
 
-  // Re-apply any non-loop-dominating scale.
-  if (PostLoopScale) {
-    assert(S->isAffine() && "Can't linearly scale non-affine recurrences.");
-    Result = InsertNoopCastOfTo(Result, IntTy);
-    Result = Builder.CreateMul(Result, expandCodeFor(PostLoopScale, IntTy));
-  }
-
-  // Re-apply any non-loop-dominating offset.
-  if (PostLoopOffset) {
-    if (isa<PointerType>(ExpandTy)) {
-      if (Result->getType()->isIntegerTy()) {
-        Value *Base = expandCodeFor(PostLoopOffset, ExpandTy);
-        Result = expandAddToGEP(SE.getUnknown(Result), Base);
-      } else {
-        Result = expandAddToGEP(PostLoopOffset, Result);
-      }
-    } else {
-      Result = InsertNoopCastOfTo(Result, IntTy);
-      Result = Builder.CreateAdd(Result, expandCodeFor(PostLoopOffset, IntTy));
-    }
-  }
-
   return Result;
 }
 
@@ -2339,12 +2291,6 @@ struct SCEVFindUnsafe {
       }
     }
     if (const SCEVAddRecExpr *AR = dyn_cast<SCEVAddRecExpr>(S)) {
-      const SCEV *Step = AR->getStepRecurrence(SE);
-      if (!AR->isAffine() && !SE.dominates(Step, AR->getLoop()->getHeader())) {
-        IsUnsafe = true;
-        return false;
-      }
-
       // For non-affine addrecs or in non-canonical mode we need a preheader
       // to insert into.
       if (!AR->getLoop()->getLoopPreheader() &&
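
The fallbacks deleted above relied on the affine identity
{Start,+,Step}<L> == Start + Step * {0,+,1}<L>: the expander emitted
the canonical addrec {0,+,1}<L> inside the loop and re-applied the
non-dominating Step (PostLoopScale) and Start (PostLoopOffset) at the
use site, where those values are available. The SCEVFindUnsafe check
dropped in the last hunk becomes unreachable once getAddRecExpr
rejects such operands outright. A self-contained numeric check of the
identity (illustrative only, not LLVM code):

    #include <cassert>
    #include <cstdint>

    // Closed form of the affine addrec {Start,+,Step} at iteration i.
    int64_t addRecAt(int64_t Start, int64_t Step, int64_t i) {
      return Start + Step * i;
    }

    int main() {
      for (int64_t i = 0; i < 10; ++i) {
        // {0,+,1} is the canonical induction variable...
        int64_t Canonical = addRecAt(0, 1, i);
        // ...and scaling then offsetting it reproduces {7,+,3}.
        assert(addRecAt(7, 3, i) == 7 + 3 * Canonical);
      }
      return 0;
    }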
6 changes: 3 additions & 3 deletions llvm/unittests/Transforms/Utils/ScalarEvolutionExpanderTest.cpp
@@ -129,13 +129,13 @@ TEST_F(ScalarEvolutionExpanderTest, SCEVZeroExtendExprNonIntegral) {
 * top:
 * br label %L.ph
 * L.ph:
+* %gepbase = getelementptr i64 addrspace(10)* %arg, i64 1
 * br label %L
 * L:
 * %phi = phi i64 [i64 0, %L.ph], [ %add, %L2 ]
 * %add = add i64 %phi2, 1
 * br i1 undef, label %post, label %L2
 * post:
-* %gepbase = getelementptr i64 addrspace(10)* %arg, i64 1
 * #= %gep = getelementptr i64 addrspace(10)* %gepbase, i64 %add =#
 * ret void
 *
@@ -170,6 +170,8 @@ TEST_F(ScalarEvolutionExpanderTest, SCEVZeroExtendExprNonIntegral) {
   Builder.CreateBr(LPh);
 
   Builder.SetInsertPoint(LPh);
+  Value *GepBase =
+      Builder.CreateGEP(T_int64, Arg, ConstantInt::get(T_int64, 1));
   Builder.CreateBr(L);
 
   Builder.SetInsertPoint(L);
@@ -180,8 +182,6 @@ TEST_F(ScalarEvolutionExpanderTest, SCEVZeroExtendExprNonIntegral) {
   Phi->addIncoming(Add, L);
 
   Builder.SetInsertPoint(Post);
-  Value *GepBase =
-      Builder.CreateGEP(T_int64, Arg, ConstantInt::get(T_int64, 1));
   Instruction *Ret = Builder.CreateRetVoid();
 
   ScalarEvolution SE = buildSE(*F);
