Commit 675080a

[SCEV] Construct SCEV iteratively.
This patch updates SCEV construction to work iteratively instead of recursively in most cases. It resolves stack overflow issues when trying to construct SCEVs for certain inputs, e.g. PR45201.

The basic approach is to use a worklist to queue operands of V which need to be created before V. To do so, the current patch adds a getOperandsToCreate function which collects the operands SCEV construction depends on for a given value. This is a slight duplication with createSCEV. At the moment, SCEVs for phis are still created recursively.

Fixes #32078, #42594, #44546, #49293, #49599, #55333, #55511

Reviewed By: nikic

Differential Revision: https://reviews.llvm.org/D114650
1 parent: 4ee6b78
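
To illustrate the approach, here is a minimal standalone sketch of the same two-phase worklist pattern, under simplifying assumptions: Node, computeResult, and evaluateIteratively are hypothetical stand-ins for Value, createSCEV, and createSCEVIter, and the shortcut where getOperandsToCreate can return a SCEV directly is omitted. Each value is pushed twice, first to discover its operands and later, once they are available, to compute its own result, so arbitrarily deep expression chains no longer consume call-stack frames.

// Sketch only: post-order worklist evaluation over an expression DAG
// (hypothetical names, not the ScalarEvolution API).
#include <map>
#include <utility>
#include <vector>

struct Node {
  int Leaf = 0;                  // value contributed by this node itself
  std::vector<Node *> Operands;  // operands that must be evaluated first
};

// Stand-in for the real per-value construction step (createSCEV in the patch);
// it may only run once all operand results are available.
static int computeResult(const Node *N, const std::map<const Node *, int> &Results) {
  int Sum = N->Leaf;
  for (const Node *Op : N->Operands)
    Sum += Results.at(Op);
  return Sum;
}

static int evaluateIteratively(const Node *Root) {
  std::map<const Node *, int> Results;              // memoized results (the ValueExprMap role)
  std::vector<std::pair<const Node *, bool>> Stack; // (node, operands already queued?)
  Stack.push_back({Root, true});
  Stack.push_back({Root, false});
  while (!Stack.empty()) {
    auto [N, OperandsQueued] = Stack.back();
    Stack.pop_back();
    if (Results.count(N))
      continue;                                     // already computed earlier
    if (OperandsQueued) {
      Results[N] = computeResult(N, Results);       // operands are all computed by now
      continue;
    }
    // Re-queue N for computation, then queue its operands so they are
    // processed first (they sit above N on the stack).
    Stack.push_back({N, true});
    for (const Node *Op : N->Operands)
      Stack.push_back({Op, false});
  }
  return Results.at(Root);
}

The actual patch below follows the same shape, but memoizes into ValueExprMap via insertValueToMap, lets getOperandsToCreate return a SCEV directly for trivially constructible values, and still falls back to the recursive createSCEV for phis.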

File tree: 2 files changed (+217, -13 lines)

llvm/include/llvm/Analysis/ScalarEvolution.h

Lines changed: 9 additions & 1 deletion
@@ -1575,9 +1575,17 @@ class ScalarEvolution {
  ConstantRange getRangeForUnknownRecurrence(const SCEVUnknown *U);

  /// We know that there is no SCEV for the specified value. Analyze the
-  /// expression.
+  /// expression recursively.
  const SCEV *createSCEV(Value *V);

+  /// We know that there is no SCEV for the specified value. Create a new SCEV
+  /// for \p V iteratively.
+  const SCEV *createSCEVIter(Value *V);
+  /// Collect operands of \p V for which SCEV expressions should be constructed
+  /// first. Returns a SCEV directly if it can be constructed trivially for \p
+  /// V.
+  const SCEV *getOperandsToCreate(Value *V, SmallVectorImpl<Value *> &Ops);
+
  /// Provide the special handling we need to analyze PHI SCEVs.
  const SCEV *createNodeForPHI(PHINode *PN);

llvm/lib/Analysis/ScalarEvolution.cpp

Lines changed: 208 additions & 12 deletions
@@ -4406,18 +4406,9 @@ void ScalarEvolution::insertValueToMap(Value *V, const SCEV *S) {
const SCEV *ScalarEvolution::getSCEV(Value *V) {
  assert(isSCEVable(V->getType()) && "Value is not SCEVable!");

-  const SCEV *S = getExistingSCEV(V);
-  if (S == nullptr) {
-    S = createSCEV(V);
-    // During PHI resolution, it is possible to create two SCEVs for the same
-    // V, so it is needed to double check whether V->S is inserted into
-    // ValueExprMap before insert S->{V, 0} into ExprValueMap.
-    std::pair<ValueExprMapType::iterator, bool> Pair =
-        ValueExprMap.insert({SCEVCallbackVH(V, this), S});
-    if (Pair.second)
-      ExprValueMap[S].insert(V);
-  }
-  return S;
+  if (const SCEV *S = getExistingSCEV(V))
+    return S;
+  return createSCEVIter(V);
}

const SCEV *ScalarEvolution::getExistingSCEV(Value *V) {
@@ -7185,6 +7176,211 @@ bool ScalarEvolution::loopIsFiniteByAssumption(const Loop *L) {
  return isFinite(L) || (isMustProgress(L) && loopHasNoSideEffects(L));
}

+const SCEV *ScalarEvolution::createSCEVIter(Value *V) {
+  // Worklist item with a Value and a bool indicating whether all operands have
+  // been visited already.
+  using PointerTy = PointerIntPair<Value *, 1, bool>;
+  SmallVector<PointerTy> Stack;
+
+  Stack.emplace_back(V, true);
+  Stack.emplace_back(V, false);
+  while (!Stack.empty()) {
+    auto E = Stack.pop_back_val();
+    Value *CurV = E.getPointer();
+
+    if (getExistingSCEV(CurV))
+      continue;
+
+    SmallVector<Value *> Ops;
+    const SCEV *CreatedSCEV = nullptr;
+    // If all operands have been visited already, create the SCEV.
+    if (E.getInt()) {
+      CreatedSCEV = createSCEV(CurV);
+    } else {
+      // Otherwise get the operands we need to create SCEVs for before creating
+      // the SCEV for CurV. If the SCEV for CurV can be constructed trivially,
+      // just use it.
+      CreatedSCEV = getOperandsToCreate(CurV, Ops);
+    }
+
+    if (CreatedSCEV) {
+      insertValueToMap(CurV, CreatedSCEV);
+    } else {
+      // Queue CurV for SCEV creation, followed by its operands which need to
+      // be constructed first.
+      Stack.emplace_back(CurV, true);
+      for (Value *Op : Ops)
+        Stack.emplace_back(Op, false);
+    }
+  }
+
+  return getExistingSCEV(V);
+}
+
+const SCEV *
+ScalarEvolution::getOperandsToCreate(Value *V, SmallVectorImpl<Value *> &Ops) {
+  if (!isSCEVable(V->getType()))
+    return getUnknown(V);
+
+  if (Instruction *I = dyn_cast<Instruction>(V)) {
+    // Don't attempt to analyze instructions in blocks that aren't
+    // reachable. Such instructions don't matter, and they aren't required
+    // to obey basic rules for definitions dominating uses which this
+    // analysis depends on.
+    if (!DT.isReachableFromEntry(I->getParent()))
+      return getUnknown(PoisonValue::get(V->getType()));
+  } else if (ConstantInt *CI = dyn_cast<ConstantInt>(V))
+    return getConstant(CI);
+  else if (GlobalAlias *GA = dyn_cast<GlobalAlias>(V)) {
+    if (!GA->isInterposable()) {
+      Ops.push_back(GA->getAliasee());
+      return nullptr;
+    }
+    return getUnknown(V);
+  } else if (!isa<ConstantExpr>(V))
+    return getUnknown(V);
+
+  Operator *U = cast<Operator>(V);
+  if (auto BO = MatchBinaryOp(U, DT)) {
+    bool IsConstArg = isa<ConstantInt>(BO->RHS);
+    switch (U->getOpcode()) {
+    case Instruction::Add: {
+      // For additions and multiplications, traverse add/mul chains for which we
+      // can potentially create a single SCEV, to reduce the number of
+      // get{Add,Mul}Expr calls.
+      do {
+        if (BO->Op) {
+          if (BO->Op != V && getExistingSCEV(BO->Op)) {
+            Ops.push_back(BO->Op);
+            break;
+          }
+        }
+        Ops.push_back(BO->RHS);
+        auto NewBO = MatchBinaryOp(BO->LHS, DT);
+        if (!NewBO || (NewBO->Opcode != Instruction::Add &&
+                       NewBO->Opcode != Instruction::Sub)) {
+          Ops.push_back(BO->LHS);
+          break;
+        }
+        BO = NewBO;
+      } while (true);
+      return nullptr;
+    }
+
+    case Instruction::Mul: {
+      do {
+        if (BO->Op) {
+          if (BO->Op != V && getExistingSCEV(BO->Op)) {
+            Ops.push_back(BO->Op);
+            break;
+          }
+        }
+        Ops.push_back(BO->RHS);
+        auto NewBO = MatchBinaryOp(BO->LHS, DT);
+        if (!NewBO || NewBO->Opcode != Instruction::Mul) {
+          Ops.push_back(BO->LHS);
+          break;
+        }
+        BO = NewBO;
+      } while (true);
+      return nullptr;
+    }
+
+    case Instruction::AShr:
+    case Instruction::Shl:
+    case Instruction::Xor:
+      if (!IsConstArg)
+        return nullptr;
+      break;
+    case Instruction::And:
+    case Instruction::Or:
+      if (!IsConstArg && BO->LHS->getType()->isIntegerTy(1))
+        return nullptr;
+      break;
+    default:
+      break;
+    }
+
+    Ops.push_back(BO->LHS);
+    Ops.push_back(BO->RHS);
+    return nullptr;
+  }
+
+  switch (U->getOpcode()) {
+  case Instruction::Trunc:
+  case Instruction::ZExt:
+  case Instruction::SExt:
+  case Instruction::PtrToInt:
+    Ops.push_back(U->getOperand(0));
+    return nullptr;
+
+  case Instruction::BitCast:
+    if (isSCEVable(U->getType()) && isSCEVable(U->getOperand(0)->getType())) {
+      Ops.push_back(U->getOperand(0));
+      return nullptr;
+    }
+    return getUnknown(V);
+
+  case Instruction::SDiv:
+  case Instruction::SRem:
+    Ops.push_back(U->getOperand(0));
+    Ops.push_back(U->getOperand(1));
+    return nullptr;
+
+  case Instruction::GetElementPtr:
+    assert(cast<GEPOperator>(U)->getSourceElementType()->isSized() &&
+           "GEP source element type must be sized");
+    for (Value *Index : U->operands())
+      Ops.push_back(Index);
+    return nullptr;
+
+  case Instruction::IntToPtr:
+    return getUnknown(V);
+
+  case Instruction::PHI:
+    // Keep constructing SCEVs for phis recursively for now.
+    return nullptr;
+
+  case Instruction::Select:
+    for (Value *Inc : U->operands())
+      Ops.push_back(Inc);
+    return nullptr;
+    break;
+
+  case Instruction::Call:
+  case Instruction::Invoke:
+    if (Value *RV = cast<CallBase>(U)->getReturnedArgOperand()) {
+      Ops.push_back(RV);
+      return nullptr;
+    }
+
+    if (auto *II = dyn_cast<IntrinsicInst>(U)) {
+      switch (II->getIntrinsicID()) {
+      case Intrinsic::abs:
+        Ops.push_back(II->getArgOperand(0));
+        return nullptr;
+      case Intrinsic::umax:
+      case Intrinsic::umin:
+      case Intrinsic::smax:
+      case Intrinsic::smin:
+      case Intrinsic::usub_sat:
+      case Intrinsic::uadd_sat:
+        Ops.push_back(II->getArgOperand(0));
+        Ops.push_back(II->getArgOperand(1));
+        return nullptr;
+      case Intrinsic::start_loop_iterations:
+        Ops.push_back(II->getArgOperand(0));
+        return nullptr;
+      default:
+        break;
+      }
+    }
+    break;
+  }
+
+  return nullptr;
+}
+
const SCEV *ScalarEvolution::createSCEV(Value *V) {
  if (!isSCEVable(V->getType()))
    return getUnknown(V);
