@@ -1074,40 +1074,14 @@ Value *SCEVExpander::expandAddRecExprLiterally(const SCEVAddRecExpr *S) {
         normalizeForPostIncUse(S, Loops, SE, /*CheckInvertible=*/false));
   }
 
-  // Strip off any non-loop-dominating component from the addrec start.
   const SCEV *Start = Normalized->getStart();
-  const SCEV *PostLoopOffset = nullptr;
-  if (!SE.properlyDominates(Start, L->getHeader())) {
-    PostLoopOffset = Start;
-    Start = SE.getConstant(Normalized->getType(), 0);
-    Normalized = cast<SCEVAddRecExpr>(
-        SE.getAddRecExpr(Start, Normalized->getStepRecurrence(SE),
-                         Normalized->getLoop(),
-                         Normalized->getNoWrapFlags(SCEV::FlagNW)));
-  }
-
-  // Strip off any non-loop-dominating component from the addrec step.
   const SCEV *Step = Normalized->getStepRecurrence(SE);
-  const SCEV *PostLoopScale = nullptr;
-  if (!SE.dominates(Step, L->getHeader())) {
-    PostLoopScale = Step;
-    Step = SE.getConstant(Normalized->getType(), 1);
-    if (!Start->isZero()) {
-      // The normalization below assumes that Start is constant zero, so if
-      // it isn't re-associate Start to PostLoopOffset.
-      assert(!PostLoopOffset && "Start not-null but PostLoopOffset set?");
-      PostLoopOffset = Start;
-      Start = SE.getConstant(Normalized->getType(), 0);
-    }
-    Normalized =
-      cast<SCEVAddRecExpr>(SE.getAddRecExpr(
-        Start, Step, Normalized->getLoop(),
-        Normalized->getNoWrapFlags(SCEV::FlagNW)));
-  }
+  assert(SE.properlyDominates(Start, L->getHeader()) &&
+         "Start does not properly dominate loop header");
+  assert(SE.dominates(Step, L->getHeader()) && "Step not dominate loop header");
 
-  // Expand the core addrec. If we need post-loop scaling, force it to
-  // expand to an integer type to avoid the need for additional casting.
-  Type *ExpandTy = PostLoopScale ? IntTy : STy;
+  // Expand the core addrec.
+  Type *ExpandTy = STy;
   // We can't use a pointer type for the addrec if the pointer type is
   // non-integral.
   Type *AddRecPHIExpandTy =
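Annotation (not part of the diff): the two new asserts encode the invariant that an addrec's start and step must already be available at the loop header before expansion. Below is a minimal standalone C++ sketch of what an affine addrec {Start,+,Step}<L> means when expanded as a loop PHI; the concrete values are made up for illustration.

```c++
#include <cstdio>

// Hypothetical model of an affine addrec {Start,+,Step}<L>: the expander
// materializes it as a PHI that begins at Start and adds Step on every
// iteration. Both operands must be computable before the loop header,
// which is what the new properlyDominates/dominates asserts guarantee.
int main() {
  const int Start = 7; // computed before the loop: dominates the header
  const int Step = 3;  // likewise loop-invariant and available up front
  int IV = Start;      // the expanded PHI node
  for (int I = 0; I != 4; ++I) {
    printf("iter %d: IV = %d\n", I, IV);
    IV += Step; // per-iteration increment of the recurrence
  }
  return 0;
}
```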
@@ -1188,28 +1162,6 @@ Value *SCEVExpander::expandAddRecExprLiterally(const SCEVAddRecExpr *S) {
                             Result);
   }
 
-  // Re-apply any non-loop-dominating scale.
-  if (PostLoopScale) {
-    assert(S->isAffine() && "Can't linearly scale non-affine recurrences.");
-    Result = InsertNoopCastOfTo(Result, IntTy);
-    Result = Builder.CreateMul(Result, expandCodeFor(PostLoopScale, IntTy));
-  }
-
-  // Re-apply any non-loop-dominating offset.
-  if (PostLoopOffset) {
-    if (isa<PointerType>(ExpandTy)) {
-      if (Result->getType()->isIntegerTy()) {
-        Value *Base = expandCodeFor(PostLoopOffset, ExpandTy);
-        Result = expandAddToGEP(SE.getUnknown(Result), Base);
-      } else {
-        Result = expandAddToGEP(PostLoopOffset, Result);
-      }
-    } else {
-      Result = InsertNoopCastOfTo(Result, IntTy);
-      Result = Builder.CreateAdd(Result, expandCodeFor(PostLoopOffset, IntTy));
-    }
-  }
-
   return Result;
 }
 
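Annotation (not part of the diff): the deleted fixup relied on the affine identity {Offset,+,Scale} == {0,+,1} * Scale + Offset, expanding the canonical addrec inside the loop and re-applying the non-dominating scale and offset afterwards. A self-contained check of that identity, using arbitrary example values:

```c++
#include <cassert>

// The removed code rewrote a non-dominating affine addrec
// {Offset,+,Scale} as the canonical {0,+,1} and fixed the result up
// after the loop: Result = IV * PostLoopScale + PostLoopOffset. This
// verifies that identity for a few iterations.
int main() {
  const int PostLoopOffset = 5, PostLoopScale = 4;
  for (int I = 0; I != 8; ++I) {
    int Canonical = I;                                      // {0,+,1} at iteration I
    int Fixed = Canonical * PostLoopScale + PostLoopOffset; // post-loop fixup
    int Direct = PostLoopOffset + I * PostLoopScale;        // {Offset,+,Scale} directly
    assert(Fixed == Direct && "fixup must match the original addrec");
  }
  return 0;
}
```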
@@ -2339,12 +2291,6 @@ struct SCEVFindUnsafe {
       }
     }
     if (const SCEVAddRecExpr *AR = dyn_cast<SCEVAddRecExpr>(S)) {
-      const SCEV *Step = AR->getStepRecurrence(SE);
-      if (!AR->isAffine() && !SE.dominates(Step, AR->getLoop()->getHeader())) {
-        IsUnsafe = true;
-        return false;
-      }
-
       // For non-affine addrecs or in non-canonical mode we need a preheader
       // to insert into.
       if (!AR->getLoop()->getLoopPreheader() &&