aboutsummaryrefslogtreecommitdiff
path: root/llvm/lib/Analysis/ScalarEvolution.cpp
diff options
context:
space:
mode:
authorFlorian Hahn <flo@fhahn.com>2022-12-28 00:09:52 +0000
committerFlorian Hahn <flo@fhahn.com>2022-12-28 00:09:52 +0000
commita564048899a1a1de526a02cfd1a5d691ec31bafd (patch)
treed7884b704bb702dcf877f9dc3f8d3f3afcfbfa30 /llvm/lib/Analysis/ScalarEvolution.cpp
parent58906e4901ec5b7ed230d7fa96123654f6a974af (diff)
downloadllvm-a564048899a1a1de526a02cfd1a5d691ec31bafd.zip
llvm-a564048899a1a1de526a02cfd1a5d691ec31bafd.tar.gz
llvm-a564048899a1a1de526a02cfd1a5d691ec31bafd.tar.bz2
[SCEV] Properly clean up duplicated FoldCacheUser ID entries.
The current code did not properly handle duplicated FoldCacheUser ID entries when overwriting an existing entry in the FoldCache. This triggered verification failures reported by @uabelho and #59721. The patch fixes that by removing stale IDs when overwriting an existing entry in the cache. Fixes #59721.
Diffstat (limited to 'llvm/lib/Analysis/ScalarEvolution.cpp')
-rw-r--r--llvm/lib/Analysis/ScalarEvolution.cpp37
1 files changed, 27 insertions, 10 deletions
diff --git a/llvm/lib/Analysis/ScalarEvolution.cpp b/llvm/lib/Analysis/ScalarEvolution.cpp
index 34a9cdd..ecf65b4 100644
--- a/llvm/lib/Analysis/ScalarEvolution.cpp
+++ b/llvm/lib/Analysis/ScalarEvolution.cpp
@@ -1624,6 +1624,29 @@ static APInt extractConstantWithoutWrapping(ScalarEvolution &SE,
return APInt(BitWidth, 0);
}
+static void insertFoldCacheEntry(
+ const ScalarEvolution::FoldID &ID, const SCEV *S,
+ DenseMap<ScalarEvolution::FoldID, const SCEV *> &FoldCache,
+ DenseMap<const SCEV *, SmallVector<ScalarEvolution::FoldID, 2>>
+ &FoldCacheUser) {
+ auto I = FoldCache.insert({ID, S});
+ if (!I.second) {
+ // Remove FoldCacheUser entry for ID when replacing an existing FoldCache
+ // entry.
+ auto &UserIDs = FoldCacheUser[I.first->second];
+ assert(count(UserIDs, ID) == 1 && "unexpected duplicates in UserIDs");
+ for (unsigned I = 0; I != UserIDs.size(); ++I)
+ if (UserIDs[I] == ID) {
+ std::swap(UserIDs[I], UserIDs.back());
+ break;
+ }
+ UserIDs.pop_back();
+ I.first->second = S;
+ }
+ auto R = FoldCacheUser.insert({S, {}});
+ R.first->second.push_back(ID);
+}
+
const SCEV *
ScalarEvolution::getZeroExtendExpr(const SCEV *Op, Type *Ty, unsigned Depth) {
assert(getTypeSizeInBits(Op->getType()) < getTypeSizeInBits(Ty) &&
@@ -1642,11 +1665,8 @@ ScalarEvolution::getZeroExtendExpr(const SCEV *Op, Type *Ty, unsigned Depth) {
return Iter->second;
const SCEV *S = getZeroExtendExprImpl(Op, Ty, Depth);
- if (!isa<SCEVZeroExtendExpr>(S)) {
- FoldCache.insert({ID, S});
- auto R = FoldCacheUser.insert({S, {}});
- R.first->second.push_back(ID);
- }
+ if (!isa<SCEVZeroExtendExpr>(S))
+ insertFoldCacheEntry(ID, S, FoldCache, FoldCacheUser);
return S;
}
@@ -1968,11 +1988,8 @@ ScalarEvolution::getSignExtendExpr(const SCEV *Op, Type *Ty, unsigned Depth) {
return Iter->second;
const SCEV *S = getSignExtendExprImpl(Op, Ty, Depth);
- if (!isa<SCEVSignExtendExpr>(S)) {
- FoldCache.insert({ID, S});
- auto R = FoldCacheUser.insert({S, {}});
- R.first->second.push_back(ID);
- }
+ if (!isa<SCEVSignExtendExpr>(S))
+ insertFoldCacheEntry(ID, S, FoldCache, FoldCacheUser);
return S;
}