Diffstat (limited to 'llvm/lib/Analysis/BasicAliasAnalysis.cpp')
-rw-r--r--  llvm/lib/Analysis/BasicAliasAnalysis.cpp  46  ++++++++++++++++++++++++++++------------------
1 file changed, 28 insertions(+), 18 deletions(-)
diff --git a/llvm/lib/Analysis/BasicAliasAnalysis.cpp b/llvm/lib/Analysis/BasicAliasAnalysis.cpp
index 1028b52..19c4393 100644
--- a/llvm/lib/Analysis/BasicAliasAnalysis.cpp
+++ b/llvm/lib/Analysis/BasicAliasAnalysis.cpp
@@ -1113,10 +1113,6 @@ AliasResult BasicAAResult::aliasGEP(
return BaseAlias;
}
- // Bail on analysing scalable LocationSize
- if (V1Size.isScalable() || V2Size.isScalable())
- return AliasResult::MayAlias;
-
// If there is a constant difference between the pointers, but the difference
// is less than the size of the associated memory object, then we know
// that the objects are partially overlapping. If the difference is
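The context comment above states the invariant the rest of this change builds on: a constant difference between two pointers that is smaller than the left access size means the accesses partially overlap. A minimal standalone sketch of that rule (the object, sizes, and offsets are hypothetical, not taken from the patch):

    #include <cassert>
    #include <cstdint>

    int main() {
      char Object[16];
      char *P1 = Object;      // left access: 8 bytes at offset 0
      char *P2 = Object + 4;  // right access: 8 bytes at offset 4
      uint64_t Off = static_cast<uint64_t>(P2 - P1); // constant difference: 4
      uint64_t LSize = 8;                            // left access size
      // Off < LSize: bytes [4, 8) are touched through both pointers, so the
      // accesses partially overlap rather than being NoAlias.
      assert(Off < LSize);
      return 0;
    }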
@@ -1146,24 +1142,38 @@ AliasResult BasicAAResult::aliasGEP(
if (!VLeftSize.hasValue())
return AliasResult::MayAlias;
- const uint64_t LSize = VLeftSize.getValue();
- if (Off.ult(LSize)) {
- // Conservatively drop processing if a phi was visited and/or offset is
- // too big.
- AliasResult AR = AliasResult::PartialAlias;
- if (VRightSize.hasValue() && Off.ule(INT32_MAX) &&
- (Off + VRightSize.getValue()).ule(LSize)) {
- // Memory referenced by right pointer is nested. Save the offset in
- // cache. Note that originally offset estimated as GEP1-V2, but
- // AliasResult contains the shift that represents GEP1+Offset=V2.
- AR.setOffset(-Off.getSExtValue());
- AR.swap(Swapped);
+ const TypeSize LSize = VLeftSize.getValue();
+ if (!LSize.isScalable()) {
+ if (Off.ult(LSize)) {
+ // Conservatively drop processing if a phi was visited and/or offset is
+ // too big.
+ AliasResult AR = AliasResult::PartialAlias;
+ if (VRightSize.hasValue() && !VRightSize.isScalable() &&
+ Off.ule(INT32_MAX) && (Off + VRightSize.getValue()).ule(LSize)) {
+ // Memory referenced by right pointer is nested. Save the offset in
+ // cache. Note that originally offset estimated as GEP1-V2, but
+ // AliasResult contains the shift that represents GEP1+Offset=V2.
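+ // (Illustrative numbers, not part of the patch: Off = 4, VRightSize = 8
+ // and LSize = 16 satisfy 4 + 8 <= 16, i.e. [4, 12) nests inside [0, 16).)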
+ AR.setOffset(-Off.getSExtValue());
+ AR.swap(Swapped);
+ }
+ return AR;
}
- return AR;
+ return AliasResult::NoAlias;
+ } else {
+ // We can use the getVScaleRange to prove that Off >= (CR.upper * LSize).
+ ConstantRange CR = getVScaleRange(&F, Off.getBitWidth());
+ bool Overflow;
+ APInt UpperRange = CR.getUnsignedMax().umul_ov(
+ APInt(Off.getBitWidth(), LSize.getKnownMinValue()), Overflow);
+ if (!Overflow && Off.uge(UpperRange))
+ return AliasResult::NoAlias;
}
- return AliasResult::NoAlias;
}
+ // Bail on analysing scalable LocationSize
+ if (V1Size.isScalable() || V2Size.isScalable())
+ return AliasResult::MayAlias;
+
// We need to know both access sizes for all the following heuristics.
if (!V1Size.hasValue() || !V2Size.hasValue())
return AliasResult::MayAlias;
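The new scalable branch replaces what was previously an unconditional MayAlias bail-out: when the left size depends on vscale, its largest possible byte size is vscale_max * KnownMinValue, and a constant offset at least that large proves NoAlias. A standalone sketch of that arithmetic (plain integers instead of LLVM's APInt/ConstantRange; clearsScalableAccess and the vscale bound of 16 are assumptions for illustration):

    #include <cstdint>
    #include <iostream>

    // True when the constant pointer difference Off provably clears the largest
    // possible size of a scalable access of vscale * KnownMin bytes.
    bool clearsScalableAccess(uint64_t Off, uint64_t KnownMin, uint64_t VScaleMax) {
      uint64_t UpperRange;
      // Mirrors the patch's umul_ov check: on multiplication overflow nothing
      // can be concluded, matching the !Overflow guard in the hunk above.
      if (__builtin_mul_overflow(VScaleMax, KnownMin, &UpperRange))
        return false;
      return Off >= UpperRange; // Off >= maximal access size => NoAlias
    }

    int main() {
      // E.g. a <vscale x 4 x i32> access: KnownMin = 16 bytes, vscale <= 16.
      std::cout << clearsScalableAccess(256, 16, 16) << '\n'; // 1 -> NoAlias
      std::cout << clearsScalableAccess(128, 16, 16) << '\n'; // 0 -> keep analysing
      return 0;
    }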