about summary refs log tree commit diff
path: root/llvm/lib/Analysis/ValueTracking.cpp
diff options
context:
space:
mode:
Diffstat (limited to 'llvm/lib/Analysis/ValueTracking.cpp')
-rw-r--r-- llvm/lib/Analysis/ValueTracking.cpp | 90
1 file changed, 65 insertions(+), 25 deletions(-)
diff --git a/llvm/lib/Analysis/ValueTracking.cpp b/llvm/lib/Analysis/ValueTracking.cpp
index 4dd536e..9ecc5ce 100644
--- a/llvm/lib/Analysis/ValueTracking.cpp
+++ b/llvm/lib/Analysis/ValueTracking.cpp
@@ -1358,24 +1358,32 @@ static void computeKnownBitsFromOperator(const Operator *I,
case Instruction::GetElementPtr: {
// Analyze all of the subscripts of this getelementptr instruction
// to determine if we can prove known low zero bits.
- KnownBits LocalKnown(BitWidth);
- computeKnownBits(I->getOperand(0), LocalKnown, Depth + 1, Q);
- unsigned TrailZ = LocalKnown.countMinTrailingZeros();
+ computeKnownBits(I->getOperand(0), Known, Depth + 1, Q);
+ // Accumulate the constant indices in a separate variable
+ // to minimize the number of calls to computeForAddSub.
+ APInt AccConstIndices(BitWidth, 0, /*IsSigned*/ true);
gep_type_iterator GTI = gep_type_begin(I);
+ // If the inbounds keyword is not present, the offsets are added to the
+ // base address with silently-wrapping two’s complement arithmetic.
+ bool IsInBounds = cast<GEPOperator>(I)->isInBounds();
for (unsigned i = 1, e = I->getNumOperands(); i != e; ++i, ++GTI) {
// TrailZ can only become smaller, short-circuit if we hit zero.
- if (TrailZ == 0)
+ if (Known.isUnknown())
break;
Value *Index = I->getOperand(i);
+
+ // Handle case when index is zero.
+ Constant *CIndex = dyn_cast<Constant>(Index);
+ if (CIndex && CIndex->isZeroValue())
+ continue;
+
if (StructType *STy = GTI.getStructTypeOrNull()) {
// Handle struct member offset arithmetic.
- // Handle case when index is vector zeroinitializer
- Constant *CIndex = cast<Constant>(Index);
- if (CIndex->isZeroValue())
- continue;
+ assert(CIndex &&
+ "Access to structure field must be known at compile time");
if (CIndex->getType()->isVectorTy())
Index = CIndex->getSplatValue();
@@ -1383,26 +1391,58 @@ static void computeKnownBitsFromOperator(const Operator *I,
unsigned Idx = cast<ConstantInt>(Index)->getZExtValue();
const StructLayout *SL = Q.DL.getStructLayout(STy);
uint64_t Offset = SL->getElementOffset(Idx);
- TrailZ = std::min<unsigned>(TrailZ,
- countTrailingZeros(Offset));
+ AccConstIndices += Offset;
+ continue;
+ }
+
+ // Handle array index arithmetic.
+ Type *IndexedTy = GTI.getIndexedType();
+ if (!IndexedTy->isSized()) {
+ Known.resetAll();
+ break;
+ }
+
+ unsigned IndexBitWidth = Index->getType()->getScalarSizeInBits();
+ KnownBits IndexBits(IndexBitWidth);
+ computeKnownBits(Index, IndexBits, Depth + 1, Q);
+ TypeSize IndexTypeSize = Q.DL.getTypeAllocSize(IndexedTy);
+ uint64_t TypeSizeInBytes = IndexTypeSize.getKnownMinSize();
+ KnownBits ScalingFactor(IndexBitWidth);
+ // Multiply by current sizeof type.
+ // &A[i] == A + i * sizeof(*A[i]).
+ if (IndexTypeSize.isScalable()) {
+ // For scalable types the only thing we know about sizeof is
+ // that this is a multiple of the minimum size.
+ ScalingFactor.Zero.setLowBits(countTrailingZeros(TypeSizeInBytes));
+ } else if (IndexBits.isConstant()) {
+ APInt IndexConst = IndexBits.getConstant();
+ APInt ScalingFactor(IndexBitWidth, TypeSizeInBytes);
+ IndexConst *= ScalingFactor;
+ AccConstIndices += IndexConst.sextOrTrunc(BitWidth);
+ continue;
} else {
- // Handle array index arithmetic.
- Type *IndexedTy = GTI.getIndexedType();
- if (!IndexedTy->isSized()) {
- TrailZ = 0;
- break;
- }
- unsigned GEPOpiBits = Index->getType()->getScalarSizeInBits();
- uint64_t TypeSize = Q.DL.getTypeAllocSize(IndexedTy).getKnownMinSize();
- LocalKnown.Zero = LocalKnown.One = APInt(GEPOpiBits, 0);
- computeKnownBits(Index, LocalKnown, Depth + 1, Q);
- TrailZ = std::min(TrailZ,
- unsigned(countTrailingZeros(TypeSize) +
- LocalKnown.countMinTrailingZeros()));
+ ScalingFactor.Zero = ~TypeSizeInBytes;
+ ScalingFactor.One = TypeSizeInBytes;
}
- }
+ IndexBits = KnownBits::computeForMul(IndexBits, ScalingFactor);
+
+ // If the offsets have a different width from the pointer, according
+ // to the language reference we need to sign-extend or truncate them
+ // to the width of the pointer.
+ IndexBits = IndexBits.sextOrTrunc(BitWidth);
- Known.Zero.setLowBits(TrailZ);
+ Known = KnownBits::computeForAddSub(
+ /*Add=*/true,
+ /*NSW=*/IsInBounds, Known, IndexBits);
+ }
+ if (!Known.isUnknown() && !AccConstIndices.isNullValue()) {
+ KnownBits Index(BitWidth);
+ Index.Zero = ~AccConstIndices;
+ Index.One = AccConstIndices;
+ Known = KnownBits::computeForAddSub(
+ /*Add=*/true,
+ /*NSW=*/IsInBounds, Known, Index);
+ }
break;
}
case Instruction::PHI: {