Diffstat (limited to 'llvm/lib/Analysis/ValueTracking.cpp')
-rw-r--r--  llvm/lib/Analysis/ValueTracking.cpp  28
1 file changed, 6 insertions, 22 deletions
diff --git a/llvm/lib/Analysis/ValueTracking.cpp b/llvm/lib/Analysis/ValueTracking.cpp
index 0606ce7..be00950 100644
--- a/llvm/lib/Analysis/ValueTracking.cpp
+++ b/llvm/lib/Analysis/ValueTracking.cpp
@@ -375,11 +375,6 @@ static unsigned ComputeNumSignBits(const Value *V, const APInt &DemandedElts,
 
 static unsigned ComputeNumSignBits(const Value *V, unsigned Depth,
                                    const Query &Q) {
-  // FIXME: We currently have no way to represent the DemandedElts of a scalable
-  // vector
-  if (isa<ScalableVectorType>(V->getType()))
-    return 1;
-
   auto *FVTy = dyn_cast<FixedVectorType>(V->getType());
   APInt DemandedElts =
       FVTy ? APInt::getAllOnes(FVTy->getNumElements()) : APInt(1, 1);
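Note (not part of the patch): with the early bailout gone, a scalable vector simply fails the dyn_cast<FixedVectorType> above and falls through with a single-bit DemandedElts. A minimal standalone sketch of that fall-through, using a hypothetical driver around the same LLVM APIs:

  #include "llvm/ADT/APInt.h"
  #include "llvm/IR/DerivedTypes.h"
  #include "llvm/IR/LLVMContext.h"
  #include "llvm/Support/Casting.h"
  using namespace llvm;

  int main() {
    LLVMContext Ctx;
    // Hypothetical example type: <vscale x 4 x i32>, a scalable vector.
    Type *Ty = ScalableVectorType::get(Type::getInt32Ty(Ctx), 4);
    // Not a FixedVectorType, so the dyn_cast yields nullptr and the wrapper
    // now builds a 1-bit DemandedElts instead of returning early.
    auto *FVTy = dyn_cast<FixedVectorType>(Ty);
    APInt DemandedElts =
        FVTy ? APInt::getAllOnes(FVTy->getNumElements()) : APInt(1, 1);
    return (int)DemandedElts.getBitWidth(); // 1 for the scalable case
  }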
@@ -2449,10 +2444,6 @@ static bool isNonZeroRecurrence(const PHINode *PN) {
 /// Supports values with integer or pointer type and vectors of integers.
 bool isKnownNonZero(const Value *V, const APInt &DemandedElts, unsigned Depth,
                     const Query &Q) {
-  // FIXME: We currently have no way to represent the DemandedElts of a scalable
-  // vector
-  if (isa<ScalableVectorType>(V->getType()))
-    return false;
 
 #ifndef NDEBUG
   Type *Ty = V->getType();
@@ -2574,14 +2565,18 @@ bool isKnownNonZero(const Value *V, const APInt &DemandedElts, unsigned Depth,
       // Note that we have to take special care to avoid looking through
       // truncating casts, e.g., int2ptr/ptr2int with appropriate sizes, as well
      // as casts that can alter the value, e.g., AddrSpaceCasts.
-      if (Q.DL.getTypeSizeInBits(I->getOperand(0)->getType()).getFixedSize() <=
+      if (!isa<ScalableVectorType>(I->getOperand(0)->getType()) &&
+          !isa<ScalableVectorType>(I->getType()) &&
+          Q.DL.getTypeSizeInBits(I->getOperand(0)->getType()).getFixedSize() <=
           Q.DL.getTypeSizeInBits(I->getType()).getFixedSize())
         return isKnownNonZero(I->getOperand(0), Depth, Q);
       break;
     case Instruction::PtrToInt:
       // Similar to int2ptr above, we can look through ptr2int here if the cast
       // is a no-op or an extend and not a truncate.
-      if (Q.DL.getTypeSizeInBits(I->getOperand(0)->getType()).getFixedSize() <=
+      if (!isa<ScalableVectorType>(I->getOperand(0)->getType()) &&
+          !isa<ScalableVectorType>(I->getType()) &&
+          Q.DL.getTypeSizeInBits(I->getOperand(0)->getType()).getFixedSize() <=
           Q.DL.getTypeSizeInBits(I->getType()).getFixedSize())
         return isKnownNonZero(I->getOperand(0), Depth, Q);
       break;
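Note (not part of the patch): the added isa<ScalableVectorType> guards matter because DataLayout::getTypeSizeInBits returns a TypeSize, and TypeSize::getFixedSize is only valid when the size is not scalable. A rough standalone sketch of that distinction, with a hypothetical empty data layout string:

  #include "llvm/IR/DataLayout.h"
  #include "llvm/IR/DerivedTypes.h"
  #include "llvm/IR/LLVMContext.h"
  using namespace llvm;

  int main() {
    LLVMContext Ctx;
    DataLayout DL("");                                // hypothetical empty layout
    Type *I64 = Type::getInt64Ty(Ctx);
    auto *SVTy = ScalableVectorType::get(I64, 2);     // <vscale x 2 x i64>

    TypeSize FixedTS = DL.getTypeSizeInBits(I64);     // 64 bits, fixed
    TypeSize ScalableTS = DL.getTypeSizeInBits(SVTy); // 128 bits scaled by vscale

    (void)FixedTS.getFixedSize();   // fine: the size is not scalable
    if (!ScalableTS.isScalable())   // guard, mirroring the patch's isa<> checks
      (void)ScalableTS.getFixedSize(); // would assert without the guard
    return 0;
  }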
@@ -2740,11 +2735,6 @@ bool isKnownNonZero(const Value *V, const APInt &DemandedElts, unsigned Depth,
 }
 
 bool isKnownNonZero(const Value* V, unsigned Depth, const Query& Q) {
-  // FIXME: We currently have no way to represent the DemandedElts of a scalable
-  // vector
-  if (isa<ScalableVectorType>(V->getType()))
-    return false;
-
   auto *FVTy = dyn_cast<FixedVectorType>(V->getType());
   APInt DemandedElts =
       FVTy ? APInt::getAllOnes(FVTy->getNumElements()) : APInt(1, 1);
@@ -3096,12 +3086,6 @@ static unsigned ComputeNumSignBitsImpl(const Value *V,
                                        const APInt &DemandedElts,
                                        unsigned Depth, const Query &Q) {
   Type *Ty = V->getType();
-
-  // FIXME: We currently have no way to represent the DemandedElts of a scalable
-  // vector
-  if (isa<ScalableVectorType>(Ty))
-    return 1;
-
 #ifndef NDEBUG
   assert(Depth <= MaxAnalysisRecursionDepth && "Limit Search Depth");