Diffstat (limited to 'llvm/lib/Analysis')
 llvm/lib/Analysis/Loads.cpp         |  4 +
 llvm/lib/Analysis/StackLifetime.cpp | 42 +-------
 llvm/lib/Analysis/ValueTracking.cpp | 27 +++++-
 3 files changed, 31 insertions(+), 42 deletions(-)
diff --git a/llvm/lib/Analysis/Loads.cpp b/llvm/lib/Analysis/Loads.cpp
index 6fc81d787..da76f5b 100644
--- a/llvm/lib/Analysis/Loads.cpp
+++ b/llvm/lib/Analysis/Loads.cpp
@@ -833,6 +833,10 @@ bool llvm::canReplacePointersInUseIfEqual(const Use &U, const Value *To,
if (!To->getType()->isPointerTy())
return true;
+ // Do not perform replacements in lifetime intrinsic arguments.
+ if (isa<LifetimeIntrinsic>(U.getUser()))
+ return false;
+
if (isPointerAlwaysReplaceable(&*U, To, DL))
return true;
return isPointerUseReplacable(U);
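
The Loads.cpp hunk makes canReplacePointersInUseIfEqual() refuse any use inside a lifetime intrinsic: llvm.lifetime.start/end annotate one specific alloca, so a pointer that merely compares equal to it is not a valid substitute there. A minimal sketch of how such a predicate is typically consumed by a caller (the helper below is hypothetical, not part of this patch):

    #include "llvm/ADT/STLExtras.h"
    #include "llvm/Analysis/Loads.h"
    #include "llvm/IR/DataLayout.h"
    #include "llvm/IR/Value.h"

    using namespace llvm;

    // Replace Old with New in each use where the swap is provably safe.
    // With the check added above, uses inside llvm.lifetime.start/end are
    // rejected, keeping the intrinsics tied to their original alloca.
    static void replaceEqualPointerUses(Value *Old, Value *New,
                                        const DataLayout &DL) {
      for (Use &U : make_early_inc_range(Old->uses()))
        if (canReplacePointersInUseIfEqual(U, New, DL))
          U.set(New);
    }
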
diff --git a/llvm/lib/Analysis/StackLifetime.cpp b/llvm/lib/Analysis/StackLifetime.cpp
index 34a7a04..abe4985 100644
--- a/llvm/lib/Analysis/StackLifetime.cpp
+++ b/llvm/lib/Analysis/StackLifetime.cpp
@@ -59,44 +59,20 @@ bool StackLifetime::isAliveAfter(const AllocaInst *AI,
return getLiveRange(AI).test(InstNum);
}
-// Returns unique alloca annotated by lifetime marker only if
-// markers has the same size and points to the alloca start.
-static const AllocaInst *findMatchingAlloca(const IntrinsicInst &II,
- const DataLayout &DL) {
- const AllocaInst *AI = cast<AllocaInst>(II.getArgOperand(1));
- auto AllocaSize = AI->getAllocationSize(DL);
- if (!AllocaSize)
- return nullptr;
-
- auto *Size = dyn_cast<ConstantInt>(II.getArgOperand(0));
- if (!Size)
- return nullptr;
- int64_t LifetimeSize = Size->getSExtValue();
-
- if (LifetimeSize != -1 && uint64_t(LifetimeSize) != *AllocaSize)
- return nullptr;
-
- return AI;
-}
-
void StackLifetime::collectMarkers() {
InterestingAllocas.resize(NumAllocas);
DenseMap<const BasicBlock *, SmallDenseMap<const IntrinsicInst *, Marker>>
BBMarkerSet;
- const DataLayout &DL = F.getDataLayout();
-
// Compute the set of start/end markers per basic block.
for (const BasicBlock *BB : depth_first(&F)) {
for (const Instruction &I : *BB) {
const IntrinsicInst *II = dyn_cast<IntrinsicInst>(&I);
if (!II || !II->isLifetimeStartOrEnd())
continue;
- const AllocaInst *AI = findMatchingAlloca(*II, DL);
- if (!AI) {
- HasUnknownLifetimeStartOrEnd = true;
+ const AllocaInst *AI = dyn_cast<AllocaInst>(II->getArgOperand(1));
+ if (!AI)
continue;
- }
auto It = AllocaNumbering.find(AI);
if (It == AllocaNumbering.end())
continue;
@@ -325,20 +301,6 @@ StackLifetime::StackLifetime(const Function &F,
}
void StackLifetime::run() {
- if (HasUnknownLifetimeStartOrEnd) {
- // There is marker which we can't assign to a specific alloca, so we
- // fallback to the most conservative results for the type.
- switch (Type) {
- case LivenessType::May:
- LiveRanges.resize(NumAllocas, getFullLiveRange());
- break;
- case LivenessType::Must:
- LiveRanges.resize(NumAllocas, LiveRange(Instructions.size()));
- break;
- }
- return;
- }
-
LiveRanges.resize(NumAllocas, LiveRange(Instructions.size()));
for (unsigned I = 0; I < NumAllocas; ++I)
if (!InterestingAllocas.test(I))
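
The StackLifetime.cpp change drops findMatchingAlloca() and the HasUnknownLifetimeStartOrEnd fallback: collectMarkers() now takes the marker's pointer operand as the alloca directly and simply skips markers it cannot attribute, instead of pessimizing every alloca to a fully conservative live range. This is consistent with lifetime markers applying to the whole alloca, which makes the old size-matching check unnecessary. A minimal client sketch (hypothetical helper, assuming the public StackLifetime.h API) of the kind of query that stays precise after this change:

    #include "llvm/ADT/SmallVector.h"
    #include "llvm/Analysis/StackLifetime.h"
    #include "llvm/IR/Function.h"
    #include "llvm/IR/Instructions.h"

    using namespace llvm;

    // Check whether two allocas may be live at the same instruction.
    static bool liveRangesOverlap(const Function &F, const AllocaInst *A,
                                  const AllocaInst *B) {
      SmallVector<const AllocaInst *, 2> Allocas = {A, B};
      StackLifetime SL(F, Allocas, StackLifetime::LivenessType::May);
      SL.run();
      return SL.getLiveRange(A).overlaps(SL.getLiveRange(B));
    }
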
diff --git a/llvm/lib/Analysis/ValueTracking.cpp b/llvm/lib/Analysis/ValueTracking.cpp
index af85ce4..1e70228 100644
--- a/llvm/lib/Analysis/ValueTracking.cpp
+++ b/llvm/lib/Analysis/ValueTracking.cpp
@@ -1351,6 +1351,8 @@ static void computeKnownBitsFromOperator(const Operator *I,
isa<ScalableVectorType>(I->getType()))
break;
+ unsigned NumElts = DemandedElts.getBitWidth();
+ bool IsLE = Q.DL.isLittleEndian();
// Look through a cast from narrow vector elements to wider type.
// Examples: v4i32 -> v2i64, v3i8 -> v24
unsigned SubBitWidth = SrcVecTy->getScalarSizeInBits();
@@ -1369,7 +1371,6 @@ static void computeKnownBitsFromOperator(const Operator *I,
//
// The known bits of each sub-element are then inserted into place
// (dependent on endian) to form the full result of known bits.
- unsigned NumElts = DemandedElts.getBitWidth();
unsigned SubScale = BitWidth / SubBitWidth;
APInt SubDemandedElts = APInt::getZero(NumElts * SubScale);
for (unsigned i = 0; i != NumElts; ++i) {
@@ -1381,10 +1382,32 @@ static void computeKnownBitsFromOperator(const Operator *I,
for (unsigned i = 0; i != SubScale; ++i) {
computeKnownBits(I->getOperand(0), SubDemandedElts.shl(i), KnownSrc, Q,
Depth + 1);
- unsigned ShiftElt = Q.DL.isLittleEndian() ? i : SubScale - 1 - i;
+ unsigned ShiftElt = IsLE ? i : SubScale - 1 - i;
Known.insertBits(KnownSrc, ShiftElt * SubBitWidth);
}
}
+ // Look through a cast from wider vector elements to narrow type.
+ // Examples: v2i64 -> v4i32
+ if (SubBitWidth % BitWidth == 0) {
+ unsigned SubScale = SubBitWidth / BitWidth;
+ KnownBits KnownSrc(SubBitWidth);
+ APInt SubDemandedElts =
+ APIntOps::ScaleBitMask(DemandedElts, NumElts / SubScale);
+ computeKnownBits(I->getOperand(0), SubDemandedElts, KnownSrc, Q,
+ Depth + 1);
+
+ Known.Zero.setAllBits();
+ Known.One.setAllBits();
+ for (unsigned i = 0; i != NumElts; ++i) {
+ if (DemandedElts[i]) {
+ unsigned Shifts = IsLE ? i : NumElts - 1 - i;
+ unsigned Offset = (Shifts % SubScale) * BitWidth;
+ Known = Known.intersectWith(KnownSrc.extractBits(BitWidth, Offset));
+ if (Known.isUnknown())
+ break;
+ }
+ }
+ }
break;
}
case Instruction::SExt: {
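
The ValueTracking.cpp addition handles the narrowing direction of a vector bitcast (e.g. v2i64 -> v4i32): each demanded narrow lane corresponds to a BitWidth-sized slice of a wide source element, selected by endianness, and the slices of all demanded lanes are intersected because computeKnownBits() returns a single summary for the whole vector. A standalone sketch of the slice selection (hypothetical helper mirroring the logic added above):

    #include "llvm/Support/KnownBits.h"

    using namespace llvm;

    // Known bits of one narrow lane of a wide->narrow vector bitcast,
    // read from the matching slice of the wide source's known bits.
    static KnownBits narrowLaneKnownBits(const KnownBits &WideSrc,
                                         unsigned BitWidth, unsigned LaneIdx,
                                         unsigned NumLanes,
                                         bool IsLittleEndian) {
      unsigned SubScale = WideSrc.getBitWidth() / BitWidth;
      // Big-endian targets number lanes from the opposite end of the value.
      unsigned Shifts = IsLittleEndian ? LaneIdx : NumLanes - 1 - LaneIdx;
      unsigned Offset = (Shifts % SubScale) * BitWidth;
      return WideSrc.extractBits(BitWidth, Offset);
    }

For a little-endian v2i64 -> v4i32 cast, lane 1 yields Offset 32, i.e. the upper half of a source element, matching the Offset computation in the loop added by this patch.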