Diffstat (limited to 'llvm/lib/Analysis')
-rw-r--r--  llvm/lib/Analysis/CaptureTracking.cpp    |  6
-rw-r--r--  llvm/lib/Analysis/LazyValueInfo.cpp      | 28
-rw-r--r--  llvm/lib/Analysis/LoopAccessAnalysis.cpp | 14
-rw-r--r--  llvm/lib/Analysis/ScalarEvolution.cpp    |  7
-rw-r--r--  llvm/lib/Analysis/ValueTracking.cpp      | 35
5 files changed, 57 insertions(+), 33 deletions(-)
diff --git a/llvm/lib/Analysis/CaptureTracking.cpp b/llvm/lib/Analysis/CaptureTracking.cpp
index a0fe7f9..22229d9 100644
--- a/llvm/lib/Analysis/CaptureTracking.cpp
+++ b/llvm/lib/Analysis/CaptureTracking.cpp
@@ -320,8 +320,12 @@ UseCaptureInfo llvm::DetermineUseCaptureKind(const Use &U, const Value *Base) {
return CaptureComponents::None;
case Instruction::Store:
// Stored the pointer - conservatively assume it may be captured.
+ if (U.getOperandNo() == 0)
+ return MDNode::toCaptureComponents(
+ I->getMetadata(LLVMContext::MD_captures));
+
// Volatile stores make the address observable.
- if (U.getOperandNo() == 0 || cast<StoreInst>(I)->isVolatile())
+ if (cast<StoreInst>(I)->isVolatile())
return CaptureComponents::All;
return CaptureComponents::None;
case Instruction::AtomicRMW: {
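
Note on the hunk above: the Store case now distinguishes the two operands. Storing a pointer (operand 0) no longer forces a full capture; the result is taken from the store's captures metadata, while the address operand only becomes observable through a volatile store. Below is a minimal standalone sketch of that decision flow using hypothetical stand-in types (Components, ToyStore, classifyStoreUse are not LLVM's CaptureComponents/MDNode API), and it assumes that absent metadata conservatively means a full capture.

    #include <cstdint>
    #include <optional>

    // Hypothetical stand-ins for capture components and a store's captures metadata.
    enum class Components : uint8_t { None = 0, Address = 1, All = 3 };

    struct ToyStore {
      bool IsVolatile = false;
      std::optional<Components> CapturesMD; // empty = no captures metadata present
    };

    // Operand 0 (the stored value) is classified by the metadata, defaulting to a
    // full capture when no metadata is attached; the address operand is only
    // considered observable when the store is volatile.
    Components classifyStoreUse(const ToyStore &S, unsigned OperandNo) {
      if (OperandNo == 0)
        return S.CapturesMD.value_or(Components::All);
      if (S.IsVolatile)
        return Components::All;
      return Components::None;
    }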
diff --git a/llvm/lib/Analysis/LazyValueInfo.cpp b/llvm/lib/Analysis/LazyValueInfo.cpp
index 6fb2807..0e5bc48 100644
--- a/llvm/lib/Analysis/LazyValueInfo.cpp
+++ b/llvm/lib/Analysis/LazyValueInfo.cpp
@@ -1632,19 +1632,25 @@ LazyValueInfoImpl::getEdgeValueLocal(Value *Val, BasicBlock *BBFrom,
*getValueFromCondition(Usr->getOperand(0), Condition,
isTrueDest, /*UseBlockValue*/ false);
- if (!OpLatticeVal.isConstantRange())
- return OpLatticeVal;
+ if (OpLatticeVal.isConstantRange()) {
+ const unsigned ResultBitWidth =
+ Usr->getType()->getScalarSizeInBits();
+ if (auto *Trunc = dyn_cast<TruncInst>(Usr))
+ return ValueLatticeElement::getRange(
+ OpLatticeVal.getConstantRange().truncate(
+ ResultBitWidth, Trunc->getNoWrapKind()));
- const unsigned ResultBitWidth =
- Usr->getType()->getScalarSizeInBits();
- if (auto *Trunc = dyn_cast<TruncInst>(Usr))
return ValueLatticeElement::getRange(
- OpLatticeVal.getConstantRange().truncate(
- ResultBitWidth, Trunc->getNoWrapKind()));
-
- return ValueLatticeElement::getRange(
- OpLatticeVal.getConstantRange().castOp(
- cast<CastInst>(Usr)->getOpcode(), ResultBitWidth));
+ OpLatticeVal.getConstantRange().castOp(
+ cast<CastInst>(Usr)->getOpcode(), ResultBitWidth));
+ }
+ if (OpLatticeVal.isConstant()) {
+ Constant *C = OpLatticeVal.getConstant();
+ if (auto *CastC = ConstantFoldCastOperand(
+ cast<CastInst>(Usr)->getOpcode(), C, Usr->getType(), DL))
+ return ValueLatticeElement::get(CastC);
+ }
+ return ValueLatticeElement::getOverdefined();
} else {
// If one of Val's operand has an inferred value, we may be able to
// infer the value of Val.
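
Note on the hunk above: the rewritten branch handles three shapes of the operand's lattice value. A constant range is truncated or cast as a range, a single constant is constant-folded through the cast, and anything else becomes overdefined. The following self-contained sketch mirrors that three-way dispatch for a truncation with a toy lattice (ToyLattice, applyTruncToLattice are hypothetical, not ValueLatticeElement/ConstantRange, and the wrap handling is deliberately cruder than the real ConstantRange::truncate).

    #include <cstdint>
    #include <utility>
    #include <variant>

    struct Overdefined {};
    // Toy lattice: a single constant, an inclusive [lo, hi] range, or no information.
    using ToyLattice = std::variant<uint64_t, std::pair<uint64_t, uint64_t>, Overdefined>;

    ToyLattice applyTruncToLattice(const ToyLattice &Op, unsigned ResultBits) {
      const uint64_t Mask =
          ResultBits >= 64 ? ~uint64_t{0} : (uint64_t{1} << ResultBits) - 1;
      if (const auto *Range = std::get_if<std::pair<uint64_t, uint64_t>>(&Op)) {
        // Keep the range only if truncation cannot wrap it; the real code lets
        // ConstantRange::truncate compute a precise result instead.
        if ((Range->first & ~Mask) == 0 && (Range->second & ~Mask) == 0)
          return *Range;
        return Overdefined{};
      }
      if (const auto *C = std::get_if<uint64_t>(&Op))
        return *C & Mask; // Fold the truncation directly on a single constant.
      return Overdefined{}; // No usable information about the operand.
    }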
diff --git a/llvm/lib/Analysis/LoopAccessAnalysis.cpp b/llvm/lib/Analysis/LoopAccessAnalysis.cpp
index 87fae92..47dccde 100644
--- a/llvm/lib/Analysis/LoopAccessAnalysis.cpp
+++ b/llvm/lib/Analysis/LoopAccessAnalysis.cpp
@@ -234,9 +234,14 @@ static bool evaluatePtrAddRecAtMaxBTCWillNotWrap(
// Check if we have a suitable dereferencable assumption we can use.
if (!StartPtrV->canBeFreed()) {
+ Instruction *CtxI = &*L->getHeader()->getFirstNonPHIIt();
+ if (BasicBlock *LoopPred = L->getLoopPredecessor()) {
+ if (isa<BranchInst>(LoopPred->getTerminator()))
+ CtxI = LoopPred->getTerminator();
+ }
+
RetainedKnowledge DerefRK = getKnowledgeValidInContext(
- StartPtrV, {Attribute::Dereferenceable}, *AC,
- L->getLoopPredecessor()->getTerminator(), DT);
+ StartPtrV, {Attribute::Dereferenceable}, *AC, CtxI, DT);
if (DerefRK) {
DerefBytesSCEV =
SE.getUMaxExpr(DerefBytesSCEV, SE.getSCEV(DerefRK.IRArgValue));
@@ -2856,8 +2861,9 @@ void LoopAccessInfo::emitUnsafeDependenceRemark() {
}
}
-bool LoopAccessInfo::blockNeedsPredication(BasicBlock *BB, Loop *TheLoop,
- DominatorTree *DT) {
+bool LoopAccessInfo::blockNeedsPredication(const BasicBlock *BB,
+ const Loop *TheLoop,
+ const DominatorTree *DT) {
assert(TheLoop->contains(BB) && "Unknown block used");
// Blocks that do not dominate the latch need predication.
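
Note on the diff above: the first hunk stops assuming a loop predecessor is always available when searching for a dereferenceable assumption. The context instruction now defaults to the first non-PHI instruction of the loop header and is only upgraded to the predecessor's terminator when such a predecessor exists and ends in a branch; the second hunk merely const-qualifies blockNeedsPredication and changes no behavior. A minimal sketch of that selection with hypothetical toy types (ToyInst, ToyBlock, ToyLoop are not LLVM's Loop/BasicBlock API):

    // Hypothetical toy IR, just enough to show the fallback order.
    struct ToyInst {
      bool IsBranch = false;
    };
    struct ToyBlock {
      ToyInst FirstNonPHI;
      ToyInst Terminator;
    };
    struct ToyLoop {
      ToyBlock *Header = nullptr;
      ToyBlock *LoopPredecessor = nullptr; // null when there is no unique predecessor
    };

    // Default to the header's first non-PHI instruction; prefer the predecessor's
    // terminator only when a predecessor exists and that terminator is a branch.
    ToyInst *pickAssumptionContext(ToyLoop &L) {
      ToyInst *Ctx = &L.Header->FirstNonPHI;
      if (ToyBlock *Pred = L.LoopPredecessor)
        if (Pred->Terminator.IsBranch)
          Ctx = &Pred->Terminator;
      return Ctx;
    }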
diff --git a/llvm/lib/Analysis/ScalarEvolution.cpp b/llvm/lib/Analysis/ScalarEvolution.cpp
index b08399b..63e1b14 100644
--- a/llvm/lib/Analysis/ScalarEvolution.cpp
+++ b/llvm/lib/Analysis/ScalarEvolution.cpp
@@ -3598,6 +3598,13 @@ const SCEV *ScalarEvolution::getUDivExpr(const SCEV *LHS,
}
}
+ // TODO: Generalize to handle any common factors.
+ // udiv (mul nuw a, vscale), (mul nuw b, vscale) --> udiv a, b
+ const SCEV *NewLHS, *NewRHS;
+ if (match(LHS, m_scev_c_NUWMul(m_SCEV(NewLHS), m_SCEVVScale())) &&
+ match(RHS, m_scev_c_NUWMul(m_SCEV(NewRHS), m_SCEVVScale())))
+ return getUDivExpr(NewLHS, NewRHS);
+
// The Insertion Point (IP) might be invalid by now (due to UniqueSCEVs
// changes). Make sure we get a new one.
IP = nullptr;
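
Note on the hunk above: the new fold cancels a common vscale factor out of an unsigned division. When neither multiply wraps (the nuw requirement) and the scale is positive, (a * vscale) / (b * vscale) and a / b produce the same floored quotient, because the common factor cancels exactly before the floor is taken. A quick standalone sanity check of that identity over small values (plain C++, not LLVM code):

    #include <cassert>
    #include <cstdint>

    int main() {
      // Sample positive scales standing in for vscale (which is always >= 1).
      const uint64_t Scales[] = {1, 2, 3, 4, 16};
      for (uint64_t A = 0; A < 64; ++A)
        for (uint64_t B = 1; B < 64; ++B)
          for (uint64_t V : Scales)
            // With no wrapping, the common factor cancels exactly, so the
            // floored quotients agree: (A * V) / (B * V) == A / B.
            assert((A * V) / (B * V) == A / B);
      return 0;
    }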
diff --git a/llvm/lib/Analysis/ValueTracking.cpp b/llvm/lib/Analysis/ValueTracking.cpp
index 6f11b25..09a8fbe 100644
--- a/llvm/lib/Analysis/ValueTracking.cpp
+++ b/llvm/lib/Analysis/ValueTracking.cpp
@@ -7651,25 +7651,26 @@ static bool isGuaranteedNotToBeUndefOrPoison(
return true;
}
- if (const auto *PN = dyn_cast<PHINode>(V)) {
- unsigned Num = PN->getNumIncomingValues();
- bool IsWellDefined = true;
- for (unsigned i = 0; i < Num; ++i) {
- if (PN == PN->getIncomingValue(i))
- continue;
- auto *TI = PN->getIncomingBlock(i)->getTerminator();
- if (!isGuaranteedNotToBeUndefOrPoison(PN->getIncomingValue(i), AC, TI,
- DT, Depth + 1, Kind)) {
- IsWellDefined = false;
- break;
+ if (!::canCreateUndefOrPoison(Opr, Kind,
+ /*ConsiderFlagsAndMetadata=*/true)) {
+ if (const auto *PN = dyn_cast<PHINode>(V)) {
+ unsigned Num = PN->getNumIncomingValues();
+ bool IsWellDefined = true;
+ for (unsigned i = 0; i < Num; ++i) {
+ if (PN == PN->getIncomingValue(i))
+ continue;
+ auto *TI = PN->getIncomingBlock(i)->getTerminator();
+ if (!isGuaranteedNotToBeUndefOrPoison(PN->getIncomingValue(i), AC, TI,
+ DT, Depth + 1, Kind)) {
+ IsWellDefined = false;
+ break;
+ }
}
- }
- if (IsWellDefined)
+ if (IsWellDefined)
+ return true;
+ } else if (all_of(Opr->operands(), OpCheck))
return true;
- } else if (!::canCreateUndefOrPoison(Opr, Kind,
- /*ConsiderFlagsAndMetadata*/ true) &&
- all_of(Opr->operands(), OpCheck))
- return true;
+ }
}
if (auto *I = dyn_cast<LoadInst>(V))
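
Note on the hunk above: the restructuring hoists the canCreateUndefOrPoison check so it now guards the PHI walk as well as the generic all-operands check. A value is only treated as well defined when the operator itself cannot introduce undef or poison and every (non-self) input is well defined. The following toy standalone model mirrors that recursion with hypothetical node types (ToyNode, isWellDefined are not LLVM's Value/PHINode API; the fixed depth cap stands in for the analysis recursion limit).

    #include <vector>

    // Toy node: may itself be able to introduce poison, and depends on zero or
    // more operands; a self-edge models a PHI that feeds itself.
    struct ToyNode {
      bool CanCreatePoison = false;
      std::vector<const ToyNode *> Operands;
    };

    bool isWellDefined(const ToyNode *N, unsigned Depth = 0) {
      if (Depth > 6)
        return false; // Give up, like the recursion-depth cutoff in the real code.
      if (N->CanCreatePoison)
        return false; // The operator itself may introduce undef or poison.
      for (const ToyNode *Op : N->Operands) {
        if (Op == N)
          continue; // Skip a PHI-style self-reference, as the hunk above does.
        if (!isWellDefined(Op, Depth + 1))
          return false;
      }
      return true; // Cannot create poison and every input is well defined.
    }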