aboutsummaryrefslogtreecommitdiff
path: root/llvm/lib/Analysis/ConstantFolding.cpp
diff options
context:
space:
mode:
authorDan Gohman <gohman@apple.com>2010-02-01 18:27:38 +0000
committerDan Gohman <gohman@apple.com>2010-02-01 18:27:38 +0000
commite5e1b7b05a2f1f40bdd2fec0105cd48f39c8ad0f (patch)
tree980fce0d31c411705994d589f8370e559034baf3 /llvm/lib/Analysis/ConstantFolding.cpp
parent846a52e228b0050995f1679288e93c15739b004d (diff)
downloadllvm-e5e1b7b05a2f1f40bdd2fec0105cd48f39c8ad0f.zip
llvm-e5e1b7b05a2f1f40bdd2fec0105cd48f39c8ad0f.tar.gz
llvm-e5e1b7b05a2f1f40bdd2fec0105cd48f39c8ad0f.tar.bz2
Generalize target-independent folding rules for sizeof to handle more
cases, and implement target-independent folding rules for alignof and offsetof. Also, reassociate reassociative operators when it leads to more folding. Generalize ScalarEvolution's isOffsetOf to recognize offsetof on arrays. Rename getAllocSizeExpr to getSizeOfExpr, and getFieldOffsetExpr to getOffsetOfExpr, for consistency with analogous ConstantExpr routines. Make the target-dependent folder promote GEP array indices to pointer-sized integers, to make implicit casting explicit and exposed to subsequent folding. And add a bunch of testcases for this new functionality, and a bunch of related existing functionality. llvm-svn: 94987
Diffstat (limited to 'llvm/lib/Analysis/ConstantFolding.cpp')
-rw-r--r--llvm/lib/Analysis/ConstantFolding.cpp38
1 file changed, 38 insertions, 0 deletions
diff --git a/llvm/lib/Analysis/ConstantFolding.cpp b/llvm/lib/Analysis/ConstantFolding.cpp
index 4ae8859..b8e8401 100644
--- a/llvm/lib/Analysis/ConstantFolding.cpp
+++ b/llvm/lib/Analysis/ConstantFolding.cpp
@@ -517,6 +517,42 @@ static Constant *SymbolicallyEvaluateBinop(unsigned Opc, Constant *Op0,
return 0;
}
+/// CastGEPIndices - If array indices are not pointer-sized integers,
+/// explicitly cast them so that they aren't implicitly casted by the
+/// getelementptr.
+static Constant *CastGEPIndices(Constant *const *Ops, unsigned NumOps,
+ const Type *ResultTy,
+ const TargetData *TD) {
+ if (!TD) return 0;
+ const Type *IntPtrTy = TD->getIntPtrType(ResultTy->getContext());
+
+ bool Any = false;
+ SmallVector<Constant*, 32> NewIdxs;
+ for (unsigned i = 1; i != NumOps; ++i) {
+ if ((i == 1 ||
+ !isa<StructType>(GetElementPtrInst::getIndexedType(Ops[0]->getType(),
+ reinterpret_cast<Value *const *>(Ops+1),
+ i-1))) &&
+ Ops[i]->getType() != IntPtrTy) {
+ Any = true;
+ NewIdxs.push_back(ConstantExpr::getCast(CastInst::getCastOpcode(Ops[i],
+ true,
+ IntPtrTy,
+ true),
+ Ops[i], IntPtrTy));
+ } else
+ NewIdxs.push_back(Ops[i]);
+ }
+ if (!Any) return 0;
+
+ Constant *C =
+ ConstantExpr::getGetElementPtr(Ops[0], &NewIdxs[0], NewIdxs.size());
+ if (ConstantExpr *CE = dyn_cast<ConstantExpr>(C))
+ if (Constant *Folded = ConstantFoldConstantExpression(CE, TD))
+ C = Folded;
+ return C;
+}
+
/// SymbolicallyEvaluateGEP - If we can symbolically evaluate the specified GEP
/// constant expression, do so.
static Constant *SymbolicallyEvaluateGEP(Constant *const *Ops, unsigned NumOps,
@@ -810,6 +846,8 @@ Constant *llvm::ConstantFoldInstOperands(unsigned Opcode, const Type *DestTy,
case Instruction::ShuffleVector:
return ConstantExpr::getShuffleVector(Ops[0], Ops[1], Ops[2]);
case Instruction::GetElementPtr:
+ if (Constant *C = CastGEPIndices(Ops, NumOps, DestTy, TD))
+ return C;
if (Constant *C = SymbolicallyEvaluateGEP(Ops, NumOps, DestTy, TD))
return C;