author     John Brawn <john.brawn@arm.com>  2021-12-14 11:11:41 +0000
committer  John Brawn <john.brawn@arm.com>  2021-12-14 11:30:48 +0000
commit     dc9f65be4555406262ff693c8bac5f1f0b960a97 (patch)
tree       9a573354ee3c2edc13b08a26eb1e68b28843a836 /llvm/lib/CodeGen/StackProtector.cpp
parent     b81450afb6529cd4d1eece46e9945caa5de51c11 (diff)
[AArch64][SVE] Fix handling of stack protection with SVE
Fix a couple of things that were causing stack protection to not work
correctly in functions that have scalable vectors on the stack:

* Use TypeSize when determining if accesses to a variable are considered
  out-of-bounds so that the behaviour is correct for scalable vectors.
* When stack protection is enabled, move the stack protector location to
  the top of the SVE locals, so that any overflow in them (or the other
  locals which are below that) will be detected.

Fixes: https://github.com/llvm/llvm-project/issues/51137
Differential Revision: https://reviews.llvm.org/D111631
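[Editor's note] A minimal sketch, not part of the commit, of how the TypeSize
comparison helpers used in the diff below behave for fixed versus scalable
sizes; it assumes only llvm/Support/TypeSize.h, and the function name is
hypothetical:

// Editor's sketch: illustrates why the patch replaces the plain integer
// comparison "MemLoc->Size.getValue() > AllocSize" with TypeSize::isKnownGE.
#include "llvm/Support/TypeSize.h"
#include <cassert>

using llvm::TypeSize;

void typeSizeComparisonSketch() {
  TypeSize Fixed16 = TypeSize::getFixed(16);        // 16 bytes
  TypeSize Scalable16 = TypeSize::getScalable(16);  // 16 * vscale bytes

  // Two fixed sizes compare like plain integers.
  assert(TypeSize::isKnownGE(Fixed16, TypeSize::getFixed(8)));

  // A scalable size is at least its known minimum (vscale >= 1), so
  // 16 * vscale >= 16 holds for every vscale and isKnownGE is true.
  assert(TypeSize::isKnownGE(Scalable16, Fixed16));

  // But a fixed 16 bytes is NOT known to cover a scalable 16 bytes
  // (vscale could be 2, making it 32 bytes), so this must be false. A
  // plain uint64_t comparison of the minimum values would wrongly say true.
  assert(!TypeSize::isKnownGE(Fixed16, Scalable16));
}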
Diffstat (limited to 'llvm/lib/CodeGen/StackProtector.cpp')
-rw-r--r--  llvm/lib/CodeGen/StackProtector.cpp | 21
1 file changed, 14 insertions(+), 7 deletions(-)
diff --git a/llvm/lib/CodeGen/StackProtector.cpp b/llvm/lib/CodeGen/StackProtector.cpp
index 7445f77..6765fd2 100644
--- a/llvm/lib/CodeGen/StackProtector.cpp
+++ b/llvm/lib/CodeGen/StackProtector.cpp
@@ -162,7 +162,7 @@ bool StackProtector::ContainsProtectableArray(Type *Ty, bool &IsLarge,
}
bool StackProtector::HasAddressTaken(const Instruction *AI,
- uint64_t AllocSize) {
+ TypeSize AllocSize) {
const DataLayout &DL = M->getDataLayout();
for (const User *U : AI->users()) {
const auto *I = cast<Instruction>(U);
@@ -170,7 +170,8 @@ bool StackProtector::HasAddressTaken(const Instruction *AI,
// the bounds of the allocated object.
Optional<MemoryLocation> MemLoc = MemoryLocation::getOrNone(I);
if (MemLoc.hasValue() && MemLoc->Size.hasValue() &&
- MemLoc->Size.getValue() > AllocSize)
+ !TypeSize::isKnownGE(AllocSize,
+ TypeSize::getFixed(MemLoc->Size.getValue())))
return true;
switch (I->getOpcode()) {
case Instruction::Store:
@@ -203,13 +204,19 @@ bool StackProtector::HasAddressTaken(const Instruction *AI,
// would use it could also be out-of-bounds meaning stack protection is
// required.
const GetElementPtrInst *GEP = cast<GetElementPtrInst>(I);
- unsigned TypeSize = DL.getIndexTypeSizeInBits(I->getType());
- APInt Offset(TypeSize, 0);
- APInt MaxOffset(TypeSize, AllocSize);
- if (!GEP->accumulateConstantOffset(DL, Offset) || Offset.ugt(MaxOffset))
+ unsigned IndexSize = DL.getIndexTypeSizeInBits(I->getType());
+ APInt Offset(IndexSize, 0);
+ if (!GEP->accumulateConstantOffset(DL, Offset))
+ return true;
+ TypeSize OffsetSize = TypeSize::Fixed(Offset.getLimitedValue());
+ if (!TypeSize::isKnownGT(AllocSize, OffsetSize))
return true;
// Adjust AllocSize to be the space remaining after this offset.
- if (HasAddressTaken(I, AllocSize - Offset.getLimitedValue()))
+ // We can't subtract a fixed size from a scalable one, so in that case
+ // assume the scalable value is of minimum size.
+ TypeSize NewAllocSize =
+ TypeSize::Fixed(AllocSize.getKnownMinValue()) - OffsetSize;
+ if (HasAddressTaken(I, NewAllocSize))
return true;
break;
}
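[Editor's note] A standalone sketch, not part of the commit, of the bounds
reasoning the new GEP handling above performs: check that a constant offset is
known to lie strictly inside the allocation, then compute the remaining space
conservatively when the allocation is scalable. It assumes the LLVM headers
the patch itself relies on; the function name is hypothetical:

// Editor's sketch mirroring the GEP logic in the hunk above.
#include "llvm/ADT/Optional.h"
#include "llvm/Support/TypeSize.h"
#include <cstdint>

using llvm::Optional;
using llvm::TypeSize;

// Returns None when the constant byte offset may be out of bounds (so
// stack protection is required), otherwise the space remaining after the
// offset. We can't subtract a fixed offset from a scalable size, so, as
// the patch's comment says, assume the scalable value is of minimum size
// (i.e. vscale == 1).
Optional<TypeSize> remainingAfterOffset(TypeSize AllocSize, uint64_t Offset) {
  TypeSize OffsetSize = TypeSize::Fixed(Offset);
  if (!TypeSize::isKnownGT(AllocSize, OffsetSize))
    return llvm::None; // not known to be strictly in-bounds
  return TypeSize::Fixed(AllocSize.getKnownMinValue()) - OffsetSize;
}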