Diffstat (limited to 'llvm/lib/Analysis/VectorUtils.cpp')
-rw-r--r--  llvm/lib/Analysis/VectorUtils.cpp | 6
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/llvm/lib/Analysis/VectorUtils.cpp b/llvm/lib/Analysis/VectorUtils.cpp
index 150ddce..b3b4c37 100644
--- a/llvm/lib/Analysis/VectorUtils.cpp
+++ b/llvm/lib/Analysis/VectorUtils.cpp
@@ -1117,7 +1117,7 @@ Constant *
llvm::createBitMaskForGaps(IRBuilderBase &Builder, unsigned VF,
const InterleaveGroup<Instruction> &Group) {
// All 1's means mask is not needed.
- if (Group.getNumMembers() == Group.getFactor())
+ if (Group.isFull())
return nullptr;
// TODO: support reversed access.
@@ -1663,7 +1663,7 @@ void InterleavedAccessInfo::analyzeInterleaving(
// Case 1: A full group. Can Skip the checks; For full groups, if the wide
// load would wrap around the address space we would do a memory access at
// nullptr even without the transformation.
- if (Group->getNumMembers() == Group->getFactor())
+ if (Group->isFull())
continue;
// Case 2: If first and last members of the group don't wrap this implies
@@ -1698,7 +1698,7 @@ void InterleavedAccessInfo::analyzeInterleaving(
// Case 1: A full group. Can Skip the checks; For full groups, if the wide
// store would wrap around the address space we would do a memory access at
// nullptr even without the transformation.
- if (Group->getNumMembers() == Group->getFactor())
+ if (Group->isFull())
continue;
// Interleave-store-group with gaps is implemented using masked wide store.
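For context, the isFull() helper that these hunks switch to is not shown in this diff. Judging from the condition it replaces, it is presumably a thin query on InterleaveGroup along the following lines (a sketch for illustration, not the verbatim LLVM definition):

    // Hypothetical sketch of the accessor assumed by this change.
    template <typename InstTy> class InterleaveGroup {
    public:
      // A group is "full" when every lane of the interleave factor is
      // occupied by a member access, i.e. the group has no gaps.
      bool isFull() const { return getNumMembers() == getFactor(); }

      unsigned getFactor() const;     // interleave factor (stride)
      unsigned getNumMembers() const; // accesses currently in the group
    };

Replacing the explicit getNumMembers() == getFactor() comparison with a named predicate keeps the behavior identical while making the "no gaps" intent of each check explicit at the call sites.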