path: root/llvm/lib/CodeGen/RegAllocFast.cpp
author     Matt Arsenault <Matthew.Arsenault@amd.com>  2019-03-19 19:16:04 +0000
committer  Matt Arsenault <Matthew.Arsenault@amd.com>  2019-03-19 19:16:04 +0000
commit     3c98cdd21832207694ab9307f83a6c6e4e21185b (patch)
tree       4d715cd3b14d2eda22b7b373ce2419c493c4bf3f /llvm/lib/CodeGen/RegAllocFast.cpp
parent     72122d058b170eafc643ec659a9298b3b103cdfd (diff)
RegAllocFast: Do not allocate registers for undef uses
Do not actually allocate a register for an undef use. Previously we would create unnecessary reload instructions for undef uses where the register wasn't live.

Patch by Matthias Braun

llvm-svn: 356501
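As a rough illustration (not part of the patch; the helper name, types, and structure below are hypothetical plain C++ rather than LLVM API), the policy applied to an undef use can be summarised as: never emit a reload, reuse an existing live assignment if the virtual register already has one, and otherwise pick any allocatable register of the class, here the first entry of the allocation order.

    #include <cstdint>
    #include <optional>
    #include <vector>

    using PhysReg = std::uint16_t;

    // Hypothetical helper mirroring the selection policy of the new
    // allocVirtRegUndef: an undef use never needs the old value, so no
    // reload is required.
    std::optional<PhysReg>
    pickPhysRegForUndefUse(std::optional<PhysReg> ExistingAssignment,
                           const std::vector<PhysReg> &AllocationOrder) {
      if (ExistingAssignment)
        return ExistingAssignment;    // keep whatever the vreg is already in
      if (AllocationOrder.empty())
        return std::nullopt;          // no allocatable register in this class
      return AllocationOrder.front(); // the value is undef, any register works
    }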
Diffstat (limited to 'llvm/lib/CodeGen/RegAllocFast.cpp')
-rw-r--r--  llvm/lib/CodeGen/RegAllocFast.cpp  48
1 file changed, 48 insertions, 0 deletions
diff --git a/llvm/lib/CodeGen/RegAllocFast.cpp b/llvm/lib/CodeGen/RegAllocFast.cpp
index 46620d8..a711db1 100644
--- a/llvm/lib/CodeGen/RegAllocFast.cpp
+++ b/llvm/lib/CodeGen/RegAllocFast.cpp
@@ -203,6 +203,7 @@ namespace {
}
void allocVirtReg(MachineInstr &MI, LiveReg &LR, unsigned Hint);
+ void allocVirtRegUndef(MachineOperand &MO);
MCPhysReg defineVirtReg(MachineInstr &MI, unsigned OpNum, unsigned VirtReg,
unsigned Hint);
LiveReg &reloadVirtReg(MachineInstr &MI, unsigned OpNum, unsigned VirtReg,
@@ -616,6 +617,31 @@ void RegAllocFast::allocVirtReg(MachineInstr &MI, LiveReg &LR, unsigned Hint) {
assignVirtToPhysReg(LR, BestReg);
}
+void RegAllocFast::allocVirtRegUndef(MachineOperand &MO) {
+ assert(MO.isUndef() && "expected undef use");
+ unsigned VirtReg = MO.getReg();
+ assert(TargetRegisterInfo::isVirtualRegister(VirtReg) && "Expected virtreg");
+
+ LiveRegMap::const_iterator LRI = findLiveVirtReg(VirtReg);
+ MCPhysReg PhysReg;
+ if (LRI != LiveVirtRegs.end() && LRI->PhysReg) {
+ PhysReg = LRI->PhysReg;
+ } else {
+ const TargetRegisterClass &RC = *MRI->getRegClass(VirtReg);
+ ArrayRef<MCPhysReg> AllocationOrder = RegClassInfo.getOrder(&RC);
+ assert(!AllocationOrder.empty() && "Allocation order must not be empty");
+ PhysReg = AllocationOrder[0];
+ }
+
+ unsigned SubRegIdx = MO.getSubReg();
+ if (SubRegIdx != 0) {
+ PhysReg = TRI->getSubReg(PhysReg, SubRegIdx);
+ MO.setSubReg(0);
+ }
+ MO.setReg(PhysReg);
+ MO.setIsRenamable(true);
+}
+
/// Allocates a register for VirtReg and mark it as dirty.
MCPhysReg RegAllocFast::defineVirtReg(MachineInstr &MI, unsigned OpNum,
unsigned VirtReg, unsigned Hint) {
@@ -933,12 +959,18 @@ void RegAllocFast::allocateInstruction(MachineInstr &MI) {
// Second scan.
// Allocate virtreg uses.
+ bool HasUndefUse = false;
for (unsigned I = 0; I != VirtOpEnd; ++I) {
MachineOperand &MO = MI.getOperand(I);
if (!MO.isReg()) continue;
unsigned Reg = MO.getReg();
if (!TargetRegisterInfo::isVirtualRegister(Reg)) continue;
if (MO.isUse()) {
+ if (MO.isUndef()) {
+ HasUndefUse = true;
+ // There is no need to allocate a register for an undef use.
+ continue;
+ }
LiveReg &LR = reloadVirtReg(MI, I, Reg, CopyDstReg);
MCPhysReg PhysReg = LR.PhysReg;
CopySrcReg = (CopySrcReg == Reg || CopySrcReg == PhysReg) ? PhysReg : 0;
@@ -947,6 +979,22 @@ void RegAllocFast::allocateInstruction(MachineInstr &MI) {
}
}
+ // Allocate undef operands. This is a separate step because in a situation
+ // like ` = OP undef %X, %X` both operands need the same register assigned,
+ // so we should perform the normal assignment first.
+ if (HasUndefUse) {
+ for (MachineOperand &MO : MI.uses()) {
+ if (!MO.isReg() || !MO.isUse())
+ continue;
+ unsigned Reg = MO.getReg();
+ if (!TargetRegisterInfo::isVirtualRegister(Reg))
+ continue;
+
+ assert(MO.isUndef() && "Should only have undef virtreg uses left");
+ allocVirtRegUndef(MO);
+ }
+ }
+
// Track registers defined by instruction - early clobbers and tied uses at
// this point.
UsedInInstr.clear();
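
The comment added in the last hunk explains why undef uses are rewritten in a separate pass: for an instruction such as ` = OP undef %X, %X` the ordinary use of %X fixes the physical register first, and the undef use must then adopt the same assignment. A standalone sketch of that ordering (plain C++, not LLVM code; all names are made up for illustration):

    #include <cstdio>
    #include <map>
    #include <vector>

    struct Operand { unsigned VReg; bool IsUndef; };

    int main() {
      // "= OP undef %0, %0": the same vreg appears as an undef use and
      // as an ordinary use in one instruction.
      std::vector<Operand> Uses = {{0, true}, {0, false}};

      std::map<unsigned, unsigned> Assignment; // vreg -> physreg
      unsigned NextFreePhysReg = 1;

      // First pass: only ordinary (non-undef) uses pick an assignment.
      for (const Operand &O : Uses)
        if (!O.IsUndef && !Assignment.count(O.VReg))
          Assignment[O.VReg] = NextFreePhysReg++;

      // Second pass: undef uses adopt the existing assignment, so both
      // operands end up in the same physical register; if the vreg had
      // no assignment, any allocatable register would do.
      for (const Operand &O : Uses)
        if (O.IsUndef)
          std::printf("undef use of %%%u -> physreg %u\n", O.VReg,
                      Assignment.count(O.VReg) ? Assignment[O.VReg]
                                               : NextFreePhysReg);
      return 0;
    }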