Diffstat (limited to 'llvm/lib/CodeGen/RegAllocFast.cpp')
-rw-r--r-- llvm/lib/CodeGen/RegAllocFast.cpp | 271
1 file changed, 155 insertions(+), 116 deletions(-)
diff --git a/llvm/lib/CodeGen/RegAllocFast.cpp b/llvm/lib/CodeGen/RegAllocFast.cpp
index 17df5d6..fe2746d 100644
--- a/llvm/lib/CodeGen/RegAllocFast.cpp
+++ b/llvm/lib/CodeGen/RegAllocFast.cpp
@@ -240,6 +240,8 @@ namespace {
void addRegClassDefCounts(std::vector<unsigned> &RegClassDefCounts,
Register Reg) const;
+ void findAndSortDefOperandIndexes(const MachineInstr &MI);
+
void allocateInstruction(MachineInstr &MI);
void handleDebugValue(MachineInstr &MI);
void handleBundle(MachineInstr &MI);
@@ -265,18 +267,18 @@ namespace {
void allocVirtRegUndef(MachineOperand &MO);
void assignDanglingDebugValues(MachineInstr &Def, Register VirtReg,
MCPhysReg Reg);
- void defineLiveThroughVirtReg(MachineInstr &MI, unsigned OpNum,
+ bool defineLiveThroughVirtReg(MachineInstr &MI, unsigned OpNum,
Register VirtReg);
- void defineVirtReg(MachineInstr &MI, unsigned OpNum, Register VirtReg,
+ bool defineVirtReg(MachineInstr &MI, unsigned OpNum, Register VirtReg,
bool LookAtPhysRegUses = false);
- void useVirtReg(MachineInstr &MI, unsigned OpNum, Register VirtReg);
+ bool useVirtReg(MachineInstr &MI, unsigned OpNum, Register VirtReg);
MachineBasicBlock::iterator
getMBBBeginInsertionPoint(MachineBasicBlock &MBB,
SmallSet<Register, 2> &PrologLiveIns) const;
void reloadAtBegin(MachineBasicBlock &MBB);
- void setPhysReg(MachineInstr &MI, MachineOperand &MO, MCPhysReg PhysReg);
+ bool setPhysReg(MachineInstr &MI, MachineOperand &MO, MCPhysReg PhysReg);
Register traceCopies(Register VirtReg) const;
Register traceCopyChain(Register Reg) const;
@@ -875,10 +877,11 @@ void RegAllocFast::allocVirtRegUndef(MachineOperand &MO) {
/// Variation of defineVirtReg() with special handling for livethrough regs
/// (tied or earlyclobber) that may interfere with preassigned uses.
-void RegAllocFast::defineLiveThroughVirtReg(MachineInstr &MI, unsigned OpNum,
+/// \return true if MI's MachineOperands were re-arranged/invalidated.
+bool RegAllocFast::defineLiveThroughVirtReg(MachineInstr &MI, unsigned OpNum,
Register VirtReg) {
if (!shouldAllocateRegister(VirtReg))
- return;
+ return false;
LiveRegMap::iterator LRI = findLiveVirtReg(VirtReg);
if (LRI != LiveVirtRegs.end()) {
MCPhysReg PrevReg = LRI->PhysReg;
@@ -909,11 +912,13 @@ void RegAllocFast::defineLiveThroughVirtReg(MachineInstr &MI, unsigned OpNum,
/// perform an allocation if:
/// - It is a dead definition without any uses.
/// - The value is live out and all uses are in different basic blocks.
-void RegAllocFast::defineVirtReg(MachineInstr &MI, unsigned OpNum,
+///
+/// \return true if MI's MachineOperands were re-arranged/invalidated.
+bool RegAllocFast::defineVirtReg(MachineInstr &MI, unsigned OpNum,
Register VirtReg, bool LookAtPhysRegUses) {
assert(VirtReg.isVirtual() && "Not a virtual register");
if (!shouldAllocateRegister(VirtReg))
- return;
+ return false;
MachineOperand &MO = MI.getOperand(OpNum);
LiveRegMap::iterator LRI;
bool New;
@@ -974,15 +979,16 @@ void RegAllocFast::defineVirtReg(MachineInstr &MI, unsigned OpNum,
BundleVirtRegsMap[VirtReg] = PhysReg;
}
markRegUsedInInstr(PhysReg);
- setPhysReg(MI, MO, PhysReg);
+ return setPhysReg(MI, MO, PhysReg);
}
/// Allocates a register for a VirtReg use.
-void RegAllocFast::useVirtReg(MachineInstr &MI, unsigned OpNum,
+/// \return true if MI's MachineOperands were re-arranged/invalidated.
+bool RegAllocFast::useVirtReg(MachineInstr &MI, unsigned OpNum,
Register VirtReg) {
assert(VirtReg.isVirtual() && "Not a virtual register");
if (!shouldAllocateRegister(VirtReg))
- return;
+ return false;
MachineOperand &MO = MI.getOperand(OpNum);
LiveRegMap::iterator LRI;
bool New;
@@ -1019,8 +1025,7 @@ void RegAllocFast::useVirtReg(MachineInstr &MI, unsigned OpNum,
if (LRI->Error) {
const TargetRegisterClass &RC = *MRI->getRegClass(VirtReg);
ArrayRef<MCPhysReg> AllocationOrder = RegClassInfo.getOrder(&RC);
- setPhysReg(MI, MO, *AllocationOrder.begin());
- return;
+ return setPhysReg(MI, MO, *AllocationOrder.begin());
}
}
@@ -1030,18 +1035,17 @@ void RegAllocFast::useVirtReg(MachineInstr &MI, unsigned OpNum,
BundleVirtRegsMap[VirtReg] = LRI->PhysReg;
}
markRegUsedInInstr(LRI->PhysReg);
- setPhysReg(MI, MO, LRI->PhysReg);
+ return setPhysReg(MI, MO, LRI->PhysReg);
}
-/// Changes operand OpNum in MI the refer the PhysReg, considering subregs. This
-/// may invalidate any operand pointers. Return true if the operand kills its
-/// register.
-void RegAllocFast::setPhysReg(MachineInstr &MI, MachineOperand &MO,
+/// Changes operand OpNum in MI to refer to PhysReg, considering subregs.
+/// \return true if MI's MachineOperands were re-arranged/invalidated.
+bool RegAllocFast::setPhysReg(MachineInstr &MI, MachineOperand &MO,
MCPhysReg PhysReg) {
if (!MO.getSubReg()) {
MO.setReg(PhysReg);
MO.setIsRenamable(true);
- return;
+ return false;
}
// Handle subregister index.
@@ -1057,7 +1061,8 @@ void RegAllocFast::setPhysReg(MachineInstr &MI, MachineOperand &MO,
// register kill.
if (MO.isKill()) {
MI.addRegisterKilled(PhysReg, TRI, true);
- return;
+ // Conservatively assume implicit MOs were re-arranged.
+ return true;
}
// A <def,read-undef> of a sub-register requires an implicit def of the full
@@ -1067,7 +1072,10 @@ void RegAllocFast::setPhysReg(MachineInstr &MI, MachineOperand &MO,
MI.addRegisterDead(PhysReg, TRI, true);
else
MI.addRegisterDefined(PhysReg, TRI);
+ // Conservatively assume implicit MOs were re-arranged.
+ return true;
}
+ return false;
}
#ifndef NDEBUG
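
The conservative "return true" above matters because a MachineInstr keeps its operands in a resizable array: helpers such as addRegisterKilled() or addRegisterDefined() may append implicit operands, which can reallocate that array and leave previously obtained MachineOperand references dangling. The same hazard, reduced to a minimal standalone C++ sketch using a plain std::vector rather than the LLVM types:

#include <cstdio>
#include <vector>

int main() {
  std::vector<int> Ops = {1, 2, 3};
  int &Second = Ops[1];        // reference into the operand array
  std::printf("%d\n", Second); // fine here: prints 2
  Ops.push_back(4);            // may reallocate; Second may now dangle
  // Using Second past this point would be undefined behavior. The safe
  // pattern is the one the allocator adopts: signal the caller, who
  // re-fetches operands by index after any call that may have appended.
  std::printf("%d\n", Ops[1]); // safe: re-indexed after the append
}
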
@@ -1147,6 +1155,72 @@ void RegAllocFast::addRegClassDefCounts(std::vector<unsigned> &RegClassDefCounts
}
}
+/// Compute \ref DefOperandIndexes so it contains the indices of "def" operands
+/// that are to be allocated, ordered so that operands from small register
+/// classes, early clobbers, and livethroughs are assigned first.
+void RegAllocFast::findAndSortDefOperandIndexes(const MachineInstr &MI) {
+ DefOperandIndexes.clear();
+
+ // Track number of defs which may consume a register from the class.
+ std::vector<unsigned> RegClassDefCounts(TRI->getNumRegClasses(), 0);
+ assert(RegClassDefCounts[0] == 0);
+
+ LLVM_DEBUG(dbgs() << "Need to assign livethroughs\n");
+ for (unsigned I = 0, E = MI.getNumOperands(); I < E; ++I) {
+ const MachineOperand &MO = MI.getOperand(I);
+ if (!MO.isReg())
+ continue;
+ Register Reg = MO.getReg();
+ if (MO.readsReg()) {
+ if (Reg.isPhysical()) {
+ LLVM_DEBUG(dbgs() << "mark extra used: " << printReg(Reg, TRI) << '\n');
+ markPhysRegUsedInInstr(Reg);
+ }
+ }
+
+ if (MO.isDef()) {
+ if (Reg.isVirtual() && shouldAllocateRegister(Reg))
+ DefOperandIndexes.push_back(I);
+
+ addRegClassDefCounts(RegClassDefCounts, Reg);
+ }
+ }
+
+ llvm::sort(DefOperandIndexes, [&](uint16_t I0, uint16_t I1) {
+ const MachineOperand &MO0 = MI.getOperand(I0);
+ const MachineOperand &MO1 = MI.getOperand(I1);
+ Register Reg0 = MO0.getReg();
+ Register Reg1 = MO1.getReg();
+ const TargetRegisterClass &RC0 = *MRI->getRegClass(Reg0);
+ const TargetRegisterClass &RC1 = *MRI->getRegClass(Reg1);
+
+ // Identify register classes that are easy to use up completely in this
+ // instruction.
+ unsigned ClassSize0 = RegClassInfo.getOrder(&RC0).size();
+ unsigned ClassSize1 = RegClassInfo.getOrder(&RC1).size();
+
+ bool SmallClass0 = ClassSize0 < RegClassDefCounts[RC0.getID()];
+ bool SmallClass1 = ClassSize1 < RegClassDefCounts[RC1.getID()];
+ if (SmallClass0 > SmallClass1)
+ return true;
+ if (SmallClass0 < SmallClass1)
+ return false;
+
+ // Allocate early clobbers and livethrough operands first.
+ bool Livethrough0 = MO0.isEarlyClobber() || MO0.isTied() ||
+ (MO0.getSubReg() == 0 && !MO0.isUndef());
+ bool Livethrough1 = MO1.isEarlyClobber() || MO1.isTied() ||
+ (MO1.getSubReg() == 0 && !MO1.isUndef());
+ if (Livethrough0 > Livethrough1)
+ return true;
+ if (Livethrough0 < Livethrough1)
+ return false;
+
+ // Tie-break rule: operand index.
+ return I0 < I1;
+ });
+}
+
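
As a rough standalone illustration of the comparator's three-level priority (small-class defs first, then early clobbers/livethroughs, then operand index), here is the same ordering over a hypothetical simplified DefOp struct; the struct and values are invented for the sketch and are not LLVM types:

#include <algorithm>
#include <cstdio>
#include <vector>

// Hypothetical stand-in for a def operand: just the three keys the
// comparator above compares.
struct DefOp {
  bool SmallClass;   // class can be used up entirely by this instruction
  bool Livethrough;  // early clobber, tied, or full-reg non-undef def
  unsigned Index;    // operand index within the instruction
};

int main() {
  std::vector<DefOp> Defs = {{false, true, 0}, {true, false, 2},
                             {false, false, 1}, {true, true, 3}};
  std::sort(Defs.begin(), Defs.end(), [](const DefOp &A, const DefOp &B) {
    if (A.SmallClass != B.SmallClass)   // small classes first
      return A.SmallClass > B.SmallClass;
    if (A.Livethrough != B.Livethrough) // then livethroughs/early clobbers
      return A.Livethrough > B.Livethrough;
    return A.Index < B.Index;           // stable tie-break: operand index
  });
  for (const DefOp &D : Defs)
    std::printf("op %u\n", D.Index);    // prints 3, 2, 0, 1
}
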
void RegAllocFast::allocateInstruction(MachineInstr &MI) {
// The basic algorithm here is:
// 1. Mark registers of def operands as free
@@ -1218,6 +1292,10 @@ void RegAllocFast::allocateInstruction(MachineInstr &MI) {
// Allocate virtreg defs.
if (HasDef) {
if (HasVRegDef) {
+ // Note that implicit MOs can get re-arranged by defineVirtReg(), so loop
+ // multiple times to ensure no operand is missed.
+ bool ReArrangedImplicitOps = true;
+
// Special handling for early clobbers, tied operands or subregister defs:
// Compared to "normal" defs these:
// - Must not use a register that is pre-assigned for a use operand.
@@ -1225,90 +1303,45 @@ void RegAllocFast::allocateInstruction(MachineInstr &MI) {
// heuristic to figure out a good operand order before doing
// assignments.
if (NeedToAssignLiveThroughs) {
- DefOperandIndexes.clear();
PhysRegUses.clear();
- // Track number of defs which may consume a register from the class.
- std::vector<unsigned> RegClassDefCounts(TRI->getNumRegClasses(), 0);
- assert(RegClassDefCounts[0] == 0);
-
- LLVM_DEBUG(dbgs() << "Need to assign livethroughs\n");
- for (unsigned I = 0, E = MI.getNumOperands(); I < E; ++I) {
- const MachineOperand &MO = MI.getOperand(I);
- if (!MO.isReg())
- continue;
- Register Reg = MO.getReg();
- if (MO.readsReg()) {
- if (Reg.isPhysical()) {
- LLVM_DEBUG(dbgs() << "mark extra used: " << printReg(Reg, TRI)
- << '\n');
- markPhysRegUsedInInstr(Reg);
+ while (ReArrangedImplicitOps) {
+ ReArrangedImplicitOps = false;
+ findAndSortDefOperandIndexes(MI);
+ for (uint16_t OpIdx : DefOperandIndexes) {
+ MachineOperand &MO = MI.getOperand(OpIdx);
+ LLVM_DEBUG(dbgs() << "Allocating " << MO << '\n');
+ unsigned Reg = MO.getReg();
+ if (MO.isEarlyClobber() ||
+ (MO.isTied() && !TiedOpIsUndef(MO, OpIdx)) ||
+ (MO.getSubReg() && !MO.isUndef())) {
+ ReArrangedImplicitOps = defineLiveThroughVirtReg(MI, OpIdx, Reg);
+ } else {
+ ReArrangedImplicitOps = defineVirtReg(MI, OpIdx, Reg);
+ }
+ if (ReArrangedImplicitOps) {
+ // Implicit operands of MI were re-arranged,
+ // re-compute DefOperandIndexes.
+ break;
}
- }
-
- if (MO.isDef()) {
- if (Reg.isVirtual() && shouldAllocateRegister(Reg))
- DefOperandIndexes.push_back(I);
-
- addRegClassDefCounts(RegClassDefCounts, Reg);
- }
- }
-
- llvm::sort(DefOperandIndexes, [&](uint16_t I0, uint16_t I1) {
- const MachineOperand &MO0 = MI.getOperand(I0);
- const MachineOperand &MO1 = MI.getOperand(I1);
- Register Reg0 = MO0.getReg();
- Register Reg1 = MO1.getReg();
- const TargetRegisterClass &RC0 = *MRI->getRegClass(Reg0);
- const TargetRegisterClass &RC1 = *MRI->getRegClass(Reg1);
-
- // Identify regclass that are easy to use up completely just in this
- // instruction.
- unsigned ClassSize0 = RegClassInfo.getOrder(&RC0).size();
- unsigned ClassSize1 = RegClassInfo.getOrder(&RC1).size();
-
- bool SmallClass0 = ClassSize0 < RegClassDefCounts[RC0.getID()];
- bool SmallClass1 = ClassSize1 < RegClassDefCounts[RC1.getID()];
- if (SmallClass0 > SmallClass1)
- return true;
- if (SmallClass0 < SmallClass1)
- return false;
-
- // Allocate early clobbers and livethrough operands first.
- bool Livethrough0 = MO0.isEarlyClobber() || MO0.isTied() ||
- (MO0.getSubReg() == 0 && !MO0.isUndef());
- bool Livethrough1 = MO1.isEarlyClobber() || MO1.isTied() ||
- (MO1.getSubReg() == 0 && !MO1.isUndef());
- if (Livethrough0 > Livethrough1)
- return true;
- if (Livethrough0 < Livethrough1)
- return false;
-
- // Tie-break rule: operand index.
- return I0 < I1;
- });
-
- for (uint16_t OpIdx : DefOperandIndexes) {
- MachineOperand &MO = MI.getOperand(OpIdx);
- LLVM_DEBUG(dbgs() << "Allocating " << MO << '\n');
- unsigned Reg = MO.getReg();
- if (MO.isEarlyClobber() ||
- (MO.isTied() && !TiedOpIsUndef(MO, OpIdx)) ||
- (MO.getSubReg() && !MO.isUndef())) {
- defineLiveThroughVirtReg(MI, OpIdx, Reg);
- } else {
- defineVirtReg(MI, OpIdx, Reg);
}
}
} else {
// Assign virtual register defs.
- for (unsigned I = 0, E = MI.getNumOperands(); I < E; ++I) {
- MachineOperand &MO = MI.getOperand(I);
- if (!MO.isReg() || !MO.isDef())
- continue;
- Register Reg = MO.getReg();
- if (Reg.isVirtual())
- defineVirtReg(MI, I, Reg);
+ while (ReArrangedImplicitOps) {
+ ReArrangedImplicitOps = false;
+ for (unsigned I = 0, E = MI.getNumOperands(); I < E; ++I) {
+ MachineOperand &MO = MI.getOperand(I);
+ if (!MO.isReg() || !MO.isDef())
+ continue;
+ Register Reg = MO.getReg();
+ if (Reg.isVirtual()) {
+ ReArrangedImplicitOps = defineVirtReg(MI, I, Reg);
+ if (ReArrangedImplicitOps) {
+ break;
+ }
+ }
+ }
}
}
}
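
Both rewritten loops follow the same rescan-on-mutation pattern: if a callee reports that it re-arranged the operand list, abandon the current scan and start over, so that no stale index or reference is used and operands appended mid-scan are still visited. A minimal standalone C++ sketch of the pattern, with a hypothetical processAndMaybeGrow() standing in for defineVirtReg():

#include <cstddef>
#include <cstdio>
#include <vector>

// May append to Items, invalidating any cached size or reference.
// Returns true if it mutated the container.
static bool processAndMaybeGrow(std::vector<int> &Items, std::size_t I) {
  if (Items[I] % 2 == 0) {
    Items.push_back(Items[I] + 1); // mutation invalidates the current scan
    Items[I] += 1;                 // now odd: won't trigger again
    return true;
  }
  return false;
}

int main() {
  std::vector<int> Items = {2, 3, 4};
  bool Mutated = true;
  while (Mutated) {     // restart the scan after every mutation
    Mutated = false;
    for (std::size_t I = 0, E = Items.size(); I < E; ++I) {
      if (processAndMaybeGrow(Items, I)) {
        Mutated = true;
        break;          // size/references may be stale: start over
      }
    }
  }
  for (int V : Items)
    std::printf("%d\n", V); // prints 3 3 5 3 5
}

Termination relies on each mutation also making the triggering element non-triggering on the next pass, much as the allocator replaces a virtual register operand with a physical one, so each restart makes progress.
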
@@ -1382,29 +1415,35 @@ void RegAllocFast::allocateInstruction(MachineInstr &MI) {
}
// Allocate virtreg uses and insert reloads as necessary.
+ // Implicit MOs can get moved/removed by useVirtReg(), so loop multiple
+ // times to ensure no operand is missed.
bool HasUndefUse = false;
- for (unsigned I = 0; I < MI.getNumOperands(); ++I) {
- MachineOperand &MO = MI.getOperand(I);
- if (!MO.isReg() || !MO.isUse())
- continue;
- Register Reg = MO.getReg();
- if (!Reg.isVirtual() || !shouldAllocateRegister(Reg))
- continue;
-
- if (MO.isUndef()) {
- HasUndefUse = true;
- continue;
- }
-
+ bool ReArrangedImplicitMOs = true;
+ while (ReArrangedImplicitMOs) {
+ ReArrangedImplicitMOs = false;
+ for (unsigned I = 0; I < MI.getNumOperands(); ++I) {
+ MachineOperand &MO = MI.getOperand(I);
+ if (!MO.isReg() || !MO.isUse())
+ continue;
+ Register Reg = MO.getReg();
+ if (!Reg.isVirtual() || !shouldAllocateRegister(Reg))
+ continue;
- // Populate MayLiveAcrossBlocks in case the use block is allocated before
- // the def block (removing the vreg uses).
- mayLiveIn(Reg);
+ if (MO.isUndef()) {
+ HasUndefUse = true;
+ continue;
+ }
+ // Populate MayLiveAcrossBlocks in case the use block is allocated before
+ // the def block (removing the vreg uses).
+ mayLiveIn(Reg);
- assert(!MO.isInternalRead() && "Bundles not supported");
- assert(MO.readsReg() && "reading use");
- useVirtReg(MI, I, Reg);
+ assert(!MO.isInternalRead() && "Bundles not supported");
+ assert(MO.readsReg() && "reading use");
+ ReArrangedImplicitMOs = useVirtReg(MI, I, Reg);
+ if (ReArrangedImplicitMOs)
+ break;
+ }
}
// Allocate undef operands. This is a separate step because in a situation