aboutsummaryrefslogtreecommitdiff
path: root/llvm/lib/CodeGen/PeepholeOptimizer.cpp
diff options
context:
space:
mode:
authorGuozhi Wei <carrot@google.com>2023-10-17 16:22:42 +0000
committerGuozhi Wei <carrot@google.com>2023-10-17 16:22:42 +0000
commit760e7d00d142ba85fcf48c00e0acc14a355da7c3 (patch)
tree35d8df6bab9961a96dec0439abfb039e206b464f /llvm/lib/CodeGen/PeepholeOptimizer.cpp
parent08d6b8745430e133cc9d257cded623229e58fddd (diff)
downloadllvm-760e7d00d142ba85fcf48c00e0acc14a355da7c3.zip
llvm-760e7d00d142ba85fcf48c00e0acc14a355da7c3.tar.gz
llvm-760e7d00d142ba85fcf48c00e0acc14a355da7c3.tar.bz2
[X86, Peephole] Enable FoldImmediate for X86
Enable FoldImmediate for X86 by implementing X86InstrInfo::FoldImmediate. Also enhanced the peephole optimizer by deleting identical instructions after FoldImmediate. Differential Revision: https://reviews.llvm.org/D151848
Diffstat (limited to 'llvm/lib/CodeGen/PeepholeOptimizer.cpp')
-rw-r--r--llvm/lib/CodeGen/PeepholeOptimizer.cpp60
1 file changed, 44 insertions, 16 deletions
diff --git a/llvm/lib/CodeGen/PeepholeOptimizer.cpp b/llvm/lib/CodeGen/PeepholeOptimizer.cpp
index a08cc78..f413ca5 100644
--- a/llvm/lib/CodeGen/PeepholeOptimizer.cpp
+++ b/llvm/lib/CodeGen/PeepholeOptimizer.cpp
@@ -202,7 +202,8 @@ namespace {
bool isMoveImmediate(MachineInstr &MI, SmallSet<Register, 4> &ImmDefRegs,
DenseMap<Register, MachineInstr *> &ImmDefMIs);
bool foldImmediate(MachineInstr &MI, SmallSet<Register, 4> &ImmDefRegs,
- DenseMap<Register, MachineInstr *> &ImmDefMIs);
+ DenseMap<Register, MachineInstr *> &ImmDefMIs,
+ bool &Deleted);
/// Finds recurrence cycles, but only ones that formulated around
/// a def operand and a use operand that are tied. If there is a use
@@ -217,8 +218,11 @@ namespace {
/// set \p CopyMIs. If this virtual register was previously seen as a
/// copy, replace the uses of this copy with the previously seen copy's
/// destination register.
+ /// \p LocalMIs contains all previous seen instructions. An optimized away
+ /// instruction should be deleted from LocalMIs.
bool foldRedundantCopy(MachineInstr &MI,
- DenseMap<RegSubRegPair, MachineInstr *> &CopyMIs);
+ DenseMap<RegSubRegPair, MachineInstr *> &CopyMIs,
+ SmallPtrSetImpl<MachineInstr *> &LocalMIs);
/// Is the register \p Reg a non-allocatable physical register?
bool isNAPhysCopy(Register Reg);
@@ -1351,18 +1355,19 @@ bool PeepholeOptimizer::isMoveImmediate(
MachineInstr &MI, SmallSet<Register, 4> &ImmDefRegs,
DenseMap<Register, MachineInstr *> &ImmDefMIs) {
const MCInstrDesc &MCID = MI.getDesc();
- if (!MI.isMoveImmediate())
- return false;
- if (MCID.getNumDefs() != 1)
+ if (MCID.getNumDefs() != 1 || !MI.getOperand(0).isReg())
return false;
Register Reg = MI.getOperand(0).getReg();
- if (Reg.isVirtual()) {
- ImmDefMIs.insert(std::make_pair(Reg, &MI));
- ImmDefRegs.insert(Reg);
- return true;
- }
+ if (!Reg.isVirtual())
+ return false;
- return false;
+ int64_t ImmVal;
+ if (!MI.isMoveImmediate() && !TII->getConstValDefinedInReg(MI, Reg, ImmVal))
+ return false;
+
+ ImmDefMIs.insert(std::make_pair(Reg, &MI));
+ ImmDefRegs.insert(Reg);
+ return true;
}
/// Try folding register operands that are defined by move immediate
@@ -1370,7 +1375,8 @@ bool PeepholeOptimizer::isMoveImmediate(
/// and only if the def and use are in the same BB.
bool PeepholeOptimizer::foldImmediate(
MachineInstr &MI, SmallSet<Register, 4> &ImmDefRegs,
- DenseMap<Register, MachineInstr *> &ImmDefMIs) {
+ DenseMap<Register, MachineInstr *> &ImmDefMIs, bool &Deleted) {
+ Deleted = false;
for (unsigned i = 0, e = MI.getDesc().getNumOperands(); i != e; ++i) {
MachineOperand &MO = MI.getOperand(i);
if (!MO.isReg() || MO.isDef())
@@ -1384,6 +1390,19 @@ bool PeepholeOptimizer::foldImmediate(
assert(II != ImmDefMIs.end() && "couldn't find immediate definition");
if (TII->FoldImmediate(MI, *II->second, Reg, MRI)) {
++NumImmFold;
+ // FoldImmediate can delete ImmDefMI if MI was its only user. If ImmDefMI
+ // is not deleted, and MI happens to be identical to it, we can delete MI
+ // and replace its uses.
+ if (MRI->getVRegDef(Reg) &&
+ MI.isIdenticalTo(*II->second, MachineInstr::IgnoreVRegDefs)) {
+ Register DstReg = MI.getOperand(0).getReg();
+ if (DstReg.isVirtual() &&
+ MRI->getRegClass(DstReg) == MRI->getRegClass(Reg)) {
+ MRI->replaceRegWith(DstReg, Reg);
+ MI.eraseFromParent();
+ Deleted = true;
+ }
+ }
return true;
}
}
@@ -1405,7 +1424,8 @@ bool PeepholeOptimizer::foldImmediate(
//
// Should replace %2 uses with %1:sub1
bool PeepholeOptimizer::foldRedundantCopy(
- MachineInstr &MI, DenseMap<RegSubRegPair, MachineInstr *> &CopyMIs) {
+ MachineInstr &MI, DenseMap<RegSubRegPair, MachineInstr *> &CopyMIs,
+ SmallPtrSetImpl<MachineInstr *> &LocalMIs) {
assert(MI.isCopy() && "expected a COPY machine instruction");
Register SrcReg = MI.getOperand(1).getReg();
@@ -1425,6 +1445,8 @@ bool PeepholeOptimizer::foldRedundantCopy(
}
MachineInstr *PrevCopy = CopyMIs.find(SrcPair)->second;
+ if (!LocalMIs.count(PrevCopy))
+ return false;
assert(SrcSubReg == PrevCopy->getOperand(1).getSubReg() &&
"Unexpected mismatching subreg!");
@@ -1732,7 +1754,7 @@ bool PeepholeOptimizer::runOnMachineFunction(MachineFunction &MF) {
continue;
}
- if (MI->isCopy() && (foldRedundantCopy(*MI, CopySrcMIs) ||
+ if (MI->isCopy() && (foldRedundantCopy(*MI, CopySrcMIs, LocalMIs) ||
foldRedundantNAPhysCopy(*MI, NAPhysToVirtMIs))) {
LocalMIs.erase(MI);
LLVM_DEBUG(dbgs() << "Deleting redundant copy: " << *MI << "\n");
@@ -1750,8 +1772,14 @@ bool PeepholeOptimizer::runOnMachineFunction(MachineFunction &MF) {
// next iteration sees the new instructions.
MII = MI;
++MII;
- if (SeenMoveImm)
- Changed |= foldImmediate(*MI, ImmDefRegs, ImmDefMIs);
+ if (SeenMoveImm) {
+ bool Deleted;
+ Changed |= foldImmediate(*MI, ImmDefRegs, ImmDefMIs, Deleted);
+ if (Deleted) {
+ LocalMIs.erase(MI);
+ continue;
+ }
+ }
}
// Check whether MI is a load candidate for folding into a later