author    Dan Gohman <dan433584@gmail.com>  2016-05-17 22:24:18 +0000
committer Dan Gohman <dan433584@gmail.com>  2016-05-17 22:24:18 +0000
commit    1054570a29d43e780f09d72ab31f059740ce576a (patch)
tree      7f562b041898b41adc791b9b076091321c2f915a /llvm/lib/Target/WebAssembly/WebAssemblyRegStackify.cpp
parent    705231bfd40eb269fc1b226047486e8b337710c9 (diff)
[WebAssembly] Model the stack evaluation order more precisely.
We currently don't represent get_local and set_local explicitly; they are just implied by virtual register use and def. This avoids a lot of clutter, but it does complicate stackifying: get_locals read their operands at their position in the stack evaluation order, rather than at their parent instruction. This patch adds code to walk the stack to determine the precise ordering, when needed.

llvm-svn: 269854
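Not part of the patch, but for illustration: a minimal, self-contained C++ sketch of what "get_locals read their operands at their position in the stack evaluation order" means. The Insn struct, the program for (a + b) * c, and the local values are invented for this example and do not correspond to anything in WebAssemblyRegStackify.cpp.

// Hypothetical stack-machine walk-through, not LLVM code: evaluate the
// stackified form of (a + b) * c and print where each local is read.
#include <cstdint>
#include <iostream>
#include <map>
#include <string>
#include <vector>

struct Insn {
  std::string Op;     // "get_local", "add", or "mul"
  std::string Local;  // local name, used only by get_local
};

int main() {
  std::map<std::string, int64_t> Locals = {{"a", 2}, {"b", 3}, {"c", 4}};
  // Stackified form of (a + b) * c. Each operand is read at the slot of its
  // implicit get_local, not at the add/mul that eventually pops it.
  std::vector<Insn> Program = {
      {"get_local", "a"}, {"get_local", "b"}, {"add", ""},
      {"get_local", "c"}, {"mul", ""}};

  std::vector<int64_t> Stack;
  for (size_t I = 0; I != Program.size(); ++I) {
    const Insn &In = Program[I];
    if (In.Op == "get_local") {
      // This slot is the point at which the local's value is actually read.
      std::cout << "slot " << I << ": read local " << In.Local << "\n";
      Stack.push_back(Locals[In.Local]);
    } else {
      int64_t R = Stack.back(); Stack.pop_back();
      int64_t L = Stack.back(); Stack.pop_back();
      Stack.push_back(In.Op == "add" ? L + R : L * R);
    }
  }
  std::cout << "result = " << Stack.back() << "\n";  // (2 + 3) * 4 = 20
  return 0;
}

Here the read of c happens at slot 3, after the add has already executed, rather than at the mul that consumes it. The loop this patch adds to OneUseDominatesOtherUses (in the diff below) walks up the chain of single-use stackified defs from another use to check whether it ultimately feeds the same instruction at a later operand, i.e. whether it would be read after the selected use in this evaluation order.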
Diffstat (limited to 'llvm/lib/Target/WebAssembly/WebAssemblyRegStackify.cpp')
-rw-r--r--  llvm/lib/Target/WebAssembly/WebAssemblyRegStackify.cpp | 60
1 files changed, 40 insertions, 20 deletions
diff --git a/llvm/lib/Target/WebAssembly/WebAssemblyRegStackify.cpp b/llvm/lib/Target/WebAssembly/WebAssemblyRegStackify.cpp
index bd843fb..4a0359a 100644
--- a/llvm/lib/Target/WebAssembly/WebAssemblyRegStackify.cpp
+++ b/llvm/lib/Target/WebAssembly/WebAssemblyRegStackify.cpp
@@ -163,10 +163,12 @@ static void Query(const MachineInstr *MI, AliasAnalysis &AA,
// of memoperands as having a potential unknown memory reference.
break;
default:
- // Record potential stores, unless it's a call, as calls are handled
+ // Record volatile accesses, unless it's a call, as calls are handled
// specially below.
- if (!MI->isCall())
+ if (!MI->isCall()) {
Write = true;
+ Effects = true;
+ }
break;
}
}
@@ -197,24 +199,15 @@ static void Query(const MachineInstr *MI, AliasAnalysis &AA,
if (MI->isCall()) {
switch (MI->getOpcode()) {
case WebAssembly::CALL_VOID:
+ case WebAssembly::CALL_INDIRECT_VOID:
QueryCallee(MI, 0, Read, Write, Effects, StackPointer);
break;
- case WebAssembly::CALL_I32:
- case WebAssembly::CALL_I64:
- case WebAssembly::CALL_F32:
- case WebAssembly::CALL_F64:
+ case WebAssembly::CALL_I32: case WebAssembly::CALL_I64:
+ case WebAssembly::CALL_F32: case WebAssembly::CALL_F64:
+ case WebAssembly::CALL_INDIRECT_I32: case WebAssembly::CALL_INDIRECT_I64:
+ case WebAssembly::CALL_INDIRECT_F32: case WebAssembly::CALL_INDIRECT_F64:
QueryCallee(MI, 1, Read, Write, Effects, StackPointer);
break;
- case WebAssembly::CALL_INDIRECT_VOID:
- case WebAssembly::CALL_INDIRECT_I32:
- case WebAssembly::CALL_INDIRECT_I64:
- case WebAssembly::CALL_INDIRECT_F32:
- case WebAssembly::CALL_INDIRECT_F64:
- Read = true;
- Write = true;
- Effects = true;
- StackPointer = true;
- break;
default:
llvm_unreachable("unexpected call opcode");
}
@@ -360,7 +353,8 @@ static bool OneUseDominatesOtherUses(unsigned Reg, const MachineOperand &OneUse,
const MachineBasicBlock &MBB,
const MachineRegisterInfo &MRI,
const MachineDominatorTree &MDT,
- LiveIntervals &LIS) {
+ LiveIntervals &LIS,
+ WebAssemblyFunctionInfo &MFI) {
const LiveInterval &LI = LIS.getInterval(Reg);
const MachineInstr *OneUseInst = OneUse.getParent();
@@ -384,8 +378,31 @@ static bool OneUseDominatesOtherUses(unsigned Reg, const MachineOperand &OneUse,
return false;
} else {
// Test that the use is dominated by the one selected use.
- if (!MDT.dominates(OneUseInst, UseInst))
- return false;
+ while (!MDT.dominates(OneUseInst, UseInst)) {
+ // Actually, dominating is over-conservative. Test that the use would
+ // happen after the one selected use in the stack evaluation order.
+ //
+ // This is needed as a consequence of using implicit get_locals for
+ // uses and implicit set_locals for defs.
+ if (UseInst->getDesc().getNumDefs() == 0)
+ return false;
+ const MachineOperand &MO = UseInst->getOperand(0);
+ if (!MO.isReg())
+ return false;
+ unsigned DefReg = MO.getReg();
+ if (!TargetRegisterInfo::isVirtualRegister(DefReg) ||
+ !MFI.isVRegStackified(DefReg))
+ return false;
+ assert(MRI.hasOneUse(DefReg));
+ const MachineOperand &NewUse = *MRI.use_begin(DefReg);
+ const MachineInstr *NewUseInst = NewUse.getParent();
+ if (NewUseInst == OneUseInst) {
+ if (&OneUse > &NewUse)
+ return false;
+ break;
+ }
+ UseInst = NewUseInst;
+ }
}
}
return true;
@@ -619,6 +636,9 @@ public:
/// Test whether the given register is present on the stack, indicating an
/// operand in the tree that we haven't visited yet. Moving a definition of
/// Reg to a point in the tree after that would change its value.
+ ///
+ /// This is needed as a consequence of using implicit get_locals for
+ /// uses and implicit set_locals for defs.
bool IsOnStack(unsigned Reg) const {
for (const RangeTy &Range : Worklist)
for (const MachineOperand &MO : Range)
@@ -763,7 +783,7 @@ bool WebAssemblyRegStackify::runOnMachineFunction(MachineFunction &MF) {
Insert = RematerializeCheapDef(Reg, Op, Def, MBB, Insert, LIS, MFI,
MRI, TII, TRI);
} else if (CanMove &&
- OneUseDominatesOtherUses(Reg, Op, MBB, MRI, MDT, LIS)) {
+ OneUseDominatesOtherUses(Reg, Op, MBB, MRI, MDT, LIS, MFI)) {
Insert = MoveAndTeeForMultiUse(Reg, Op, Def, MBB, Insert, LIS, MFI,
MRI, TII);
} else {