author      Stephen Tozer <stephen.tozer@sony.com>       2024-08-29 17:53:32 +0100
committer   GitHub <noreply@github.com>                  2024-08-29 17:53:32 +0100
commit      3d08ade7bd32f0296e0ca3a13640cc95fa89229a (patch)
tree        215a84046ff8a15cfa8e80c475fbdbe7ef2acc05 /llvm/lib/CodeGen/CodeGenPrepare.cpp
parent      e5e38ddf1b8043324175868831da21e941c00aff (diff)
[ExtendLifetimes] Implement llvm.fake.use to extend variable lifetimes (#86149)
This patch is part of a set of patches that add an `-fextend-lifetimes`
flag to clang, which extends the lifetimes of local variables and
parameters for improved debuggability. In addition to that flag, the
patch series adds a pragma to selectively disable `-fextend-lifetimes`,
and an `-fextend-this-ptr` flag which functions as `-fextend-lifetimes`
for `this` pointers only. All changes and tests in these patches were
written by Wolfgang Pieb (@wolfy1961), while Stephen Tozer (@SLTozer)
has handled review and merging. The `-fextend-lifetimes` flag is intended to
eventually be enabled by `-Og`, as discussed in the RFC here:
https://discourse.llvm.org/t/rfc-redefine-og-o1-and-add-a-new-level-of-og/72850
This patch implements a new intrinsic instruction in LLVM,
`llvm.fake.use` in IR and `FAKE_USE` in MIR, that takes a single operand
and has no effect other than "using" its operand, to ensure that its
operand remains live until after the fake use. This patch does not emit
fake uses anywhere; the next patch in this sequence causes them to be
emitted from the clang frontend, such that for each variable (or `this`) a
`fake.use` call is emitted at the end of that variable's scope, using
that variable's value. This patch covers everything post-frontend, which
is largely just the basic plumbing for a new intrinsic/instruction,
along with a few steps to preserve the fake uses through optimizations
(such as moving them ahead of a tail call or translating them through
SROA).
Co-authored-by: Stephen Tozer <stephen.tozer@sony.com>
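To make the intended IR concrete, here is a minimal sketch (not part of this commit) that builds a tiny module by hand and inserts a fake use just before the return. The function name `demo`, the variables, and the use of `IRBuilder::CreateIntrinsic` are assumptions for illustration only; the actual clang-side emission lands in the follow-up patch and may construct the call differently.

```cpp
// Illustrative sketch only (not part of this commit). Builds a small module
// in which parameter X is kept live until just before the return by a call
// to llvm.fake.use. Assumes an LLVM build that already contains this patch,
// i.e. Intrinsic::fake_use exists.
#include "llvm/IR/Function.h"
#include "llvm/IR/IRBuilder.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/IR/Module.h"
#include "llvm/Support/raw_ostream.h"

using namespace llvm;

int main() {
  LLVMContext Ctx;
  Module M("fake_use_demo", Ctx);

  // Roughly: int demo(int x) { int y = x + 1; return y; }
  auto *I32 = Type::getInt32Ty(Ctx);
  auto *FTy = FunctionType::get(I32, {I32}, /*isVarArg=*/false);
  Function *F = Function::Create(FTy, Function::ExternalLinkage, "demo", M);
  BasicBlock *Entry = BasicBlock::Create(Ctx, "entry", F);

  IRBuilder<> B(Entry);
  Value *X = F->getArg(0);
  Value *Y = B.CreateAdd(X, B.getInt32(1), "y");

  // The fake use computes nothing and has no side effects; it only "uses" X,
  // so later passes keep X available up to this point (the end of its scope).
  B.CreateIntrinsic(Intrinsic::fake_use, /*Types=*/{}, /*Args=*/{X});
  B.CreateRet(Y);

  M.print(outs(), nullptr);
  return 0;
}
```

The printed module should contain something along the lines of `call void (...) @llvm.fake.use(i32 %x)` immediately before the `ret`; that is the kind of instruction the CodeGenPrepare change below learns to skip over and to preserve when it duplicates returns into tail-call blocks.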
Diffstat (limited to 'llvm/lib/CodeGen/CodeGenPrepare.cpp')
-rw-r--r--  llvm/lib/CodeGen/CodeGenPrepare.cpp | 44
1 file changed, 41 insertions(+), 3 deletions(-)
diff --git a/llvm/lib/CodeGen/CodeGenPrepare.cpp b/llvm/lib/CodeGen/CodeGenPrepare.cpp
index da6c758..271a047 100644
--- a/llvm/lib/CodeGen/CodeGenPrepare.cpp
+++ b/llvm/lib/CodeGen/CodeGenPrepare.cpp
@@ -2800,12 +2800,34 @@ bool CodeGenPrepare::dupRetToEnableTailCallOpts(BasicBlock *BB,
     return false;
   };
 
+  SmallVector<const IntrinsicInst *, 4> FakeUses;
+
+  auto isFakeUse = [&FakeUses](const Instruction *Inst) {
+    if (auto *II = dyn_cast<IntrinsicInst>(Inst);
+        II && II->getIntrinsicID() == Intrinsic::fake_use) {
+      // Record the instruction so it can be preserved when the exit block is
+      // removed. Do not preserve the fake use that uses the result of the
+      // PHI instruction.
+      // Do not copy fake uses that use the result of a PHI node.
+      // FIXME: If we do want to copy the fake use into the return blocks, we
+      // have to figure out which of the PHI node operands to use for each
+      // copy.
+      if (!isa<PHINode>(II->getOperand(0))) {
+        FakeUses.push_back(II);
+      }
+      return true;
+    }
+
+    return false;
+  };
+
   // Make sure there are no instructions between the first instruction
   // and return.
   const Instruction *BI = BB->getFirstNonPHI();
   // Skip over debug and the bitcast.
   while (isa<DbgInfoIntrinsic>(BI) || BI == BCI || BI == EVI ||
-         isa<PseudoProbeInst>(BI) || isLifetimeEndOrBitCastFor(BI))
+         isa<PseudoProbeInst>(BI) || isLifetimeEndOrBitCastFor(BI) ||
+         isFakeUse(BI))
     BI = BI->getNextNode();
   if (BI != RetI)
     return false;
@@ -2814,6 +2836,9 @@ bool CodeGenPrepare::dupRetToEnableTailCallOpts(BasicBlock *BB,
   /// call.
   const Function *F = BB->getParent();
   SmallVector<BasicBlock *, 4> TailCallBBs;
+  // Record the call instructions so we can insert any fake uses
+  // that need to be preserved before them.
+  SmallVector<CallInst *, 4> CallInsts;
   if (PN) {
     for (unsigned I = 0, E = PN->getNumIncomingValues(); I != E; ++I) {
       // Look through bitcasts.
@@ -2825,6 +2850,7 @@ bool CodeGenPrepare::dupRetToEnableTailCallOpts(BasicBlock *BB,
           TLI->mayBeEmittedAsTailCall(CI) &&
           attributesPermitTailCall(F, CI, RetI, *TLI)) {
         TailCallBBs.push_back(PredBB);
+        CallInsts.push_back(CI);
       } else {
         // Consider the cases in which the phi value is indirectly produced by
         // the tail call, for example when encountering memset(), memmove(),
@@ -2844,8 +2870,10 @@ bool CodeGenPrepare::dupRetToEnableTailCallOpts(BasicBlock *BB,
             isIntrinsicOrLFToBeTailCalled(TLInfo, CI) &&
             IncomingVal == CI->getArgOperand(0) &&
             TLI->mayBeEmittedAsTailCall(CI) &&
-            attributesPermitTailCall(F, CI, RetI, *TLI))
+            attributesPermitTailCall(F, CI, RetI, *TLI)) {
           TailCallBBs.push_back(PredBB);
+          CallInsts.push_back(CI);
+        }
       }
     }
   } else {
@@ -2863,6 +2891,7 @@ bool CodeGenPrepare::dupRetToEnableTailCallOpts(BasicBlock *BB,
             (isIntrinsicOrLFToBeTailCalled(TLInfo, CI) &&
              V == CI->getArgOperand(0))) {
           TailCallBBs.push_back(Pred);
+          CallInsts.push_back(CI);
         }
       }
     }
@@ -2889,8 +2918,17 @@ bool CodeGenPrepare::dupRetToEnableTailCallOpts(BasicBlock *BB,
   }
 
   // If we eliminated all predecessors of the block, delete the block now.
-  if (Changed && !BB->hasAddressTaken() && pred_empty(BB))
+  if (Changed && !BB->hasAddressTaken() && pred_empty(BB)) {
+    // Copy the fake uses found in the original return block to all blocks
+    // that contain tail calls.
+    for (auto *CI : CallInsts) {
+      for (auto const *FakeUse : FakeUses) {
+        auto *ClonedInst = FakeUse->clone();
+        ClonedInst->insertBefore(CI);
+      }
+    }
     BB->eraseFromParent();
+  }
 
   return Changed;
 }