author     Piotr Padlewski <piotr.padlewski@gmail.com>  2018-05-05 10:23:27 +0000
committer  Piotr Padlewski <piotr.padlewski@gmail.com>  2018-05-05 10:23:27 +0000
commit     e9832dfdf366ddffba68164adb6855d17c9f87c1 (patch)
tree       f4c3f721b3903da7bb0ada7e34e27bd992edbd65 /llvm/lib/Analysis/CaptureTracking.cpp
parent     c2ad0968450085bb39b1750d3c43960610e15816 (diff)
[CaptureTracking] Handle capturing of launder.invariant.group
Summary:
launder.invariant.group follows the same capture rules as
bitcast, gep, etc.: the original value is not captured
if the returned pointer is not captured.
With this patch, we mark 40% more functions as noalias when compiling with -fstrict-vtable-pointers;
1078 vs 1778 (39.37%)
Reviewers: sanjoy, davide, nlewycky, majnemer, mehdi_amini
Subscribers: JDevlieghere, llvm-commits
Differential Revision: https://reviews.llvm.org/D32673
llvm-svn: 331587
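
As a rough illustration of the rule described in the summary (not part of the commit itself), the sketch below builds a function whose pointer argument is only passed through llvm.launder.invariant.group and loaded from via the laundered result, then asks CaptureTracking whether it escapes. The IR string, the function name @f, and the choice of the boolean PointerMayBeCaptured overload are illustrative assumptions; with this patch the query should report the argument as not captured.

```cpp
// Minimal sketch, assuming typed-pointer IR of the LLVM 7 era.
#include "llvm/Analysis/CaptureTracking.h"
#include "llvm/AsmParser/Parser.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/IR/Module.h"
#include "llvm/Support/SourceMgr.h"
#include "llvm/Support/raw_ostream.h"

using namespace llvm;

int main() {
  // %p is only laundered and then loaded through; neither use lets it escape.
  const char *IR = R"(
    declare i8* @llvm.launder.invariant.group.p0i8(i8*)

    define void @f(i8* %p) {
      %q = call i8* @llvm.launder.invariant.group.p0i8(i8* %p)
      %v = load i8, i8* %q
      ret void
    }
  )";

  LLVMContext Ctx;
  SMDiagnostic Err;
  std::unique_ptr<Module> M = parseAssemblyString(IR, Err, Ctx);
  if (!M)
    return 1;

  Argument *P = &*M->getFunction("f")->arg_begin();
  bool Captured =
      PointerMayBeCaptured(P, /*ReturnCaptures=*/true, /*StoreCaptures=*/true);
  errs() << "captured: " << Captured << "\n"; // expected to print 0 with this patch
  return 0;
}
```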
Diffstat (limited to 'llvm/lib/Analysis/CaptureTracking.cpp')
-rw-r--r--  llvm/lib/Analysis/CaptureTracking.cpp  45
1 files changed, 23 insertions, 22 deletions
diff --git a/llvm/lib/Analysis/CaptureTracking.cpp b/llvm/lib/Analysis/CaptureTracking.cpp
index 6566c7a..782e277 100644
--- a/llvm/lib/Analysis/CaptureTracking.cpp
+++ b/llvm/lib/Analysis/CaptureTracking.cpp
@@ -215,18 +215,22 @@ void llvm::PointerMayBeCaptured(const Value *V, CaptureTracker *Tracker) {
   assert(V->getType()->isPointerTy() && "Capture is for pointers only!");
 
   SmallVector<const Use *, Threshold> Worklist;
   SmallSet<const Use *, Threshold> Visited;
-  int Count = 0;
-  for (const Use &U : V->uses()) {
-    // If there are lots of uses, conservatively say that the value
-    // is captured to avoid taking too much compile time.
-    if (Count++ >= Threshold)
-      return Tracker->tooManyUses();
-
-    if (!Tracker->shouldExplore(&U)) continue;
-    Visited.insert(&U);
-    Worklist.push_back(&U);
-  }
+  auto AddUses = [&](const Value *V) {
+    int Count = 0;
+    for (const Use &U : V->uses()) {
+      // If there are lots of uses, conservatively say that the value
+      // is captured to avoid taking too much compile time.
+      if (Count++ >= Threshold)
+        return Tracker->tooManyUses();
+      if (!Visited.insert(&U).second)
+        continue;
+      if (!Tracker->shouldExplore(&U))
+        continue;
+      Worklist.push_back(&U);
+    }
+  };
+  AddUses(V);
 
   while (!Worklist.empty()) {
     const Use *U = Worklist.pop_back_val();
@@ -243,6 +247,13 @@ void llvm::PointerMayBeCaptured(const Value *V, CaptureTracker *Tracker) {
       if (CS.onlyReadsMemory() && CS.doesNotThrow() && I->getType()->isVoidTy())
         break;
 
+      // launder.invariant.group only captures pointer by returning it,
+      // so the pointer wasn't captured if returned pointer is not captured.
+      if (CS.getIntrinsicID() == Intrinsic::launder_invariant_group) {
+        AddUses(I);
+        break;
+      }
+
       // Volatile operations effectively capture the memory location that they
       // load and store to.
       if (auto *MI = dyn_cast<MemIntrinsic>(I))
@@ -313,17 +324,7 @@ void llvm::PointerMayBeCaptured(const Value *V, CaptureTracker *Tracker) {
     case Instruction::Select:
     case Instruction::AddrSpaceCast:
       // The original value is not captured via this if the new value isn't.
-      Count = 0;
-      for (Use &UU : I->uses()) {
-        // If there are lots of uses, conservatively say that the value
-        // is captured to avoid taking too much compile time.
-        if (Count++ >= Threshold)
-          return Tracker->tooManyUses();
-
-        if (Visited.insert(&UU).second)
-          if (Tracker->shouldExplore(&UU))
-            Worklist.push_back(&UU);
-      }
+      AddUses(I);
       break;
     case Instruction::ICmp: {
       // Don't count comparisons of a no-alias return value against null as
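
For context, here is a minimal sketch (again, not part of the commit) of how a client typically drives the worklist shown above: it supplies the CaptureTracker callbacks that AddUses and the capture checks report into. The EscapeChecker and mayEscape names are made up for illustration; only the CaptureTracker interface and PointerMayBeCaptured(V, Tracker) entry point come from the LLVM headers.

```cpp
#include "llvm/Analysis/CaptureTracking.h"
#include "llvm/IR/Value.h"

using namespace llvm;

namespace {
// Illustrative tracker: records whether any use was reported as capturing.
struct EscapeChecker : public CaptureTracker {
  bool Captured = false;

  // Called when the traversal gives up because there are too many uses;
  // be conservative and treat the pointer as captured.
  void tooManyUses() override { Captured = true; }

  // Called for each use that may capture the pointer; returning true
  // stops the traversal early.
  bool captured(const Use *U) override {
    Captured = true;
    return true;
  }
};
} // namespace

// Returns true if V may be captured. With this patch, a pointer whose only
// use is a non-escaping llvm.launder.invariant.group call is not captured.
static bool mayEscape(const Value *V) {
  EscapeChecker Tracker;
  PointerMayBeCaptured(V, &Tracker);
  return Tracker.Captured;
}
```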