//===--- CaptureTracking.cpp - Determine whether a pointer is captured ----===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file contains routines that help determine which pointers are captured.
// A pointer value is captured if the function makes a copy of any part of the
// pointer that outlives the call.  Not being captured means, more or less, that
// the pointer is only dereferenced and not stored in a global.  Returning part
// of the pointer as the function return value may or may not count as capturing
// the pointer, depending on the context.
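//
// As an illustration (an IR sketch, not code from this file): in the snippet
// below, %p is captured because a copy of it is stored to a global that
// outlives the call, while %q is only dereferenced and, ignoring returns, is
// not captured.
//
//   store ptr %p, ptr @g        ; captures %p
//   %v = load i32, ptr %q       ; does not capture %q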
//
//===----------------------------------------------------------------------===//

#include "llvm/Analysis/CaptureTracking.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/CFG.h"
#include "llvm/Analysis/ValueTracking.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/Support/CommandLine.h"

using namespace llvm;

#define DEBUG_TYPE "capture-tracking"

STATISTIC(NumCaptured,          "Number of pointers maybe captured");
STATISTIC(NumNotCaptured,       "Number of pointers not captured");
STATISTIC(NumCapturedBefore,    "Number of pointers maybe captured before");
STATISTIC(NumNotCapturedBefore, "Number of pointers not captured before");

/// The default value for the MaxUsesToExplore argument. It's relatively small
/// to keep the cost of analysis reasonable for clients like
/// BasicAliasAnalysis, where the results can't be cached.
/// TODO: we should probably introduce a caching CaptureTracking analysis and
/// use it where possible. The caching version could use a much higher limit,
/// or have no cap at all.
static cl::opt<unsigned>
    DefaultMaxUsesToExplore("capture-tracking-max-uses-to-explore", cl::Hidden,
                            cl::desc("Maximal number of uses to explore."),
                            cl::init(100));
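
// Note (a usage sketch, assuming a standard `opt` invocation; the pass name
// is only illustrative): the cap can be raised for a single run by passing
// the flag explicitly, e.g.
//   opt -passes=function-attrs -capture-tracking-max-uses-to-explore=500 in.ll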

unsigned llvm::getDefaultMaxUsesToExploreForCaptureTracking() {
  return DefaultMaxUsesToExplore;
}

CaptureTracker::~CaptureTracker() = default;

bool CaptureTracker::shouldExplore(const Use *U) { return true; }

bool CaptureTracker::isDereferenceableOrNull(Value *O, const DataLayout &DL) {
  // We want comparisons to null pointers to not be considered capturing,
  // but need to guard against cases like gep(p, -ptrtoint(p2)) == null,
  // which are equivalent to p == p2 and would capture the pointer.
  //
  // A dereferenceable pointer is a case where this is known to be safe,
  // because the pointer resulting from such a construction would not be
  // dereferenceable.
  //
  // It is not sufficient to check for inbounds GEP here, because GEP with
  // zero offset is always inbounds.
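  //
  // For illustration (an IR sketch, not code from this file), the following
  // null check is really a disguised p == p2 comparison, and it would leak
  // information about %p unless %p is known dereferenceable:
  //   %pi  = ptrtoint ptr %p2 to i64
  //   %neg = sub i64 0, %pi
  //   %q   = getelementptr i8, ptr %p, i64 %neg
  //   %c   = icmp eq ptr %q, null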
  bool CanBeNull, CanBeFreed;
  return O->getPointerDereferenceableBytes(DL, CanBeNull, CanBeFreed);
}

namespace {
struct SimpleCaptureTracker : public CaptureTracker {
  explicit SimpleCaptureTracker(bool ReturnCaptures)
      : ReturnCaptures(ReturnCaptures) {}

  void tooManyUses() override {
    LLVM_DEBUG(dbgs() << "Captured due to too many uses\n");
    Captured = true;
  }

  bool captured(const Use *U) override {
    if (isa<ReturnInst>(U->getUser()) && !ReturnCaptures)
      return false;

    LLVM_DEBUG(dbgs() << "Captured by: " << *U->getUser() << "\n");

    Captured = true;
    return true;
  }

  bool ReturnCaptures;

  bool Captured = false;
};

/// Only find pointer captures which happen before the given instruction. Uses
/// the dominator tree to determine whether one instruction is before another.
/// Only supports the case where the Value is defined in the same basic block
/// as the given instruction and the use.
struct CapturesBefore : public CaptureTracker {

  CapturesBefore(bool ReturnCaptures, const Instruction *I,
                 const DominatorTree *DT, bool IncludeI, const LoopInfo *LI)
      : BeforeHere(I), DT(DT), ReturnCaptures(ReturnCaptures),
        IncludeI(IncludeI), LI(LI) {}

  void tooManyUses() override { Captured = true; }

  bool isSafeToPrune(Instruction *I) {
    if (BeforeHere == I)
      return !IncludeI;

    // We only need to explore this use if it can reach "BeforeHere".
    // If the use is not reachable from the entry block, there is no need to
    // explore it.
    if (!DT->isReachableFromEntry(I->getParent()))
      return true;

    // Check whether there is a path from I to BeforeHere.
    return !isPotentiallyReachable(I, BeforeHere, nullptr, DT, LI);
  }

  bool captured(const Use *U) override {
    Instruction *I = cast<Instruction>(U->getUser());
    if (isa<ReturnInst>(I) && !ReturnCaptures)
      return false;

    // Check isSafeToPrune() here rather than in shouldExplore() to avoid
    // an expensive reachability query for every instruction we look at.
    // Instead we only do one for actual capturing candidates.
    if (isSafeToPrune(I))
      return false;

    Captured = true;
    return true;
  }

  const Instruction *BeforeHere;
  const DominatorTree *DT;

  bool ReturnCaptures;
  bool IncludeI;

  bool Captured = false;

  const LoopInfo *LI;
};

/// Find the 'earliest' instruction before which the pointer is known not to
/// be captured. Here an instruction A is considered earlier than instruction
/// B if A dominates B. If two escapes do not dominate each other, the
/// terminator of their common dominator is chosen. If not all uses can be
/// analyzed, the earliest escape is set to the first instruction in the
/// function entry block.
// NOTE: Users have to make sure instructions compared against the earliest
// escape are not in a cycle.
struct EarliestCaptures : public CaptureTracker {

  EarliestCaptures(bool ReturnCaptures, Function &F, const DominatorTree &DT)
      : DT(DT), ReturnCaptures(ReturnCaptures), F(F) {}

  void tooManyUses() override {
    Captured = true;
    EarliestCapture = &*F.getEntryBlock().begin();
  }

  bool captured(const Use *U) override {
    Instruction *I = cast<Instruction>(U->getUser());
    if (isa<ReturnInst>(I) && !ReturnCaptures)
      return false;

    if (!EarliestCapture)
      EarliestCapture = I;
    else
      EarliestCapture = DT.findNearestCommonDominator(EarliestCapture, I);
    Captured = true;

    // Return false to continue analysis; we need to see all potential
    // captures.
    return false;
  }

  Instruction *EarliestCapture = nullptr;

  const DominatorTree &DT;

  bool ReturnCaptures;

  bool Captured = false;

  Function &F;
};
} // namespace

/// PointerMayBeCaptured - Return true if this pointer value may be captured
/// by the enclosing function (which is required to exist).  This routine can
/// be expensive, so consider caching the results.  The boolean ReturnCaptures
/// specifies whether returning the value (or part of it) from the function
/// counts as capturing it or not.  The boolean StoreCaptures specifies whether
/// storing the value (or part of it) into memory anywhere automatically
/// counts as capturing it or not.
bool llvm::PointerMayBeCaptured(const Value *V, bool ReturnCaptures,
                                bool StoreCaptures, unsigned MaxUsesToExplore) {
  assert(!isa<GlobalValue>(V) &&
         "It doesn't make sense to ask whether a global is captured.");

  // TODO: If StoreCaptures is not true, we could do fancier analysis
  // to determine whether a given store is not actually an escape point.
  // In that case, BasicAliasAnalysis should be updated as well to
  // take advantage of this.
  (void)StoreCaptures;

  LLVM_DEBUG(dbgs() << "Captured?: " << *V << " = ");

  SimpleCaptureTracker SCT(ReturnCaptures);
  PointerMayBeCaptured(V, &SCT, MaxUsesToExplore);
  if (SCT.Captured)
    ++NumCaptured;
  else {
    ++NumNotCaptured;
    LLVM_DEBUG(dbgs() << "not captured\n");
  }
  return SCT.Captured;
}
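
// Example usage (a sketch; `AI` stands for some function-local allocation the
// caller already has, and the declaration's default use limit is assumed):
//   bool MayEscape = PointerMayBeCaptured(AI, /*ReturnCaptures=*/true,
//                                         /*StoreCaptures=*/true);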

/// PointerMayBeCapturedBefore - Return true if this pointer value may be
/// captured by the enclosing function (which is required to exist). If a
/// DominatorTree is provided, only captures which happen before the given
/// instruction are considered. This routine can be expensive, so consider
/// caching the results.  The boolean ReturnCaptures specifies whether
/// returning the value (or part of it) from the function counts as capturing
/// it or not.  The boolean StoreCaptures specifies whether storing the value
/// (or part of it) into memory anywhere automatically counts as capturing it
/// or not.
bool llvm::PointerMayBeCapturedBefore(const Value *V, bool ReturnCaptures,
                                      bool StoreCaptures, const Instruction *I,
                                      const DominatorTree *DT, bool IncludeI,
                                      unsigned MaxUsesToExplore,
                                      const LoopInfo *LI) {
  assert(!isa<GlobalValue>(V) &&
         "It doesn't make sense to ask whether a global is captured.");

  if (!DT)
    return PointerMayBeCaptured(V, ReturnCaptures, StoreCaptures,
                                MaxUsesToExplore);

  // TODO: See comment in PointerMayBeCaptured regarding what could be done
  // with StoreCaptures.

  CapturesBefore CB(ReturnCaptures, I, DT, IncludeI, LI);
  PointerMayBeCaptured(V, &CB, MaxUsesToExplore);
  if (CB.Captured)
    ++NumCapturedBefore;
  else
    ++NumNotCapturedBefore;
  return CB.Captured;
}
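
// Example usage (a sketch; `MemI` is some memory-accessing instruction of
// interest and `DT` a DominatorTree for its function; the trailing
// MaxUsesToExplore/LoopInfo parameters are assumed to keep their declared
// defaults):
//   bool CapturedBeforeMemI = PointerMayBeCapturedBefore(
//       Ptr, /*ReturnCaptures=*/false, /*StoreCaptures=*/true, MemI, &DT,
//       /*IncludeI=*/false);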

Instruction *llvm::FindEarliestCapture(const Value *V, Function &F,
                                       bool ReturnCaptures, bool StoreCaptures,
                                       const DominatorTree &DT,
                                       unsigned MaxUsesToExplore) {
  assert(!isa<GlobalValue>(V) &&
         "It doesn't make sense to ask whether a global is captured.");

  EarliestCaptures CB(ReturnCaptures, F, DT);
  PointerMayBeCaptured(V, &CB, MaxUsesToExplore);
  if (CB.Captured)
    ++NumCapturedBefore;
  else
    ++NumNotCapturedBefore;
  return CB.EarliestCapture;
}
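
// Example usage (a sketch; `AI` is a function-local allocation in `F`, `DT`
// its DominatorTree, and the default use limit is assumed):
//   Instruction *EC = FindEarliestCapture(AI, F, /*ReturnCaptures=*/false,
//                                         /*StoreCaptures=*/true, DT);
//   // A null result means no capture was found; otherwise AI is known not to
//   // be captured before EC (subject to the cycle caveat noted above).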

UseCaptureKind llvm::DetermineUseCaptureKind(
    const Use &U,
    function_ref<bool(Value *, const DataLayout &)> IsDereferenceableOrNull) {
  Instruction *I = dyn_cast<Instruction>(U.getUser());

  // TODO: Investigate non-instruction uses.
  if (!I)
    return UseCaptureKind::MAY_CAPTURE;

  switch (I->getOpcode()) {
  case Instruction::Call:
  case Instruction::Invoke: {
    auto *Call = cast<CallBase>(I);
    // Not captured if the callee is readonly, doesn't return a copy through
    // its return value and doesn't unwind (a readonly function can leak bits
    // by throwing an exception or not depending on the input value).
    if (Call->onlyReadsMemory() && Call->doesNotThrow() &&
        Call->getType()->isVoidTy())
      return UseCaptureKind::NO_CAPTURE;

    // The pointer is not captured if the returned pointer is not captured.
    // NOTE: CaptureTracking users should not assume that only functions
    // marked with nocapture do not capture. This means that places like
    // getUnderlyingObject in ValueTracking or DecomposeGEPExpression
    // in BasicAA also need to know about this property.
    if (isIntrinsicReturningPointerAliasingArgumentWithoutCapturing(Call, true))
      return UseCaptureKind::PASSTHROUGH;

    // Volatile operations effectively capture the memory location that they
    // load and store to.
    if (auto *MI = dyn_cast<MemIntrinsic>(Call))
      if (MI->isVolatile())
        return UseCaptureKind::MAY_CAPTURE;

    // Calling a function pointer does not in itself cause the pointer to
    // be captured.  This is a subtle point considering that (for example)
    // the callee might return its own address.  It is analogous to saying
    // that loading a value from a pointer does not cause the pointer to be
    // captured, even though the loaded value might be the pointer itself
    // (think of self-referential objects).
    if (Call->isCallee(&U))
      return UseCaptureKind::NO_CAPTURE;

    // Not captured if only passed via 'nocapture' arguments.
    if (Call->isDataOperand(&U) &&
        !Call->doesNotCapture(Call->getDataOperandNo(&U))) {
      // The parameter is not marked 'nocapture' - captured.
      return UseCaptureKind::MAY_CAPTURE;
    }
    return UseCaptureKind::NO_CAPTURE;
  }
  case Instruction::Load:
    // Volatile loads make the address observable.
    if (cast<LoadInst>(I)->isVolatile())
      return UseCaptureKind::MAY_CAPTURE;
    return UseCaptureKind::NO_CAPTURE;
  case Instruction::VAArg:
    // "va-arg" from a pointer does not cause it to be captured.
    return UseCaptureKind::NO_CAPTURE;
  case Instruction::Store:
    // Stored the pointer - conservatively assume it may be captured.
    // Volatile stores make the address observable.
    if (U.getOperandNo() == 0 || cast<StoreInst>(I)->isVolatile())
      return UseCaptureKind::MAY_CAPTURE;
    return UseCaptureKind::NO_CAPTURE;
  case Instruction::AtomicRMW: {
    // atomicrmw conceptually includes both a load and store from
    // the same location.
    // As with a store, the location being accessed is not captured,
    // but the value being stored is.
    // Volatile stores make the address observable.
    auto *ARMWI = cast<AtomicRMWInst>(I);
    if (U.getOperandNo() == 1 || ARMWI->isVolatile())
      return UseCaptureKind::MAY_CAPTURE;
    return UseCaptureKind::NO_CAPTURE;
  }
  case Instruction::AtomicCmpXchg: {
    // cmpxchg conceptually includes both a load and store from
    // the same location.
    // As with a store, the location being accessed is not captured,
    // but the value being stored is.
    // Volatile stores make the address observable.
    auto *ACXI = cast<AtomicCmpXchgInst>(I);
    if (U.getOperandNo() == 1 || U.getOperandNo() == 2 || ACXI->isVolatile())
      return UseCaptureKind::MAY_CAPTURE;
    return UseCaptureKind::NO_CAPTURE;
  }
  case Instruction::GetElementPtr:
    // AA does not support pointers of vectors, so GEP vector splats need to
    // be considered as captures.
    if (I->getType()->isVectorTy())
      return UseCaptureKind::MAY_CAPTURE;
    return UseCaptureKind::PASSTHROUGH;
  case Instruction::BitCast:
  case Instruction::PHI:
  case Instruction::Select:
  case Instruction::AddrSpaceCast:
    // The original value is not captured via this if the new value isn't.
    return UseCaptureKind::PASSTHROUGH;
  case Instruction::ICmp: {
    unsigned Idx = U.getOperandNo();
    unsigned OtherIdx = 1 - Idx;
    if (auto *CPN = dyn_cast<ConstantPointerNull>(I->getOperand(OtherIdx))) {
      // Don't count comparisons of a no-alias return value against null as
      // captures. This allows us to ignore comparisons of malloc results
      // with null, for example.
      if (CPN->getType()->getAddressSpace() == 0)
        if (isNoAliasCall(U.get()->stripPointerCasts()))
          return UseCaptureKind::NO_CAPTURE;
      if (!I->getFunction()->nullPointerIsDefined()) {
        auto *O = I->getOperand(Idx)->stripPointerCastsSameRepresentation();
        // Comparing a dereferenceable_or_null pointer against null cannot
        // lead to pointer escapes, because if it is not null it must be a
        // valid (in-bounds) pointer.
        const DataLayout &DL = I->getModule()->getDataLayout();
        if (IsDereferenceableOrNull && IsDereferenceableOrNull(O, DL))
          return UseCaptureKind::NO_CAPTURE;
      }
    }

    // Otherwise, be conservative. There are crazy ways to capture pointers
    // using comparisons.
    return UseCaptureKind::MAY_CAPTURE;
  }
  default:
    // Something else - be conservative and say it is captured.
    return UseCaptureKind::MAY_CAPTURE;
  }
}
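
// Example usage (a sketch mirroring the worklist loop below): classify each
// direct use of a pointer value `V` with some `IsDereferenceableOrNull`
// callback and treat MAY_CAPTURE uses as escapes:
//   for (const Use &U : V->uses())
//     if (DetermineUseCaptureKind(U, IsDereferenceableOrNull) ==
//         UseCaptureKind::MAY_CAPTURE)
//       ...; // V may escape through U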

void llvm::PointerMayBeCaptured(const Value *V, CaptureTracker *Tracker,
                                unsigned MaxUsesToExplore) {
  assert(V->getType()->isPointerTy() && "Capture is for pointers only!");
  if (MaxUsesToExplore == 0)
    MaxUsesToExplore = DefaultMaxUsesToExplore;

  SmallVector<const Use *, 20> Worklist;
  Worklist.reserve(getDefaultMaxUsesToExploreForCaptureTracking());
  SmallSet<const Use *, 20> Visited;

  auto AddUses = [&](const Value *V) {
    for (const Use &U : V->uses()) {
      // If there are lots of uses, conservatively say that the value
      // is captured to avoid taking too much compile time.
      if (Visited.size() >= MaxUsesToExplore) {
        Tracker->tooManyUses();
        return false;
      }
      if (!Visited.insert(&U).second)
        continue;
      if (!Tracker->shouldExplore(&U))
        continue;
      Worklist.push_back(&U);
    }
    return true;
  };
  if (!AddUses(V))
    return;

  auto IsDereferenceableOrNull = [Tracker](Value *V, const DataLayout &DL) {
    return Tracker->isDereferenceableOrNull(V, DL);
  };
  while (!Worklist.empty()) {
    const Use *U = Worklist.pop_back_val();
    switch (DetermineUseCaptureKind(*U, IsDereferenceableOrNull)) {
    case UseCaptureKind::NO_CAPTURE:
      continue;
    case UseCaptureKind::MAY_CAPTURE:
      if (Tracker->captured(U))
        return;
      continue;
    case UseCaptureKind::PASSTHROUGH:
      if (!AddUses(U->getUser()))
        return;
      continue;
    }
  }

  // All uses examined.
}

bool llvm::isNonEscapingLocalObject(
    const Value *V, SmallDenseMap<const Value *, bool, 8> *IsCapturedCache) {
  SmallDenseMap<const Value *, bool, 8>::iterator CacheIt;
  if (IsCapturedCache) {
    bool Inserted;
    std::tie(CacheIt, Inserted) = IsCapturedCache->insert({V, false});
    if (!Inserted)
      // Found cached result, return it!
      return CacheIt->second;
  }

  // If this is an identified function-local object, check to see if it escapes.
  if (isIdentifiedFunctionLocal(V)) {
    // Set StoreCaptures to True so that we can assume in our callers that the
    // pointer is not the result of a load instruction. Currently
    // PointerMayBeCaptured doesn't have any special analysis for the
    // StoreCaptures=false case; if it did, our callers could be refined to be
    // more precise.
    auto Ret = !PointerMayBeCaptured(V, false, /*StoreCaptures=*/true);
    if (IsCapturedCache)
      CacheIt->second = Ret;
    return Ret;
  }

  return false;
}
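
// Example usage (a sketch; `Ptr` is some pointer of interest, and the cache
// is optional and owned by the caller so it can be shared across queries):
//   SmallDenseMap<const Value *, bool, 8> CapturedCache;
//   if (isNonEscapingLocalObject(getUnderlyingObject(Ptr), &CapturedCache))
//     ...; // the underlying object is function-local and never escapes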