Diffstat (limited to 'llvm/lib')
-rw-r--r--  llvm/lib/Transforms/IPO/MemProfContextDisambiguation.cpp | 96
1 file changed, 12 insertions(+), 84 deletions(-)
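Note: the patch below removes the getMemProfCloneNum() helper, which recovered a clone number from the memprof clone suffix on a cloned function's name (a ".memprof.<N>"-style suffix; the exact string is defined by MemProfCloneSuffix in this file). As a reading aid only, here is a minimal standalone sketch of that parsing in plain C++, not part of the patch; cloneNumFromName and the sample mangled name are illustrative assumptions, and std::string/strtoul stand in for llvm::StringRef::getAsInteger.

    // Illustrative sketch only -- mirrors the parsing done by the removed
    // getMemProfCloneNum(), using std::string instead of llvm::StringRef.
    #include <cassert>
    #include <cstdlib>
    #include <iostream>
    #include <string>

    // Assumes Name has already been checked to carry a memprof clone suffix.
    static unsigned cloneNumFromName(const std::string &Name) {
      std::string::size_type Pos = Name.find_last_of('.');
      assert(Pos != std::string::npos && Pos > 0);
      // The digits after the final '.' are the clone number.
      return static_cast<unsigned>(
          std::strtoul(Name.c_str() + Pos + 1, nullptr, 10));
    }

    int main() {
      std::cout << cloneNumFromName("_Z3foov.memprof.3") << "\n"; // prints 3
    }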
diff --git a/llvm/lib/Transforms/IPO/MemProfContextDisambiguation.cpp b/llvm/lib/Transforms/IPO/MemProfContextDisambiguation.cpp
index 81de0a5..0164fcd 100644
--- a/llvm/lib/Transforms/IPO/MemProfContextDisambiguation.cpp
+++ b/llvm/lib/Transforms/IPO/MemProfContextDisambiguation.cpp
@@ -97,8 +97,6 @@ STATISTIC(MissingAllocForContextId,
"Number of missing alloc nodes for context ids");
STATISTIC(SkippedCallsCloning,
"Number of calls skipped during cloning due to unexpected operand");
-STATISTIC(MismatchedCloneAssignments,
- "Number of callsites assigned to call multiple non-matching clones");

static cl::opt<std::string> DotFilePathPrefix(
"memprof-dot-file-path-prefix", cl::init(""), cl::Hidden,
@@ -2062,20 +2060,6 @@ static bool isMemProfClone(const Function &F) {
return F.getName().contains(MemProfCloneSuffix);
}

-// Return the clone number of the given function by extracting it from the
-// memprof suffix. Assumes the caller has already confirmed it is a memprof
-// clone.
-static unsigned getMemProfCloneNum(const Function &F) {
- assert(isMemProfClone(F));
- auto Pos = F.getName().find_last_of('.');
- assert(Pos > 0);
- unsigned CloneNo;
- bool Err = F.getName().drop_front(Pos + 1).getAsInteger(10, CloneNo);
- assert(!Err);
- (void)Err;
- return CloneNo;
-}
-
std::string ModuleCallsiteContextGraph::getLabel(const Function *Func,
const Instruction *Call,
unsigned CloneNo) const {
@@ -3995,22 +3979,7 @@ IndexCallsiteContextGraph::getAllocationCallType(const CallInfo &Call) const {

void ModuleCallsiteContextGraph::updateCall(CallInfo &CallerCall,
FuncInfo CalleeFunc) {
- auto *CurF = cast<CallBase>(CallerCall.call())->getCalledFunction();
- auto NewCalleeCloneNo = CalleeFunc.cloneNo();
- if (isMemProfClone(*CurF)) {
- // If we already assigned this callsite to call a specific non-default
- // clone (i.e. not the original function which is clone 0), ensure that we
- // aren't trying to now update it to call a different clone, which is
- // indicative of a bug in the graph or function assignment.
- auto CurCalleeCloneNo = getMemProfCloneNum(*CurF);
- if (CurCalleeCloneNo != NewCalleeCloneNo) {
- LLVM_DEBUG(dbgs() << "Mismatch in call clone assignment: was "
- << CurCalleeCloneNo << " now " << NewCalleeCloneNo
- << "\n");
- MismatchedCloneAssignments++;
- }
- }
- if (NewCalleeCloneNo > 0)
+ if (CalleeFunc.cloneNo() > 0)
cast<CallBase>(CallerCall.call())->setCalledFunction(CalleeFunc.func());
OREGetter(CallerCall.call()->getFunction())
.emit(OptimizationRemark(DEBUG_TYPE, "MemprofCall", CallerCall.call())
@@ -4026,19 +3995,7 @@ void IndexCallsiteContextGraph::updateCall(CallInfo &CallerCall,
assert(CI &&
"Caller cannot be an allocation which should not have profiled calls");
assert(CI->Clones.size() > CallerCall.cloneNo());
- auto NewCalleeCloneNo = CalleeFunc.cloneNo();
- auto &CurCalleeCloneNo = CI->Clones[CallerCall.cloneNo()];
- // If we already assigned this callsite to call a specific non-default
- // clone (i.e. not the original function which is clone 0), ensure that we
- // aren't trying to now update it to call a different clone, which is
- // indicative of a bug in the graph or function assignment.
- if (CurCalleeCloneNo != 0 && CurCalleeCloneNo != NewCalleeCloneNo) {
- LLVM_DEBUG(dbgs() << "Mismatch in call clone assignment: was "
- << CurCalleeCloneNo << " now " << NewCalleeCloneNo
- << "\n");
- MismatchedCloneAssignments++;
- }
- CurCalleeCloneNo = NewCalleeCloneNo;
+ CI->Clones[CallerCall.cloneNo()] = CalleeFunc.cloneNo();
}

// Update the debug information attached to NewFunc to use the clone Name. Note
@@ -4746,19 +4703,6 @@ bool CallsiteContextGraph<DerivedCCG, FuncTy, CallTy>::assignFunctions() {
// where the callers were assigned to different clones of a function.
}

- auto FindFirstAvailFuncClone = [&]() {
- // Find first function in FuncClonesToCallMap without an assigned
- // clone of this callsite Node. We should always have one
- // available at this point due to the earlier cloning when the
- // FuncClonesToCallMap size was smaller than the clone number.
- for (auto &CF : FuncClonesToCallMap) {
- if (!FuncCloneToCurNodeCloneMap.count(CF.first))
- return CF.first;
- }
- assert(false &&
- "Expected an available func clone for this callsite clone");
- };
-
// See if we can use existing function clone. Walk through
// all caller edges to see if any have already been assigned to
// a clone of this callsite's function. If we can use it, do so. If not,
@@ -4875,7 +4819,16 @@ bool CallsiteContextGraph<DerivedCCG, FuncTy, CallTy>::assignFunctions() {
// clone of OrigFunc for another caller during this iteration over
// its caller edges.
if (!FuncCloneAssignedToCurCallsiteClone) {
- FuncCloneAssignedToCurCallsiteClone = FindFirstAvailFuncClone();
+ // Find first function in FuncClonesToCallMap without an assigned
+ // clone of this callsite Node. We should always have one
+ // available at this point due to the earlier cloning when the
+ // FuncClonesToCallMap size was smaller than the clone number.
+ for (auto &CF : FuncClonesToCallMap) {
+ if (!FuncCloneToCurNodeCloneMap.count(CF.first)) {
+ FuncCloneAssignedToCurCallsiteClone = CF.first;
+ break;
+ }
+ }
assert(FuncCloneAssignedToCurCallsiteClone);
// Assign Clone to FuncCloneAssignedToCurCallsiteClone
AssignCallsiteCloneToFuncClone(
@@ -4889,31 +4842,6 @@ bool CallsiteContextGraph<DerivedCCG, FuncTy, CallTy>::assignFunctions() {
FuncCloneAssignedToCurCallsiteClone);
}
}
- // If we didn't assign a function clone to this callsite clone yet, e.g.
- // none of its callers has a non-null call, do the assignment here.
- // We want to ensure that every callsite clone is assigned to some
- // function clone, so that the call updates below work as expected.
- // In particular if this is the original callsite, we want to ensure it
- // is assigned to the original function, otherwise the original function
- // will appear available for assignment to other callsite clones,
- // leading to unintended effects. For one, the unknown and not updated
- // callers will call into cloned paths leading to the wrong hints,
- // because they still call the original function (clone 0). Also,
- // because all callsites start out as being clone 0 by default, we can't
- // easily distinguish between callsites explicitly assigned to clone 0
- // vs those never assigned, which can lead to multiple updates of the
- // calls when invoking updateCall below, with mismatched clone values.
- // TODO: Add a flag to the callsite nodes or some other mechanism to
- // better distinguish and identify callsite clones that are not getting
- // assigned to function clones as expected.
- if (!FuncCloneAssignedToCurCallsiteClone) {
- FuncCloneAssignedToCurCallsiteClone = FindFirstAvailFuncClone();
- assert(FuncCloneAssignedToCurCallsiteClone &&
- "No available func clone for this callsite clone");
- AssignCallsiteCloneToFuncClone(
- FuncCloneAssignedToCurCallsiteClone, Call, Clone,
- /*IsAlloc=*/AllocationCallToContextNodeMap.contains(Call));
- }
}
if (VerifyCCG) {
checkNode<DerivedCCG, FuncTy, CallTy>(Node);