path: root/llvm/lib/ProfileData/SampleProf.cpp
author    Aaron Ballman <aaron@aaronballman.com>    2023-07-28 09:39:08 -0400
committer Aaron Ballman <aaron@aaronballman.com>    2023-07-28 09:41:38 -0400
commit    1a53b5c367b5ebf7d7f34afaa653ea337982f1d6 (patch)
tree      ab1cdc76af3ead1292fc8587669700271952b431 /llvm/lib/ProfileData/SampleProf.cpp
parent    7ca6b7693433955ab28cbc5225bcb16a19fd53f4 (diff)
Revert "[llvm-profdata] Refactoring Sample Profile Reader to increase FDO build speed using MD5 as key to Sample Profile map"
This reverts commit 66ba71d913df7f7cd75e92c0c4265932b7c93292. Addressing issues found by:
https://lab.llvm.org/buildbot/#/builders/245/builds/11732
https://lab.llvm.org/buildbot/#/builders/187/builds/12251
https://lab.llvm.org/buildbot/#/builders/186/builds/11099
https://lab.llvm.org/buildbot/#/builders/182/builds/6976
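
The reverted change keyed the sample profile map by an MD5 hash of the context; this revert restores keying by the SampleContext itself, which is why the hunks below drop the hash_code bookkeeping (e.g. OrigChildContextHash) and erase entries by context again. A minimal sketch of the two keying schemes, assuming C++17 and using std::unordered_map, std::string, and std::hash as stand-ins for LLVM's SampleProfileMap, SampleContext, and MD5 (placeholder names only, not the actual implementation):

// Illustration only: "Samples" stands in for FunctionSamples, a plain string
// context stands in for SampleContext, and std::hash stands in for MD5.
#include <cstdint>
#include <functional>
#include <string>
#include <unordered_map>

struct Samples {
  std::string Context; // with a hashed key, the context must live in the value
  uint64_t Total = 0;
};

int main() {
  // Scheme restored by this revert: the context itself is the map key, so
  // inserts and erases work directly on the context.
  std::unordered_map<std::string, Samples> ByContext;
  ByContext["main @ foo"].Total += 100;
  ByContext.erase("main @ foo");

  // Scheme being reverted: the key is a hash of the context. Lookups avoid
  // long string keys, but callers must recompute the hash for every erase
  // and keep the context stored redundantly inside the value.
  std::unordered_map<uint64_t, Samples> ByHash;
  std::string Ctx = "main @ foo";
  uint64_t Key = std::hash<std::string>{}(Ctx);
  ByHash[Key] = Samples{Ctx, 100};
  ByHash.erase(Key);
  return 0;
}

Both maps behave the same in this toy example; the difference shows up in the extra bookkeeping the hunks below remove and in how a hashed key can drift out of sync with the context stored in the value.
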
Diffstat (limited to 'llvm/lib/ProfileData/SampleProf.cpp')
-rw-r--r--  llvm/lib/ProfileData/SampleProf.cpp  74
1 file changed, 54 insertions, 20 deletions
diff --git a/llvm/lib/ProfileData/SampleProf.cpp b/llvm/lib/ProfileData/SampleProf.cpp
index b14dc01..fdae8a0 100644
--- a/llvm/lib/ProfileData/SampleProf.cpp
+++ b/llvm/lib/ProfileData/SampleProf.cpp
@@ -202,12 +202,13 @@ void sampleprof::sortFuncProfiles(
const SampleProfileMap &ProfileMap,
std::vector<NameFunctionSamples> &SortedProfiles) {
for (const auto &I : ProfileMap) {
- SortedProfiles.push_back(std::make_pair(I.first, &I.second));
+ assert(I.first == I.second.getContext() && "Inconsistent profile map");
+ SortedProfiles.push_back(std::make_pair(I.second.getContext(), &I.second));
}
llvm::stable_sort(SortedProfiles, [](const NameFunctionSamples &A,
const NameFunctionSamples &B) {
if (A.second->getTotalSamples() == B.second->getTotalSamples())
- return A.second->getContext() < B.second->getContext();
+ return A.first < B.first;
return A.second->getTotalSamples() > B.second->getTotalSamples();
});
}
@@ -356,13 +357,13 @@ void SampleContextTrimmer::trimAndMergeColdContextProfiles(
// Filter the cold profiles from ProfileMap and move them into a tmp
// container
- std::vector<std::pair<hash_code, const FunctionSamples *>> ColdProfiles;
+ std::vector<std::pair<SampleContext, const FunctionSamples *>> ColdProfiles;
for (const auto &I : ProfileMap) {
- const SampleContext &Context = I.second.getContext();
+ const SampleContext &Context = I.first;
const FunctionSamples &FunctionProfile = I.second;
if (FunctionProfile.getTotalSamples() < ColdCountThreshold &&
(!TrimBaseProfileOnly || Context.isBaseContext()))
- ColdProfiles.emplace_back(I.first, &I.second);
+ ColdProfiles.emplace_back(Context, &I.second);
}
// Remove the cold profile from ProfileMap and merge them into
@@ -373,8 +374,8 @@ void SampleContextTrimmer::trimAndMergeColdContextProfiles(
auto MergedContext = I.second->getContext().getContextFrames();
if (ColdContextFrameLength < MergedContext.size())
MergedContext = MergedContext.take_back(ColdContextFrameLength);
- // Need to set MergedProfile's context here otherwise it will be lost.
- FunctionSamples &MergedProfile = MergedProfileMap.Create(MergedContext);
+ auto Ret = MergedProfileMap.emplace(MergedContext, FunctionSamples());
+ FunctionSamples &MergedProfile = Ret.first->second;
MergedProfile.merge(*I.second);
}
ProfileMap.erase(I.first);
@@ -384,17 +385,57 @@ void SampleContextTrimmer::trimAndMergeColdContextProfiles(
for (const auto &I : MergedProfileMap) {
// Filter the cold merged profile
if (TrimColdContext && I.second.getTotalSamples() < ColdCountThreshold &&
- ProfileMap.find(I.second.getContext()) == ProfileMap.end())
+ ProfileMap.find(I.first) == ProfileMap.end())
continue;
// Merge the profile if the original profile exists, otherwise just insert
- // as a new profile. If inserted as a new profile from MergedProfileMap, it
- // already has the right context.
- auto Ret = ProfileMap.emplace(I.second.getContext(), FunctionSamples());
+ // as a new profile
+ auto Ret = ProfileMap.emplace(I.first, FunctionSamples());
+ if (Ret.second) {
+ SampleContext FContext(Ret.first->first, RawContext);
+ FunctionSamples &FProfile = Ret.first->second;
+ FProfile.setContext(FContext);
+ }
FunctionSamples &OrigProfile = Ret.first->second;
OrigProfile.merge(I.second);
}
}
+void SampleContextTrimmer::canonicalizeContextProfiles() {
+ std::vector<SampleContext> ProfilesToBeRemoved;
+ SampleProfileMap ProfilesToBeAdded;
+ for (auto &I : ProfileMap) {
+ FunctionSamples &FProfile = I.second;
+ SampleContext &Context = FProfile.getContext();
+ if (I.first == Context)
+ continue;
+
+ // Use the context string from FunctionSamples to update the keys of
+ // ProfileMap. They can get out of sync after context profile promotion
+ // through pre-inliner.
+ // Duplicate the function profile for later insertion to avoid a conflict
+ // caused by a context both to be added and to be removed. This could happen
+ // when a context is promoted to another context which is itself promoted to
+ // a third context. For example, given an original context A @ B @ C that
+ // is promoted to B @ C and the original context B @ C which is promoted to
+ // just C, adding B @ C to the profile map while removing the same context
+ // (but with a different profile) from the map can cause a conflict if they
+ // are not handled in the right order. This can be solved by just caching the
+ // profiles to be added.
+ auto Ret = ProfilesToBeAdded.emplace(Context, FProfile);
+ (void)Ret;
+ assert(Ret.second && "Context conflict during canonicalization");
+ ProfilesToBeRemoved.push_back(I.first);
+ }
+
+ for (auto &I : ProfilesToBeRemoved) {
+ ProfileMap.erase(I);
+ }
+
+ for (auto &I : ProfilesToBeAdded) {
+ ProfileMap.emplace(I.first, I.second);
+ }
+}
+
std::error_code ProfileSymbolList::write(raw_ostream &OS) {
// Sort the symbols before output. If doing compression,
// it will make the compression much more effective.
@@ -468,7 +509,6 @@ void ProfileConverter::convertCSProfiles(ProfileConverter::FrameNode &Node) {
if (!ChildProfile)
continue;
SampleContext OrigChildContext = ChildProfile->getContext();
- hash_code OrigChildContextHash = OrigChildContext.getHashCode();
// Reset the child context to be contextless.
ChildProfile->getContext().setName(OrigChildContext.getName());
if (NodeProfile) {
@@ -484,7 +524,6 @@ void ProfileConverter::convertCSProfiles(ProfileConverter::FrameNode &Node) {
NodeProfile->removeTotalSamples(Count);
}
- hash_code NewChildProfileHash(0);
// Separate child profile to be a standalone profile, if the current parent
// profile doesn't exist. This is a duplicating operation when the child
// profile is already incorporated into the parent which is still useful and
@@ -493,20 +532,15 @@ void ProfileConverter::convertCSProfiles(ProfileConverter::FrameNode &Node) {
// profile in the prelink phase for to-be-fully-inlined functions.
if (!NodeProfile) {
ProfileMap[ChildProfile->getContext()].merge(*ChildProfile);
- NewChildProfileHash = ChildProfile->getContext().getHashCode();
} else if (GenerateMergedBaseProfiles) {
ProfileMap[ChildProfile->getContext()].merge(*ChildProfile);
- NewChildProfileHash = ChildProfile->getContext().getHashCode();
auto &SamplesMap = NodeProfile->functionSamplesAt(ChildNode.CallSiteLoc);
SamplesMap[ChildProfile->getName().str()].getContext().setAttribute(
ContextDuplicatedIntoBase);
}
- // Remove the original child profile. Check if MD5 of new child profile
- // collides with old profile, in this case the [] operator already
- // overwritten it without the need of erase.
- if (NewChildProfileHash != OrigChildContextHash)
- ProfileMap.erase(OrigChildContextHash);
+ // Remove the original child profile.
+ ProfileMap.erase(OrigChildContext);
}
}
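
The restored canonicalizeContextProfiles above re-keys map entries whose stored context was changed by context promotion, and it avoids insert/erase conflicts by buffering the profiles to add and the keys to remove, then applying removals before insertions. A minimal sketch of that buffering pattern, with std::map and std::string standing in for SampleProfileMap and SampleContext (placeholder names, not LLVM's types):

// Illustration only: demonstrates the buffer-then-apply pattern used to
// re-key entries whose stored context was promoted.
#include <cassert>
#include <cstdint>
#include <map>
#include <string>
#include <vector>

struct Profile {
  std::string Context; // authoritative context, may diverge from the map key
  uint64_t Total = 0;
};

void canonicalize(std::map<std::string, Profile> &Map) {
  std::vector<std::string> ToRemove;
  std::map<std::string, Profile> ToAdd;
  for (auto &KV : Map) {
    if (KV.first == KV.second.Context)
      continue; // key already matches the stored context
    // Buffer instead of mutating Map in place: "B @ C" may need to be both
    // removed (its own profile was promoted to "C") and re-added (the profile
    // formerly keyed "A @ B @ C" was promoted to "B @ C").
    ToAdd.emplace(KV.second.Context, KV.second);
    ToRemove.push_back(KV.first);
  }
  for (const auto &Key : ToRemove)
    Map.erase(Key);
  for (auto &KV : ToAdd)
    Map.emplace(KV.first, KV.second);
}

int main() {
  std::map<std::string, Profile> Map;
  // Stored contexts were promoted (e.g. by a pre-inliner step), so they no
  // longer match the map keys.
  Map["A @ B @ C"] = {"B @ C", 10};
  Map["B @ C"] = {"C", 20};
  canonicalize(Map);
  assert(Map.count("B @ C") && Map["B @ C"].Total == 10);
  assert(Map.count("C") && Map["C"].Total == 20);
  return 0;
}

The promotion example (A @ B @ C becoming B @ C while B @ C becomes C) is the one given in the comment inside the diff; applying removals before insertions is what keeps the two re-keyed entries from colliding.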