author     Maxim Kuvyrkov <maxim.kuvyrkov@linaro.org>  2021-07-08 08:21:18 +0000
committer  Maxim Kuvyrkov <maxim.kuvyrkov@linaro.org>  2023-06-14 14:29:46 +0000
commit     5f8cc7f00cdf9127a9c16adecb98fdb2f3eeba82 (patch)
tree       20c677c88759761f377a7600cd9a8e6e5edbf9dd
parent     b713de1ce50e603d1559d1d4838318e245ef6706 (diff)
[contrib] validate_failures.py: Read in manifest when comparing build dirs
This allows comparison of two build directories with a manifest listing
known flaky tests on the side.

contrib/ChangeLog:

	* testsuite-management/validate_failures.py (GetResults): Update.
	(CompareBuilds): Read in manifest.
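In set terms the change works like the short sketch below (illustration only: PerformComparison in validate_failures.py does more than a plain set difference, and the test names are invented). The clean build's failures and the manifest's known flaky failures together form the baseline, so a flaky failure in the build under test is not reported as a new regression.

# Illustration only; PerformComparison does more than a plain set difference.
clean_build_failures = {'FAIL: stable.c'}
manifest_flaky = {'FAIL: flaky.c'}          # known flaky tests kept on the side
actual_failures = {'FAIL: stable.c', 'FAIL: flaky.c', 'FAIL: new.c'}

baseline = clean_build_failures | manifest_flaky   # what CompareBuilds now assembles
new_regressions = actual_failures - baseline
print(sorted(new_regressions))                     # ['FAIL: new.c']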
 contrib/testsuite-management/validate_failures.py | 14 +++++++++++---
 1 file changed, 11 insertions(+), 3 deletions(-)
diff --git a/contrib/testsuite-management/validate_failures.py b/contrib/testsuite-management/validate_failures.py
index 7351ba1..4733dd8 100755
--- a/contrib/testsuite-management/validate_failures.py
+++ b/contrib/testsuite-management/validate_failures.py
@@ -420,9 +420,10 @@ def CollectSumFiles(builddir):
   return sum_files
 
 
-def GetResults(sum_files):
+def GetResults(sum_files, build_results = None):
   """Collect all the test results from the given .sum files."""
-  build_results = ResultSet()
+  if build_results == None:
+    build_results = ResultSet()
   for sum_fname in sum_files:
     print('\t%s' % sum_fname)
     build_results |= ParseSummary(sum_fname)
@@ -567,8 +568,15 @@ def CompareBuilds():
   sum_files = GetSumFiles(_OPTIONS.results, _OPTIONS.build_dir)
   actual = GetResults(sum_files)
 
+  clean = ResultSet()
+
+  if _OPTIONS.manifest:
+    manifest_path = GetManifestPath(srcdir, target, True)
+    print('Manifest: %s' % manifest_path)
+    clean = GetManifest(manifest_path)
+
   clean_sum_files = GetSumFiles(_OPTIONS.results, _OPTIONS.clean_build)
-  clean = GetResults(clean_sum_files)
+  clean = GetResults(clean_sum_files, clean)
 
   return PerformComparison(clean, actual, _OPTIONS.ignore_missing_failures)
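The new GetResults contract can be sketched as follows, assuming only that ResultSet behaves like a set supporting |= (FakeResultSet and get_results below are stand-ins, not the script's code): when the caller passes a pre-seeded set, the per-file results are merged into it rather than into a fresh ResultSet, which is how CompareBuilds now folds the clean build's results on top of the manifest.

# Stand-in for ResultSet; the patch relies only on set-like |= behaviour.
class FakeResultSet(set):
  pass

def get_results(per_file_results, build_results=None):
  """Mirror the patched GetResults: merge into build_results, creating a
  fresh set only when the caller did not supply one."""
  if build_results is None:
    build_results = FakeResultSet()
  for results in per_file_results:
    build_results |= results
  return build_results

# Old call style: start from an empty set.
clean = get_results([{'FAIL: a.c', 'FAIL: b.c'}])

# New call style: seed with manifest entries, then merge the clean build's
# results on top, as the patched CompareBuilds does.
manifest = FakeResultSet({'FAIL: flaky.c'})
clean = get_results([{'FAIL: a.c'}], manifest)
assert set(clean) == {'FAIL: a.c', 'FAIL: flaky.c'}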