author     Clément Pit--Claudel <clement.pitclaudel@live.com>    2015-08-18 18:14:36 -0700
committer  Clément Pit--Claudel <clement.pitclaudel@live.com>    2015-08-18 18:14:36 -0700
commit     6a24e8c90ac467678dbf9aeb0d16c3d36c2dcf44 (patch)
tree       029fc5e9c18fe81738df293da7675dccb21430e5 /Test/runTests.py
parent     a019d797bd42866242e48ef00850f74e3bdc9241 (diff)
runTests: Report mean completion time of passed tests, excluding outliers
Diffstat (limited to 'Test/runTests.py')
-rw-r--r--    Test/runTests.py    23
1 file changed, 22 insertions, 1 deletion
diff --git a/Test/runTests.py b/Test/runTests.py
index 0232f81b..9f4fa5a5 100644
--- a/Test/runTests.py
+++ b/Test/runTests.py
@@ -6,6 +6,7 @@ import shutil
import argparse
import operator
import platform
+from math import floor, ceil
from enum import Enum
from time import time, strftime
from collections import defaultdict
@@ -169,6 +170,25 @@ class Test:
        return results

    @staticmethod
+    def mean_duration(results, margin):
+        durations = sorted(result.duration for result in results
+                           if result.status == TestStatus.PASSED)
+        if len(durations) >= 15:
+            lq = durations[floor(0.25 * len(durations))]
+            hq = durations[ceil(0.85 * len(durations))]
+            iqr = hq - lq
+            filtered = [d for d in durations if (lq - margin * iqr) <= d <= (hq + margin * iqr)]
+            if filtered:
+                avg = sum(durations) / len(durations)
+                trimmed_avg = sum(filtered) / len(filtered)
+                outliers_count = len(durations) - len(filtered)
+                msg = "mean completion time: {:.2f}s".format(avg)
+                if outliers_count > 0:
+                    msg += "; ignoring {} outliers: {:.2f}s".format(outliers_count, trimmed_avg)
+                return " ({})".format(msg)
+        return ""
+
+    @staticmethod
    def summarize(results):
        debug(Debug.INFO, "\nTesting complete ({} test(s))".format(len(results)))
@@ -193,7 +213,8 @@ class Test:
                    writer.write("{}\n".format(t.name))
            debug(Debug.REPORT, "Some tests failed: use [runTests.py failing.lst] to rerun the failing tests")

-        debug(Debug.REPORT, "Testing took {:.2f}s on {} thread(s)".format(results[0].suite_time, results[0].njobs))
+        debug(Debug.REPORT, "Testing took {:.2f}s on {} thread(s){}".format(
+            results[0].suite_time, results[0].njobs, Test.mean_duration(results, 1.5)))

    def run(self):
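
For illustration only (not part of the patch): a minimal standalone sketch of how the interquartile-range filter added above behaves, using a hypothetical set of durations and the same floor/ceil cut-offs and margin of 1.5 that the call site passes.

# Hypothetical sample: 15 durations with one obvious outlier.
from math import floor, ceil

durations = sorted([1.1, 1.1, 1.1, 1.2, 1.2, 1.2, 1.2, 1.2,
                    1.3, 1.3, 1.3, 1.4, 1.4, 1.5, 30.0])
margin = 1.5

lq = durations[floor(0.25 * len(durations))]   # lower cut-off value, as in the patch
hq = durations[ceil(0.85 * len(durations))]    # upper cut-off value, as in the patch
iqr = hq - lq
filtered = [d for d in durations
            if (lq - margin * iqr) <= d <= (hq + margin * iqr)]

print("mean: {:.2f}s".format(sum(durations) / len(durations)))     # 3.17s, skewed by the outlier
print("trimmed: {:.2f}s".format(sum(filtered) / len(filtered)))    # 1.25s, outlier excluded

With these hypothetical numbers, the patched report line would read "(mean completion time: 3.17s; ignoring 1 outliers: 1.25s)" appended to the existing "Testing took ... on ... thread(s)" message.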