author     Craig Tiller <ctiller@google.com>    2015-06-01 17:04:17 -0700
committer  Craig Tiller <ctiller@google.com>    2015-06-01 17:04:17 -0700
commit     8e0b08a33d819d0a2523ec439d545296e1ad2086 (patch)
tree       5613a61a51db9c5a5f7ecc5fec8cfd0eb0b2c436 /tools/run_tests
parent     3c1331f920569ba3a182e13321db26796d6e920e (diff)
parent     fa275a97b968060383fe27c26b1d85f08d9582f9 (diff)
Merge branch 'count-the-things' into we-dont-need-no-backup
Diffstat (limited to 'tools/run_tests')
-rwxr-xr-x  tools/run_tests/jobset.py     16
-rwxr-xr-x  tools/run_tests/run_tests.py  30
2 files changed, 42 insertions, 4 deletions
diff --git a/tools/run_tests/jobset.py b/tools/run_tests/jobset.py
index e2b03bd0ab..51d61db7f6 100755
--- a/tools/run_tests/jobset.py
+++ b/tools/run_tests/jobset.py
@@ -66,6 +66,7 @@ def shuffle_iteratable(it):
   # p as we take elements - this gives us a somewhat random set of values before
   # we've seen all the values, but starts producing values without having to
   # compute ALL of them at once, allowing tests to start a little earlier
+  LARGE_THRESHOLD = 1000
   nextit = []
   p = 1
   for val in it:
@@ -74,6 +75,17 @@ def shuffle_iteratable(it):
       yield val
     else:
       nextit.append(val)
+    # If the input iterates over a large (potentially infinite) number of
+    # values, we'd be in this loop for a long time (again, potentially
+    # forever). Reset "nextit" every so often so that, with an infinite
+    # iterator, it doesn't keep growing without ever being freed.
+    if len(nextit) > LARGE_THRESHOLD:
+      random.shuffle(nextit)
+      for val in nextit:
+        yield val
+      nextit = []
+      p = 1
+
   # after taking a random sampling, we shuffle the rest of the elements and
   # yield them
   random.shuffle(nextit)
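
For context, here is a rough standalone sketch of the buffering behaviour this hunk adds. The early-emit probability update is not visible in the hunk, so the doubling used here is an assumption, and chunked_shuffle is an illustrative name rather than the function in jobset.py.

import itertools
import random

LARGE_THRESHOLD = 1000  # same constant the patch introduces

def chunked_shuffle(it, threshold=LARGE_THRESHOLD):
    # Emit some values early with decreasing probability (assumed detail),
    # buffer the rest, and flush the buffer whenever it grows past the
    # threshold so an infinite input never accumulates unbounded state.
    nextit = []
    p = 1
    for val in it:
        if random.randint(0, p) == 0:
            p = min(p * 2, 100)
            yield val
        else:
            nextit.append(val)
        if len(nextit) > threshold:
            random.shuffle(nextit)
            for v in nextit:
                yield v
            nextit = []
            p = 1
    random.shuffle(nextit)  # whatever is left when a finite input ends
    for v in nextit:
        yield v

# Values keep flowing even from an endless source, and memory stays bounded:
sample = list(itertools.islice(chunked_shuffle(itertools.count()), 5))
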
@@ -339,13 +351,15 @@ def run(cmdlines,
         maxjobs=None,
         newline_on_success=False,
         travis=False,
+        infinite_runs=False,
         stop_on_failure=False,
         cache=None):
   js = Jobset(check_cancelled,
               maxjobs if maxjobs is not None else _DEFAULT_MAX_JOBS,
               newline_on_success, travis, stop_on_failure,
               cache if cache is not None else NoCache())
-  if not travis:
+  # We can't sort an infinite sequence of runs.
+  if not travis or infinite_runs:
     cmdlines = shuffle_iteratable(cmdlines)
   else:
     cmdlines = sorted(cmdlines, key=lambda x: x.shortname)
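
The extra "or infinite_runs" condition exists because sorting is eager. A minimal illustration, not taken from the patch, of why an endless command list has to stay on the lazy shuffle path:

import itertools

endless = itertools.repeat('some_test')           # stand-in for an infinite cmdline stream

first_three = list(itertools.islice(endless, 3))  # lazy consumption works fine

# sorted(endless, key=lambda x: x)                # would never return: sorted()
#                                                 # must exhaust the whole iterator first
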
diff --git a/tools/run_tests/run_tests.py b/tools/run_tests/run_tests.py
index f34a6c9c07..cb50e38ca1 100755
--- a/tools/run_tests/run_tests.py
+++ b/tools/run_tests/run_tests.py
@@ -330,7 +330,28 @@ argp.add_argument('-c', '--config',
                   choices=['all'] + sorted(_CONFIGS.keys()),
                   nargs='+',
                   default=_DEFAULT)
-argp.add_argument('-n', '--runs_per_test', default=1, type=int)
+
+def runs_per_test_type(arg_str):
+  """Auxiliary function to parse the "runs_per_test" flag.
+
+  Returns:
+    A positive integer or 0, the latter indicating an infinite number of
+    runs.
+
+  Raises:
+    argparse.ArgumentTypeError: Upon invalid input.
+  """
+  if arg_str == 'inf':
+    return 0
+  try:
+    n = int(arg_str)
+    if n <= 0: raise ValueError
+    return n
+  except:
+    msg = "'{}' isn't a positive integer or 'inf'".format(arg_str)
+    raise argparse.ArgumentTypeError(msg)
+argp.add_argument('-n', '--runs_per_test', default=1, type=runs_per_test_type,
+                  help='A positive integer or "inf". If "inf", all tests will run in an '
+                  'infinite loop. Especially useful in combination with "-f"')
 argp.add_argument('-r', '--regex', default='.*', type=str)
 argp.add_argument('-j', '--jobs', default=2 * multiprocessing.cpu_count(), type=int)
 argp.add_argument('-s', '--slowdown', default=1.0, type=float)
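
To see how the new argparse type behaves, here is a self-contained sketch that mirrors the function above (trimmed docstring, narrower except clause); the parser built here is illustrative, not the real run_tests.py parser.

import argparse

def runs_per_test_type(arg_str):
    if arg_str == 'inf':
        return 0                      # 0 is the sentinel for "run forever"
    try:
        n = int(arg_str)
        if n <= 0:
            raise ValueError
        return n
    except ValueError:
        msg = "'{}' isn't a positive integer or 'inf'".format(arg_str)
        raise argparse.ArgumentTypeError(msg)

parser = argparse.ArgumentParser()
parser.add_argument('-n', '--runs_per_test', default=1, type=runs_per_test_type)

print(parser.parse_args(['-n', '3']).runs_per_test)    # 3
print(parser.parse_args(['-n', 'inf']).runs_per_test)  # 0
print(parser.parse_args([]).runs_per_test)             # 1 (the default)
# parser.parse_args(['-n', '0'])  # argparse prints the error message and exits
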
@@ -456,11 +477,14 @@ def _build_and_run(check_cancelled, newline_on_success, travis, cache):
   antagonists = [subprocess.Popen(['tools/run_tests/antagonist.py'])
                  for _ in range(0, args.antagonists)]
   try:
+    infinite_runs = runs_per_test == 0
     # run all the tests
-    all_runs = itertools.chain.from_iterable(
-        itertools.repeat(one_run, runs_per_test))
+    runs_sequence = (itertools.repeat(one_run) if infinite_runs
+                     else itertools.repeat(one_run, runs_per_test))
+    all_runs = itertools.chain.from_iterable(runs_sequence)
     if not jobset.run(all_runs, check_cancelled,
                       newline_on_success=newline_on_success, travis=travis,
+                      infinite_runs=infinite_runs,
                       maxjobs=args.jobs,
                       stop_on_failure=args.stop_on_failure,
                       cache=cache):
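
The runs_sequence expression relies on itertools.repeat being endless when called without a count, while itertools.chain.from_iterable stays lazy. A small sketch, not from the patch, with a made-up one_run list:

import itertools

one_run = ['job_a', 'job_b']                      # hypothetical per-iteration job specs

finite = itertools.chain.from_iterable(itertools.repeat(one_run, 2))
print(list(finite))                               # ['job_a', 'job_b', 'job_a', 'job_b']

infinite = itertools.chain.from_iterable(itertools.repeat(one_run))
print(list(itertools.islice(infinite, 5)))        # keeps yielding without ever ending
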