Diffstat (limited to 'tools/run_tests')
-rwxr-xr-x  tools/run_tests/jobset.py     15
-rwxr-xr-x  tools/run_tests/run_tests.py  11
2 files changed, 19 insertions, 7 deletions
diff --git a/tools/run_tests/jobset.py b/tools/run_tests/jobset.py
index a58071ee35..e2b03bd0ab 100755
--- a/tools/run_tests/jobset.py
+++ b/tools/run_tests/jobset.py
@@ -234,7 +234,8 @@ class Job(object):
class Jobset(object):
"""Manages one run of jobs."""
- def __init__(self, check_cancelled, maxjobs, newline_on_success, travis, cache):
+ def __init__(self, check_cancelled, maxjobs, newline_on_success, travis,
+ stop_on_failure, cache):
self._running = set()
self._check_cancelled = check_cancelled
self._cancelled = False
@@ -244,6 +245,7 @@ class Jobset(object):
self._newline_on_success = newline_on_success
self._travis = travis
self._cache = cache
+ self._stop_on_failure = stop_on_failure
def start(self, spec):
"""Start a job. Return True on success, False on failure."""
@@ -280,8 +282,12 @@ class Jobset(object):
for job in self._running:
st = job.state(self._cache)
if st == _RUNNING: continue
- if st == _FAILURE: self._failures += 1
- if st == _KILLED: self._failures += 1
+ if st == _FAILURE or st == _KILLED:
+ self._failures += 1
+ if self._stop_on_failure:
+ self._cancelled = True
+ for job in self._running:
+ job.kill()
dead.add(job)
for job in dead:
self._completed += 1
@@ -333,10 +339,11 @@ def run(cmdlines,
maxjobs=None,
newline_on_success=False,
travis=False,
+ stop_on_failure=False,
cache=None):
js = Jobset(check_cancelled,
maxjobs if maxjobs is not None else _DEFAULT_MAX_JOBS,
- newline_on_success, travis,
+ newline_on_success, travis, stop_on_failure,
cache if cache is not None else NoCache())
if not travis:
cmdlines = shuffle_iteratable(cmdlines)
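
With this change a caller of jobset.run() can ask for the whole run to be cancelled on the first failure: as soon as a job finishes in _FAILURE or _KILLED state, the Jobset marks itself cancelled and kills every job still running. Below is a minimal sketch of such a caller, not part of this change; it assumes it is executed from tools/run_tests so that jobset is importable, passes the check_cancelled callable positionally the same way run_tests.py does further down, and makes up the job specs purely for illustration.

# Illustrative only: the specs and the lambda are invented for this example.
import jobset

specs = [jobset.JobSpec(['true']),
         jobset.JobSpec(['false']),        # the first failure cancels the rest
         jobset.JobSpec(['sleep', '30'])]  # killed instead of running to completion

success = jobset.run(specs,
                     lambda: False,        # external check_cancelled: never cancel
                     maxjobs=2,
                     stop_on_failure=True)
print('all jobs passed' if success else 'run stopped after a failure')
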
diff --git a/tools/run_tests/run_tests.py b/tools/run_tests/run_tests.py
index 2428e21ca3..7d5d6b43b3 100755
--- a/tools/run_tests/run_tests.py
+++ b/tools/run_tests/run_tests.py
@@ -349,6 +349,10 @@ argp.add_argument('-l', '--language',
choices=sorted(_LANGUAGES.keys()),
nargs='+',
default=sorted(_LANGUAGES.keys()))
+argp.add_argument('-S', '--stop_on_failure',
+ default=False,
+ action='store_const',
+ const=True)
argp.add_argument('-a', '--antagonists', default=0, type=int)
args = argp.parse_args()
@@ -376,11 +380,11 @@ else:
def make_jobspec(cfg, targets):
return jobset.JobSpec(['make',
'-j', '%d' % (multiprocessing.cpu_count() + 1),
- 'EXTRA_DEFINES=GRPC_TEST_SLOWDOWN_MACHINE_FACTOR=%f' %
+ 'EXTRA_DEFINES=GRPC_TEST_SLOWDOWN_MACHINE_FACTOR=%f' %
args.slowdown,
'CONFIG=%s' % cfg] + targets)
-build_steps = [make_jobspec(cfg,
+build_steps = [make_jobspec(cfg,
list(set(itertools.chain.from_iterable(
l.make_targets() for l in languages))))
for cfg in build_configs]
@@ -388,7 +392,7 @@ build_steps.extend(set(
jobset.JobSpec(cmdline, environ={'CONFIG': cfg})
for cfg in build_configs
for l in languages
- for cmdline in l.build_steps()))
+ for cmdline in l.build_steps()))
one_run = set(
spec
for config in run_configs
@@ -454,6 +458,7 @@ def _build_and_run(check_cancelled, newline_on_success, travis, cache):
if not jobset.run(all_runs, check_cancelled,
newline_on_success=newline_on_success, travis=travis,
maxjobs=args.jobs,
+ stop_on_failure=args.stop_on_failure,
cache=cache):
return 2
finally:
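
The run_tests.py side of the change is a small piece of flag plumbing: a new -S/--stop_on_failure argument that defaults to False, flips to True when passed, and is forwarded to jobset.run(). A stripped-down sketch of just that wiring follows; the parser here is a stand-in for run_tests.py's argp, shown only to illustrate the store_const pattern used above.

import argparse

# store_const with const=True and default=False behaves like store_true here:
# the value is False unless -S / --stop_on_failure appears on the command line.
argp = argparse.ArgumentParser()
argp.add_argument('-S', '--stop_on_failure',
                  default=False,
                  action='store_const',
                  const=True)

assert argp.parse_args([]).stop_on_failure is False
assert argp.parse_args(['-S']).stop_on_failure is True

# run_tests.py then threads the value through as
#   jobset.run(..., stop_on_failure=args.stop_on_failure, ...)
# so a failing test cancels the jobs that are still running.
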