author    murgatroid99 <michael.lumish@gmail.com>  2015-01-13 12:56:46 -0800
committer murgatroid99 <michael.lumish@gmail.com>  2015-01-13 12:56:46 -0800
commit    6160df919272ca4ff92d45e1afd4e1a588517b1f (patch)
tree      bbd3f1a2edb3940efe6ffd2e91d88617fcf9726b /tools
parent    27df8ae203b409575ba21c5cbe0f13b47ab66d2d (diff)
parent    abcc27a5a590c3adead5869030b776f248984857 (diff)
Merge remote-tracking branch 'upstream/master'
Diffstat (limited to 'tools')
-rwxr-xr-x  tools/clang-format/clang-format-all.sh |  8
-rwxr-xr-x  tools/gce_setup/grpc_docker.sh         | 19
-rwxr-xr-x  tools/run_tests/jobset.py              | 50
-rwxr-xr-x  tools/run_tests/run_tests.py           | 67
4 files changed, 115 insertions, 29 deletions
diff --git a/tools/clang-format/clang-format-all.sh b/tools/clang-format/clang-format-all.sh
new file mode 100755
index 0000000000..c9caea0d61
--- /dev/null
+++ b/tools/clang-format/clang-format-all.sh
@@ -0,0 +1,8 @@
+#!/bin/bash
+set -ex
+cd $(dirname $0)/../..
+for dir in src test include
+do
+ find $dir -name '*.c' -or -name '*.cc' -or -name '*.h' | xargs clang-format -i
+done
+
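
The new script reformats every .c, .cc, and .h file under src, test, and include in place. A minimal invocation sketch, assuming clang-format is already installed and on PATH (the script itself does not check for it):

# run via its path; the script cd's to the repository root relative to its own location
./tools/clang-format/clang-format-all.sh
# roughly equivalent to running, for each of src, test and include:
#   find <dir> -name '*.c' -or -name '*.cc' -or -name '*.h' | xargs clang-format -i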
diff --git a/tools/gce_setup/grpc_docker.sh b/tools/gce_setup/grpc_docker.sh
index d2614fbb13..c5d02d77a3 100755
--- a/tools/gce_setup/grpc_docker.sh
+++ b/tools/gce_setup/grpc_docker.sh
@@ -18,6 +18,20 @@
# * on startup, some of the docker images will be regenerated automatically
# - used grpc_update_image to update images via that instance
+
+# Creates the ssh key file expected by 'gcloud compute ssh' if it does not exist.
+#
+# Allows gcloud ssh commands to run on freshly started docker instances.
+_grpc_ensure_gcloud_ssh() {
+ local default_key_file="$HOME/.ssh/google_compute_engine"
+ [ -f $default_key_file ] || {
+ ssh-keygen -f $default_key_file -N '' > /dev/null || {
+ echo "could not precreate $default_key_file" 1>&2
+ return 1
+ }
+ }
+}
+
# Pushes a dockerfile dir to cloud storage.
#
# dockerfile is expected to be the parent directory of a number of directories, each
@@ -50,6 +64,7 @@ grpc_push_dockerfiles() {
# Adds the user to docker group on a GCE instance, and restarts the docker
# daemon
grpc_add_docker_user() {
+ _grpc_ensure_gcloud_ssh || return 1;
local host=$1
[[ -n $host ]] || {
echo "$FUNCNAME: missing arg: host" 1>&2
@@ -81,6 +96,7 @@ grpc_add_docker_user() {
# grpc_update_image gs://bucket/path/to/dockerfile parent \.
# image_label path/to/docker_dir docker_gce_instance [project] [zone]
grpc_update_image() {
+ _grpc_ensure_gcloud_ssh || return 1;
local gs_root_uri=$1
[[ -n $gs_root_uri ]] || {
echo "$FUNCNAME: missing arg: gs_root_uri" 1>&2
@@ -350,6 +366,8 @@ grpc_update_docker_images_args() {
#
# Updates the GCE docker instance <server_name>
grpc_update_docker_images() {
+ _grpc_ensure_gcloud_ssh || return 1;
+
# declare vars local so that they don't pollute the shell environment
+ # where this func is used.
local grpc_zone grpc_project dry_run # set by grpc_set_project_and_zone
@@ -469,6 +487,7 @@ grpc_launch_server() {
#
# --server_host=<svr_addr> --server_port=<svr_port> --test_case=<...>
grpc_interop_test() {
+ _grpc_ensure_gcloud_ssh || return 1;
# declare vars local so that they don't pollute the shell environment
+ # where this func is used.
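
The _grpc_ensure_gcloud_ssh guard added above now runs at the top of the instrumented functions (grpc_add_docker_user, grpc_update_image, grpc_update_docker_images, grpc_interop_test), so a freshly provisioned workstation no longer fails on its first 'gcloud compute ssh'. A rough usage sketch, assuming the script is sourced into an interactive bash shell and 'grpc-docker-instance' is a hypothetical instance name:

source tools/gce_setup/grpc_docker.sh
# the guard pre-creates ~/.ssh/google_compute_engine with an empty passphrase
# (ssh-keygen -N '') if it is missing, then the function proceeds as before
grpc_add_docker_user grpc-docker-instance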
diff --git a/tools/run_tests/jobset.py b/tools/run_tests/jobset.py
index 2ab95ef97c..17fb1d6924 100755
--- a/tools/run_tests/jobset.py
+++ b/tools/run_tests/jobset.py
@@ -39,6 +39,36 @@ _RUNNING = object()
_KILLED = object()
+_COLORS = {
+ 'red': 31,
+ 'green': 32,
+ 'yellow': 33,
+ }
+
+
+_BEGINNING_OF_LINE = '\x1b[0G'
+_CLEAR_LINE = '\x1b[2K'
+
+
+_TAG_COLOR = {
+ 'FAILED': 'red',
+ 'PASSED': 'green',
+ 'START': 'yellow',
+ 'WAITING': 'yellow',
+ }
+
+
+def message(tag, message, explanatory_text=None):
+ sys.stdout.write('%s%s\x1b[%dm%s\x1b[0m: %s%s' % (
+ _BEGINNING_OF_LINE,
+ _CLEAR_LINE,
+ _COLORS[_TAG_COLOR[tag]],
+ tag,
+ message,
+ '\n%s\n' % explanatory_text if explanatory_text is not None else ''))
+ sys.stdout.flush()
+
+
class Job(object):
"""Manages one job."""
@@ -49,9 +79,7 @@ class Job(object):
stderr=subprocess.STDOUT,
stdout=self._tempfile)
self._state = _RUNNING
- sys.stdout.write('\x1b[0G\x1b[2K\x1b[33mSTART\x1b[0m: %s' %
- self._cmdline)
- sys.stdout.flush()
+ message('START', self._cmdline)
def state(self):
"""Poll current state of the job. Prints messages at completion."""
@@ -60,16 +88,10 @@ class Job(object):
self._state = _FAILURE
self._tempfile.seek(0)
stdout = self._tempfile.read()
- sys.stdout.write('\x1b[0G\x1b[2K\x1b[31mFAILED\x1b[0m: %s'
- ' [ret=%d]\n'
- '%s\n' % (
- self._cmdline, self._process.returncode, stdout))
- sys.stdout.flush()
+ message('FAILED', '%s [ret=%d]' % (self._cmdline, self._process.returncode), stdout)
else:
self._state = _SUCCESS
- sys.stdout.write('\x1b[0G\x1b[2K\x1b[32mPASSED\x1b[0m: %s' %
- self._cmdline)
- sys.stdout.flush()
+ message('PASSED', '%s' % self._cmdline)
return self._state
def kill(self):
@@ -86,6 +108,7 @@ class Jobset(object):
self._check_cancelled = check_cancelled
self._cancelled = False
self._failures = 0
+ self._completed = 0
self._maxjobs = maxjobs
def start(self, cmdline):
@@ -107,8 +130,11 @@ class Jobset(object):
if st == _FAILURE: self._failures += 1
dead.add(job)
for job in dead:
+ self._completed += 1
self._running.remove(job)
- if not dead: return
+ if dead: return
+ message('WAITING', '%d jobs running, %d complete' % (
+ len(self._running), self._completed))
time.sleep(0.1)
def cancelled(self):
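
The new message() helper centralizes the ANSI escape sequences that were previously inlined at each call site: '\x1b[0G' returns the cursor to column 0, '\x1b[2K' clears the line, and '\x1b[3Nm' selects the tag color (31 red, 32 green, 33 yellow), so START and the new WAITING progress line ('%d jobs running, %d complete') are overwritten in place while FAILED output keeps its explanatory text. An illustrative way to see the effect in a bash terminal (\033 is the same escape byte as Python's '\x1b'; the binary name is hypothetical):

printf '\033[0G\033[2K\033[33mSTART\033[0m: bins/dbg/echo_test'
sleep 1
printf '\033[0G\033[2K\033[32mPASSED\033[0m: bins/dbg/echo_test\n'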
diff --git a/tools/run_tests/run_tests.py b/tools/run_tests/run_tests.py
index d678721274..6ab80d9045 100755
--- a/tools/run_tests/run_tests.py
+++ b/tools/run_tests/run_tests.py
@@ -11,15 +11,44 @@ import time
import jobset
import watch_dirs
-# flags required for make for each configuration
-_CONFIGS = ['dbg', 'opt', 'tsan', 'msan', 'asan']
+# SimpleConfig: just compile with CONFIG=config, and run the binary to test
+class SimpleConfig(object):
+ def __init__(self, config):
+ self.build_config = config
+
+ def run_command(self, binary):
+ return [binary]
+
+
+# ValgrindConfig: compile with some CONFIG=config, but use valgrind to run
+class ValgrindConfig(object):
+ def __init__(self, config):
+ self.build_config = config
+
+ def run_command(self, binary):
+ return ['valgrind', binary]
+
+
+# different configurations we can run under
+_CONFIGS = {
+ 'dbg': SimpleConfig('dbg'),
+ 'opt': SimpleConfig('opt'),
+ 'tsan': SimpleConfig('tsan'),
+ 'msan': SimpleConfig('msan'),
+ 'asan': SimpleConfig('asan'),
+ 'valgrind': ValgrindConfig('dbg'),
+ }
+
+
+_DEFAULT = ['dbg', 'opt']
+_MAKE_TEST_TARGETS = ['buildtests_c', 'buildtests_cxx']
# parse command line
argp = argparse.ArgumentParser(description='Run grpc tests.')
argp.add_argument('-c', '--config',
- choices=['all'] + _CONFIGS,
+ choices=['all'] + sorted(_CONFIGS.keys()),
nargs='+',
- default=['all'])
+ default=_DEFAULT)
argp.add_argument('-t', '--test-filter', nargs='*', default=['*'])
argp.add_argument('-n', '--runs_per_test', default=1, type=int)
argp.add_argument('-f', '--forever',
@@ -29,10 +58,11 @@ argp.add_argument('-f', '--forever',
args = argp.parse_args()
# grab config
-configs = [cfg
- for cfg in itertools.chain.from_iterable(
- _CONFIGS if x == 'all' else [x]
- for x in args.config)]
+run_configs = set(_CONFIGS[cfg]
+ for cfg in itertools.chain.from_iterable(
+ _CONFIGS.iterkeys() if x == 'all' else [x]
+ for x in args.config))
+build_configs = set(cfg.build_config for cfg in run_configs)
filters = args.test_filter
runs_per_test = args.runs_per_test
forever = args.forever
@@ -44,19 +74,22 @@ def _build_and_run(check_cancelled):
if not jobset.run(
(['make',
'-j', '%d' % (multiprocessing.cpu_count() + 1),
- 'buildtests_c',
+ target,
'CONFIG=%s' % cfg]
- for cfg in configs), check_cancelled, maxjobs=1):
+ for cfg in build_configs
+ for target in _MAKE_TEST_TARGETS),
+ check_cancelled, maxjobs=1):
sys.exit(1)
# run all the tests
- jobset.run(([x]
- for x in itertools.chain.from_iterable(
- itertools.chain.from_iterable(itertools.repeat(
- glob.glob('bins/%s/%s_test' % (config, filt)),
- runs_per_test))
- for config in configs
- for filt in filters)), check_cancelled)
+ jobset.run((
+ config.run_command(x)
+ for config in run_configs
+ for filt in filters
+ for x in itertools.chain.from_iterable(itertools.repeat(
+ glob.glob('bins/%s/%s_test' % (
+ config.build_config, filt)),
+ runs_per_test))), check_cancelled)
if forever:
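
With the config table in place, each entry pairs a make CONFIG with a command wrapper: SimpleConfig runs the test binary directly, while the 'valgrind' entry builds with CONFIG=dbg and prefixes every run with valgrind, and because build_configs is a set the dbg tree is only built once even when both are selected. A hedged invocation sketch from the repository root (the 'echo' filter is hypothetical; binaries are globbed as bins/<build_config>/<filter>_test):

# default run now covers dbg and opt only (the old default was 'all')
tools/run_tests/run_tests.py
# run the echo tests twice each, once directly under dbg and once under valgrind
tools/run_tests/run_tests.py -c dbg valgrind -t echo -n 2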