author     Alistair Veitch <aveitch@google.com>  2016-01-15 17:57:56 -0800
committer  Alistair Veitch <aveitch@google.com>  2016-01-15 17:57:56 -0800
commit     0923126cf85a83dab29bbd21f66c5df1f9a56bf8 (patch)
tree       27e80289f06ab43187a30ac58d8a44a5b616299e /tools/run_tests
parent     04de8c16d2f1fc4f9c97e460ea1f450872981d58 (diff)
parent     7e098a272ccb2b2ac06464064cb59fca857e2e2a (diff)
Merge branch 'master' into tag_set
Diffstat (limited to 'tools/run_tests')
-rwxr-xr-x  tools/run_tests/build_ruby.sh                |   2
-rwxr-xr-x  tools/run_tests/check_sources_and_headers.py |   4
-rwxr-xr-x  tools/run_tests/run_interop_tests.py         |   8
-rwxr-xr-x  tools/run_tests/run_ruby.sh                  |   2
-rwxr-xr-x  tools/run_tests/run_sanity.sh                |   2
-rwxr-xr-x  tools/run_tests/run_stress_tests.py          | 328
-rwxr-xr-x  tools/run_tests/run_tests.py                 |   7
-rw-r--r--  tools/run_tests/sources_and_headers.json     |  19
-rw-r--r--  tools/run_tests/tests.json                   |  17
9 files changed, 377 insertions, 12 deletions
diff --git a/tools/run_tests/build_ruby.sh b/tools/run_tests/build_ruby.sh
index 6d23c316c5..8acb40dc62 100755
--- a/tools/run_tests/build_ruby.sh
+++ b/tools/run_tests/build_ruby.sh
@@ -34,7 +34,7 @@ set -ex
export GRPC_CONFIG=${CONFIG:-opt}
# change to grpc's ruby directory
-cd $(dirname $0)/../../src/ruby
+cd $(dirname $0)/../..
rm -rf ./tmp
rake compile:grpc
diff --git a/tools/run_tests/check_sources_and_headers.py b/tools/run_tests/check_sources_and_headers.py
index cee32888dc..50574f44b0 100755
--- a/tools/run_tests/check_sources_and_headers.py
+++ b/tools/run_tests/check_sources_and_headers.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python2.7
-# Copyright 2015, Google Inc.
+# Copyright 2015-2016, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
@@ -80,4 +80,4 @@ for target in js:
target['name'], fn, m.group(1)))
errors += 1
-assert errors == 0
\ No newline at end of file
+assert errors == 0
diff --git a/tools/run_tests/run_interop_tests.py b/tools/run_tests/run_interop_tests.py
index 40bbe3cc3c..10566d6bc8 100755
--- a/tools/run_tests/run_interop_tests.py
+++ b/tools/run_tests/run_interop_tests.py
@@ -1,5 +1,5 @@
#!/usr/bin/env python
-# Copyright 2015, Google Inc.
+# Copyright 2015-2016, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
@@ -549,7 +549,7 @@ def aggregate_http2_results(stdout):
match = re.search(r'\{"cases[^\]]*\]\}', stdout)
if not match:
return None
-
+
results = json.loads(match.group(0))
skipped = 0
passed = 0
@@ -742,7 +742,7 @@ try:
for test_case in _HTTP2_TEST_CASES:
if server_name == "go":
# TODO(carl-mastrangelo): Reenable after https://github.com/grpc/grpc-go/issues/434
- continue
+ continue
test_job = cloud_to_cloud_jobspec(http2Interop,
test_case,
server_name,
@@ -771,7 +771,7 @@ try:
job[0].http2results = aggregate_http2_results(job[0].message)
report_utils.render_interop_html_report(
- set([str(l) for l in languages]), servers, _TEST_CASES, _AUTH_TEST_CASES,
+ set([str(l) for l in languages]), servers, _TEST_CASES, _AUTH_TEST_CASES,
_HTTP2_TEST_CASES, resultset, num_failures,
args.cloud_to_prod_auth or args.cloud_to_prod, args.http2_interop)
diff --git a/tools/run_tests/run_ruby.sh b/tools/run_tests/run_ruby.sh
index b82ce52af3..73a84ac361 100755
--- a/tools/run_tests/run_ruby.sh
+++ b/tools/run_tests/run_ruby.sh
@@ -31,6 +31,6 @@
set -ex
# change to grpc repo root
-cd $(dirname $0)/../../src/ruby
+cd $(dirname $0)/../..
rake
diff --git a/tools/run_tests/run_sanity.sh b/tools/run_tests/run_sanity.sh
index 690332daae..467f06edd7 100755
--- a/tools/run_tests/run_sanity.sh
+++ b/tools/run_tests/run_sanity.sh
@@ -60,4 +60,4 @@ fi
./tools/buildgen/generate_projects.sh
./tools/distrib/check_copyright.py
./tools/distrib/clang_format_code.sh
-
+./tools/distrib/check_trailing_newlines.sh
diff --git a/tools/run_tests/run_stress_tests.py b/tools/run_tests/run_stress_tests.py
new file mode 100755
index 0000000000..193ff2e53a
--- /dev/null
+++ b/tools/run_tests/run_stress_tests.py
@@ -0,0 +1,328 @@
+#!/usr/bin/env python
+# Copyright 2015-2016, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+"""Run stress test in C++"""
+
+import argparse
+import atexit
+import dockerjob
+import itertools
+import jobset
+import json
+import multiprocessing
+import os
+import re
+import subprocess
+import sys
+import tempfile
+import time
+import uuid
+
+# Docker doesn't clean up after itself, so we do it on exit.
+atexit.register(lambda: subprocess.call(['stty', 'echo']))
+
+ROOT = os.path.abspath(os.path.join(os.path.dirname(sys.argv[0]), '../..'))
+os.chdir(ROOT)
+
+_DEFAULT_SERVER_PORT = 8080
+_DEFAULT_METRICS_PORT = 8081
+_DEFAULT_TEST_CASES = 'empty_unary:20,large_unary:20,client_streaming:20,server_streaming:20,empty_stream:20'
+_DEFAULT_NUM_CHANNELS_PER_SERVER = 5
+_DEFAULT_NUM_STUBS_PER_CHANNEL = 10
+
+# 15 mins default
+_DEFAULT_TEST_DURATION_SECS = 900
+
+class CXXLanguage:
+
+ def __init__(self):
+ self.client_cwd = None
+ self.server_cwd = None
+ self.safename = 'cxx'
+
+ def client_cmd(self, args):
+ return ['bins/opt/stress_test'] + args
+
+ def server_cmd(self, args):
+ return ['bins/opt/interop_server'] + args
+
+ def global_env(self):
+ return {}
+
+ def __str__(self):
+ return 'c++'
+
+
+_LANGUAGES = {'c++': CXXLanguage(),}
+
+# languages supported as cloud_to_cloud servers
+_SERVERS = ['c++']
+
+DOCKER_WORKDIR_ROOT = '/var/local/git/grpc'
+
+
+def docker_run_cmdline(cmdline, image, docker_args=[], cwd=None, environ=None):
+ """Wraps given cmdline array to create 'docker run' cmdline from it."""
+ docker_cmdline = ['docker', 'run', '-i', '--rm=true']
+
+ # turn environ into -e docker args
+ if environ:
+ for k, v in environ.iteritems():
+ docker_cmdline += ['-e', '%s=%s' % (k, v)]
+
+ # set working directory
+ workdir = DOCKER_WORKDIR_ROOT
+ if cwd:
+ workdir = os.path.join(workdir, cwd)
+ docker_cmdline += ['-w', workdir]
+
+ docker_cmdline += docker_args + [image] + cmdline
+ return docker_cmdline
+
+
+def bash_login_cmdline(cmdline):
+ """Creates bash -l -c cmdline from args list."""
+ # Use login shell:
+ # * rvm and nvm require it
+ # * makes error messages clearer if executables are missing
+ return ['bash', '-l', '-c', ' '.join(cmdline)]
+
+
+def _job_kill_handler(job):
+ if job._spec.container_name:
+ dockerjob.docker_kill(job._spec.container_name)
+ # When the job times out and we decide to kill it,
+ # we need to wait a bit before restarting the job
+ # to prevent "container name already in use" error.
+ # TODO(jtattermusch): figure out a cleaner way to do this.
+ time.sleep(2)
+
+
+def cloud_to_cloud_jobspec(language,
+ test_cases,
+ server_addresses,
+ test_duration_secs,
+ num_channels_per_server,
+ num_stubs_per_channel,
+ metrics_port,
+ docker_image=None):
+ """Creates jobspec for cloud-to-cloud interop test"""
+ cmdline = bash_login_cmdline(language.client_cmd([
+ '--test_cases=%s' % test_cases, '--server_addresses=%s' %
+ server_addresses, '--test_duration_secs=%s' % test_duration_secs,
+ '--num_stubs_per_channel=%s' % num_stubs_per_channel,
+ '--num_channels_per_server=%s' % num_channels_per_server,
+ '--metrics_port=%s' % metrics_port
+ ]))
+ print cmdline
+ cwd = language.client_cwd
+ environ = language.global_env()
+ if docker_image:
+ container_name = dockerjob.random_name('interop_client_%s' %
+ language.safename)
+ cmdline = docker_run_cmdline(
+ cmdline,
+ image=docker_image,
+ environ=environ,
+ cwd=cwd,
+ docker_args=['--net=host', '--name', container_name])
+ cwd = None
+
+ test_job = jobset.JobSpec(cmdline=cmdline,
+ cwd=cwd,
+ environ=environ,
+ shortname='cloud_to_cloud:%s:%s_server:stress_test' % (
+ language, server_name),
+ timeout_seconds=test_duration_secs * 2,
+ flake_retries=0,
+ timeout_retries=0,
+ kill_handler=_job_kill_handler)
+ test_job.container_name = container_name
+ return test_job
+
+
+def server_jobspec(language, docker_image, test_duration_secs):
+ """Create jobspec for running a server"""
+ container_name = dockerjob.random_name('interop_server_%s' %
+ language.safename)
+ cmdline = bash_login_cmdline(language.server_cmd(['--port=%s' %
+ _DEFAULT_SERVER_PORT]))
+ environ = language.global_env()
+ docker_cmdline = docker_run_cmdline(
+ cmdline,
+ image=docker_image,
+ cwd=language.server_cwd,
+ environ=environ,
+ docker_args=['-p', str(_DEFAULT_SERVER_PORT), '--name', container_name])
+
+ server_job = jobset.JobSpec(cmdline=docker_cmdline,
+ environ=environ,
+ shortname='interop_server_%s' % language,
+ timeout_seconds=test_duration_secs * 3)
+ server_job.container_name = container_name
+ return server_job
+
+
+def build_interop_stress_image_jobspec(language, tag=None):
+ """Creates jobspec for building stress test docker image for a language"""
+ if not tag:
+ tag = 'grpc_interop_stress_%s:%s' % (language.safename, uuid.uuid4())
+ env = {'INTEROP_IMAGE': tag,
+ 'BASE_NAME': 'grpc_interop_stress_%s' % language.safename}
+ build_job = jobset.JobSpec(cmdline=['tools/jenkins/build_interop_stress_image.sh'],
+ environ=env,
+ shortname='build_docker_%s' % (language),
+ timeout_seconds=30 * 60)
+ build_job.tag = tag
+ return build_job
+
+argp = argparse.ArgumentParser(description='Run stress tests.')
+argp.add_argument('-l',
+ '--language',
+ choices=['all'] + sorted(_LANGUAGES),
+ nargs='+',
+ default=['all'],
+ help='Clients to run.')
+argp.add_argument('-j', '--jobs', default=multiprocessing.cpu_count(), type=int)
+argp.add_argument(
+ '-s',
+ '--server',
+ choices=['all'] + sorted(_SERVERS),
+ action='append',
+ help='Run cloud_to_cloud servers in a separate docker ' + 'image.',
+ default=[])
+argp.add_argument(
+ '--override_server',
+ action='append',
+ type=lambda kv: kv.split('='),
+ help=
+ 'Use servername=HOST:PORT to explicitly specify a server. E.g. '
+ 'csharp=localhost:50000',
+ default=[])
+argp.add_argument('--test_duration_secs',
+ help='The duration of the test in seconds',
+ default=_DEFAULT_TEST_DURATION_SECS)
+
+args = argp.parse_args()
+
+servers = set(
+ s
+ for s in itertools.chain.from_iterable(_SERVERS if x == 'all' else [x]
+ for x in args.server))
+
+languages = set(_LANGUAGES[l]
+ for l in itertools.chain.from_iterable(_LANGUAGES.iterkeys(
+ ) if x == 'all' else [x] for x in args.language))
+
+docker_images = {}
+# languages for which to build docker images
+languages_to_build = set(
+ _LANGUAGES[k]
+ for k in set([str(l) for l in languages] + [s for s in servers]))
+build_jobs = []
+for l in languages_to_build:
+ job = build_interop_stress_image_jobspec(l)
+ docker_images[str(l)] = job.tag
+ build_jobs.append(job)
+
+if build_jobs:
+ jobset.message('START', 'Building interop docker images.', do_newline=True)
+ num_failures, _ = jobset.run(build_jobs,
+ newline_on_success=True,
+ maxjobs=args.jobs)
+ if num_failures == 0:
+ jobset.message('SUCCESS',
+ 'All docker images built successfully.',
+ do_newline=True)
+ else:
+ jobset.message('FAILED',
+ 'Failed to build interop docker images.',
+ do_newline=True)
+ for image in docker_images.itervalues():
+ dockerjob.remove_image(image, skip_nonexistent=True)
+ sys.exit(1)
+
+# Start interop servers.
+server_jobs = {}
+server_addresses = {}
+try:
+ for s in servers:
+ lang = str(s)
+ spec = server_jobspec(_LANGUAGES[lang], docker_images.get(lang), args.test_duration_secs)
+ job = dockerjob.DockerJob(spec)
+ server_jobs[lang] = job
+ server_addresses[lang] = ('localhost',
+ job.mapped_port(_DEFAULT_SERVER_PORT))
+
+ jobs = []
+
+ for server in args.override_server:
+ server_name = server[0]
+ (server_host, server_port) = server[1].split(':')
+ server_addresses[server_name] = (server_host, server_port)
+
+ for server_name, server_address in server_addresses.iteritems():
+ (server_host, server_port) = server_address
+ for language in languages:
+ test_job = cloud_to_cloud_jobspec(
+ language,
+ _DEFAULT_TEST_CASES,
+ ('%s:%s' % (server_host, server_port)),
+ args.test_duration_secs,
+ _DEFAULT_NUM_CHANNELS_PER_SERVER,
+ _DEFAULT_NUM_STUBS_PER_CHANNEL,
+ _DEFAULT_METRICS_PORT,
+ docker_image=docker_images.get(str(language)))
+ jobs.append(test_job)
+
+ if not jobs:
+ print 'No jobs to run.'
+ for image in docker_images.itervalues():
+ dockerjob.remove_image(image, skip_nonexistent=True)
+ sys.exit(1)
+
+ num_failures, resultset = jobset.run(jobs,
+ newline_on_success=True,
+ maxjobs=args.jobs)
+ if num_failures:
+ jobset.message('FAILED', 'Some tests failed', do_newline=True)
+ else:
+ jobset.message('SUCCESS', 'All tests passed', do_newline=True)
+
+finally:
+ # Check if servers are still running.
+ for server, job in server_jobs.iteritems():
+ if not job.is_running():
+ print 'Server "%s" has exited prematurely.' % server
+
+ dockerjob.finish_jobs([j for j in server_jobs.itervalues()])
+
+ for image in docker_images.itervalues():
+ print 'Removing docker image %s' % image
+ dockerjob.remove_image(image)
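
For reference, a plausible way to drive the new run_stress_tests.py, using only the flags defined in its argparse block above; the concrete values (job count, duration, server name, host:port) are illustrative, not taken from the commit:

    # Build the stress-test docker image, start a dockerized C++ interop server,
    # and run the C++ stress clients against it for two minutes.
    tools/run_tests/run_stress_tests.py -l c++ -s c++ -j 4 --test_duration_secs 120

    # Alternatively, skip the dockerized server and point the clients at a server
    # that is already running; 8080 matches _DEFAULT_SERVER_PORT in the script.
    tools/run_tests/run_stress_tests.py -l c++ --override_server 'cpp_server=localhost:8080' --test_duration_secs 60
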
diff --git a/tools/run_tests/run_tests.py b/tools/run_tests/run_tests.py
index 0de20a634a..97962bd928 100755
--- a/tools/run_tests/run_tests.py
+++ b/tools/run_tests/run_tests.py
@@ -772,9 +772,10 @@ else:
return [jobset.JobSpec([os.getenv('MAKE', 'make'),
'-f', makefile,
'-j', '%d' % (multiprocessing.cpu_count() + 1),
- 'EXTRA_DEFINES=GRPC_TEST_SLOWDOWN_MACHINE_FACTOR=%f' %
- args.slowdown,
- 'CONFIG=%s' % cfg] + targets,
+ 'EXTRA_DEFINES=GRPC_TEST_SLOWDOWN_MACHINE_FACTOR=%f' % args.slowdown,
+ 'CONFIG=%s' % cfg] +
+ ([] if not args.travis else ['JENKINS_BUILD=1']) +
+ targets,
timeout_seconds=30*60)]
else:
return []
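
With this change, a --travis run would produce a make invocation roughly like the following; the makefile name, config, -j value (cpu_count() + 1 per the code above), and target are illustrative:

    make -f Makefile -j 9 EXTRA_DEFINES=GRPC_TEST_SLOWDOWN_MACHINE_FACTOR=1.000000 CONFIG=opt JENKINS_BUILD=1 buildtests_c

Without --travis, the JENKINS_BUILD=1 argument is simply omitted and the command is otherwise unchanged.
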
diff --git a/tools/run_tests/sources_and_headers.json b/tools/run_tests/sources_and_headers.json
index 4067863f89..4884adfcff 100644
--- a/tools/run_tests/sources_and_headers.json
+++ b/tools/run_tests/sources_and_headers.json
@@ -1518,6 +1518,23 @@
"grpc",
"grpc++",
"grpc++_test_util",
+ "grpc_test_util",
+ "qps"
+ ],
+ "headers": [],
+ "language": "c++",
+ "name": "generic_async_streaming_ping_pong_test",
+ "src": [
+ "test/cpp/qps/generic_async_streaming_ping_pong_test.cc"
+ ]
+ },
+ {
+ "deps": [
+ "gpr",
+ "gpr_test_util",
+ "grpc",
+ "grpc++",
+ "grpc++_test_util",
"grpc_test_util"
],
"headers": [],
@@ -3227,6 +3244,7 @@
"src/core/surface/server_chttp2.c",
"src/core/surface/server_create.c",
"src/core/surface/surface_trace.h",
+ "src/core/surface/validate_metadata.c",
"src/core/surface/version.c",
"src/core/transport/byte_stream.c",
"src/core/transport/byte_stream.h",
@@ -3699,6 +3717,7 @@
"src/core/surface/server_chttp2.c",
"src/core/surface/server_create.c",
"src/core/surface/surface_trace.h",
+ "src/core/surface/validate_metadata.c",
"src/core/surface/version.c",
"src/core/transport/byte_stream.c",
"src/core/transport/byte_stream.h",
diff --git a/tools/run_tests/tests.json b/tools/run_tests/tests.json
index 6be5ee5562..950e6203a0 100644
--- a/tools/run_tests/tests.json
+++ b/tools/run_tests/tests.json
@@ -1847,6 +1847,23 @@
"ci_platforms": [
"linux",
"mac",
+ "posix"
+ ],
+ "exclude_configs": [],
+ "flaky": false,
+ "language": "c++",
+ "name": "generic_async_streaming_ping_pong_test",
+ "platforms": [
+ "linux",
+ "mac",
+ "posix"
+ ]
+ },
+ {
+ "args": [],
+ "ci_platforms": [
+ "linux",
+ "mac",
"posix",
"windows"
],