author    Craig Tiller <ctiller@google.com>    2016-10-10 10:16:35 -0700
committer Craig Tiller <ctiller@google.com>    2016-10-10 10:16:35 -0700
commit    fdc2b10dce93c806edc95eb76fdca38625c47d06 (patch)
tree      4c46ed34203da7750b15d7c8837c4dd0da9234d1 /tools/run_tests
parent    46db0d29061d49d68c3a0e490e036d2a076d59f9 (diff)
parent    062ba7b8baefdc76f74fffb9aa3e2134ba047ea6 (diff)
Merge github.com:grpc/grpc into direct-calls
Diffstat (limited to 'tools/run_tests')
-rwxr-xr-x  tools/run_tests/dockerize/build_docker_and_run_tests.sh    20
-rwxr-xr-x  tools/run_tests/dockerize/docker_run_tests.sh                3
-rwxr-xr-x  tools/run_tests/run_tests.py                                 2
-rwxr-xr-x  tools/run_tests/run_tests_in_workspace.sh                   46
-rwxr-xr-x  tools/run_tests/run_tests_matrix.py                        282
-rw-r--r--  tools/run_tests/sources_and_headers.json                     6
6 files changed, 344 insertions, 15 deletions
diff --git a/tools/run_tests/dockerize/build_docker_and_run_tests.sh b/tools/run_tests/dockerize/build_docker_and_run_tests.sh
index c2ea6f2c6e..c3219c533d 100755
--- a/tools/run_tests/dockerize/build_docker_and_run_tests.sh
+++ b/tools/run_tests/dockerize/build_docker_and_run_tests.sh
@@ -44,9 +44,6 @@ mkdir -p /tmp/ccache
# its cache location now that --download-cache is deprecated).
mkdir -p /tmp/xdg-cache-home
-# Create a local branch so the child Docker script won't complain
-git branch -f jenkins-docker
-
# Inputs
# DOCKERFILE_DIR - Directory in which Dockerfile file is located.
# DOCKER_RUN_SCRIPT - Script to run under docker (relative to grpc repo root)
@@ -64,6 +61,7 @@ CONTAINER_NAME="run_tests_$(uuidgen)"
docker_instance_git_root=/var/local/jenkins/grpc
# Run tests inside docker
+DOCKER_EXIT_CODE=0
docker run \
-e "RUN_TESTS_COMMAND=$RUN_TESTS_COMMAND" \
-e "config=$config" \
@@ -84,16 +82,16 @@ docker run \
-w /var/local/git/grpc \
--name=$CONTAINER_NAME \
$DOCKER_IMAGE_NAME \
- bash -l "/var/local/jenkins/grpc/$DOCKER_RUN_SCRIPT" || DOCKER_FAILED="true"
+ bash -l "/var/local/jenkins/grpc/$DOCKER_RUN_SCRIPT" || DOCKER_EXIT_CODE=$?
-docker cp "$CONTAINER_NAME:/var/local/git/grpc/reports.zip" $git_root || true
-unzip -o $git_root/reports.zip -d $git_root || true
-rm -f reports.zip
+# use unique name for reports.zip to prevent clash between concurrent
+# run_tests.py runs
+TEMP_REPORTS_ZIP=`mktemp`
+docker cp "$CONTAINER_NAME:/var/local/git/grpc/reports.zip" ${TEMP_REPORTS_ZIP} || true
+unzip -o ${TEMP_REPORTS_ZIP} -d $git_root || true
+rm -f ${TEMP_REPORTS_ZIP}
# remove the container, possibly killing it first
docker rm -f $CONTAINER_NAME || true
-if [ "$DOCKER_FAILED" != "" ] && [ "$XML_REPORT" == "" ]
-then
- exit 1
-fi
+exit $DOCKER_EXIT_CODE
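
The hunk above replaces the DOCKER_FAILED flag with the usual shell idiom of recording a command's exit code and propagating it only after cleanup has run. A minimal sketch of that idiom, with placeholder command names that are not part of the patch:

EXIT_CODE=0
some_long_command || EXIT_CODE=$?   # remember the failure instead of aborting a `set -e` script
cleanup_command || true             # best-effort cleanup, errors ignored
exit $EXIT_CODE                     # report the original result to the caller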
diff --git a/tools/run_tests/dockerize/docker_run_tests.sh b/tools/run_tests/dockerize/docker_run_tests.sh
index 8c6143d24f..ef02d26625 100755
--- a/tools/run_tests/dockerize/docker_run_tests.sh
+++ b/tools/run_tests/dockerize/docker_run_tests.sh
@@ -63,6 +63,7 @@ echo '</body></html>' >> index.html
cd ..
zip -r reports.zip reports
-find . -name report.xml | xargs zip reports.zip
+find . -name report.xml | xargs -r zip reports.zip
+find . -name 'report_*.xml' | xargs -r zip reports.zip
exit $exit_code
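
The `-r` flag (GNU `--no-run-if-empty`) keeps `zip` from being invoked with an empty file list when `find` matches nothing, which would otherwise typically make zip exit non-zero and fail the packaging step. A sketch of the contrast, not taken from the patch:

find . -name 'report_*.xml' | xargs zip reports.zip      # runs zip even with no matches; zip then fails with "Nothing to do"
find . -name 'report_*.xml' | xargs -r zip reports.zip   # skips zip entirely when the input is empty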
diff --git a/tools/run_tests/run_tests.py b/tools/run_tests/run_tests.py
index 168331602c..a6f3d405dc 100755
--- a/tools/run_tests/run_tests.py
+++ b/tools/run_tests/run_tests.py
@@ -1423,7 +1423,7 @@ else:
exit_code = 0
if BuildAndRunError.BUILD in errors:
exit_code |= 1
- if BuildAndRunError.TEST in errors and not args.travis:
+ if BuildAndRunError.TEST in errors:
exit_code |= 2
if BuildAndRunError.POST_TEST in errors:
exit_code |= 4
diff --git a/tools/run_tests/run_tests_in_workspace.sh b/tools/run_tests/run_tests_in_workspace.sh
new file mode 100755
index 0000000000..98ef3566db
--- /dev/null
+++ b/tools/run_tests/run_tests_in_workspace.sh
@@ -0,0 +1,46 @@
+#!/bin/bash
+# Copyright 2015, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+# Create a workspace in a subdirectory to allow running multiple builds in isolation.
+# WORKSPACE_NAME env variable needs to contain name of the workspace to create.
+# All cmdline args will be passed to run_tests.py script (executed in the
+# newly created workspace)
+set -ex
+
+cd $(dirname $0)/../..
+
+rm -rf "${WORKSPACE_NAME}"
+# TODO(jtattermusch): clone --recursive fetches the submodules from github.
+# Try avoiding that to save time and network capacity.
+git clone --recursive . "${WORKSPACE_NAME}"
+
+echo "Running run_tests.py in workspace ${WORKSPACE_NAME}"
+python "${WORKSPACE_NAME}/tools/run_tests/run_tests.py" $@
+
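The new helper reads the workspace name from the WORKSPACE_NAME environment variable and forwards every command-line argument to run_tests.py inside the fresh clone. A hypothetical manual invocation (run_tests_matrix.py below normally constructs this call itself, so the workspace name and flags here are illustrative):

WORKSPACE_NAME=workspace_c_macos_dbg \
  tools/run_tests/run_tests_in_workspace.sh -l c -c dbg -t -x ../report_c_macos_dbg.xml
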
diff --git a/tools/run_tests/run_tests_matrix.py b/tools/run_tests/run_tests_matrix.py
new file mode 100755
index 0000000000..a94f9cfef5
--- /dev/null
+++ b/tools/run_tests/run_tests_matrix.py
@@ -0,0 +1,282 @@
+#!/usr/bin/env python2.7
+# Copyright 2015, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""Run test matrix."""
+
+import argparse
+import jobset
+import multiprocessing
+import os
+import report_utils
+import sys
+
+_ROOT = os.path.abspath(os.path.join(os.path.dirname(sys.argv[0]), '../..'))
+os.chdir(_ROOT)
+
+# Set the timeout high to allow enough time for sanitizers and pre-building
+# clang docker.
+_RUNTESTS_TIMEOUT = 2*60*60
+
+# Number of jobs assigned to each run_tests.py instance
+_INNER_JOBS = 2
+
+
+def _docker_jobspec(name, runtests_args=[]):
+ """Run a single instance of run_tests.py in a docker container"""
+ test_job = jobset.JobSpec(
+ cmdline=['python', 'tools/run_tests/run_tests.py',
+ '--use_docker',
+ '-t',
+ '-j', str(_INNER_JOBS),
+ '-x', 'report_%s.xml' % name] + runtests_args,
+ shortname='run_tests_%s' % name,
+ timeout_seconds=_RUNTESTS_TIMEOUT)
+ return test_job
+
+
+def _workspace_jobspec(name, runtests_args=[], workspace_name=None):
+ """Run a single instance of run_tests.py in a separate workspace"""
+ if not workspace_name:
+ workspace_name = 'workspace_%s' % name
+ env = {'WORKSPACE_NAME': workspace_name}
+ test_job = jobset.JobSpec(
+ cmdline=['tools/run_tests/run_tests_in_workspace.sh',
+ '-t',
+ '-j', str(_INNER_JOBS),
+ '-x', '../report_%s.xml' % name] + runtests_args,
+ environ=env,
+ shortname='run_tests_%s' % name,
+ timeout_seconds=_RUNTESTS_TIMEOUT)
+ return test_job
+
+
+def _generate_jobs(languages, configs, platforms,
+ arch=None, compiler=None,
+ labels=[], extra_args=[]):
+ result = []
+ for language in languages:
+ for platform in platforms:
+ for config in configs:
+ name = '%s_%s_%s' % (language, platform, config)
+ runtests_args = ['-l', language,
+ '-c', config]
+ if arch or compiler:
+ name += '_%s_%s' % (arch, compiler)
+ runtests_args += ['--arch', arch,
+ '--compiler', compiler]
+
+ runtests_args += extra_args
+ if platform == 'linux':
+ job = _docker_jobspec(name=name, runtests_args=runtests_args)
+ else:
+ job = _workspace_jobspec(name=name, runtests_args=runtests_args)
+
+ job.labels = [platform, config, language] + labels
+ result.append(job)
+ return result
+
+
+def _create_test_jobs(extra_args=[]):
+ test_jobs = []
+ # supported on linux only
+ test_jobs += _generate_jobs(languages=['sanity', 'php7'],
+ configs=['dbg', 'opt'],
+ platforms=['linux'],
+ labels=['basictests'],
+ extra_args=extra_args)
+
+ # supported on all platforms.
+ test_jobs += _generate_jobs(languages=['c', 'csharp', 'node', 'python'],
+ configs=['dbg', 'opt'],
+ platforms=['linux', 'macos', 'windows'],
+ labels=['basictests'],
+ extra_args=extra_args)
+
+ # supported on linux and mac.
+ test_jobs += _generate_jobs(languages=['c++', 'ruby', 'php'],
+ configs=['dbg', 'opt'],
+ platforms=['linux', 'macos'],
+ labels=['basictests'],
+ extra_args=extra_args)
+
+ # supported on mac only.
+ test_jobs += _generate_jobs(languages=['objc'],
+ configs=['dbg', 'opt'],
+ platforms=['macos'],
+ labels=['basictests'],
+ extra_args=extra_args)
+
+ # sanitizers
+ test_jobs += _generate_jobs(languages=['c'],
+ configs=['msan', 'asan', 'tsan'],
+ platforms=['linux'],
+ labels=['sanitizers'],
+ extra_args=extra_args)
+ test_jobs += _generate_jobs(languages=['c++'],
+ configs=['asan', 'tsan'],
+ platforms=['linux'],
+ labels=['sanitizers'],
+ extra_args=extra_args)
+ return test_jobs
+
+
+def _create_portability_test_jobs(extra_args=[]):
+ test_jobs = []
+ # portability C x86
+ test_jobs += _generate_jobs(languages=['c'],
+ configs=['dbg'],
+ platforms=['linux'],
+ arch='x86',
+ compiler='default',
+ labels=['portability'],
+ extra_args=extra_args)
+
+ # portability C and C++ on x64
+ for compiler in ['gcc4.4', 'gcc4.6', 'gcc5.3',
+ 'clang3.5', 'clang3.6', 'clang3.7']:
+ test_jobs += _generate_jobs(languages=['c', 'c++'],
+ configs=['dbg'],
+ platforms=['linux'],
+ arch='x64',
+ compiler=compiler,
+ labels=['portability'],
+ extra_args=extra_args)
+
+ # portability C on Windows
+ for arch in ['x86', 'x64']:
+ for compiler in ['vs2013', 'vs2015']:
+ test_jobs += _generate_jobs(languages=['c'],
+ configs=['dbg'],
+ platforms=['windows'],
+ arch=arch,
+ compiler=compiler,
+ labels=['portability'],
+ extra_args=extra_args)
+
+ test_jobs += _generate_jobs(languages=['python'],
+ configs=['dbg'],
+ platforms=['linux'],
+ arch='default',
+ compiler='python3.4',
+ labels=['portability'],
+ extra_args=extra_args)
+
+ test_jobs += _generate_jobs(languages=['csharp'],
+ configs=['dbg'],
+ platforms=['linux'],
+ arch='default',
+ compiler='coreclr',
+ labels=['portability'],
+ extra_args=extra_args)
+ return test_jobs
+
+
+def _allowed_labels():
+ """Returns a list of existing job labels."""
+ all_labels = set()
+ for job in _create_test_jobs() + _create_portability_test_jobs():
+ for label in job.labels:
+ all_labels.add(label)
+ return sorted(all_labels)
+
+
+argp = argparse.ArgumentParser(description='Run a matrix of run_tests.py tests.')
+argp.add_argument('-j', '--jobs',
+ default=multiprocessing.cpu_count()/_INNER_JOBS,
+ type=int,
+ help='Number of concurrent run_tests.py instances.')
+argp.add_argument('-f', '--filter',
+ choices=_allowed_labels(),
+ nargs='+',
+ default=[],
+ help='Filter targets to run by label with AND semantics.')
+argp.add_argument('--build_only',
+ default=False,
+ action='store_const',
+ const=True,
+ help='Pass --build_only flag to run_tests.py instances.')
+argp.add_argument('--force_default_poller', default=False, action='store_const', const=True,
+ help='Pass --force_default_poller to run_tests.py instances.')
+argp.add_argument('--dry_run',
+ default=False,
+ action='store_const',
+ const=True,
+ help='Only print what would be run.')
+args = argp.parse_args()
+
+extra_args = []
+if args.build_only:
+ extra_args.append('--build_only')
+if args.force_default_poller:
+ extra_args.append('--force_default_poller')
+
+all_jobs = _create_test_jobs(extra_args=extra_args) + _create_portability_test_jobs(extra_args=extra_args)
+
+jobs = []
+for job in all_jobs:
+ if not args.filter or all(filter in job.labels for filter in args.filter):
+ jobs.append(job)
+
+if not jobs:
+ jobset.message('FAILED', 'No test suites match given criteria.',
+ do_newline=True)
+ sys.exit(1)
+
+print('IMPORTANT: The changes you are testing need to be locally committed')
+print('because only the committed changes in the current branch will be')
+print('copied to the docker environment or into subworkspaces.')
+
+print
+print 'Will run these tests:'
+for job in jobs:
+ if args.dry_run:
+ print ' %s: "%s"' % (job.shortname, ' '.join(job.cmdline))
+ else:
+ print ' %s' % job.shortname
+print
+
+if args.dry_run:
+ print '--dry_run was used, exiting'
+ sys.exit(1)
+
+jobset.message('START', 'Running test matrix.', do_newline=True)
+num_failures, resultset = jobset.run(jobs,
+ newline_on_success=True,
+ travis=True,
+ maxjobs=args.jobs)
+report_utils.render_junit_xml_report(resultset, 'report.xml')
+
+if num_failures == 0:
+ jobset.message('SUCCESS', 'All run_tests.py instances finished successfully.',
+ do_newline=True)
+else:
+ jobset.message('FAILED', 'Some run_tests.py instances have failed.',
+ do_newline=True)
+ sys.exit(1)
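
Since run_tests_matrix.py is executable and defines its flags above, a couple of example invocations can be built from them; the label combination and job count here are illustrative only:

# preview which jobs a label filter selects, without running anything
tools/run_tests/run_tests_matrix.py -f portability linux --dry_run
# run the basic test suites, four concurrent run_tests.py instances
tools/run_tests/run_tests_matrix.py -f basictests -j 4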
diff --git a/tools/run_tests/sources_and_headers.json b/tools/run_tests/sources_and_headers.json
index 51d194bd80..cbed0fc036 100644
--- a/tools/run_tests/sources_and_headers.json
+++ b/tools/run_tests/sources_and_headers.json
@@ -5010,8 +5010,7 @@
{
"deps": [
"grpc++",
- "grpc++_reflection",
- "grpc++_test_config"
+ "grpc++_reflection"
],
"headers": [
"test/cpp/util/cli_call.h",
@@ -6914,6 +6913,7 @@
],
"headers": [
"test/core/end2end/cq_verifier.h",
+ "test/core/end2end/fake_resolver.h",
"test/core/end2end/fixtures/http_proxy.h",
"test/core/end2end/fixtures/proxy.h",
"test/core/iomgr/endpoint_tests.h",
@@ -6932,6 +6932,8 @@
"src": [
"test/core/end2end/cq_verifier.c",
"test/core/end2end/cq_verifier.h",
+ "test/core/end2end/fake_resolver.c",
+ "test/core/end2end/fake_resolver.h",
"test/core/end2end/fixtures/http_proxy.c",
"test/core/end2end/fixtures/http_proxy.h",
"test/core/end2end/fixtures/proxy.c",