aboutsummaryrefslogtreecommitdiffhomepage
path: root/tools
diff options
context:
space:
mode:
Diffstat (limited to 'tools')
-rwxr-xr-xtools/buildgen/build-cleaner.py4
-rw-r--r--tools/dockerfile/grpc_cxx/Dockerfile2
-rw-r--r--tools/dockerfile/grpc_java/Dockerfile10
-rw-r--r--tools/dockerfile/grpc_java_base/Dockerfile43
-rw-r--r--tools/dockerfile/grpc_node/Dockerfile14
-rw-r--r--tools/dockerfile/grpc_node_base/Dockerfile22
-rw-r--r--tools/dockerfile/grpc_ruby/Dockerfile10
-rw-r--r--tools/dockerfile/grpc_ruby_base/Dockerfile1
-rwxr-xr-xtools/gce_setup/grpc_docker.sh144
-rwxr-xr-xtools/gce_setup/interop_test_runner.sh4
-rwxr-xr-xtools/gce_setup/new_grpc_docker_builder.sh1
-rwxr-xr-xtools/gce_setup/shared_startup_funcs.sh33
-rwxr-xr-xtools/run_tests/build_python.sh3
-rwxr-xr-xtools/run_tests/jobset.py67
-rwxr-xr-xtools/run_tests/run_tests.py95
-rw-r--r--tools/run_tests/tests.json16
16 files changed, 354 insertions, 115 deletions
diff --git a/tools/buildgen/build-cleaner.py b/tools/buildgen/build-cleaner.py
index f9307360c3..4992beb897 100755
--- a/tools/buildgen/build-cleaner.py
+++ b/tools/buildgen/build-cleaner.py
@@ -33,9 +33,9 @@ def clean_elem(indict):
for name in ['public_headers', 'headers', 'src']:
if name not in indict: continue
inlist = indict[name]
- protos = set(x for x in inlist if os.path.splitext(x)[1] == '.proto')
+ protos = list(x for x in inlist if os.path.splitext(x)[1] == '.proto')
others = set(x for x in inlist if x not in protos)
- indict[name] = sorted(protos) + sorted(others)
+ indict[name] = protos + sorted(others)
return rebuild_as_ordered_dict(indict, _ELEM_KEYS)
for filename in sys.argv[1:]:
diff --git a/tools/dockerfile/grpc_cxx/Dockerfile b/tools/dockerfile/grpc_cxx/Dockerfile
index 141a20a881..43da9fefc3 100644
--- a/tools/dockerfile/grpc_cxx/Dockerfile
+++ b/tools/dockerfile/grpc_cxx/Dockerfile
@@ -21,4 +21,6 @@ RUN cd /var/local/git/grpc && ls \
&& make interop_client \
&& make interop_server
+ADD service_account service_account
+
CMD ["/var/local/git/grpc/bins/opt/interop_server", "--enable_ssl", "--port=8010"]
diff --git a/tools/dockerfile/grpc_java/Dockerfile b/tools/dockerfile/grpc_java/Dockerfile
index f234f514e6..a5508cad7f 100644
--- a/tools/dockerfile/grpc_java/Dockerfile
+++ b/tools/dockerfile/grpc_java/Dockerfile
@@ -1,13 +1,11 @@
# Dockerfile for the gRPC Java dev image
FROM grpc/java_base
-RUN cd /var/local/git/grpc-java/lib/okhttp && \
- mvn -pl okhttp -am install
-RUN cd /var/local/git/grpc-java/lib/netty && \
- mvn -pl codec-http2 -am -DskipTests install
+RUN git clone --recursive --depth 1 git@github.com:google/grpc-java.git /var/local/git/grpc-java
+RUN cd /var/local/git/grpc-java/lib/netty && \
+ mvn -pl codec-http2 -am -DskipTests install clean
RUN cd /var/local/git/grpc-java && \
- protoc --version>ver.txt && \
- mvn install
+ ./gradlew build
# Specify the default command such that the interop server runs on its known testing port
CMD ["/var/local/git/grpc-java/run-test-server.sh", "--use_tls=true", "--port=8030"]
diff --git a/tools/dockerfile/grpc_java_base/Dockerfile b/tools/dockerfile/grpc_java_base/Dockerfile
index 3271d1b2c2..73382ed8c9 100644
--- a/tools/dockerfile/grpc_java_base/Dockerfile
+++ b/tools/dockerfile/grpc_java_base/Dockerfile
@@ -9,35 +9,36 @@ RUN echo oracle-java8-installer shared/accepted-oracle-license-v1-1 select true
RUN echo "deb http://ppa.launchpad.net/webupd8team/java/ubuntu trusty main" | tee /etc/apt/sources.list.d/webupd8team-java.list
RUN echo "deb-src http://ppa.launchpad.net/webupd8team/java/ubuntu trusty main" | tee -a /etc/apt/sources.list.d/webupd8team-java.list
RUN apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv-keys EEA14886
-RUN apt-get update && apt-get -y install oracle-java8-installer
+RUN apt-get update && apt-get -y install oracle-java8-installer && \
+ apt-get clean && rm -r /var/cache/oracle-jdk8-installer/
# Install maven
-RUN wget http://mirror.olnevhost.net/pub/apache/maven/binaries/apache-maven-3.2.1-bin.tar.gz && \
- tar xvf apache-maven-3.2.1-bin.tar.gz -C /var/local
+RUN wget -O - http://mirror.olnevhost.net/pub/apache/maven/binaries/apache-maven-3.2.1-bin.tar.gz | \
+ tar xz -C /var/local
ENV JAVA_HOME /usr/lib/jvm/java-8-oracle
ENV M2_HOME /var/local/apache-maven-3.2.1
ENV PATH $PATH:$JAVA_HOME/bin:$M2_HOME/bin
ENV LD_LIBRARY_PATH /usr/local/lib
-# Install a GitHub SSH service credential that gives access to the GitHub repo while it's private
-# TODO: remove this once the repo is public
-ADD .ssh .ssh
-RUN chmod 600 .ssh/github.rsa
-RUN mkdir -p $HOME/.ssh && echo 'Host github.com' > $HOME/.ssh/config
-RUN echo " IdentityFile /.ssh/github.rsa" >> $HOME/.ssh/config
-RUN echo 'StrictHostKeyChecking no' >> $HOME/.ssh/config
-
# Get the protobuf source from GitHub and install it
-RUN git clone --recursive --branch v2.6.1 git@github.com:google/protobuf.git /var/local/git/protobuf
-RUN cd /var/local/git/protobuf && \
- ./autogen.sh && \
+RUN wget -O - https://github.com/google/protobuf/releases/download/v2.6.1/protobuf-2.6.1.tar.bz2 | \
+ tar xj && \
+ cd protobuf-2.6.1 && \
./configure --prefix=/usr && \
- make -j12 && make check && make install && make clean
+ make -j12 && make check && make install && \
+ rm -r "$(pwd)"
+
+# Install a GitHub SSH service credential that gives access to the GitHub repo while it's private
+# TODO: remove this once the repo is public
+COPY .ssh/github.rsa /root/.ssh/id_rsa
+RUN echo 'Host github.com\nStrictHostKeyChecking no' > /root/.ssh/config
-RUN cd /var/local/git/grpc-java/lib/okhttp && \
- mvn -pl okhttp -am validate
-RUN cd /var/local/git/grpc-java/lib/netty && \
- mvn -pl codec-http2 -am validate
-RUN cd /var/local/git/grpc-java && \
- mvn validate
+# Trigger download of as many Maven and Gradle artifacts as possible. We don't build grpc-java
+# because we don't want to install netty
+RUN git clone --recursive --depth 1 git@github.com:google/grpc-java.git && \
+ cd grpc-java/lib/netty && \
+ mvn -pl codec-http2 -am -DskipTests verify && \
+ cd ../.. && \
+ ./gradlew && \
+ rm -r "$(pwd)"
diff --git a/tools/dockerfile/grpc_node/Dockerfile b/tools/dockerfile/grpc_node/Dockerfile
new file mode 100644
index 0000000000..baec0e21d8
--- /dev/null
+++ b/tools/dockerfile/grpc_node/Dockerfile
@@ -0,0 +1,14 @@
+# Dockerfile for gRPC Node
+FROM grpc/node_base
+
+# Update the C library
+RUN cd /var/local/git/grpc \
+ && git pull --recurse-submodules \
+ && git submodule update --init --recursive
+
+# Install the C core.
+RUN make install_c -C /var/local/git/grpc
+
+RUN cd /var/local/git/grpc/src/node && npm install && node-gyp rebuild
+
+CMD ["/usr/bin/nodejs", "/var/local/git/grpc/src/node/interop/interop_server.js", "--use_tls=true", "--port 8040"] \ No newline at end of file
diff --git a/tools/dockerfile/grpc_node_base/Dockerfile b/tools/dockerfile/grpc_node_base/Dockerfile
new file mode 100644
index 0000000000..28bd7b2556
--- /dev/null
+++ b/tools/dockerfile/grpc_node_base/Dockerfile
@@ -0,0 +1,22 @@
+# Base Dockerfile for gRPC Node.
+#
+# Includes Node installation dependencies
+FROM grpc/base
+
+RUN curl -sL https://deb.nodesource.com/setup | bash -
+
+RUN apt-get update && apt-get install -y nodejs
+
+RUN npm install -g node-gyp
+
+# Get the source from GitHub, this gets the protobuf library as well
+RUN git clone git@github.com:google/grpc.git /var/local/git/grpc
+RUN cd /var/local/git/grpc && \
+ git pull --recurse-submodules && \
+ git submodule update --init --recursive
+
+# Build the C core
+RUN make static_c shared_c -j12 -C /var/local/git/grpc
+
+# Define the default command.
+CMD ["bash"] \ No newline at end of file
diff --git a/tools/dockerfile/grpc_ruby/Dockerfile b/tools/dockerfile/grpc_ruby/Dockerfile
index f01f81d539..c84548c880 100644
--- a/tools/dockerfile/grpc_ruby/Dockerfile
+++ b/tools/dockerfile/grpc_ruby/Dockerfile
@@ -12,14 +12,8 @@ RUN touch /var/local/git/grpc/include/grpc/support/string.h
# Build the C core.
RUN make install_c -C /var/local/git/grpc
-# Install the grpc gem locally with its dependencies and build the extension
-RUN /bin/bash -l -c 'cd /var/local/git/grpc/src/ruby && bundle && rake compile:grpc && gem build grpc.gemspec && gem install grpc'
-
-# TODO add a command to run the unittest tests when the bug below is fixed
-# - the tests fail due to an error in the C threading library:
-# they fail with 'ruby: __pthread_mutex_cond_lock_adjust for unknown reasons' at the end of a testcase
-# - however, the interop server and client run OK, so this bug can be investigated
-# RUN /bin/bash -l -c 'cd /var/local/git/grpc/src/ruby && bundle && rake'
+# Build ruby gRPC and run its tests
+RUN /bin/bash -l -c 'cd /var/local/git/grpc/src/ruby && bundle && rake'
# Add a cacerts directory containing the Google root pem file, allowing the ruby client to access the production test instance
ADD cacerts cacerts
diff --git a/tools/dockerfile/grpc_ruby_base/Dockerfile b/tools/dockerfile/grpc_ruby_base/Dockerfile
index b2af9d7160..ec4544d2fd 100644
--- a/tools/dockerfile/grpc_ruby_base/Dockerfile
+++ b/tools/dockerfile/grpc_ruby_base/Dockerfile
@@ -39,7 +39,6 @@ RUN /bin/bash -l -c "curl -L get.rvm.io | bash -s stable"
RUN /bin/bash -l -c "rvm install ruby-2.1"
RUN /bin/bash -l -c "rvm use --default ruby-2.1"
RUN /bin/bash -l -c "echo 'gem: --no-ri --no-rdoc' > ~/.gemrc"
-RUN /bin/bash -l -c "echo 'source /home/grpc_ruby/.rvm/scripts/rvm' >> ~/.bashrc"
RUN /bin/bash -l -c "echo 'rvm --default use ruby-2.1' >> ~/.bashrc"
RUN /bin/bash -l -c "gem install bundler --no-ri --no-rdoc"
diff --git a/tools/gce_setup/grpc_docker.sh b/tools/gce_setup/grpc_docker.sh
index 6bb73269ae..a97cc88aee 100755
--- a/tools/gce_setup/grpc_docker.sh
+++ b/tools/gce_setup/grpc_docker.sh
@@ -350,7 +350,7 @@ grpc_interop_test_args() {
[[ -n $1 ]] && { # client_type
case $1 in
- cxx|go|java|nodejs|php|python|ruby)
+ cxx|go|java|node|php|python|ruby)
grpc_gen_test_cmd="grpc_interop_gen_$1_cmd"
declare -F $grpc_gen_test_cmd >> /dev/null || {
echo "-f: test_func for $1 => $grpc_gen_test_cmd is not defined" 1>&2
@@ -381,7 +381,7 @@ grpc_interop_test_args() {
cxx) grpc_port=8010 ;;
go) grpc_port=8020 ;;
java) grpc_port=8030 ;;
- nodejs) grpc_port=8040 ;;
+ node) grpc_port=8040 ;;
python) grpc_port=8050 ;;
ruby) grpc_port=8060 ;;
*) echo "bad server_type: $1" 1>&2; return 1 ;;
@@ -421,7 +421,7 @@ grpc_cloud_prod_test_args() {
[[ -n $1 ]] && { # client_type
case $1 in
- cxx|go|java|nodejs|php|python|ruby)
+ cxx|go|java|node|php|python|ruby)
grpc_gen_test_cmd="grpc_cloud_prod_gen_$1_cmd"
declare -F $grpc_gen_test_cmd >> /dev/null || {
echo "-f: test_func for $1 => $grpc_gen_test_cmd is not defined" 1>&2
@@ -440,6 +440,55 @@ grpc_cloud_prod_test_args() {
}
}
+# checks the positional args and assigns them to variables visible in the caller
+#
+# these are the positional args passed to grpc_cloud_prod_auth_test after option flags
+# are removed
+#
+# three args are expected, in order
+# - test_case
+# - host <the gce docker instance on which to run the test>
+# - client to run
+grpc_cloud_prod_auth_test_args() {
+ grpc_gen_test_cmd="grpc_cloud_prod_auth_"
+ [[ -n $1 ]] && { # test_case
+ test_case=$1
+ grpc_gen_test_cmd+="$1"
+ shift
+ } || {
+ echo "$FUNCNAME: missing arg: test_case" 1>&2
+ return 1
+ }
+
+ [[ -n $1 ]] && { # host
+ host=$1
+ shift
+ } || {
+ echo "$FUNCNAME: missing arg: host" 1>&2
+ return 1
+ }
+
+ [[ -n $1 ]] && { # client_type
+ case $1 in
+ cxx|go|java|nodejs|php|python|ruby)
+ grpc_gen_test_cmd+="_gen_$1_cmd"
+ declare -F $grpc_gen_test_cmd >> /dev/null || {
+ echo "-f: test_func for $1 => $grpc_gen_test_cmd is not defined" 1>&2
+ return 2
+ }
+ shift
+ ;;
+ *)
+ echo "bad client_type: $1" 1>&2
+ return 1
+ ;;
+ esac
+ } || {
+ echo "$FUNCNAME: missing arg: client_type" 1>&2
+ return 1
+ }
+}
+
_grpc_sync_scripts_args() {
grpc_gce_script_root='tools/gce_setup'
@@ -555,7 +604,7 @@ grpc_launch_server_args() {
cxx) grpc_port=8010 ;;
go) grpc_port=8020 ;;
java) grpc_port=8030 ;;
- nodejs) grpc_port=8040 ;;
+ node) grpc_port=8040 ;;
python) grpc_port=8050 ;;
ruby) grpc_port=8060 ;;
*) echo "bad server_type: $1" 1>&2; return 1 ;;
@@ -627,7 +676,7 @@ grpc_launch_server() {
# cxx: 8010
# go: 8020
# java: 8030
-# nodejs: 8040
+# node: 8040
# python: 8050
# ruby: 8060
#
@@ -715,6 +764,52 @@ grpc_cloud_prod_test() {
gcloud compute $project_opt ssh $zone_opt $host --command "$cmd"
}
+# Runs a test command on a docker instance.
+#
+# call-seq:
+# grpc_cloud_prod_auth_test <test_name> <host> <client_type>
+#
+# requirements:
+# host is a GCE instance running docker with access to the gRPC docker images
+# test_name is one of the named gRPC tests [http://go/grpc_interop_tests]
+# client_type is one of [cxx,go,java,php,python,ruby]
+#
+# it assumes:
+# that each grpc-imp has a docker image named grpc/<imp>, e.g, grpc/java
+# a test is run using $ docker run 'path/to/interop_test_bin --flags'
+# the required images are available on <host>
+#
+# each client_type should have an associated bash func:
+# grpc_cloud_prod_auth_<test_case>_gen_<client_type>_cmd
+# the func provides the dockerized command for running client_type's test.
+# If no such func is available, tests for that client type cannot be run.
+grpc_cloud_prod_auth_test() {
+ _grpc_ensure_gcloud_ssh || return 1;
+ # declare vars local so that they don't pollute the shell environment
+ # where this func is used.
+
+ local grpc_zone grpc_project dry_run # set by _grpc_set_project_and_zone
+ # grpc_cloud_prod_test_args
+ local test_case host grpc_gen_test_cmd
+
+ # set the project zone and check that all necessary args are provided
+ _grpc_set_project_and_zone -f grpc_cloud_prod_auth_test_args "$@" || return 1
+ gce_has_instance $grpc_project $host || return 1;
+
+ local test_case_flag=" --test_case=$test_case"
+ cmd=$($grpc_gen_test_cmd $test_case_flag)
+ [[ -n $cmd ]] || return 1
+
+ local project_opt="--project $grpc_project"
+ local zone_opt="--zone $grpc_zone"
+ local ssh_cmd="bash -l -c \"$cmd\""
+ echo "will run:"
+ echo " $ssh_cmd"
+ echo "on $host"
+ [[ $dry_run == 1 ]] && return 0 # don't run the command on a dry run
+ gcloud compute $project_opt ssh $zone_opt $host --command "$cmd"
+}
+
# constructs the full dockerized ruby interop test cmd.
#
# call-seq:
@@ -827,6 +922,13 @@ grpc_interop_gen_cxx_cmd() {
echo $the_cmd
}
+grpc_interop_gen_node_cmd() {
+ local cmd_prefix="sudo docker run grpc/node";
+ local test_script="/usr/bin/nodejs /var/local/git/grpc/src/node/interop/interop_client.js --use_tls=true";
+ local the_cmd="$cmd_prefix $test_script $@";
+ echo $the_cmd
+}
+
# constructs the full dockerized cpp interop test cmd.
#
#
@@ -841,4 +943,34 @@ grpc_cloud_prod_gen_cxx_cmd() {
echo $the_cmd
}
-# TODO(grpc-team): add grpc_interop_gen_xxx_cmd for python|cxx|nodejs
+# constructs the full dockerized cpp interop test cmd.
+#
+#
+# call-seq:
+# flags= .... # generic flags to include the command
+# cmd=$($grpc_gen_test_cmd $flags)
+grpc_cloud_prod_auth_service_account_creds_gen_cxx_cmd() {
+ local cmd_prefix="sudo docker run grpc/cxx";
+ local test_script="/var/local/git/grpc/bins/opt/interop_client --enable_ssl";
+ local gfe_flags=" --use_prod_roots --server_port=443 --server_host=grpc-test.sandbox.google.com --server_host_override=grpc-test.sandbox.google.com"
+ local added_gfe_flags=" --service_account_key_file=/service_account/stubbyCloudTestingTest-7dd63462c60c.json --oauth_scope=https://www.googleapis.com/auth/xapi.zoo"
+ local the_cmd="$cmd_prefix $test_script $gfe_flags $added_gfe_flags $@";
+ echo $the_cmd
+}
+
+# constructs the full dockerized cpp interop test cmd.
+#
+#
+# call-seq:
+# flags= .... # generic flags to include the command
+# cmd=$($grpc_gen_test_cmd $flags)
+grpc_cloud_prod_auth_compute_engine_creds_gen_cxx_cmd() {
+ local cmd_prefix="sudo docker run grpc/cxx";
+ local test_script="/var/local/git/grpc/bins/opt/interop_client --enable_ssl";
+ local gfe_flags=" --use_prod_roots --server_port=443 --server_host=grpc-test.sandbox.google.com --server_host_override=grpc-test.sandbox.google.com"
+ local added_gfe_flags=" --default_service_account=155450119199-r5aaqa2vqoa9g5mv2m6s3m1l293rlmel@developer.gserviceaccount.com --oauth_scope=https://www.googleapis.com/auth/xapi.zoo"
+ local the_cmd="$cmd_prefix $test_script $gfe_flags $added_gfe_flags $@";
+ echo $the_cmd
+}
+
+# TODO(grpc-team): add grpc_interop_gen_xxx_cmd for python|nodejs
diff --git a/tools/gce_setup/interop_test_runner.sh b/tools/gce_setup/interop_test_runner.sh
index 1c0d82095c..edc8bba4b5 100755
--- a/tools/gce_setup/interop_test_runner.sh
+++ b/tools/gce_setup/interop_test_runner.sh
@@ -3,8 +3,8 @@
main() {
source grpc_docker.sh
test_cases=(large_unary empty_unary ping_pong client_streaming server_streaming)
- clients=(cxx java go ruby)
- servers=(cxx java go ruby)
+ clients=(cxx java go ruby node)
+ servers=(cxx java go ruby node)
for test_case in "${test_cases[@]}"
do
for client in "${clients[@]}"
diff --git a/tools/gce_setup/new_grpc_docker_builder.sh b/tools/gce_setup/new_grpc_docker_builder.sh
index 9a3988f343..5d4fc361ad 100755
--- a/tools/gce_setup/new_grpc_docker_builder.sh
+++ b/tools/gce_setup/new_grpc_docker_builder.sh
@@ -87,6 +87,7 @@ add_instance() {
local the_image='container-vm-v20140925'
local scopes='compute-rw storage-full'
scopes+=' https://www.googleapis.com/auth/gerritcodereview'
+ scopes+=' https://www.googleapis.com/auth/xapi.zoo'
gcloud --project $project compute instances create $instance \
$address_flag \
--image $the_image \
diff --git a/tools/gce_setup/shared_startup_funcs.sh b/tools/gce_setup/shared_startup_funcs.sh
index 69f6ba8cc0..3300eb257d 100755
--- a/tools/gce_setup/shared_startup_funcs.sh
+++ b/tools/gce_setup/shared_startup_funcs.sh
@@ -367,7 +367,7 @@ grpc_docker_launch_registry() {
grpc_docker_pull_known() {
local addr=$1
[[ -n $addr ]] || addr="0.0.0.0:5000"
- local known="base cxx php_base php ruby_base ruby java_base java go"
+ local known="base cxx php_base php ruby_base ruby java_base java go node_base node"
echo "... pulling docker images for '$known'"
for i in $known
do
@@ -416,6 +416,9 @@ grpc_dockerfile_install() {
[[ $image_label == "grpc/ruby" ]] && {
grpc_docker_sync_roots_pem $dockerfile_dir/cacerts || return 1;
}
+ [[ $image_label == "grpc/cxx" ]] && {
+ grpc_docker_sync_service_account $dockerfile_dir/service_account || return 1;
+ }
# TODO(temiola): maybe make cache/no-cache a func option?
@@ -503,3 +506,31 @@ grpc_docker_sync_roots_pem() {
}
gsutil cp $src $gcs_certs_path $local_certs_path
}
+
+# grpc_docker_sync_service_account.
+#
+# Copies the service account from GCS to the target dir
+#
+# call-seq:
+# grpc_docker_sync_service_account <target_dir>
+grpc_docker_sync_service_account() {
+ local target_dir=$1
+ [[ -n $target_dir ]] || { echo "$FUNCNAME: missing arg: target_dir" >&2; return 1; }
+
+ # determine the admin root; the parent of the dockerfile root,
+ local gs_dockerfile_root=$(load_metadata "attributes/gs_dockerfile_root")
+ [[ -n $gs_dockerfile_root ]] || {
+ echo "$FUNCNAME: missing metadata: gs_dockerfile_root" >&2
+ return 1
+ }
+ local gcs_admin_root=$(dirname $gs_dockerfile_root)
+
+ # cp the file from gsutil to a known local area
+ local gcs_acct_path=$gcs_admin_root/service_account/stubbyCloudTestingTest-7dd63462c60c.json
+ local local_acct_path=$target_dir/stubbyCloudTestingTest-7dd63462c60c.json
+ mkdir -p $target_dir || {
+ echo "$FUNCNAME: could not create dir: $target_dir" 1>&2
+ return 1
+ }
+ gsutil cp $src $gcs_acct_path $local_acct_path
+}
diff --git a/tools/run_tests/build_python.sh b/tools/run_tests/build_python.sh
index 46e5797f62..4abb412c95 100755
--- a/tools/run_tests/build_python.sh
+++ b/tools/run_tests/build_python.sh
@@ -7,6 +7,5 @@ cd $(dirname $0)/../..
root=`pwd`
virtualenv python2.7_virtual_environment
-python2.7_virtual_environment/bin/pip install enum34==1.0.4 futures==2.2.0
-python2.7_virtual_environment/bin/pip install third_party/protobuf/python
+python2.7_virtual_environment/bin/pip install enum34==1.0.4 futures==2.2.0 protobuf==2.6.1
python2.7_virtual_environment/bin/pip install src/python
diff --git a/tools/run_tests/jobset.py b/tools/run_tests/jobset.py
index 8f16a4ff2c..19ae52ef3b 100755
--- a/tools/run_tests/jobset.py
+++ b/tools/run_tests/jobset.py
@@ -86,19 +86,49 @@ def which(filename):
raise Exception('%s not found' % filename)
+class JobSpec(object):
+ """Specifies what to run for a job."""
+
+ def __init__(self, cmdline, shortname=None, environ={}, hash_targets=[]):
+ """
+ Arguments:
+ cmdline: a list of arguments to pass as the command line
+ environ: a dictionary of environment variables to set in the child process
+ hash_targets: which files to include in the hash representing the jobs version
+ (or empty, indicating the job should not be hashed)
+ """
+ self.cmdline = cmdline
+ self.environ = environ
+ self.shortname = cmdline[0] if shortname is None else shortname
+ self.hash_targets = hash_targets or []
+
+ def identity(self):
+ return '%r %r %r' % (self.cmdline, self.environ, self.hash_targets)
+
+ def __hash__(self):
+ return hash(self.identity())
+
+ def __cmp__(self, other):
+ return self.identity() == other.identity()
+
+
class Job(object):
"""Manages one job."""
- def __init__(self, cmdline, bin_hash, newline_on_success):
- self._cmdline = cmdline
+ def __init__(self, spec, bin_hash, newline_on_success):
+ self._spec = spec
self._bin_hash = bin_hash
self._tempfile = tempfile.TemporaryFile()
- self._process = subprocess.Popen(args=cmdline,
+ env = os.environ.copy()
+ for k, v in spec.environ.iteritems():
+ env[k] = v
+ self._process = subprocess.Popen(args=spec.cmdline,
stderr=subprocess.STDOUT,
- stdout=self._tempfile)
+ stdout=self._tempfile,
+ env=env)
self._state = _RUNNING
self._newline_on_success = newline_on_success
- message('START', ' '.join(self._cmdline))
+ message('START', spec.shortname)
def state(self, update_cache):
"""Poll current state of the job. Prints messages at completion."""
@@ -108,12 +138,13 @@ class Job(object):
self._tempfile.seek(0)
stdout = self._tempfile.read()
message('FAILED', '%s [ret=%d]' % (
- ' '.join(self._cmdline), self._process.returncode), stdout)
+ self._spec.shortname, self._process.returncode), stdout)
else:
self._state = _SUCCESS
- message('PASSED', '%s' % ' '.join(self._cmdline),
+ message('PASSED', self._spec.shortname,
do_newline=self._newline_on_success)
- update_cache.finished(self._cmdline, self._bin_hash)
+ if self._bin_hash:
+ update_cache.finished(self._spec.identity(), self._bin_hash)
return self._state
def kill(self):
@@ -135,16 +166,26 @@ class Jobset(object):
self._newline_on_success = newline_on_success
self._cache = cache
- def start(self, cmdline):
+ def start(self, spec):
"""Start a job. Return True on success, False on failure."""
while len(self._running) >= self._maxjobs:
if self.cancelled(): return False
self.reap()
if self.cancelled(): return False
- with open(which(cmdline[0])) as f:
- bin_hash = hashlib.sha1(f.read()).hexdigest()
- if self._cache.should_run(cmdline, bin_hash):
- self._running.add(Job(cmdline, bin_hash, self._newline_on_success))
+ if spec.hash_targets:
+ bin_hash = hashlib.sha1()
+ for fn in spec.hash_targets:
+ with open(which(fn)) as f:
+ bin_hash.update(f.read())
+ bin_hash = bin_hash.hexdigest()
+ should_run = self._cache.should_run(spec.identity(), bin_hash)
+ else:
+ bin_hash = None
+ should_run = True
+ if should_run:
+ self._running.add(Job(spec,
+ bin_hash,
+ self._newline_on_success))
return True
def reap(self):
diff --git a/tools/run_tests/run_tests.py b/tools/run_tests/run_tests.py
index a699399c27..8cc029e3cc 100755
--- a/tools/run_tests/run_tests.py
+++ b/tools/run_tests/run_tests.py
@@ -17,13 +17,17 @@ import watch_dirs
# SimpleConfig: just compile with CONFIG=config, and run the binary to test
class SimpleConfig(object):
- def __init__(self, config):
+ def __init__(self, config, environ={}):
self.build_config = config
self.maxjobs = 2 * multiprocessing.cpu_count()
self.allow_hashing = (config != 'gcov')
+ self.environ = environ
- def run_command(self, binary):
- return [binary]
+ def job_spec(self, binary, hash_targets):
+ return jobset.JobSpec(cmdline=[binary],
+ environ=self.environ,
+ hash_targets=hash_targets
+ if self.allow_hashing else None)
# ValgrindConfig: compile with some CONFIG=config, but use valgrind to run
@@ -35,14 +39,14 @@ class ValgrindConfig(object):
self.maxjobs = 2 * multiprocessing.cpu_count()
self.allow_hashing = False
- def run_command(self, binary):
- return ['valgrind', binary, '--tool=%s' % self.tool]
+ def job_spec(self, binary, hash_targets):
+ return JobSpec(cmdline=['valgrind', '--tool=%s' % self.tool, binary],
+ hash_targets=None)
class CLanguage(object):
def __init__(self, make_target, test_lang):
- self.allow_hashing = True
self.make_target = make_target
with open('tools/run_tests/tests.json') as f:
js = json.load(f)
@@ -50,8 +54,12 @@ class CLanguage(object):
for tgt in js
if tgt['language'] == test_lang]
- def test_binaries(self, config):
- return ['bins/%s/%s' % (config, binary) for binary in self.binaries]
+ def test_specs(self, config):
+ out = []
+ for name in self.binaries:
+ binary = 'bins/%s/%s' % (config.build_config, name)
+ out.append(config.job_spec(binary, [binary]))
+ return out
def make_targets(self):
return ['buildtests_%s' % self.make_target]
@@ -59,13 +67,11 @@ class CLanguage(object):
def build_steps(self):
return []
-class NodeLanguage(object):
- def __init__(self):
- self.allow_hashing = False
+class NodeLanguage(object):
- def test_binaries(self, config):
- return ['tools/run_tests/run_node.sh']
+ def test_specs(self, config):
+ return [config.job_spec('tools/run_tests/run_node.sh', None)]
def make_targets(self):
return ['static_c']
@@ -73,13 +79,11 @@ class NodeLanguage(object):
def build_steps(self):
return [['tools/run_tests/build_node.sh']]
-class PhpLanguage(object):
- def __init__(self):
- self.allow_hashing = False
+class PhpLanguage(object):
- def test_binaries(self, config):
- return ['src/php/bin/run_tests.sh']
+ def test_specs(self, config):
+ return [config.job_spec('src/php/bin/run_tests.sh', None)]
def make_targets(self):
return ['static_c']
@@ -90,11 +94,8 @@ class PhpLanguage(object):
class PythonLanguage(object):
- def __init__(self):
- self.allow_hashing = False
-
- def test_binaries(self, config):
- return ['tools/run_tests/run_python.sh']
+ def test_specs(self, config):
+ return [config.job_spec('tools/run_tests/run_python.sh', None)]
def make_targets(self):
return[]
@@ -109,7 +110,8 @@ _CONFIGS = {
'opt': SimpleConfig('opt'),
'tsan': SimpleConfig('tsan'),
'msan': SimpleConfig('msan'),
- 'asan': SimpleConfig('asan'),
+ 'asan': SimpleConfig('asan', environ={
+ 'ASAN_OPTIONS': 'detect_leaks=1:color=always'}),
'gcov': SimpleConfig('gcov'),
'memcheck': ValgrindConfig('valgrind', 'memcheck'),
'helgrind': ValgrindConfig('dbg', 'helgrind')
@@ -123,7 +125,7 @@ _LANGUAGES = {
'node': NodeLanguage(),
'php': PhpLanguage(),
'python': PythonLanguage(),
-}
+ }
# parse command line
argp = argparse.ArgumentParser(description='Run grpc tests.')
@@ -155,14 +157,20 @@ build_configs = set(cfg.build_config for cfg in run_configs)
make_targets = []
languages = set(_LANGUAGES[l] for l in args.language)
-build_steps = [['make',
- '-j', '%d' % (multiprocessing.cpu_count() + 1),
- 'CONFIG=%s' % cfg] + list(set(
- itertools.chain.from_iterable(l.make_targets()
- for l in languages)))
- for cfg in build_configs] + list(
- itertools.chain.from_iterable(l.build_steps()
- for l in languages))
+build_steps = [jobset.JobSpec(['make',
+ '-j', '%d' % (multiprocessing.cpu_count() + 1),
+ 'CONFIG=%s' % cfg] + list(set(
+ itertools.chain.from_iterable(
+ l.make_targets() for l in languages))))
+ for cfg in build_configs] + list(set(
+ jobset.JobSpec(cmdline)
+ for l in languages
+ for cmdline in l.build_steps()))
+one_run = set(
+ spec
+ for config in run_configs
+ for language in args.language
+ for spec in _LANGUAGES[language].test_specs(config))
runs_per_test = args.runs_per_test
forever = args.forever
@@ -175,7 +183,6 @@ class TestCache(object):
self._last_successful_run = {}
def should_run(self, cmdline, bin_hash):
- cmdline = ' '.join(cmdline)
if cmdline not in self._last_successful_run:
return True
if self._last_successful_run[cmdline] != bin_hash:
@@ -183,7 +190,7 @@ class TestCache(object):
return False
def finished(self, cmdline, bin_hash):
- self._last_successful_run[' '.join(cmdline)] = bin_hash
+ self._last_successful_run[cmdline] = bin_hash
def dump(self):
return [{'cmdline': k, 'hash': v}
@@ -209,12 +216,6 @@ def _build_and_run(check_cancelled, newline_on_success, cache):
return 1
# run all the tests
- one_run = dict(
- (' '.join(config.run_command(x)), config.run_command(x))
- for config in run_configs
- for language in args.language
- for x in _LANGUAGES[language].test_binaries(config.build_config)
- ).values()
all_runs = itertools.chain.from_iterable(
itertools.repeat(one_run, runs_per_test))
if not jobset.run(all_runs, check_cancelled,
@@ -226,12 +227,8 @@ def _build_and_run(check_cancelled, newline_on_success, cache):
return 0
-test_cache = (None
- if not all(x.allow_hashing
- for x in itertools.chain(languages, run_configs))
- else TestCache())
-if test_cache:
- test_cache.maybe_load()
+test_cache = TestCache()
+test_cache.maybe_load()
if forever:
success = True
@@ -248,7 +245,7 @@ if forever:
'All tests are now passing properly',
do_newline=True)
jobset.message('IDLE', 'No change detected')
- if test_cache: test_cache.save()
+ test_cache.save()
while not have_files_changed():
time.sleep(1)
else:
@@ -259,5 +256,5 @@ else:
jobset.message('SUCCESS', 'All tests passed', do_newline=True)
else:
jobset.message('FAILED', 'Some tests failed', do_newline=True)
- if test_cache: test_cache.save()
+ test_cache.save()
sys.exit(result)
diff --git a/tools/run_tests/tests.json b/tools/run_tests/tests.json
index a610e92307..fd15182120 100644
--- a/tools/run_tests/tests.json
+++ b/tools/run_tests/tests.json
@@ -187,6 +187,14 @@
},
{
"language": "c",
+ "name": "json_rewrite_test"
+ },
+ {
+ "language": "c",
+ "name": "json_test"
+ },
+ {
+ "language": "c",
"name": "lame_client_test"
},
{
@@ -263,10 +271,6 @@
},
{
"language": "c++",
- "name": "tips_client_test"
- },
- {
- "language": "c++",
"name": "status_test"
},
{
@@ -278,6 +282,10 @@
"name": "thread_pool_test"
},
{
+ "language": "c++",
+ "name": "tips_client_test"
+ },
+ {
"language": "c",
"name": "chttp2_fake_security_cancel_after_accept_test"
},