Diffstat (limited to 'tools/run_tests/run_interop_tests.py')
 tools/run_tests/run_interop_tests.py | 30 +++++++++++++++++++++---------
 1 file changed, 21 insertions(+), 9 deletions(-)
diff --git a/tools/run_tests/run_interop_tests.py b/tools/run_tests/run_interop_tests.py
index 1537641aee..b20d6ce88e 100755
--- a/tools/run_tests/run_interop_tests.py
+++ b/tools/run_tests/run_interop_tests.py
@@ -35,6 +35,11 @@ import traceback
 import python_utils.dockerjob as dockerjob
 import python_utils.jobset as jobset
 import python_utils.report_utils as report_utils
+# It's OK if this import fails; it is only needed to upload results to BQ.
+try:
+  from python_utils.upload_test_results import upload_interop_results_to_bq
+except ImportError as e:
+  print(e)
 
 # Docker doesn't clean up after itself, so we do it on exit.
 atexit.register(lambda: subprocess.call(['stty', 'echo']))
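The try/except above is a standard optional-dependency guard: the script keeps working when the BigQuery upload helper (or its client-library dependencies) is missing, and only the upload feature degrades. A minimal standalone sketch of the same pattern follows; the no-op fallback function is hypothetical and not part of the actual script:

    # Sketch of the optional-import guard; the fallback stub is hypothetical.
    try:
        from python_utils.upload_test_results import upload_interop_results_to_bq
    except ImportError as e:
        print(e)  # report why uploads are unavailable, but keep running

        def upload_interop_results_to_bq(resultset, bq_table, args):
            # Fallback: the BQ upload silently becomes a no-op.
            pass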
@@ -308,20 +313,20 @@ class Http2Client:
 class NodeLanguage:
 
   def __init__(self):
-    self.client_cwd = None
-    self.server_cwd = None
+    self.client_cwd = '../grpc-node'
+    self.server_cwd = '../grpc-node'
     self.safename = str(self)
 
   def client_cmd(self, args):
-    return ['tools/run_tests/interop/with_nvm.sh',
-            'node', 'src/node/interop/interop_client.js'] + args
+    return ['packages/grpc-native-core/deps/grpc/tools/run_tests/interop/with_nvm.sh',
+            'node', 'test/interop/interop_client.js'] + args
 
   def cloud_to_prod_env(self):
     return {}
 
   def server_cmd(self, args):
-    return ['tools/run_tests/interop/with_nvm.sh',
-            'node', 'src/node/interop/interop_server.js'] + args
+    return ['packages/grpc-native-core/deps/grpc/tools/run_tests/interop/with_nvm.sh',
+            'node', 'test/interop/interop_server.js'] + args
 
   def global_env(self):
     return {}
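This hunk retargets the Node interop client and server at the separate grpc-node repository, expected as a sibling checkout of the grpc tree, with the with_nvm.sh wrapper coming from the grpc copy vendored under packages/grpc-native-core/deps/grpc. A rough illustration of how the cwd and command compose; running the client directly via subprocess like this is illustrative only (the real runner wraps the command in jobset and usually Docker), and it assumes a ../grpc-node checkout exists:

    import os
    import subprocess

    # Mirrors the updated NodeLanguage paths; --test_case names a standard
    # interop case.
    client_cwd = '../grpc-node'
    cmd = ['packages/grpc-native-core/deps/grpc/tools/run_tests/interop/with_nvm.sh',
           'node', 'test/interop/interop_client.js',
           '--test_case=large_unary']
    if os.path.isdir(client_cwd):
        subprocess.call(cmd, cwd=client_cwd)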
@@ -672,10 +677,10 @@ def cloud_to_prod_jobspec(language, test_case, server_host_name,
       cmdline=cmdline,
       cwd=cwd,
       environ=environ,
-      shortname='%s:%s:%s:%s' % (suite_name, server_host_name, language,
+      shortname='%s:%s:%s:%s' % (suite_name, language, server_host_name,
                                  test_case),
       timeout_seconds=_TEST_TIMEOUT,
-      flake_retries=5 if args.allow_flakes else 0,
+      flake_retries=4 if args.allow_flakes else 0,
       timeout_retries=2 if args.allow_flakes else 0,
       kill_handler=_job_kill_handler)
   if docker_image:
@@ -741,7 +746,7 @@ def cloud_to_cloud_jobspec(language, test_case, server_name, server_host,
       shortname='cloud_to_cloud:%s:%s_server:%s' % (language, server_name,
                                                     test_case),
       timeout_seconds=_TEST_TIMEOUT,
-      flake_retries=5 if args.allow_flakes else 0,
+      flake_retries=4 if args.allow_flakes else 0,
       timeout_retries=2 if args.allow_flakes else 0,
       kill_handler=_job_kill_handler)
   if docker_image:
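Both jobspecs lower flake_retries from 5 to 4, shrinking the rerun budget that applies when --allow_flakes is passed, and the cloud_to_prod shortname now orders the language before the server host. A toy sketch of what a flake-retry budget typically means; this helper is hypothetical and does not reflect jobset's actual implementation:

    def run_with_flake_budget(run_once, flake_retries=4):
        # Hypothetical illustration: one initial attempt plus up to
        # flake_retries reruns, stopping at the first success.
        for _ in range(1 + flake_retries):
            if run_once():
                return True
        return False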
@@ -956,6 +961,11 @@ argp.add_argument('--internal_ci',
                   const=True,
                   help=('Put reports into subdirectories to improve '
                         'presentation of results by Internal CI.'))
+argp.add_argument('--bq_result_table',
+                  default='',
+                  type=str,
+                  nargs='?',
+                  help='Upload test results to a specified BQ table.')
 args = argp.parse_args()
 
 servers = set(s for s in itertools.chain.from_iterable(_SERVERS
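The new --bq_result_table flag defaults to the empty string, so uploads stay off unless a table is named explicitly. A standalone mirror of the flag's definition showing how it parses; the table name below is illustrative only:

    import argparse

    argp = argparse.ArgumentParser()
    argp.add_argument('--bq_result_table',
                      default='',
                      type=str,
                      nargs='?',
                      help='Upload test results to a specified BQ table.')
    args = argp.parse_args(['--bq_result_table', 'interop_results.example'])
    assert args.bq_result_table == 'interop_results.example'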
@@ -1205,6 +1215,8 @@ try:
   num_failures, resultset = jobset.run(jobs, newline_on_success=True,
                                        maxjobs=args.jobs,
                                        skip_jobs=args.manual_run)
+  if args.bq_result_table and resultset:
+    upload_interop_results_to_bq(resultset, args.bq_result_table, args)
   if num_failures:
     jobset.message('FAILED', 'Some tests failed', do_newline=True)
   else:
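The guard uploads only when a table was requested and jobset actually produced results, which also keeps the script safe when the optional import above failed and no table was given. A hypothetical stand-in for the uploader showing the expected call shape, assuming resultset maps job shortnames to result lists whose entries expose a state field; the real python_utils.upload_test_results helper inserts rows via the BigQuery API:

    def upload_interop_results_to_bq(resultset, bq_table, args):
        # Hypothetical stand-in: the real helper converts each job result
        # into a row and inserts it into the given BigQuery table.
        for shortname, results in resultset.items():
            for result in results:
                print('would insert into %s: %s -> %s'
                      % (bq_table, shortname, result.state))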