Diffstat (limited to 'tools/run_tests/python_utils/upload_test_results.py')
-rw-r--r--  tools/run_tests/python_utils/upload_test_results.py  51
1 file changed, 51 insertions(+), 0 deletions(-)
diff --git a/tools/run_tests/python_utils/upload_test_results.py b/tools/run_tests/python_utils/upload_test_results.py
index 15e827769e..ea97bc0aec 100644
--- a/tools/run_tests/python_utils/upload_test_results.py
+++ b/tools/run_tests/python_utils/upload_test_results.py
@@ -51,6 +51,19 @@ _RESULTS_SCHEMA = [
  ('cpu_measured', 'FLOAT', 'Actual CPU usage of test'),
  ('return_code', 'INTEGER', 'Exit code of test'),
]
+_INTEROP_RESULTS_SCHEMA = [
+  ('job_name', 'STRING', 'Name of Jenkins/Kokoro job'),
+  ('build_id', 'INTEGER', 'Build ID of Jenkins/Kokoro job'),
+  ('build_url', 'STRING', 'URL of Jenkins/Kokoro job'),
+  ('test_name', 'STRING', 'Unique test name combining client, server, and test_name'),
+  ('suite', 'STRING', 'Test suite: cloud_to_cloud, cloud_to_prod, or cloud_to_prod_auth'),
+  ('client', 'STRING', 'Client language'),
+  ('server', 'STRING', 'Server host name'),
+  ('test_case', 'STRING', 'Name of test case'),
+  ('result', 'STRING', 'Test result: PASSED, TIMEOUT, FAILED, or SKIPPED'),
+  ('timestamp', 'TIMESTAMP', 'Timestamp of test run'),
+  ('elapsed_time', 'FLOAT', 'How long test took to run'),
+]
def _get_build_metadata(test_results):
@@ -114,3 +127,41 @@ def upload_results_to_bq(resultset, bq_table, args, platform):
          else:
            print('Error uploading result to bigquery, all attempts failed.')
            sys.exit(1)
+
+
+def upload_interop_results_to_bq(resultset, bq_table, args):
+  """Upload interop test results to a BQ table.
+
+  Args:
+      resultset: dictionary generated by jobset.run
+      bq_table: string name of table to create/upload results to in BQ
+      args: args in run_interop_tests.py, generated by argparse
+  """
+  bq = big_query_utils.create_big_query()
+  big_query_utils.create_partitioned_table(bq, _PROJECT_ID, _DATASET_ID, bq_table, _INTEROP_RESULTS_SCHEMA, _DESCRIPTION,
+                                           partition_type=_PARTITION_TYPE, expiration_ms=_EXPIRATION_MS)
+
+  for shortname, results in six.iteritems(resultset):
+    for result in results:
+      test_results = {}
+      _get_build_metadata(test_results)
+      test_results['elapsed_time'] = '%.2f' % result.elapsed_time
+      test_results['result'] = result.state
+      test_results['test_name'] = shortname
+      test_results['suite'] = shortname.split(':')[0]
+      test_results['client'] = shortname.split(':')[1]
+      test_results['server'] = shortname.split(':')[2]
+      test_results['test_case'] = shortname.split(':')[3]
+      test_results['timestamp'] = time.strftime('%Y-%m-%d %H:%M:%S')
+      row = big_query_utils.make_row(str(uuid.uuid4()), test_results)
+      # TODO(jtattermusch): rows are inserted one by one, very inefficient
+      max_retries = 3
+      for attempt in range(max_retries):
+        if big_query_utils.insert_rows(bq, _PROJECT_ID, _DATASET_ID, bq_table, [row]):
+          break
+        else:
+          if attempt < max_retries - 1:
+            print('Error uploading result to bigquery, will retry.')
+          else:
+            print('Error uploading result to bigquery, all attempts failed.')
+            sys.exit(1)
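
Reviewer note: the split(':') calls in upload_interop_results_to_bq imply a shortname of the form suite:client:server:test_case. Below is a minimal sketch, not part of the patch, of how one resultset entry would map onto the _INTEROP_RESULTS_SCHEMA columns; the shortname and result values are hypothetical examples.

# Sketch only: assumed shortname layout 'suite:client:server:test_case' and the
# row fields the new function would build from it. Values are made up for illustration.
import time

shortname = 'cloud_to_prod:python:grpc-test.sandbox.googleapis.com:empty_unary'
suite, client, server, test_case = shortname.split(':')

test_results = {
    'test_name': shortname,                           # unique name combining client, server, test case
    'suite': suite,                                    # cloud_to_cloud, cloud_to_prod or cloud_to_prod_auth
    'client': client,                                  # client language
    'server': server,                                  # server host name
    'test_case': test_case,                            # individual interop test case
    'result': 'PASSED',                                # PASSED, TIMEOUT, FAILED or SKIPPED
    'elapsed_time': '%.2f' % 1.23,                     # seconds, formatted as in the patch
    'timestamp': time.strftime('%Y-%m-%d %H:%M:%S'),
}
print(test_results)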
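
For context, here is a hedged sketch of how the new helper might be wired up from run_interop_tests.py. The --bq_result_table flag name and the surrounding code are assumptions for illustration only; the snippet assumes it runs from tools/run_tests in the grpc repo, and only upload_interop_results_to_bq itself comes from this patch.

# Hypothetical call site (not part of this patch): upload interop results after the
# jobs have run, but only when a BigQuery table was requested on the command line.
import argparse

import python_utils.upload_test_results as upload_test_results  # module touched by this patch

argp = argparse.ArgumentParser(description='Run interop tests.')
argp.add_argument('--bq_result_table', default='', type=str,
                  help='BigQuery table to upload interop results to (empty means no upload).')
args = argp.parse_args()

# Placeholder for the dict normally produced by jobset.run(...); keys are test
# shortnames, values are lists of job results with .state and .elapsed_time.
resultset = {}

if args.bq_result_table:
    upload_test_results.upload_interop_results_to_bq(resultset, args.bq_result_table, args)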