author    Jan Tattermusch <jtattermusch@users.noreply.github.com>  2016-10-13 19:30:15 +0200
committer GitHub <noreply@github.com>  2016-10-13 19:30:15 +0200
commit    0fd76ab55f7bc3cc96e7bd57ccb6a726c3fe7ac9 (patch)
tree      702de599248f559466b0c7469bb3fae230dba8d9
parent    d44144c3b2112ed61a21c08331bfe9e18664ceb3 (diff)
parent    cfcc0755a0c77180c3eaadea1c8d1103da2e8f36 (diff)
Merge pull request #8352 from jtattermusch/improve_reporting
More flexible test naming in XML reports
-rw-r--r--  tools/run_tests/report_utils.py     | 7
-rwxr-xr-x  tools/run_tests/run_tests.py        | 8
-rwxr-xr-x  tools/run_tests/run_tests_matrix.py | 9
3 files changed, 16 insertions(+), 8 deletions(-)
diff --git a/tools/run_tests/report_utils.py b/tools/run_tests/report_utils.py
index 5648a694cd..efe5dc999d 100644
--- a/tools/run_tests/report_utils.py
+++ b/tools/run_tests/report_utils.py
@@ -57,11 +57,12 @@ def _filter_msg(msg, output_format):
return msg
-def render_junit_xml_report(resultset, xml_report):
+def render_junit_xml_report(resultset, xml_report, suite_package='grpc',
+ suite_name='tests'):
"""Generate JUnit-like XML report."""
root = ET.Element('testsuites')
- testsuite = ET.SubElement(root, 'testsuite', id='1', package='grpc',
- name='tests')
+ testsuite = ET.SubElement(root, 'testsuite', id='1', package=suite_package,
+ name=suite_name)
for shortname, results in resultset.items():
for result in results:
xml_test = ET.SubElement(testsuite, 'testcase', name=shortname)
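For context, a minimal standalone sketch of the XML envelope this hunk parameterizes. It is not the real helper (the per-testcase pass/fail handling that follows in report_utils.py is omitted), and the suite name 'c_linux_dbg' and test shortname are hypothetical values a caller might now pass instead of the previously hard-coded 'tests':

# Illustrative sketch only: mirrors the <testsuites>/<testsuite> envelope built
# by render_junit_xml_report, with package and suite name supplied by the caller.
import xml.etree.ElementTree as ET

def sketch_junit_envelope(shortnames, suite_package='grpc', suite_name='tests'):
  root = ET.Element('testsuites')
  testsuite = ET.SubElement(root, 'testsuite', id='1', package=suite_package,
                            name=suite_name)
  for shortname in shortnames:
    ET.SubElement(testsuite, 'testcase', name=shortname)
  return ET.tostring(root)

# Hypothetical suite name; before this change the attribute was always 'tests'.
print(sketch_junit_envelope(['bins/dbg/census_context_test'],
                            suite_name='c_linux_dbg'))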
diff --git a/tools/run_tests/run_tests.py b/tools/run_tests/run_tests.py
index a6f3d405dc..dd070b1fe0 100755
--- a/tools/run_tests/run_tests.py
+++ b/tools/run_tests/run_tests.py
@@ -1015,6 +1015,8 @@ argp.add_argument('--update_submodules', default=[], nargs='*',
argp.add_argument('-a', '--antagonists', default=0, type=int)
argp.add_argument('-x', '--xml_report', default=None, type=str,
help='Generates a JUnit-compatible XML report')
+argp.add_argument('--report_suite_name', default='tests', type=str,
+ help='Test suite name to use in generated JUnit XML report')
argp.add_argument('--force_default_poller', default=False, action='store_const', const=True,
help='Dont try to iterate over many polling strategies when they exist')
args = argp.parse_args()
@@ -1327,7 +1329,8 @@ def _build_and_run(
if build_only:
if xml_report:
- report_utils.render_junit_xml_report(resultset, xml_report)
+ report_utils.render_junit_xml_report(resultset, xml_report,
+ suite_name=args.report_suite_name)
return []
# start antagonists
@@ -1379,7 +1382,8 @@ def _build_and_run(
for antagonist in antagonists:
antagonist.kill()
if xml_report and resultset:
- report_utils.render_junit_xml_report(resultset, xml_report)
+ report_utils.render_junit_xml_report(resultset, xml_report,
+ suite_name=args.report_suite_name)
number_failures, _ = jobset.run(
post_tests_steps, maxjobs=1, stop_on_failure=True,
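As a usage sketch of the new flag (standalone, not the real run_tests.py parser; the value 'c_linux_dbg' is hypothetical), --report_suite_name defaults to the old behaviour and is simply forwarded to the report writer:

# Minimal sketch of the new flag's plumbing; run_tests.py forwards
# args.report_suite_name to report_utils.render_junit_xml_report(...).
import argparse

argp = argparse.ArgumentParser()
argp.add_argument('-x', '--xml_report', default=None, type=str)
argp.add_argument('--report_suite_name', default='tests', type=str)

args = argp.parse_args(['-x', 'report.xml', '--report_suite_name', 'c_linux_dbg'])
assert args.report_suite_name == 'c_linux_dbg'  # hypothetical suite name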
diff --git a/tools/run_tests/run_tests_matrix.py b/tools/run_tests/run_tests_matrix.py
index a94f9cfef5..60c21a1e21 100755
--- a/tools/run_tests/run_tests_matrix.py
+++ b/tools/run_tests/run_tests_matrix.py
@@ -55,7 +55,8 @@ def _docker_jobspec(name, runtests_args=[]):
'--use_docker',
'-t',
'-j', str(_INNER_JOBS),
- '-x', 'report_%s.xml' % name] + runtests_args,
+ '-x', 'report_%s.xml' % name,
+ '--report_suite_name', '%s' % name] + runtests_args,
shortname='run_tests_%s' % name,
timeout_seconds=_RUNTESTS_TIMEOUT)
return test_job
@@ -70,7 +71,8 @@ def _workspace_jobspec(name, runtests_args=[], workspace_name=None):
cmdline=['tools/run_tests/run_tests_in_workspace.sh',
'-t',
'-j', str(_INNER_JOBS),
- '-x', '../report_%s.xml' % name] + runtests_args,
+ '-x', '../report_%s.xml' % name,
+ '--report_suite_name', '%s' % name] + runtests_args,
environ=env,
shortname='run_tests_%s' % name,
timeout_seconds=_RUNTESTS_TIMEOUT)
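To illustrate the effect of these two hunks, each generated matrix job now carries a report file name and a suite name derived from the same job name. The job name below is hypothetical and only the two report-related arguments are shown:

# Sketch of the report-related arguments both jobspec helpers now emit for a
# job called 'python_linux_opt' (hypothetical name):
name = 'python_linux_opt'
report_args = ['-x', 'report_%s.xml' % name,
               '--report_suite_name', '%s' % name]
# -> ['-x', 'report_python_linux_opt.xml',
#     '--report_suite_name', 'python_linux_opt']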
@@ -271,7 +273,8 @@ num_failures, resultset = jobset.run(jobs,
newline_on_success=True,
travis=True,
maxjobs=args.jobs)
-report_utils.render_junit_xml_report(resultset, 'report.xml')
+report_utils.render_junit_xml_report(resultset, 'report.xml',
+ suite_name='aggregate_tests')
if num_failures == 0:
jobset.message('SUCCESS', 'All run_tests.py instance finished successfully.',