author    David G. Quintas <dgq@google.com>    2015-12-10 10:21:18 -0800
committer David G. Quintas <dgq@google.com>    2015-12-10 10:21:18 -0800
commit 8e4eeade96c2c547a64a4c59d9004bb8667f48b8 (patch)
tree   8e1a556a0e0555068f6c92ba4df6238d20eb925c /tools
parent 3df7e936021cba42f2ad452b51f54f2362f4229b (diff)
parent e7f8e8ecafdf36537a0b0ab032aeba0baf1f1cd3 (diff)
Merge pull request #4349 from carl-mastrangelo/percent2
Make pretty interop test output graphs
Diffstat (limited to 'tools')
-rw-r--r--  tools/run_tests/interop_html_report.template   24
-rw-r--r--  tools/run_tests/report_utils.py                 11
-rwxr-xr-x  tools/run_tests/run_interop_tests.py            42
3 files changed, 70 insertions(+), 7 deletions(-)
diff --git a/tools/run_tests/interop_html_report.template b/tools/run_tests/interop_html_report.template
index 1ba2e6cfc2..c01bdf7a77 100644
--- a/tools/run_tests/interop_html_report.template
+++ b/tools/run_tests/interop_html_report.template
@@ -40,6 +40,26 @@
% endif
</%def>
+<%def name="fill_one_http2_test_result(shortname, resultset)">
+ ## keep this mostly in sync with the template above
+ % if shortname in resultset:
+ ## Because interop tests do not have a runs_per_test flag, each test is
+ ## run only once, so there should be exactly one element per result.
+ <% result = resultset[shortname][0] %>
+ <td bgcolor="white">
+ <div style="width:95%; border: 1px solid black; position: relative; padding: 3px;">
+ <span style="position: absolute; left: 45%;">${int(result.http2results['percent'] * 100)}&#37;</span>
+ <div style="height: 20px;
+ background-color: hsl(${result.http2results['percent'] * 120}, 100%, 50%);
+ width: ${result.http2results['percent'] * 100}%;"
+ title="${result.http2results['failed_cases'] | h}"></div>
+ </div>
+ </td>
+ % else:
+ <td bgcolor="magenta">Not implemented</td>
+ % endif
+</%def>
+
% if num_failures > 1:
<p><h2><font color="red">${num_failures} tests failed!</font></h2></p>
% elif num_failures:
@@ -95,11 +115,11 @@
shortname = 'cloud_to_cloud:http2:%s_server:%s' % (
server_lang, test_case)
%>
- ${fill_one_test_result(shortname, resultset)}
+ ${fill_one_http2_test_result(shortname, resultset)}
% endfor
% if cloud_to_prod:
<% shortname = 'cloud_to_prod:http2:%s' % test_case %>
- ${fill_one_test_result(shortname, resultset)}
+ ${fill_one_http2_test_result(shortname, resultset)}
% endif
</tr>
% endfor
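
The new fill_one_http2_test_result def renders each HTTP/2 pass rate as an inline bar: the hsl hue runs from 0 (red) at a 0% pass rate to 120 (green) at 100%, and the bar width tracks the percentage. Below is a minimal Python sketch of the same arithmetic; the helper name and sample value are illustrative, not part of the patch:

    def http2_cell_style(percent):
        """Mirror the template's math for one result cell."""
        hue = percent * 120            # hsl hue: 0 = red, 120 = green
        width = percent * 100          # bar width as a CSS percentage
        label = '%d%%' % int(percent * 100)
        return {'hue': hue, 'width': width, 'label': label}

    # A run where 3 of 4 non-skipped cases passed:
    print(http2_cell_style(0.75))      # hue 90.0, width 75.0, label '75%'
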
diff --git a/tools/run_tests/report_utils.py b/tools/run_tests/report_utils.py
index 12b1972f1a..35f2069bee 100644
--- a/tools/run_tests/report_utils.py
+++ b/tools/run_tests/report_utils.py
@@ -32,6 +32,7 @@
try:
from mako.runtime import Context
from mako.template import Template
+ from mako import exceptions
except (ImportError):
pass # Mako not installed but it is ok.
import os
@@ -103,9 +104,15 @@ def render_interop_html_report(
'num_failures': num_failures,
'cloud_to_prod': cloud_to_prod,
'http2_interop': http2_interop}
+
html_report_out_dir = 'reports'
if not os.path.exists(html_report_out_dir):
os.mkdir(html_report_out_dir)
html_file_path = os.path.join(html_report_out_dir, 'index.html')
- with open(html_file_path, 'w') as output_file:
- mytemplate.render_context(Context(output_file, **args))
+ try:
+ with open(html_file_path, 'w') as output_file:
+ mytemplate.render_context(Context(output_file, **args))
+ except:
+ print(exceptions.text_error_template().render())
+ raise
+
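
The report_utils.py change wraps the render call so that Mako errors are printed through exceptions.text_error_template(), which includes the failing template source line, before the exception is re-raised. A standalone sketch of that pattern; the deliberately broken template below is invented for illustration:

    from mako.template import Template
    from mako import exceptions

    try:
        # The expression raises at render time, standing in for a real template bug.
        Template('pass rate: ${passed / total}').render(passed=3, total=0)
    except Exception:
        # Formats the traceback together with the offending template line.
        print(exceptions.text_error_template().render())
        raise  # the patch re-raises so the failure still fails the run
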
diff --git a/tools/run_tests/run_interop_tests.py b/tools/run_tests/run_interop_tests.py
index 763ff5615c..7a09feb70d 100755
--- a/tools/run_tests/run_interop_tests.py
+++ b/tools/run_tests/run_interop_tests.py
@@ -31,17 +31,24 @@
"""Run interop (cross-language) tests in parallel."""
import argparse
+import atexit
import dockerjob
import itertools
import jobset
+import json
import multiprocessing
import os
+import re
import report_utils
+import subprocess
import sys
import tempfile
import time
import uuid
+# Docker can leave the terminal with echo disabled, so restore it on exit.
+atexit.register(lambda: subprocess.call(['stty', 'echo']))
+
ROOT = os.path.abspath(os.path.join(os.path.dirname(sys.argv[0]), '../..'))
os.chdir(ROOT)
@@ -549,6 +556,33 @@ def build_interop_image_jobspec(language, tag=None):
return build_job
+def aggregate_http2_results(stdout):
+ match = re.search(r'\{"cases[^\]]*\]\}', stdout)
+ if not match:
+ return None
+
+ results = json.loads(match.group(0))
+ skipped = 0
+ passed = 0
+ failed = 0
+ failed_cases = []
+ for case in results['cases']:
+ if case.get('skipped', False):
+ skipped += 1
+ else:
+ if case.get('passed', False):
+ passed += 1
+ else:
+ failed += 1
+ failed_cases.append(case.get('name', "NONAME"))
+ return {
+ 'passed': passed,
+ 'failed': failed,
+ 'skipped': skipped,
+ 'failed_cases': ', '.join(failed_cases),
+ 'percent': 1.0 * passed / (passed + failed)
+ }
+
argp = argparse.ArgumentParser(description='Run interop tests.')
argp.add_argument('-l', '--language',
choices=['all'] + sorted(_LANGUAGES),
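
aggregate_http2_results() pulls the first {"cases": [...]} JSON object out of a job's stdout with a regex and reduces it to pass/fail/skip counts plus the pass fraction used by the HTML bar. A rough illustration of the input shape it expects; the stdout text and case names below are invented, the real output comes from the http2 interop client:

    import json
    import re

    # Invented example of stdout from an http2 interop test job.
    stdout = ('starting test run... '
              '{"cases": [{"name": "goaway", "passed": true}, '
              '{"name": "rst_after_data", "passed": false}, '
              '{"name": "ping", "skipped": true}]} done.')

    match = re.search(r'\{"cases[^\]]*\]\}', stdout)   # same regex as the patch
    cases = json.loads(match.group(0))['cases']
    # aggregate_http2_results() would report passed=1, failed=1, skipped=1,
    # percent=0.5 and failed_cases='rst_after_data' for this output.
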
@@ -676,9 +710,7 @@ try:
docker_image=docker_images.get(str(language)))
jobs.append(test_job)
- # TODO(carl-mastrangelo): Currently prod TLS terminators aren't spec compliant. Reenable
- # this once a better solution is in place.
- if args.http2_interop and False:
+ if args.http2_interop:
for test_case in _HTTP2_TEST_CASES:
test_job = cloud_to_prod_jobspec(http2Interop, test_case,
docker_image=docker_images.get(str(http2Interop)))
@@ -745,6 +777,10 @@ try:
report_utils.render_junit_xml_report(resultset, 'report.xml')
+ for name, job in resultset.iteritems():
+ if "http2" in name:
+ job[0].http2results = aggregate_http2_results(job[0].message)
+
report_utils.render_interop_html_report(
set([str(l) for l in languages]), servers, _TEST_CASES, _AUTH_TEST_CASES,
_HTTP2_TEST_CASES, resultset, num_failures,
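
The last hunk hangs the aggregated dict off the job result object, so the Mako def above can read result.http2results['percent'] and result.http2results['failed_cases'] when filling a cell. A rough picture of the shape the template ends up seeing; the stand-in class and shortname are simplifications, the real objects come from jobset.py:

    class FakeJobResult(object):
        """Stand-in for the jobset result object stored in resultset."""
        pass

    job = FakeJobResult()
    job.http2results = {
        'passed': 5, 'failed': 1, 'skipped': 0,
        'failed_cases': 'rst_after_data',
        'percent': 5.0 / 6,
    }
    # fill_one_http2_test_result renders this as a bar about 83% wide with an
    # hsl hue near 100 (yellow-green), titled with the failed case names.
    resultset = {'cloud_to_cloud:http2:go_server:rst_after_data': [job]}
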