Diffstat (limited to 'tools/profiling/latency_profile/run_latency_profile.sh')
-rwxr-xr-x  tools/profiling/latency_profile/run_latency_profile.sh  |  95
1 file changed, 59 insertions(+), 36 deletions(-)
diff --git a/tools/profiling/latency_profile/run_latency_profile.sh b/tools/profiling/latency_profile/run_latency_profile.sh
index 54a25a9cb7..618db202dc 100755
--- a/tools/profiling/latency_profile/run_latency_profile.sh
+++ b/tools/profiling/latency_profile/run_latency_profile.sh
@@ -1,5 +1,5 @@
#!/bin/bash
-# Copyright 2015, Google Inc.
+# Copyright 2016, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
@@ -28,17 +28,61 @@
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+# format argument via
+# $ echo '{...}' | python -mjson.tool
+read -r -d '' SCENARIOS_JSON_ARG <<'EOF'
+{
+ "scenarios": [
+ {
+ "benchmark_seconds": 5,
+ "client_config": {
+ "client_channels": 1,
+ "client_type": "SYNC_CLIENT",
+ "histogram_params": {
+ "max_possible": 60000000000.0,
+ "resolution": 0.01
+ },
+ "load_params": {
+ "closed_loop": {}
+ },
+ "outstanding_rpcs_per_channel": 1,
+ "payload_config": {
+ "simple_params": {
+ "req_size": 0,
+ "resp_size": 0
+ }
+ },
+ "rpc_type": "UNARY",
+ "security_params": {
+ "server_host_override": "foo.test.google.fr",
+ "use_test_ca": true
+ }
+ },
+ "name": "cpp_protobuf_sync_unary_ping_pong_secure",
+ "num_clients": 1,
+ "num_servers": 1,
+ "server_config": {
+ "core_limit": 1,
+ "security_params": {
+ "server_host_override": "foo.test.google.fr",
+ "use_test_ca": true
+ },
+ "server_type": "SYNC_SERVER"
+ },
+ "spawn_local_worker_count": 2,
+ "warmup_seconds": 5
+ }
+ ]
+}
+
+EOF
+
set -ex
cd $(dirname $0)/../../..
-BINS="sync_unary_ping_pong_test sync_streaming_ping_pong_test"
CPUS=`python -c 'import multiprocessing; print multiprocessing.cpu_count()'`
-make CONFIG=basicprof -j$CPUS $BINS
-
-mkdir -p reports
-
# try to use pypy for generating reports
# each trace dumps 7-8gig of text to disk, and processing this into a report is
# heavyweight - so any speed boost is worthwhile
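
(The pypy-or-python2.7 selection that the comment above refers to lies between these two hunks and is not shown in the diff; only its `else` branch appears as context below. A minimal sketch of such a check, offered as an assumption rather than the committed lines:)

    # Assumed sketch, not part of this diff: prefer pypy when it is on the PATH,
    # since the multi-gigabyte traces make report generation CPU-bound.
    if which pypy >/dev/null
    then
      PYTHON=pypy
    else
      PYTHON=python2.7
    fi
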
@@ -49,35 +93,14 @@ else
PYTHON=python2.7
fi
-# start processes, interleaving report index generation
-echo '<html><head></head><body>' > reports/index.html
-for bin in $BINS
-do
- bins/basicprof/$bin
- mv latency_trace.txt $bin.trace
- echo "<a href='$bin.txt'>$bin</a><br/>" >> reports/index.html
-done
-pids=""
-# generate report pages... this will take some time
-# run them in parallel: they take 1 cpu each
-for bin in $BINS
-do
- $PYTHON tools/profiling/latency_profile/profile_analyzer.py \
- --source=$bin.trace --fmt=simple > reports/$bin.txt &
- pids+=" $!"
-done
-echo '</body></html>' >> reports/index.html
+make CONFIG=basicprof -j$CPUS qps_json_driver
-# make sure we kill the report generation if something goes wrong
-trap "kill $pids || true" 0
+mkdir -p reports
+bins/basicprof/qps_json_driver --scenarios_json="$SCENARIOS_JSON_ARG"
-# finally, wait for the background report generation to finish
-for pid in $pids
-do
- if wait $pid
- then
- echo "Finished $pid"
- else
- exit 1
- fi
-done
+echo '<html><head></head><body>Latency profile for:<br/>' > reports/index.html
+echo "<p><pre>${SCENARIOS_JSON_ARG}</pre></p>" >> reports/index.html
+echo '<p><pre>' >> reports/index.html
+$PYTHON tools/profiling/latency_profile/profile_analyzer.py \
+ --source=latency_trace.txt --fmt=simple >> reports/index.html
+echo '</pre></p></body></html>' >> reports/index.html
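
Taken together, the rewritten script drops the per-binary ping-pong tests in favor of a single qps_json_driver run: it builds the driver under the basicprof config, feeds it the SCENARIOS_JSON_ARG heredoc, and folds the resulting latency_trace.txt into one reports/index.html. A condensed sketch of the equivalent manual steps, run from the repository root; scenario.json is a hypothetical file holding the same JSON, and nproc stands in for the script's python cpu_count() call:

    # Sketch only, assuming the paths and flags shown in the diff above.
    python -mjson.tool < scenario.json                  # pretty-print/validate the scenarios JSON
    make CONFIG=basicprof -j"$(nproc)" qps_json_driver  # build the driver with profiling enabled
    mkdir -p reports
    bins/basicprof/qps_json_driver --scenarios_json="$(cat scenario.json)"
    # The basicprof build leaves latency_trace.txt in the working directory;
    # profile_analyzer.py renders it into a readable report (the script itself
    # appends this output to reports/index.html; trace_summary.txt is a
    # hypothetical stand-in name).
    python tools/profiling/latency_profile/profile_analyzer.py \
        --source=latency_trace.txt --fmt=simple > reports/trace_summary.txt
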