 tools/jenkins/run_performance_profile_daily.sh  | 3 ---
 tools/jenkins/run_performance_profile_hourly.sh | 5 +++--
 2 files changed, 3 insertions(+), 5 deletions(-)
diff --git a/tools/jenkins/run_performance_profile_daily.sh b/tools/jenkins/run_performance_profile_daily.sh
index 39c3f2c996..f239fad188 100755
--- a/tools/jenkins/run_performance_profile_daily.sh
+++ b/tools/jenkins/run_performance_profile_daily.sh
@@ -32,8 +32,6 @@ set -ex
 cd $(dirname $0)/../..
-CPUS=`python -c 'import multiprocessing; print multiprocessing.cpu_count()'`
-
 # try to use pypy for generating reports
 # each trace dumps 7-8gig of text to disk, and processing this into a report is
 # heavyweight - so any speed boost is worthwhile
@@ -45,4 +43,3 @@ else
 fi
 $PYTHON tools/run_tests/run_microbenchmark.py --collect summary perf latency
-
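The daily script's surviving logic (the `else ... fi` visible in the second hunk) selects an interpreter for report generation: as the comment above notes, each trace dumps 7-8 gig of text, so any speedup from pypy is worthwhile. A minimal sketch of that kind of fallback, assuming a `command -v` probe; the actual condition lies outside this hunk and is not shown here:

    # Hypothetical sketch: prefer pypy for the report pass, fall back to CPython.
    # Only the `else ... fi` of the real block appears in the hunk above.
    if command -v pypy >/dev/null 2>&1; then
      PYTHON=pypy      # faster at chewing through the multi-gigabyte traces
    else
      PYTHON=python    # CPython fallback; the surrounding scripts use Python 2 syntax
    fi
    $PYTHON tools/run_tests/run_microbenchmark.py --collect summary perf latency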
diff --git a/tools/jenkins/run_performance_profile_hourly.sh b/tools/jenkins/run_performance_profile_hourly.sh
index 1d5930eb41..dfcc2bb116 100755
--- a/tools/jenkins/run_performance_profile_hourly.sh
+++ b/tools/jenkins/run_performance_profile_hourly.sh
@@ -32,9 +32,10 @@ set -ex
 cd $(dirname $0)/../..
-make CONFIG=opt memory_profile_test memory_profile_client memory_profile_server
+CPUS=`python -c 'import multiprocessing; print multiprocessing.cpu_count()'`
+
+make CONFIG=opt memory_profile_test memory_profile_client memory_profile_server -j $CPUS
 bins/opt/memory_profile_test
 bq load microbenchmarks.memory memory_usage.csv
 tools/run_tests/run_microbenchmark.py --collect summary --bigquery_upload
-
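Net effect of the change: the CPU-count detection moves from the daily script (which no longer uses it) into the hourly script, where it drives a parallel build through make's -j flag. A self-contained sketch of the same technique, with a hypothetical single-job fallback added for illustration if the probe fails:

    #!/bin/bash
    set -ex

    # Count cores with Python's multiprocessing module (Python 2 print syntax,
    # matching the diff above); the `|| CPUS=1` fallback is an added assumption.
    CPUS=$(python -c 'import multiprocessing; print multiprocessing.cpu_count()') || CPUS=1

    # One make job per core for the memory-profiling binaries.
    make CONFIG=opt memory_profile_test memory_profile_client memory_profile_server -j "$CPUS"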