diff options
-rwxr-xr-x | tools/gke/run_stress_tests_on_gke.py | 3
-rwxr-xr-x | tools/run_tests/stress_test/stress_test_utils.py | 12
2 files changed, 9 insertions, 6 deletions
diff --git a/tools/gke/run_stress_tests_on_gke.py b/tools/gke/run_stress_tests_on_gke.py index 9a8a33cac5..065b11e91c 100755 --- a/tools/gke/run_stress_tests_on_gke.py +++ b/tools/gke/run_stress_tests_on_gke.py @@ -403,6 +403,7 @@ def run_test(skip_building_image, gcp_project_id, image_name, tag_name, break # Things seem to be running fine. Wait until next poll time to check the # status + print 'Sleeping for %d seconds..' % poll_interval_secs time.sleep(poll_interval_secs) # Print BiqQuery tables @@ -418,7 +419,7 @@ if __name__ == '__main__': gcp_project_id = 'sree-gce' tag_name = 'gcr.io/%s/%s' % (gcp_project_id, image_name) num_client_instances = 3 - poll_interval_secs = 5, + poll_interval_secs = 10 test_duration_secs = 150 run_test(True, gcp_project_id, image_name, tag_name, num_client_instances, poll_interval_secs, test_duration_secs) diff --git a/tools/run_tests/stress_test/stress_test_utils.py b/tools/run_tests/stress_test/stress_test_utils.py index 71f0dcd921..7adc0068f9 100755 --- a/tools/run_tests/stress_test/stress_test_utils.py +++ b/tools/run_tests/stress_test/stress_test_utils.py @@ -81,10 +81,9 @@ class BigQueryHelper: 'event_type': event_type, 'details': details } - # Something that uniquely identifies the row (Biquery needs it for duplicate - # detection). + # row_unique_id is something that uniquely identifies the row (BigQuery uses + # it for duplicate detection). row_unique_id = '%s_%s_%s' % (self.run_id, self.pod_name, event_type) - row = bq_utils.make_row(row_unique_id, row_values_dict) return bq_utils.insert_rows(self.bq, self.project_id, self.dataset_id, self.summary_table_id, [row]) @@ -97,6 +96,8 @@ class BigQueryHelper: 'qps': qps } + # row_unique_id is something that uniquely identifies the row (BigQuery uses + # it for duplicate detection). 
row_unique_id = '%s_%s_%s' % (self.run_id, self.pod_name, recorded_at) row = bq_utils.make_row(row_unique_id, row_values_dict) return bq_utils.insert_rows(self.bq, self.project_id, self.dataset_id, @@ -109,7 +110,6 @@ class BigQueryHelper: query_job = bq_utils.sync_query_job(self.bq, self.project_id, query) page = self.bq.jobs().getQueryResults(**query_job['jobReference']).execute( num_retries=num_query_retries) - print page num_failures = int(page['totalRows']) print 'num rows: ', num_failures return num_failures > 0 @@ -118,7 +118,8 @@ class BigQueryHelper: line = '-' * 120 print line print 'Summary records' - print 'Run Id', self.run_id + print 'Run Id: ', self.run_id + print 'Dataset Id: ', self.dataset_id print line query = ('SELECT pod_name, image_type, event_type, event_date, details' ' FROM %s.%s WHERE run_id = \'%s\' ORDER by event_date;') % ( @@ -147,6 +148,7 @@ class BigQueryHelper: print line print 'QPS Summary' print 'Run Id: ', self.run_id + print 'Dataset Id: ', self.dataset_id print line query = ( 'SELECT pod_name, recorded_at, qps FROM %s.%s WHERE run_id = \'%s\' '