author    | ncteisen <ncteisen@gmail.com> | 2017-12-11 18:09:31 -0800
committer | ncteisen <ncteisen@gmail.com> | 2017-12-11 18:10:00 -0800
commit    | 5f8bf79bbf4915b928f75c83c66592b1fa97657e (patch)
tree      | 38753ccc2a25774e87c27cd0af185e3d7c8cbe0a /tools/github
parent    | 888093c6ed0d24eed699173b5fb35235fe7a6069 (diff)
yapf tools
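This commit applies yapf auto-formatting to the Python tooling under tools/ (here, tools/github/pr_latency.py): indentation, argument wrapping, and line breaks change, but no behavior does. As a rough illustration of how such a pass can be reproduced, the sketch below uses yapf's Python API; it is an assumption for illustration, not the command actually used for this commit.

# Minimal sketch, assuming yapf is installed. When no style is passed
# explicitly, yapf picks up any project style file (.style.yapf or setup.cfg)
# it finds above the target file.
from yapf.yapflib.yapf_api import FormatFile

# Rewrite the file in place; `changed` reports whether anything was modified.
_, _, changed = FormatFile('tools/github/pr_latency.py', in_place=True)
print('reformatted' if changed else 'already formatted')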
Diffstat (limited to 'tools/github')
-rw-r--r-- | tools/github/pr_latency.py | 211
1 file changed, 124 insertions, 87 deletions
diff --git a/tools/github/pr_latency.py b/tools/github/pr_latency.py
index 5d635835e5..0131e60bbc 100644
--- a/tools/github/pr_latency.py
+++ b/tools/github/pr_latency.py
@@ -12,7 +12,6 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
 """Measure the time between PR creation and completion of all tests.

 You'll need a github API token to avoid being rate-limited. See
@@ -46,118 +45,156 @@ COMMITS = 'https://api.github.com/repos/grpc/grpc/pulls/{pr_number}/commits'


 def gh(url):
-  request = urllib2.Request(url)
-  if TOKEN:
-    request.add_header('Authorization', 'token {}'.format(TOKEN))
-  response = urllib2.urlopen(request)
-  return response.read()
+    request = urllib2.Request(url)
+    if TOKEN:
+        request.add_header('Authorization', 'token {}'.format(TOKEN))
+    response = urllib2.urlopen(request)
+    return response.read()


 def print_csv_header():
-  print('pr,base_time,test_time,latency_seconds,successes,failures,errors')
-
-
-def output(pr, base_time, test_time, diff_time, successes, failures, errors, mode='human'):
-  if mode == 'human':
-    print("PR #{} base time: {} UTC, Tests completed at: {} UTC. Latency: {}."
-          "\n\tSuccesses: {}, Failures: {}, Errors: {}".format(
-              pr, base_time, test_time, diff_time, successes, failures, errors))
-  elif mode == 'csv':
-    print(','.join([str(pr), str(base_time),
-                    str(test_time), str(int((test_time-base_time).total_seconds())),
-                    str(successes), str(failures), str(errors)]))
+    print('pr,base_time,test_time,latency_seconds,successes,failures,errors')
+
+
+def output(pr,
+           base_time,
+           test_time,
+           diff_time,
+           successes,
+           failures,
+           errors,
+           mode='human'):
+    if mode == 'human':
+        print(
+            "PR #{} base time: {} UTC, Tests completed at: {} UTC. Latency: {}."
+            "\n\tSuccesses: {}, Failures: {}, Errors: {}".format(
+                pr, base_time, test_time, diff_time, successes, failures,
+                errors))
+    elif mode == 'csv':
+        print(','.join([
+            str(pr), str(base_time), str(test_time), str(
+                int((test_time - base_time).total_seconds())), str(successes),
+            str(failures), str(errors)
+        ]))


 def parse_timestamp(datetime_str):
-  return datetime.strptime(datetime_str, '%Y-%m-%dT%H:%M:%SZ')
+    return datetime.strptime(datetime_str, '%Y-%m-%dT%H:%M:%SZ')


 def to_posix_timestamp(dt):
-  return str((dt - datetime(1970, 1, 1)).total_seconds())
+    return str((dt - datetime(1970, 1, 1)).total_seconds())


 def get_pr_data():
-  latest_prs = json.loads(gh(PRS))
-  res = [{'number': pr['number'],
-          'created_at': parse_timestamp(pr['created_at']),
-          'updated_at': parse_timestamp(pr['updated_at']),
-          'statuses_url': pr['statuses_url']}
-         for pr in latest_prs]
-  return res
+    latest_prs = json.loads(gh(PRS))
+    res = [{
+        'number': pr['number'],
+        'created_at': parse_timestamp(pr['created_at']),
+        'updated_at': parse_timestamp(pr['updated_at']),
+        'statuses_url': pr['statuses_url']
+    } for pr in latest_prs]
+    return res


 def get_commits_data(pr_number):
-  commits = json.loads(gh(COMMITS.format(pr_number=pr_number)))
-  return {'num_commits': len(commits),
-          'most_recent_date': parse_timestamp(commits[-1]['commit']['author']['date'])}
+    commits = json.loads(gh(COMMITS.format(pr_number=pr_number)))
+    return {
+        'num_commits': len(commits),
+        'most_recent_date':
+        parse_timestamp(commits[-1]['commit']['author']['date'])
+    }


 def get_status_data(statuses_url, system):
-  status_url = statuses_url.replace('statuses', 'status')
-  statuses = json.loads(gh(status_url + '?per_page=100'))
-  successes = 0
-  failures = 0
-  errors = 0
-  latest_datetime = None
-  if not statuses: return None
-  if system == 'kokoro': string_in_target_url = 'kokoro'
-  elif system == 'jenkins': string_in_target_url = 'grpc-testing'
-  for status in statuses['statuses']:
-    if not status['target_url'] or string_in_target_url not in status['target_url']: continue # Ignore jenkins
-    if status['state'] == 'pending': return None
-    elif status['state'] == 'success': successes += 1
-    elif status['state'] == 'failure': failures += 1
-    elif status['state'] == 'error': errors += 1
-    if not latest_datetime:
-      latest_datetime = parse_timestamp(status['updated_at'])
-    else:
-      latest_datetime = max(latest_datetime, parse_timestamp(status['updated_at']))
-  # First status is the most recent one.
-  if any([successes, failures, errors]) and sum([successes, failures, errors]) > 15:
-    return {'latest_datetime': latest_datetime,
+    status_url = statuses_url.replace('statuses', 'status')
+    statuses = json.loads(gh(status_url + '?per_page=100'))
+    successes = 0
+    failures = 0
+    errors = 0
+    latest_datetime = None
+    if not statuses: return None
+    if system == 'kokoro': string_in_target_url = 'kokoro'
+    elif system == 'jenkins': string_in_target_url = 'grpc-testing'
+    for status in statuses['statuses']:
+        if not status['target_url'] or string_in_target_url not in status[
+                'target_url']:
+            continue  # Ignore jenkins
+        if status['state'] == 'pending': return None
+        elif status['state'] == 'success': successes += 1
+        elif status['state'] == 'failure': failures += 1
+        elif status['state'] == 'error': errors += 1
+        if not latest_datetime:
+            latest_datetime = parse_timestamp(status['updated_at'])
+        else:
+            latest_datetime = max(latest_datetime,
+                                  parse_timestamp(status['updated_at']))
+    # First status is the most recent one.
+    if any([successes, failures, errors]) and sum(
+        [successes, failures, errors]) > 15:
+        return {
+            'latest_datetime': latest_datetime,
             'successes': successes,
             'failures': failures,
-            'errors': errors}
-  else: return None
+            'errors': errors
+        }
+    else:
+        return None


 def build_args_parser():
-  import argparse
-  parser = argparse.ArgumentParser()
-  parser.add_argument('--format', type=str, choices=['human', 'csv'],
-                      default='human',
-                      help='Output format: are you a human or a machine?')
-  parser.add_argument('--system', type=str, choices=['jenkins', 'kokoro'],
-                      required=True, help='Consider only the given CI system')
-  parser.add_argument('--token', type=str, default='',
-                      help='GitHub token to use its API with a higher rate limit')
-  return parser
+    import argparse
+    parser = argparse.ArgumentParser()
+    parser.add_argument(
+        '--format',
+        type=str,
+        choices=['human', 'csv'],
+        default='human',
+        help='Output format: are you a human or a machine?')
+    parser.add_argument(
+        '--system',
+        type=str,
+        choices=['jenkins', 'kokoro'],
+        required=True,
+        help='Consider only the given CI system')
+    parser.add_argument(
+        '--token',
+        type=str,
+        default='',
+        help='GitHub token to use its API with a higher rate limit')
+    return parser


 def main():
-  import sys
-  global TOKEN
-  args_parser = build_args_parser()
-  args = args_parser.parse_args()
-  TOKEN = args.token
-  if args.format == 'csv': print_csv_header()
-  for pr_data in get_pr_data():
-    commit_data = get_commits_data(pr_data['number'])
-    # PR with a single commit -> use the PRs creation time.
-    # else -> use the latest commit's date.
-    base_timestamp = pr_data['updated_at']
-    if commit_data['num_commits'] > 1:
-      base_timestamp = commit_data['most_recent_date']
-    else:
-      base_timestamp = pr_data['created_at']
-    last_status = get_status_data(pr_data['statuses_url'], args.system)
-    if last_status:
-      diff = last_status['latest_datetime'] - base_timestamp
-      if diff < timedelta(hours=5):
-        output(pr_data['number'], base_timestamp, last_status['latest_datetime'],
-               diff, last_status['successes'], last_status['failures'],
-               last_status['errors'], mode=args.format)
+    import sys
+    global TOKEN
+    args_parser = build_args_parser()
+    args = args_parser.parse_args()
+    TOKEN = args.token
+    if args.format == 'csv': print_csv_header()
+    for pr_data in get_pr_data():
+        commit_data = get_commits_data(pr_data['number'])
+        # PR with a single commit -> use the PRs creation time.
+        # else -> use the latest commit's date.
+        base_timestamp = pr_data['updated_at']
+        if commit_data['num_commits'] > 1:
+            base_timestamp = commit_data['most_recent_date']
+        else:
+            base_timestamp = pr_data['created_at']
+        last_status = get_status_data(pr_data['statuses_url'], args.system)
+        if last_status:
+            diff = last_status['latest_datetime'] - base_timestamp
+            if diff < timedelta(hours=5):
+                output(
+                    pr_data['number'],
+                    base_timestamp,
+                    last_status['latest_datetime'],
+                    diff,
+                    last_status['successes'],
+                    last_status['failures'],
+                    last_status['errors'],
+                    mode=args.format)


 if __name__ == '__main__':
-  main()
+    main()
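For reference, the reformatted script is driven by the flags defined in build_args_parser() above, e.g. python tools/github/pr_latency.py --system=kokoro --format=csv --token=<github-token> (the token value is a placeholder). In CSV mode it prints the header pr,base_time,test_time,latency_seconds,successes,failures,errors followed by one row per PR. Below is a minimal sketch of consuming that output; it is not part of this commit, and the file name latencies.csv is a placeholder.

# Sketch only: summarize latency_seconds from the script's --format=csv output.
import csv

with open('latencies.csv') as f:
    rows = list(csv.DictReader(f))

latencies = [int(row['latency_seconds']) for row in rows]
if latencies:
    print('PRs measured: {}'.format(len(latencies)))
    print('mean latency (s): {:.1f}'.format(
        sum(latencies) / float(len(latencies))))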