author     bensong <bensong@google.com>    2014-07-08 14:36:56 -0700
committer  Commit bot <commit-bot@chromium.org>    2014-07-08 14:36:56 -0700
commit     c15ba1d5447d113db2f16ca95e13107603e5d75e (patch)
tree       a8341b6ba3b1402bb1169015075aefe600640077 /experimental/benchtools
parent     d3f3e5895efb5edfd838ba9127093c804f9dbc12 (diff)
bench rebase tool: add ability to specify a second CL for fluctuating data adjustment.
R=borenet@google.com
TBR=borenet@google.com
BUG=skia:2657
NOTRY=true

Author: bensong@google.com

Review URL: https://codereview.chromium.org/370413002
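The adjustment recenters each matching expectation row on the average of the two CLs' expected values while keeping the original margins on each side, as the calc_expectations change below shows. A minimal standalone sketch of that arithmetic; the function name and sample numbers are illustrative, not part of rebase.py:

def adjust_range(exp, lb, ub, alt):
  """Recenter on the average of two expected values, keeping the margins.

  exp, lb, ub: expected value and bounds computed from the primary CL's data.
  alt: expected value computed from the second (extra) CL's data.
  """
  avg = (exp + alt) / 2.0
  new_lb = min(exp, alt) - (exp - lb)  # preserve the original lower margin
  new_ub = max(exp, alt) + (ub - exp)  # preserve the original upper margin
  return avg, new_lb, new_ub

# Example: the primary run expects 10.0 in [9.5, 10.8]; the extra CL measured
# 12.0, so the row is rewritten as 11.0 in [9.5, 12.8].
print(adjust_range(10.0, 9.5, 10.8, 12.0))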
Diffstat (limited to 'experimental/benchtools')
-rwxr-xr-x  experimental/benchtools/rebase.py   116
1 file changed, 98 insertions(+), 18 deletions(-)
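As the diff below shows, the new get_expectations_dict helper accepts only non-comment rows with exactly five comma-separated fields, keys each row by its first two fields, and maps it to an (expected, lower_bound, upper_bound) float tuple. A rough standalone sketch of that parsing; the sample rows are invented, only their five-field shape matches what the real expectations files contain:

# Hypothetical rows; the real files come from gen_bench_expectations.py.
sample_lines = [
    '#comment line, skipped',
    'desk_css.skp_gpu,Perf-Sample-Bot,10.52,9.81,11.33',  # made-up data row
    'malformed,row',                                      # wrong field count
]

dic = {}
for l in sample_lines:
  parts = l.strip().split(',')
  if parts[0].startswith('#') or len(parts) != 5:
    continue
  dic[','.join(parts[:2])] = (float(parts[2]), float(parts[3]), float(parts[4]))

print(dic)  # {'desk_css.skp_gpu,Perf-Sample-Bot': (10.52, 9.81, 11.33)}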
diff --git a/experimental/benchtools/rebase.py b/experimental/benchtools/rebase.py
index 85290d7975..f542454ccf 100755
--- a/experimental/benchtools/rebase.py
+++ b/experimental/benchtools/rebase.py
@@ -32,9 +32,6 @@ SKIA_GIT_HEAD_URL = 'https://skia.googlesource.com/skia/+log/HEAD'
 # Google Storage bench file prefix.
 GS_PREFIX = 'gs://chromium-skia-gm/perfdata'
 
-# List of Perf platforms we want to process. Populate from expectations/bench.
-PLATFORMS = []
-
 # Regular expression for matching githash data.
 HA_RE = '<a href="/skia/\+/([0-9a-f]+)">'
 HA_RE_COMPILED = re.compile(HA_RE)
@@ -88,17 +85,61 @@ def download_gs_files(p, h, gs_dir):
     return True
   return False
 
-def calc_expectations(p, h, gs_dir, exp_dir, repo_dir):
+def get_expectations_dict(f):
+  """Given an expectations file f, returns a dictionary of data."""
+  # maps row_key to (expected, lower_bound, upper_bound) float tuple.
+  dic = {}
+  for l in open(f).readlines():
+    line_parts = l.strip().split(',')
+    if line_parts[0].startswith('#') or len(line_parts) != 5:
+      continue
+    dic[','.join(line_parts[:2])] = (float(line_parts[2]), float(line_parts[3]),
+                                     float(line_parts[4]))
+
+  return dic
+
+def calc_expectations(p, h, gs_dir, exp_dir, repo_dir, extra_dir, extra_hash):
   exp_filename = 'bench_expectations_%s.txt' % p
+  exp_fullname = os.path.join(exp_dir, exp_filename)
   proc = subprocess.Popen(['python', 'skia/bench/gen_bench_expectations.py',
-      '-r', h, '-b', p, '-d', os.path.join(gs_dir, p), '-o',
-      os.path.join(exp_dir, exp_filename)],
+      '-r', h, '-b', p, '-d', os.path.join(gs_dir, p), '-o', exp_fullname],
       stdout=subprocess.PIPE)
   out, err = proc.communicate()
   if err:
     print 'ERR_CALCULATING_EXPECTATIONS: ' + err
     return False
   print 'CALCULATED_EXPECTATIONS: ' + out
+  if extra_dir:  # Adjust data with the ones in extra_dir
+    print 'USE_EXTRA_DATA_FOR_ADJUSTMENT.'
+    proc = subprocess.Popen(['python', 'skia/bench/gen_bench_expectations.py',
+        '-r', extra_hash, '-b', p, '-d', os.path.join(extra_dir, p), '-o',
+        os.path.join(extra_dir, exp_filename)],
+        stdout=subprocess.PIPE)
+    out, err = proc.communicate()
+    if err:
+      print 'ERR_CALCULATING_EXTRA_EXPECTATIONS: ' + err
+      return False
+    extra_dic = get_expectations_dict(os.path.join(extra_dir, exp_filename))
+    output_lines = []
+    for l in open(exp_fullname).readlines():
+      parts = l.strip().split(',')
+      if parts[0].startswith('#') or len(parts) != 5:
+        output_lines.append(l.strip())
+        continue
+      key = ','.join(parts[:2])
+      if key in extra_dic:
+        exp, lb, ub = (float(parts[2]), float(parts[3]), float(parts[4]))
+        alt, _, _ = extra_dic[key]
+        avg = (exp + alt) / 2
+        # Keeps the extra range in lower/upper bounds from two actual values.
+        new_lb = min(exp, alt) - (exp - lb)
+        new_ub = max(exp, alt) + (ub - exp)
+        output_lines.append('%s,%.2f,%.2f,%.2f' % (key, avg, new_lb, new_ub))
+      else:
+        output_lines.append(l.strip())
+    with open(exp_fullname, 'w') as f:
+      f.write('\n'.join(output_lines))
+
   repo_file = os.path.join(repo_dir, 'expectations', 'bench', exp_filename)
   if (os.path.isfile(repo_file) and
       filecmp.cmp(repo_file, os.path.join(exp_dir, exp_filename))):
@@ -125,13 +166,16 @@ def checkout_or_update_skia(repo_dir):
   os.chdir(old_cwd)
   return status
 
-def git_commit_expectations(repo_dir, exp_dir, update_li, h, commit):
-  commit_msg = """manual bench rebase after %s
+def git_commit_expectations(repo_dir, exp_dir, update_li, h, commit,
+                            extra_hash):
+  if extra_hash:
+    extra_hash = ', adjusted with ' + extra_hash
+  commit_msg = """manual bench rebase after %s%s
 
 TBR=robertphillips@google.com
 
 Bypassing trybots:
-NOTRY=true""" % h
+NOTRY=true""" % (h, extra_hash)
   old_cwd = os.getcwd()
   os.chdir(repo_dir)
   upload = ['git', 'cl', 'upload', '-f', '--bypass-hooks',
@@ -175,7 +219,14 @@ def main():
     return
   parser = argparse.ArgumentParser()
   parser.add_argument('--githash',
-                      help='Githash prefix (7+ chars) to rebaseline to.')
+                      help=('Githash prefix (7+ chars) to rebaseline to. If '
+                            'a second one is supplied after comma, and it has '
+                            'corresponding bench data, will shift the range '
+                            'center to the average of two expected values.'))
+  parser.add_argument('--bots',
+                      help=('Comma-separated list of bots to work on. If no '
+                            'matching bots are found in the list, will default '
+                            'to processing all bots.'))
   parser.add_argument('--commit', action='store_true',
                       help='Whether to commit changes automatically.')
   args = parser.parse_args()
@@ -193,30 +244,54 @@ def main():
     print 'Updated this script from repo; please run again.'
     return
 
+  all_platforms = []  # Find existing list of platforms with expectations.
   for item in os.listdir(os.path.join(d, 'skia/expectations/bench')):
-    PLATFORMS.append(
+    all_platforms.append(
         item.replace('bench_expectations_', '').replace('.txt', ''))
 
+  platforms = []
+  # If at least one given bot is in all_platforms, use list of valid args.bots.
+  if args.bots:
+    bots = args.bots.strip().split(',')
+    for bot in bots:
+      if bot in all_platforms:  # Filters platforms with given bot list.
+        platforms.append(bot)
+  if not platforms:  # Include all existing platforms with expectations.
+    platforms = all_platforms
+
   if not args.githash or len(args.githash) < 7:
     raise Exception('Please provide --githash with a longer prefix (7+).')
+  githashes = args.githash.strip().split(',')
+  if len(githashes[0]) < 7:
+    raise Exception('Please provide --githash with longer prefixes (7+).')
   commit = False
   if args.commit:
     commit = True
-  rebase_hash = args.githash[:7]
+  rebase_hash = githashes[0][:7]
+  extra_hash = ''
+  if len(githashes) == 2:
+    extra_hash = githashes[1][:7]
   hashes = get_git_hashes()
   short_hashes = [h[:7] for h in hashes]
-  if rebase_hash not in short_hashes:
-    raise Exception('Provided --githash not found in recent history!')
+  if (rebase_hash not in short_hashes or
+      (extra_hash and extra_hash not in short_hashes) or
+      rebase_hash == extra_hash):
+    raise Exception('Provided --githashes not found, or identical!')
+  if extra_hash:
+    extra_hash = hashes[short_hashes.index(extra_hash)]
   hashes = hashes[:short_hashes.index(rebase_hash) + 1]
   update_li = []
 
   ts_str = '%s' % time.time()
   gs_dir = os.path.join(d, 'gs' + ts_str)
   exp_dir = os.path.join(d, 'exp' + ts_str)
+  extra_dir = os.path.join(d, 'extra' + ts_str)
   clean_dir(gs_dir)
   clean_dir(exp_dir)
-  for p in PLATFORMS:
+  clean_dir(extra_dir)
+  for p in platforms:
     clean_dir(os.path.join(gs_dir, p))
+    clean_dir(os.path.join(extra_dir, p))
     hash_to_use = ''
     for h in reversed(hashes):
       li = get_gs_filelist(p, h)
@@ -230,18 +305,23 @@ def main():
         print 'DOWNLOAD BENCH FAILED %s/%s' % (p, h)
         break
     if hash_to_use:
-      if calc_expectations(p, h, gs_dir, exp_dir, repo_dir):
+      if extra_hash and download_gs_files(p, extra_hash, extra_dir):
+        print 'Copied extra data %s/%s' % (p, extra_hash)
+        if calc_expectations(p, h, gs_dir, exp_dir, repo_dir, extra_dir,
+                             extra_hash):
+          update_li.append('bench_expectations_%s.txt' % p)
+      elif calc_expectations(p, h, gs_dir, exp_dir, repo_dir, '', ''):
         update_li.append('bench_expectations_%s.txt' % p)
 
   if not update_li:
     print 'No bench data to update after %s!' % args.githash
   elif not git_commit_expectations(
-      repo_dir, exp_dir, update_li, args.githash[:7], commit):
+      repo_dir, exp_dir, update_li, rebase_hash, commit, extra_hash):
     print 'ERROR uploading expectations using git.'
   elif not commit:
     print 'CL created. Please take a look at the link above.'
   else:
     print 'New bench baselines should be in CQ now.'
-  delete_dirs([gs_dir, exp_dir])
+  delete_dirs([gs_dir, exp_dir, extra_dir])
 
 if __name__ == "__main__":
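Going by the help strings added above, --githash may now carry a second hash after a comma (the extra CL whose data recenters the ranges), and --bots limits processing to platforms that already have expectations. A small sketch of how main() interprets the two flags; the hash prefixes and bot names below are placeholders, not real values:

# Placeholder inputs; mirrors the splitting and filtering added to main().
args_githash = 'aaaaaaa1111111,bbbbbbb2222222'  # hypothetical hash prefixes
args_bots = 'Perf-Example-Bot,Not-A-Known-Bot'  # hypothetical bot names

githashes = args_githash.strip().split(',')
rebase_hash = githashes[0][:7]                        # 'aaaaaaa'
extra_hash = githashes[1][:7] if len(githashes) == 2 else ''

all_platforms = ['Perf-Example-Bot']  # normally read from expectations/bench
platforms = [b for b in args_bots.strip().split(',') if b in all_platforms]
if not platforms:  # no matching bots given: fall back to every known platform
  platforms = all_platforms

print('%s %s %s' % (rebase_hash, extra_hash, platforms))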