#!/usr/bin/python

'''
Copyright 2013 Google Inc.

Use of this source code is governed by a BSD-style license that can be
found in the LICENSE file.
'''

'''
Gathers diffs between 2 JSON expectations files, or between actual and
expected results within a single JSON actual-results file,
and generates an old-vs-new diff dictionary.
'''

# System-level imports
import argparse
import json
import os
import sys
import urllib2

# Imports from within Skia
#
# We need to add the 'gm' directory, so that we can import gm_json.py within
# that directory. That script allows us to parse the actual-results.json file
# written out by the GM tool.
# Make sure that the 'gm' dir is in the PYTHONPATH, but add it at the *end*
# so any dirs that are already in the PYTHONPATH will be preferred.
#
# This assumes that the 'gm' directory has been checked out as a sibling of
# the 'tools' directory containing this script, which will be the case if
# 'trunk' was checked out as a single unit.
GM_DIRECTORY = os.path.realpath(
    os.path.join(os.path.dirname(os.path.dirname(__file__)), 'gm'))
if GM_DIRECTORY not in sys.path:
  sys.path.append(GM_DIRECTORY)
import gm_json

# Object that generates diffs between two JSON gm result files.
class GMDiffer(object):

  def __init__(self):
    pass

  def _GetFileContentsAsString(self, filepath):
    """Returns the full contents of a file, as a single string.

    If the filename looks like a URL, download its contents..."""
    if filepath.startswith('http:') or filepath.startswith('https:'):
      return urllib2.urlopen(filepath).read()
    else:
      return open(filepath, 'r').read()
  def _GetExpectedResults(self, filepath):
    """Returns the dictionary of expected results from a JSON file,
    in this form:

    {
      'test1' : 14760033689012826769,
      'test2' : 9151974350149210736,
      ...
    }

    We make these simplifying assumptions:
    1. Each test has either 0 or 1 allowed results.
    2. All expectations are of type JSONKEY_HASHTYPE_BITMAP_64BITMD5.

    Any tests which violate those assumptions will cause an exception to
    be raised.

    Any tests for which we have no expectations will be left out of the
    returned dictionary.
    """
    result_dict = {}
    contents = self._GetFileContentsAsString(filepath)
    json_dict = gm_json.LoadFromString(contents)
    all_expectations = json_dict[gm_json.JSONKEY_EXPECTEDRESULTS]
    for test_name in all_expectations.keys():
      test_expectations = all_expectations[test_name]
      allowed_digests = test_expectations[
          gm_json.JSONKEY_EXPECTEDRESULTS_ALLOWEDDIGESTS]
      if allowed_digests:
        num_allowed_digests = len(allowed_digests)
        if num_allowed_digests > 1:
          raise ValueError(
              'test %s in file %s has %d allowed digests' % (
                  test_name, filepath, num_allowed_digests))
        digest_pair = allowed_digests[0]
        if digest_pair[0] != gm_json.JSONKEY_HASHTYPE_BITMAP_64BITMD5:
          raise ValueError(
              'test %s in file %s has unsupported hashtype %s' % (
                  test_name, filepath, digest_pair[0]))
        result_dict[test_name] = digest_pair[1]
    return result_dict
  def _GetActualResults(self, filepath):
    """Returns the dictionary of actual results from a JSON file,
    in this form:

    {
      'test1' : 14760033689012826769,
      'test2' : 9151974350149210736,
      ...
    }

    We make these simplifying assumptions:
    1. All results are of type JSONKEY_HASHTYPE_BITMAP_64BITMD5.

    Any tests which violate those assumptions will cause an exception to
    be raised.

    Any tests for which we have no actual results will be left out of the
    returned dictionary.
    """
    result_dict = {}
    contents = self._GetFileContentsAsString(filepath)
    json_dict = gm_json.LoadFromString(contents)
    all_result_types = json_dict[gm_json.JSONKEY_ACTUALRESULTS]
    for result_type in all_result_types.keys():
      results_of_this_type = all_result_types[result_type]
      if results_of_this_type:
        for test_name in results_of_this_type.keys():
          digest_pair = results_of_this_type[test_name]
          if digest_pair[0] != gm_json.JSONKEY_HASHTYPE_BITMAP_64BITMD5:
            raise ValueError(
                'test %s in file %s has unsupported hashtype %s' % (
                    test_name, filepath, digest_pair[0]))
          result_dict[test_name] = digest_pair[1]
    return result_dict
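
  # Illustrative sketch (again an assumption, not copied from a real
  # actual-results file) of the structure _GetActualResults() walks; the
  # results are grouped into buckets by result type, and every non-empty
  # bucket is flattened into one test-name -> digest dictionary:
  #
  #   {
  #     JSONKEY_ACTUALRESULTS: {
  #       '<some-result-type>': {
  #         'test1': [JSONKEY_HASHTYPE_BITMAP_64BITMD5, 9151974350149210736],
  #       },
  #       '<another-result-type>': None,
  #     },
  #   }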
  def _DictionaryDiff(self, old_dict, new_dict):
    """Generate a dictionary showing the diffs between old_dict and new_dict.

    Any entries which are identical across them will be left out."""
    diff_dict = {}
    all_keys = set(old_dict.keys() + new_dict.keys())
    for key in all_keys:
      if old_dict.get(key) != new_dict.get(key):
        new_entry = {}
        new_entry['old'] = old_dict.get(key)
        new_entry['new'] = new_dict.get(key)
        diff_dict[key] = new_entry
    return diff_dict
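
  # Worked example (hypothetical digest values) of what _DictionaryDiff()
  # produces; identical entries are dropped, and a key missing from one side
  # shows up as None on that side:
  #
  #   old_dict = {'test1': 111, 'test2': 222}
  #   new_dict = {'test1': 111, 'test2': 333, 'test3': 444}
  #   _DictionaryDiff(old_dict, new_dict) ==
  #       {'test2': {'old': 222, 'new': 333},
  #        'test3': {'old': None, 'new': 444}}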
  def GenerateDiffDict(self, oldfile, newfile=None):
    """Generate a dictionary showing the diffs:
    old = expectations within oldfile
    new = expectations within newfile

    If newfile is not specified, then 'new' is the actual results within
    oldfile.
    """
    old_results = self._GetExpectedResults(oldfile)
    if newfile:
      new_results = self._GetExpectedResults(newfile)
    else:
      new_results = self._GetActualResults(oldfile)
    return self._DictionaryDiff(old_results, new_results)

# main...
parser = argparse.ArgumentParser()
parser.add_argument('old',
                    help='Path to JSON file whose expectations to display on ' +
                         'the "old" side of the diff. This can be a filepath ' +
                         'on local storage, or a URL.')
parser.add_argument('new', nargs='?',
                    help='Path to JSON file whose expectations to display on ' +
                         'the "new" side of the diff; if not specified, uses ' +
                         'the ACTUAL results from the "old" JSON file. This ' +
                         'can be a filepath on local storage, or a URL.')
args = parser.parse_args()
differ = GMDiffer()
diffs = differ.GenerateDiffDict(oldfile=args.old, newfile=args.new)
json.dump(diffs, sys.stdout, sort_keys=True, indent=2)
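
# Example invocations (a sketch; the script name and file paths below are
# placeholders, not names guaranteed to match this checkout):
#
#   # Diff the expectations in a single actual-results file against the
#   # actual results recorded in that same file:
#   python jsondiff.py actual-results.json
#
#   # Diff the expectations in two different files (either argument may also
#   # be an http:// or https:// URL):
#   python jsondiff.py old-expectations.json new-expectations.json
#
# The old-vs-new diff dictionary is written to stdout as JSON.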