-rw-r--r--  gm/confirm_no_failures_in_json.py  47
-rw-r--r--  gm/gm_expectations.cpp              2
-rwxr-xr-x  gm/tests/run.sh                    32
3 files changed, 81 insertions, 0 deletions
diff --git a/gm/confirm_no_failures_in_json.py b/gm/confirm_no_failures_in_json.py
new file mode 100644
index 0000000000..c84992670b
--- /dev/null
+++ b/gm/confirm_no_failures_in_json.py
@@ -0,0 +1,47 @@
+#!/usr/bin/env python
+# Copyright (c) 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Utility to confirm that a JSON summary written by GM contains no failures.
+
+Usage:
+  python confirm_no_failures_in_json.py <filename>
+"""
+
+__author__ = 'Elliot Poger'
+
+
+import json
+import sys
+
+
+# These constants must be kept in sync with the kJsonKey_ constants in
+# gm_expectations.cpp !
+JSONKEY_ACTUALRESULTS = 'actual-results'
+JSONKEY_ACTUALRESULTS_FAILED = 'failed'
+
+# This is the same indent level as used by jsoncpp, just for consistency.
+JSON_INDENTLEVEL = 3
+
+
+def Assert(filepath):
+ """Raises an exception if the JSON summary at filepath contains any failed
+ tests, or if we were unable to read the JSON summary."""
+ failed_tests = GetFailedTests(filepath)
+ if failed_tests:
+ raise Exception('JSON file %s contained these test failures...\n%s' % (
+ filepath, json.dumps(failed_tests, indent=JSON_INDENTLEVEL)))
+
+
+def GetFailedTests(filepath):
+ """Returns the dictionary of failed tests from the JSON file at filepath."""
+ json_dict = json.load(open(filepath))
+ actual_results = json_dict[JSONKEY_ACTUALRESULTS]
+ return actual_results[JSONKEY_ACTUALRESULTS_FAILED]
+
+
+if '__main__' == __name__:
+  if len(sys.argv) != 2:
+    raise Exception('usage: %s <input-json-filepath>' % sys.argv[0])
+  Assert(sys.argv[1])
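
For reference, the script only inspects the 'actual-results' -> 'failed' section of the summary, using the same key names as the kJsonKey_ constants in gm_expectations.cpp. Below is a minimal sketch (not part of this change) of driving it as a module against a hand-written summary; it assumes gm/ is on sys.path, and the single entry under 'failed' is a hypothetical placeholder rather than GM's real per-test result format.

  # Sketch only: exercises the same keys the checker reads.
  import json
  import tempfile

  import confirm_no_failures_in_json as checker

  summary = {'actual-results': {'failed': {'565/selftest1': 'hypothetical-digest'}}}

  with tempfile.NamedTemporaryFile(mode='w', suffix='.json', delete=False) as f:
    json.dump(summary, f)
    path = f.name

  print(checker.GetFailedTests(path))  # {'565/selftest1': 'hypothetical-digest'}
  try:
    checker.Assert(path)  # raises because the 'failed' section is non-empty
  except Exception as e:
    print('Assert raised, as expected: %s' % e)
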
diff --git a/gm/gm_expectations.cpp b/gm/gm_expectations.cpp
index 2c6dede25a..a92ceb323f 100644
--- a/gm/gm_expectations.cpp
+++ b/gm/gm_expectations.cpp
@@ -11,6 +11,8 @@
#define DEBUGFAIL_SEE_STDERR SkDEBUGFAIL("see stderr for message")
+// These constants must be kept in sync with the JSONKEY_ constants in
+// confirm_no_failures_in_json.py !
const static char kJsonKey_ActualResults[] = "actual-results";
const static char kJsonKey_ActualResults_Failed[] = "failed";
const static char kJsonKey_ActualResults_FailureIgnored[]= "failure-ignored";
diff --git a/gm/tests/run.sh b/gm/tests/run.sh
index e9cc19df61..0ec576dd40 100755
--- a/gm/tests/run.sh
+++ b/gm/tests/run.sh
@@ -44,6 +44,28 @@ function compare_directories {
  fi
}
+# Run a command, and validate that it succeeds (returns 0).
+function assert_passes {
+ COMMAND="$1"
+ OUTPUT=$($COMMAND 2>&1)
+ if [ $? != 0 ]; then
+ echo "This command was supposed to pass, but failed: [$COMMAND]"
+ echo $OUTPUT
+ ENCOUNTERED_ANY_ERRORS=1
+ fi
+}
+
+# Run a command, and validate that it fails (returns nonzero).
+function assert_fails {
+ COMMAND="$1"
+ OUTPUT=$($COMMAND 2>&1)
+ if [ $? == 0 ]; then
+ echo "This command was supposed to fail, but passed: [$COMMAND]"
+ echo $OUTPUT
+ ENCOUNTERED_ANY_ERRORS=1
+ fi
+}
+
# Run gm...
# - with the arguments in $1
# - writing stdout into $2/$OUTPUT_ACTUAL_SUBDIR/stdout
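
The assert_passes and assert_fails helpers added above look only at the command's exit status, echoing the captured output when the result is unexpected. A rough Python equivalent of the same idea, shown purely as a sketch with a hypothetical helper name (not part of the harness):

  import subprocess

  ENCOUNTERED_ANY_ERRORS = 0

  def assert_exit_status(command, expect_success):
    """Run command (a list of args); record an error if its exit status is unexpected."""
    global ENCOUNTERED_ANY_ERRORS
    # Capture stdout and stderr together, like `$COMMAND 2>&1` in the shell version.
    proc = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    output, _ = proc.communicate()
    if (proc.returncode == 0) != expect_success:
      print('unexpected exit status %d from: %s' % (proc.returncode, command))
      print(output)
      ENCOUNTERED_ANY_ERRORS = 1

  # e.g. assert_exit_status(
  #     ['python', 'gm/confirm_no_failures_in_json.py', 'path/to/json-summary.txt'],
  #     expect_success=True)
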
@@ -186,6 +208,16 @@ gm_test "--verbose --hierarchy --match selftest1 selftest2 $CONFIGS" "$GM_OUTPUT
# Ignore some error types (including ExpectationsMismatch)
gm_test "--ignoreErrorTypes ExpectationsMismatch NoGpuContext --verbose --hierarchy --match selftest1 $CONFIGS -r $GM_INPUTS/json/different-pixels.json" "$GM_OUTPUTS/ignore-expectations-mismatch"
+# Exercise confirm_no_failures_in_json.py
+PASSING_CASES="compared-against-identical-bytes-json compared-against-identical-pixels-json"
+FAILING_CASES="compared-against-different-pixels-json"
+for CASE in $PASSING_CASES; do
+ assert_passes "python gm/confirm_no_failures_in_json.py $GM_OUTPUTS/$CASE/$OUTPUT_EXPECTED_SUBDIR/json-summary.txt"
+done
+for CASE in $FAILING_CASES; do
+ assert_fails "python gm/confirm_no_failures_in_json.py $GM_OUTPUTS/$CASE/$OUTPUT_EXPECTED_SUBDIR/json-summary.txt"
+done
+
if [ $ENCOUNTERED_ANY_ERRORS == 0 ]; then
echo "All tests passed."
exit 0