-rw-r--r--  gm/gm_error.h | 9
-rw-r--r--  gm/gmmain.cpp | 56
-rw-r--r--  gm/tests/outputs/compared-against-different-pixels-images/output-expected/stdout | 5
-rw-r--r--  gm/tests/outputs/compared-against-different-pixels-json/output-expected/stdout | 5
-rw-r--r--  gm/tests/outputs/compared-against-empty-dir/output-expected/stdout | 5
-rw-r--r--  gm/tests/outputs/compared-against-identical-bytes-images/output-expected/stdout | 5
-rw-r--r--  gm/tests/outputs/compared-against-identical-bytes-json/output-expected/stdout | 5
-rw-r--r--  gm/tests/outputs/compared-against-identical-pixels-images/output-expected/stdout | 5
-rw-r--r--  gm/tests/outputs/compared-against-identical-pixels-json/output-expected/stdout | 5
-rw-r--r--  gm/tests/outputs/no-readpath/output-expected/stdout | 5
-rw-r--r--  gm/tests/outputs/pipe-playback-failure/output-expected/command_line | 1
-rw-r--r--  gm/tests/outputs/pipe-playback-failure/output-expected/json-summary.txt | 33
-rw-r--r--  gm/tests/outputs/pipe-playback-failure/output-expected/return_value | 1
-rw-r--r--  gm/tests/outputs/pipe-playback-failure/output-expected/stderr | 1
-rw-r--r--  gm/tests/outputs/pipe-playback-failure/output-expected/stdout | 13
-rwxr-xr-x  gm/tests/run.sh | 3
16 files changed, 110 insertions, 47 deletions
diff --git a/gm/gm_error.h b/gm/gm_error.h
index ced3391bca..aab0ec0b25 100644
--- a/gm/gm_error.h
+++ b/gm/gm_error.h
@@ -26,7 +26,8 @@ namespace skiagm {
// or off (as long as the number of these errors is 0).
kNoGpuContext_ErrorType,
- kImageMismatch_ErrorType,
+ kRenderModeMismatch_ErrorType,
+ kExpectationsMismatch_ErrorType,
kMissingExpectations_ErrorType,
kWritingReferenceImage_ErrorType,
kLast_ErrorType = kWritingReferenceImage_ErrorType
@@ -39,8 +40,10 @@ namespace skiagm {
switch(type) {
case kNoGpuContext_ErrorType:
return "NoGpuContext";
- case kImageMismatch_ErrorType:
- return "ImageMismatch";
+ case kRenderModeMismatch_ErrorType:
+ return "RenderModeMismatch";
+ case kExpectationsMismatch_ErrorType:
+ return "ExpectationsMismatch";
case kMissingExpectations_ErrorType:
return "MissingExpectations";
case kWritingReferenceImage_ErrorType:
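
This change splits the old kImageMismatch_ErrorType in two, so summaries can distinguish a result that differs from on-disk expectations (ExpectationsMismatch) from one that differs across rendering modes (RenderModeMismatch). The ErrorCombination type used throughout gmmain.cpp below is not part of this diff; here is a minimal standalone sketch assuming it is a per-ErrorType bitmask — the add()/includes()/isEmpty() calls mirror the usage in the patch, but the implementation is a guess:

```cpp
#include <cstdio>

// Error types as declared in gm/gm_error.h after this change.
enum ErrorType {
    kNoGpuContext_ErrorType,
    kRenderModeMismatch_ErrorType,
    kExpectationsMismatch_ErrorType,
    kMissingExpectations_ErrorType,
    kWritingReferenceImage_ErrorType,
    kLast_ErrorType = kWritingReferenceImage_ErrorType
};

// Hypothetical stand-in for skiagm::ErrorCombination: one bit per
// ErrorType. The real class is defined elsewhere in gm/, not here.
class ErrorCombination {
public:
    ErrorCombination() : fBits(0) {}
    void add(ErrorType type) { fBits |= 1 << type; }
    bool includes(ErrorType type) const { return 0 != (fBits & (1 << type)); }
    bool isEmpty() const { return 0 == fBits; }
private:
    int fBits;
};

int main() {
    ErrorCombination errors;
    errors.add(kRenderModeMismatch_ErrorType);
    // Mirrors the check added in gmmain.cpp below: either mismatch
    // type marks the test as failed in the JSON summary.
    if (errors.includes(kExpectationsMismatch_ErrorType) ||
        errors.includes(kRenderModeMismatch_ErrorType)) {
        printf("test marked as failed\n");
    }
    return 0;
}
```
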
diff --git a/gm/gmmain.cpp b/gm/gmmain.cpp
index 6cb47aa047..850106408c 100644
--- a/gm/gmmain.cpp
+++ b/gm/gmmain.cpp
@@ -272,7 +272,8 @@ public:
}
// Things to do only if there is some error condition.
- SkString fullName = make_name(name.c_str(), renderModeDescriptor);
+ SkString fullName = name;
+ fullName.append(renderModeDescriptor);
for (int typeInt = 0; typeInt <= kLast_ErrorType; typeInt++) {
ErrorType type = static_cast<ErrorType>(typeInt);
if (errorCombination.includes(type)) {
@@ -683,18 +684,17 @@ public:
* @param baseNameString name of test without renderModeDescriptor added
* @param renderModeDescriptor e.g., "-rtree", "-deferred"
* @param addToJsonSummary whether to add these results (both actual and
- * expected) to the JSON summary
- *
- * TODO: For now, addToJsonSummary is only set to true within
- * compare_test_results_to_stored_expectations(), so results of our
- * in-memory comparisons (Rtree vs regular, etc.) are not written to the
- * JSON summary. We may wish to change that.
+ * expected) to the JSON summary. Regardless of this setting, if
+ * we find an image mismatch in this test, we will write these
+ * results to the JSON summary. (This is so that we will always
+ * report errors across rendering modes, such as pipe vs tiled.
+ * See https://codereview.chromium.org/13650002/ )
*/
ErrorCombination compare_to_expectations(Expectations expectations,
const SkBitmap& actualBitmap,
const SkString& baseNameString,
const char renderModeDescriptor[],
- bool addToJsonSummary=false) {
+ bool addToJsonSummary) {
ErrorCombination errors;
Checksum actualChecksum = SkBitmapChecksummer::Compute64(actualBitmap);
SkString completeNameString = baseNameString;
@@ -704,7 +704,14 @@ public:
if (expectations.empty()) {
errors.add(kMissingExpectations_ErrorType);
} else if (!expectations.match(actualChecksum)) {
- errors.add(kImageMismatch_ErrorType);
+ addToJsonSummary = true;
+ // The error mode we record depends on whether this was running
+ // in a non-standard renderMode.
+ if ('\0' == *renderModeDescriptor) {
+ errors.add(kExpectationsMismatch_ErrorType);
+ } else {
+ errors.add(kRenderModeMismatch_ErrorType);
+ }
// Write out the "actuals" for any mismatches, if we have
// been directed to do so.
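
The hunk above contains the central behavioral change: on a mismatch, the error type recorded now depends on renderModeDescriptor. An empty descriptor means the standard rendering pass was compared against expectations stored on disk; a non-empty one (e.g. "-pipe", "-rtree") means an in-memory comparison across rendering modes. The same rule, isolated in a tiny runnable sketch (classify_mismatch is a hypothetical helper, not in the patch):

```cpp
#include <cstdio>

enum ErrorType {
    kRenderModeMismatch_ErrorType,
    kExpectationsMismatch_ErrorType
};

// Hypothetical helper isolating the rule added to
// compare_to_expectations(): an empty render-mode descriptor means
// the standard pass was compared against expectations from disk.
static ErrorType classify_mismatch(const char renderModeDescriptor[]) {
    if ('\0' == *renderModeDescriptor) {
        return kExpectationsMismatch_ErrorType;
    }
    return kRenderModeMismatch_ErrorType;
}

int main() {
    printf("\"\"      -> %s\n",
           classify_mismatch("") == kExpectationsMismatch_ErrorType
               ? "ExpectationsMismatch" : "RenderModeMismatch");
    printf("\"-pipe\" -> %s\n",
           classify_mismatch("-pipe") == kExpectationsMismatch_ErrorType
               ? "ExpectationsMismatch" : "RenderModeMismatch");
    return 0;
}
```

Note also that addToJsonSummary is force-set to true on any mismatch, so cross-mode failures always reach the JSON summary even though their caller passes false.
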
@@ -753,7 +760,7 @@ public:
// (where we can set ignore-failure to either true or
// false), add test cases that exercise ignored
// failures (both for kMissingExpectations_ErrorType
- // and kImageMismatch_ErrorType).
+ // and kExpectationsMismatch_ErrorType).
this->fJsonActualResults_FailureIgnored[testName] =
actualResults;
} else {
@@ -772,7 +779,8 @@ public:
this->fJsonActualResults_NoComparison[testName] =
actualResults;
}
- if (result.includes(kImageMismatch_ErrorType)) {
+ if (result.includes(kExpectationsMismatch_ErrorType) ||
+ result.includes(kRenderModeMismatch_ErrorType)) {
this->fJsonActualResults_Failed[testName] = actualResults;
}
}
@@ -866,18 +874,11 @@ public:
GM* gm, const ConfigData& gRec, const char renderModeDescriptor [],
SkBitmap& actualBitmap, const SkBitmap* referenceBitmap) {
- // TODO(epoger): This method is run to compare results across
- // different rendering modes (as opposed to
- // compare_test_results_to_stored_expectations(), which
- // compares results against expectations stored on disk). If
- // we would like the GenerateGMs step to distinguish between
- // those two types of mismatches, we should report image
- // mismatches in here with a different ErrorType.
SkASSERT(referenceBitmap);
SkString name = make_name(gm->shortName(), gRec.fName);
Expectations expectations(*referenceBitmap);
return compare_to_expectations(expectations, actualBitmap,
- name, renderModeDescriptor);
+ name, renderModeDescriptor, false);
}
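
compare_test_results_to_reference_bitmap() now routes cross-mode comparisons through the same compare_to_expectations() entry point, wrapping the reference bitmap in an Expectations object (the real class hashes it via SkBitmapChecksummer::Compute64, per the code above) and passing addToJsonSummary=false. A sketch of the checksum-set idea behind Expectations, assuming it holds one or more allowed checksums — consistent with the "checksums" arrays in the summary file below, though the real class is not part of this diff:

```cpp
#include <cstdint>
#include <cstdio>
#include <vector>

typedef uint64_t Checksum;

// Hypothetical stand-in for skiagm::Expectations: the set of checksums
// an actual rendering is allowed to hash to. empty() drives the
// kMissingExpectations_ErrorType branch in compare_to_expectations().
class Expectations {
public:
    Expectations() {}  // no expectations on file
    explicit Expectations(Checksum allowed) { fAllowed.push_back(allowed); }
    bool empty() const { return fAllowed.empty(); }
    bool match(Checksum actual) const {
        for (size_t i = 0; i < fAllowed.size(); ++i) {
            if (fAllowed[i] == actual) {
                return true;
            }
        }
        return false;
    }
private:
    std::vector<Checksum> fAllowed;
};

int main() {
    Expectations none;
    Expectations ref(14022967492765711532ULL);  // checksum from the summary below
    printf("missing: %d\n", none.empty());      // -> MissingExpectations
    printf("match:   %d\n", ref.match(14022967492765711532ULL));
    printf("differs: %d\n", ref.match(4259036727585789440ULL));  // -> mismatch
    return 0;
}
```
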
static SkPicture* generate_new_picture(GM* gm, BbhType bbhType, uint32_t recordFlags,
@@ -996,9 +997,8 @@ public:
return kEmpty_ErrorCombination;
}
- ErrorCombination test_pipe_playback(GM* gm,
- const ConfigData& gRec,
- const SkBitmap& referenceBitmap) {
+ ErrorCombination test_pipe_playback(GM* gm, const ConfigData& gRec,
+ const SkBitmap& referenceBitmap, bool simulateFailure) {
ErrorCombination errors;
for (size_t i = 0; i < SK_ARRAY_COUNT(gPipeWritingFlagCombos); ++i) {
SkBitmap bitmap;
@@ -1010,7 +1010,9 @@ public:
SkGPipeWriter writer;
SkCanvas* pipeCanvas = writer.startRecording(
&pipeController, gPipeWritingFlagCombos[i].flags);
- invokeGM(gm, pipeCanvas, false, false);
+ if (!simulateFailure) {
+ invokeGM(gm, pipeCanvas, false, false);
+ }
complete_bitmap(&bitmap);
writer.endRecording();
SkString string("-pipe");
@@ -1177,6 +1179,7 @@ DEFINE_bool(replay, true, "Exercise the SkPicture replay test pass.");
DEFINE_string2(resourcePath, i, "", "Directory that stores image resources.");
DEFINE_bool(rtree, true, "Exercise the R-Tree variant of SkPicture test pass.");
DEFINE_bool(serialize, true, "Exercise the SkPicture serialization & deserialization test pass.");
+DEFINE_bool(simulatePipePlaybackFailure, false, "Simulate a rendering failure in pipe mode only.");
DEFINE_bool(tiledPipe, false, "Exercise tiled SkGPipe replay.");
DEFINE_bool(tileGrid, true, "Exercise the tile grid variant of SkPicture.");
DEFINE_string(tileGridReplayScales, "", "Space separated list of floating-point scale "
@@ -1411,10 +1414,6 @@ ErrorCombination run_multiple_modes(GMMain &gmmain, GM *gm, const ConfigData &co
errorsForAllModes.add(pictErrors);
}
- // TODO: add a test in which the RTree rendering results in a
- // different bitmap than the standard rendering. It should
- // show up as failed in the JSON summary, and should be listed
- // in the stdout also.
if (!(gmFlags & GM::kSkipPicture_Flag) && FLAGS_rtree) {
SkPicture* pict = gmmain.generate_new_picture(
gm, kRTree_BbhType, SkPicture::kUsePathBoundsForClip_RecordingFlag);
@@ -1459,7 +1458,8 @@ ErrorCombination run_multiple_modes(GMMain &gmmain, GM *gm, const ConfigData &co
ErrorCombination pipeErrors;
if (FLAGS_pipe) {
- pipeErrors.add(gmmain.test_pipe_playback(gm, compareConfig, comparisonBitmap));
+ pipeErrors.add(gmmain.test_pipe_playback(gm, compareConfig, comparisonBitmap,
+ FLAGS_simulatePipePlaybackFailure));
}
if ((pipeErrors.isEmpty()) &&
diff --git a/gm/tests/outputs/compared-against-different-pixels-images/output-expected/stdout b/gm/tests/outputs/compared-against-different-pixels-images/output-expected/stdout
index 3067409e2e..8b8e61ddc3 100644
--- a/gm/tests/outputs/compared-against-different-pixels-images/output-expected/stdout
+++ b/gm/tests/outputs/compared-against-different-pixels-images/output-expected/stdout
@@ -4,9 +4,10 @@ GM: Ran 1 GMs
GM: ... over 2 configs ["8888", "565"]
GM: ... and 7 modes ["pipe", "pipe cross-process", "pipe cross-process, shared address", "replay", "rtree", "serialize", "tilegrid"]
GM: ... so there should be a total of 9 tests.
-GM: Ran 9 tests: NoGpuContext=0 ImageMismatch=2 MissingExpectations=0 WritingReferenceImage=0
+GM: Ran 9 tests: NoGpuContext=0 RenderModeMismatch=0 ExpectationsMismatch=2 MissingExpectations=0 WritingReferenceImage=0
GM: [*] 0 NoGpuContext:
-GM: [*] 2 ImageMismatch: 8888/selftest1 565/selftest1
+GM: [*] 0 RenderModeMismatch:
+GM: [*] 2 ExpectationsMismatch: 8888/selftest1 565/selftest1
GM: [ ] 0 MissingExpectations:
GM: [*] 0 WritingReferenceImage:
GM: (results marked with [*] will cause nonzero return value)
diff --git a/gm/tests/outputs/compared-against-different-pixels-json/output-expected/stdout b/gm/tests/outputs/compared-against-different-pixels-json/output-expected/stdout
index 84b2737216..2fd0269ea3 100644
--- a/gm/tests/outputs/compared-against-different-pixels-json/output-expected/stdout
+++ b/gm/tests/outputs/compared-against-different-pixels-json/output-expected/stdout
@@ -4,9 +4,10 @@ GM: Ran 1 GMs
GM: ... over 2 configs ["8888", "565"]
GM: ... and 7 modes ["pipe", "pipe cross-process", "pipe cross-process, shared address", "replay", "rtree", "serialize", "tilegrid"]
GM: ... so there should be a total of 9 tests.
-GM: Ran 9 tests: NoGpuContext=0 ImageMismatch=2 MissingExpectations=0 WritingReferenceImage=0
+GM: Ran 9 tests: NoGpuContext=0 RenderModeMismatch=0 ExpectationsMismatch=2 MissingExpectations=0 WritingReferenceImage=0
GM: [*] 0 NoGpuContext:
-GM: [*] 2 ImageMismatch: 8888/selftest1 565/selftest1
+GM: [*] 0 RenderModeMismatch:
+GM: [*] 2 ExpectationsMismatch: 8888/selftest1 565/selftest1
GM: [ ] 0 MissingExpectations:
GM: [*] 0 WritingReferenceImage:
GM: (results marked with [*] will cause nonzero return value)
diff --git a/gm/tests/outputs/compared-against-empty-dir/output-expected/stdout b/gm/tests/outputs/compared-against-empty-dir/output-expected/stdout
index a57916e504..829c05e6df 100644
--- a/gm/tests/outputs/compared-against-empty-dir/output-expected/stdout
+++ b/gm/tests/outputs/compared-against-empty-dir/output-expected/stdout
@@ -4,9 +4,10 @@ GM: Ran 1 GMs
GM: ... over 2 configs ["8888", "565"]
GM: ... and 7 modes ["pipe", "pipe cross-process", "pipe cross-process, shared address", "replay", "rtree", "serialize", "tilegrid"]
GM: ... so there should be a total of 9 tests.
-GM: Ran 9 tests: NoGpuContext=0 ImageMismatch=0 MissingExpectations=2 WritingReferenceImage=0
+GM: Ran 9 tests: NoGpuContext=0 RenderModeMismatch=0 ExpectationsMismatch=0 MissingExpectations=2 WritingReferenceImage=0
GM: [*] 0 NoGpuContext:
-GM: [*] 0 ImageMismatch:
+GM: [*] 0 RenderModeMismatch:
+GM: [*] 0 ExpectationsMismatch:
GM: [ ] 2 MissingExpectations: 8888/selftest1 565/selftest1
GM: [*] 0 WritingReferenceImage:
GM: (results marked with [*] will cause nonzero return value)
diff --git a/gm/tests/outputs/compared-against-identical-bytes-images/output-expected/stdout b/gm/tests/outputs/compared-against-identical-bytes-images/output-expected/stdout
index c67e5bcc8b..5788d69cca 100644
--- a/gm/tests/outputs/compared-against-identical-bytes-images/output-expected/stdout
+++ b/gm/tests/outputs/compared-against-identical-bytes-images/output-expected/stdout
@@ -4,9 +4,10 @@ GM: Ran 1 GMs
GM: ... over 2 configs ["8888", "565"]
GM: ... and 7 modes ["pipe", "pipe cross-process", "pipe cross-process, shared address", "replay", "rtree", "serialize", "tilegrid"]
GM: ... so there should be a total of 9 tests.
-GM: Ran 9 tests: NoGpuContext=0 ImageMismatch=0 MissingExpectations=0 WritingReferenceImage=0
+GM: Ran 9 tests: NoGpuContext=0 RenderModeMismatch=0 ExpectationsMismatch=0 MissingExpectations=0 WritingReferenceImage=0
GM: [*] 0 NoGpuContext:
-GM: [*] 0 ImageMismatch:
+GM: [*] 0 RenderModeMismatch:
+GM: [*] 0 ExpectationsMismatch:
GM: [ ] 0 MissingExpectations:
GM: [*] 0 WritingReferenceImage:
GM: (results marked with [*] will cause nonzero return value)
diff --git a/gm/tests/outputs/compared-against-identical-bytes-json/output-expected/stdout b/gm/tests/outputs/compared-against-identical-bytes-json/output-expected/stdout
index 25f51b5668..303a23fc35 100644
--- a/gm/tests/outputs/compared-against-identical-bytes-json/output-expected/stdout
+++ b/gm/tests/outputs/compared-against-identical-bytes-json/output-expected/stdout
@@ -4,9 +4,10 @@ GM: Ran 1 GMs
GM: ... over 2 configs ["8888", "565"]
GM: ... and 7 modes ["pipe", "pipe cross-process", "pipe cross-process, shared address", "replay", "rtree", "serialize", "tilegrid"]
GM: ... so there should be a total of 9 tests.
-GM: Ran 9 tests: NoGpuContext=0 ImageMismatch=0 MissingExpectations=0 WritingReferenceImage=0
+GM: Ran 9 tests: NoGpuContext=0 RenderModeMismatch=0 ExpectationsMismatch=0 MissingExpectations=0 WritingReferenceImage=0
GM: [*] 0 NoGpuContext:
-GM: [*] 0 ImageMismatch:
+GM: [*] 0 RenderModeMismatch:
+GM: [*] 0 ExpectationsMismatch:
GM: [ ] 0 MissingExpectations:
GM: [*] 0 WritingReferenceImage:
GM: (results marked with [*] will cause nonzero return value)
diff --git a/gm/tests/outputs/compared-against-identical-pixels-images/output-expected/stdout b/gm/tests/outputs/compared-against-identical-pixels-images/output-expected/stdout
index d23ed86520..4c4edba270 100644
--- a/gm/tests/outputs/compared-against-identical-pixels-images/output-expected/stdout
+++ b/gm/tests/outputs/compared-against-identical-pixels-images/output-expected/stdout
@@ -4,9 +4,10 @@ GM: Ran 1 GMs
GM: ... over 2 configs ["8888", "565"]
GM: ... and 7 modes ["pipe", "pipe cross-process", "pipe cross-process, shared address", "replay", "rtree", "serialize", "tilegrid"]
GM: ... so there should be a total of 9 tests.
-GM: Ran 9 tests: NoGpuContext=0 ImageMismatch=0 MissingExpectations=0 WritingReferenceImage=0
+GM: Ran 9 tests: NoGpuContext=0 RenderModeMismatch=0 ExpectationsMismatch=0 MissingExpectations=0 WritingReferenceImage=0
GM: [*] 0 NoGpuContext:
-GM: [*] 0 ImageMismatch:
+GM: [*] 0 RenderModeMismatch:
+GM: [*] 0 ExpectationsMismatch:
GM: [ ] 0 MissingExpectations:
GM: [*] 0 WritingReferenceImage:
GM: (results marked with [*] will cause nonzero return value)
diff --git a/gm/tests/outputs/compared-against-identical-pixels-json/output-expected/stdout b/gm/tests/outputs/compared-against-identical-pixels-json/output-expected/stdout
index 147c38340d..9a9d91b25f 100644
--- a/gm/tests/outputs/compared-against-identical-pixels-json/output-expected/stdout
+++ b/gm/tests/outputs/compared-against-identical-pixels-json/output-expected/stdout
@@ -4,9 +4,10 @@ GM: Ran 1 GMs
GM: ... over 2 configs ["8888", "565"]
GM: ... and 7 modes ["pipe", "pipe cross-process", "pipe cross-process, shared address", "replay", "rtree", "serialize", "tilegrid"]
GM: ... so there should be a total of 9 tests.
-GM: Ran 9 tests: NoGpuContext=0 ImageMismatch=0 MissingExpectations=0 WritingReferenceImage=0
+GM: Ran 9 tests: NoGpuContext=0 RenderModeMismatch=0 ExpectationsMismatch=0 MissingExpectations=0 WritingReferenceImage=0
GM: [*] 0 NoGpuContext:
-GM: [*] 0 ImageMismatch:
+GM: [*] 0 RenderModeMismatch:
+GM: [*] 0 ExpectationsMismatch:
GM: [ ] 0 MissingExpectations:
GM: [*] 0 WritingReferenceImage:
GM: (results marked with [*] will cause nonzero return value)
diff --git a/gm/tests/outputs/no-readpath/output-expected/stdout b/gm/tests/outputs/no-readpath/output-expected/stdout
index e3b101e531..1bdc86daf7 100644
--- a/gm/tests/outputs/no-readpath/output-expected/stdout
+++ b/gm/tests/outputs/no-readpath/output-expected/stdout
@@ -3,9 +3,10 @@ GM: Ran 1 GMs
GM: ... over 2 configs ["8888", "565"]
GM: ... and 7 modes ["pipe", "pipe cross-process", "pipe cross-process, shared address", "replay", "rtree", "serialize", "tilegrid"]
GM: ... so there should be a total of 9 tests.
-GM: Ran 9 tests: NoGpuContext=0 ImageMismatch=0 MissingExpectations=2 WritingReferenceImage=0
+GM: Ran 9 tests: NoGpuContext=0 RenderModeMismatch=0 ExpectationsMismatch=0 MissingExpectations=2 WritingReferenceImage=0
GM: [*] 0 NoGpuContext:
-GM: [*] 0 ImageMismatch:
+GM: [*] 0 RenderModeMismatch:
+GM: [*] 0 ExpectationsMismatch:
GM: [ ] 2 MissingExpectations: 8888/selftest1 565/selftest1
GM: [*] 0 WritingReferenceImage:
GM: (results marked with [*] will cause nonzero return value)
diff --git a/gm/tests/outputs/pipe-playback-failure/output-expected/command_line b/gm/tests/outputs/pipe-playback-failure/output-expected/command_line
new file mode 100644
index 0000000000..950339da65
--- /dev/null
+++ b/gm/tests/outputs/pipe-playback-failure/output-expected/command_line
@@ -0,0 +1 @@
+out/Debug/gm --simulatePipePlaybackFailure --hierarchy --match selftest1 --config 8888 565 -r gm/tests/inputs/json/identical-pixels.json --writeJsonSummaryPath gm/tests/outputs/pipe-playback-failure/output-actual/json-summary.txt
diff --git a/gm/tests/outputs/pipe-playback-failure/output-expected/json-summary.txt b/gm/tests/outputs/pipe-playback-failure/output-expected/json-summary.txt
new file mode 100644
index 0000000000..249fe09335
--- /dev/null
+++ b/gm/tests/outputs/pipe-playback-failure/output-expected/json-summary.txt
@@ -0,0 +1,33 @@
+{
+ "actual-results" : {
+ "failed" : {
+ "comparison/selftest1-pipe" : {
+ "checksum" : 4259036727585789440
+ }
+ },
+ "failure-ignored" : null,
+ "no-comparison" : null,
+ "succeeded" : {
+ "565/selftest1" : {
+ "checksum" : 9512553915271796906
+ },
+ "8888/selftest1" : {
+ "checksum" : 14022967492765711532
+ }
+ }
+ },
+ "expected-results" : {
+ "565/selftest1" : {
+ "checksums" : [ 9512553915271796906 ],
+ "ignore-failure" : false
+ },
+ "8888/selftest1" : {
+ "checksums" : [ 14022967492765711532 ],
+ "ignore-failure" : false
+ },
+ "comparison/selftest1-pipe" : {
+ "checksums" : [ 14022967492765711532 ],
+ "ignore-failure" : false
+ }
+ }
+}
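
The new expected summary shows the report's shape: "actual-results" buckets each test into failed / failure-ignored / no-comparison / succeeded, while "expected-results" records the allowed checksums per test. Only the simulated pipe mismatch lands in "failed"; the two standard-pass results match their stored checksums and land in "succeeded". A minimal consumer sketch, assuming jsoncpp (which gm uses for its JSON handling); the path is the one passed to --writeJsonSummaryPath in the command line above:

```cpp
#include <fstream>
#include <iostream>
#include <string>
#include <vector>

#include <json/json.h>  // jsoncpp, assumed available

int main() {
    std::ifstream in(
        "gm/tests/outputs/pipe-playback-failure/output-actual/json-summary.txt");
    Json::Value root;
    Json::Reader reader;
    if (!reader.parse(in, root)) {
        std::cerr << "could not parse summary\n";
        return 1;
    }
    // Tests whose ErrorCombination included either mismatch type land
    // in "failed"; here that is just comparison/selftest1-pipe.
    const Json::Value failed = root["actual-results"]["failed"];
    const std::vector<std::string> names = failed.getMemberNames();
    for (size_t i = 0; i < names.size(); ++i) {
        std::cout << "failed: " << names[i] << "\n";
    }
    return 0;
}
```
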
diff --git a/gm/tests/outputs/pipe-playback-failure/output-expected/return_value b/gm/tests/outputs/pipe-playback-failure/output-expected/return_value
new file mode 100644
index 0000000000..ace9d03621
--- /dev/null
+++ b/gm/tests/outputs/pipe-playback-failure/output-expected/return_value
@@ -0,0 +1 @@
+255
diff --git a/gm/tests/outputs/pipe-playback-failure/output-expected/stderr b/gm/tests/outputs/pipe-playback-failure/output-expected/stderr
new file mode 100644
index 0000000000..e65d374e8a
--- /dev/null
+++ b/gm/tests/outputs/pipe-playback-failure/output-expected/stderr
@@ -0,0 +1 @@
+GM: ---- comparison/selftest1-pipe: 60000 (of 60000) differing pixels, max per-channel mismatch R=135 G=246 B=135 A=0
diff --git a/gm/tests/outputs/pipe-playback-failure/output-expected/stdout b/gm/tests/outputs/pipe-playback-failure/output-expected/stdout
new file mode 100644
index 0000000000..52fc54f2a1
--- /dev/null
+++ b/gm/tests/outputs/pipe-playback-failure/output-expected/stdout
@@ -0,0 +1,13 @@
+GM: reading expectations from JSON summary file gm/tests/inputs/json/identical-pixels.json
+GM: drawing... selftest1 [300 200]
+GM: Ran 1 GMs
+GM: ... over 2 configs ["8888", "565"]
+GM: ... and 5 modes ["pipe", "replay", "rtree", "serialize", "tilegrid"]
+GM: ... so there should be a total of 7 tests.
+GM: Ran 7 tests: NoGpuContext=0 RenderModeMismatch=1 ExpectationsMismatch=0 MissingExpectations=0 WritingReferenceImage=0
+GM: [*] 0 NoGpuContext:
+GM: [*] 1 RenderModeMismatch: comparison/selftest1-pipe
+GM: [*] 0 ExpectationsMismatch:
+GM: [ ] 0 MissingExpectations:
+GM: [*] 0 WritingReferenceImage:
+GM: (results marked with [*] will cause nonzero return value)
diff --git a/gm/tests/run.sh b/gm/tests/run.sh
index 7ec08be567..093f7d777e 100755
--- a/gm/tests/run.sh
+++ b/gm/tests/run.sh
@@ -156,4 +156,7 @@ gm_test "--hierarchy --match selftest1 $CONFIGS -r $GM_INPUTS/images/empty-dir"
# section should be empty.
gm_test "--hierarchy --match selftest1 $CONFIGS" "$GM_OUTPUTS/no-readpath"
+# Test what happens if a subset of the renderModes fail (e.g. pipe)
+gm_test "--simulatePipePlaybackFailure --hierarchy --match selftest1 $CONFIGS -r $GM_INPUTS/json/identical-pixels.json" "$GM_OUTPUTS/pipe-playback-failure"
+
echo "All tests passed."