author     epoger@google.com <epoger@google.com@2bbb7eff-a529-9590-31e7-b0007b416f81>  2013-04-10 12:17:34 +0000
committer  epoger@google.com <epoger@google.com@2bbb7eff-a529-9590-31e7-b0007b416f81>  2013-04-10 12:17:34 +0000
commit     c8263e704135436f71a585801966294d6deadeeb (patch)
tree       052fc6332fc4e5bdde91c7325b2b497f4da8d3c0
parent     07d3a6575b86845ded0d4bd785aec8f4c2cc99dc (diff)
gm: record IntentionallySkipped tests
Review URL: https://codereview.chromium.org/13663003
git-svn-id: http://skia.googlecode.com/svn/trunk@8586 2bbb7eff-a529-9590-31e7-b0007b416f81
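In brief: render modes that a GM opts out of (via flags such as GM::kSkipPipe_Flag, GM::kSkipPDF_Flag, or GM::kSkipPicture_Flag) are no longer silently dropped; they are now recorded under a new kIntentionallySkipped_ErrorType, which is added to the ignorable-error set so it never causes a nonzero return value. The sketch below is a simplified, self-contained illustration of that pattern, not the actual gmmain.cpp code; the error/flag names are borrowed from the diff below, everything else is scaffolding invented for the example.

// Minimal sketch (not Skia's actual gmmain.cpp) of the skip-recording pattern
// added by this commit. ErrorType, ErrorCombination, and the GM flag names
// mirror the diff below; the rest is simplified scaffolding.
#include <cstdint>
#include <cstdio>
#include <set>
#include <string>

enum ErrorType {
    kNoGpuContext_ErrorType,
    kIntentionallySkipped_ErrorType,  // new in this commit
    kRenderModeMismatch_ErrorType,
};

// Stand-in for skiagm::ErrorCombination: the set of error types seen in a run.
struct ErrorCombination {
    std::set<ErrorType> types;
    void add(ErrorType t) { types.insert(t); }
    bool isEmpty() const { return types.empty(); }
};

// Per-GM skip flags, as in GM::kSkipPipe_Flag etc.
enum GMFlags : uint32_t {
    kSkipPipe_Flag  = 1 << 0,
    kSkipTiled_Flag = 1 << 1,
};

// Before this commit a skipped render mode left no trace; now it is recorded
// (and counted) as IntentionallySkipped, which stays in the ignorable set.
ErrorCombination test_pipe_playback(uint32_t gmFlags, const std::string& name) {
    ErrorCombination errors;
    if (gmFlags & kSkipPipe_Flag) {
        std::printf("RecordTestResults: %s-pipe -> IntentionallySkipped\n", name.c_str());
        errors.add(kIntentionallySkipped_ErrorType);
    } else {
        // ... record through the pipe, render, and compare against the
        // reference bitmap, adding any mismatch errors to 'errors' ...
    }
    return errors;
}

int main() {
    ErrorCombination e = test_pipe_playback(kSkipPipe_Flag, "comparison/selftest2");
    std::printf("intentionally skipped recorded: %s\n", e.isEmpty() ? "no" : "yes");
    return 0;
}

This is why the new self-test output below reports IntentionallySkipped=3 for selftest2 (pipe plus the two cross-process pipe variants) while the run still returns 0.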
-rw-r--r--  gm/gm_error.h  3
-rw-r--r--  gm/gmmain.cpp  301
-rw-r--r--  gm/selftest.cpp  14
-rw-r--r--  gm/tests/outputs/compared-against-different-pixels-images/output-expected/stdout  3
-rw-r--r--  gm/tests/outputs/compared-against-different-pixels-json/output-expected/stdout  3
-rw-r--r--  gm/tests/outputs/compared-against-empty-dir/output-expected/stdout  3
-rw-r--r--  gm/tests/outputs/compared-against-identical-bytes-images/output-expected/stdout  3
-rw-r--r--  gm/tests/outputs/compared-against-identical-bytes-json/output-expected/stdout  3
-rw-r--r--  gm/tests/outputs/compared-against-identical-pixels-images/output-expected/stdout  3
-rw-r--r--  gm/tests/outputs/compared-against-identical-pixels-json/output-expected/stdout  3
-rw-r--r--  gm/tests/outputs/intentionally-skipped-tests/output-expected/command_line  1
-rw-r--r--  gm/tests/outputs/intentionally-skipped-tests/output-expected/json-summary.txt  22
-rw-r--r--  gm/tests/outputs/intentionally-skipped-tests/output-expected/return_value  1
-rw-r--r--  gm/tests/outputs/intentionally-skipped-tests/output-expected/stderr  0
-rw-r--r--  gm/tests/outputs/intentionally-skipped-tests/output-expected/stdout  14
-rw-r--r--  gm/tests/outputs/no-readpath/output-expected/stdout  3
-rw-r--r--  gm/tests/outputs/pipe-playback-failure/output-expected/stdout  3
-rwxr-xr-x  gm/tests/run.sh  3
18 files changed, 246 insertions, 140 deletions
diff --git a/gm/gm_error.h b/gm/gm_error.h
index aab0ec0b25..16917435d3 100644
--- a/gm/gm_error.h
+++ b/gm/gm_error.h
@@ -26,6 +26,7 @@ namespace skiagm {
// or off (as long as the number of these errors is 0).
kNoGpuContext_ErrorType,
+ kIntentionallySkipped_ErrorType,
kRenderModeMismatch_ErrorType,
kExpectationsMismatch_ErrorType,
kMissingExpectations_ErrorType,
@@ -40,6 +41,8 @@ namespace skiagm {
switch(type) {
case kNoGpuContext_ErrorType:
return "NoGpuContext";
+ case kIntentionallySkipped_ErrorType:
+ return "IntentionallySkipped";
case kRenderModeMismatch_ErrorType:
return "RenderModeMismatch";
case kExpectationsMismatch_ErrorType:
diff --git a/gm/gmmain.cpp b/gm/gmmain.cpp
index 27a7bdaf9a..cf0ab41b66 100644
--- a/gm/gmmain.cpp
+++ b/gm/gmmain.cpp
@@ -179,6 +179,7 @@ public:
GMMain() : fUseFileHierarchy(false), fMismatchPath(NULL), fTestsRun(0),
fRenderModesEncountered(1) {
fIgnorableErrorCombination.add(kMissingExpectations_ErrorType);
+ fIgnorableErrorCombination.add(kIntentionallySkipped_ErrorType);
}
SkString make_name(const char shortName[], const char configName[]) {
@@ -864,21 +865,19 @@ public:
/**
* Compare actualBitmap to referenceBitmap.
*
- * @param gm which test generated the bitmap
- * @param gRec
+ * @param baseNameString name of test without renderModeDescriptor added
* @param renderModeDescriptor
* @param actualBitmap actual bitmap generated by this run
* @param referenceBitmap bitmap we expected to be generated
*/
ErrorCombination compare_test_results_to_reference_bitmap(
- GM* gm, const ConfigData& gRec, const char renderModeDescriptor [],
+ const SkString& baseNameString, const char renderModeDescriptor[],
SkBitmap& actualBitmap, const SkBitmap* referenceBitmap) {
SkASSERT(referenceBitmap);
- SkString name = make_name(gm->shortName(), gRec.fName);
Expectations expectations(*referenceBitmap);
return compare_to_expectations(expectations, actualBitmap,
- name, renderModeDescriptor, false);
+ baseNameString, renderModeDescriptor, false);
}
static SkPicture* generate_new_picture(GM* gm, BbhType bbhType, uint32_t recordFlags,
@@ -971,6 +970,7 @@ public:
if (gRec.fBackend == kRaster_Backend ||
gRec.fBackend == kGPU_Backend) {
+ const char renderModeDescriptor[] = "-deferred";
SkBitmap bitmap;
// Early exit if we can't generate the image, but this is
// expected in some cases, so don't report a test failure.
@@ -991,36 +991,45 @@ public:
// something like kImageGeneration_ErrorType?
return kEmpty_ErrorCombination;
}
+ const SkString name = make_name(gm->shortName(), gRec.fName);
return compare_test_results_to_reference_bitmap(
- gm, gRec, "-deferred", bitmap, &referenceBitmap);
+ name, renderModeDescriptor, bitmap, &referenceBitmap);
}
return kEmpty_ErrorCombination;
}
ErrorCombination test_pipe_playback(GM* gm, const ConfigData& gRec,
const SkBitmap& referenceBitmap, bool simulateFailure) {
+ const SkString name = make_name(gm->shortName(), gRec.fName);
ErrorCombination errors;
for (size_t i = 0; i < SK_ARRAY_COUNT(gPipeWritingFlagCombos); ++i) {
- SkBitmap bitmap;
- SkISize size = gm->getISize();
- setup_bitmap(gRec, size, &bitmap);
- SkCanvas canvas(bitmap);
- installFilter(&canvas);
- PipeController pipeController(&canvas);
- SkGPipeWriter writer;
- SkCanvas* pipeCanvas = writer.startRecording(
- &pipeController, gPipeWritingFlagCombos[i].flags);
- if (!simulateFailure) {
- invokeGM(gm, pipeCanvas, false, false);
- }
- complete_bitmap(&bitmap);
- writer.endRecording();
- SkString string("-pipe");
- string.append(gPipeWritingFlagCombos[i].name);
- errors.add(compare_test_results_to_reference_bitmap(
- gm, gRec, string.c_str(), bitmap, &referenceBitmap));
- if (!errors.isEmpty()) {
- break;
+ SkString renderModeDescriptor("-pipe");
+ renderModeDescriptor.append(gPipeWritingFlagCombos[i].name);
+
+ if (gm->getFlags() & GM::kSkipPipe_Flag) {
+ RecordTestResults(kIntentionallySkipped_ErrorType, name,
+ renderModeDescriptor.c_str());
+ errors.add(kIntentionallySkipped_ErrorType);
+ } else {
+ SkBitmap bitmap;
+ SkISize size = gm->getISize();
+ setup_bitmap(gRec, size, &bitmap);
+ SkCanvas canvas(bitmap);
+ installFilter(&canvas);
+ PipeController pipeController(&canvas);
+ SkGPipeWriter writer;
+ SkCanvas* pipeCanvas = writer.startRecording(&pipeController,
+ gPipeWritingFlagCombos[i].flags);
+ if (!simulateFailure) {
+ invokeGM(gm, pipeCanvas, false, false);
+ }
+ complete_bitmap(&bitmap);
+ writer.endRecording();
+ errors.add(compare_test_results_to_reference_bitmap(
+ name, renderModeDescriptor.c_str(), bitmap, &referenceBitmap));
+ if (!errors.isEmpty()) {
+ break;
+ }
}
}
return errors;
@@ -1028,26 +1037,36 @@ public:
ErrorCombination test_tiled_pipe_playback(GM* gm, const ConfigData& gRec,
const SkBitmap& referenceBitmap) {
+ const SkString name = make_name(gm->shortName(), gRec.fName);
ErrorCombination errors;
for (size_t i = 0; i < SK_ARRAY_COUNT(gPipeWritingFlagCombos); ++i) {
- SkBitmap bitmap;
- SkISize size = gm->getISize();
- setup_bitmap(gRec, size, &bitmap);
- SkCanvas canvas(bitmap);
- installFilter(&canvas);
- TiledPipeController pipeController(bitmap);
- SkGPipeWriter writer;
- SkCanvas* pipeCanvas = writer.startRecording(
- &pipeController, gPipeWritingFlagCombos[i].flags);
- invokeGM(gm, pipeCanvas, false, false);
- complete_bitmap(&bitmap);
- writer.endRecording();
- SkString string("-tiled pipe");
- string.append(gPipeWritingFlagCombos[i].name);
- errors.add(compare_test_results_to_reference_bitmap(
- gm, gRec, string.c_str(), bitmap, &referenceBitmap));
- if (!errors.isEmpty()) {
- break;
+ SkString renderModeDescriptor("-tiled pipe");
+ renderModeDescriptor.append(gPipeWritingFlagCombos[i].name);
+
+ if ((gm->getFlags() & GM::kSkipPipe_Flag) ||
+ (gm->getFlags() & GM::kSkipTiled_Flag)) {
+ RecordTestResults(kIntentionallySkipped_ErrorType, name,
+ renderModeDescriptor.c_str());
+ errors.add(kIntentionallySkipped_ErrorType);
+ } else {
+ SkBitmap bitmap;
+ SkISize size = gm->getISize();
+ setup_bitmap(gRec, size, &bitmap);
+ SkCanvas canvas(bitmap);
+ installFilter(&canvas);
+ TiledPipeController pipeController(bitmap);
+ SkGPipeWriter writer;
+ SkCanvas* pipeCanvas = writer.startRecording(&pipeController,
+ gPipeWritingFlagCombos[i].flags);
+ invokeGM(gm, pipeCanvas, false, false);
+ complete_bitmap(&bitmap);
+ writer.endRecording();
+ errors.add(compare_test_results_to_reference_bitmap(name,
+ renderModeDescriptor.c_str(),
+ bitmap, &referenceBitmap));
+ if (!errors.isEmpty()) {
+ break;
+ }
}
}
return errors;
@@ -1272,24 +1291,41 @@ ErrorCombination run_multiple_configs(GMMain &gmmain, GM *gm, const SkTDArray<si
GrContextFactory *grFactory);
ErrorCombination run_multiple_configs(GMMain &gmmain, GM *gm, const SkTDArray<size_t> &configs,
GrContextFactory *grFactory) {
+ const char renderModeDescriptor[] = "";
ErrorCombination errorsForAllConfigs;
uint32_t gmFlags = gm->getFlags();
for (int i = 0; i < configs.count(); i++) {
ConfigData config = gRec[configs[i]];
+ const SkString name = gmmain.make_name(gm->shortName(), config.fName);
// Skip any tests that we don't even need to try.
- if ((kPDF_Backend == config.fBackend) &&
- (!FLAGS_pdf|| (gmFlags & GM::kSkipPDF_Flag))) {
+ // If any of these were skipped on a per-GM basis, record them as
+ // kIntentionallySkipped.
+ if (kPDF_Backend == config.fBackend) {
+ if (!FLAGS_pdf) {
continue;
}
+ if (gmFlags & GM::kSkipPDF_Flag) {
+ gmmain.RecordTestResults(kIntentionallySkipped_ErrorType, name,
+ renderModeDescriptor);
+ errorsForAllConfigs.add(kIntentionallySkipped_ErrorType);
+ continue;
+ }
+ }
if ((gmFlags & GM::kSkip565_Flag) &&
(kRaster_Backend == config.fBackend) &&
(SkBitmap::kRGB_565_Config == config.fConfig)) {
+ gmmain.RecordTestResults(kIntentionallySkipped_ErrorType, name,
+ renderModeDescriptor);
+ errorsForAllConfigs.add(kIntentionallySkipped_ErrorType);
continue;
}
if ((gmFlags & GM::kSkipGPU_Flag) &&
kGPU_Backend == config.fBackend) {
+ gmmain.RecordTestResults(kIntentionallySkipped_ErrorType, name,
+ renderModeDescriptor);
+ errorsForAllConfigs.add(kIntentionallySkipped_ErrorType);
continue;
}
@@ -1374,100 +1410,105 @@ ErrorCombination run_multiple_modes(GMMain &gmmain, GM *gm, const ConfigData &co
const SkTDArray<SkScalar> &tileGridReplayScales) {
ErrorCombination errorsForAllModes;
uint32_t gmFlags = gm->getFlags();
-
- // TODO(epoger): We should start recording any per-GM skipped
- // modes (i.e. those we skipped due to gmFlags) with a new
- // ErrorType, perhaps named kIntentionallySkipped_ErrorType.
- if (!(gmFlags & GM::kSkipPicture_Flag)) {
-
- ErrorCombination pictErrors;
-
- //SkAutoTUnref<SkPicture> pict(generate_new_picture(gm));
- SkPicture* pict = gmmain.generate_new_picture(gm, kNone_BbhType, 0);
- SkAutoUnref aur(pict);
-
- if (FLAGS_replay) {
+ const SkString name = gmmain.make_name(gm->shortName(), compareConfig.fName);
+
+ SkPicture* pict = gmmain.generate_new_picture(gm, kNone_BbhType, 0);
+ SkAutoUnref aur(pict);
+ if (FLAGS_replay) {
+ const char renderModeDescriptor[] = "-replay";
+ if (gmFlags & GM::kSkipPicture_Flag) {
+ gmmain.RecordTestResults(kIntentionallySkipped_ErrorType, name, renderModeDescriptor);
+ errorsForAllModes.add(kIntentionallySkipped_ErrorType);
+ } else {
SkBitmap bitmap;
gmmain.generate_image_from_picture(gm, compareConfig, pict, &bitmap);
- pictErrors.add(gmmain.compare_test_results_to_reference_bitmap(
- gm, compareConfig, "-replay", bitmap, &comparisonBitmap));
+ errorsForAllModes.add(gmmain.compare_test_results_to_reference_bitmap(
+ name, renderModeDescriptor, bitmap, &comparisonBitmap));
}
+ }
- if ((pictErrors.isEmpty()) && FLAGS_serialize) {
+ if (FLAGS_serialize) {
+ const char renderModeDescriptor[] = "-serialize";
+ if (gmFlags & GM::kSkipPicture_Flag) {
+ gmmain.RecordTestResults(kIntentionallySkipped_ErrorType, name, renderModeDescriptor);
+ errorsForAllModes.add(kIntentionallySkipped_ErrorType);
+ } else {
SkPicture* repict = gmmain.stream_to_new_picture(*pict);
SkAutoUnref aurr(repict);
-
SkBitmap bitmap;
gmmain.generate_image_from_picture(gm, compareConfig, repict, &bitmap);
- pictErrors.add(gmmain.compare_test_results_to_reference_bitmap(
- gm, compareConfig, "-serialize", bitmap, &comparisonBitmap));
- }
-
- if (FLAGS_writePicturePath.count() == 1) {
- const char* pictureSuffix = "skp";
- SkString path = make_filename(FLAGS_writePicturePath[0], "",
- gm->shortName(), pictureSuffix);
- SkFILEWStream stream(path.c_str());
- pict->serialize(&stream);
+ errorsForAllModes.add(gmmain.compare_test_results_to_reference_bitmap(
+ name, renderModeDescriptor, bitmap, &comparisonBitmap));
}
-
- errorsForAllModes.add(pictErrors);
}
- if (!(gmFlags & GM::kSkipPicture_Flag) && FLAGS_rtree) {
- SkPicture* pict = gmmain.generate_new_picture(
- gm, kRTree_BbhType, SkPicture::kUsePathBoundsForClip_RecordingFlag);
- SkAutoUnref aur(pict);
- SkBitmap bitmap;
- gmmain.generate_image_from_picture(gm, compareConfig, pict, &bitmap);
- errorsForAllModes.add(gmmain.compare_test_results_to_reference_bitmap(
- gm, compareConfig, "-rtree", bitmap, &comparisonBitmap));
+ if ((1 == FLAGS_writePicturePath.count()) &&
+ !(gmFlags & GM::kSkipPicture_Flag)) {
+ const char* pictureSuffix = "skp";
+ SkString path = make_filename(FLAGS_writePicturePath[0], "",
+ gm->shortName(), pictureSuffix);
+ SkFILEWStream stream(path.c_str());
+ pict->serialize(&stream);
}
- if (!(gmFlags & GM::kSkipPicture_Flag) && FLAGS_tileGrid) {
- for(int scaleIndex = 0; scaleIndex < tileGridReplayScales.count(); ++scaleIndex) {
- SkScalar replayScale = tileGridReplayScales[scaleIndex];
- if ((gmFlags & GM::kSkipScaledReplay_Flag) && replayScale != 1) {
- continue;
- }
- // We record with the reciprocal scale to obtain a replay
- // result that can be validated against comparisonBitmap.
- SkScalar recordScale = SkScalarInvert(replayScale);
+ if (FLAGS_rtree) {
+ const char renderModeDescriptor[] = "-rtree";
+ if (gmFlags & GM::kSkipPicture_Flag) {
+ gmmain.RecordTestResults(kIntentionallySkipped_ErrorType, name, renderModeDescriptor);
+ errorsForAllModes.add(kIntentionallySkipped_ErrorType);
+ } else {
SkPicture* pict = gmmain.generate_new_picture(
- gm, kTileGrid_BbhType, SkPicture::kUsePathBoundsForClip_RecordingFlag, recordScale);
+ gm, kRTree_BbhType, SkPicture::kUsePathBoundsForClip_RecordingFlag);
SkAutoUnref aur(pict);
SkBitmap bitmap;
- // We cannot yet pass 'true' to generate_image_from_picture to
- // perform actual tiled rendering (see Issue 1198 -
- // https://code.google.com/p/skia/issues/detail?id=1198)
- gmmain.generate_image_from_picture(gm, compareConfig, pict, &bitmap,
- replayScale /*, true */);
- SkString suffix("-tilegrid");
- if (SK_Scalar1 != replayScale) {
- suffix += "-scale-";
- suffix.appendScalar(replayScale);
- }
+ gmmain.generate_image_from_picture(gm, compareConfig, pict, &bitmap);
errorsForAllModes.add(gmmain.compare_test_results_to_reference_bitmap(
- gm, compareConfig, suffix.c_str(), bitmap, &comparisonBitmap));
+ name, renderModeDescriptor, bitmap, &comparisonBitmap));
}
}
- // run the pipe centric GM steps
- if (!(gmFlags & GM::kSkipPipe_Flag)) {
-
- ErrorCombination pipeErrors;
+ if (FLAGS_tileGrid) {
+ for(int scaleIndex = 0; scaleIndex < tileGridReplayScales.count(); ++scaleIndex) {
+ SkScalar replayScale = tileGridReplayScales[scaleIndex];
+ SkString renderModeDescriptor("-tilegrid");
+ if (SK_Scalar1 != replayScale) {
+ renderModeDescriptor += "-scale-";
+ renderModeDescriptor.appendScalar(replayScale);
+ }
- if (FLAGS_pipe) {
- pipeErrors.add(gmmain.test_pipe_playback(gm, compareConfig, comparisonBitmap,
- FLAGS_simulatePipePlaybackFailure));
+ if ((gmFlags & GM::kSkipPicture_Flag) ||
+ ((gmFlags & GM::kSkipScaledReplay_Flag) && replayScale != 1)) {
+ gmmain.RecordTestResults(kIntentionallySkipped_ErrorType, name,
+ renderModeDescriptor.c_str());
+ errorsForAllModes.add(kIntentionallySkipped_ErrorType);
+ } else {
+ // We record with the reciprocal scale to obtain a replay
+ // result that can be validated against comparisonBitmap.
+ SkScalar recordScale = SkScalarInvert(replayScale);
+ SkPicture* pict = gmmain.generate_new_picture(
+ gm, kTileGrid_BbhType, SkPicture::kUsePathBoundsForClip_RecordingFlag,
+ recordScale);
+ SkAutoUnref aur(pict);
+ SkBitmap bitmap;
+ // We cannot yet pass 'true' to generate_image_from_picture to
+ // perform actual tiled rendering (see Issue 1198 -
+ // https://code.google.com/p/skia/issues/detail?id=1198)
+ gmmain.generate_image_from_picture(gm, compareConfig, pict, &bitmap,
+ replayScale /*, true */);
+ errorsForAllModes.add(gmmain.compare_test_results_to_reference_bitmap(
+ name, renderModeDescriptor.c_str(), bitmap, &comparisonBitmap));
+ }
}
+ }
- if ((pipeErrors.isEmpty()) &&
- FLAGS_tiledPipe && !(gmFlags & GM::kSkipTiled_Flag)) {
- pipeErrors.add(gmmain.test_tiled_pipe_playback(gm, compareConfig, comparisonBitmap));
+ // run the pipe centric GM steps
+ if (FLAGS_pipe) {
+ errorsForAllModes.add(gmmain.test_pipe_playback(gm, compareConfig, comparisonBitmap,
+ FLAGS_simulatePipePlaybackFailure));
+ if (FLAGS_tiledPipe) {
+ errorsForAllModes.add(gmmain.test_tiled_pipe_playback(gm, compareConfig,
+ comparisonBitmap));
}
-
- errorsForAllModes.add(pipeErrors);
}
return errorsForAllModes;
}
@@ -1753,25 +1794,29 @@ int tool_main(int argc, char** argv) {
SkTArray<SkString> modes;
gmmain.GetRenderModesEncountered(modes);
+ bool reportError = false;
+ if (gmmain.NumSignificantErrors() > 0) {
+ reportError = true;
+ }
// Output summary to stdout.
gm_fprintf(stdout, "Ran %d GMs\n", gmsRun);
gm_fprintf(stdout, "... over %2d configs [%s]\n", configs.count(),
list_all_config_names(configs).c_str());
gm_fprintf(stdout, "... and %2d modes [%s]\n", modes.count(), list_all(modes).c_str());
- gm_fprintf(stdout, "... so there should be a total of %d tests.\n",
- gmsRun * (configs.count() + modes.count()));
-
- // TODO(epoger): Ultimately, we should signal an error if the
- // expected total number of tests (displayed above) does not match
- // gmmain.fTestsRun. But for now, there are cases where those
- // numbers won't match: specifically, if some configs/modes are
- // skipped on a per-GM basis (due to gm->getFlags() for a specific
- // GM). Later on, we should record tests like that using some new
- // ErrorType, like kIntentionallySkipped_ErrorType. Then we could
- // signal an error if the totals didn't match up.
+ int expectedNumberOfTests = gmsRun * (configs.count() + modes.count());
+ gm_fprintf(stdout, "... so there should be a total of %d tests.\n", expectedNumberOfTests);
gmmain.ListErrors();
+ // TODO(epoger): in a standalone CL, enable this new check.
+#if 0
+ if (expectedNumberOfTests != gmmain.fTestsRun) {
+ gm_fprintf(stderr, "expected %d tests, but ran or skipped %d tests\n",
+ expectedNumberOfTests, gmmain.fTestsRun);
+ reportError = true;
+ }
+#endif
+
if (FLAGS_writeJsonSummaryPath.count() == 1) {
Json::Value actualResults;
actualResults[kJsonKey_ActualResults_Failed] =
@@ -1809,7 +1854,7 @@ int tool_main(int argc, char** argv) {
#endif
SkGraphics::Term();
- return (0 == gmmain.NumSignificantErrors()) ? 0 : -1;
+ return (reportError) ? -1 : 0;
}
void GMMain::installFilter(SkCanvas* canvas) {
diff --git a/gm/selftest.cpp b/gm/selftest.cpp
index 812d47512f..dd6e67b50b 100644
--- a/gm/selftest.cpp
+++ b/gm/selftest.cpp
@@ -16,7 +16,8 @@
class SelfTestGM : public skiagm::GM {
public:
- SelfTestGM(const char name[], SkColor color) : fName(name), fColor(color) {}
+ SelfTestGM(const char name[], SkColor color, uint32_t flags) :
+ fName(name), fColor(color), fFlags(flags) {}
const static int kWidth = 300;
const static int kHeight = 200;
@@ -34,9 +35,12 @@ protected:
canvas->drawRectCoords(0, 0, SkIntToScalar(kWidth), SkIntToScalar(kHeight), paint);
}
+ virtual uint32_t onGetFlags() const { return fFlags; }
+
private:
const SkString fName;
const SkColor fColor;
+ const uint32_t fFlags;
};
//////////////////////////////////////////////////////////////////////////////
@@ -47,8 +51,12 @@ private:
static SkColor kTranslucentGreen = 0x7700EE00;
static SkColor kTranslucentBlue = 0x770000DD;
-static skiagm::GM* F1(void*) { return new SelfTestGM("selftest1", kTranslucentGreen); }
-static skiagm::GM* F2(void*) { return new SelfTestGM("selftest2", kTranslucentBlue); }
+static skiagm::GM* F1(void*) {
+ return new SelfTestGM("selftest1", kTranslucentGreen, 0);
+}
+static skiagm::GM* F2(void*) {
+ return new SelfTestGM("selftest2", kTranslucentBlue, skiagm::GM::kSkipPipe_Flag);
+}
static skiagm::GMRegistry gR1(F1);
static skiagm::GMRegistry gR2(F2);
diff --git a/gm/tests/outputs/compared-against-different-pixels-images/output-expected/stdout b/gm/tests/outputs/compared-against-different-pixels-images/output-expected/stdout
index 8b8e61ddc3..02ccb62a68 100644
--- a/gm/tests/outputs/compared-against-different-pixels-images/output-expected/stdout
+++ b/gm/tests/outputs/compared-against-different-pixels-images/output-expected/stdout
@@ -4,8 +4,9 @@ GM: Ran 1 GMs
GM: ... over 2 configs ["8888", "565"]
GM: ... and 7 modes ["pipe", "pipe cross-process", "pipe cross-process, shared address", "replay", "rtree", "serialize", "tilegrid"]
GM: ... so there should be a total of 9 tests.
-GM: Ran 9 tests: NoGpuContext=0 RenderModeMismatch=0 ExpectationsMismatch=2 MissingExpectations=0 WritingReferenceImage=0
+GM: Ran 9 tests: NoGpuContext=0 IntentionallySkipped=0 RenderModeMismatch=0 ExpectationsMismatch=2 MissingExpectations=0 WritingReferenceImage=0
GM: [*] 0 NoGpuContext:
+GM: [ ] 0 IntentionallySkipped:
GM: [*] 0 RenderModeMismatch:
GM: [*] 2 ExpectationsMismatch: 8888/selftest1 565/selftest1
GM: [ ] 0 MissingExpectations:
diff --git a/gm/tests/outputs/compared-against-different-pixels-json/output-expected/stdout b/gm/tests/outputs/compared-against-different-pixels-json/output-expected/stdout
index 2fd0269ea3..5e34f19c24 100644
--- a/gm/tests/outputs/compared-against-different-pixels-json/output-expected/stdout
+++ b/gm/tests/outputs/compared-against-different-pixels-json/output-expected/stdout
@@ -4,8 +4,9 @@ GM: Ran 1 GMs
GM: ... over 2 configs ["8888", "565"]
GM: ... and 7 modes ["pipe", "pipe cross-process", "pipe cross-process, shared address", "replay", "rtree", "serialize", "tilegrid"]
GM: ... so there should be a total of 9 tests.
-GM: Ran 9 tests: NoGpuContext=0 RenderModeMismatch=0 ExpectationsMismatch=2 MissingExpectations=0 WritingReferenceImage=0
+GM: Ran 9 tests: NoGpuContext=0 IntentionallySkipped=0 RenderModeMismatch=0 ExpectationsMismatch=2 MissingExpectations=0 WritingReferenceImage=0
GM: [*] 0 NoGpuContext:
+GM: [ ] 0 IntentionallySkipped:
GM: [*] 0 RenderModeMismatch:
GM: [*] 2 ExpectationsMismatch: 8888/selftest1 565/selftest1
GM: [ ] 0 MissingExpectations:
diff --git a/gm/tests/outputs/compared-against-empty-dir/output-expected/stdout b/gm/tests/outputs/compared-against-empty-dir/output-expected/stdout
index 829c05e6df..9482216fa4 100644
--- a/gm/tests/outputs/compared-against-empty-dir/output-expected/stdout
+++ b/gm/tests/outputs/compared-against-empty-dir/output-expected/stdout
@@ -4,8 +4,9 @@ GM: Ran 1 GMs
GM: ... over 2 configs ["8888", "565"]
GM: ... and 7 modes ["pipe", "pipe cross-process", "pipe cross-process, shared address", "replay", "rtree", "serialize", "tilegrid"]
GM: ... so there should be a total of 9 tests.
-GM: Ran 9 tests: NoGpuContext=0 RenderModeMismatch=0 ExpectationsMismatch=0 MissingExpectations=2 WritingReferenceImage=0
+GM: Ran 9 tests: NoGpuContext=0 IntentionallySkipped=0 RenderModeMismatch=0 ExpectationsMismatch=0 MissingExpectations=2 WritingReferenceImage=0
GM: [*] 0 NoGpuContext:
+GM: [ ] 0 IntentionallySkipped:
GM: [*] 0 RenderModeMismatch:
GM: [*] 0 ExpectationsMismatch:
GM: [ ] 2 MissingExpectations: 8888/selftest1 565/selftest1
diff --git a/gm/tests/outputs/compared-against-identical-bytes-images/output-expected/stdout b/gm/tests/outputs/compared-against-identical-bytes-images/output-expected/stdout
index 5788d69cca..ee43b3ed72 100644
--- a/gm/tests/outputs/compared-against-identical-bytes-images/output-expected/stdout
+++ b/gm/tests/outputs/compared-against-identical-bytes-images/output-expected/stdout
@@ -4,8 +4,9 @@ GM: Ran 1 GMs
GM: ... over 2 configs ["8888", "565"]
GM: ... and 7 modes ["pipe", "pipe cross-process", "pipe cross-process, shared address", "replay", "rtree", "serialize", "tilegrid"]
GM: ... so there should be a total of 9 tests.
-GM: Ran 9 tests: NoGpuContext=0 RenderModeMismatch=0 ExpectationsMismatch=0 MissingExpectations=0 WritingReferenceImage=0
+GM: Ran 9 tests: NoGpuContext=0 IntentionallySkipped=0 RenderModeMismatch=0 ExpectationsMismatch=0 MissingExpectations=0 WritingReferenceImage=0
GM: [*] 0 NoGpuContext:
+GM: [ ] 0 IntentionallySkipped:
GM: [*] 0 RenderModeMismatch:
GM: [*] 0 ExpectationsMismatch:
GM: [ ] 0 MissingExpectations:
diff --git a/gm/tests/outputs/compared-against-identical-bytes-json/output-expected/stdout b/gm/tests/outputs/compared-against-identical-bytes-json/output-expected/stdout
index 303a23fc35..92c1b9fd90 100644
--- a/gm/tests/outputs/compared-against-identical-bytes-json/output-expected/stdout
+++ b/gm/tests/outputs/compared-against-identical-bytes-json/output-expected/stdout
@@ -4,8 +4,9 @@ GM: Ran 1 GMs
GM: ... over 2 configs ["8888", "565"]
GM: ... and 7 modes ["pipe", "pipe cross-process", "pipe cross-process, shared address", "replay", "rtree", "serialize", "tilegrid"]
GM: ... so there should be a total of 9 tests.
-GM: Ran 9 tests: NoGpuContext=0 RenderModeMismatch=0 ExpectationsMismatch=0 MissingExpectations=0 WritingReferenceImage=0
+GM: Ran 9 tests: NoGpuContext=0 IntentionallySkipped=0 RenderModeMismatch=0 ExpectationsMismatch=0 MissingExpectations=0 WritingReferenceImage=0
GM: [*] 0 NoGpuContext:
+GM: [ ] 0 IntentionallySkipped:
GM: [*] 0 RenderModeMismatch:
GM: [*] 0 ExpectationsMismatch:
GM: [ ] 0 MissingExpectations:
diff --git a/gm/tests/outputs/compared-against-identical-pixels-images/output-expected/stdout b/gm/tests/outputs/compared-against-identical-pixels-images/output-expected/stdout
index 4c4edba270..8a3dbaf216 100644
--- a/gm/tests/outputs/compared-against-identical-pixels-images/output-expected/stdout
+++ b/gm/tests/outputs/compared-against-identical-pixels-images/output-expected/stdout
@@ -4,8 +4,9 @@ GM: Ran 1 GMs
GM: ... over 2 configs ["8888", "565"]
GM: ... and 7 modes ["pipe", "pipe cross-process", "pipe cross-process, shared address", "replay", "rtree", "serialize", "tilegrid"]
GM: ... so there should be a total of 9 tests.
-GM: Ran 9 tests: NoGpuContext=0 RenderModeMismatch=0 ExpectationsMismatch=0 MissingExpectations=0 WritingReferenceImage=0
+GM: Ran 9 tests: NoGpuContext=0 IntentionallySkipped=0 RenderModeMismatch=0 ExpectationsMismatch=0 MissingExpectations=0 WritingReferenceImage=0
GM: [*] 0 NoGpuContext:
+GM: [ ] 0 IntentionallySkipped:
GM: [*] 0 RenderModeMismatch:
GM: [*] 0 ExpectationsMismatch:
GM: [ ] 0 MissingExpectations:
diff --git a/gm/tests/outputs/compared-against-identical-pixels-json/output-expected/stdout b/gm/tests/outputs/compared-against-identical-pixels-json/output-expected/stdout
index 9a9d91b25f..0bc0fc4a4e 100644
--- a/gm/tests/outputs/compared-against-identical-pixels-json/output-expected/stdout
+++ b/gm/tests/outputs/compared-against-identical-pixels-json/output-expected/stdout
@@ -4,8 +4,9 @@ GM: Ran 1 GMs
GM: ... over 2 configs ["8888", "565"]
GM: ... and 7 modes ["pipe", "pipe cross-process", "pipe cross-process, shared address", "replay", "rtree", "serialize", "tilegrid"]
GM: ... so there should be a total of 9 tests.
-GM: Ran 9 tests: NoGpuContext=0 RenderModeMismatch=0 ExpectationsMismatch=0 MissingExpectations=0 WritingReferenceImage=0
+GM: Ran 9 tests: NoGpuContext=0 IntentionallySkipped=0 RenderModeMismatch=0 ExpectationsMismatch=0 MissingExpectations=0 WritingReferenceImage=0
GM: [*] 0 NoGpuContext:
+GM: [ ] 0 IntentionallySkipped:
GM: [*] 0 RenderModeMismatch:
GM: [*] 0 ExpectationsMismatch:
GM: [ ] 0 MissingExpectations:
diff --git a/gm/tests/outputs/intentionally-skipped-tests/output-expected/command_line b/gm/tests/outputs/intentionally-skipped-tests/output-expected/command_line
new file mode 100644
index 0000000000..c80a9a20ce
--- /dev/null
+++ b/gm/tests/outputs/intentionally-skipped-tests/output-expected/command_line
@@ -0,0 +1 @@
+out/Debug/gm --hierarchy --match selftest1 selftest2 --config 8888 565 --writeJsonSummaryPath gm/tests/outputs/intentionally-skipped-tests/output-actual/json-summary.txt
diff --git a/gm/tests/outputs/intentionally-skipped-tests/output-expected/json-summary.txt b/gm/tests/outputs/intentionally-skipped-tests/output-expected/json-summary.txt
new file mode 100644
index 0000000000..a3ac39fb59
--- /dev/null
+++ b/gm/tests/outputs/intentionally-skipped-tests/output-expected/json-summary.txt
@@ -0,0 +1,22 @@
+{
+ "actual-results" : {
+ "failed" : null,
+ "failure-ignored" : null,
+ "no-comparison" : {
+ "565/selftest1" : {
+ "checksum" : 9512553915271796906
+ },
+ "565/selftest2" : {
+ "checksum" : 11071285354315388429
+ },
+ "8888/selftest1" : {
+ "checksum" : 14022967492765711532
+ },
+ "8888/selftest2" : {
+ "checksum" : 16527650414256125612
+ }
+ },
+ "succeeded" : null
+ },
+ "expected-results" : null
+}
diff --git a/gm/tests/outputs/intentionally-skipped-tests/output-expected/return_value b/gm/tests/outputs/intentionally-skipped-tests/output-expected/return_value
new file mode 100644
index 0000000000..573541ac97
--- /dev/null
+++ b/gm/tests/outputs/intentionally-skipped-tests/output-expected/return_value
@@ -0,0 +1 @@
+0
diff --git a/gm/tests/outputs/intentionally-skipped-tests/output-expected/stderr b/gm/tests/outputs/intentionally-skipped-tests/output-expected/stderr
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/gm/tests/outputs/intentionally-skipped-tests/output-expected/stderr
diff --git a/gm/tests/outputs/intentionally-skipped-tests/output-expected/stdout b/gm/tests/outputs/intentionally-skipped-tests/output-expected/stdout
new file mode 100644
index 0000000000..3165dd4246
--- /dev/null
+++ b/gm/tests/outputs/intentionally-skipped-tests/output-expected/stdout
@@ -0,0 +1,14 @@
+GM: drawing... selftest2 [300 200]
+GM: drawing... selftest1 [300 200]
+GM: Ran 2 GMs
+GM: ... over 2 configs ["8888", "565"]
+GM: ... and 7 modes ["pipe", "pipe cross-process", "pipe cross-process, shared address", "replay", "rtree", "serialize", "tilegrid"]
+GM: ... so there should be a total of 18 tests.
+GM: Ran 18 tests: NoGpuContext=0 IntentionallySkipped=3 RenderModeMismatch=0 ExpectationsMismatch=0 MissingExpectations=4 WritingReferenceImage=0
+GM: [*] 0 NoGpuContext:
+GM: [ ] 3 IntentionallySkipped: comparison/selftest2-pipe comparison/selftest2-pipe cross-process comparison/selftest2-pipe cross-process, shared address
+GM: [*] 0 RenderModeMismatch:
+GM: [*] 0 ExpectationsMismatch:
+GM: [ ] 4 MissingExpectations: 8888/selftest2 565/selftest2 8888/selftest1 565/selftest1
+GM: [*] 0 WritingReferenceImage:
+GM: (results marked with [*] will cause nonzero return value)
diff --git a/gm/tests/outputs/no-readpath/output-expected/stdout b/gm/tests/outputs/no-readpath/output-expected/stdout
index 1bdc86daf7..a892db02f9 100644
--- a/gm/tests/outputs/no-readpath/output-expected/stdout
+++ b/gm/tests/outputs/no-readpath/output-expected/stdout
@@ -3,8 +3,9 @@ GM: Ran 1 GMs
GM: ... over 2 configs ["8888", "565"]
GM: ... and 7 modes ["pipe", "pipe cross-process", "pipe cross-process, shared address", "replay", "rtree", "serialize", "tilegrid"]
GM: ... so there should be a total of 9 tests.
-GM: Ran 9 tests: NoGpuContext=0 RenderModeMismatch=0 ExpectationsMismatch=0 MissingExpectations=2 WritingReferenceImage=0
+GM: Ran 9 tests: NoGpuContext=0 IntentionallySkipped=0 RenderModeMismatch=0 ExpectationsMismatch=0 MissingExpectations=2 WritingReferenceImage=0
GM: [*] 0 NoGpuContext:
+GM: [ ] 0 IntentionallySkipped:
GM: [*] 0 RenderModeMismatch:
GM: [*] 0 ExpectationsMismatch:
GM: [ ] 2 MissingExpectations: 8888/selftest1 565/selftest1
diff --git a/gm/tests/outputs/pipe-playback-failure/output-expected/stdout b/gm/tests/outputs/pipe-playback-failure/output-expected/stdout
index 52fc54f2a1..64127be93d 100644
--- a/gm/tests/outputs/pipe-playback-failure/output-expected/stdout
+++ b/gm/tests/outputs/pipe-playback-failure/output-expected/stdout
@@ -4,8 +4,9 @@ GM: Ran 1 GMs
GM: ... over 2 configs ["8888", "565"]
GM: ... and 5 modes ["pipe", "replay", "rtree", "serialize", "tilegrid"]
GM: ... so there should be a total of 7 tests.
-GM: Ran 7 tests: NoGpuContext=0 RenderModeMismatch=1 ExpectationsMismatch=0 MissingExpectations=0 WritingReferenceImage=0
+GM: Ran 7 tests: NoGpuContext=0 IntentionallySkipped=0 RenderModeMismatch=1 ExpectationsMismatch=0 MissingExpectations=0 WritingReferenceImage=0
GM: [*] 0 NoGpuContext:
+GM: [ ] 0 IntentionallySkipped:
GM: [*] 1 RenderModeMismatch: comparison/selftest1-pipe
GM: [*] 0 ExpectationsMismatch:
GM: [ ] 0 MissingExpectations:
diff --git a/gm/tests/run.sh b/gm/tests/run.sh
index 093f7d777e..8348cce0c0 100755
--- a/gm/tests/run.sh
+++ b/gm/tests/run.sh
@@ -159,4 +159,7 @@ gm_test "--hierarchy --match selftest1 $CONFIGS" "$GM_OUTPUTS/no-readpath"
# Test what happens if a subset of the renderModes fail (e.g. pipe)
gm_test "--simulatePipePlaybackFailure --hierarchy --match selftest1 $CONFIGS -r $GM_INPUTS/json/identical-pixels.json" "$GM_OUTPUTS/pipe-playback-failure"
+# Confirm that IntentionallySkipped tests are recorded as such.
+gm_test "--hierarchy --match selftest1 selftest2 $CONFIGS" "$GM_OUTPUTS/intentionally-skipped-tests"
+
echo "All tests passed."