author     epoger@google.com <epoger@google.com@2bbb7eff-a529-9590-31e7-b0007b416f81>  2013-01-15 20:24:03 +0000
committer  epoger@google.com <epoger@google.com@2bbb7eff-a529-9590-31e7-b0007b416f81>  2013-01-15 20:24:03 +0000
commit     bc159bdcd01c6f035af6f72e1154874c8eb35dfe (patch)
tree       5d4889cfe444020368594b5b7e8dd8920f53264b
parent     93182311781e79a6883b356bf2669c389878f9e5 (diff)

Review URL: https://codereview.appspot.com/7124043

git-svn-id: http://skia.googlecode.com/svn/trunk@7203 2bbb7eff-a529-9590-31e7-b0007b416f81
-rw-r--r--  gm/gm_expectations.h                                                           164
-rw-r--r--  gm/gmmain.cpp                                                                  432
-rw-r--r--  gm/tests/outputs/compared-against-empty-dir/output-expected/json-summary.txt     2
3 files changed, 201 insertions(+), 397 deletions(-)
diff --git a/gm/gm_expectations.h b/gm/gm_expectations.h
deleted file mode 100644
index e4e6a8799d..0000000000
--- a/gm/gm_expectations.h
+++ /dev/null
@@ -1,164 +0,0 @@
-/*
- * Copyright 2013 Google Inc.
- *
- * Use of this source code is governed by a BSD-style license that can be
- * found in the LICENSE file.
- */
-#ifndef gm_expectations_DEFINED
-#define gm_expectations_DEFINED
-
-#include "gm.h"
-#include "SkBitmapChecksummer.h"
-#include "SkImageDecoder.h"
-#include "SkOSFile.h"
-#include "SkRefCnt.h"
-#include "SkTArray.h"
-#include "json/value.h"
-
-namespace skiagm {
-
- // The actual type we use to represent a checksum is hidden in here.
- typedef uint64_t Checksum;
- static inline Json::Value asJsonValue(Checksum checksum) {
- return Json::UInt64(checksum);
- }
-
- static SkString make_filename(const char path[],
- const char renderModeDescriptor[],
- const char *name,
- const char suffix[]) {
- SkString filename(path);
- if (filename.endsWith(SkPATH_SEPARATOR)) {
- filename.remove(filename.size() - 1, 1);
- }
- filename.appendf("%c%s%s.%s", SkPATH_SEPARATOR,
- name, renderModeDescriptor, suffix);
- return filename;
- }
-
- /**
- * Test expectations (allowed image checksums, etc.)
- */
- class Expectations {
- public:
- /**
- * No expectations at all.
- *
- * We set ignoreFailure to false by default, but it doesn't really
- * matter... the result will always be "no-comparison" anyway.
- */
- Expectations(bool ignoreFailure=false) :
- fAllowedChecksums(SkTArray<Checksum>()),
- fIgnoreFailure(ignoreFailure) {}
-
- /**
- * Allow exactly one checksum (appropriate for the case when we
- * are comparing against a single PNG file).
- *
- * By default, DO NOT ignore failures.
- */
- Expectations(Checksum singleChecksum, bool ignoreFailure=false) :
- fAllowedChecksums(SkTArray<Checksum>(&singleChecksum, 1)),
- fIgnoreFailure(ignoreFailure) {
- }
-
- /**
- * Returns true iff we want to ignore failed expectations.
- */
- bool ignoreFailure() const { return this->fIgnoreFailure; }
-
- /**
- * Returns true iff there are no allowed checksums.
- */
- bool empty() const { return this->fAllowedChecksums.empty(); }
-
- /**
- * Returns true iff actualChecksum matches any allowedChecksum,
- * regardless of fIgnoreFailure. (The caller can check
- * that separately.)
- */
- bool match(Checksum actualChecksum) const {
- for (int i=0; i < this->fAllowedChecksums.count(); i++) {
- Checksum allowedChecksum = this->fAllowedChecksums[i];
- if (allowedChecksum == actualChecksum) {
- return true;
- }
- }
- return false;
- }
-
- /**
- * Return a JSON representation of the allowed checksums.
- * This does NOT include any information about whether to
- * ignore failures.
- */
- Json::Value allowedChecksumsAsJson() const {
- Json::Value allowedChecksumArray;
- if (!this->fAllowedChecksums.empty()) {
- for (int i=0; i < this->fAllowedChecksums.count(); i++) {
- Checksum allowedChecksum = this->fAllowedChecksums[i];
- allowedChecksumArray.append(asJsonValue(allowedChecksum));
- }
- }
- return allowedChecksumArray;
- }
-
- private:
- const SkTArray<Checksum> fAllowedChecksums;
- const bool fIgnoreFailure;
- };
-
- /**
- * Abstract source of Expectations objects for individual tests.
- */
- class ExpectationsSource : public SkRefCnt {
- public:
- virtual Expectations get(const char *testName) = 0;
- };
-
- /**
- * Return Expectations based on individual image files on disk.
- */
- class IndividualImageExpectationsSource : public ExpectationsSource {
- public:
- /**
- * Create an ExpectationsSource that will return Expectations based on
- * image files found within rootDir.
- *
- * rootDir: directory under which to look for image files
- * (this string will be copied to storage within this object)
- * notifyOfMissingFiles: whether to log a message to stderr if an image
- * file cannot be found
- */
- IndividualImageExpectationsSource(const char *rootDir,
- bool notifyOfMissingFiles) :
- fRootDir(rootDir), fNotifyOfMissingFiles(notifyOfMissingFiles) {}
-
- Expectations get(const char *testName) SK_OVERRIDE {
- SkString path = make_filename(fRootDir.c_str(), "", testName,
- "png");
- SkBitmap referenceBitmap;
- bool decodedReferenceBitmap =
- SkImageDecoder::DecodeFile(path.c_str(), &referenceBitmap,
- SkBitmap::kARGB_8888_Config,
- SkImageDecoder::kDecodePixels_Mode,
- NULL);
- if (decodedReferenceBitmap) {
- Checksum checksum = SkBitmapChecksummer::Compute64(
- referenceBitmap);
- return Expectations(checksum);
- } else {
- if (fNotifyOfMissingFiles) {
- fprintf(stderr, "FAILED to read %s\n", path.c_str());
- }
- return Expectations();
- }
- }
-
- private:
- const SkString fRootDir;
- const bool fNotifyOfMissingFiles;
- };
-
-}
-#endif
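
For readers skimming the deletion above: a minimal sketch of how the removed Expectations API was consumed (a hypothetical caller, not code from this commit; gmmain.cpp's real call sites also folded the result into an ErrorBitfield and the JSON summary):

    // Hypothetical caller of the deleted gm_expectations.h API. Assumes the
    // Skia GM sources are on the include path; the actual checksum would come
    // from SkBitmapChecksummer::Compute64() on the rendered bitmap.
    #include "gm_expectations.h"
    #include <stdio.h>

    static void report(skiagm::ExpectationsSource* source,
                       const char* testName,
                       skiagm::Checksum actualChecksum) {
        skiagm::Expectations expectations = source->get(testName);
        if (expectations.empty()) {
            // The "no-comparison" case: no reference image was found.
            printf("%s: no reference to compare against\n", testName);
        } else if (expectations.match(actualChecksum)) {
            printf("%s: matched an allowed checksum\n", testName);
        } else if (expectations.ignoreFailure()) {
            printf("%s: mismatch, but failures are ignored\n", testName);
        } else {
            printf("%s: image mismatch\n", testName);
        }
    }

    // Construction, per the header above: one source per image directory.
    //   skiagm::IndividualImageExpectationsSource source("/path/to/images",
    //                                                    true /*notify*/);
    //   report(&source, "dashing2", actualChecksum);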
diff --git a/gm/gmmain.cpp b/gm/gmmain.cpp
index 290ca225d7..964bafa9ad 100644
--- a/gm/gmmain.cpp
+++ b/gm/gmmain.cpp
@@ -14,7 +14,6 @@
*/
#include "gm.h"
-#include "gm_expectations.h"
#include "system_preferences.h"
#include "SkBitmapChecksummer.h"
#include "SkColorPriv.h"
@@ -200,6 +199,7 @@ public:
GMMain() {
// Set default values of member variables, which tool_main()
// may override.
+ fNotifyMissingReadReference = true;
fUseFileHierarchy = false;
fMismatchPath = NULL;
}
@@ -216,6 +216,19 @@ public:
return name;
}
+ static SkString make_filename(const char path[],
+ const char renderModeDescriptor[],
+ const SkString& name,
+ const char suffix[]) {
+ SkString filename(path);
+ if (filename.endsWith(SkPATH_SEPARATOR)) {
+ filename.remove(filename.size() - 1, 1);
+ }
+ filename.appendf("%c%s%s.%s", SkPATH_SEPARATOR,
+ name.c_str(), renderModeDescriptor, suffix);
+ return filename;
+ }
+
/* since PNG insists on unpremultiplying our alpha, we take no
precision chances and force all pixels to be 100% opaque,
otherwise on compare we may not get a perfect match.
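
The comment above refers to a helper defined elsewhere in gmmain.cpp, outside the hunks shown here. Its assumed shape (a sketch, not part of this diff) is:

    // Assumed shape of force_all_opaque(), per the comment above: OR an
    // alpha of 0xFF into every pixel so PNG's unpremultiply round-trip
    // cannot introduce per-channel differences on compare.
    #include "SkBitmap.h"
    #include "SkColorPriv.h"

    static void force_all_opaque(const SkBitmap& bitmap) {
        SkAutoLockPixels lock(bitmap);
        for (int y = 0; y < bitmap.height(); y++) {
            for (int x = 0; x < bitmap.width(); x++) {
                *bitmap.getAddr32(x, y) |= (SK_A32_MASK << SK_A32_SHIFT);
            }
        }
    }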
@@ -271,6 +284,51 @@ public:
}
}
+ // Compares "target" and "base" bitmaps, returning the result
+ // (ERROR_NONE if the two bitmaps are identical).
+ ErrorBitfield compare(const SkBitmap& target, const SkBitmap& base,
+ const SkString& name,
+ const char* renderModeDescriptor) {
+ SkBitmap copy;
+ const SkBitmap* bm = &target;
+ if (target.config() != SkBitmap::kARGB_8888_Config) {
+ target.copyTo(&copy, SkBitmap::kARGB_8888_Config);
+ bm = &copy;
+ }
+ SkBitmap baseCopy;
+ const SkBitmap* bp = &base;
+ if (base.config() != SkBitmap::kARGB_8888_Config) {
+ base.copyTo(&baseCopy, SkBitmap::kARGB_8888_Config);
+ bp = &baseCopy;
+ }
+
+ force_all_opaque(*bm);
+ force_all_opaque(*bp);
+
+ const int w = bm->width();
+ const int h = bm->height();
+ if (w != bp->width() || h != bp->height()) {
+ RecordError(ERROR_IMAGE_MISMATCH, name, renderModeDescriptor);
+ return ERROR_IMAGE_MISMATCH;
+ }
+
+ SkAutoLockPixels bmLock(*bm);
+ SkAutoLockPixels baseLock(*bp);
+
+ for (int y = 0; y < h; y++) {
+ for (int x = 0; x < w; x++) {
+ SkPMColor c0 = *bp->getAddr32(x, y);
+ SkPMColor c1 = *bm->getAddr32(x, y);
+ if (c0 != c1) {
+ RecordError(ERROR_IMAGE_MISMATCH, name,
+ renderModeDescriptor);
+ return ERROR_IMAGE_MISMATCH;
+ }
+ }
+ }
+ return ERROR_NONE;
+ }
+
static bool write_document(const SkString& path,
const SkDynamicMemoryWStream& document) {
SkFILEWStream stream(path.c_str());
@@ -427,18 +485,15 @@ public:
gRec.fBackend == kGPU_Backend ||
(gRec.fBackend == kPDF_Backend && CAN_IMAGE_PDF)) {
- path = make_filename(writePath, renderModeDescriptor, name.c_str(),
- "png");
+ path = make_filename(writePath, renderModeDescriptor, name, "png");
success = write_bitmap(path, bitmap);
}
if (kPDF_Backend == gRec.fBackend) {
- path = make_filename(writePath, renderModeDescriptor, name.c_str(),
- "pdf");
+ path = make_filename(writePath, renderModeDescriptor, name, "pdf");
success = write_document(path, *document);
}
if (kXPS_Backend == gRec.fBackend) {
- path = make_filename(writePath, renderModeDescriptor, name.c_str(),
- "xps");
+ path = make_filename(writePath, renderModeDescriptor, name, "xps");
success = write_document(path, *document);
}
if (success) {
@@ -451,197 +506,117 @@ public:
}
}
- /**
- * Compares actual checksum to expectations.
- * Returns ERROR_NONE if they match, or some particular error code otherwise
- *
- * If fMismatchPath has been set, and there are pixel diffs, then the
- * actual bitmap will be written out to a file within fMismatchPath.
- *
- * @param expectations what expectations to compare actualBitmap against
- * @param actualBitmap the image we actually generated
- * @param baseNameString name of test without renderModeDescriptor added
- * @param renderModeDescriptor e.g., "-rtree", "-deferred"
- * @param addToJsonSummary whether to add these results (both actual and
- * expected) to the JSON summary
- *
- * TODO: For now, addToJsonSummary is only set to true within
- * compare_test_results_to_stored_expectations(), so results of our
- * in-memory comparisons (Rtree vs regular, etc.) are not written to the
- * JSON summary. We may wish to change that.
- */
- ErrorBitfield compare_to_expectations(Expectations expectations,
- const SkBitmap& actualBitmap,
- const SkString& baseNameString,
- const char renderModeDescriptor[],
- bool addToJsonSummary=false) {
+ // Compares bitmap "bitmap" to a reference bitmap read from disk.
+ //
+ // Returns a description of the difference between "bitmap" and
+ // the reference bitmap, or ERROR_READING_REFERENCE_IMAGE if
+ // unable to read the reference bitmap from disk.
+ ErrorBitfield compare_to_reference_image_on_disk(
+ const char readPath [], const SkString& name, SkBitmap &bitmap,
+ const char renderModeDescriptor []) {
ErrorBitfield retval;
- Checksum actualChecksum = SkBitmapChecksummer::Compute64(actualBitmap);
- SkString completeNameString = baseNameString;
- completeNameString.append(renderModeDescriptor);
- const char* completeName = completeNameString.c_str();
-
- if (expectations.empty()) {
- retval = ERROR_READING_REFERENCE_IMAGE;
- } else if (expectations.match(actualChecksum)) {
- retval = ERROR_NONE;
+ SkString path = make_filename(readPath, "", name, "png");
+ SkBitmap referenceBitmap;
+ Json::Value expectedChecksumsArray;
+
+ bool decodedReferenceBitmap =
+ SkImageDecoder::DecodeFile(path.c_str(), &referenceBitmap,
+ SkBitmap::kARGB_8888_Config,
+ SkImageDecoder::kDecodePixels_Mode,
+ NULL);
+ if (decodedReferenceBitmap) {
+ expectedChecksumsArray.append(Json::UInt64(
+ SkBitmapChecksummer::Compute64(referenceBitmap)));
+ retval = compare(bitmap, referenceBitmap, name,
+ renderModeDescriptor);
+ if (fMismatchPath && (retval & ERROR_IMAGE_MISMATCH)) {
+ SkString path = make_filename(fMismatchPath, renderModeDescriptor, name, "png");
+ write_bitmap(path, bitmap);
+ }
} else {
- retval = ERROR_IMAGE_MISMATCH;
- if (fMismatchPath) {
- SkString path =
- make_filename(fMismatchPath, renderModeDescriptor,
- baseNameString.c_str(), "png");
- write_bitmap(path, actualBitmap);
+ if (fNotifyMissingReadReference) {
+ fprintf(stderr, "FAILED to read %s\n", path.c_str());
}
+ RecordError(ERROR_READING_REFERENCE_IMAGE, name,
+ renderModeDescriptor);
+ retval = ERROR_READING_REFERENCE_IMAGE;
}
- RecordError(retval, baseNameString, renderModeDescriptor);
-
- if (addToJsonSummary) {
- add_actual_results_to_json_summary(completeName, actualChecksum,
- retval,
- expectations.ignoreFailure());
- add_expected_results_to_json_summary(completeName, expectations);
- }
-
- return retval;
- }
- /**
- * Add this result to the appropriate JSON collection of actual results,
- * depending on status.
- */
- void add_actual_results_to_json_summary(const char testName[],
- Checksum actualChecksum,
- ErrorBitfield result,
- bool ignoreFailure) {
+ // Add this result to the appropriate JSON collection of actual results,
+ // depending on status.
Json::Value actualResults;
- actualResults[kJsonKey_ActualResults_AnyStatus_Checksum] =
- asJsonValue(actualChecksum);
- if (ERROR_NONE == result) {
- this->fJsonActualResults_Succeeded[testName] = actualResults;
- } else {
- if (ignoreFailure) {
- // TODO: Once we have added the ability to compare
- // actual results against expectations in a JSON file
- // (where we can set ignore-failure to either true or
- // false), add tests cases that exercise ignored
- // failures (both for ERROR_READING_REFERENCE_IMAGE
- // and ERROR_IMAGE_MISMATCH).
- this->fJsonActualResults_FailureIgnored[testName] =
- actualResults;
+ actualResults[kJsonKey_ActualResults_AnyStatus_Checksum] = Json::UInt64(
+ SkBitmapChecksummer::Compute64(bitmap));
+ if (decodedReferenceBitmap) {
+ if (ERROR_NONE == retval) {
+ fJsonActualResults_Succeeded[name.c_str()] = actualResults;
} else {
- switch(result) {
- case ERROR_READING_REFERENCE_IMAGE:
- // TODO: What about the case where there IS an
- // expected image checksum, but that gm test
- // doesn't actually run? For now, those cases
- // will always be ignored, because gm only looks
- // at expectations that correspond to gm tests
- // that were actually run.
- //
- // Once we have the ability to express
- // expectations as a JSON file, we should fix this
- // (and add a test case for which an expectation
- // is given but the test is never run).
- this->fJsonActualResults_NoComparison[testName] =
- actualResults;
- break;
- case ERROR_IMAGE_MISMATCH:
- this->fJsonActualResults_Failed[testName] = actualResults;
- break;
- default:
- fprintf(stderr, "encountered unexpected result %d\n",
- result);
- SkDEBUGFAIL("encountered unexpected result");
- break;
- }
+ fJsonActualResults_Failed[name.c_str()] = actualResults;
}
+ } else {
+ fJsonActualResults_NoComparison[name.c_str()] = actualResults;
}
- }
- /**
- * Add this test to the JSON collection of expected results.
- */
- void add_expected_results_to_json_summary(const char testName[],
- Expectations expectations) {
+ // Add this test to the JSON collection of expected results.
// For now, we assume that this collection starts out empty and we
// just fill it in as we go; once gm accepts a JSON file as input,
// we'll have to change that.
Json::Value expectedResults;
- expectedResults[kJsonKey_ExpectedResults_Checksums] =
- expectations.allowedChecksumsAsJson();
- expectedResults[kJsonKey_ExpectedResults_IgnoreFailure] =
- expectations.ignoreFailure();
- this->fJsonExpectedResults[testName] = expectedResults;
- }
+ expectedResults[kJsonKey_ExpectedResults_Checksums] = expectedChecksumsArray;
+ expectedResults[kJsonKey_ExpectedResults_IgnoreFailure] = !decodedReferenceBitmap;
+ fJsonExpectedResults[name.c_str()] = expectedResults;
- /**
- * Compare actualBitmap to expectations stored in this->fExpectationsSource.
- *
- * @param gm which test generated the actualBitmap
- * @param gRec
- * @param writePath unless this is NULL, write out actual images into this
- * directory
- * @param actualBitmap bitmap generated by this run
- * @param pdf
- */
- ErrorBitfield compare_test_results_to_stored_expectations(
- GM* gm, const ConfigData& gRec, const char writePath[],
- SkBitmap& actualBitmap, SkDynamicMemoryWStream* pdf) {
+ return retval;
+ }
+ // NOTE: As far as I can tell, this function is NEVER called with a
+ // non-blank renderModeDescriptor, EXCEPT when readPath and writePath are
+ // both NULL (and thus no images are read from or written to disk).
+ // So I don't trust that the renderModeDescriptor is being used for
+ // anything other than debug output these days.
+ ErrorBitfield handle_test_results(GM* gm,
+ const ConfigData& gRec,
+ const char writePath [],
+ const char readPath [],
+ const char renderModeDescriptor [],
+ SkBitmap& bitmap,
+ SkDynamicMemoryWStream* pdf,
+ const SkBitmap* referenceBitmap) {
SkString name = make_name(gm->shortName(), gRec.fName);
ErrorBitfield retval = ERROR_NONE;
- ExpectationsSource *expectationsSource =
- this->fExpectationsSource.get();
- if (expectationsSource && (gRec.fFlags & kRead_ConfigFlag)) {
- Expectations expectations = expectationsSource->get(name.c_str());
- retval |= compare_to_expectations(expectations, actualBitmap,
- name, "", true);
- } else {
- // If we are running without expectations, we still want to
+ if (readPath && (gRec.fFlags & kRead_ConfigFlag)) {
+ retval |= compare_to_reference_image_on_disk(readPath, name, bitmap,
+ renderModeDescriptor);
+ } else if (NULL == referenceBitmap) {
+ // If we are running without "--readPath", we still want to
// record the actual results.
- Checksum actualChecksum =
- SkBitmapChecksummer::Compute64(actualBitmap);
- add_actual_results_to_json_summary(name.c_str(), actualChecksum,
- ERROR_READING_REFERENCE_IMAGE,
- false);
+ //
+ // For now, though, we don't record results of comparisons against
+ // different in-memory representations (hence the referenceBitmap
+ // NULL check).
+ Json::Value actualResults;
+ actualResults[kJsonKey_ActualResults_AnyStatus_Checksum] =
+ Json::UInt64(SkBitmapChecksummer::Compute64(bitmap));
+ fJsonActualResults_NoComparison[name.c_str()] = actualResults;
}
-
- // TODO: Consider moving this into compare_to_expectations(),
- // similar to fMismatchPath... for now, we don't do that, because
- // we don't want to write out the actual bitmaps for all
- // renderModes of all tests! That would be a lot of files.
if (writePath && (gRec.fFlags & kWrite_ConfigFlag)) {
- retval |= write_reference_image(gRec, writePath, "",
- name, actualBitmap, pdf);
+ retval |= write_reference_image(gRec, writePath,
+ renderModeDescriptor,
+ name, bitmap, pdf);
+ }
+ if (referenceBitmap) {
+ ErrorBitfield compareResult = compare(bitmap, *referenceBitmap, name,
+ renderModeDescriptor);
+ if (fMismatchPath && (compareResult & ERROR_IMAGE_MISMATCH)) {
+ SkString path = make_filename(fMismatchPath, renderModeDescriptor, name, "png");
+ write_bitmap(path, bitmap);
+ }
+ retval |= compareResult;
}
-
return retval;
}
- /**
- * Compare actualBitmap to referenceBitmap.
- *
- * @param gm which test generated the bitmap
- * @param gRec
- * @param renderModeDescriptor
- * @param actualBitmap actual bitmap generated by this run
- * @param referenceBitmap bitmap we expected to be generated
- */
- ErrorBitfield compare_test_results_to_reference_bitmap(
- GM* gm, const ConfigData& gRec, const char renderModeDescriptor [],
- SkBitmap& actualBitmap, const SkBitmap* referenceBitmap) {
-
- SkASSERT(referenceBitmap);
- SkString name = make_name(gm->shortName(), gRec.fName);
- Checksum referenceChecksum =
- SkBitmapChecksummer::Compute64(*referenceBitmap);
- Expectations expectations(referenceChecksum);
- return compare_to_expectations(expectations, actualBitmap,
- name, renderModeDescriptor);
- }
-
static SkPicture* generate_new_picture(GM* gm, BbhType bbhType, SkScalar scale = SK_Scalar1) {
// Pictures are refcounted so must be on heap
SkPicture* pict;
@@ -690,6 +665,7 @@ public:
ErrorBitfield test_drawing(GM* gm,
const ConfigData& gRec,
const char writePath [],
+ const char readPath [],
GrContext* context,
GrRenderTarget* rt,
SkBitmap* bitmap) {
@@ -701,9 +677,6 @@ public:
ErrorBitfield errors = generate_image(gm, gRec, context, rt, bitmap,
false);
if (ERROR_NONE != errors) {
- // TODO: Add a test to exercise what the stdout and
- // JSON look like if we get an "early error" while
- // trying to generate the image.
return errors;
}
} else if (gRec.fBackend == kPDF_Backend) {
@@ -716,8 +689,8 @@ public:
} else if (gRec.fBackend == kXPS_Backend) {
generate_xps(gm, document);
}
- return compare_test_results_to_stored_expectations(
- gm, gRec, writePath, *bitmap, &document);
+ return handle_test_results(gm, gRec, writePath, readPath,
+ "", *bitmap, &document, NULL);
}
ErrorBitfield test_deferred_drawing(GM* gm,
@@ -735,15 +708,17 @@ public:
if (!generate_image(gm, gRec, context, rt, &bitmap, true)) {
return ERROR_NONE;
}
- return compare_test_results_to_reference_bitmap(
- gm, gRec, "-deferred", bitmap, &referenceBitmap);
+ return handle_test_results(gm, gRec, NULL, NULL,
+ "-deferred", bitmap, NULL,
+ &referenceBitmap);
}
return ERROR_NONE;
}
ErrorBitfield test_pipe_playback(GM* gm,
const ConfigData& gRec,
- const SkBitmap& referenceBitmap) {
+ const SkBitmap& referenceBitmap,
+ const char readPath []) {
ErrorBitfield errors = ERROR_NONE;
for (size_t i = 0; i < SK_ARRAY_COUNT(gPipeWritingFlagCombos); ++i) {
SkBitmap bitmap;
@@ -758,8 +733,9 @@ public:
writer.endRecording();
SkString string("-pipe");
string.append(gPipeWritingFlagCombos[i].name);
- errors |= compare_test_results_to_reference_bitmap(
- gm, gRec, string.c_str(), bitmap, &referenceBitmap);
+ errors |= handle_test_results(gm, gRec, NULL, NULL,
+ string.c_str(), bitmap, NULL,
+ &referenceBitmap);
if (errors != ERROR_NONE) {
break;
}
@@ -768,7 +744,8 @@ public:
}
ErrorBitfield test_tiled_pipe_playback(
- GM* gm, const ConfigData& gRec, const SkBitmap& referenceBitmap) {
+ GM* gm, const ConfigData& gRec, const SkBitmap& referenceBitmap,
+ const char readPath []) {
ErrorBitfield errors = ERROR_NONE;
for (size_t i = 0; i < SK_ARRAY_COUNT(gPipeWritingFlagCombos); ++i) {
SkBitmap bitmap;
@@ -783,8 +760,9 @@ public:
writer.endRecording();
SkString string("-tiled pipe");
string.append(gPipeWritingFlagCombos[i].name);
- errors |= compare_test_results_to_reference_bitmap(
- gm, gRec, string.c_str(), bitmap, &referenceBitmap);
+ errors |= handle_test_results(gm, gRec, NULL, NULL,
+ string.c_str(), bitmap, NULL,
+ &referenceBitmap);
if (errors != ERROR_NONE) {
break;
}
@@ -797,6 +775,9 @@ public:
// They are public for now, to allow easier setting by tool_main().
//
+ // if true, emit a message when we can't find a reference image to compare
+ bool fNotifyMissingReadReference;
+
bool fUseFileHierarchy;
const char* fMismatchPath;
@@ -804,11 +785,6 @@ public:
// information about all failed tests we have encountered so far
SkTArray<FailRec> fFailedTests;
- // Where to read expectations (expected image checksums, etc.) from.
- // If unset, we don't do comparisons.
- SkAutoTUnref<ExpectationsSource> fExpectationsSource;
-
- // JSON summaries that we generate as we go (just for output).
Json::Value fJsonExpectedResults;
Json::Value fJsonActualResults_Failed;
Json::Value fJsonActualResults_FailureIgnored;
@@ -1000,9 +976,6 @@ int tool_main(int argc, char** argv) {
const char* readPath = NULL; // if non-null, where we read from to compare
const char* resourcePath = NULL; // if non-null, where we read from for image resources
- // if true, emit a message when we can't find a reference image to compare
- bool notifyMissingReadReference = true;
-
SkTDArray<const char*> fMatches;
bool doPDF = true;
@@ -1067,7 +1040,7 @@ int tool_main(int argc, char** argv) {
} else if (strcmp(*argv, "--nodeferred") == 0) {
doDeferred = false;
} else if (strcmp(*argv, "--disable-missing-warning") == 0) {
- notifyMissingReadReference = false;
+ gmmain.fNotifyMissingReadReference = false;
} else if (strcmp(*argv, "--mismatchPath") == 0) {
argv++;
if (argv < stop && **argv) {
@@ -1096,7 +1069,7 @@ int tool_main(int argc, char** argv) {
return -1;
}
} else if (strcmp(*argv, "--enable-missing-warning") == 0) {
- notifyMissingReadReference = true;
+ gmmain.fNotifyMissingReadReference = true;
} else if (strcmp(*argv, "--forceBWtext") == 0) {
gForceBWtext = true;
} else if (strcmp(*argv, "--help") == 0 || strcmp(*argv, "-h") == 0) {
@@ -1211,21 +1184,7 @@ int tool_main(int argc, char** argv) {
GM::SetResourcePath(resourcePath);
if (readPath) {
- if (!sk_exists(readPath)) {
- fprintf(stderr, "readPath %s does not exist!\n", readPath);
- return -1;
- }
- if (sk_isdir(readPath)) {
- fprintf(stderr, "reading from %s\n", readPath);
- gmmain.fExpectationsSource.reset(SkNEW_ARGS(
- IndividualImageExpectationsSource,
- (readPath, notifyMissingReadReference)));
- } else {
- fprintf(stderr, "reading expectations from JSON summary file %s ",
- readPath);
- fprintf(stderr, "BUT WE DON'T KNOW HOW TO DO THIS YET!\n");
- return -1;
- }
+ fprintf(stderr, "reading from %s\n", readPath);
}
if (writePath) {
fprintf(stderr, "writing to %s\n", writePath);
@@ -1357,7 +1316,7 @@ int tool_main(int argc, char** argv) {
if (ERROR_NONE == renderErrors) {
renderErrors |= gmmain.test_drawing(gm, config, writePath,
- GetGr(),
+ readPath, GetGr(),
renderTarget,
&comparisonBitmap);
}
@@ -1392,8 +1351,11 @@ int tool_main(int argc, char** argv) {
SkBitmap bitmap;
gmmain.generate_image_from_picture(gm, compareConfig, pict,
&bitmap);
- pictErrors |= gmmain.compare_test_results_to_reference_bitmap(
- gm, compareConfig, "-replay", bitmap, &comparisonBitmap);
+ pictErrors |= gmmain.handle_test_results(gm, compareConfig,
+ NULL, NULL,
+ "-replay", bitmap,
+ NULL,
+ &comparisonBitmap);
}
if ((ERROR_NONE == testErrors) &&
@@ -1405,15 +1367,18 @@ int tool_main(int argc, char** argv) {
SkBitmap bitmap;
gmmain.generate_image_from_picture(gm, compareConfig, repict,
&bitmap);
- pictErrors |= gmmain.compare_test_results_to_reference_bitmap(
- gm, compareConfig, "-serialize", bitmap, &comparisonBitmap);
+ pictErrors |= gmmain.handle_test_results(gm, compareConfig,
+ NULL, NULL,
+ "-serialize", bitmap,
+ NULL,
+ &comparisonBitmap);
}
if (writePicturePath) {
const char* pictureSuffix = "skp";
- SkString path = make_filename(writePicturePath, "",
- gm->shortName(),
- pictureSuffix);
+ SkString path = gmmain.make_filename(writePicturePath, "",
+ SkString(gm->shortName()),
+ pictureSuffix);
SkFILEWStream stream(path.c_str());
pict->serialize(&stream);
}
@@ -1421,18 +1386,17 @@ int tool_main(int argc, char** argv) {
testErrors |= pictErrors;
}
- // TODO: add a test in which the RTree rendering results in a
- // different bitmap than the standard rendering. It should
- // show up as failed in the JSON summary, and should be listed
- // in the stdout also.
if (!(gmFlags & GM::kSkipPicture_Flag) && doRTree) {
SkPicture* pict = gmmain.generate_new_picture(gm, kRTree_BbhType);
SkAutoUnref aur(pict);
SkBitmap bitmap;
gmmain.generate_image_from_picture(gm, compareConfig, pict,
&bitmap);
- testErrors |= gmmain.compare_test_results_to_reference_bitmap(
- gm, compareConfig, "-rtree", bitmap, &comparisonBitmap);
+ testErrors |= gmmain.handle_test_results(gm, compareConfig,
+ NULL, NULL,
+ "-rtree", bitmap,
+ NULL,
+ &comparisonBitmap);
}
if (!(gmFlags & GM::kSkipPicture_Flag) && doTileGrid) {
@@ -1453,9 +1417,11 @@ int tool_main(int argc, char** argv) {
suffix += "-scale-";
suffix.appendScalar(replayScale);
}
- testErrors |= gmmain.compare_test_results_to_reference_bitmap(
- gm, compareConfig, suffix.c_str(), bitmap,
- &comparisonBitmap);
+ testErrors |= gmmain.handle_test_results(gm, compareConfig,
+ NULL, NULL,
+ suffix.c_str(), bitmap,
+ NULL,
+ &comparisonBitmap);
}
}
@@ -1466,14 +1432,16 @@ int tool_main(int argc, char** argv) {
if ((ERROR_NONE == testErrors) && doPipe) {
pipeErrors |= gmmain.test_pipe_playback(gm, compareConfig,
- comparisonBitmap);
+ comparisonBitmap,
+ readPath);
}
if ((ERROR_NONE == testErrors) &&
(ERROR_NONE == pipeErrors) &&
doTiledPipe && !(gmFlags & GM::kSkipTiled_Flag)) {
pipeErrors |= gmmain.test_tiled_pipe_playback(gm, compareConfig,
- comparisonBitmap);
+ comparisonBitmap,
+ readPath);
}
testErrors |= pipeErrors;
@@ -1482,10 +1450,10 @@ int tool_main(int argc, char** argv) {
// Update overall results.
// We only tabulate the particular error types that we currently
// care about (e.g., missing reference images). Later on, if we
- // want to also tabulate other error types, we can do so.
+ // want to also tabulate pixel mismatches vs dimension mismatches
+ // (or whatever else), we can do so.
testsRun++;
- if (!gmmain.fExpectationsSource.get() ||
- (ERROR_READING_REFERENCE_IMAGE & testErrors)) {
+ if (!readPath || (ERROR_READING_REFERENCE_IMAGE & testErrors)) {
testsMissingReferenceImages++;
} else if (ERROR_NONE == testErrors) {
testsPassed++;
diff --git a/gm/tests/outputs/compared-against-empty-dir/output-expected/json-summary.txt b/gm/tests/outputs/compared-against-empty-dir/output-expected/json-summary.txt
index 226b087a6c..509b0c2299 100644
--- a/gm/tests/outputs/compared-against-empty-dir/output-expected/json-summary.txt
+++ b/gm/tests/outputs/compared-against-empty-dir/output-expected/json-summary.txt
@@ -12,7 +12,7 @@
"expected-results" : {
"8888/dashing2" : {
"checksums" : null,
- "ignore-failure" : false
+ "ignore-failure" : true
}
}
}
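
The expected-results change in that last hunk follows from the reverted gmmain.cpp logic above: expectedChecksumsArray stays null when the reference PNG cannot be decoded, and ignore-failure becomes !decodedReferenceBitmap. A self-contained jsoncpp sketch of that corner (key strings are illustrative stand-ins for the kJsonKey_* constants):

    // jsoncpp sketch of the "missing reference image" case that produces the
    // summary above: no checksums appended, ignore-failure flipped to true.
    #include "json/value.h"
    #include "json/writer.h"
    #include <iostream>

    int main() {
        bool decodedReferenceBitmap = false;  // reference PNG was not found
        Json::Value expectedChecksumsArray;   // stays null: nothing appended
        if (decodedReferenceBitmap) {
            expectedChecksumsArray.append(Json::UInt64(0x1234567890abcdefULL));
        }

        Json::Value expectedResults;
        expectedResults["checksums"] = expectedChecksumsArray;
        expectedResults["ignore-failure"] = !decodedReferenceBitmap;

        Json::Value summary;
        summary["expected-results"]["8888/dashing2"] = expectedResults;

        // Prints a JSON object matching the json-summary.txt hunk above.
        std::cout << Json::StyledWriter().write(summary);
        return 0;
    }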